-
Notifications
You must be signed in to change notification settings - Fork 192
Expand file tree
/
Copy patho2dpg_sim_workflow.py
More file actions
executable file
·2143 lines (1888 loc) · 119 KB
/
o2dpg_sim_workflow.py
File metadata and controls
executable file
·2143 lines (1888 loc) · 119 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python3
#
# A script producing a consistent MC->RECO->AOD workflow
# It aims to handle the different MC possible configurations
# It just creates a workflow.json txt file, to execute the workflow one must execute right after
# ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json
#
# Execution examples:
# - pp PYTHIA jets, 2 events, triggered on high pT decay photons on all barrel calorimeters acceptance, eCMS 13 TeV
# ./o2dpg_sim_workflow.py -e TGeant3 -ns 2 -j 8 -tf 1 -col pp -eCM 13000 \
# -proc "jets" -ptHatBin 3 \
# -trigger "external" -ini "\$O2DPG_ROOT/MC/config/PWGGAJE/ini/trigger_decay_gamma_allcalo_TrigPt3_5.ini"
#
# - pp PYTHIA ccbar events embedded into heavy-ion environment, 2 PYTHIA events into 1 bkg event, beams energy 2.510
# ./o2dpg_sim_workflow.py -e TGeant3 -nb 1 -ns 2 -j 8 -tf 1 \
# -col pp -eA 2.510 -proc "ccbar" --embedding
#
# TODO:
# - pickup the final list of configKey values from the anchorConfig
import sys
import importlib.util
import argparse
from os import environ, mkdir
from os.path import join, dirname, isdir, isabs, isfile
import random
import json
import itertools
import math
import requests, re
from functools import lru_cache
pandas_available = True
try:
import pandas as pd
except (ImportError, ValueError): # ARM architecture has problems with pandas + numpy
pandas_available = False
sys.path.append(join(dirname(__file__), '.', 'o2dpg_workflow_utils'))
from o2dpg_workflow_utils import createTask, createGlobalInitTask, dump_workflow, adjust_RECO_environment, isActive, activate_detector, deactivate_detector, compute_n_workers, merge_dicts
from o2dpg_qc_finalization_workflow import include_all_QC_finalization
from o2dpg_sim_config import create_sim_config, create_geant_config, constructConfigKeyArg, option_if_available, overwrite_config
from o2dpg_dpl_config_tools import dpl_option_from_config, TaskFinalizer, quote_if_needed
# for some JAliEn interaction
from alienpy.alien import JAlien
# ---------------------------------------------------------------------------
# Command-line interface. Every option is declared here; args is consumed by
# the rest of the script as a global.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description='Create an ALICE (Run3) MC simulation workflow')

# the run-number of data taking or default if unanchored
parser.add_argument('-run', type=int, help="Run number for this MC. See https://twiki.cern.ch/twiki/bin/view/ALICE/O2DPGMCSamplingSchema for possible pre-defined choices.", default=300000)
parser.add_argument('-productionTag',help="Production tag for this MC", default='unknown')
# the timestamp at which this MC workflow will be run
# - in principle it should be consistent with the time of the "run" number above
# - some external tool should sample it within
# - we can also sample it ourselfs here
parser.add_argument('--timestamp', type=int, help="Anchoring timestamp (defaults to now)", default=-1)
parser.add_argument('--conditionDB',help="CCDB url for QC workflows", default='http://alice-ccdb.cern.ch')
parser.add_argument('--qcdbHost',help="QCDB url for QC object uploading", default='http://ali-qcdbmc-gpn.cern.ch:8083')
parser.add_argument('--condition-not-after', type=int, help="only consider CCDB objects not created after this timestamp (for TimeMachine)", default=3385078236000)
parser.add_argument('--orbitsPerTF', type=int, help="Timeframe size in number of LHC orbits", default=32)
parser.add_argument('--anchor-config',help="JSON file to contextualise workflow with external configs (config values etc.) for instance coming from data reco workflows.", default='')
parser.add_argument('--overwrite-config',help="extra JSON file with configs (config values etc.) overwriting defaults or the config coming from --anchor-config", default='')
parser.add_argument('--dump-config',help="Dump JSON file with all settings used in workflow", default='user_config.json')

# ---- signal generation options ----
parser.add_argument('-ns',type=int,help='number of signal events / timeframe', default=20)
parser.add_argument('-gen',help='generator: pythia8, extgen', default='')
parser.add_argument('-proc',help='process type: inel, dirgamma, jets, ccbar, ...', default='none')
parser.add_argument('-trigger',help='event selection: particle, external', default='')
parser.add_argument('-ini',help='generator init parameters file (full paths required), for example: ${O2DPG_ROOT}/MC/config/PWGHF/ini/GeneratorHF.ini', default='')
parser.add_argument('-confKey',help='o2sim, generator or trigger configuration key values, for example: "GeneratorPythia8.config=pythia8.cfg;A.x=y"', default='')
parser.add_argument('--detectorList',help='pick which version of ALICE should be simulated', default='ALICE2')
parser.add_argument('--readoutDets',help='comma separated string of detectors readout (does not modify material budget - only hit creation)', default='all')
parser.add_argument('--make-evtpool', help='Generate workflow for event pool creation.', action='store_true')

# ---- digitization / collision-system options ----
parser.add_argument('-interactionRate',help='Interaction rate, used in digitization', default=-1)
parser.add_argument('-bcPatternFile',help='Bunch crossing pattern file, used in digitization (a file name or "ccdb")', default='')
parser.add_argument('-meanVertexPerRunTxtFile',help='Txt file with mean vertex settings per run', default='')
parser.add_argument('-eCM',help='CMS energy', default=-1)
parser.add_argument('-eA',help='Beam A energy', default=-1) #6369 PbPb, 2.510 pp 5 TeV, 4 pPb
parser.add_argument('-eB',help='Beam B energy', default=-1)
parser.add_argument('-col',help='collision system: pp, PbPb, pPb, Pbp, ..., in case of embedding collision system of signal', default='pp')
parser.add_argument('-field',help='L3 field rounded to kGauss, allowed values: +-2,+-5 and 0; +-5U for uniform field; or "ccdb" to take from conditions database', default='ccdb')
parser.add_argument('--with-qed',action='store_true', help='Enable QED background contribution (for PbPb always included)')

# ---- pT-hat / weighting options ----
parser.add_argument('-ptHatMin',help='pT hard minimum when no bin requested', default=0)
parser.add_argument('-ptHatMax',help='pT hard maximum when no bin requested', default=-1)
parser.add_argument('-weightPow',help='Flatten pT hard spectrum with power', default=-1)

# ---- embedding options ----
parser.add_argument('--embedding',action='store_true', help='With embedding into background')
parser.add_argument('--embeddPattern',help='How signal is to be injected into background', default='@0:e1')
parser.add_argument('-nb',help='number of background events / timeframe', default=20)
parser.add_argument('-genBkg',help='embedding background generator', default='') #pythia8, not recomended: pythia8hi, pythia8pp
parser.add_argument('-procBkg',help='process type: inel, ..., do not set it for Pythia8 PbPb', default='heavy_ion')
parser.add_argument('-iniBkg',help='embedding background generator init parameters file (full path required)', default='${O2DPG_ROOT}/MC/config/common/ini/basic.ini')
parser.add_argument('-confKeyBkg',help='embedding background configuration key values, for example: "GeneratorPythia8.config=pythia8bkg.cfg"', default='')
parser.add_argument('-colBkg',help='embedding background collision system', default='PbPb')
parser.add_argument('-confKeyQED',help='Config key parameters influencing the QED background simulator', default='')

# ---- transport / workflow-shape options ----
parser.add_argument('-e',help='simengine', default='TGeant4', choices=['TGeant4', 'TGeant3', 'TFluka'])
parser.add_argument('-tf',type=int,help='number of timeframes', default=2)
parser.add_argument('--production-offset',help='Offset determining bunch-crossing '
                    + ' range within a (GRID) production. This number sets first orbit to '
                    + 'Offset x Number of TimeFrames x OrbitsPerTimeframe (up for further sophistication)', default=0)
parser.add_argument('-j', '--n-workers', dest='n_workers', help='number of workers (if applicable)', default=8, type=int)
parser.add_argument('--force-n-workers', dest='force_n_workers', action='store_true', help='by default, number of workers is re-computed '
                    'for given interaction rate; '
                    'pass this to avoid that')
parser.add_argument('--skipModules',nargs="*", help="List of modules to skip in geometry budget (and therefore processing)", default=["ZDC"])
parser.add_argument('--skipReadout',nargs="*", help="List of modules to take out from readout", default=[""])
parser.add_argument('--with-ZDC', action='store_true', help='Enable ZDC in workflow')
parser.add_argument('-seed',help='random seed number', default=None)
parser.add_argument('-o',help='output workflow file', default='workflow.json')
parser.add_argument('--noIPC',help='disable shared memory in DPL')

# arguments for background event caching
parser.add_argument('--upload-bkg-to',help='where to upload background event files (alien path)')
parser.add_argument('--use-bkg-from',help='take background event from given alien path')

# argument for early cleanup
parser.add_argument('--early-tf-cleanup',action='store_true', help='whether to cleanup intermediate artefacts after each timeframe is done')

# power features (for playing) --> does not appear in help message
#      help='Treat smaller sensors in a single digitization')
parser.add_argument('--pregenCollContext', action='store_true', help=argparse.SUPPRESS) # Now the default, giving this option or not makes not difference. We keep it for backward compatibility
parser.add_argument('--data-anchoring', type=str, default='', help="Take collision contexts (from data) stored in this path")
parser.add_argument('--no-combine-smaller-digi', action='store_true', help=argparse.SUPPRESS)
parser.add_argument('--no-combine-dpl-devices', action='store_true', help=argparse.SUPPRESS)
parser.add_argument('--no-mc-labels', action='store_true', default=False, help=argparse.SUPPRESS)
parser.add_argument('--no-tpc-digitchunking', action='store_true', help=argparse.SUPPRESS)
parser.add_argument('--no-strangeness-tracking', action='store_true', default=False, help="Disable strangeness tracking")
parser.add_argument('--combine-tpc-clusterization', action='store_true', help=argparse.SUPPRESS) #<--- useful for small productions (pp, low interaction rate, small number of events)
parser.add_argument('--first-orbit', default=256, type=int, help=argparse.SUPPRESS) # to set the first orbit number of the run for HBFUtils (only used when anchoring); default 256 for convenience to allow for some orbits-early
# (consider doing this rather in O2 digitization code directly)
parser.add_argument('--orbits-early', default=1, type=float, help=argparse.SUPPRESS) # number of orbits to start simulating earlier
# to reduce start of timeframe effects in MC --> affects collision context
parser.add_argument('--sor', default=-1, type=int, help=argparse.SUPPRESS) # may pass start of run with this (otherwise it is autodetermined from run number)
parser.add_argument('--run-anchored', action='store_true', help=argparse.SUPPRESS)
parser.add_argument('--alternative-reco-software', default="", help=argparse.SUPPRESS) # power feature to set CVFMS alienv software version for reco steps (different from default)
parser.add_argument('--dpl-child-driver', default="", help="Child driver to use in DPL processes (expert mode)")
parser.add_argument('--event-gen-mode', choices=['separated', 'integrated'], default='separated', help="Whether event generation is done before (separated) or within detector simulation (integrated).")

# QC related arguments
parser.add_argument('--include-qc', '--include-full-qc', action='store_true', help='includes QC in the workflow, both per-tf processing and finalization')
parser.add_argument('--include-local-qc', action='store_true', help='includes the per-tf QC, but skips the finalization (e.g. to allow for subjob merging first)')

# O2 Analysis related arguments
parser.add_argument('--include-analysis', '--include-an', '--analysis',
                    action='store_true', help='a flag to include O2 analysis in the workflow')

# MFT reconstruction configuration
parser.add_argument('--mft-reco-full', action='store_true', help='enables complete mft reco instead of simplified misaligned version')
parser.add_argument('--mft-assessment-full', action='store_true', help='enables complete assessment of mft reco')

# TPC options
parser.add_argument('--tpc-distortion-type', default=0, type=int, help='Simulate distortions in the TPC (0=no distortions, 1=distortions without scaling, 2=distortions with CTP scaling)')
parser.add_argument('--tpc-corrmap-lumi-mode', default=2, type=int, help='TPC corrections mode (0=linear, 1=derivative, 2=derivative for special MC maps')
parser.add_argument('--ctp-scaler', default=0, type=float, help='CTP raw scaler value used for distortion simulation')

# Global Forward reconstruction configuration
parser.add_argument('--fwdmatching-assessment-full', action='store_true', help='enables complete assessment of global forward reco')
parser.add_argument('--fwdmatching-4-param', action='store_true', help='excludes q/pt from matching parameters')
parser.add_argument('--fwdmatching-cut-4-param', action='store_true', help='apply selection cuts on position and angular parameters')

# Matching training for machine learning
parser.add_argument('--fwdmatching-save-trainingdata', action='store_true', help='enables saving parameters at plane for matching training with machine learning')

args = parser.parse_args()
print (args)
# make sure O2DPG + O2 is loaded
O2DPG_ROOT=environ.get('O2DPG_ROOT')
O2_ROOT=environ.get('O2_ROOT')
QUALITYCONTROL_ROOT=environ.get('QUALITYCONTROL_ROOT')
O2PHYSICS_ROOT=environ.get('O2PHYSICS_ROOT')

# NOTE(review): these checks only print; the exits are commented out, so the
# script continues even with missing environments (and may fail later) — confirm intended.
if O2DPG_ROOT == None:
    print('Error: This needs O2DPG loaded')
#   exit(1)

if O2_ROOT == None:
    print('Error: This needs O2 loaded')
#   exit(1)

if (args.include_qc or args.include_local_qc) and QUALITYCONTROL_ROOT is None:
    print('Error: Arguments --include-qc and --include-local-qc need QUALITYCONTROL_ROOT loaded')
#   exit(1)

if args.include_analysis and (QUALITYCONTROL_ROOT is None or O2PHYSICS_ROOT is None):
    print('Error: Argument --include-analysis needs O2PHYSICS_ROOT and QUALITYCONTROL_ROOT loaded')
#   exit(1)

# dynamically load the analysis-testing helper module from the O2DPG installation
# (it lives outside this script's directory, hence the importlib machinery)
module_name = "o2dpg_analysis_test_workflow"
spec = importlib.util.spec_from_file_location(module_name, join(O2DPG_ROOT, "MC", "analysis_testing", f"{module_name}.py"))
o2dpg_analysis_test_workflow = importlib.util.module_from_spec(spec)
sys.modules[module_name] = o2dpg_analysis_test_workflow
spec.loader.exec_module(o2dpg_analysis_test_workflow)
from o2dpg_analysis_test_workflow import add_analysis_tasks, add_analysis_qc_upload_tasks
# fetch an external configuration if given
# loads the workflow specification
def load_external_config(configfile):
    """Load a JSON configuration / workflow specification.

    :param configfile: path to a JSON file
    :return: the parsed JSON content (typically a dict)
    """
    # context manager ensures the file handle is closed deterministically
    # (the original opened the file and never closed it)
    with open(configfile) as fp:
        return json.load(fp)
# Build the final configuration ("anchorConfig"):
#   1. start from a generic simulation config,
#   2. optionally overlay an external (anchor) config,
#   3. optionally apply a final user overwrite config.
anchorConfig = {}
anchorConfig_generic = { "ConfigParams": create_sim_config(args) }
if args.anchor_config != '':
    print ("** Using external config **")
    anchorConfig = load_external_config(args.anchor_config)
    # adjust the anchorConfig with keys from the generic config, not mentioned in the external config
    # (useful for instance for digitization parameters or others not usually mentioned in async reco)
    for key in anchorConfig_generic["ConfigParams"]:
        if not key in anchorConfig["ConfigParams"]:
            print (f"Transcribing key {key} from generic config into final config")
            anchorConfig["ConfigParams"][key] = anchorConfig_generic["ConfigParams"][key]
else:
    # we load a generic config
    print ("** Using generic config **")
    anchorConfig = anchorConfig_generic

# we apply additional external user choices for the configuration
# this will overwrite config from earlier stages
if args.overwrite_config != '':
    # apply final JSON overwrite
    config_overwrite = load_external_config(args.overwrite_config)
    # let's make sure the configs follow the same format
    if ("ConfigParams" in anchorConfig) != ("ConfigParams" in config_overwrite):
        print ("Error: overwrite config not following same format as base config; Cannot merge")
        exit (1)
    # merge the dictionaries into anchorConfig, the latter takes precedence
    merge_dicts(anchorConfig, config_overwrite)

# We still may need adjust configurations manually for consistency:
#
# * Force simpler TPC digitization of if TPC reco does not have the mc-time-gain option:
async_envfile = 'env_async.env' if environ.get('ALIEN_JDL_O2DPG_ASYNC_RECO_TAG') is not None else None
tpcreco_mctimegain = option_if_available('o2-tpc-reco-workflow', '--tpc-mc-time-gain', envfile=async_envfile)
if tpcreco_mctimegain == '':
    # this was communicated by Jens Wiechula@TPC; avoids dEdX issue https://its.cern.ch/jira/browse/O2-5486 for the 2tag mechanism
    print ("TPC reco does not support --tpc-mc-time-gain. Adjusting some config for TPC digitization")
    overwrite_config(anchorConfig['ConfigParams'],'TPCGasParam','OxygenCont',5e-6)
    overwrite_config(anchorConfig['ConfigParams'],'TPCGEMParam','TotalGainStack',2000)
    overwrite_config(anchorConfig['ConfigParams'],'GPU_global','dEdxDisableResidualGain',1)

# TODO: put into it's own function for better modularity
# with the config, we'll create a task_finalizer functor
# this object takes care of customizing/finishing task command with externally given (anchor) config
task_finalizer = TaskFinalizer(anchorConfig, logger="o2dpg_config_replacements.log")

# write this config (for provenance / debugging)
config_key_param_path = args.dump_config
with open(config_key_param_path, "w") as f:
    print(f"INFO: Written additional config key parameters to JSON {config_key_param_path}")
    json.dump(anchorConfig, f, indent=2)
# Processing skipped material budget (modules):
# - If user did NOT specify --with-ZDC
# - AND ZDC is not already in the list
# --> append ZDC automatically
if args.with_ZDC:
    # User wants ZDC to *not* be skipped → ensure it's removed
    args.skipModules = [m for m in args.skipModules if m != "ZDC"]
else:
    # If user did not request --with-ZDC,
    # auto-append ZDC unless already present
    if "ZDC" not in args.skipModules:
        args.skipModules.append("ZDC")
# with this we can tailor the workflow to the presence of
# certain detectors
# these are all detectors that should be assumed active
readout_detectors = args.readoutDets
# here are all detectors that have been set in an anchored script
activeDetectors = dpl_option_from_config(anchorConfig, 'o2-ctf-reader-workflow', key='--onlyDet', default_value='all')
if activeDetectors == 'all':
    # if "all" here, there was in fact nothing in the anchored script, set to what is passed to this script (which it either also "all" or a subset)
    activeDetectors = readout_detectors
elif readout_detectors != 'all' and activeDetectors != 'all':
    # in this case both are comma-separated lists. Take intersection
    r = set(readout_detectors.split(','))
    a = set(activeDetectors.split(','))
    activeDetectors = ','.join(r & a)
# the last case: simply take what comes from the anchored config

# convert to set/hashmap, dropping anything the user asked to skip
# (value 1 is a dummy; only key membership matters)
activeDetectors = { det:1 for det in activeDetectors.split(',') if det not in args.skipModules and det not in args.skipReadout}
for det in activeDetectors:
    activate_detector(det)
def cleanDetectorInputList(inputlist):
    """Filter a detector source list down to active detectors.

    The input is a comma-separated list whose entries are single detectors
    (DET1) or combinations joined by '-' (DET1-DET2). An entry survives
    only if every detector it mentions is currently active.

    :param inputlist: comma-separated source list string
    :return: comma-separated string containing only the surviving entries
    """
    kept = []
    for source in inputlist.split(","):
        # a combined source requires all of its constituents to be active
        constituents = source.split("-")
        if all(isActive(det) for det in constituents):
            kept.append(source)
    return ",".join(kept)
if not args.with_ZDC:
    # deactivate to be able to use isActive consistently for ZDC
    deactivate_detector('ZDC')
    # also drop it from the active-detector map built above
    if 'ZDC' in activeDetectors:
        del activeDetectors['ZDC']
def addWhenActive(detID, needslist, appendstring):
    """Append appendstring to needslist, but only if detector detID is active."""
    if not isActive(detID):
        return
    needslist.append(appendstring)
def retrieve_sor(run_number):
    """
    Retrieve the start-of-run (sor) timestamp for run_number
    from the RCT/Info/RunInformation table with a simple http request.

    In case of problems, 0 will be returned. A simple http request has the
    advantage of not needing to initialize a Ccdb object.

    Values are tried in order of precedence STF, SOX, SOR, mirroring the
    implementation in BasicCCDBManager::getRunDuration.
    """
    url = "http://alice-ccdb.cern.ch/browse/RCT/Info/RunInformation/" + str(run_number)
    ansobject = requests.get(url)
    tokens = ansobject.text.split("\n")

    def extract(key):
        # find the first line of the form "<key> = <number>" and return the
        # number; 0 when no such line exists.
        # ([0-9]+ instead of [0-9]* so an empty match cannot crash int())
        pattern = re.compile(r"\s*(" + key + r"\s*=\s*)([0-9]+)\s*")
        for t in tokens:
            match_object = pattern.match(t)
            if match_object is not None:
                return int(match_object[2])
        return 0

    # earlier keys take precedence; fall through when a key is absent/zero.
    # (fixes a copy-paste bug where the SOX pass matched the STF pattern,
    # so SOX was never actually extracted)
    for key in ("STF", "SOX", "SOR"):
        value = extract(key)
        if value > 0:
            return value
    return 0
# check and sanitize config-key values (extract and remove diamond vertex arguments into finalDiamondDict)
def extractVertexArgs(configKeyValuesStr, finalDiamondDict):
    """Collect Diamond (interaction-vertex) key=value settings into finalDiamondDict.

    :param configKeyValuesStr: ';'-separated config-key string, e.g. "Diamond.width[0]=0.01;A.x=y"
    :param finalDiamondDict: dict updated in place with the Diamond entries found

    A key seen more than once must always carry the same value; an
    inconsistent repetition aborts the workflow creation (sys.exit(1)).
    """
    # tokenize configKeyValueStr on ;
    for t in configKeyValuesStr.split(';'):
        if "Diamond" not in t:
            continue
        # split only on the first '=' so values containing '=' don't crash the unpack
        left, right = t.split("=", 1)
        value = finalDiamondDict.get(left, None)
        if value is None:
            finalDiamondDict[left] = right
        elif value != right:
            # we have seen this before, check if consistent right hand side, otherwise crash
            print("Inconsistent repetition in Diamond values; Aborting")
            sys.exit(1)
# gather Diamond settings from both signal and background config keys;
# extractVertexArgs aborts on inconsistent repetitions
vertexDict = {}
extractVertexArgs(args.confKey, vertexDict)
extractVertexArgs(args.confKeyBkg, vertexDict)
CONFKEYMV=""
# rebuild vertex only config-key string
for e in vertexDict:
    if len(CONFKEYMV) > 0:
        CONFKEYMV+=';'
    CONFKEYMV+=str(e) + '=' + str(vertexDict[e])

print ("Diamond is " + CONFKEYMV)

# Recover mean vertex settings from external txt file
if (pandas_available):
    if len(args.meanVertexPerRunTxtFile) > 0:
        # mutually exclusive with Diamond settings given via confKey
        if len(CONFKEYMV) > 0:
            print("confKey already sets diamond, stop!")
            sys.exit(1)
        df = pd.read_csv(args.meanVertexPerRunTxtFile, delimiter="\t", header=None) # for tabular
        df.columns = ["runNumber", "vx", "vy", "vz", "sx", "sy", "sz"]
        #print(df) # print full table
        # pick the row matching this run number
        MV_SX = float(df.loc[df['runNumber'].eq(args.run), 'sx'])
        MV_SY = float(df.loc[df['runNumber'].eq(args.run), 'sy'])
        MV_SZ = float(df.loc[df['runNumber'].eq(args.run), 'sz'])
        MV_VX = float(df.loc[df['runNumber'].eq(args.run), 'vx'])
        MV_VY = float(df.loc[df['runNumber'].eq(args.run), 'vy'])
        MV_VZ = float(df.loc[df['runNumber'].eq(args.run), 'vz'])
        print("** Using mean vertex parameters from file",args.meanVertexPerRunTxtFile,"for run =",args.run,
              ": \n \t vx =",MV_VX,", vy =",MV_VY,", vz =",MV_VZ,",\n \t sx =",MV_SX,", sy =",MV_SY,", sz =",MV_SZ)
        CONFKEYMV='Diamond.width[2]='+str(MV_SZ)+';Diamond.width[1]='+str(MV_SY)+';Diamond.width[0]='+str(MV_SX)+';Diamond.position[2]='+str(MV_VZ)+';Diamond.position[1]='+str(MV_VY)+';Diamond.position[0]='+str(MV_VX)+';'
        # propagate to both signal and background config keys
        args.confKey=args.confKey + CONFKEYMV
        args.confKeyBkg=args.confKeyBkg + CONFKEYMV
        print("** confKey args + MeanVertex:",args.confKey)
else:
    print ("Pandas not available. Not reading mean vertex from external file")
# ----------- START WORKFLOW CONSTRUCTION -----------------------------

# set the time to start of run (if no timestamp specified)
if args.sor==-1:
    args.sor = retrieve_sor(args.run)
    assert (args.sor != 0)

if args.timestamp==-1:
    args.timestamp = args.sor

NTIMEFRAMES=int(args.tf)
NWORKERS=args.n_workers
# stringified "--skipModules M1 M2 ..." option for the transport command
SKIPMODULES = " ".join(["--skipModules"] + args.skipModules) if len(args.skipModules) > 0 else ""
SIMENGINE=args.e
BFIELD=args.field
RNDSEED=args.seed    # typically the argument should be the jobid, but if we get None the current time is used for the initialisation
random.seed(RNDSEED)
print ("Using initialisation seed: ", RNDSEED)
SIMSEED = random.randint(1, 900000000 - NTIMEFRAMES - 1) # PYTHIA maximum seed is 900M for some reason

# ---- initialize global (physics variables) for signal parts ----
ECMS=float(args.eCM)
EBEAMA=float(args.eA)
EBEAMB=float(args.eB)
NSIGEVENTS=args.ns
GENERATOR=args.gen
if GENERATOR =='':
    print('o2dpg_sim_workflow: Error! generator name not provided')
    exit(1)

INIFILE=''
if args.ini!= '':
    INIFILE=' --configFile ' + args.ini
PROCESS=args.proc
TRIGGER=''
if args.trigger != '':
    TRIGGER=' -t ' + args.trigger

## Pt Hat productions
WEIGHTPOW=float(args.weightPow)
PTHATMIN=float(args.ptHatMin)
PTHATMAX=float(args.ptHatMax)

# map of known collision systems to the [beam A, beam B] PDG codes
colsys = {'pp':[2212,2212], 'pPb':[2212,1000822080], 'Pbp':[1000822080,2212], 'PbPb':[1000822080,1000822080], 'pO':[2212,1000080160], 'Op':[1000080160,2212], 'HeO':[1000020040,1000080160], 'OHe':[1000080160,1000020040], 'OO':[1000080160,1000080160], 'NeNe':[1000100200,1000100200]}

# translate here collision type to PDG of allowed particles
COLTYPE=args.col
if COLTYPE in colsys.keys():
    PDGA=colsys[COLTYPE][0]
    PDGB=colsys[COLTYPE][1]
else:
    print('o2dpg_sim_workflow: Error! Unknown collision system %s' % COLTYPE)
    exit(1)

doembedding=True if args.embedding=='True' or args.embedding==True else False

# If not set previously, set beam energy B equal to A
if EBEAMB < 0 and ECMS < 0:
    EBEAMB=EBEAMA
    print('o2dpg_sim_workflow: Set beam energy same in A and B beams')
    if PDGA != PDGB:
        print('o2dpg_sim_workflow: Careful! Set same energies for different particle beams!')

if ECMS > 0:
    if PDGA != PDGB:
        print('o2dpg_sim_workflow: Careful! ECM set for for different particle beams!')

if ECMS < 0 and EBEAMA < 0 and EBEAMB < 0:
    print('o2dpg_sim_workflow: Error! CM or Beam Energy not set!!!')
    exit(1)

# Determine interaction rate
INTRATE=int(args.interactionRate)
if INTRATE <= 0:
    print('o2dpg_sim_workflow: Error! Interaction rate not >0 !!!')
    exit(1)
BCPATTERN=args.bcPatternFile

# ----- global background specific stuff -------
COLTYPEBKG=args.colBkg
havePbPb = (COLTYPE == 'PbPb' or (doembedding and COLTYPEBKG == "PbPb"))

# the workflow is a flat list of task dicts ("stages")
workflow={}
workflow['stages'] = []

### setup global environment variables which are valid for all tasks
global_env = {'ALICEO2_CCDB_CONDITION_NOT_AFTER': args.condition_not_after} if args.condition_not_after else None
globalinittask = createGlobalInitTask(global_env)
globalinittask['cmd'] = 'o2-ccdb-cleansemaphores -p ${ALICEO2_CCDB_LOCALCACHE}'
workflow['stages'].append(globalinittask)
####

usebkgcache=args.use_bkg_from!=None
includeFullQC=args.include_qc=='True' or args.include_qc==True
includeLocalQC=args.include_local_qc=='True' or args.include_local_qc==True
includeAnalysis = args.include_analysis
includeTPCResiduals=True if environ.get('ALIEN_JDL_DOTPCRESIDUALEXTRACTION') == '1' else False
includeTPCSyncMode=True if environ.get('ALIEN_JDL_DOTPCSYNCMODE') == '1' else False
ccdbRemap = environ.get('ALIEN_JDL_REMAPPINGS')

qcdir = "QC"
if (includeLocalQC or includeFullQC) and not isdir(qcdir):
    mkdir(qcdir)
def getDPL_global_options(bigshm=False, ccdbbackend=True, runcommand=True):
    """Assemble the command-line options shared by all DPL workflow invocations.

    bigshm     -- request a large shared-memory segment (ignored when IPC is off)
    ccdbbackend -- append the global --condition-not-after constraint
    runcommand -- include batch/run switches (-b --run)
    """
    opts = " "
    if runcommand:
        opts += ' -b --run '
    if len(args.dpl_child_driver) > 0:
        opts += ' --child-driver ' + str(args.dpl_child_driver)
    if ccdbbackend:
        opts += " --condition-not-after " + str(args.condition_not_after)
    if ccdbRemap is not None:
        opts += f" --condition-remap {quote_if_needed(ccdbRemap)} "
    if args.noIPC is not None:
        # IPC disabled --> shared-memory sizing is irrelevant
        return opts + " --no-IPC "
    if bigshm:
        return opts + " --shm-segment-size ${SHMSIZE:-50000000000} "
    return opts
# prefetch the aligned geometry object (for use in reconstruction)
GEOM_PREFETCH_TASK = createTask(name='geomprefetch', cpu='0')
# We need to query the config if this is done with or without parallel world. This needs to be improved
# as it could be defaulted in the ConfigKey system
with_parallel_world = 1 if args.confKey.find("useParallelWorld=1") != -1 else 0
# shell script that prepares the geometry objects in the local CCDB cache;
# written to a file so that it can be inspected / rerun by hand if needed
geom_cmd = f'''
# -- Create aligned geometry using ITS ideal alignment to avoid overlaps in geant
ENABLEPW={with_parallel_world}
# when parallel world processing is disabled we need to switch off ITS alignment
if [ "${{ENABLEPW}}" == "0" ]; then
CCDBOBJECTS_IDEAL_MC="ITS/Calib/Align"
TIMESTAMP_IDEAL_MC=1
${{O2_ROOT}}/bin/o2-ccdb-downloadccdbfile --host http://alice-ccdb.cern.ch/ -p ${{CCDBOBJECTS_IDEAL_MC}} \
-d ${{ALICEO2_CCDB_LOCALCACHE}} --timestamp ${{TIMESTAMP_IDEAL_MC}}
CCDB_RC="$?"
if [ ! "${{CCDB_RC}}" == "0" ]; then
echo "Problem during CCDB prefetching of ${{CCDBOBJECTS_IDEAL_MC}}. Exiting."
exit ${{CCDB_RC}}
fi
fi
if [ "$ENABLEPW" == "0" ]; then
REMAP_OPT="--condition-remap=file://${{ALICEO2_CCDB_LOCALCACHE}}=ITS/Calib/Align"
else
REMAP_OPT=""
fi
# fetch the global alignment geometry
${{O2_ROOT}}/bin/o2-create-aligned-geometry-workflow ${{ALIEN_JDL_CCDB_CONDITION_NOT_AFTER:+--condition-not-after $ALIEN_JDL_CCDB_CONDITION_NOT_AFTER}} \
--configKeyValues "HBFUtils.startTime={args.timestamp}" -b --run ${{REMAP_OPT}}
# copy the object into the CCDB cache
mkdir -p $ALICEO2_CCDB_LOCALCACHE/GLO/Config/GeometryAligned
ln -s -f $PWD/o2sim_geometry-aligned.root $ALICEO2_CCDB_LOCALCACHE/GLO/Config/GeometryAligned/snapshot.root
if [ "$ENABLEPW" == "0" ]; then
[[ -f $PWD/its_GeometryTGeo.root ]] && mkdir -p $ALICEO2_CCDB_LOCALCACHE/ITS/Config/Geometry && ln -s -f $PWD/its_GeometryTGeo.root $ALICEO2_CCDB_LOCALCACHE/ITS/Config/Geometry/snapshot.root
fi
# MFT
[[ -f $PWD/mft_GeometryTGeo.root ]] && mkdir -p $ALICEO2_CCDB_LOCALCACHE/MFT/Config/Geometry && ln -s -f $PWD/mft_GeometryTGeo.root $ALICEO2_CCDB_LOCALCACHE/MFT/Config/Geometry/snapshot.root
'''
with open("geomprefetcher_script.sh",'w') as f:
    f.write(geom_cmd)
GEOM_PREFETCH_TASK['cmd'] = 'chmod +x ${PWD}/geomprefetcher_script.sh; ${PWD}/geomprefetcher_script.sh'
workflow['stages'].append(GEOM_PREFETCH_TASK)
# create/publish the GRPs and other GLO objects for consistent use further down the pipeline
orbitsPerTF = int(args.orbitsPerTF)
GRP_TASK = createTask(name='grpcreate', needs=["geomprefetch"], cpu='0')
GRP_TASK['cmd'] = (f'o2-grp-simgrp-tool createGRPs --timestamp {args.timestamp}'
                   f' --run {args.run} --publishto ${{ALICEO2_CCDB_LOCALCACHE:-.ccdb}} -o grp'
                   f' --hbfpertf {orbitsPerTF} --field {args.field}')
GRP_TASK['cmd'] += (f' --detectorList {args.detectorList}'
                    + ' --readoutDets ' + " ".join(activeDetectors)
                    + ' --print ' + ('--lhcif-CCDB' if args.run_anchored else ''))
if len(args.bcPatternFile) > 0:
    GRP_TASK['cmd'] += f' --bcPatternFile {args.bcPatternFile}'
if len(CONFKEYMV) > 0:
    # this is allowing the possibility to setup/use a different MeanVertex object than the one from CCDB
    GRP_TASK['cmd'] += ' --vertex Diamond --configKeyValues "' + CONFKEYMV + '"'
workflow['stages'].append(GRP_TASK)
# QED is enabled only for same beam species for now (and never for proton beams);
# it can also be forced explicitly from the command line
QED_enabled = (PDGA == PDGB and PDGA != 2212)
includeQED = QED_enabled or args.with_qed == True

# common prefix for all signal products
signalprefix = 'sgn'

# No vertexing for event pool generation; otherwise the vertex comes from CCDB and later from CollContext
# (Note that the CCDB case covers the kDiamond case, since this is picked up in GRP_TASK)
vtxmode_precoll = 'kNoVertex' if args.make_evtpool else 'kCCDB'
vtxmode_sgngen = 'kCollContext'

# preproduce the collision context / timeframe structure for all timeframes at once
precollneeds = [GRP_TASK['name']]

# max number of QED events simulated per timeframe.
# A large pool of QED events (0.6*INTRATE) is needed to avoid repetition of events in the same or
# neighbouring ITS readout frames, which would fire already activated pixel, discarding the event.
# Discussed in detail in https://its.cern.ch/jira/browse/O2-5861
NEventsQED = max(10000, int(INTRATE * 0.6))

# Hadronic cross section values are taken from Glauber MC
XSecSys = {'PbPb': 8., 'OO': 1.273, 'NeNe': 1.736}
# QED cross section values were calculated with TEPEMGEN
# OO and NeNe at 5.36 TeV, while the old PbPb value was kept as before
# If the collision energy changes these values need to be updated
# More info on the calculation can be found in the TEPEMGEN folder of AEGIS
# specifically in the epemgen.f file
QEDXSecExpected = {'PbPb': 35237.5, 'OO': 3.17289, 'NeNe': 7.74633}  # expected magnitude of QED cross section from TEPEMGEN
# atomic number of the colliding species
Zsys = {'PbPb': 82, 'OO': 8, 'NeNe': 10}
# task producing the collision context (bunch-crossing/interaction record) for the whole production
PreCollContextTask=createTask(name='precollcontext', needs=precollneeds, cpu='1')
# adapt timeframeID + orbits + seed + qed
# apply max-collisision offset
# apply vertexing
# interaction specification has the form "<prefix>,<rate>,<total>:<perTF>" per collision source
interactionspecification = signalprefix + ',' + str(INTRATE) + ',' + str(1000000) + ':' + str(1000000)
if doembedding:
    # with embedding the background drives the collision record and the signal follows the embed pattern
    interactionspecification = 'bkg,' + str(INTRATE) + ',' + str(NTIMEFRAMES*args.ns) + ':' + str(args.nb) + ' ' + signalprefix + ',' + args.embeddPattern
PreCollContextTask['cmd']='${O2_ROOT}/bin/o2-steer-colcontexttool -i ' + interactionspecification \
    + ' --show-context ' \
    + ' --timeframeID ' + str(int(args.production_offset)*NTIMEFRAMES) \
    + ' --orbitsPerTF ' + str(orbitsPerTF) \
    + ' --orbits ' + str(NTIMEFRAMES * (orbitsPerTF)) \
    + ' --seed ' + str(RNDSEED) \
    + ' --noEmptyTF --first-orbit ' + str(args.first_orbit) \
    + ' --extract-per-timeframe tf:sgn' \
    + ' --with-vertices ' + vtxmode_precoll \
    + ' --maxCollsPerTF ' + str(args.ns) \
    + ' --orbitsEarly ' + str(args.orbits_early) \
    + ('',f" --import-external {args.data_anchoring}")[len(args.data_anchoring) > 0]
PreCollContextTask['cmd'] += ' --bcPatternFile ccdb' # <--- the object should have been set in (local) CCDB
if includeQED:
    if PDGA==2212 or PDGB==2212:
        # QED is not enabled for pp and pA collisions
        print('o2dpg_sim_workflow: Warning! QED is not enabled for pp or pA collisions')
        includeQED = False
    else:
        # attach the QED interaction source with its own (much higher) rate
        qedrate = INTRATE * QEDXSecExpected[COLTYPE] / XSecSys[COLTYPE] # hadronic interaction rate * cross_section_ratio
        qedspec = 'qed' + ',' + str(qedrate) + ',10000000:' + str(NEventsQED)
        PreCollContextTask['cmd'] += ' --QEDinteraction ' + qedspec
workflow['stages'].append(PreCollContextTask)
# ----- embedding: either transport the background events locally or download them from a cache -----
if doembedding:
    if not usebkgcache:
        # ---- do background transport task -------
        NBKGEVENTS=args.nb
        GENBKG=args.genBkg
        if GENBKG =='':
            print('o2dpg_sim_workflow: Error! embedding background generator name not provided')
            exit(1)

        # PDG translation for background
        if COLTYPEBKG in colsys.keys():
            PDGABKG=colsys[COLTYPEBKG][0]
            PDGBBKG=colsys[COLTYPEBKG][1]
        else:
            print('o2dpg_sim_workflow: Error! Unknown background collision system %s' % COLTYPEBKG)
            exit(1)

        PROCESSBKG=args.procBkg
        ECMSBKG=float(args.eCM)
        EBEAMABKG=float(args.eA)
        EBEAMBBKG=float(args.eB)

        if COLTYPEBKG == 'PbPb':
            if ECMSBKG < 0: # assign 5.02 TeV to Pb-Pb
                print('o2dpg_sim_workflow: Set BKG CM Energy to PbPb case 5.02 TeV')
                ECMSBKG=5020.0
            if GENBKG == 'pythia8' and PROCESSBKG != 'heavy_ion':
                # Pythia8 PbPb always runs the heavy-ion machinery
                PROCESSBKG = 'heavy_ion'
                print('o2dpg_sim_workflow: Process type not considered for Pythia8 PbPb')

        # If not set previously, set beam energy B equal to A
        if EBEAMBBKG < 0 and ECMSBKG < 0:
            EBEAMBBKG=EBEAMABKG
            print('o2dpg_sim_workflow: Set beam energy same in A and B beams')
            if PDGABKG != PDGBBKG:
                print('o2dpg_sim_workflow: Careful! Set same energies for different background beams!')
        if ECMSBKG > 0:
            if PDGABKG != PDGBBKG:
                print('o2dpg_sim_workflow: Careful! ECM set for different background beams!')
        # at least one way of specifying the background collision energy must be given
        if ECMSBKG < 0 and EBEAMABKG < 0 and EBEAMBBKG < 0:
            print('o2dpg_sim_workflow: Error! bkg ECM or Beam Energy not set!!!')
            exit(1)

        # Background PYTHIA configuration
        BKG_CONFIG_task=createTask(name='genbkgconf')
        BKG_CONFIG_task['cmd'] = 'echo "placeholder / dummy task"'
        if GENBKG == 'pythia8':
            print('Background generator seed: ', SIMSEED)
            BKG_CONFIG_task['cmd'] = '${O2DPG_ROOT}/MC/config/common/pythia8/utils/mkpy8cfg.py \
--output=pythia8bkg.cfg \
--seed='+str(SIMSEED)+' \
--idA='+str(PDGABKG)+' \
--idB='+str(PDGBBKG)+' \
--eCM='+str(ECMSBKG)+' \
--eA='+str(EBEAMABKG)+' \
--eB='+str(EBEAMBBKG)+' \
--process='+str(PROCESSBKG)
            # if we configure pythia8 here --> we also need to adjust the configuration
            # TODO: we need a proper config container/manager so as to combine these local configs with external configs etc.
            args.confKeyBkg = 'GeneratorPythia8.config=pythia8bkg.cfg;' + args.confKeyBkg
        workflow['stages'].append(BKG_CONFIG_task)

        # background task configuration
        INIBKG=''
        if args.iniBkg!= '':
            INIBKG=' --configFile ' + args.iniBkg

        # determine final configKey values for background transport
        CONFKEYBKG = constructConfigKeyArg(create_geant_config(args, args.confKeyBkg))

        bkgsimneeds = [BKG_CONFIG_task['name'], GRP_TASK['name'], PreCollContextTask['name']]
        BKGtask=createTask(name='bkgsim', lab=["GEANT"], needs=bkgsimneeds, cpu=NWORKERS)
        BKGtask['cmd']='${O2_ROOT}/bin/o2-sim -e ' + SIMENGINE + ' -j ' + str(NWORKERS) + ' -n ' + str(NBKGEVENTS) \
            + ' -g ' + str(GENBKG) + ' ' + str(SKIPMODULES) + ' -o bkg ' + str(INIBKG) \
            + ' --field ccdb ' + str(CONFKEYBKG) \
            + ('',' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] + ' --run ' + str(args.run) \
            + ' --vertexMode ' + vtxmode_sgngen \
            + ' --fromCollContext collisioncontext.root:bkg ' \
            + ' --detectorList ' + args.detectorList
        if not isActive('all'):
            BKGtask['cmd'] += ' --readoutDetectors ' + " ".join(activeDetectors)
        workflow['stages'].append(BKGtask)

        # check if we should upload background event
        if args.upload_bkg_to!=None:
            BKGuploadtask=createTask(name='bkgupload', needs=[BKGtask['name']], cpu='0')
            BKGuploadtask['cmd']='alien.py mkdir ' + args.upload_bkg_to + ';'
            BKGuploadtask['cmd']+='alien.py cp -f bkg* ' + args.upload_bkg_to + ';'
            workflow['stages'].append(BKGuploadtask)

    else:
        # here we are reusing existing background events from ALIEN
        # when using background caches, we have multiple smaller tasks
        # this split makes sense as they are needed at different stages
        # 1: --> download bkg_MCHeader.root + grp + geometry
        # 2: --> download bkg_Hit files (individually)
        # 3: --> download bkg_Kinematics
        # (A problem with individual copying might be higher error probability but
        #  we can introduce a "retry" feature in the copy process)

        # Step 1: header and link files
        BKG_HEADER_task=createTask(name='bkgdownloadheader', cpu='0', lab=['BKGCACHE'])
        BKG_HEADER_task['cmd']='alien.py cp ' + args.use_bkg_from + 'bkg_MCHeader.root .'
        BKG_HEADER_task['cmd']=BKG_HEADER_task['cmd'] + ';alien.py cp ' + args.use_bkg_from + 'bkg_geometry.root .'
        BKG_HEADER_task['cmd']=BKG_HEADER_task['cmd'] + ';alien.py cp ' + args.use_bkg_from + 'bkg_grp.root .'
        workflow['stages'].append(BKG_HEADER_task)
# a list of smaller sensors (used to construct digitization tasks in a parametrized way)
smallsensorlist = ["ITS", "TOF", "FDD", "MCH", "MID", "MFT", "HMP", "PHS", "CPV", "ZDC"]
if args.detectorList == 'ALICE2.1':
    # the ALICE2.1 setup replaces ITS by its upgrade IT3
    smallsensorlist = [('IT3' if s == 'ITS' else s) for s in smallsensorlist]
# a list of detectors that serve as input for the trigger processor CTP --> these need to be processed together for now
ctp_trigger_inputlist = ["FT0", "FV0", "EMC"]

# per-detector background hit download tasks (only populated when a bkg cache is used)
BKG_HITDOWNLOADER_TASKS = {}
for detector in ['TPC', 'TRD'] + smallsensorlist + ctp_trigger_inputlist:
    if not usebkgcache:
        BKG_HITDOWNLOADER_TASKS[detector] = None
        continue
    hit_task = createTask(str(detector) + 'hitdownload', cpu='0', lab=['BKGCACHE'])
    hit_task['cmd'] = 'alien.py cp ' + args.use_bkg_from + 'bkg_Hits' + str(detector) + '.root .'
    BKG_HITDOWNLOADER_TASKS[detector] = hit_task
    workflow['stages'].append(hit_task)

if usebkgcache:
    # the background kinematics come as one single file
    BKG_KINEDOWNLOADER_TASK = createTask(name='bkgkinedownload', cpu='0', lab=['BKGCACHE'])
    BKG_KINEDOWNLOADER_TASK['cmd'] = 'alien.py cp ' + args.use_bkg_from + 'bkg_Kine.root .'
    workflow['stages'].append(BKG_KINEDOWNLOADER_TASK)
# We download some binary files, necessary for processing
# Eventually, these files/objects should be queried directly from within these tasks?

# Fix (residual) geometry alignment for simulation stage
# Detectors that prefer to apply special alignments (for example residual effects) should be listed here and download these files.
# These object will take precedence over ordinary align objects **and** will only be applied in transport simulation
# and digitization (Det/Calib/Align is only read in simulation since reconstruction tasks use GLO/Config/AlignedGeometry automatically).
def _misalign_download_cmd(det):
    # build the download command placing a detector's MisCalib/Align object into the local CCDB cache
    return ('${O2_ROOT}/bin/o2-ccdb-downloadccdbfile --host http://alice-ccdb.cern.ch -p '
            + det + '/MisCalib/Align --timestamp ' + str(args.timestamp)
            + ' --created-not-after ' + str(args.condition_not_after)
            + ' -d ${ALICEO2_CCDB_LOCALCACHE}/' + det + '/Calib/Align --no-preserve-path ')

SIM_ALIGNMENT_PREFETCH_TASK = createTask(name='sim_alignment', cpu='0')
SIM_ALIGNMENT_PREFETCH_TASK['cmd'] = _misalign_download_cmd('MID') + '; ' + _misalign_download_cmd('MCH')
workflow['stages'].append(SIM_ALIGNMENT_PREFETCH_TASK)

# query initial configKey args for signal transport; mainly used to setup generators
simInitialConfigKeys = create_geant_config(args, args.confKey)
# loop over timeframes
# NOTE: the loop body continues well beyond this section (digitization/reco tasks follow)
for tf in range(1, NTIMEFRAMES + 1):
    # each timeframe gets a deterministic, distinct seed and a dedicated working directory
    TFSEED = SIMSEED + tf
    print("Timeframe " + str(tf) + " seed: ", TFSEED)
    timeframeworkdir='tf'+str(tf)

    # ---- transport task -------
    # produce QED background for PbPb collissions
    QEDdigiargs = ""
    if includeQED:
        qedneeds=[GRP_TASK['name'], PreCollContextTask['name']]
        QED_task=createTask(name='qedsim_'+str(tf), needs=qedneeds, tf=tf, cwd=timeframeworkdir, cpu='1')
        ########################################################################################################
        #
        # ATTENTION: CHANGING THE PARAMETERS/CUTS HERE MIGHT INVALIDATE THE QED INTERACTION RATES USED ELSEWHERE
        #
        ########################################################################################################
        # determine final conf key for QED simulation
        QEDBaseConfig = "GeneratorExternal.fileName=$O2_ROOT/share/Generators/external/QEDLoader.C;QEDGenParam.yMin=-7;QEDGenParam.yMax=7;QEDGenParam.ptMin=0.001;QEDGenParam.ptMax=1.;QEDGenParam.xSectionHad="+str(XSecSys[COLTYPE])+";QEDGenParam.Z="+str(Zsys[COLTYPE])+";QEDGenParam.cmEnergy="+str(ECMS)+";Diamond.width[2]=6.;"
        QEDCONFKEY = constructConfigKeyArg(create_geant_config(args, QEDBaseConfig + args.confKeyQED))
        # QED electrons only need to be tracked through this reduced detector list
        qed_detectorlist = ' ITS MFT FT0 FV0 FDD '
        if args.detectorList == 'ALICE2.1':
            qed_detectorlist = qed_detectorlist.replace('ITS', 'IT3')
        QED_task['cmd'] = 'o2-sim -e TGeant3 --field ccdb -j ' + str('1') + ' -o qed' \
            + ' -n ' + str(NEventsQED) + ' -m ' + qed_detectorlist \
            + ('', ' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] + ' --run ' + str(args.run) \
            + ' --seed ' + str(TFSEED) \
            + ' -g extgen ' \
            + ' --detectorList ' + args.detectorList + ' ' \
            + QEDCONFKEY
        # cross-check that the QED cross section produced by the generator matches the expected value
        QED_task['cmd'] += '; RC=$?; QEDXSecCheck=`grep xSectionQED qedgenparam.ini | sed \'s/xSectionQED=//\'`'
        QED_task['cmd'] += '; echo "CheckXSection ' + str(QEDXSecExpected[COLTYPE]) + ' = $QEDXSecCheck"; [[ ${RC} == 0 ]]'
        # TODO: propagate the Xsecion ratio dynamically
        QEDdigiargs=' --simPrefixQED qed' + ' --qed-x-section-ratio ' + str(QEDXSecExpected[COLTYPE]/XSecSys[COLTYPE])
        workflow['stages'].append(QED_task)
    # recompute the number of workers to increase CPU efficiency
    NWORKERS_TF = compute_n_workers(INTRATE, COLTYPE, n_workers_user = NWORKERS) if (not args.force_n_workers) else NWORKERS

    # produce the signal configuration
    SGN_CONFIG_task=createTask(name='gensgnconf_'+str(tf), tf=tf, cwd=timeframeworkdir)
    SGN_CONFIG_task['cmd'] = 'echo "placeholder / dummy task"'
    if GENERATOR == 'pythia8':
        # see if config is given externally
        externalPythia8Config = simInitialConfigKeys.get("GeneratorPythia8", {}).get("config", None)
        if externalPythia8Config != None:
            # check if this refers to a file with ABSOLUTE path
            if not isabs(externalPythia8Config):
                print ('Error: Argument to GeneratorPythia8.config must be absolute path')
                exit (1)
            # in this case, we copy the external config to the local dir (maybe not even necessary)
            SGN_CONFIG_task['cmd'] = 'cp ' + externalPythia8Config + ' pythia8.cfg'
        else:
            # no external config given --> generate one with the standard helper script
            SGN_CONFIG_task['cmd'] = '${O2DPG_ROOT}/MC/config/common/pythia8/utils/mkpy8cfg.py \
--output=pythia8.cfg \
--seed='+str(TFSEED)+' \
--idA='+str(PDGA)+' \
--idB='+str(PDGB)+' \
--eCM='+str(ECMS)+' \
--eA='+str(EBEAMA)+' \
--eB='+str(EBEAMB)+' \
--process='+str(PROCESS)+' \
--ptHatMin='+str(PTHATMIN)+' \
--ptHatMax='+str(PTHATMAX)
            if WEIGHTPOW > 0:
                SGN_CONFIG_task['cmd'] = SGN_CONFIG_task['cmd'] + ' --weightPow=' + str(WEIGHTPOW)
        # if we configure pythia8 here --> we also need to adjust the configuration
        # TODO: we need a proper config container/manager so as to combine these local configs with external configs etc.
        args.confKey = args.confKey + ";GeneratorPythia8.config=pythia8.cfg"
    # elif GENERATOR == 'extgen': what do we do if generator is not pythia8?
    # NOTE: Generator setup might be handled in a different file or different files (one per
    #       possible generator)
    workflow['stages'].append(SGN_CONFIG_task)
    # default flags for extkinO2 signal simulation (no transport)
    extkinO2Config = ''
    if GENERATOR == 'extkinO2':
        extkinO2Config = ';GeneratorFromO2Kine.randomize=true;GeneratorFromO2Kine.rngseed=' + str(TFSEED)

    # determine final conf key for signal simulation
    CONFKEY = constructConfigKeyArg(create_geant_config(args, args.confKey + extkinO2Config))

    # -----------------
    # transport signals
    # -----------------
    signalneeds=[ SGN_CONFIG_task['name'], GRP_TASK['name'] ]
    signalneeds.append(PreCollContextTask['name'])

    # add embedIntoFile only if embeddPattern does contain a '@'
    # (note: bitwise `&` on two bools behaves like `and` here)
    embeddinto= "--embedIntoFile ../bkg_MCHeader.root" if (doembedding & ("@" in args.embeddPattern)) else ""
    if doembedding:
        if not usebkgcache:
            signalneeds = signalneeds + [ BKGtask['name'] ]
        else:
            signalneeds = signalneeds + [ BKG_HEADER_task['name'] ]

    # (separate) event generation task
    sep_event_mode = args.event_gen_mode == 'separated'
    sgngenneeds=signalneeds
    # for HepMC we need some special treatment since we need
    # to ensure that different timeframes read different events from this file
    if GENERATOR=="hepmc" and tf > 1:
        sgngenneeds=signalneeds + ['sgngen_' + str(tf-1)] # we serialize event generation
    SGNGENtask=createTask(name='sgngen_'+str(tf), needs=sgngenneeds, tf=tf, cwd='tf'+str(tf), lab=["GEN"],
                          cpu=8 if args.make_evtpool else 1, mem=1000)
    SGNGENtask['cmd']=''
    if GENERATOR=="hepmc":
        if tf == 1:
            # determine the offset number
            eventOffset = environ.get('HEPMCOFFSET')
            print("HEPMCOFFSET: ", eventOffset)
            if eventOffset == None:
                eventOffset = 0
            cmd = 'export HEPMCEVENTSKIP=$(${O2DPG_ROOT}/UTILS/InitHepMCEventSkip.sh ../HepMCEventSkip.json ' + str(eventOffset) + ');'
        elif tf > 1:
            # determine the skip number
            cmd = 'export HEPMCEVENTSKIP=$(${O2DPG_ROOT}/UTILS/ReadHepMCEventSkip.sh ../HepMCEventSkip.json ' + str(tf) + ');'
        SGNGENtask['cmd'] = cmd
    generationtimeout = -1 # possible timeout for event pool generation
    if args.make_evtpool:
        JOBTTL=environ.get('JOBTTL', None)
        if JOBTTL != None:
            generationtimeout = 0.95*int(JOBTTL) # for GRID jobs, determine timeout automatically
    # generation-only o2-sim invocation (no GEANT transport)
    SGNGENtask['cmd'] +=('','timeout ' + str(generationtimeout) + ' ')[args.make_evtpool and generationtimeout>0] \
        + '${O2_ROOT}/bin/o2-sim --noGeant -j 1 --field ccdb --vertexMode ' + vtxmode_sgngen \
        + ' --run ' + str(args.run) + ' ' + str(CONFKEY) + str(TRIGGER) \
        + ' -g ' + str(GENERATOR) + ' ' + str(INIFILE) + ' -o genevents ' + embeddinto \
        + ('', ' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] \
        + ' --seed ' + str(TFSEED) + ' -n ' + str(NSIGEVENTS) \
        + ' --detectorList ' + args.detectorList \
        + ' --fromCollContext collisioncontext.root:' + signalprefix
    if GENERATOR=="hepmc":
        # record how many HepMC events were consumed so the next timeframe can skip them
        SGNGENtask['cmd'] += "; RC=$?; ${O2DPG_ROOT}/UTILS/UpdateHepMCEventSkip.sh ../HepMCEventSkip.json " + str(tf) + '; [[ ${RC} == 0 ]]'
    if sep_event_mode == True:
        workflow['stages'].append(SGNGENtask)
        signalneeds = signalneeds + [SGNGENtask['name']]
    if args.make_evtpool:
        if generationtimeout > 0:
            # final adjustment of command for event pools and timeout --> we need to analyse the return code
            # if we have a timeout then we finish what we can and are also happy with return code 124
            SGNGENtask['cmd'] += ' ; RC=$? ; [[ ${RC} == 0 || ${RC} == 124 ]]'
        # event-pool production ends with generation --> skip the rest of this timeframe's tasks
        continue

    # GeneratorFromO2Kine parameters are needed only before the transport
    CONFKEY = re.sub(r'GeneratorFromO2Kine.*?;', '', CONFKEY)
    # GEANT transport of the signal events; PbPb needs more memory
    sgnmem = 6000 if COLTYPE == 'PbPb' else 4000
    SGNtask=createTask(name='sgnsim_'+str(tf), needs=signalneeds, tf=tf, cwd='tf'+str(tf), lab=["GEANT"],
                       relative_cpu=7/8, n_workers=NWORKERS_TF, mem=str(sgnmem))
    sgncmdbase = '${O2_ROOT}/bin/o2-sim -e ' + str(SIMENGINE) + ' ' + str(SKIPMODULES) + ' -n ' + str(NSIGEVENTS) + ' --seed ' + str(TFSEED) \
        + ' --field ccdb -j ' + str(NWORKERS_TF) + ' ' + str(CONFKEY) + ' ' + str(INIFILE) + ' -o ' + signalprefix + ' ' + embeddinto \
        + ' --detectorList ' + args.detectorList \
        + ('', ' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] + ' --run ' + str(args.run)
    if sep_event_mode:
        # events were generated upfront by SGNGENtask --> only transport them from the kinematics file
        SGNtask['cmd'] = sgncmdbase + ' -g extkinO2 --extKinFile genevents_Kine.root ' + ' --vertexMode kNoVertex'
    else:
        # generate and transport in a single o2-sim invocation; vertex from CCDB
        SGNtask['cmd'] = sgncmdbase + ' -g ' + str(GENERATOR) + ' ' + str(TRIGGER) + ' --vertexMode kCCDB '
    if not isActive('all'):
        SGNtask['cmd'] += ' --readoutDetectors ' + " ".join(activeDetectors)
    SGNtask['cmd'] += ' --fromCollContext collisioncontext.root'
    workflow['stages'].append(SGNtask)
    # some tasks further below still want geometry + grp in fixed names, so we provide it here
    # Alternatively, since we have timeframe isolation, we could just work with standard o2sim_ files
    # We need to be careful here and distinguish between embedding and non-embedding cases
    # (otherwise it can confuse itstpcmatching, see O2-2026). This is because only one of the GRPs is updated during digitization.
    if doembedding:
        # in embedding mode the background products carry the canonical GRP/geometry
        LinkGRPFileTask=createTask(name='linkGRP_'+str(tf), needs=[BKG_HEADER_task['name'] if usebkgcache else BKGtask['name'] ], tf=tf, cwd=timeframeworkdir, cpu='0',mem='0')
        LinkGRPFileTask['cmd']='''
        ln -nsf ../bkg_grp.root o2sim_grp.root;
        ln -nsf ../bkg_grpecs.root o2sim_grpecs.root;
        ln -nsf ../bkg_geometry.root o2sim_geometry.root;
        ln -nsf ../bkg_geometry.root bkg_geometry.root;
        ln -nsf ../bkg_geometry-aligned.root bkg_geometry-aligned.root;
        ln -nsf ../bkg_geometry-aligned.root o2sim_geometry-aligned.root;
        ln -nsf ../bkg_MCHeader.root bkg_MCHeader.root;
        ln -nsf ../bkg_grp.root bkg_grp.root;
        ln -nsf ../bkg_grpecs.root bkg_grpecs.root
        '''
    else:
        # without embedding, the signal products are the canonical ones
        LinkGRPFileTask=createTask(name='linkGRP_'+str(tf), needs=[SGNtask['name']], tf=tf, cwd=timeframeworkdir, cpu='0', mem='0')
        LinkGRPFileTask['cmd']='ln -nsf ' + signalprefix + '_grp.root o2sim_grp.root ; ln -nsf ' + signalprefix + '_geometry.root o2sim_geometry.root; ln -nsf ' + signalprefix + '_geometry-aligned.root o2sim_geometry-aligned.root'
    workflow['stages'].append(LinkGRPFileTask)