-
Notifications
You must be signed in to change notification settings - Fork 13
/
condor_SUEP_ZH.py
79 lines (69 loc) · 2.13 KB
/
condor_SUEP_ZH.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
import argparse
import os
# Import coffea specific features
from coffea import processor
from coffea.processor import Runner, futures_executor, run_uproot_job
# SUEP Repo Specific
from workflows import SUEP_coffea_ZH, merger
# Begin argparse
# Begin argparse: command-line interface for a single condor job.
parser = argparse.ArgumentParser("")
parser.add_argument("--isMC", type=int, default=1, help="")  # 1 = MC sample, 0 = data
parser.add_argument("--jobNum", type=int, default=1, help="")  # condor job index
parser.add_argument("--era", type=str, default="2018", help="")  # data-taking era label
parser.add_argument("--doSyst", type=int, default=1, help="")  # enable systematic variations
parser.add_argument("--infile", required=True, type=str, default=None, help="")  # input ROOT file (NanoAOD)
parser.add_argument("--dataset", type=str, default="X", help="")  # dataset name, used as the fileset key
# FIX: --nevt was declared type=str with an int default (-1), so the default
# and CLI-supplied values had different types; parse it as int consistently.
parser.add_argument("--nevt", type=int, default=-1, help="")
parser.add_argument(
    "--SR",
    action="store_true",
    default=False,
    help="Save only SR-level results. For quick checks",
)
parser.add_argument(
    "--doOF",
    action="store_true",
    default=False,
    help="Do emu final state instead of ee+mumu",
)
parser.add_argument(
    "--isDY",
    action="store_true",
    default=False,
    help="Activate to save the gen-level of the Z pT, needed to clean the overlap in DY samples",
)
options = parser.parse_args()
# Build the list of processor instances to run; output files are written to
# the job's current working directory (the condor scratch area).
out_dir = os.getcwd()
modules_era = []
modules_era.append(
    SUEP_coffea_ZH.SUEP_cluster_ZH(
        isMC=options.isMC,
        era=str(options.era),
        scouting=0,
        do_syst=options.doSyst,
        syst_var="",
        sample=options.dataset,
        weight_syst="",
        flag=False,
        output_location=out_dir,
        # FIX: argparse stores the --SR flag under options.SR (dest is derived
        # from the option string), so options.SRonly raised AttributeError.
        SRonly=options.SR,
        doOF=options.doOF,
        isDY=options.isDY,
    )
)
# Run every configured processor over the single input file, wrapping the run
# in coffea's automatic-retry helper, then merge the per-chunk HDF5 outputs
# into a single file for this job.
for proc in modules_era:
    # One local worker, no compression: each condor job handles one file.
    job_executor = processor.FuturesExecutor(compression=None, workers=1)
    runner = processor.Runner(
        executor=job_executor,
        schema=processor.NanoAODSchema,
        xrootdtimeout=60,
        chunksize=100000000,
    )
    # Retry transient (e.g. xrootd) failures up to 3 times; do not skip bad files.
    runner.automatic_retries(
        retries=3,
        skipbadfiles=False,
        func=runner.run,
        fileset={options.dataset: [options.infile]},
        treename="Events",
        processor_instance=proc,
    )
merger.merge(options, pattern="condor_*.hdf5", outFile="out.hdf5")