Warning, /detector_benchmarks/Snakefile is written in an unsupported language. File is not indexed.
# Global workflow configuration (e.g. the remote storage provider used below).
configfile: "snakemake.yml"
0002
0003 import functools
0004 import os
0005 from snakemake.logging import logger
0006
0007
# Compile a ROOT C++ analysis macro with ACLiC ('.L macro.cxx+'), producing
# the dependency file, shared library, and dictionary that downstream
# analysis rules load.
# NOTE(review): "(unknown)" below looks like a redacted/garbled wildcard
# (presumably something like "{filename}") — confirm against the upstream
# Snakefile before relying on these patterns.
rule compile_analysis:
    input:
        "{path}/(unknown).cxx",
    output:
        "{path}/(unknown)_cxx.d",
        "{path}/(unknown)_cxx.so",
        "{path}/(unknown)_cxx_ACLiC_dict_rdict.pcm",
    shell:
        """
root -l -b -q -e '.L {input}+'
"""
0019
0020
@functools.cache
def get_spack_package_hash(package_name):
    """Return the Spack installation hash for *package_name*.

    The hash is used to invalidate cached rule results when the underlying
    software stack changes. Returns an empty string when Spack is not
    installed or the query fails, so the DAG can still be evaluated.
    """
    import json
    import subprocess  # fix: was referenced below without ever being imported
    try:
        ver_info = json.loads(subprocess.check_output(["spack", "find", "--json", package_name]))
        # `spack find --json` returns a list of matching specs; use the first.
        return ver_info[0]["hash"]
    except FileNotFoundError:
        logger.warning("Spack is not installed")
        return ""
    except subprocess.CalledProcessError as e:
        # consistency: log through the Snakemake logger instead of print()
        logger.warning(str(e))
        return ""
0033
0034
@functools.cache
def find_epic_libraries():
    """Locate the installed "epic" shared library, if any.

    Returns a single-element list with the library path under
    ``$DETECTOR_PATH/../../lib``, or an empty list when the library is not
    available, so the DAG can still be evaluated.
    """
    import ctypes.util

    found = ctypes.util.find_library("epic")
    if found is None:
        return []
    return [os.environ["DETECTOR_PATH"] + "/../../lib/" + found]
0044
0045
# Pull in the per-benchmark workflow definitions; each Snakefile contributes
# its own rules to the shared DAG.
include: "benchmarks/backgrounds/Snakefile"
include: "benchmarks/backwards_ecal/Snakefile"
include: "benchmarks/barrel_ecal/Snakefile"
include: "benchmarks/beamline/Snakefile"
include: "benchmarks/calo_pid/Snakefile"
include: "benchmarks/campaign/Snakefile"
include: "benchmarks/ecal_gaps/Snakefile"
include: "benchmarks/far_forward_dvcs/Snakefile"
include: "benchmarks/lowq2_reconstruction/Snakefile"
include: "benchmarks/material_scan/Snakefile"
include: "benchmarks/secondary_vertexing_dis/Snakefile"
include: "benchmarks/tracking_performances/Snakefile"
include: "benchmarks/tracking_performances_dis/Snakefile"
include: "benchmarks/lfhcal/Snakefile"
include: "benchmarks/zdc_lyso/Snakefile"
include: "benchmarks/zdc_neutron/Snakefile"
include: "benchmarks/insert_muon/Snakefile"
include: "benchmarks/zdc_lambda/Snakefile"
include: "benchmarks/zdc_photon/Snakefile"
include: "benchmarks/zdc_pi0/Snakefile"
include: "benchmarks/zdc_sigma/Snakefile"
include: "benchmarks/insert_neutron/Snakefile"
include: "benchmarks/insert_tau/Snakefile"
include: "benchmarks/femc_electron/Snakefile"
include: "benchmarks/femc_photon/Snakefile"
include: "benchmarks/femc_pi0/Snakefile"
include: "benchmarks/nhcal_acceptance/Snakefile"
include: "benchmarks/nhcal_basic_distribution/Snakefile"
include: "benchmarks/nhcal_sampling_fraction/Snakefile"
include: "benchmarks/nhcal_dimuon_photoproduction/Snakefile"
include: "benchmarks/nhcal_pion_rejection/Snakefile"
0077
# Normalize the configured remote provider once and expose boolean flags
# used by get_remote_path() and rule fetch_epic below.
_remote_provider = config["remote_provider"].lower()
use_s3 = _remote_provider == "s3"
use_xrootd = _remote_provider == "xrootd"
0080
0081
def get_remote_path(path):
    """Map a dataset-relative *path* to a full remote URL.

    The provider is selected via config["remote_provider"]: S3 at BNL or
    XRootD at JLab.

    Raises:
        ValueError: if config["remote_provider"] is neither "s3" nor "xrootd".
    """
    if use_s3:
        return f"s3https://eics3.sdcc.bnl.gov:9000/eictest/{path}"
    elif use_xrootd:
        return f"root://dtn-eic.jlab.org//volatile/eic/{path}"
    else:
        # fix: original called an undefined name ``runtime_exception`` (a
        # NameError at runtime) and omitted the f-prefix, so the offending
        # provider value was never interpolated into the message.
        raise ValueError(f'Unexpected value for config["remote_provider"]: {config["remote_provider"]}')
0089
0090
# fetch_epic only copies files, so run it on the submission node instead of
# dispatching a cluster job.
localrules: fetch_epic

# Fetch a file from the EPIC dataset on the configured remote storage
# (XRootD via xrdcp, or S3 via the MinIO client). The output mirrors the
# remote layout under a local EPIC/ prefix. If neither provider flag is set,
# the generated script fails with a diagnostic message.
rule fetch_epic:
    output:
        filepath="EPIC/{PATH}"
    params:
        # wildcards are not included in hash for caching, we need to add them as params
        PATH=lambda wildcards: wildcards.PATH
    cache: True
    retries: 3
    shell: """
xrdcp --debug 2 root://dtn-eic.jlab.org//volatile/eic/EPIC/{wildcards.PATH} {output.filepath}
""" if use_xrootd else """
mc cp S3/eictest/EPIC/{wildcards.PATH} {output.filepath}
""" if use_s3 else f"""
echo 'Unexpected value for config["remote_provider"]: {config["remote_provider"]}'
exit 1
"""
0109
0110
# Run a single ddsim event so that calibrations/fieldmaps get downloaded or
# generated once, before the real simulation jobs start in parallel.
rule warmup_run:
    output:
        "warmup.edm4hep.root",
    message: "Ensuring that calibrations/fieldmaps are available"
    shell: """
set -m # monitor mode to prevent lingering processes
exec ddsim \
    --runType batch \
    --numberOfEvents 1 \
    --compactFile "$DETECTOR_PATH/epic_ip6.xml" \
    --outputFile "{output}" \
    --enableGun
"""
0124
0125
# Write a .matplotlibrc selecting the non-GUI Agg backend so plots can be
# produced in batch jobs.
rule matplotlibrc:
    output:
        ".matplotlibrc",
    run:
        with open(output[0], "wt") as fp:
            fp.write("backend: Agg\n")
            # interactive mode prevents plt.show() from blocking
            fp.write("interactive : True\n")
0134
0135
# Convert an Emacs Org-mode notebook into a plain Python script using the
# shared org2py.awk converter.
rule org2py:
    input:
        notebook=workflow.basedir + "/{NOTEBOOK}.org",
        converter=workflow.source_path("benchmarks/common/org2py.awk"),
    output:
        "{NOTEBOOK}.py"
    shell:
        """
awk -f {input.converter} {input.notebook} > {output}
"""
0146
0147
# Capture CI environment metadata (GitLab CI and GitHub Actions variables)
# into results/metadata.json. Doubled braces ({{ }}) are Snakemake escapes
# for literal braces; ${{VAR:-}} therefore reaches the shell as ${VAR:-}.
# NOTE(review): only PIPELINE_NAME is JSON-escaped via `jq -Rs`; the other
# variables are substituted raw, so a quote in them would break the JSON —
# the final `jq '.'` pass would then fail the rule. Confirm this is the
# intended guard.
rule metadata:
    output:
        "results/metadata.json"
    shell:
        """
        cat > {output} <<EOF
{{
    "CI_COMMIT_REF_NAME": "${{CI_COMMIT_REF_NAME:-}}",
    "CI_COMMIT_SHA": "${{CI_COMMIT_SHA:-}}",
    "CI_PIPELINE_ID": "${{CI_PIPELINE_ID:-}}",
    "CI_PIPELINE_SOURCE": "${{CI_PIPELINE_SOURCE:-}}",
    "CI_PROJECT_ID": "${{CI_PROJECT_ID:-}}",
    "GITHUB_REPOSITORY": "${{GITHUB_REPOSITORY:-}}",
    "GITHUB_SHA": "${{GITHUB_SHA:-}}",
    "GITHUB_PR": "${{GITHUB_PR:-}}",
    "PIPELINE_NAME": $(echo "${{PIPELINE_NAME:-}}" | jq -Rs .)
}}
EOF
        # validate JSON
        jq '.' {output}
        """