configfile: "snakemake.yml"

import functools
import os
import subprocess

from snakemake.logging import logger

@functools.cache
def get_spack_package_hash(package_name):
    import json
    try:
        ver_info = json.loads(subprocess.check_output(["spack", "find", "--json", package_name]))
        return ver_info[0]["package_hash"]
    except FileNotFoundError:
        logger.warning("Spack is not installed")
        return ""
    except subprocess.CalledProcessError as e:
        print(e)
        return ""

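# Illustrative usage sketch (the rule and package names below are placeholders, not
# taken from a specific benchmark): folding the Spack package hash into a cached
# rule's params lets Snakemake's between-workflow cache invalidate results when the
# underlying software stack changes.
#
#     rule example_cached_simulation:
#         params:
#             npsim_hash=get_spack_package_hash("npsim"),
#         cache: True
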
@functools.cache
def find_epic_libraries():
    import ctypes.util
    # if the library is not found (not available), return an empty list so the DAG can still be evaluated
    libs = []
    lib = ctypes.util.find_library("epic")
    if lib is not None:
        libs.append(os.environ["DETECTOR_PATH"] + "/../../lib/" + lib)
    return libs

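# Illustrative usage sketch (rule name is a placeholder): listing the geometry
# library as an input makes a rule re-run when the installed epic geometry changes,
# while the empty-list fallback above keeps the DAG evaluable when it is absent.
#
#     rule example_reconstruction:
#         input:
#             geometry_lib=find_epic_libraries(),
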
include: "benchmarks/backgrounds/Snakefile"
include: "benchmarks/backwards_ecal/Snakefile"
include: "benchmarks/barrel_ecal/Snakefile"
include: "benchmarks/beamline/Snakefile"
include: "benchmarks/calo_pid/Snakefile"
include: "benchmarks/campaign/Snakefile"
include: "benchmarks/ecal_gaps/Snakefile"
include: "benchmarks/material_scan/Snakefile"
include: "benchmarks/tracking_performances/Snakefile"
include: "benchmarks/tracking_performances_dis/Snakefile"
include: "benchmarks/lfhcal/Snakefile"
include: "benchmarks/zdc_lyso/Snakefile"
include: "benchmarks/zdc_neutron/Snakefile"
include: "benchmarks/insert_muon/Snakefile"
include: "benchmarks/zdc_lambda/Snakefile"
include: "benchmarks/zdc_photon/Snakefile"
include: "benchmarks/zdc_pi0/Snakefile"
include: "benchmarks/zdc_sigma/Snakefile"
include: "benchmarks/insert_neutron/Snakefile"
include: "benchmarks/insert_tau/Snakefile"
include: "benchmarks/femc_electron/Snakefile"
include: "benchmarks/femc_photon/Snakefile"
include: "benchmarks/femc_pi0/Snakefile"
include: "benchmarks/nhcal_acceptance/Snakefile"
include: "benchmarks/nhcal_basic_distribution/Snakefile"

use_s3 = config["remote_provider"].lower() == "s3"
use_xrootd = config["remote_provider"].lower() == "xrootd"


def get_remote_path(path):
    if use_s3:
        return f"s3https://eics3.sdcc.bnl.gov:9000/eictest/{path}"
    elif use_xrootd:
        return f"root://dtn-eic.jlab.org//volatile/eic/{path}"
    else:
        raise ValueError(f'Unexpected value for config["remote_provider"]: {config["remote_provider"]}')

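# Illustrative example (the dataset path is a placeholder): with
# config["remote_provider"] set to "xrootd",
#     get_remote_path("EPIC/EVGEN/DIS/example.hepmc3.tree.root")
# returns
#     root://dtn-eic.jlab.org//volatile/eic/EPIC/EVGEN/DIS/example.hepmc3.tree.root
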
rule fetch_epic:
    output:
        filepath="EPIC/{PATH}"
    params:
        # wildcards are not included in hash for caching, we need to add them as params
        PATH=lambda wildcards: wildcards.PATH
    cache: True
    retries: 3
    shell: """
        xrdcp --debug 2 root://dtn-eic.jlab.org//volatile/eic/{output.filepath} {output.filepath}
        """ if use_xrootd else """
        mc cp S3/eictest/{output.filepath} {output.filepath}
        """ if use_s3 else f"""
        echo 'Unexpected value for config["remote_provider"]: {config["remote_provider"]}'
        exit 1
        """

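# Illustrative usage sketch (rule name and file path are placeholders): any rule that
# declares an input matching the "EPIC/{PATH}" pattern is satisfied by this fetch rule.
#
#     rule example_analysis:
#         input:
#             events="EPIC/EVGEN/DIS/example.hepmc3.tree.root",
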
rule warmup_run:
    output:
        "warmup/{DETECTOR_CONFIG}.edm4hep.root",
    message: "Ensuring that calibrations/fieldmaps are available for {wildcards.DETECTOR_CONFIG}"
    shell: """
        set -m # monitor mode to prevent lingering processes
        exec ddsim \
            --runType batch \
            --numberOfEvents 1 \
            --compactFile "$DETECTOR_PATH/{wildcards.DETECTOR_CONFIG}.xml" \
            --outputFile "{output}" \
            --enableGun
        """

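# Illustrative usage sketch (rule and detector configuration names are placeholders):
# simulation rules can depend on the warmup output so that calibrations/fieldmaps are
# downloaded once before parallel ddsim jobs start.
#
#     rule example_detector_simulation:
#         input:
#             warmup="warmup/epic_craterlake.edm4hep.root",
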
rule matplotlibrc:
    output:
        ".matplotlibrc",
    run:
        with open(output[0], "wt") as fp:
            fp.write("backend: Agg\n")
            # interactive mode prevents plt.show() from blocking
            fp.write("interactive : True\n")

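# Illustrative usage sketch (rule name is a placeholder): plotting rules can depend on
# this file so that matplotlib scripts executed from the working directory run headless
# with the Agg backend.
#
#     rule example_plots:
#         input:
#             matplotlibrc=".matplotlibrc",
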
rule org2py:
    input:
        notebook=workflow.basedir + "/{NOTEBOOK}.org",
        converter=workflow.source_path("benchmarks/common/org2py.awk"),
    output:
        "{NOTEBOOK}.py"
    shell:
        """
        awk -f {input.converter} {input.notebook} > {output}
        """

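# Illustrative example (the notebook name is a placeholder): requesting
# "benchmarks/example/analysis.py" as an input converts the org-mode notebook at
# workflow.basedir + "/benchmarks/example/analysis.org" into a plain Python script.
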
rule metadata:
    output:
        "results/metadata.json"
    # doubled braces ({{ }} and ${{VAR:-}}) render as literal braces once Snakemake formats the command
    shell:
        """
cat > {output} <<EOF
{{
  "CI_COMMIT_REF_NAME": "${{CI_COMMIT_REF_NAME:-}}",
  "CI_COMMIT_SHA": "${{CI_COMMIT_SHA:-}}",
  "CI_PIPELINE_ID": "${{CI_PIPELINE_ID:-}}",
  "CI_PIPELINE_SOURCE": "${{CI_PIPELINE_SOURCE:-}}",
  "CI_PROJECT_ID": "${{CI_PROJECT_ID:-}}",
  "GITHUB_REPOSITORY": "${{GITHUB_REPOSITORY:-}}",
  "GITHUB_SHA": "${{GITHUB_SHA:-}}",
  "GITHUB_PR": "${{GITHUB_PR:-}}",
  "PIPELINE_NAME": "${{PIPELINE_NAME:-}}"
}}
EOF
# validate JSON
jq '.' {output}
"""