import pytest
import os
from pathlib import Path
import multiprocessing

from helpers import (
    geant4Enabled,
    edm4hepEnabled,
    AssertCollectionExistsAlg,
)

import acts
from acts import UnitConstants as u
from acts.examples import (
    RootParticleWriter,
    RootParticleReader,
    RootMaterialTrackReader,
    RootTrackSummaryReader,
    CsvParticleWriter,
    CsvParticleReader,
    CsvMeasurementWriter,
    CsvMeasurementReader,
    CsvSimHitWriter,
    CsvSimHitReader,
    Sequencer,
)
from acts.examples.odd import getOpenDataDetector, getOpenDataDetectorDirectory


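# Round-trip particles through the ROOT writer and reader and check that the
# read-back collection exists for all generated events.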
@pytest.mark.root
def test_root_particle_reader(tmp_path, conf_const, ptcl_gun):
    s = Sequencer(numThreads=1, events=10, logLevel=acts.logging.WARNING)
    evGen = ptcl_gun(s)

    file = tmp_path / "particles.root"
    s.addWriter(
        conf_const(
            RootParticleWriter,
            acts.logging.WARNING,
            inputParticles=evGen.config.outputParticles,
            filePath=str(file),
        )
    )

    s.run()

    s2 = Sequencer(numThreads=1, logLevel=acts.logging.WARNING)

    s2.addReader(
        conf_const(
            RootParticleReader,
            acts.logging.WARNING,
            outputParticles="particles_generated",
            filePath=str(file),
        )
    )

    alg = AssertCollectionExistsAlg(
        "particles_generated", "check_alg", acts.logging.WARNING
    )
    s2.addAlgorithm(alg)

    s2.run()

    assert alg.events_seen == 10


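# Same round trip as above, but via the CSV particle writer/reader pair.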
@pytest.mark.csv
def test_csv_particle_reader(tmp_path, conf_const, ptcl_gun):
    s = Sequencer(numThreads=1, events=10, logLevel=acts.logging.WARNING)
    evGen = ptcl_gun(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        conf_const(
            CsvParticleWriter,
            acts.logging.WARNING,
            inputParticles=evGen.config.outputParticles,
            outputStem="particle",
            outputDir=str(out),
        )
    )

    s.run()

    s = Sequencer(numThreads=1, logLevel=acts.logging.WARNING)

    s.addReader(
        conf_const(
            CsvParticleReader,
            acts.logging.WARNING,
            inputDir=str(out),
            inputStem="particle",
            outputParticles="input_particles",
        )
    )

    alg = AssertCollectionExistsAlg(
        "input_particles", "check_alg", acts.logging.WARNING
    )
    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 10


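# Every ROOT reader is expected to expose a Config with a filePath option and
# to be constructible from keyword arguments alone.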
@pytest.mark.parametrize(
    "reader",
    [RootParticleReader, RootTrackSummaryReader],
)
@pytest.mark.root
def test_root_reader_interface(reader, conf_const, tmp_path):
    assert hasattr(reader, "Config")

    config = reader.Config

    assert hasattr(config, "filePath")

    kw = {"level": acts.logging.INFO, "filePath": str(tmp_path / "file.root")}

    assert conf_const(reader, **kw)


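# Read back material tracks previously recorded with Geant4 and check that the
# collection is present in both recorded events.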
@pytest.mark.slow
@pytest.mark.root
@pytest.mark.odd
@pytest.mark.skipif(not geant4Enabled, reason="Geant4 not set up")
def test_root_material_track_reader(material_recording):
    input_tracks = material_recording / "geant4_material_tracks.root"
    assert input_tracks.exists()

    s = Sequencer(numThreads=1)

    s.addReader(
        RootMaterialTrackReader(
            level=acts.logging.INFO,
            fileList=[str(input_tracks)],
            outputMaterialTracks="material-tracks",
        )
    )

    alg = AssertCollectionExistsAlg(
        "material-tracks", "check_alg", acts.logging.WARNING
    )
    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 2


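# Write measurements, clusters and sim hits produced by a Fatras chain to CSV,
# then read them back and check all derived collections.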
@pytest.mark.csv
def test_csv_meas_reader(tmp_path, fatras, trk_geo, conf_const):
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        CsvMeasurementWriter(
            level=acts.logging.INFO,
            inputMeasurements=digiAlg.config.outputMeasurements,
            inputClusters=digiAlg.config.outputClusters,
            inputMeasurementSimHitsMap=digiAlg.config.outputMeasurementSimHitsMap,
            outputDir=str(out),
        )
    )

    s.addWriter(
        CsvSimHitWriter(
            level=acts.logging.INFO,
            inputSimHits=simAlg.config.outputSimHits,
            outputDir=str(out),
            outputStem="hits",
        )
    )

    s.run()

    s = Sequencer(numThreads=1)

    s.addReader(
        CsvSimHitReader(
            level=acts.logging.INFO,
            outputSimHits=simAlg.config.outputSimHits,
            inputDir=str(out),
            inputStem="hits",
        )
    )

    s.addReader(
        conf_const(
            CsvMeasurementReader,
            level=acts.logging.WARNING,
            outputMeasurements="measurements",
            outputMeasurementSimHitsMap="simhitsmap",
            outputMeasurementParticlesMap="meas_ptcl_map",
            inputSimHits=simAlg.config.outputSimHits,
            inputDir=str(out),
        )
    )

    algs = [
        AssertCollectionExistsAlg(k, f"check_alg_{k}", acts.logging.WARNING)
        for k in ("measurements", "simhitsmap", "meas_ptcl_map")
    ]
    for alg in algs:
        s.addAlgorithm(alg)

    s.run()

    for alg in algs:
        assert alg.events_seen == 10


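# Round-trip only the sim hits through the CSV writer/reader.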
@pytest.mark.csv
def test_csv_simhits_reader(tmp_path, fatras, conf_const):
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        CsvSimHitWriter(
            level=acts.logging.INFO,
            inputSimHits=simAlg.config.outputSimHits,
            outputDir=str(out),
            outputStem="hits",
        )
    )

    s.run()

    s = Sequencer(numThreads=1)

    s.addReader(
        conf_const(
            CsvSimHitReader,
            level=acts.logging.INFO,
            inputDir=str(out),
            inputStem="hits",
            outputSimHits="simhits",
        )
    )

    alg = AssertCollectionExistsAlg("simhits", "check_alg", acts.logging.WARNING)
    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 10


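# Helper that runs a DD4hepSimulation particle gun on the given compact file to
# produce an EDM4hep input file with 10 events; executed in a separate process
# by the test below.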
def generate_input_test_edm4hep_simhit_reader(input, output):
    from DDSim.DD4hepSimulation import DD4hepSimulation

    ddsim = DD4hepSimulation()
    if isinstance(ddsim.compactFile, list):
        ddsim.compactFile = [input]
    else:
        ddsim.compactFile = input
    ddsim.enableGun = True
    ddsim.gun.direction = (1, 0, 0)
    ddsim.gun.particle = "pi-"
    ddsim.gun.distribution = "eta"
    ddsim.numberOfEvents = 10
    ddsim.outputFile = output
    ddsim.run()


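# Simulate ODD events with DD4hep in a spawned process, then read the EDM4hep
# output back with the EDM4hepReader and check particles and sim hits.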
@pytest.mark.slow
@pytest.mark.edm4hep
@pytest.mark.skipif(not edm4hepEnabled, reason="EDM4hep is not set up")
def test_edm4hep_simhit_particle_reader(tmp_path):
    from acts.examples.edm4hep import EDM4hepReader

    tmp_file = str(tmp_path / "output_edm4hep.root")
    odd_xml_file = str(getOpenDataDetectorDirectory() / "xml" / "OpenDataDetector.xml")

    spawn_context = multiprocessing.get_context("spawn")
    p = spawn_context.Process(
        target=generate_input_test_edm4hep_simhit_reader, args=(odd_xml_file, tmp_file)
    )
    p.start()
    p.join()

    assert os.path.exists(tmp_file)

    s = Sequencer(numThreads=1)

    with getOpenDataDetector() as detector:
        trackingGeometry = detector.trackingGeometry()

        s.addReader(
            EDM4hepReader(
                level=acts.logging.INFO,
                inputPath=tmp_file,
                inputSimHits=[
                    "PixelBarrelReadout",
                    "PixelEndcapReadout",
                    "ShortStripBarrelReadout",
                    "ShortStripEndcapReadout",
                    "LongStripBarrelReadout",
                    "LongStripEndcapReadout",
                ],
                outputParticlesGenerator="particles_generated",
                outputParticlesSimulation="particles_simulated",
                outputSimHits="simhits",
                dd4hepDetector=detector,
                trackingGeometry=trackingGeometry,
            )
        )

        alg = AssertCollectionExistsAlg("simhits", "check_alg", acts.logging.WARNING)
        s.addAlgorithm(alg)

        alg = AssertCollectionExistsAlg(
            "particles_generated", "check_alg", acts.logging.WARNING
        )
        s.addAlgorithm(alg)

        s.run()

    assert alg.events_seen == 10


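# Round-trip measurements through the EDM4hep measurement writer/reader.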
@pytest.mark.edm4hep
@pytest.mark.skipif(not edm4hepEnabled, reason="EDM4hep is not set up")
def test_edm4hep_measurement_reader(tmp_path, fatras, conf_const):
    from acts.examples.edm4hep import (
        EDM4hepMeasurementWriter,
        EDM4hepMeasurementReader,
    )

    s = Sequencer(numThreads=1, events=10)
    _, simAlg, digiAlg = fatras(s)

    out = tmp_path / "measurements_edm4hep.root"

    config = EDM4hepMeasurementWriter.Config(
        inputMeasurements=digiAlg.config.outputMeasurements,
        inputClusters=digiAlg.config.outputClusters,
        outputPath=str(out),
    )
    s.addWriter(EDM4hepMeasurementWriter(level=acts.logging.INFO, config=config))
    s.run()

    s = Sequencer(numThreads=1)

    s.addReader(
        conf_const(
            EDM4hepMeasurementReader,
            level=acts.logging.WARNING,
            outputMeasurements="measurements",
            outputMeasurementSimHitsMap="simhitsmap",
            inputPath=str(out),
        )
    )

    algs = [
        AssertCollectionExistsAlg(k, f"check_alg_{k}", acts.logging.WARNING)
        for k in ("measurements", "simhitsmap")
    ]
    for alg in algs:
        s.addAlgorithm(alg)

    s.run()

    for alg in algs:
        assert alg.events_seen == 10


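# Run truth tracking with the Kalman filter, write the resulting tracks to
# EDM4hep and read them back.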
@pytest.mark.edm4hep
@pytest.mark.skipif(not edm4hepEnabled, reason="EDM4hep is not set up")
def test_edm4hep_tracks_reader(tmp_path):
    from acts.examples.edm4hep import EDM4hepTrackWriter, EDM4hepTrackReader

    detector = acts.examples.GenericDetector()
    trackingGeometry = detector.trackingGeometry()

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from truth_tracking_kalman import runTruthTrackingKalman

    s = Sequencer(numThreads=1, events=10)
    runTruthTrackingKalman(
        trackingGeometry,
        field,
        digiConfigFile=Path(
            str(
                Path(__file__).parent.parent.parent.parent
                / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
            )
        ),
        outputDir=tmp_path,
        s=s,
    )

    out = tmp_path / "tracks_edm4hep.root"

    s.addWriter(
        EDM4hepTrackWriter(
            level=acts.logging.VERBOSE,
            inputTracks="kf_tracks",
            outputPath=str(out),
            Bz=2 * u.T,
        )
    )

    s.run()

    s = Sequencer(numThreads=1)
    s.addReader(
        EDM4hepTrackReader(
            level=acts.logging.VERBOSE,
            outputTracks="kf_tracks",
            inputPath=str(out),
            Bz=2 * u.T,
        )
    )

    s.run()


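# The BufferedReader wraps an upstream reader and replays the buffered events,
# so more events can be processed than were originally written to file.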
@pytest.mark.root
def test_buffered_reader(tmp_path, conf_const, ptcl_gun):
    eventsInBuffer = 5
    eventsToProcess = 10

    s = Sequencer(numThreads=1, events=eventsInBuffer, logLevel=acts.logging.WARNING)
    evGen = ptcl_gun(s)

    file = tmp_path / "particles.root"
    s.addWriter(
        conf_const(
            RootParticleWriter,
            acts.logging.WARNING,
            inputParticles=evGen.config.outputParticles,
            filePath=str(file),
        )
    )

    s.run()

    s2 = Sequencer(events=eventsToProcess, numThreads=1, logLevel=acts.logging.WARNING)

    reader = acts.examples.RootParticleReader(
        level=acts.logging.WARNING,
        outputParticles="particles_input",
        filePath=str(file),
    )

    s2.addReader(
        acts.examples.BufferedReader(
            level=acts.logging.WARNING,
            upstreamReader=reader,
            bufferSize=eventsInBuffer,
        )
    )

    alg = AssertCollectionExistsAlg(
        "particles_input", "check_alg", acts.logging.WARNING
    )
    s2.addAlgorithm(alg)

    s2.run()

    assert alg.events_seen == eventsToProcess