# File indexing completed on 2026-04-10 08:39:07
0001 import sys
0002 import time
0003 import uuid
0004
0005 from pandaserver.taskbuffer.FileSpec import FileSpec
0006 from pandaserver.taskbuffer.JobSpec import JobSpec
0007 from pandaserver.userinterface import Client
0008
# Pick the target site and cloud from the command line.
# With an explicit site argument the cloud is left unset (None) so the
# brokerage resolves it from the site; without one we fall back to the
# default US cloud and let the brokerage choose the site.
# (A leftover unconditional `cloud = "US"` used to clobber the
# site-given branch, making the conditional dead; it has been removed.)
if len(sys.argv) > 1:
    site = sys.argv[1]
    cloud = None
else:
    site = None
    cloud = "US"
0017
# Destination datablock gets a fresh UUID suffix so every run writes to a
# unique dataset; the storage element is fixed to BNL.
dataset_name = "panda.destDB.%s" % uuid.uuid4()
destination_se = "BNL_ATLAS_2"

# Input event files to submit one job each for; the values are unused
# placeholders (only the LFN keys matter).
files = {
    "EVNT.023986._00001.pool.root.1": None,
}
0025
job_list = []

# Build one simul+reco test job per input event file.
for index, lfn in enumerate(files, start=1):
    job = JobSpec()
    # jobDefinitionID is an integral identifier; time.time() returns a
    # float, so truncate before taking the modulus (the original code
    # assigned a float here).
    job.jobDefinitionID = int(time.time()) % 10000
    job.jobName = f"{uuid.uuid4()}_{index}"
    job.AtlasRelease = "Atlas-14.2.20"
    job.homepackage = "AtlasProduction/14.2.20.1"
    job.transformation = "csc_simul_reco_trf.py"
    job.destinationDBlock = dataset_name
    job.destinationSE = destination_se
    job.computingSite = site
    job.prodDBlock = "mc08.105031.Jimmy_jetsJ2.evgen.EVNT.e347_tid023986"
    job.prodSourceLabel = "test"
    job.processingType = "test"
    job.currentPriority = 10000
    job.cloud = cloud

    # Primary input: the event file for this job.
    fileI = FileSpec()
    fileI.dataset = job.prodDBlock
    fileI.prodDBlock = job.prodDBlock
    fileI.lfn = lfn
    fileI.type = "input"
    job.addFile(fileI)

    # Auxiliary input: the conditions DB release tarball.
    fileD = FileSpec()
    fileD.dataset = "ddo.000001.Atlas.Ideal.DBRelease.v050601"
    fileD.prodDBlock = "ddo.000001.Atlas.Ideal.DBRelease.v050601"
    fileD.lfn = "DBRelease-5.6.1.tar.gz"
    fileD.type = "input"
    job.addFile(fileD)

    # Output: AOD.
    fileOA = FileSpec()
    fileOA.lfn = f"{job.jobName}.AOD.pool.root"
    fileOA.destinationDBlock = job.destinationDBlock
    fileOA.destinationSE = job.destinationSE
    fileOA.dataset = job.destinationDBlock
    fileOA.destinationDBlockToken = "ATLASDATADISK"
    fileOA.type = "output"
    job.addFile(fileOA)

    # Output: ESD.
    fileOE = FileSpec()
    fileOE.lfn = f"{job.jobName}.ESD.pool.root"
    fileOE.destinationDBlock = job.destinationDBlock
    fileOE.destinationSE = job.destinationSE
    fileOE.dataset = job.destinationDBlock
    fileOE.destinationDBlockToken = "ATLASDATADISK"
    fileOE.type = "output"
    job.addFile(fileOE)

    # Log tarball.
    fileOL = FileSpec()
    fileOL.lfn = f"{job.jobName}.job.log.tgz"
    fileOL.destinationDBlock = job.destinationDBlock
    fileOL.destinationSE = job.destinationSE
    fileOL.dataset = job.destinationDBlock
    fileOL.destinationDBlockToken = "ATLASDATADISK"
    fileOL.type = "log"
    job.addFile(fileOL)

    # Positional transform arguments expected by csc_simul_reco_trf.py:
    # input EVNT, AOD output, skip/maxEvents, geometry, physics list,
    # job configs, DB release tarball, and ESD output.
    job.jobParameters = (
        "%s %s 30 500 3 ATLAS-GEO-02-01-00 3 3 QGSP_BERT jobConfig.VertexPosFastIDKiller.py FastSimulationJobTransforms/FastCaloSimAddCellsRecConfig.py,NoTrackSlimming.py %s OFF NONE NONE %s NONE"
        % (fileI.lfn, fileOA.lfn, fileD.lfn, fileOE.lfn)
    )

    job_list.append(job)
0095
# Submit the assembled jobs to the PanDA server and print the outcome.
submission = Client.submit_jobs(job_list)
status, output = submission
print("---------------------")
print(f"Status: {status}. Output: {output}")