diff --git a/Pinpoint/submission/submit.py b/Pinpoint/submission/submit.py new file mode 100644 index 0000000..6d94227 --- /dev/null +++ b/Pinpoint/submission/submit.py @@ -0,0 +1,49 @@ +import argparse +import os +import subprocess + +import numpy as np + + +def submit(run: int, num_events: int, num_events_per_job: int) -> None: + njobs = (num_events + num_events_per_job - 1) // num_events_per_job + + f = open(f"submit_pinpoint_{run:05d}.condor", "w") + print("Executable = submit.sh", file=f) + print(f"Log = submit_{run:05d}.log", file=f) + print("Input = /dev/null", file=f) + print(f"Output = submit_{run:05d}.out", file=f) + print(f"Error = submit_{run:05d}.err", file=f) + print(f"initialdir = {os.getcwd()}", file=f) + print('+JobFlavour = "workday"', file=f) + print('+AccountingGroup = "group_u_FASER.users"', file=f) + print("Rank = -SlotID", file=f) + for job in range(njobs): + print(f"arguments = {run:05d} {job:03d} {num_events_per_job * job}", file=f) + print("queue", file=f) + f.close() + + print(f"Submitting condor job: submit_pinpoint_{run:05d}.condor") + proc = subprocess.run( + ["/usr/bin/condor_submit", f"submit_pinpoint_{run:05d}.condor"], + capture_output=True, + text=True, + ) + for line in proc.stdout.split("\n"): + print(line) + + +if __name__ == "__main__": + np.random.seed(0) + + parser = argparse.ArgumentParser() + parser.add_argument("--run", "-r", default=0, type=int) + parser.add_argument("--num-events", "-n", default=10000, type=int) + parser.add_argument("--num-events-per-job", default=1000, type=int) + args = parser.parse_args() + + submit( + run=args.run, + num_events=args.num_events, + num_events_per_job=args.num_events_per_job, + ) diff --git a/Pinpoint/submission/submit.sh b/Pinpoint/submission/submit.sh new file mode 100755 index 0000000..de89f77 --- /dev/null +++ b/Pinpoint/submission/submit.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# arguments: run: int, job: int, first_event: int + +date + +# Check if correct number of commandline 
arguments is given +if [ "$#" -ne 3 ]; then + echo "Usage: $0 <run> <job> <first_event>" + exit 1 +fi + +RUN=$1 +JOB=$2 +FIRST_EVENT=$3 +PINPOINT_DIR=/eos/project/f/fasersim-bonn/public/pinpoint + +source ${PINPOINT_DIR}/Pinpoint_G4/Pinpoint/setup.sh +mkdir ${PINPOINT_DIR}/data/${RUN}/${JOB} +cd ${PINPOINT_DIR}/data/${RUN}/${JOB} +${PINPOINT_DIR}/Pinpoint_G4/Pinpoint/build/pinpoint ${PINPOINT_DIR}/data/${RUN}/run.mac -f ${FIRST_EVENT} diff --git a/README.md b/README.md index c1b73ae..dfb079f 100644 --- a/README.md +++ b/README.md @@ -63,6 +63,9 @@ There are a number of user defined macro commands which can be used to control t |/out/saveTrack | if `true` save all tracks, `false` by default, requires `\tracking\storeTrajectory 1`| +### Submission +Use the `Pinpoint/submission/submit.py` script to submit jobs to HTCondor. The script uses the Pinpoint version built in `/eos/project/f/fasersim-bonn/public/pinpoint/Pinpoint_G4/Pinpoint/build` and expects a macro file in `/eos/project/f/fasersim-bonn/public/pinpoint/data/run` with the details. You can specify the run number, total number of events and number of events per job as command line arguments. The CERN HTCondor supports only submissions from an `afs` space, so you have to copy the submission script to your personal `afs` directory, but the samples will be produced in the common `eos` space. + +### Next steps - [ ] Geometry (Dhruv) - [ ] Add scintillator layers