diff --git a/reduction_tools/partialator.py b/reduction_tools/partialator.py
index e4259bb..2ea1176 100644
--- a/reduction_tools/partialator.py
+++ b/reduction_tools/partialator.py
@@ -25,6 +25,7 @@ python partialator.py -s
 """
 
 # modules
+from sys import exit
 import pandas as pd
 import numpy as np
 import subprocess
@@ -35,12 +36,22 @@ from tqdm import tqdm
 import regex as re
 import matplotlib.pyplot as plt
 from scipy.optimize import curve_fit
+from loguru import logger
 
-def submit_job( job_file ):
+def submit_job( job_file, reservation ):
     # submit the job
-    submit_cmd = ["sbatch", "--cpus-per-task=32", "--" ,job_file]
-    job_output = subprocess.check_output(submit_cmd)
+    if reservation:
+        print( "using a ra beamtime reservation = {0}".format( reservation ) )
+        submit_cmd = [ "sbatch", "--reservation={0}".format( reservation ), "--cpus-per-task=32", "--" , job_file ]
+    else:
+        submit_cmd = [ "sbatch", "--cpus-per-task=32", "--" , job_file ]
+
+    try:
+        job_output = subprocess.check_output( submit_cmd )
+    except subprocess.CalledProcessError:
+        print( "please give the correct ra reservation or remove the -R from the arguments" )
+        exit()
 
     # scrub job id from - example Submitted batch job 742403
     pattern = r"Submitted batch job (\d+)"
@@ -54,7 +65,7 @@ def wait_for_jobs( job_ids, total_jobs ):
     while job_ids:
         completed_jobs = set()
         for job_id in job_ids:
-            status_cmd = ["squeue", "-h", "-j", str(job_id)]
+            status_cmd = [ "squeue", "-h", "-j", str(job_id) ]
             status = subprocess.check_output(status_cmd)
             if not status:
                 completed_jobs.add(job_id)
@@ -78,6 +89,7 @@ def run_partialator( proc_dir, name, stream, pointgroup, model, iterations, cell
     part_sh.write( " -y {0} \\\n".format( pointgroup ) )
     part_sh.write( " --model={0} \\\n".format( model ) )
     part_sh.write( " --max-adu={0} \\\n".format( adu ) )
+    part_sh.write( " -j 32 \\\n" )
     part_sh.write( " --iterations={0}\n\n".format( iterations ) )
     part_sh.write( "check_hkl --shell-file=mult.dat *.hkl -p {0} --nshells={1} --highres={2} &> check_hkl.log\n".format( cell, shells, part_h_res ) )
     part_sh.write( "check_hkl --ltest --ignore-negs --shell-file=ltest.dat *.hkl -p {0} --nshells={1} --highres={2} &> ltest.log\n".format( cell, shells, part_h_res ) )
@@ -244,9 +256,8 @@ def get_mean_cell( stream ):
 
     return mean_a, mean_b, mean_c, mean_alpha, mean_beta, mean_gamma
 
-def main( cwd, name, stream, pointgroup, model, iterations, cell, shells, part_h_res, adu ):
+def main( cwd, name, stream, pointgroup, model, iterations, cell, shells, part_h_res, adu, reservation ):
 
-    print( "begin job" )
     # submitted job set
     submitted_job_ids = set()
 
@@ -257,18 +268,17 @@ def main( cwd, name, stream, pointgroup, model, iterations, cell, shells, part_h
     # move to part directory
     os.chdir( part_dir )
 
-    print( "making partialator files" )
+    print( "making partialator file" )
     # make partialator run file
     part_run_file = run_partialator( part_dir, name, stream, pointgroup, model, iterations, cell, shells, part_h_res, adu )
 
     # submit job
-    job_id = submit_job( part_run_file )
+    job_id = submit_job( part_run_file, reservation )
     print(f"job submitted: {0}".format( job_id ) )
     submitted_job_ids.add( job_id )
-    print( "DONE" )
 
     # use progress bar to track job completion
-    time.sleep(30)
+    time.sleep(10)
     wait_for_jobs(submitted_job_ids, 1 )
     print("slurm processing done")
 
@@ -280,7 +290,6 @@ def main( cwd, name, stream, pointgroup, model, iterations, cell, shells, part_h
 
     # make summary data table
     stats_df = summary_stats( cc_dat, ccstar_dat, mult_dat, rsplit_dat )
-    print( stats_df.to_string() )
 
     print_df = stats_df[ [ "1_d", "d", "min", "max", "nref", "poss", "comp", "obs", "mult",
@@ -361,13 +370,33 @@ if __name__ == "__main__":
         "-a",
         "--max_adu",
         help="maximum detector counts to allow. Default is 12000.",
-        type=int
+        type=int,
+        default=12000
+    )
+    parser.add_argument(
+        "-R",
+        "--reservation",
+        help="reservation name for ra cluster. Usually along the lines of P11111_2024-12-10",
+        type=str,
+        default=None
+    )
+    parser.add_argument(
+        "-d",
+        "--debug",
+        help="output debug to terminal.",
+        action="store_true",
+        default=False
     )
 
     args = parser.parse_args()
 
+    # set loguru
+    if not args.debug:
+        logger.remove()
+    logfile = "{0}.log".format( args.name )
+    logger.add( logfile, format="{message}", level="INFO")
 
     # run main
     cwd = os.getcwd()
    print( "top working directory = {0}".format( cwd ) )
-    main( cwd, args.name, args.stream_file, args.pointgroup, args.model, args.iterations, args.cell_file, args.bins, args.resolution, args.max_adu )
+    main( cwd, args.name, args.stream_file, args.pointgroup, args.model, args.iterations, args.cell_file, args.bins, args.resolution, args.max_adu, args.reservation )
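
The loguru wiring added in the last hunk follows loguru's standard sink-swap pattern: the library installs a stderr sink by default, so `logger.remove()` silences the terminal when `--debug` is off, and `logger.add()` then routes messages to a per-run log file. A minimal standalone sketch of that behavior is below; the `debug` and `name` variables stand in for `args.debug` and `args.name` and are not part of the patch.

```python
from loguru import logger

debug = False          # stands in for args.debug
name = "example_run"   # stands in for args.name

# loguru starts with a default stderr sink; dropping it keeps the terminal quiet
if not debug:
    logger.remove()

# route INFO-and-above messages to a per-run log file instead
logfile = "{0}.log".format( name )
logger.add( logfile, format="{message}", level="INFO" )

logger.info( "messages now land in {0}".format( logfile ) )
```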