@@ -47,8 +47,9 @@ def make_jobs(jar, xml, aux_source_path, output_directory, df_mapping, engine,
 @click.option('--conditions', help='Name of the data conditions as given in datacheck_conditions.py e.g standard', default='standard')
 @click.option('--max_delta_t', default=30, help='Maximum time difference (minutes) allowed between drs and data files.', type=click.INT)
 @click.option('--local', default=False, is_flag=True, help='Flag indicating whether jobs should be executed locally.')
+@click.option('--yes', help="Assume 'yes' if you are asked to continue processing and start jobs", default=False, is_flag=True)
 @click.password_option(help='password to read from the always awesome RunDB')
-def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queue, walltime, engine, num_runs, vmem, log_level, port, source, conditions, max_delta_t, local, password):
+def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queue, walltime, engine, num_runs, vmem, log_level, port, source, conditions, max_delta_t, local, yes, password):

     level = logging.INFO
     if log_level is 'DEBUG':
@@ -82,7 +83,8 @@ def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queu
     logger.warn("Missing {} dataruns due to missing datafiles".format(len(df_runs_missing)))

     logger.info("Would process {} jobs with {} runs per job".format(len(df_runs)//num_runs, num_runs))
-    click.confirm('Do you want to continue processing and start jobs?', abort=True)
+    if not yes:
+        click.confirm('Do you want to continue processing and start jobs?', abort=True)

     job_list = make_jobs(jarpath, xmlpath, aux_source_path, output_directory, df_runs, engine, queue, vmem, num_runs, walltime)
     job_outputs = gridmap.process_jobs(job_list, max_processes=len(job_list), local=local)
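For context, the change follows the usual click pattern for a non-interactive "assume yes" flag: the prompt is only issued when the flag is absent, and `click.confirm(..., abort=True)` aborts the command if the user declines. Below is a minimal, self-contained sketch of that pattern; the command name `run` and the echoed message are illustrative, not taken from the repository.

```python
import click

@click.command()
@click.option('--yes', is_flag=True, default=False,
              help="Assume 'yes' when asked whether to continue and start jobs.")
def run(yes):
    # With abort=True, click.confirm raises click.Abort (non-zero exit) if the
    # user answers "no". Passing --yes skips the prompt entirely, which allows
    # unattended (batch/cron) execution.
    if not yes:
        click.confirm('Do you want to continue processing and start jobs?', abort=True)
    click.echo('starting jobs ...')

if __name__ == '__main__':
    run()
```

Invoking the script with `--yes` then proceeds straight to job submission without waiting for interactive input.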