Commit d173003

Merge pull request #65 from fact-project/forSPEonMCs
add option to allways asume yes
2 parents: 28b1070 + c8dc0e9

File tree: 3 files changed, +13 -6 lines

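All three scripts receive the same change: a new --yes flag that guards the existing click.confirm(..., abort=True) prompt, so jobs can be started without interactive confirmation (e.g. in scripted or batch runs). Below is a minimal, self-contained sketch of that pattern; it is not taken from the repository, and the command body and echo output are placeholders:

    import click

    @click.command()
    @click.option('--yes', default=False, is_flag=True,
                  help="Assume 'yes' when asked to continue processing and start jobs")
    def main(yes):
        # Without --yes the user is prompted; abort=True exits the script on a "no" answer.
        if not yes:
            click.confirm('Do you want to continue processing and start jobs?', abort=True)
        click.echo('starting jobs ...')  # placeholder for the actual job submission

    if __name__ == '__main__':
        main()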

erna/scripts/process_fact_data.py

Lines changed: 4 additions & 2 deletions
@@ -47,8 +47,9 @@ def make_jobs(jar, xml, aux_source_path, output_directory, df_mapping, engine,
 @click.option('--conditions', help='Name of the data conditions as given in datacheck_conditions.py e.g standard', default='standard')
 @click.option('--max_delta_t', default=30, help='Maximum time difference (minutes) allowed between drs and data files.', type=click.INT)
 @click.option('--local', default=False,is_flag=True, help='Flag indicating whether jobs should be executed localy .')
+@click.option('--yes', help="Assume 'yes'if your asked to continue processing and start jobs", default=False, is_flag=True)
 @click.password_option(help='password to read from the always awesome RunDB')
-def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queue, walltime, engine, num_runs, vmem, log_level, port, source, conditions, max_delta_t, local, password):
+def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queue, walltime, engine, num_runs, vmem, log_level, port, source, conditions, max_delta_t, local, yes, password):
 
     level=logging.INFO
     if log_level is 'DEBUG':
@@ -82,7 +83,8 @@ def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queu
     logger.warn("Missing {} dataruns due to missing datafiles".format(len(df_runs_missing)))
 
     logger.info("Would process {} jobs with {} runs per job".format(len(df_runs)//num_runs, num_runs))
-    click.confirm('Do you want to continue processing and start jobs?', abort=True)
+    if not yes:
+        click.confirm('Do you want to continue processing and start jobs?', abort=True)
 
     job_list = make_jobs(jarpath, xmlpath, aux_source_path, output_directory, df_runs, engine, queue, vmem, num_runs, walltime)
     job_outputs = gridmap.process_jobs(job_list, max_processes=len(job_list), local=local)

erna/scripts/process_fact_data_qsub.py

Lines changed: 4 additions & 2 deletions
@@ -69,10 +69,11 @@ def read_outputs_to_list(job_output_paths):
 @click.option('--conditions', help='Name of the data conditions as given in datacheck_conditions.py e.g std', default='data')
 @click.option('--max_delta_t', default=30, help='Maximum time difference (minutes) allowed between drs and data files.', type=click.INT)
 @click.option('--local', default=False,is_flag=True, help='Flag indicating whether jobs should be executed localy .')
+@click.option('--yes', help="Assume 'yes'if your asked to continue processing and start jobs", default=False, is_flag=True)
 @click.password_option(help='password to read from the always awesome RunDB')
 def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queue, mail,
          walltime, engine, num_runs, qjobs, vmem, log_level, port, source, conditions,
-         max_delta_t, local, password):
+         max_delta_t, local, yes, password):
 
     level=logging.INFO
     if log_level is 'DEBUG':
@@ -102,7 +103,8 @@ def main(earliest_night, latest_night, data_dir, jar, xml, aux_source, out, queu
     df_loaded.to_hdf(out+".tmp", "loaded", mode="a")
 
     logger.info("Processing {} jobs with {} runs per job.".format(int(len(df_loaded)/num_runs), num_runs))
-    click.confirm('Do you want to continue processing and start jobs?', abort=True)
+    if not yes:
+        click.confirm('Do you want to continue processing and start jobs?', abort=True)
 
     #ensure that the max number of queuable jobs is smaller than the total number of jobs
     if qjobs > len(df_loaded):

erna/scripts/process_fact_mc.py

Lines changed: 5 additions & 2 deletions
@@ -94,7 +94,9 @@ def make_jobs(jar, xml, data_paths, drs_paths,
 @click.option('--local_output_format', default="{basename}_{num}.json", help="Give the file format for the local output funktionality."
               + "%b will replace the out filename and %[1-9]n the given local number."
               + "Default is: '{basename}_{num}.json'.Only works with option --local_output. ")
-def main( jar, xml, out, mc_path, queue, walltime, engine, num_jobs, vmem, log_level, port, local, local_output, mcdrs, mcwildcard, local_output_format):
+@click.option('--yes', help="Assume 'yes'if your asked to continue processing and start jobs", default=False, is_flag=True)
+def main( jar, xml, out, mc_path, queue, walltime, engine, num_jobs, vmem, log_level, port, local, local_output, mcdrs, mcwildcard, local_output_format, yes):
+
     '''
     Script to execute fact-tools on MonteCarlo files. Use the MC_PATH argument to specifiy the folders containing the MC
     '''
@@ -140,7 +142,8 @@ def main( jar, xml, out, mc_path, queue, walltime, engine, num_jobs, vmem, log_l
         logger.error("You specified more jobs than files. This doesn't make sense.")
         return
 
-    click.confirm('Do you want to continue processing and start jobs?', abort=True)
+    if not yes:
+        click.confirm('Do you want to continue processing and start jobs?', abort=True)
 
     mc_paths_array = np.array(files)
     drs_paths_array = np.repeat(np.array(drspath), len(mc_paths_array))
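
A quick, hypothetical way to verify the new behaviour (not part of this commit) is click's test runner, here applied to the sketch command shown above rather than to the actual scripts:

    from click.testing import CliRunner

    runner = CliRunner()

    # With --yes the prompt is skipped and the command runs through.
    result = runner.invoke(main, ['--yes'])
    assert result.exit_code == 0

    # Without --yes, answering "n" makes click.confirm(abort=True) abort (exit code 1).
    result = runner.invoke(main, [], input='n\n')
    assert result.exit_code == 1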
