-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcreate_test_script.py
110 lines (87 loc) · 3.88 KB
/
create_test_script.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
###################################################################################################
#
# Copyright (C) 2023 Analog Devices, Inc. All Rights Reserved.
# This software is proprietary and confidential to Analog Devices, Inc. and its licensors.
#
###################################################################################################
"""
Create training bash scripts for test
"""
import argparse
import os
import yaml
def joining(lst):
    """Concatenate the given tokens into a single space-separated string."""
    return ' '.join(lst)
# ---- Command-line interface -------------------------------------------------
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--testconf', help='Enter the config file for the test', required=True)
arg_parser.add_argument('--testpaths', help='Enter the paths for the test', required=True)
cli_args = arg_parser.parse_args()

# Load the test configuration and the path configuration from their YAML files.
with open(cli_args.testconf, 'r', encoding='utf-8') as yaml_file:
    config = yaml.safe_load(yaml_file)
with open(cli_args.testpaths, 'r', encoding='utf-8') as path_file:
    pathconfig = yaml.safe_load(path_file)

# Folder containing the training scripts to be concatenated.
script_path = pathconfig["script_path"]
# Path of the combined output script that will be written.
output_file_path = pathconfig["output_file_path"]

# Names of processed scripts, minus their last three characters
# (presumably a '.sh' suffix — confirm against the script folder contents).
log_file_names = []
# Concatenate every "train*" script found in script_path into one combined
# test script, normalizing each training command line along the way:
# forcing the QAT policy, the epoch count, determinism, a run name, and the
# dataset path taken from the test configuration.
with open(output_file_path, "w", encoding='utf-8') as output_file:
    for filename in os.listdir(script_path):
        # Only training scripts are picked up; everything else is ignored.
        if not filename.startswith("train"):
            continue
        # Read and close the script before processing it.
        with open(os.path.join(script_path, filename), encoding='utf-8') as input_file:
            contents = input_file.read()
        temp = contents.split()
        # Keep the first token (the interpreter/command) on its own line.
        temp.insert(1, "\n")
        # These options are required in every training script; a missing one
        # raises ValueError here, which is deliberately left unhandled.
        i = temp.index('--epochs')
        j = temp.index('--model')
        k = temp.index('--dataset')
        # Force the QAT policy used by the test, adding the option if absent.
        # (Fixed: the inserted string previously carried a stray leading
        # space, which produced a double space in the emitted command.)
        if '--qat-policy' in temp:
            x = temp.index('--qat-policy')
            temp[x + 1] = "policies/qat_policy.yaml"
        else:
            temp.insert(-1, '--qat-policy policies/qat_policy.yaml')
        log_model = temp[j + 1]
        log_data = temp[k + 1]
        # EfficientNetV2 on ImageNet needs a smaller batch size for the test.
        if log_model == "ai87imageneteffnetv2":
            num = temp.index("--batch-size")
            temp[num + 1] = "128"
        log_name = log_model + '-' + log_data
        log_file_names.append(filename[:-3])
        # These datasets are excluded from the generated test script.
        if log_data in ("FaceID", "VGGFace2_FaceDetection"):
            continue
        # Look up the epoch count and data path once; skip the script with a
        # warning when the configuration is incomplete. (Previously the epoch
        # lookup was duplicated across two separate try/except blocks.)
        try:
            epochs = str(config[log_data][log_model]["epoch"])
            path_data = config[log_data]["datapath"]
        except KeyError:
            print(f"\033[93m\u26A0\033[0m Warning: {log_model} model is" +
                  " missing information in test configuration files.")
            continue
        temp[i + 1] = epochs
        if '--deterministic' not in temp:
            temp.insert(-1, '--deterministic')
        temp.insert(-1, '--name ' + log_name)
        temp.insert(-1, '--data ' + path_data)
        temp.append("\n")
        output_file.write(joining(temp))