@@ -147,14 +147,18 @@ def __init__(self, submission: Submission):
         self.sub_dirpath = PathManager.submission_dir(str(submission.user_id), submission.id)
         if not exists(self.sub_dirpath):
             os.makedirs(self.sub_dirpath, exist_ok=True)
-        prob_dir = PathManager.problem_dir(submission.problem._id)  # use display id
-        print('problem id:', submission.problem._id)
-        print('problem dir:', prob_dir)
+        problem: Problem = submission.problem
+        prob_dir = PathManager.problem_dir(problem._id)  # use display id
         if not exists(prob_dir):
             raise Exception("problem dir {} not exists".format(prob_dir))
         for filename in os.listdir(prob_dir):
-            print(f'copy {join(prob_dir, filename)} to {self.sub_dirpath}')
             shutil.copy2(join(prob_dir, filename), self.sub_dirpath)
+        for index, codename in enumerate(problem.code_names):
+            filecontent = submission.code_list[index]
+            codepath = join(self.sub_dirpath, codename)
+            with open(codepath, "w") as wfp:
+                print(f"substitute {codepath} with user-submitted code")
+                wfp.write(filecontent)
 
     def judge(self):
         tester_path = join(self.sub_dirpath, TESTER_NAME)
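Note on the hunk above: the new loop pairs problem.code_names[i] with submission.code_list[i] and overwrites the copied template files with the user's sources. A minimal standalone sketch of that substitution step, assuming the two lists are parallel as the diff implies (write_user_code is a hypothetical helper, not part of this project):

    from os.path import join

    def write_user_code(sub_dir, code_names, code_list):
        # Overwrite each copied template with the matching user-submitted source.
        # Assumes code_names and code_list are parallel lists of equal length.
        for name, content in zip(code_names, code_list):
            with open(join(sub_dir, name), "w") as fp:
                fp.write(content)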
@@ -174,25 +178,27 @@ def judge(self):
         failed_info = []
         with open(join(log_path, "results.json")) as fp:
             res = json.load(fp)
+        grade = res['grade']
         self.sub.grade = res['grade']
         for idx in res['failed']:
             log_fp = open(join(log_path, f"testcase{idx}.log"), "r")
             testcase_fp = open(join(self.sub_dirpath, TESTCASE_NAME), "r")
             testcase_json_data = json.load(testcase_fp)
+            # note: testcase indices reported by the tester are 1-based
if "config" in testcase_json_data :
183
189
# global config
184
190
config = testcase_json_data ["config" ]
185
191
else :
186
- cur_data = testcase_json_data ["testcases" ][idx ]
192
+ cur_data = testcase_json_data ["testcases" ][idx - 1 ]
187
193
if "config" in cur_data :
188
194
# specific config for testcase
189
195
                     config = cur_data["config"]
                 else:
                     # no config
                     config = None
             displayed_test = {
-                "input": testcase_json_data["testcases"][idx]["input"],
-                "expected_output": testcase_json_data["testcases"][idx]["output"],
+                "input": testcase_json_data["testcases"][idx - 1]["input"],
+                "expected_output": testcase_json_data["testcases"][idx - 1]["output"],
             }
             # config, "testcase"
             failed_info.append({
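Note on the idx - 1 change: results.json apparently reports failed testcases with 1-based ids, while the "testcases" array in the testcase JSON is a 0-based Python list, so the lookup must shift by one. A small illustration with made-up data (the real file contents are not shown in this diff):

    results = {"grade": 50, "failed": [1, 3]}      # tester reports 1-based testcase ids
    testcases = {"testcases": [
        {"input": "1 2", "output": "3"},
        {"input": "2 2", "output": "4"},
        {"input": "5 5", "output": "10"},
    ]}
    for idx in results["failed"]:
        case = testcases["testcases"][idx - 1]     # shift to the 0-based list index
        print(case["input"], "->", case["output"])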
@@ -204,5 +210,10 @@ def judge(self):
             log_fp.close()
             testcase_fp.close()
         self.sub.failed_info = failed_info
-        self.sub.result = JudgeStatus.FINISHED
+        if grade == 0:
+            self.sub.result = JudgeStatus.ALL_FAILED
+        elif grade == 100:
+            self.sub.result = JudgeStatus.ALL_PASSED
+        else:
+            self.sub.result = JudgeStatus.SOME_PASSED
         self.sub.save()
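Note on the last hunk: the final grade is now mapped to a three-way status instead of a single FINISHED state. A self-contained sketch of that mapping, assuming JudgeStatus is an enum exposing ALL_FAILED, SOME_PASSED, and ALL_PASSED (the member values below are placeholders; the real enum is defined elsewhere in the project):

    from enum import Enum

    class JudgeStatus(Enum):
        # hypothetical values for illustration only
        ALL_FAILED = 0
        SOME_PASSED = 1
        ALL_PASSED = 2

    def status_for(grade):
        # Map a 0-100 grade to the submission result, mirroring the diff's branches.
        if grade == 0:
            return JudgeStatus.ALL_FAILED
        if grade == 100:
            return JudgeStatus.ALL_PASSED
        return JudgeStatus.SOME_PASSED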