@@ -25,13 +25,10 @@
 )
 from reana_commons.k8s.secrets import REANAUserSecretsStore
 from reana_commons.utils import (
-    calculate_file_access_time,
-    calculate_hash_of_dir,
-    calculate_job_input_hash,
     build_unique_component_name,
 )
 from reana_db.database import Session
-from reana_db.models import Job, JobCache, Workflow, RunStatus
+from reana_db.models import Job, Workflow, RunStatus
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm.attributes import flag_modified
 
@@ -109,9 +106,6 @@ def on_message(self, body, message):
                 if "progress" in msg:
                     _update_run_progress(workflow_uuid, msg)
                     _update_job_progress(workflow_uuid, msg)
-                    # Caching: calculate input hash and store in JobCache
-                    if "caching_info" in msg:
-                        _update_job_cache(msg)
                     Session.commit()
         else:
             logging.error(
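
For context, this handler consumes job status messages published for each workflow run. A rough sketch of the payload shape, inferred from the keys accessed in `on_message` and the helpers below (the field names inside the `progress` entries and all placeholder values are assumptions, not the exact wire format):

```python
# Hypothetical status message; "caching_info" is the part that the
# consumer stops processing after this change.
msg = {
    "progress": {
        "finished": {"total": 1, "job_ids": ["<job-uuid>"]},
        "cached": {"total": 0, "job_ids": []},  # only read by the removed code
    },
    "caching_info": {
        "job_id": "<job-uuid>",
        "job_spec": {"cmd": "cd <workspace>;./run.sh"},
        "workflow_json": {},
        "workflow_workspace": "<path-to-workspace>",
        "result_path": "<path-to-results>",
    },
}
```
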
@@ -193,10 +187,7 @@ def _update_commit_status(workflow, status):
 def _update_run_progress(workflow_uuid, msg):
     """Register succeeded Jobs to DB."""
     workflow = Session.query(Workflow).filter_by(id_=workflow_uuid).one_or_none()
-    cached_jobs = None
     job_progress = workflow.job_progress
-    if "cached" in msg["progress"]:
-        cached_jobs = msg["progress"]["cached"]  # noqa: F841
     for status, _ in PROGRESS_STATUSES:
         if status in msg["progress"]:
             previous_status = workflow.job_progress.get(status)
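
This removal is safe because `cached_jobs` was assigned but never read afterwards; the `# noqa: F841` comment was suppressing exactly that "local variable assigned but never used" warning, so these lines were already dead code.
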
@@ -240,46 +231,6 @@ def _update_job_progress(workflow_uuid, msg):
                     job.status = job_status
 
 
-def _update_job_cache(msg):
-    """Update caching information for finished job."""
-    cached_job = (
-        Session.query(JobCache)
-        .filter_by(job_id=msg["caching_info"].get("job_id"))
-        .first()
-    )
-
-    input_files = []
-    if cached_job:
-        file_access_times = calculate_file_access_time(
-            msg["caching_info"].get("workflow_workspace")
-        )
-        for filename in cached_job.access_times:
-            if filename in file_access_times:
-                input_files.append(filename)
-    else:
-        return
-    cmd = msg["caching_info"]["job_spec"]["cmd"]
-    # removes cd to workspace, to be refactored
-    clean_cmd = ";".join(cmd.split(";")[1:])
-    msg["caching_info"]["job_spec"]["cmd"] = clean_cmd
-
-    if "workflow_workspace" in msg["caching_info"]["job_spec"]:
-        del msg["caching_info"]["job_spec"]["workflow_workspace"]
-    input_hash = calculate_job_input_hash(
-        msg["caching_info"]["job_spec"], msg["caching_info"]["workflow_json"]
-    )
-    workspace_hash = calculate_hash_of_dir(
-        msg["caching_info"].get("workflow_workspace"), input_files
-    )
-    if workspace_hash == -1:
-        return
-
-    cached_job.parameters = input_hash
-    cached_job.result_path = msg["caching_info"].get("result_path")
-    cached_job.workspace_hash = workspace_hash
-    Session.add(cached_job)
-
-
 def _delete_workflow_job(workflow: Workflow) -> None:
     job_name = build_unique_component_name("run-batch", workflow.id_)
     try:
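
The removed `_update_job_cache` was the write side of REANA's job result caching: it fingerprinted a finished job by (a) a hash of its specification plus the workflow definition and (b) a hash of the workspace files the job actually read, and stored both on the `JobCache` row so that an identical future job could reuse the cached result. A minimal sketch of the fingerprinting idea (simplified; `calculate_job_input_hash` and `calculate_hash_of_dir` in `reana_commons.utils` are the real helpers, and the md5-over-JSON scheme below is an assumption, not their verified implementation):

```python
import hashlib
import json


def job_input_hash(job_spec: dict, workflow_json: dict) -> str:
    # Fingerprint a job by its spec plus the workflow definition
    # (assumed approximation of reana_commons.utils.calculate_job_input_hash).
    digest = hashlib.md5()
    digest.update(json.dumps(job_spec, sort_keys=True).encode("utf-8"))
    digest.update(json.dumps(workflow_json, sort_keys=True).encode("utf-8"))
    return digest.hexdigest()


# The removed code also stripped the leading "cd <workspace>" from the
# command before hashing, so cache hits did not depend on the absolute
# workspace path:
cmd = "cd /var/reana/ws-1234;./analyze.sh --input data.csv"
clean_cmd = ";".join(cmd.split(";")[1:])
assert clean_cmd == "./analyze.sh --input data.csv"
```
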