Skip to content

Commit 6561f7a

Browse files
committed
Replace file objects used as function arguments
By replacing file objects passed as function arguments with the read file content, we simplify the life-cycle management of temporary file objects. Temporary files are now handled in a single function. This is done for metadata files, which are fully read into memory right after download anyway. The same is not true for target files, which should preferably be processed in chunks, so target download and verification still deal with file objects (though not in a stream-like manner). Signed-off-by: Teodora Sechkova <[email protected]>
1 parent ad760fd commit 6561f7a

File tree

2 files changed

+29
-41
lines changed

2 files changed

+29
-41
lines changed

tuf/client_rework/metadata_wrapper.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -22,9 +22,8 @@ def __init__(self, meta):
2222
self._meta = meta
2323

2424
@classmethod
25-
def from_json_object(cls, tmp_file):
25+
def from_json_object(cls, raw_data):
2626
"""Loads JSON-formatted TUF metadata from a file object."""
27-
raw_data = tmp_file.read()
2827
# Use local scope import to avoid circular import errors
2928
# pylint: disable=import-outside-toplevel
3029
from tuf.api.serialization.json import JSONDeserializer

tuf/client_rework/updater_rework.py

+28-39
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
import fnmatch
1111
import logging
1212
import os
13-
from typing import BinaryIO, Dict, Optional, TextIO
13+
from typing import Dict, Optional
1414

1515
from securesystemslib import exceptions as sslib_exceptions
1616
from securesystemslib import hash as sslib_hash
@@ -158,9 +158,8 @@ def download_target(self, target: Dict, destination_directory: str):
158158
temp_obj = download.download_file(
159159
file_mirror, target["fileinfo"]["length"], self._fetcher
160160
)
161-
162-
temp_obj.seek(0)
163-
self._verify_target_file(temp_obj, target)
161+
_check_file_length(temp_obj, target["fileinfo"]["length"])
162+
_check_hashes_obj(temp_obj, target["fileinfo"]["hashes"])
164163
break
165164

166165
except Exception as exception:
@@ -297,7 +296,7 @@ def _root_mirrors_download(self, root_mirrors: Dict) -> "RootWrapper":
297296
)
298297

299298
temp_obj.seek(0)
300-
intermediate_root = self._verify_root(temp_obj)
299+
intermediate_root = self._verify_root(temp_obj.read())
301300
# When we reach this point, a root file has been successfully
302301
# downloaded and verified so we can exit the loop.
303302
break
@@ -344,7 +343,7 @@ def _load_timestamp(self) -> None:
344343
)
345344

346345
temp_obj.seek(0)
347-
verified_timestamp = self._verify_timestamp(temp_obj)
346+
verified_timestamp = self._verify_timestamp(temp_obj.read())
348347
break
349348

350349
except Exception as exception: # pylint: disable=broad-except
@@ -397,7 +396,7 @@ def _load_snapshot(self) -> None:
397396
)
398397

399398
temp_obj.seek(0)
400-
verified_snapshot = self._verify_snapshot(temp_obj)
399+
verified_snapshot = self._verify_snapshot(temp_obj.read())
401400
break
402401

403402
except Exception as exception: # pylint: disable=broad-except
@@ -451,7 +450,7 @@ def _load_targets(self, targets_role: str, parent_role: str) -> None:
451450

452451
temp_obj.seek(0)
453452
verified_targets = self._verify_targets(
454-
temp_obj, targets_role, parent_role
453+
temp_obj.read(), targets_role, parent_role
455454
)
456455
break
457456

@@ -472,12 +471,12 @@ def _load_targets(self, targets_role: str, parent_role: str) -> None:
472471
self._get_full_meta_name(targets_role, extension=".json")
473472
)
474473

475-
def _verify_root(self, temp_obj: TextIO) -> RootWrapper:
474+
def _verify_root(self, file_content: bytes) -> RootWrapper:
476475
"""
477476
TODO
478477
"""
479478

480-
intermediate_root = RootWrapper.from_json_object(temp_obj)
479+
intermediate_root = RootWrapper.from_json_object(file_content)
481480

482481
# Check for an arbitrary software attack
483482
trusted_root = self._metadata["root"]
@@ -490,7 +489,6 @@ def _verify_root(self, temp_obj: TextIO) -> RootWrapper:
490489

491490
# Check for a rollback attack.
492491
if intermediate_root.version < trusted_root.version:
493-
temp_obj.close()
494492
raise exceptions.ReplayedMetadataError(
495493
"root", intermediate_root.version(), trusted_root.version()
496494
)
@@ -499,11 +497,11 @@ def _verify_root(self, temp_obj: TextIO) -> RootWrapper:
499497

500498
return intermediate_root
501499

502-
def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper:
500+
def _verify_timestamp(self, file_content: bytes) -> TimestampWrapper:
503501
"""
504502
TODO
505503
"""
506-
intermediate_timestamp = TimestampWrapper.from_json_object(temp_obj)
504+
intermediate_timestamp = TimestampWrapper.from_json_object(file_content)
507505

508506
# Check for an arbitrary software attack
509507
trusted_root = self._metadata["root"]
@@ -517,7 +515,6 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper:
517515
intermediate_timestamp.signed.version
518516
<= self._metadata["timestamp"].version
519517
):
520-
temp_obj.close()
521518
raise exceptions.ReplayedMetadataError(
522519
"root",
523520
intermediate_timestamp.version(),
@@ -529,7 +526,6 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper:
529526
intermediate_timestamp.snapshot.version
530527
<= self._metadata["timestamp"].snapshot["version"]
531528
):
532-
temp_obj.close()
533529
raise exceptions.ReplayedMetadataError(
534530
"root",
535531
intermediate_timestamp.snapshot.version(),
@@ -540,24 +536,23 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper:
540536

541537
return intermediate_timestamp
542538

543-
def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper:
539+
def _verify_snapshot(self, file_content: bytes) -> SnapshotWrapper:
544540
"""
545541
TODO
546542
"""
547543

548544
# Check against timestamp metadata
549545
if self._metadata["timestamp"].snapshot.get("hash"):
550546
_check_hashes(
551-
temp_obj, self._metadata["timestamp"].snapshot.get("hash")
547+
file_content, self._metadata["timestamp"].snapshot.get("hash")
552548
)
553549

554-
intermediate_snapshot = SnapshotWrapper.from_json_object(temp_obj)
550+
intermediate_snapshot = SnapshotWrapper.from_json_object(file_content)
555551

556552
if (
557553
intermediate_snapshot.version
558554
!= self._metadata["timestamp"].snapshot["version"]
559555
):
560-
temp_obj.close()
561556
raise exceptions.BadVersionNumberError
562557

563558
# Check for an arbitrary software attack
@@ -573,15 +568,14 @@ def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper:
573568
target_role["version"]
574569
!= self._metadata["snapshot"].meta[target_role]["version"]
575570
):
576-
temp_obj.close()
577571
raise exceptions.BadVersionNumberError
578572

579573
intermediate_snapshot.expires()
580574

581575
return intermediate_snapshot
582576

583577
def _verify_targets(
584-
self, temp_obj: TextIO, filename: str, parent_role: str
578+
self, file_content: bytes, filename: str, parent_role: str
585579
) -> TargetsWrapper:
586580
"""
587581
TODO
@@ -590,15 +584,14 @@ def _verify_targets(
590584
# Check against timestamp metadata
591585
if self._metadata["snapshot"].role(filename).get("hash"):
592586
_check_hashes(
593-
temp_obj, self._metadata["snapshot"].targets.get("hash")
587+
file_content, self._metadata["snapshot"].targets.get("hash")
594588
)
595589

596-
intermediate_targets = TargetsWrapper.from_json_object(temp_obj)
590+
intermediate_targets = TargetsWrapper.from_json_object(file_content)
597591
if (
598592
intermediate_targets.version
599593
!= self._metadata["snapshot"].role(filename)["version"]
600594
):
601-
temp_obj.close()
602595
raise exceptions.BadVersionNumberError
603596

604597
# Check for an arbitrary software attack
@@ -612,15 +605,6 @@ def _verify_targets(
612605

613606
return intermediate_targets
614607

615-
@staticmethod
616-
def _verify_target_file(temp_obj: BinaryIO, targetinfo: Dict) -> None:
617-
"""
618-
TODO
619-
"""
620-
621-
_check_file_length(temp_obj, targetinfo["fileinfo"]["length"])
622-
_check_hashes(temp_obj, targetinfo["fileinfo"]["hashes"])
623-
624608
def _preorder_depth_first_walk(self, target_filepath) -> Dict:
625609
"""
626610
TODO
@@ -849,19 +833,24 @@ def _check_file_length(file_object, trusted_file_length):
849833
)
850834

851835

852-
def _check_hashes(file_object, trusted_hashes):
836+
def _check_hashes_obj(file_object, trusted_hashes):
837+
"""
838+
TODO
839+
"""
840+
file_object.seek(0)
841+
return _check_hashes(file_object.read(), trusted_hashes)
842+
843+
844+
def _check_hashes(file_content, trusted_hashes):
853845
"""
854846
TODO
855847
"""
856848
# Verify each trusted hash of 'trusted_hashes'. If all are valid, simply
857849
# return.
858850
for algorithm, trusted_hash in trusted_hashes.items():
859851
digest_object = sslib_hash.digest(algorithm)
860-
# Ensure we read from the beginning of the file object
861-
# TODO: should we store file position (before the loop) and reset
862-
# after we seek about?
863-
file_object.seek(0)
864-
digest_object.update(file_object.read())
852+
853+
digest_object.update(file_content)
865854
computed_hash = digest_object.hexdigest()
866855

867856
# Raise an exception if any of the hashes are incorrect.

0 commit comments

Comments
 (0)