@@ -60,13 +60,13 @@
 
 import logging
 import os
-from typing import Any, Dict, List, Optional, Set, Tuple
+from typing import List, Optional, Set, Tuple
 from urllib import parse
 
 from securesystemslib import util as sslib_util
 
 from tuf import exceptions
-from tuf.api.metadata import Targets
+from tuf.api.metadata import TargetFile, Targets
 from tuf.ngclient._internal import requests_fetcher, trusted_metadata_set
 from tuf.ngclient.config import UpdaterConfig
 from tuf.ngclient.fetcher import FetcherInterface
@@ -144,8 +144,8 @@ def refresh(self) -> None:
 
     def get_one_valid_targetinfo(
         self, target_path: str
-    ) -> Optional[Dict[str, Any]]:
-        """Returns target information for 'target_path'.
+    ) -> Optional[TargetFile]:
+        """Returns TargetFile instance with information for 'target_path'.
 
         The return value can be used as an argument to
         :func:`download_target()` and :func:`updated_targets()`.
@@ -172,14 +172,14 @@ def get_one_valid_targetinfo(
             TODO: download-related errors
 
         Returns:
-            A targetinfo dictionary or None
+            A TargetFile instance or None.
         """
         return self._preorder_depth_first_walk(target_path)
 
     @staticmethod
     def updated_targets(
-        targets: List[Dict[str, Any]], destination_directory: str
-    ) -> List[Dict[str, Any]]:
+        targets: List[TargetFile], destination_directory: str
+    ) -> List[TargetFile]:
         """Checks whether local cached target files are up to date
 
         After retrieving the target information for the targets that should be
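
With this change the caller gets a TargetFile back and reads its attributes (path, length, hashes) instead of the old {"filepath": ..., "fileinfo": ...} dictionary. A minimal lookup sketch, assuming an already constructed and refreshed ngclient Updater; the constructor arguments and target name are illustrative placeholders, not part of this diff:

    from tuf.ngclient import Updater

    # Illustrative constructor arguments -- not taken from this diff.
    updater = Updater(
        repository_dir="/path/to/local/metadata",
        metadata_base_url="https://example.com/metadata/",
        target_base_url="https://example.com/targets/",
    )
    updater.refresh()

    info = updater.get_one_valid_targetinfo("file1.txt")
    if info is None:
        print("file1.txt is not a target in this repository")
    else:
        # TargetFile attributes replace the old dictionary keys.
        print(info.path, info.length, info.hashes)
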
@@ -202,17 +202,14 @@ def updated_targets(
             # against each hash listed for its fileinfo. Note: join() discards
             # 'destination_directory' if 'filepath' contains a leading path
             # separator (i.e., is treated as an absolute path).
-            filepath = target["filepath"]
-            target_fileinfo: "TargetFile" = target["fileinfo"]
-
-            target_filepath = os.path.join(destination_directory, filepath)
+            target_filepath = os.path.join(destination_directory, target.path)
 
             if target_filepath in updated_targetpaths:
                 continue
 
             try:
                 with open(target_filepath, "rb") as target_file:
-                    target_fileinfo.verify_length_and_hashes(target_file)
+                    target.verify_length_and_hashes(target_file)
             # If the file does not exist locally or length and hashes
             # do not match, append to updated targets.
             except (OSError, exceptions.LengthOrHashMismatchError):
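
updated_targets() now works on the TargetFile objects directly: target.path locates the cached file and verify_length_and_hashes() validates it. A sketch of the calling side, continuing the example above (the download directory is a placeholder):

    # 'info' is the TargetFile returned by get_one_valid_targetinfo() above.
    stale = updater.updated_targets([info], "/path/to/download/dir")
    if not stale:
        print("cached copy already matches the repository metadata")
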
@@ -223,15 +220,15 @@ def updated_targets(
 
     def download_target(
         self,
-        targetinfo: Dict,
+        targetinfo: TargetFile,
         destination_directory: str,
         target_base_url: Optional[str] = None,
     ):
         """Downloads the target file specified by 'targetinfo'.
 
         Args:
-            targetinfo: data received from get_one_valid_targetinfo() or
-                updated_targets().
+            targetinfo: TargetFile instance received from
+                get_one_valid_targetinfo() or updated_targets().
             destination_directory: existing local directory to download into.
                 Note that new directories may be created inside
                 destination_directory as required.
@@ -252,27 +249,26 @@ def download_target(
         else:
             target_base_url = _ensure_trailing_slash(target_base_url)
 
-        target_fileinfo: "TargetFile" = targetinfo["fileinfo"]
-        target_filepath = targetinfo["filepath"]
+        target_filepath = targetinfo.path
         consistent_snapshot = self._trusted_set.root.signed.consistent_snapshot
         if consistent_snapshot and self.config.prefix_targets_with_hash:
-            hashes = list(target_fileinfo.hashes.values())
+            hashes = list(targetinfo.hashes.values())
             target_filepath = f"{hashes[0]}.{target_filepath}"
         full_url = parse.urljoin(target_base_url, target_filepath)
 
         with self._fetcher.download_file(
-            full_url, target_fileinfo.length
+            full_url, targetinfo.length
        ) as target_file:
             try:
-                target_fileinfo.verify_length_and_hashes(target_file)
+                targetinfo.verify_length_and_hashes(target_file)
             except exceptions.LengthOrHashMismatchError as e:
                 raise exceptions.RepositoryError(
                     f"{target_filepath} length or hashes do not match"
                 ) from e
 
             # Store the target file name without the HASH prefix.
             local_filepath = os.path.join(
-                destination_directory, targetinfo["filepath"]
+                destination_directory, targetinfo.path
             )
             sslib_util.persist_temp_file(target_file, local_filepath)
 
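
Likewise download_target() now takes the TargetFile itself: length and hashes come straight from the object, and the local file name is derived from targetinfo.path rather than a "filepath" key. Continuing the sketch above (the download directory is again a placeholder):

    for targetinfo in stale:
        # Fetches the file, verifies its length and hashes, and persists it
        # under destination_directory using targetinfo.path as the file name.
        updater.download_target(targetinfo, "/path/to/download/dir")
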
@@ -381,7 +377,7 @@ def _load_targets(self, role: str, parent_role: str) -> None:
 
     def _preorder_depth_first_walk(
         self, target_filepath: str
-    ) -> Optional[Dict[str, Any]]:
+    ) -> Optional[TargetFile]:
         """
         Interrogates the tree of target delegations in order of appearance
         (which implicitly order trustworthiness), and returns the matching
@@ -414,7 +410,8 @@ def _preorder_depth_first_walk(
 
             if target is not None:
                 logger.debug("Found target in current role %s", role_name)
-                return {"filepath": target_filepath, "fileinfo": target}
+                target.targetname = target_filepath
+                return target
 
             # After preorder check, add current role to set of visited roles.
             visited_role_names.add((role_name, parent_role))