From 1348aa7bf09d981f757bcaf5467a9d0691453716 Mon Sep 17 00:00:00 2001
From: zgeor
Date: Sun, 30 Jun 2024 17:53:25 +0100
Subject: [PATCH] initial commit

---
 field_notices_tests.py |  4 +---
 jobs/get_features.py   | 40 ++++++++++++----------------------------
 routers/features.py    |  4 +---
 utils.py               | 16 ++++------------
 4 files changed, 18 insertions(+), 46 deletions(-)

diff --git a/field_notices_tests.py b/field_notices_tests.py
index 4c8088d..9e164cf 100644
--- a/field_notices_tests.py
+++ b/field_notices_tests.py
@@ -28,9 +28,7 @@
     By.XPATH, '//div[@data-config-metrics-title="Products by Category"]'
 )
 links = section.find_elements(By.TAG_NAME, "a")
-product_list = [
-    {"product": i.text, "url": i.get_attribute("href")} for i in links
-]
+product_list = [{"product": i.text, "url": i.get_attribute("href")} for i in links]
 
 # Initialize a dictionary to hold the products by category
 products_by_category = {}
diff --git a/jobs/get_features.py b/jobs/get_features.py
index 39e4236..3f2d8ad 100644
--- a/jobs/get_features.py
+++ b/jobs/get_features.py
@@ -26,9 +26,7 @@ class Config:
     FETCH_FEATURES_ONLINE = True
     CONCURRENT_REQUESTS_LIMIT = 5
     REQUEST_DELAY = 1
-    FEATURES_DIR: Path = Path(
-        os.path.join(os.getcwd(), "data", "product_features")
-    )
+    FEATURES_DIR: Path = Path(os.path.join(os.getcwd(), "data", "product_features"))
     FEATURES_DIR.mkdir(exist_ok=True, parents=True)
     TYPES = [
         "Switches",
@@ -109,9 +107,7 @@ async def _fetch_releases(
         """
         platform_id = each_platform.get("platform_id")
         self.logger.info(f"Fetching releases for {platform_id}")
-        request = RequestModel(
-            platform_id=platform_id, mdf_product_type=each_type
-        )
+        request = RequestModel(platform_id=platform_id, mdf_product_type=each_type)
         response = await client.post(
             self.config.REQUEST_2,
             headers=self.config.HEADERS,
@@ -166,7 +162,9 @@ async def _fetch_features(
                 feature["platform_id"] = each_release["platform_id"]
                 feature["release_id"] = each_release["release_id"]
 
-            file_name = f"{each_release['platform_id']}_{each_release['release_id']}.json"
+            file_name = (
+                f"{each_release['platform_id']}_{each_release['release_id']}.json"
+            )
             file_path = self.config.FEATURES_DIR / file_name
 
             async with aiofiles.open(file_path, "w") as file:
@@ -179,9 +177,7 @@
                 f"Failed to fetch features for platform {each_release['platform_id']} and release {each_release['release_id']}, status code: {response.status_code}"
             )
 
-    async def _read_file(
-        self, filename: str
-    ) -> Dict[str, List[Dict[str, Any]]]:
+    async def _read_file(self, filename: str) -> Dict[str, List[Dict[str, Any]]]:
         """
         Read data from a local JSON file.
         :param filename: The name of the file to read.
@@ -203,9 +199,7 @@ async def _fetch_all_features(
         :param tar: The tar file to add the features data.
         """
         async with httpx.AsyncClient(timeout=900) as client:
-            semaphore = asyncio.Semaphore(
-                self.config.CONCURRENT_REQUESTS_LIMIT
-            )
+            semaphore = asyncio.Semaphore(self.config.CONCURRENT_REQUESTS_LIMIT)
 
             async def fetch_features_with_semaphore(
                 each_release: Dict[str, Any], mdf_product_type: str
@@ -219,9 +213,7 @@ async def fetch_features_with_semaphore(
             for mdf_product_type, releases_list in releases.items():
                 for each_release in releases_list:
                     feature_tasks.append(
-                        fetch_features_with_semaphore(
-                            each_release, mdf_product_type
-                        )
+                        fetch_features_with_semaphore(each_release, mdf_product_type)
                     )
             await asyncio.gather(*feature_tasks)
             self.logger.info("Fetched all features data")
@@ -244,9 +236,7 @@ async def _fetch_platforms_data(self) -> Dict[str, Any]:
             return await self._fetch_online_platforms()
         return await self._read_file("platforms")
 
-    async def _fetch_releases_data(
-        self, platforms: Dict[str, Any]
-    ) -> Dict[str, Any]:
+    async def _fetch_releases_data(self, platforms: Dict[str, Any]) -> Dict[str, Any]:
         """
         Fetch or read releases data.
         :param platforms: A dictionary containing platforms data.
@@ -278,15 +268,11 @@ async def _fetch_online_platforms(self) -> Dict[str, Any]:
         platforms_results = await asyncio.gather(*platform_tasks)
         platforms = {
             each_type: data
-            for each_type, data in zip(
-                self.config.TYPES, platforms_results
-            )
+            for each_type, data in zip(self.config.TYPES, platforms_results)
         }
         return platforms
 
-    async def _fetch_online_releases(
-        self, platforms: Dict[str, Any]
-    ) -> Dict[str, Any]:
+    async def _fetch_online_releases(self, platforms: Dict[str, Any]) -> Dict[str, Any]:
         """
         Fetch releases data from the online API for the given platforms.
         :param platforms: A dictionary containing platforms data.
@@ -302,9 +288,7 @@ async def _fetch_online_releases(
             ]
             releases_results = await asyncio.gather(*release_tasks)
             releases[each_type] = [
-                release
-                for sublist in releases_results
-                for release in sublist
+                release for sublist in releases_results for release in sublist
             ]
             self.logger.info(
                 f"Retrieved {len(releases[each_type])} releases for {each_type}"
diff --git a/routers/features.py b/routers/features.py
index 8bd6f72..120b85e 100644
--- a/routers/features.py
+++ b/routers/features.py
@@ -72,9 +72,7 @@ def get_features(
     file_name = f"{platform_id}_{release_id}.json"
 
     if not os.path.exists(tar_path):
-        raise HTTPException(
-            status_code=404, detail="Feature archive not found."
-        )
+        raise HTTPException(status_code=404, detail="Feature archive not found.")
 
     features = extract_feature(tar_path, file_name)
     return features
diff --git a/utils.py b/utils.py
index 3833777..f07b06f 100644
--- a/utils.py
+++ b/utils.py
@@ -13,9 +13,7 @@ class PaginationParams:
 
     def __init__(
         self,
-        page: int = Query(
-            1, ge=1, description="The page number for pagination"
-        ),
+        page: int = Query(1, ge=1, description="The page number for pagination"),
         limit: int = Query(
             20,
             ge=1,
@@ -33,9 +31,7 @@ def offset(self):
 
 def paginate(func: Callable[..., Union[List[Dict[str, Any]], Dict[str, Any]]]):
     @wraps(func)
-    async def async_wrapper(
-        *args, pagination: PaginationParams = Depends(), **kwargs
-    ):
+    async def async_wrapper(*args, pagination: PaginationParams = Depends(), **kwargs):
         limit = pagination.limit
         offset = pagination.offset
 
@@ -48,13 +44,9 @@ async def async_wrapper(
             total_items = len(results)
             paginated_results = results[offset : offset + limit]
         else:
-            raise HTTPException(
-                status_code=500, detail="Results should be a list."
-            )
+            raise HTTPException(status_code=500, detail="Results should be a list.")
 
-        total_pages = (
-            total_items + limit - 1
-        ) // limit  # Calculate total pages
+        total_pages = (total_items + limit - 1) // limit  # Calculate total pages
 
         current_page = pagination.page
         has_more = offset + limit < total_items