
Commit 1348aa7

initial commit
r00tmebaby committed Jun 30, 2024
1 parent 76aef8c commit 1348aa7
Showing 4 changed files with 18 additions and 46 deletions.
field_notices_tests.py (4 changes: 1 addition & 3 deletions)
@@ -28,9 +28,7 @@
         By.XPATH, '//div[@data-config-metrics-title="Products by Category"]'
     )
     links = section.find_elements(By.TAG_NAME, "a")
-    product_list = [
-        {"product": i.text, "url": i.get_attribute("href")} for i in links
-    ]
+    product_list = [{"product": i.text, "url": i.get_attribute("href")} for i in links]
     # Initialize a dictionary to hold the products by category
     products_by_category = {}

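For context, a runnable sketch of the scrape this hunk reformats. The driver setup and target URL below are assumptions for illustration; the diff only shows the section lookup and the link comprehension:

from selenium import webdriver
from selenium.webdriver.common.by import By

# Hypothetical setup; the actual test configures its own driver and page.
driver = webdriver.Chrome()
driver.get("https://example.com/products")  # placeholder URL, not from the diff

section = driver.find_element(
    By.XPATH, '//div[@data-config-metrics-title="Products by Category"]'
)
links = section.find_elements(By.TAG_NAME, "a")
# One dict per product link, matching the reformatted comprehension above
product_list = [{"product": i.text, "url": i.get_attribute("href")} for i in links]
driver.quit()
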
jobs/get_features.py (40 changes: 12 additions & 28 deletions)
@@ -26,9 +26,7 @@ class Config:
     FETCH_FEATURES_ONLINE = True
     CONCURRENT_REQUESTS_LIMIT = 5
     REQUEST_DELAY = 1
-    FEATURES_DIR: Path = Path(
-        os.path.join(os.getcwd(), "data", "product_features")
-    )
+    FEATURES_DIR: Path = Path(os.path.join(os.getcwd(), "data", "product_features"))
     FEATURES_DIR.mkdir(exist_ok=True, parents=True)
     TYPES = [
         "Switches",
@@ -109,9 +107,7 @@ async def _fetch_releases(
         """
         platform_id = each_platform.get("platform_id")
         self.logger.info(f"Fetching releases for {platform_id}")
-        request = RequestModel(
-            platform_id=platform_id, mdf_product_type=each_type
-        )
+        request = RequestModel(platform_id=platform_id, mdf_product_type=each_type)
         response = await client.post(
             self.config.REQUEST_2,
             headers=self.config.HEADERS,
@@ -166,7 +162,9 @@ async def _fetch_features(
             feature["platform_id"] = each_release["platform_id"]
             feature["release_id"] = each_release["release_id"]

-            file_name = f"{each_release['platform_id']}_{each_release['release_id']}.json"
+            file_name = (
+                f"{each_release['platform_id']}_{each_release['release_id']}.json"
+            )
             file_path = self.config.FEATURES_DIR / file_name

             async with aiofiles.open(file_path, "w") as file:
@@ -179,9 +177,7 @@
                 f"Failed to fetch features for platform {each_release['platform_id']} and release {each_release['release_id']}, status code: {response.status_code}"
             )

-    async def _read_file(
-        self, filename: str
-    ) -> Dict[str, List[Dict[str, Any]]]:
+    async def _read_file(self, filename: str) -> Dict[str, List[Dict[str, Any]]]:
         """
         Read data from a local JSON file.
         :param filename: The name of the file to read.
@@ -203,9 +199,7 @@ async def _fetch_all_features(
         :param tar: The tar file to add the features data.
         """
         async with httpx.AsyncClient(timeout=900) as client:
-            semaphore = asyncio.Semaphore(
-                self.config.CONCURRENT_REQUESTS_LIMIT
-            )
+            semaphore = asyncio.Semaphore(self.config.CONCURRENT_REQUESTS_LIMIT)

             async def fetch_features_with_semaphore(
                 each_release: Dict[str, Any], mdf_product_type: str
@@ -219,9 +213,7 @@ async def fetch_features_with_semaphore(
             for mdf_product_type, releases_list in releases.items():
                 for each_release in releases_list:
                     feature_tasks.append(
-                        fetch_features_with_semaphore(
-                            each_release, mdf_product_type
-                        )
+                        fetch_features_with_semaphore(each_release, mdf_product_type)
                     )
             await asyncio.gather(*feature_tasks)
             self.logger.info("Fetched all features data")
@@ -244,9 +236,7 @@ async def _fetch_platforms_data(self) -> Dict[str, Any]:
             return await self._fetch_online_platforms()
         return await self._read_file("platforms")

-    async def _fetch_releases_data(
-        self, platforms: Dict[str, Any]
-    ) -> Dict[str, Any]:
+    async def _fetch_releases_data(self, platforms: Dict[str, Any]) -> Dict[str, Any]:
         """
         Fetch or read releases data.
         :param platforms: A dictionary containing platforms data.
@@ -278,15 +268,11 @@ async def _fetch_online_platforms(self) -> Dict[str, Any]:
         platforms_results = await asyncio.gather(*platform_tasks)
         platforms = {
             each_type: data
-            for each_type, data in zip(
-                self.config.TYPES, platforms_results
-            )
+            for each_type, data in zip(self.config.TYPES, platforms_results)
         }
         return platforms

-    async def _fetch_online_releases(
-        self, platforms: Dict[str, Any]
-    ) -> Dict[str, Any]:
+    async def _fetch_online_releases(self, platforms: Dict[str, Any]) -> Dict[str, Any]:
         """
         Fetch releases data from the online API for the given platforms.
         :param platforms: A dictionary containing platforms data.
@@ -302,9 +288,7 @@ async def _fetch_online_releases(
             ]
             releases_results = await asyncio.gather(*release_tasks)
             releases[each_type] = [
-                release
-                for sublist in releases_results
-                for release in sublist
+                release for sublist in releases_results for release in sublist
             ]
             self.logger.info(
                 f"Retrieved {len(releases[each_type])} releases for {each_type}"
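Every hunk in this file is a pure reflow, but the code being reflowed is a semaphore-throttled async fan-out worth seeing whole. A minimal, self-contained sketch of that pattern, assuming a placeholder endpoint and payloads (the real REQUEST_2 URL, HEADERS, and RequestModel are not shown in this diff):

import asyncio

import httpx

CONCURRENT_REQUESTS_LIMIT = 5  # mirrors Config.CONCURRENT_REQUESTS_LIMIT


async def fetch_all(payloads):
    async with httpx.AsyncClient(timeout=900) as client:
        semaphore = asyncio.Semaphore(CONCURRENT_REQUESTS_LIMIT)

        async def fetch_one(payload):
            # At most CONCURRENT_REQUESTS_LIMIT requests are in flight;
            # the remaining tasks wait at the semaphore.
            async with semaphore:
                response = await client.post(
                    "https://example.invalid/api", json=payload  # placeholder URL
                )
                response.raise_for_status()
                return response.json()

        return await asyncio.gather(*(fetch_one(p) for p in payloads))


# asyncio.run(fetch_all([{"platform_id": "p1"}, {"platform_id": "p2"}]))
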
routers/features.py (4 changes: 1 addition & 3 deletions)
@@ -72,9 +72,7 @@ def get_features(
     file_name = f"{platform_id}_{release_id}.json"

     if not os.path.exists(tar_path):
-        raise HTTPException(
-            status_code=404, detail="Feature archive not found."
-        )
+        raise HTTPException(status_code=404, detail="Feature archive not found.")

     features = extract_feature(tar_path, file_name)
     return features
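
extract_feature itself is defined outside this diff; a plausible sketch under stated assumptions (one JSON member per platform/release pair, stored at the top level of the tar):

import json
import tarfile


def extract_feature(tar_path, file_name):
    # Assumed layout: the archive holds file_name as a top-level member.
    with tarfile.open(tar_path, "r") as tar:
        try:
            member = tar.extractfile(file_name)
        except KeyError:  # no member with that name
            return None
        if member is None:  # name exists but is not a regular file
            return None
        return json.load(member)
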
utils.py (16 changes: 4 additions & 12 deletions)
@@ -13,9 +13,7 @@
 class PaginationParams:
     def __init__(
         self,
-        page: int = Query(
-            1, ge=1, description="The page number for pagination"
-        ),
+        page: int = Query(1, ge=1, description="The page number for pagination"),
         limit: int = Query(
             20,
             ge=1,
@@ -33,9 +31,7 @@ def offset(self):

 def paginate(func: Callable[..., Union[List[Dict[str, Any]], Dict[str, Any]]]):
     @wraps(func)
-    async def async_wrapper(
-        *args, pagination: PaginationParams = Depends(), **kwargs
-    ):
+    async def async_wrapper(*args, pagination: PaginationParams = Depends(), **kwargs):
         limit = pagination.limit
         offset = pagination.offset

@@ -48,13 +44,9 @@ async def async_wrapper(
             total_items = len(results)
             paginated_results = results[offset : offset + limit]
         else:
-            raise HTTPException(
-                status_code=500, detail="Results should be a list."
-            )
+            raise HTTPException(status_code=500, detail="Results should be a list.")

-        total_pages = (
-            total_items + limit - 1
-        ) // limit  # Calculate total pages
+        total_pages = (total_items + limit - 1) // limit  # Calculate total pages
         current_page = pagination.page
         has_more = offset + limit < total_items
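
The ceiling division in the last hunk is the one non-obvious line: (total_items + limit - 1) // limit rounds up with integer math alone, so 45 items at a limit of 20 yield 3 pages rather than 2. A small standalone check with illustrative values (not taken from the code):

total_items, limit = 45, 20

for page in (1, 2, 3):
    offset = (page - 1) * limit
    total_pages = (total_items + limit - 1) // limit  # ceiling division -> 3
    has_more = offset + limit < total_items           # True, True, False
    print(page, total_pages, has_more)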