Skip to content

fix: tgi image uri unit tests #5127

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits on Apr 15, 2025
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions tests/unit/sagemaker/image_uris/test_huggingface_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from __future__ import absolute_import

import pytest
from packaging.version import parse

from sagemaker.huggingface import get_huggingface_llm_image_uri
from tests.unit.sagemaker.image_uris import expected_uris, conftest
Expand Down Expand Up @@ -72,10 +73,31 @@ def test_huggingface_uris(load_config):
VERSIONS = load_config["inference"]["versions"]
device = load_config["inference"]["processors"][0]
backend = "huggingface-neuronx" if device == "inf2" else "huggingface"

# Fail if device is not in mapping
if device not in HF_VERSIONS_MAPPING:
raise ValueError(f"Device {device} not found in HF_VERSIONS_MAPPING")

# Get highest version for the device
highest_version = max(HF_VERSIONS_MAPPING[device].keys(), key=lambda x: parse(x))

for version in VERSIONS:
ACCOUNTS = load_config["inference"]["versions"][version]["registries"]
for region in ACCOUNTS.keys():
uri = get_huggingface_llm_image_uri(backend, region=region, version=version)

# Skip only if test version is higher than highest known version.
# There's now automation to add new TGI releases to image_uri_config directory
# that doesn't involve a human raising a PR.
if parse(version) > parse(highest_version):
print(
f"Skipping version check for {version} as there is "
"automation that now updates the image_uri_config "
"without a human raising a PR. Tests will pass for "
f"versions higher than {highest_version} that are not in HF_VERSIONS_MAPPING."
)
continue

expected = expected_uris.huggingface_llm_framework_uri(
"huggingface-pytorch-tgi-inference",
ACCOUNTS[region],
Expand Down