
Commit a253fe7

Add support for paligemma deploy (#257)
* Add support for paligemma deploy
* fix(pre_commit): 🎨 auto format pre-commit hooks
* make style
* Add more logs for paligemma deploy

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 9c716fb commit a253fe7
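
For context, a minimal usage sketch of the new code path. The API key, workspace, project, version, and model_type values below are placeholders, not taken from this commit; what the change adds is that deploy() routes any model_type containing "paligemma" to deploy_paligemma():

    from roboflow import Roboflow

    # Placeholder credentials and identifiers for illustration only.
    rf = Roboflow(api_key="YOUR_API_KEY")
    version = rf.workspace("my-workspace").project("my-project").version(1)

    # model_path should hold either a JAX .npz checkpoint or a PyTorch
    # .safetensors checkpoint plus its tokenizer/config JSON files;
    # deploy() now hands "paligemma" model types to deploy_paligemma().
    version.deploy(model_type="paligemma-3b-pt-224", model_path="./paligemma-weights")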


2 files changed: +59 −4 lines changed


roboflow/__init__.py

+1 −1
@@ -14,7 +14,7 @@
 from roboflow.models import CLIPModel, GazeModel  # noqa: F401
 from roboflow.util.general import write_line

-__version__ = "1.1.29"
+__version__ = "1.1.30"


 def check_key(api_key, model, notebook, num_retries=0):

roboflow/core/version.py

+58 −3
@@ -432,11 +432,15 @@ def deploy(self, model_type: str, model_path: str, filename: str = "weights/best
             filename (str, optional): The name of the weights file. Defaults to "weights/best.pt".
         """

-        supported_models = ["yolov5", "yolov7-seg", "yolov8", "yolov9", "yolonas"]
+        supported_models = ["yolov5", "yolov7-seg", "yolov8", "yolov9", "yolonas", "paligemma"]

         if not any(supported_model in model_type for supported_model in supported_models):
             raise (ValueError(f"Model type {model_type} not supported. Supported models are" f" {supported_models}"))

+        if "paligemma" in model_type:
+            self.deploy_paligemma(model_type, model_path, filename)
+            return
+
         if "yolonas" in model_type:
             self.deploy_yolonas(model_type, model_path, filename)
             return
@@ -548,6 +552,57 @@ def deploy(self, model_type: str, model_path: str, filename: str = "weights/best

         self.upload_zip(model_type, model_path)

+    def deploy_paligemma(
+        self, model_type: str, model_path: str, filename: str = "fine-tuned-paligemma-3b-pt-224.f16.npz"
+    ) -> None:
+        # Check if model_path exists
+        if not os.path.exists(model_path):
+            raise FileNotFoundError(f"Model path {model_path} does not exist.")
+        model_files = os.listdir(model_path)
+        print(f"Model files found in {model_path}: {model_files}")
+
+        files_to_deploy = []
+
+        # Find first .npz file in model_path
+        npz_filename = next((file for file in model_files if file.endswith(".npz")), None)
+        if any([file.endswith(".safetensors") for file in model_files]):
+            print("Found .safetensors file in model path. Deploying PyTorch PaliGemma model.")
+            necessary_files = [
+                "config.json",
+                "generation_config.json",
+                "model.safetensors.index.json",
+                "preprocessor_config.json",
+                "special_tokens_map.json",
+                "tokenizer_config.json",
+                "tokenizer.json",
+            ]
+            for file in necessary_files:
+                if file not in model_files:
+                    print("Missing necessary file", file)
+                    res = input("Do you want to continue? (y/n)")
+                    if res.lower() != "y":
+                        exit(1)
+            for file in model_files:
+                files_to_deploy.append(file)
+        elif npz_filename is not None:
+            print(f"Found .npz file {npz_filename} in model path. Deploying JAX PaliGemma model.")
+            files_to_deploy.append(npz_filename)
+        else:
+            raise FileNotFoundError(f"No .npz or .safetensors file found in model path {model_path}.")
+
+        if len(files_to_deploy) == 0:
+            raise FileNotFoundError(f"No valid files found in model path {model_path}.")
+        print(f"Zipping files for deploy: {files_to_deploy}")
+
+        import tarfile
+
+        with tarfile.open(os.path.join(model_path, "roboflow_deploy.tar"), "w") as tar:
+            for file in files_to_deploy:
+                tar.add(os.path.join(model_path, file), arcname=file)
+
+        print("Uploading to Roboflow... May take several minutes.")
+        self.upload_zip(model_type, model_path, "roboflow_deploy.tar")
+
     def deploy_yolonas(self, model_type: str, model_path: str, filename: str = "weights/best.pt") -> None:
         try:
             import torch
@@ -613,7 +668,7 @@ def deploy_yolonas(self, model_type: str, model_path: str, filename: str = "weig

         self.upload_zip(model_type, model_path)

-    def upload_zip(self, model_type: str, model_path: str):
+    def upload_zip(self, model_type: str, model_path: str, model_file_name: str = "roboflow_deploy.zip"):
         res = requests.get(
             f"{API_URL}/{self.workspace}/{self.project}/{self.version}"
             f"/uploadModel?api_key={self.__api_key}&modelType={model_type}&nocache=true"
@@ -632,7 +687,7 @@ def upload_zip(self, model_type: str, model_path: str):

         res = requests.put(
             res.json()["url"],
-            data=open(os.path.join(model_path, "roboflow_deploy.zip"), "rb"),
+            data=open(os.path.join(model_path, model_file_name), "rb"),
         )
         try:
             res.raise_for_status()
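
Since deploy_paligemma picks between the JAX and PyTorch paths based on the files it finds, here is a small standalone sketch of that detection (the directory path is illustrative; the suffix checks mirror the diff above):

    import os

    model_path = "./paligemma-weights"  # illustrative path
    model_files = os.listdir(model_path)

    # Mirrors deploy_paligemma: .safetensors wins, otherwise fall back to .npz.
    has_safetensors = any(f.endswith(".safetensors") for f in model_files)
    npz_checkpoint = next((f for f in model_files if f.endswith(".npz")), None)

    if has_safetensors:
        print("PyTorch PaliGemma checkpoint: every file in the directory is tarred.")
    elif npz_checkpoint is not None:
        print(f"JAX PaliGemma checkpoint: only {npz_checkpoint} is tarred.")
    else:
        print("Nothing to deploy: add a .npz or .safetensors checkpoint first.")

Note that upload_zip keeps "roboflow_deploy.zip" as its default model_file_name, so the existing YOLO deploy paths are unchanged; only the PaliGemma path passes "roboflow_deploy.tar" explicitly.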
