@@ -432,11 +432,15 @@ def deploy(self, model_type: str, model_path: str, filename: str = "weights/best
             filename (str, optional): The name of the weights file. Defaults to "weights/best.pt".
         """
 
-        supported_models = ["yolov5", "yolov7-seg", "yolov8", "yolov9", "yolonas"]
+        supported_models = ["yolov5", "yolov7-seg", "yolov8", "yolov9", "yolonas", "paligemma"]
 
         if not any(supported_model in model_type for supported_model in supported_models):
            raise (ValueError(f"Model type {model_type} not supported. Supported models are" f" {supported_models}"))
 
+        if "paligemma" in model_type:
+            self.deploy_paligemma(model_type, model_path, filename)
+            return
+
        if "yolonas" in model_type:
            self.deploy_yolonas(model_type, model_path, filename)
            return
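With "paligemma" added to `supported_models`, the dispatch above routes any model type containing "paligemma" to the new `deploy_paligemma` helper. A minimal usage sketch, assuming the usual roboflow workflow of resolving a version object first; the API key, workspace, project, model-type string, and checkpoint directory below are placeholders:

```python
from roboflow import Roboflow

# Placeholders: substitute your own API key, workspace, project, and version.
rf = Roboflow(api_key="YOUR_API_KEY")
version = rf.workspace("my-workspace").project("my-project").version(1)

# Any model_type containing "paligemma" takes the new branch and uploads
# the checkpoint directory via deploy_paligemma.
version.deploy(model_type="paligemma-3b-pt-224", model_path="./paligemma-checkpoint")
```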
@@ -548,6 +552,57 @@ def deploy(self, model_type: str, model_path: str, filename: str = "weights/best
 
         self.upload_zip(model_type, model_path)
 
+    def deploy_paligemma(
+        self, model_type: str, model_path: str, filename: str = "fine-tuned-paligemma-3b-pt-224.f16.npz"
+    ) -> None:
+        # Check if model_path exists
+        if not os.path.exists(model_path):
+            raise FileNotFoundError(f"Model path {model_path} does not exist.")
+        model_files = os.listdir(model_path)
+        print(f"Model files found in {model_path}: {model_files}")
+
+        files_to_deploy = []
+
+        # Find first .npz file in model_path
+        npz_filename = next((file for file in model_files if file.endswith(".npz")), None)
+        if any([file.endswith(".safetensors") for file in model_files]):
+            print("Found .safetensors file in model path. Deploying PyTorch PaliGemma model.")
+            necessary_files = [
+                "config.json",
+                "generation_config.json",
+                "model.safetensors.index.json",
+                "preprocessor_config.json",
+                "special_tokens_map.json",
+                "tokenizer_config.json",
+                "tokenizer.json",
+            ]
+            for file in necessary_files:
+                if file not in model_files:
+                    print("Missing necessary file", file)
+                    res = input("Do you want to continue? (y/n)")
+                    if res.lower() != "y":
+                        exit(1)
+            for file in model_files:
+                files_to_deploy.append(file)
+        elif npz_filename is not None:
+            print(f"Found .npz file {npz_filename} in model path. Deploying JAX PaliGemma model.")
+            files_to_deploy.append(npz_filename)
+        else:
+            raise FileNotFoundError(f"No .npz or .safetensors file found in model path {model_path}.")
+
+        if len(files_to_deploy) == 0:
+            raise FileNotFoundError(f"No valid files found in model path {model_path}.")
+        print(f"Zipping files for deploy: {files_to_deploy}")
+
+        import tarfile
+
+        with tarfile.open(os.path.join(model_path, "roboflow_deploy.tar"), "w") as tar:
+            for file in files_to_deploy:
+                tar.add(os.path.join(model_path, file), arcname=file)
+
+        print("Uploading to Roboflow... May take several minutes.")
+        self.upload_zip(model_type, model_path, "roboflow_deploy.tar")
+
     def deploy_yolonas(self, model_type: str, model_path: str, filename: str = "weights/best.pt") -> None:
         try:
             import torch
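`deploy_paligemma` accepts either a Hugging Face-style safetensors export (PyTorch) or a single `.npz` checkpoint (JAX), and packages whatever it finds into `roboflow_deploy.tar` for upload. A small standalone sketch that mirrors the same detection rules, handy for checking an export directory before calling `deploy`; the directory path is a placeholder:

```python
import os

# Placeholder path to a local PaliGemma export directory.
model_path = "./paligemma-checkpoint"
model_files = os.listdir(model_path)

if any(f.endswith(".safetensors") for f in model_files):
    # PyTorch route: the whole directory is shipped, so the HF config and
    # tokenizer files listed in the diff above should be present too.
    expected = {
        "config.json",
        "generation_config.json",
        "model.safetensors.index.json",
        "preprocessor_config.json",
        "special_tokens_map.json",
        "tokenizer_config.json",
        "tokenizer.json",
    }
    missing = expected - set(model_files)
    print("PyTorch export detected; missing files:", sorted(missing) or "none")
elif any(f.endswith(".npz") for f in model_files):
    # JAX route: only the first .npz checkpoint is shipped.
    print("JAX export detected:", next(f for f in model_files if f.endswith(".npz")))
else:
    print("Nothing deployable found: need a .safetensors or .npz checkpoint.")
```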
@@ -613,7 +668,7 @@ def deploy_yolonas(self, model_type: str, model_path: str, filename: str = "weig
 
         self.upload_zip(model_type, model_path)
 
-    def upload_zip(self, model_type: str, model_path: str):
+    def upload_zip(self, model_type: str, model_path: str, model_file_name: str = "roboflow_deploy.zip"):
         res = requests.get(
             f"{API_URL}/{self.workspace}/{self.project}/{self.version}"
             f"/uploadModel?api_key={self.__api_key}&modelType={model_type}&nocache=true"
@@ -632,7 +687,7 @@ def upload_zip(self, model_type: str, model_path: str):
 
         res = requests.put(
             res.json()["url"],
-            data=open(os.path.join(model_path, "roboflow_deploy.zip"), "rb"),
+            data=open(os.path.join(model_path, model_file_name), "rb"),
         )
         try:
             res.raise_for_status()
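The new `model_file_name` parameter keeps `roboflow_deploy.zip` as the default for existing callers while letting `deploy_paligemma` hand over its tar archive. The upload itself is the two-step flow visible in the hunks above: fetch a signed URL from the Roboflow API, then PUT the archive bytes to it. A reduced sketch of that flow, assuming the endpoint behaves as the SDK code above suggests; the endpoint values, key, and file names are placeholders, and the archive is opened in a context manager here:

```python
import os
import requests

# Placeholders for illustration only.
API_URL = "https://api.roboflow.com"
workspace, project, version_id = "my-workspace", "my-project", "1"
api_key, model_type = "YOUR_API_KEY", "paligemma-3b-pt-224"
model_path, model_file_name = "./paligemma-checkpoint", "roboflow_deploy.tar"

# Step 1: ask the API for a signed upload URL for this version/model type.
res = requests.get(
    f"{API_URL}/{workspace}/{project}/{version_id}"
    f"/uploadModel?api_key={api_key}&modelType={model_type}&nocache=true"
)
res.raise_for_status()

# Step 2: PUT the archive bytes to the signed URL.
with open(os.path.join(model_path, model_file_name), "rb") as archive:
    upload = requests.put(res.json()["url"], data=archive)
upload.raise_for_status()
```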