Skip to content

Commit

Permalink
Add tests
Browse files Browse the repository at this point in the history
Signed-off-by: Rafael Vasquez <[email protected]>
  • Loading branch information
rafvasq committed Mar 12, 2024
1 parent b9f837a commit ad93a4e
Show file tree
Hide file tree
Showing 6 changed files with 198 additions and 2 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/fvt-base.yml
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ jobs:
docker pull openvino/model_server:2022.2
# docker pull pytorch/torchserve:0.7.1-cpu
docker pull kserve/modelmesh:latest
docker pull kserve/modelmesh-minio-dev-examples:latest
          docker pull kserve/modelmesh-minio-dev-examples:latest
docker pull kserve/modelmesh-minio-examples:latest
docker pull kserve/modelmesh-runtime-adapter:latest
docker pull kserve/rest-proxy:latest
Expand Down
2 changes: 1 addition & 1 deletion config/dependencies/fvt.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ spec:
value: AKIAIOSFODNN7EXAMPLE
- name: MINIO_SECRET_KEY
value: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
image: kserve/modelmesh-minio-dev-examples:latest
        image: kserve/modelmesh-minio-dev-examples:latest
name: minio
---
apiVersion: v1
Expand Down
40 changes: 40 additions & 0 deletions fvt/inference.go
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,26 @@ func ExpectSuccessfulInference_lightgbmMushroom(predictorName string) {
Expect(math.Round(float64(inferResponse.Outputs[0].Contents.Fp64Contents[0])*10) / 10).To(BeEquivalentTo(0.0))
}

// ExpectSuccessfulInference_lightgbmFILMushroom sends a KServe-v2 gRPC
// inference request for the LightGBM mushroom model served through Triton's
// FIL backend and expects an error-free, non-nil response.
// COS path: fvt/lightgbm/mushroom-fil
func ExpectSuccessfulInference_lightgbmFILMushroom(predictorName string) {
	// assemble the single FP32 input tensor expected by the FIL model
	tensor := &inference.ModelInferRequest_InferInputTensor{
		Name:     "input__0",
		Shape:    []int64{1, 126},
		Datatype: "FP32",
		Contents: &inference.InferTensorContents{Fp32Contents: mushroomInputData},
	}
	req := &inference.ModelInferRequest{
		ModelName: predictorName,
		Inputs:    []*inference.ModelInferRequest_InferInputTensor{tensor},
	}

	resp, err := FVTClientInstance.RunKfsInference(req)
	Expect(err).ToNot(HaveOccurred())
	Expect(resp).ToNot(BeNil())
}

// XGBoost Mushroom
// COS path: fvt/xgboost/mushroom
func ExpectSuccessfulInference_xgboostMushroom(predictorName string) {
Expand All @@ -324,6 +344,26 @@ func ExpectSuccessfulInference_xgboostMushroom(predictorName string) {
Expect(math.Round(float64(inferResponse.Outputs[0].Contents.Fp32Contents[0])*10) / 10).To(BeEquivalentTo(0.0))
}

// ExpectSuccessfulInference_xgboostFILMushroom sends a KServe-v2 gRPC
// inference request for the XGBoost mushroom model served through Triton's
// FIL backend and expects an error-free, non-nil response.
// COS path: fvt/xgboost/mushroom-fil
func ExpectSuccessfulInference_xgboostFILMushroom(predictorName string) {
	// assemble the single FP32 input tensor expected by the FIL model
	tensor := &inference.ModelInferRequest_InferInputTensor{
		Name:     "input__0",
		Shape:    []int64{1, 126},
		Datatype: "FP32",
		Contents: &inference.InferTensorContents{Fp32Contents: mushroomInputData},
	}
	req := &inference.ModelInferRequest{
		ModelName: predictorName,
		Inputs:    []*inference.ModelInferRequest_InferInputTensor{tensor},
	}

	resp, err := FVTClientInstance.RunKfsInference(req)
	Expect(err).ToNot(HaveOccurred())
	Expect(resp).ToNot(BeNil())
}

// Helpers

var mushroomInputData []float32 = []float32{1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0}
Expand Down
104 changes: 104 additions & 0 deletions fvt/predictor/predictor_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,22 @@ var predictorsArray = []FVTPredictor{
differentPredictorName: "xgboost",
differentPredictorFilename: "xgboost-predictor.yaml",
},
{
predictorName: "xgboost-fil",
predictorFilename: "xgboost-fil-predictor.yaml",
currentModelPath: "fvt/xgboost/mushroom-fil",
updatedModelPath: "fvt/xgboost/mushroom-fil-dup",
differentPredictorName: "onnx",
differentPredictorFilename: "onnx-predictor.yaml",
},
{
predictorName: "lightgbm-fil",
predictorFilename: "lightgbm-fil-predictor.yaml",
currentModelPath: "fvt/lightgbm/mushroom-fil",
updatedModelPath: "fvt/lightgbm/mushroom-fil-dup",
differentPredictorName: "onnx",
differentPredictorFilename: "onnx-predictor.yaml",
},
// TorchServe test is currently disabled
// {
// predictorName: "pytorch-mar",
Expand Down Expand Up @@ -731,6 +747,50 @@ var _ = Describe("Predictor", func() {
})
})

// Ordered container: BeforeAll deploys the FIL predictor once, the specs
// run against it in order, and AfterAll tears it down.
var _ = Describe("XGBoost FIL inference", Ordered, func() {
	// Predictor CR under test and its generated name; set in BeforeAll.
	var xgboostPredictorObject *unstructured.Unstructured
	var xgboostPredictorName string

	BeforeAll(func() {
		// load the test predictor object (fvt/testdata/predictors)
		xgboostPredictorObject = NewPredictorForFVT("xgboost-fil-predictor.yaml")
		xgboostPredictorName = xgboostPredictorObject.GetName()

		// create the predictor and block until it reports Loaded
		CreatePredictorAndWaitAndExpectLoaded(xgboostPredictorObject)

		// open the insecure gRPC connection used by the inference specs
		err := FVTClientInstance.ConnectToModelServing(Insecure)
		Expect(err).ToNot(HaveOccurred())
	})

	AfterAll(func() {
		// remove the predictor created in BeforeAll
		FVTClientInstance.DeletePredictor(xgboostPredictorName)
	})

	It("should successfully run an inference", func() {
		ExpectSuccessfulInference_xgboostFILMushroom(xgboostPredictorName)
	})

	It("should fail with invalid shape", func() {
		// build the grpc inference call; shape [1, 28777] deliberately
		// mismatches the [1, 126] input the model accepts (see the
		// successful-inference helper), so the server must reject it
		inferInput := &inference.ModelInferRequest_InferInputTensor{
			Name:     "input__0",
			Shape:    []int64{1, 28777},
			Datatype: "FP32",
			Contents: &inference.InferTensorContents{Fp32Contents: []float32{}},
		}
		inferRequest := &inference.ModelInferRequest{
			ModelName: xgboostPredictorName,
			Inputs:    []*inference.ModelInferRequest_InferInputTensor{inferInput},
		}

		inferResponse, err := FVTClientInstance.RunKfsInference(inferRequest)

		// expect no response and an error naming the offending input
		Expect(inferResponse).To(BeNil())
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("unexpected shape for input 'input__0'"))
	})
})

var _ = Describe("Pytorch inference", Ordered, func() {
var ptPredictorObject *unstructured.Unstructured
var ptPredictorName string
Expand Down Expand Up @@ -888,6 +948,50 @@ var _ = Describe("Predictor", func() {
Expect(err.Error()).To(ContainSubstring("INTERNAL: builtins.ValueError: cannot reshape array"))
})
})

// Ordered container: BeforeAll deploys the FIL predictor once, the specs
// run against it in order, and AfterAll tears it down.
var _ = Describe("LightGBM FIL inference", Ordered, func() {
	// Predictor CR under test and its generated name; set in BeforeAll.
	var lightGBMPredictorObject *unstructured.Unstructured
	var lightGBMPredictorName string

	BeforeAll(func() {
		// load the test predictor object (fvt/testdata/predictors)
		lightGBMPredictorObject = NewPredictorForFVT("lightgbm-fil-predictor.yaml")
		lightGBMPredictorName = lightGBMPredictorObject.GetName()

		// create the predictor and block until it reports Loaded
		CreatePredictorAndWaitAndExpectLoaded(lightGBMPredictorObject)

		// open the insecure gRPC connection used by the inference specs
		err := FVTClientInstance.ConnectToModelServing(Insecure)
		Expect(err).ToNot(HaveOccurred())
	})

	AfterAll(func() {
		// remove the predictor created in BeforeAll
		FVTClientInstance.DeletePredictor(lightGBMPredictorName)
	})

	It("should successfully run an inference", func() {
		ExpectSuccessfulInference_lightgbmFILMushroom(lightGBMPredictorName)
	})

	It("should fail with invalid shape input", func() {
		// build the grpc inference call; shape [1, 28777] deliberately
		// mismatches the [1, 126] input the model accepts (see the
		// successful-inference helper), so the server must reject it
		inferInput := &inference.ModelInferRequest_InferInputTensor{
			Name:     "input__0",
			Shape:    []int64{1, 28777},
			Datatype: "FP32",
			Contents: &inference.InferTensorContents{Fp32Contents: []float32{}},
		}
		inferRequest := &inference.ModelInferRequest{
			ModelName: lightGBMPredictorName,
			Inputs:    []*inference.ModelInferRequest_InferInputTensor{inferInput},
		}

		inferResponse, err := FVTClientInstance.RunKfsInference(inferRequest)

		// expect no response and an error naming the offending input
		Expect(inferResponse).To(BeNil())
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("unexpected shape for input 'input__0'"))
	})
})
})

// These tests verify that an invalid Predictor fails to load. These are in a
Expand Down
26 changes: 26 additions & 0 deletions fvt/testdata/predictors/lightgbm-fil-predictor.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Copyright 2021 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# FVT Predictor: LightGBM mushroom model served by the Triton runtime
# (FIL backend), loaded from the local MinIO test storage.
apiVersion: serving.kserve.io/v1alpha1
kind: Predictor
metadata:
  name: lightgbm-fil-predictor
spec:
  modelType:
    name: lightgbm
  runtime:
    # pin to the Triton runtime so the FIL model repository layout is used
    name: triton-2.x
  path: fvt/lightgbm/mushroom-fil
  storage:
    s3:
      # secret key referencing the FVT local MinIO storage config
      secretKey: localMinIO
26 changes: 26 additions & 0 deletions fvt/testdata/predictors/xgboost-fil-predictor.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Copyright 2021 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# FVT Predictor: XGBoost mushroom model served by the Triton runtime
# (FIL backend), loaded from the local MinIO test storage.
apiVersion: serving.kserve.io/v1alpha1
kind: Predictor
metadata:
  # renamed from "xgboost-fil-mushroom" to match the filename and the
  # "<model>-fil-predictor" convention used by the lightgbm counterpart;
  # tests read the name via GetName(), so no caller depends on the old value
  name: xgboost-fil-predictor
spec:
  modelType:
    name: xgboost
  runtime:
    # pin to the Triton runtime so the FIL model repository layout is used
    name: triton-2.x
  path: fvt/xgboost/mushroom-fil
  storage:
    s3:
      # secret key referencing the FVT local MinIO storage config
      secretKey: localMinIO

0 comments on commit ad93a4e

Please sign in to comment.