[release-1.16] fix ML example (#6184)
* fix ML example

* fix dockerize

* type variable df

* fix code

---------

Co-authored-by: Yasuhiro Matsumoto <[email protected]>
knative-prow-robot and mattn authored Dec 29, 2024
1 parent 9de70f0 commit 4e53037
Showing 3 changed files with 121 additions and 69 deletions.
First changed file — the sample's documentation page:
@@ -39,15 +39,18 @@ as API endpoint with Knative Serving.
API, which is the entry point for accessing this machine learning service.

```python
-from bentoml import env, artifacts, api, BentoService
-from bentoml.handlers import DataframeHandler
-from bentoml.artifact import SklearnModelArtifact
+import bentoml
+import joblib
-@env(auto_pip_dependencies=True)
-@artifacts([SklearnModelArtifact('model')])
-class IrisClassifier(BentoService):
-    @api(DataframeHandler)
+@bentoml.service
+class IrisClassifier:
+    iris_model = bentoml.models.get("iris_classifier:latest")

+    def __init__(self):
+        self.model = joblib.load(self.iris_model.path_of("model.pkl"))

+    @bentoml.api
    def predict(self, df):
-        return self.artifacts.model.predict(df)
+        return self.model.predict(df)
```
@@ -58,10 +61,11 @@ as API endpoint with Knative Serving.
given data and then save the model with BentoML to local disk.

```python
+import joblib
from sklearn import svm
from sklearn import datasets
-from iris_classifier import IrisClassifier
+import bentoml
if __name__ == "__main__":
# Load training data
@@ -72,14 +76,9 @@ as API endpoint with Knative Serving.
clf = svm.SVC(gamma='scale')
clf.fit(X, y)
-# Create a iris classifier service instance
-iris_classifier_service = IrisClassifier()
-# Pack the newly trained model artifact
-iris_classifier_service.pack('model', clf)
-# Save the prediction service to disk for model serving
-saved_path = iris_classifier_service.save()
+with bentoml.models.create("iris_classifier") as bento_model:
+    joblib.dump(clf, bento_model.path_of("model.pkl"))
+    print(f"Model saved: {bento_model}")
```
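Once the training step below has been run, the saved artifact can be loaded back from the local model store to sanity-check it; a minimal sketch, assuming the model was saved exactly as shown above (the sample row is illustrative):

```python
import bentoml
import joblib

# Resolve the newest saved version from the local BentoML model store.
bento_model = bentoml.models.get("iris_classifier:latest")

# Load the pickled scikit-learn estimator, exactly as the service's __init__ does.
clf = joblib.load(bento_model.path_of("model.pkl"))

# Four iris measurements -> predicted class index (e.g. [0] for setosa).
print(clf.predict([[5.1, 3.5, 1.4, 0.2]]))
```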
1. Run the `main.py` file to train and save the model:
@@ -91,48 +90,97 @@ as API endpoint with Knative Serving.
1. Use BentoML CLI to check saved model's information.
```bash
-bentoml get IrisClassifier:latest
+bentoml get iris_classifier:latest
```
Example:
```bash
-> bentoml get IrisClassifier:latest
+> bentoml get iris_classifier:latest -o json
-{
-  "name": "IrisClassifier",
-  "version": "20200305171229_0A1411",
-  "uri": {
-    "type": "LOCAL",
-    "uri": "/Users/bozhaoyu/bentoml/repository/IrisClassifier/20200305171229_0A1411"
-  },
-  "bentoServiceMetadata": {
-    "name": "IrisClassifier",
-    "version": "20200305171229_0A1411",
-    "createdAt": "2020-03-06T01:12:49.431011Z",
-    "env": {
-      "condaEnv": "name: bentoml-IrisClassifier\nchannels:\n- defaults\ndependencies:\n- python=3.7.3\n- pip\n",
-      "pipDependencies": "bentoml==0.6.2\nscikit-learn",
-      "pythonVersion": "3.7.3"
-    },
-    "artifacts": [
-      {
-        "name": "model",
-        "artifactType": "SklearnModelArtifact"
-      }
-    ],
-    "apis": [
-      {
-        "name": "predict",
-        "handlerType": "DataframeHandler",
-        "docs": "BentoService API",
-        "handlerConfig": {
-          "orient": "records",
-          "typ": "frame",
-          "input_dtypes": null,
-          "output_orient": "records"
-        }
-      }
-    ]
-  }
-}
+{
+  "service": "iris_classifier:IrisClassifier",
+  "name": "iris_classifier",
+  "version": "ar67rxqxqcrqi7ol",
+  "bentoml_version": "1.2.16",
+  "creation_time": "2024-05-21T14:40:20.737900+00:00",
+  "labels": {
+    "owner": "bentoml-team",
+    "project": "gallery"
+  },
+  "models": [],
+  "runners": [],
+  "entry_service": "IrisClassifier",
+  "services": [
+    {
+      "name": "IrisClassifier",
+      "service": "",
+      "models": [
+        {
+          "tag": "iris_sklearn:ml5evdaxpwrqi7ol",
+          "module": "",
+          "creation_time": "2024-05-21T14:21:17.070059+00:00"
+        }
+      ],
+      "dependencies": [],
+      "config": {}
+    }
+  ],
+  "envs": [],
+  "schema": {
+    "name": "IrisClassifier",
+    "type": "service",
+    "routes": [
+      {
+        "name": "predict",
+        "route": "/predict",
+        "batchable": false,
+        "input": {
+          "properties": {
+            "df": {
+              "title": "Df"
+            }
+          },
+          "required": [
+            "df"
+          ],
+          "title": "Input",
+          "type": "object"
+        },
+        "output": {
+          "title": "AnyIODescriptor"
+        }
+      }
+    ]
+  },
+  "apis": [],
+  "docker": {
+    "distro": "debian",
+    "python_version": "3.11",
+    "cuda_version": null,
+    "env": null,
+    "system_packages": null,
+    "setup_script": null,
+    "base_image": null,
+    "dockerfile_template": null
+  },
+  "python": {
+    "requirements_txt": "./requirements.txt",
+    "packages": null,
+    "lock_packages": true,
+    "pack_git_packages": true,
+    "index_url": null,
+    "no_index": null,
+    "trusted_host": null,
+    "find_links": null,
+    "extra_index_url": null,
+    "pip_args": null,
+    "wheels": null
+  },
+  "conda": {
+    "environment_yml": null,
+    "channels": null,
+    "dependencies": null,
+    "pip": null
+  }
+}
```
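The same local stores can also be inspected programmatically; a minimal sketch, assuming a recent BentoML 1.x release and the model tag used in this example:

```python
import bentoml

# Saved models (created by main.py via bentoml.models.create).
for model in bentoml.models.list():
    print("model:", model.tag)

# The model referenced by the service; "latest" resolves to the newest version.
print(bentoml.models.get("iris_classifier:latest").path)
```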
@@ -141,7 +189,7 @@ as API endpoint with Knative Serving.
BentoML CLI command to start an API server locally and test it with the `curl` command.
```bash
-bentoml serve IrisClassifier:latest
+bentoml serve iris_classifier:latest
```
In another terminal window, make `curl` request with sample data to the API server
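Besides `curl`, the running endpoint can be exercised from Python; a minimal sketch, assuming BentoML's default local serving address (typically http://localhost:3000) and the `/predict` route and `df` input field shown in the schema above:

```python
import requests  # third-party HTTP client (pip install requests)

# One row of iris measurements; the /predict route expects a JSON body
# with a "df" field, per the input schema reported by `bentoml get`.
payload = {"df": [[5.1, 3.5, 1.4, 0.2]]}

response = requests.post("http://localhost:3000/predict", json=payload, timeout=10)
response.raise_for_status()
print(response.json())
```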
@@ -164,12 +212,8 @@ a Dockerfile is automatically generated when saving the model.
username and run the following commands.
```bash
-# jq might not be installed on your local system, please follow jq install
-# instruction at https://stedolan.github.io/jq/download/
-saved_path=$(bentoml get IrisClassifier:latest -q | jq -r ".uri.uri")
-# Build and push the container on your local machine.
-docker buildx build --platform linux/arm64,linux/amd64 -t "{username}/iris-classifier" --push $saved_path
+bentoml containerize iris_classifier:latest -t "{username}/iris-classifier" --push
```
1. In `service.yaml`, replace `{username}` with your Docker hub username:
Second changed file — the service definition (`iris_classifier.py`):
@@ -1,11 +1,23 @@
-from bentoml import env, artifacts, api, BentoService
-from bentoml.handlers import DataframeHandler
-from bentoml.artifact import SklearnModelArtifact
+import numpy as np
+import bentoml
+from pydantic import Field
+from bentoml.validators import Shape
+from typing_extensions import Annotated
+import joblib

-@env(auto_pip_dependencies=True)
-@artifacts([SklearnModelArtifact('model')])
-class IrisClassifier(BentoService):
-    @api(DataframeHandler)
-    def predict(self, df):
-        return self.artifacts.model.predict(df)
+@bentoml.service
+class IrisClassifier:
+    iris_model = bentoml.models.get("iris_sklearn:latest")

+    def __init__(self):
+        self.model = joblib.load(self.iris_model.path_of("model.pkl"))

+    @bentoml.api
+    def predict(
+        self,
+        df: Annotated[np.ndarray, Shape((-1, 4))] = Field(
+            default=[[5.2, 2.3, 5.0, 0.7]]
+        ),
+    ) -> np.ndarray:
+        return self.model.predict(df)
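As a usage sketch for this typed API: recent BentoML 1.x releases ship an HTTP client whose methods mirror the service's endpoints; the address, port, and sample values below are illustrative and assume the service is already running locally.

```python
import numpy as np
import bentoml

# Connect to a locally running IrisClassifier (e.g. started with `bentoml serve`).
client = bentoml.SyncHTTPClient("http://localhost:3000")

# Shape((-1, 4)) accepts any number of rows with four features each.
samples = np.array([[5.2, 2.3, 5.0, 0.7]])
print(client.predict(df=samples))

client.close()
```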
Third changed file — the training script (`main.py`):
@@ -1,7 +1,8 @@
+import joblib
from sklearn import svm
from sklearn import datasets

-from iris_classifier import IrisClassifier
+import bentoml

if __name__ == "__main__":
# Load training data
@@ -12,11 +13,6 @@
clf = svm.SVC(gamma='scale')
clf.fit(X, y)

-# Create a iris classifier service instance
-iris_classifier_service = IrisClassifier()

-# Pack the newly trained model artifact
-iris_classifier_service.pack('model', clf)

-# Save the prediction service to disk for model serving
-saved_path = iris_classifier_service.save()
+with bentoml.models.create("iris_classifier") as bento_model:
+    joblib.dump(clf, bento_model.path_of("model.pkl"))
+    print(f"Model saved: {bento_model}")
