fix + update doc
phatvo9 committed Oct 19, 2023
1 parent 529afbc commit d9b5a1a
Showing 6 changed files with 175 additions and 48 deletions.
65 changes: 44 additions & 21 deletions clarifai/models/api.py
@@ -12,16 +12,25 @@
# limitations under the License.
"""Interface to Clarifai Models API."""

-from typing import Dict, Type
+from typing import Dict, List, Type

from clarifai_grpc.grpc.api import resources_pb2, service_pb2
from google.protobuf.json_format import MessageToDict
-from google.protobuf.struct_pb2 import Struct
+from google.protobuf.struct_pb2 import Struct, Value

from clarifai.auth.helper import ClarifaiAuthHelper
from clarifai.client import create_stub


+def _make_default_value_proto(dtype, value):
+  if dtype == 1:
+    return Value(bool_value=value)
+  elif dtype == 2:
+    return Value(string_value=value)
+  elif dtype == 3:
+    return Value(number_value=value)
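For orientation, the integer codes in this new helper map onto protobuf `Value` kinds; a quick illustration of the helper above (the dtype codes mirror `field_type` in docs/inference_parameters.md added by this commit):

```python
# Illustration only: field_type 1 -> bool, 2 -> string, 3 -> number.
_make_default_value_proto(dtype=1, value=True)        # Value(bool_value=True)
_make_default_value_proto(dtype=2, value="string_1")  # Value(string_value="string_1")
_make_default_value_proto(dtype=3, value=9.9)         # Value(number_value=9.9)
```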


class Models:
"""
Interface to Clarifai models API
@@ -126,13 +135,12 @@ def init_model(

return MessageToDict(post_models_response, preserving_proto_field_name=True)

-  def post_model_version(
-      self,
-      model_id: str,
-      model_zip_url: str,
-      input: dict,
-      outputs: dict,
-  ):
+  def post_model_version(self,
+                         model_id: str,
+                         model_zip_url: str,
+                         input: dict,
+                         outputs: dict,
+                         param_specs: List[dict] = None):
"""Post a new version of an existing model in the Clarifai platform.
Args:
@@ -143,7 +151,7 @@ def post_model_version(
        {clarifai_input_field: triton_input_field}.
      outputs (dict): a dict where the keys are clarifai output fields and the values are triton model outputs,
        {clarifai_output_field1: triton_output_field1, clarifai_output_field2: triton_output_field2,...}.
+      param_specs (List[dict]): list of dicts whose keys are path, field_type, default_value, description. Defaults to None.
Returns:
dict: clarifai api response
"""
@@ -157,6 +165,20 @@ def _parse_fields_map(x):

input_fields_map = _parse_fields_map(input)
output_fields_map = _parse_fields_map(outputs)
+    #resources_pb2.ModelTypeField(path="abc", default_value=1, description="test")
+    if param_specs:
+      iterative_proto_params = []
+      for param in param_specs:
+        dtype = param.get("field_type")
+        proto_param = resources_pb2.ModelTypeField(
+            path=param.get("path"),
+            field_type=dtype,
+            default_value=_make_default_value_proto(dtype=dtype, value=param.get("default_value")),
+            description=param.get("description"),
+        )
+        iterative_proto_params.append(proto_param)
+      param_specs = iterative_proto_params

post_model_versions = self.stub.PostModelVersions(
service_pb2.PostModelVersionsRequest(
user_app_id=user_data_object,
@@ -166,21 +188,21 @@ def _parse_fields_map(x):
pretrained_model_config=resources_pb2.PretrainedModelConfig(
model_zip_url=model_zip_url,
input_fields_map=input_fields_map,
-                output_fields_map=output_fields_map))
+                output_fields_map=output_fields_map),
+            output_info=resources_pb2.OutputInfo(params_specs=param_specs))
]),
metadata=self.auth.metadata)

return MessageToDict(post_model_versions, preserving_proto_field_name=True)

-  def upload_model(
-      self,
-      model_id: str,
-      model_zip_url: str,
-      input: dict,
-      outputs: dict,
-      model_type: str,
-      description: str = "",
-  ):
+  def upload_model(self,
+                   model_id: str,
+                   model_zip_url: str,
+                   input: dict,
+                   outputs: dict,
+                   model_type: str,
+                   description: str = "",
+                   param_specs: List[dict] = None):
"""Doing 2 requests for initializing and creating version for a new trained model to the Clarifai platform.
Args:
@@ -192,14 +214,15 @@ def upload_model(
        {clarifai_output_field1: triton_output_field1, clarifai_output_field2: triton_output_field2,...}
model_type (str): Clarifai model type.
description (str, optional): a description of the model. Defaults to "".
+      param_specs (List[dict]): list of dicts whose keys are path, field_type, default_value, description. Defaults to None.
Returns:
dict: Clarifai api response
"""
init_resp = self.init_model(model_id, model_type, description)
if init_resp["status"]["code"] != "SUCCESS":
return init_resp
-    version_resp = self.post_model_version(model_id, model_zip_url, input, outputs)
+    version_resp = self.post_model_version(model_id, model_zip_url, input, outputs, param_specs)

return version_resp

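For orientation, a call to the updated `upload_model` might look like the sketch below; the auth setup, ids, URL, and field maps are placeholders rather than values from this commit:

```python
from clarifai.auth.helper import ClarifaiAuthHelper
from clarifai.models.api import Models

models_api = Models(ClarifaiAuthHelper.from_env())  # assumes Models accepts an auth helper

resp = models_api.upload_model(
    model_id="my-visual-classifier",                 # placeholder
    model_zip_url="https://example.com/model.zip",   # placeholder public URL
    input={"image": "input"},                        # illustrative clarifai->triton field map
    outputs={"concepts": "softmax_predictions"},     # illustrative field map
    model_type="visual_classifier",
    description="demo upload",
    param_specs=[{
        "path": "number_var",
        "field_type": 3,  # 3 == number, per _make_default_value_proto above
        "default_value": 9.9,
        "description": "a float number variable",
    }],
)
```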
41 changes: 22 additions & 19 deletions clarifai/models/model_serving/README.md
@@ -8,14 +8,15 @@ A step by step guide to building your own triton inference model and deploying it

1. Generate a triton model repository via commandline.
```console
-$ clarifai-model-upload-init --model_name <Your model name> \
+clarifai-model-upload-init --model_name <Your model name> \
--model_type <select model type from available ones> \
--repo_dir <directory in which to create your model repository>
```
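For example, a hypothetical invocation (all values below are placeholders) could be:

```console
clarifai-model-upload-init --model_name yolov5s \
    --model_type visual_detector \
    --repo_dir ./triton_repos/yolov5s
```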
-2. Edit the `requirements.txt` file with dependencies needed to run inference on your model and the `labels.txt` (if available in dir) with the labels your model is to predict.
-3. Add your model loading and inference code inside `inference.py` script of the generated model repository under the `setup()` and `predict()` functions respectively. Refer to the [Inference Script section]() for a description of this file.
-4. Testing your implementation locally by running `<your_triton_folder>/1/test.py` with basic predefined tests.
-   To avoid missing dependencies when deploying, we recommend using conda to create a clean environment from [Clarifai base envs](./envs/), then installing everything in `requirements.txt`. Follow the instructions inside [test.py](./models/test.py) for implementing custom tests.
+2.
+   1. Edit the `requirements.txt` file with dependencies needed to run inference on your model and the `labels.txt` (if available in dir) with the labels your model is to predict.
+   2. Add your model loading and inference code inside the `inference.py` script of the generated model repository under the `setup()` and `predict()` functions respectively. Refer to the [Inference Script section]() for a description of this file.
+   3. Inference parameters (optional): you can define inference parameters that are adjustable in the model view of the Clarifai platform when making predictions. Follow [this doc](./docs/inference_parameters.md) to build the json file.
+3. Testing (recommended): test your implementation locally by running `<your_triton_folder>/1/test.py` with basic predefined tests.
+   To avoid missing dependencies when deploying, we recommend using conda to create a clean environment, then installing everything in `requirements.txt`. Follow the instructions inside [test.py](./models/test.py) for implementing custom tests.
* Create conda env and install requirements:
```bash
# create env (note: only python version 3.8 is supported currently)
@@ -33,41 +34,43 @@ pip install -r <your_triton_folder>/requirements.txt
# to see std output
pytest --log-cli-level=INFO -s ./your_triton_folder/1/test.py
```
-5. Generate a zip of your triton model for deployment via commandline.
+4. Generate a zip of your triton model for deployment via commandline.
```console
-$ clarifai-triton-zip --triton_model_repository <path to triton model repository to be compressed> \
+clarifai-triton-zip --triton_model_repository <path to triton model repository to be compressed> \
--zipfile_name <name of the triton model zip> (Recommended to use <model_name>_<model-type> convention for naming)
```
-6. Upload the generated zip to a public file storage service to get a URL to the zip. This URL must be publicly accessible and downloadable as it's necessary for the last step: uploading the model to a Clarifai app.
-7. Set your Clarifai auth credentials as environment variables.
+5. Upload the generated zip to a public file storage service to get a URL to the zip. This URL must be publicly accessible and downloadable as it's necessary for the last step: uploading the model to a Clarifai app.
+6. Set your Clarifai auth credentials as environment variables.
```console
-$ export CLARIFAI_USER_ID=<your clarifai user_id>
-$ export CLARIFAI_APP_ID=<your clarifai app_id>
-$ export CLARIFAI_PAT=<your clarifai PAT>
+export CLARIFAI_USER_ID=<your clarifai user_id>
+export CLARIFAI_APP_ID=<your clarifai app_id>
+export CLARIFAI_PAT=<your clarifai PAT>
```
-8. Upload your model to Clarifai. Please ensure that your configuration field maps adhere to [this](https://github.com/Clarifai/clarifai-python-utils/blob/main/clarifai/models/model_serving/model_config/deploy.py)
+7. Upload your model to Clarifai. Please ensure that your configuration field maps adhere to [this](https://github.com/Clarifai/clarifai-python-utils/blob/main/clarifai/models/model_serving/model_config/deploy.py)
```console
-$ clarifai-upload-model --url <URL to your model zip. Your zip file name is expected to follow the "zipfile_name" format (from clarifai-triton-zip); if not, you need to specify your model_id and model_type> \
+clarifai-upload-model --url <URL to your model zip. Your zip file name is expected to follow the "zipfile_name" format (from clarifai-triton-zip); if not, you need to specify your model_id and model_type> \
--model_id <Your model ID on the platform> \
--model_type <Clarifai model types> \
---desc <A description of your model>
+--desc <A description of your model> \
+--update_version <Optional. Add a new version of an existing model> \
+--infer_param <Optional. Path to a json file containing inference parameters>
```
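As a concrete illustration, a hypothetical upload (URL and names are placeholders) might look like:

```console
clarifai-upload-model --url https://storage.example.com/yolov5s_visual_detector.zip \
    --model_id yolov5s \
    --model_type visual_detector \
    --desc "YOLOv5s object detector" \
    --infer_param ./inference_parameters.json
```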

* Finally, navigate to your Clarifai app's models and check that the deployed model appears. Click the model name to open the model versions table and track the status of the deployment.

## Triton Model Repository

```diff
<model_name>/
├── config.pbtxt
├── requirements.txt
├── labels.txt (If applicable for given model-type)
- ├── triton_conda.yaml
└── 1/
├── __init__.py
├── inference.py
├── test.py
└── model.py
```

A generated triton model repository looks as illustrated in the directory tree above. Any additional files such as model checkpoints and folders needed at inference time must all be placed under the `1/` directory.

@@ -110,7 +113,7 @@ class InferenceModel:
#self.model: Callable = <load_your_model_here from checkpoint or folder>

#Add relevant model type decorator to the method below (see docs/model_types for ref.)
-  def get_predictions(self, input_data):
+  def get_predictions(self, input_data, **kwargs):
"""
Main model inference method.
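For context, inference parameters defined per [docs/inference_parameters.md](./docs/inference_parameters.md) arrive in `get_predictions` through `**kwargs`; a minimal sketch, where the parameter name and the model call are illustrative:

```python
def get_predictions(self, input_data, **kwargs):
  # kwargs carries any inference parameters sent with the request,
  # e.g. the "number_var" defined in the inference-parameters json.
  threshold = kwargs.get("number_var", 9.9)  # falls back to the declared default
  return self.model(input_data, threshold=threshold)  # hypothetical model call
```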
22 changes: 18 additions & 4 deletions clarifai/models/model_serving/cli/deploy_cli.py
@@ -16,13 +16,15 @@
from clarifai.auth.helper import ClarifaiAuthHelper
from clarifai.models.api import Models
from clarifai.models.model_serving.model_config import MODEL_TYPES, get_model_config
+from clarifai.models.model_serving.model_config.inference_parameter import InferParamManager


 def deploy(model_url,
            model_id: str = None,
            model_type: str = None,
            desc: str = "",
-           update_version: bool = False):
+           update_version: bool = False,
+           inference_params_file: str = ""):
# init Auth from env vars
auth = ClarifaiAuthHelper.from_env()
# init api
@@ -50,14 +52,17 @@ def _parse_name(name):
# key map
assert model_type in MODEL_TYPES, f"model_type should be one of {MODEL_TYPES}"
clarifai_key_map = get_model_config(model_type=model_type).field_maps
+  # inference parameters
+  inference_parameters = InferParamManager(json_path=inference_params_file).get_list_params()

# if updating new version of existing model
if update_version:
    resp = model_api.post_model_version(
        model_id=model_id,
        model_zip_url=model_url,
        input=clarifai_key_map.input_fields_map,
        outputs=clarifai_key_map.output_fields_map,
-    )
+        param_specs=inference_parameters)
# creating new model
else:
# post model
@@ -68,13 +73,14 @@ def _parse_name(name):
        input=clarifai_key_map.input_fields_map,
        outputs=clarifai_key_map.output_fields_map,
        description=desc,
-    )
+        param_specs=inference_parameters)
# response
if resp["status"]["code"] != "SUCCESS":
raise Exception("Post models failed, details: {}, {}".format(resp["status"]["description"],
resp["status"]["details"]))
else:
print("Success!")
+    print(f'Model version: {resp["model"]["model_version"]["id"]}')


def main():
@@ -96,13 +102,21 @@ def main():
action="store_true",
required=False,
help="Update exist model with new version")

+  parser.add_argument(
+      "--infer_param",
+      required=False,
+      default="",
+      help="Path to a json file containing inference parameters")

args = parser.parse_args()
  deploy(
      model_url=args.url,
      model_id=args.model_id,
      desc=args.desc,
      model_type=args.model_type,
-      update_version=args.update_version)
+      update_version=args.update_version,
+      inference_params_file=args.infer_param)


if __name__ == "__main__":
82 changes: 82 additions & 0 deletions clarifai/models/model_serving/docs/inference_parameters.md
@@ -0,0 +1,82 @@
## Inference parameters

When making predictions, you may need to change some parameters to adjust the result. Those parameters will be passed through `parameters()` of a request in the triton python model.

In order to send them to `**kwargs` of `get_predictions` in `inference.py`, there are 2 ways:
1. You can send any arbitrary parameters via the Clarifai API (see the sketch below).
2. You can define some parameters, and they will be visible and adjustable on Clarifai model views.

This document helps you create inference parameters that are visible and easily adjustable on the Clarifai platform. Again, you can still send any parameters via the API, but undefined parameters won't appear on the Clarifai UI. The defined parameters will be sent as a `json` file when you use the `clarifai-upload-model` cli.
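For the first route, a sketch of sending arbitrary parameters through the raw gRPC API is shown below. This is an assumption based on the general `clarifai_grpc` request shape, not code from this commit; the model id, input, and auth setup are placeholders:

```python
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
from google.protobuf.struct_pb2 import Struct

# Arbitrary parameters ride along as a protobuf Struct in OutputInfo.params.
params = Struct()
params.update({"boolean_var": False, "number_var": 0.5})

request = service_pb2.PostModelOutputsRequest(
    model_id="YOUR_MODEL_ID",  # placeholder
    inputs=[
        resources_pb2.Input(
            data=resources_pb2.Data(text=resources_pb2.Text(raw="some input")))
    ],
    model=resources_pb2.Model(
        model_version=resources_pb2.ModelVersion(
            output_info=resources_pb2.OutputInfo(params=params))),
)
# response = stub.PostModelOutputs(request, metadata=auth_metadata)  # stub/auth set up elsewhere
```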

### JSON file structure:
The file contains a list of objects, each having 4 fields:
* `path` (str): the name of your parameter; it must be a valid python variable name
* `field_type` (int): the parameter's data type, one of {1, 2, 3}, meaning {boolean, string, number} respectively. `Number` means `int` or `float`
* `default_value`: a default value of the parameter.
* `description` (str): a short sentence describing what the parameter does

An example of 3 parameters:
```json
[
{
"path": "boolean_var",
"field_type": 1,
"default_value": true,
"description": "a boolean variable"
},
{
"path": "string_var",
"field_type": 2,
"default_value": "string_1",
"description": "a string variable"
},
{
"path": "number_var",
"field_type": 3,
"default_value": 9.9,
"description": "a float number variable"
}
]
```

### Generate JSON file
1. Manually create the file based on the structure above
2. By code:
```python
from clarifai.models.model_serving.model_config.inference_parameter import InferParamManager, InferParam, InferParamType

# 2.1. Full setup
params = [
    InferParam(
        path="boolean_var",
        field_type=InferParamType.BOOL,
        default_value=True,
        description="a boolean variable"),
    InferParam(
        path="string_var",
        field_type=InferParamType.STRING,
        default_value="string_1",
        description="a string variable"),
    InferParam(
        path="number_var",
        field_type=InferParamType.NUMBER,
        default_value=9.9,
        description="a float number variable"),
]

ipm = InferParamManager(params=params)
ipm.export("your_file.json")

# 2.2. Shorthand
# NOTE: with this approach the `description` field is set to empty ("").
# *You need to modify* `description` in order to be able to upload the settings to Clarifai.
params = dict(boolean_var=True, string_var="string_1", number_var=9.9)
ipm = InferParamManager.from_kwargs(**params)
ipm.export("your_file.json")

```

3. In `test.py`: you can define your parameters as in `2.2. Shorthand` above, via the `inference_parameters` attribute of `CustomTestInferenceModel`; the file will be generated when you run the test. Keep in mind to change `description` (see the sketch below).
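A hedged sketch of that approach (the real `CustomTestInferenceModel` skeleton comes from the generated `test.py`; only the attribute matters here):

```python
# Hypothetical outline; the generated test.py provides the actual base class.
class CustomTestInferenceModel(object):
  # Shorthand-style definition; descriptions start empty ("") and must be
  # filled in before uploading the resulting json to Clarifai.
  inference_parameters = dict(boolean_var=True, string_var="string_1", number_var=9.9)
```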
3 changes: 2 additions & 1 deletion clarifai/models/model_serving/docs/model_types.md
@@ -7,7 +7,7 @@ The predictions from user defined models in the [inference script](../README.md#

Clarifai [model types](../models/model_types.py) are decorator functions that are responsible for passing input batch requests to user defined inference models to get predictions and format the resultant predictions into Triton Inference responses that are sent by the server for each client inference request.

-## Supported Model Types:
+## Supported Model Types Wrapper Functions:

- visual_detector
- visual_classifier
@@ -17,3 +17,4 @@ Clarifai [model types](../models/model_types.py) are decorator functions that are
- text_to_image
- visual_embedder
- visual_segmenter
+- multimodal_embedder
