diff --git a/README.md b/README.md
index dab2739..79b22fc 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,44 @@
 # hydro-serving-python
 Python runtime for [Hydrosphere Serving](https://github.com/Hydrospheredata/hydro-serving).
-Provides GRPC API for a Python scripts.
-Supported versions are: python-3.4 python-3.5 python-3.6
+Provides a GRPC API for Python scripts.
+
+Supported versions are: python-3.6 python-3.7 python-3.8
 
 ## Build commands
 - `make test`
 - `make python` - build docker runtime with python:latest-alpine base image
 - `make python-${VERSION}` - build docker runtime with python:${VERSION}-alpine base image
 - `make clean` - clean repository from temp files
+
+## Usage
+
+This runtime uses the `src/func_main.py` script as an entry point.
+You may create an arbitrary Python application within,
+just keep in mind that the entry point of your script has to be located in
+`src/func_main.py`.
+
+Example of a `func_main.py`:
+
+```python
+import pandas as pd
+from joblib import load
+
+# Load an ML model during runtime initialisation
+clf = load('/model/files/classification_model.joblib')
+
+# This function is called on each request.
+# Input and output must comply with your model's signature.
+def predict(**kwargs):
+    # kwargs is a dict with the Numpy arrays or scalars specified in the signature
+    x = pd.DataFrame.from_dict({"request": kwargs}).T
+    predicted = clf.predict(x)
+    return {"income": int(predicted)}
+```
+
+or, if you wish to work with proto messages, return a `TensorProto` from `predict`:
+```python
+    return {"income": TensorProto(int_val=[int(predicted)],
+                                  dtype=DT_INT32,
+                                  tensor_shape=TensorShapeProto())}
+```
diff --git a/requirements.txt b/requirements.txt
index 92c6ffe..7e44283 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,7 @@
 grpcio~=1.19
-hydro-serving-grpc==2.1.0rc1
+hydro-serving-grpc==2.2.1
 docker~=3.0
 pytest
-hydrosdk==2.3.1
-
+hydrosdk==2.3.2
 pandas~=1.0.3
 numpy~=1.18.3
\ No newline at end of file
diff --git a/src/PythonRuntimeService.py b/src/PythonRuntimeService.py
index 2ed1cf8..4b14500 100644
--- a/src/PythonRuntimeService.py
+++ b/src/PythonRuntimeService.py
@@ -5,9 +5,9 @@
 import grpc
 
 import hydro_serving_grpc as hs
-import numpy as np
+from hydro_serving_grpc import TensorProto
 from hydro_serving_grpc.tf.api import PredictionServiceServicer
-from hydrosdk.data.conversions import tensor_proto_to_nparray, nparray_to_tensor_proto
+from hydrosdk.data.conversions import tensor_proto_to_np, np_to_tensor_proto
 
 
 class PythonRuntimeService(PredictionServiceServicer):
@@ -46,11 +46,15 @@ def Predict(self, request, context):
         else:
             self.logger.info("Received inference request: {}".format(request)[:256])
             try:
-                numpy_request_inputs: Dict[str, np.array] = {k: tensor_proto_to_nparray(t) for k, t in request.inputs.items()}
-                numpy_outputs: Dict[str, np.array] = self.executable(**numpy_request_inputs)
+
+                numpy_request_inputs: Dict = {k: tensor_proto_to_np(t) for k, t in request.inputs.items()}
+                numpy_outputs: Dict = self.executable(**numpy_request_inputs)
                 try:
-                    tensor_proto_outputs = {k: nparray_to_tensor_proto(v) for k, v in numpy_outputs.items()}
+                    # If a TensorProto is returned, then pass it through.
+                    # If Numpy is returned, cast it to TensorProto.
+                    tensor_proto_outputs: Dict[str, TensorProto] = {k: (v if isinstance(v, TensorProto) else np_to_tensor_proto(v))
+                                                                    for k, v in numpy_outputs.items()}
+
+                    result = hs.PredictResponse(outputs=tensor_proto_outputs)
                     self.logger.info("Answer: {}".format(result)[:256])
                     return result
diff --git a/version b/version
index 2bf1c1c..e703481 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-2.3.1
+2.3.2
\ No newline at end of file
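
For context, the two README snippets combine into one complete `func_main.py` that returns proto messages directly, exercising the new pass-through branch in `Predict`. The sketch below is illustrative, not part of the commit: only the `TensorProto` top-level import from `hydro_serving_grpc` is confirmed by `src/PythonRuntimeService.py`; importing `TensorShapeProto` and `DT_INT32` from the same top level is an assumption based on the snippet's bare references.

```python
# Illustrative sketch only: assumes TensorShapeProto and DT_INT32 are
# exported from hydro_serving_grpc's top level, like TensorProto.
import pandas as pd
from joblib import load
from hydro_serving_grpc import TensorProto, TensorShapeProto, DT_INT32

# Load the model once, when the runtime container starts
clf = load('/model/files/classification_model.joblib')

def predict(**kwargs):
    # kwargs holds the request fields declared in the model signature
    x = pd.DataFrame.from_dict({"request": kwargs}).T
    predicted = clf.predict(x)
    # Build the scalar int32 tensor by hand instead of returning a plain int;
    # an empty TensorShapeProto denotes a scalar shape
    return {"income": TensorProto(int_val=[int(predicted)],
                                  dtype=DT_INT32,
                                  tensor_shape=TensorShapeProto())}
```

With the `Predict` change above, the `isinstance(v, TensorProto)` check passes such an output through untouched, while plain Numpy or scalar outputs still go through `np_to_tensor_proto`.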