From 90fc8187eb8d039af42f65d668ea8491d4b84d85 Mon Sep 17 00:00:00 2001
From: ljcornel
Date: Fri, 17 May 2024 16:27:57 +0200
Subject: [PATCH 1/3] Add initial support for Tiling inference [synchronous mode only]

---
 geti_sdk/data_models/enums/domain.py    |  1 +
 geti_sdk/deployment/deployed_model.py   | 61 ++++++++++++++-----
 .../results_to_prediction_converter.py  | 22 +++++++
 3 files changed, 70 insertions(+), 14 deletions(-)

diff --git a/geti_sdk/data_models/enums/domain.py b/geti_sdk/data_models/enums/domain.py
index 2bcaf7d7..af4e4648 100644
--- a/geti_sdk/data_models/enums/domain.py
+++ b/geti_sdk/data_models/enums/domain.py
@@ -30,6 +30,7 @@ class Domain(Enum):
     ANOMALY_SEGMENTATION = "ANOMALY_SEGMENTATION"
     INSTANCE_SEGMENTATION = "INSTANCE_SEGMENTATION"
     ROTATED_DETECTION = "ROTATED_DETECTION"
+    ANOMALY = "ANOMALY"
 
     def __str__(self) -> str:
         """
diff --git a/geti_sdk/deployment/deployed_model.py b/geti_sdk/deployment/deployed_model.py
index 34289d32..945fa53f 100644
--- a/geti_sdk/deployment/deployed_model.py
+++ b/geti_sdk/deployment/deployed_model.py
@@ -25,6 +25,7 @@
 import numpy as np
 from model_api.adapters import OpenvinoAdapter, OVMSAdapter
 from model_api.models import Model as model_api_Model
+from model_api.tilers import DetectionTiler, InstanceSegmentationTiler, Tiler
 from openvino.runtime import Core
 from packaging.version import Version
 
@@ -63,6 +64,11 @@
 SEGMENTATION_SALIENCY_KEY = "soft_prediction"
 FEATURE_VECTOR_KEY = "feature_vector"
 
+TILER_MAPPING = {
+    Domain.DETECTION: DetectionTiler,
+    Domain.INSTANCE_SEGMENTATION: InstanceSegmentationTiler,
+}
+
 OVMS_TIMEOUT = 10  # Max time to wait for OVMS models to become available
 
 
@@ -95,6 +101,8 @@ def __attrs_post_init__(self):
         self._feature_vector_location: Optional[str] = None
 
         self._converter: Optional[InferenceResultsToPredictionConverter] = None
+        self._tiling_enabled: bool = False
+        self._tiler: Optional[Tiler] = None
 
     @property
     def model_data_path(self) -> str:
@@ -288,6 +296,11 @@ def load_inference_model(
             )
             self._parse_label_schema_from_dict(label_dictionary)
 
+        # Load a Results-to-Prediction converter
+        self._converter = ConverterFactory.create_converter(
+            self.label_schema, configuration
+        )
+
         model = model_api_Model.create_model(
             model=model_adapter,
             model_type=model_type,
@@ -297,10 +310,20 @@ def load_inference_model(
         )
         self._inference_model = model
 
-        # Load a Results-to-Prediction converter
-        self._converter = ConverterFactory.create_converter(
-            self.label_schema, configuration
-        )
+        # Extract tiling parameters, if applicable
+        tiling_parameters = configuration_json.get("tiling_parameters", None)
+
+        enable_tiling = False
+        if tiling_parameters is not None:
+            enable_tiling = tiling_parameters.get("enable_tiling", False)
+            if isinstance(enable_tiling, dict):
+                enable_tiling = enable_tiling.get("value", False)
+
+        if enable_tiling:
+            logging.info("Tiling is enabled for this model, initializing Tiler")
+            tiler_type = TILER_MAPPING[self._converter.domain]
+            self._tiler = tiler_type(model=model, execution_mode="sync")
+            self._tiling_enabled = True
 
         # TODO: This is a workaround to fix the issue that causes the output blob name
         # to be unset. Remove this once it has been fixed on ModelAPI side
@@ -563,22 +586,32 @@ def infer(self, image: np.ndarray, explain: bool = False) -> Prediction:
             image
         :return: Dictionary containing the model outputs
         """
-        preprocessed_image, metadata = self._preprocess(image)
-        # metadata is a dict with keys 'original_shape' and 'resized_shape'
-        inference_results: Dict[str, np.ndarray] = self._inference_model.infer_sync(
-            preprocessed_image
-        )
-        postprocessing_results = self._postprocess(inference_results, metadata=metadata)
+        if not self._tiling_enabled:
+            preprocessed_image, metadata = self._preprocess(image)
+            # metadata is a dict with keys 'original_shape' and 'resized_shape'
+            inference_results: Dict[str, np.ndarray] = self._inference_model.infer_sync(
+                preprocessed_image
+            )
+            postprocessing_results = self._postprocess(
+                inference_results, metadata=metadata
+            )
+        else:
+            postprocessing_results = self._tiler(image)
 
         prediction = self._converter.convert_to_prediction(
-            postprocessing_results, image_shape=metadata["original_shape"]
+            postprocessing_results, image_shape=image.shape
         )
 
         # Add optional explainability outputs
         if explain:
-            saliency_map, repr_vector = self._postprocess_explain_outputs(
-                inference_results=inference_results, metadata=metadata
-            )
+            if not self._tiling_enabled:
+                saliency_map, repr_vector = self._postprocess_explain_outputs(
+                    inference_results=inference_results, metadata=metadata
+                )
+            else:
+                repr_vector = postprocessing_results.feature_vector
+                saliency_map = postprocessing_results.saliency_map
+
             prediction.feature_vector = repr_vector
             result_medium = ResultMedium(name="saliency map", type="saliency map")
             result_medium.data = saliency_map
diff --git a/geti_sdk/deployment/predictions_postprocessing/results_converter/results_to_prediction_converter.py b/geti_sdk/deployment/predictions_postprocessing/results_converter/results_to_prediction_converter.py
index a5fc6ff6..bffd296f 100644
--- a/geti_sdk/deployment/predictions_postprocessing/results_converter/results_to_prediction_converter.py
+++ b/geti_sdk/deployment/predictions_postprocessing/results_converter/results_to_prediction_converter.py
@@ -50,6 +50,16 @@ class InferenceResultsToPredictionConverter(metaclass=abc.ABCMeta):
     """Interface for the converter"""
 
+    @property
+    @abc.abstractmethod
+    def domain(self) -> Domain:
+        """
+        Return the domain for which the converter applies
+
+        :return: The task domain for which the label converter applies
+        """
+        raise NotImplementedError
+
     @abc.abstractmethod
     def convert_to_prediction(self, predictions: NamedTuple, **kwargs) -> Prediction:
         """
@@ -68,6 +78,8 @@ class ClassificationToPredictionConverter(InferenceResultsToPredictionConverter)
     :param label_schema: LabelSchema containing the label info of the task
     """
 
+    domain = Domain.CLASSIFICATION
+
     def __init__(self, label_schema: LabelSchema):
         all_labels = label_schema.get_labels(include_empty=True)
         # add empty labels if only one non-empty label exits
@@ -119,6 +131,8 @@ class DetectionToPredictionConverter(InferenceResultsToPredictionConverter):
     :param configuration: optional model configuration setting
     """
 
+    domain = Domain.DETECTION
+
     def __init__(
         self, label_schema: LabelSchema, configuration: Optional[Dict[str, Any]] = None
     ):
@@ -191,6 +205,8 @@ class RotatedRectToPredictionConverter(DetectionToPredictionConverter):
     :param label_schema: LabelSchema containing the label info of the task
     """
 
+    domain = Domain.ROTATED_DETECTION
+
     def convert_to_prediction(
         self, predictions: InstanceSegmentationResult, **kwargs
     ) -> Prediction:
@@ -256,6 +272,8 @@ def convert_to_prediction(
 class MaskToAnnotationConverter(InferenceResultsToPredictionConverter):
     """Converts DetectionBox Predictions ModelAPI to Prediction object."""
 
+    domain = Domain.INSTANCE_SEGMENTATION
+
     def __init__(
         self, label_schema: LabelSchema, configuration: Optional[Dict[str, Any]] = None
     ):
@@ -337,6 +355,8 @@ class SegmentationToPredictionConverter(InferenceResultsToPredictionConverter):
     :param label_schema: LabelSchema containing the label info of the task
     """
 
+    domain = Domain.SEGMENTATION
+
     def __init__(self, label_schema: LabelSchema):
         self.labels = label_schema.get_labels(include_empty=False)
         # NB: index=0 is reserved for the background label
@@ -366,6 +386,8 @@ class AnomalyToPredictionConverter(InferenceResultsToPredictionConverter):
     :param label_schema: LabelSchema containing the label info of the task
    """
 
+    domain = Domain.ANOMALY
+
     def __init__(self, label_schema: LabelSchema):
         self.labels = label_schema.get_labels(include_empty=False)
         self.normal_label = next(

From 96471626a3d8fce4112da26e9f4ee29681ac055d Mon Sep 17 00:00:00 2001
From: Ludo Cornelissen
Date: Tue, 21 May 2024 12:30:51 +0200
Subject: [PATCH 2/3] Update geti_sdk/deployment/deployed_model.py

Co-authored-by: Igor Davidyuk
---
 geti_sdk/deployment/deployed_model.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/geti_sdk/deployment/deployed_model.py b/geti_sdk/deployment/deployed_model.py
index 945fa53f..6c68d15c 100644
--- a/geti_sdk/deployment/deployed_model.py
+++ b/geti_sdk/deployment/deployed_model.py
@@ -321,7 +321,9 @@ def load_inference_model(
 
         if enable_tiling:
             logging.info("Tiling is enabled for this model, initializing Tiler")
-            tiler_type = TILER_MAPPING[self._converter.domain]
+            tiler_type = TILER_MAPPING.get(self._converter.domain, None)
+            if tiler_type is None:
+                raise ValueError(f"Tiling is not supported for domain {self._converter.domain}")
             self._tiler = tiler_type(model=model, execution_mode="sync")
             self._tiling_enabled = True
 

From 6e10928a179b04d282a136cfbd4aee6e36a3cc8c Mon Sep 17 00:00:00 2001
From: ljcornel
Date: Tue, 21 May 2024 12:53:12 +0200
Subject: [PATCH 3/3] Apply `black` formatting

---
 geti_sdk/deployment/deployed_model.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/geti_sdk/deployment/deployed_model.py b/geti_sdk/deployment/deployed_model.py
index 6c68d15c..344e5fbd 100644
--- a/geti_sdk/deployment/deployed_model.py
+++ b/geti_sdk/deployment/deployed_model.py
@@ -323,7 +323,9 @@ def load_inference_model(
             logging.info("Tiling is enabled for this model, initializing Tiler")
             tiler_type = TILER_MAPPING.get(self._converter.domain, None)
             if tiler_type is None:
-                raise ValueError(f"Tiling is not supported for domain {self._converter.domain}")
+                raise ValueError(
+                    f"Tiling is not supported for domain {self._converter.domain}"
+                )
             self._tiler = tiler_type(model=model, execution_mode="sync")
             self._tiling_enabled = True
 
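
A minimal usage sketch of how the synchronous tiling support added above could be exercised through the public Deployment API. Nothing changes on the caller's side: infer() dispatches to the Tiler internally whenever the model configuration has tiling enabled. The deployment folder and image file below are hypothetical placeholders, and the BGR-to-RGB conversion mirrors the convention used in the SDK examples.

import cv2

from geti_sdk.deployment import Deployment

# Load a deployment containing a tiling-enabled detection or instance
# segmentation model (the folder path is a hypothetical placeholder).
deployment = Deployment.from_folder("deployment")
# The Tiler is initialized during model loading when tiling is enabled.
deployment.load_inference_models(device="CPU")

# Read a test image and convert BGR -> RGB (the file path is a hypothetical placeholder).
image_bgr = cv2.imread("sample_image.jpg")
image_rgb = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2RGB)

# Runs tiled inference synchronously for tiling-enabled models.
prediction = deployment.infer(image_rgb)
print(f"Predicted {len(prediction.annotations)} annotations")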