From b8d1fd1bb231956c369a16a827fee19e54c5c05f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Tue, 2 Jul 2024 18:41:47 +0200 Subject: [PATCH 01/15] Create first draft of implementation for most important components --- .../common/dynamic_blocks/__init__.py | 0 .../common/dynamic_blocks/entities.py | 44 ++++ .../dynamic_blocks/manifest_assembler.py | 193 ++++++++++++++++++ .../core_steps/dynamic_blocs/__init__.py | 0 .../core_steps/dynamic_blocs/python_code.py | 24 +++ inference/core/workflows/prototypes/block.py | 3 +- 6 files changed, 262 insertions(+), 2 deletions(-) create mode 100644 inference/core/workflows/core_steps/common/dynamic_blocks/__init__.py create mode 100644 inference/core/workflows/core_steps/common/dynamic_blocks/entities.py create mode 100644 inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py create mode 100644 inference/core/workflows/core_steps/dynamic_blocs/__init__.py create mode 100644 inference/core/workflows/core_steps/dynamic_blocs/python_code.py diff --git a/inference/core/workflows/core_steps/common/dynamic_blocks/__init__.py b/inference/core/workflows/core_steps/common/dynamic_blocks/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py b/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py new file mode 100644 index 0000000000..4ed1169fde --- /dev/null +++ b/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py @@ -0,0 +1,44 @@ +from enum import Enum +from typing import Dict, List, Any + +from pydantic import BaseModel, Field + + +class SelectorType(Enum): + INPUT_IMAGE = "input_image" + INPUT_PARAMETER = "input_parameter" + STEP_OUTPUT = "step_output" + + +class ValueType(Enum): + ANY = "any" + INTEGER = "integer" + FLOAT = "float" + BOOLEAN = "boolean" + DICT = "dict" + LIST = "list" + STRING = "string" + + +class DynamicInputDefinition(BaseModel): + is_optional: bool = Field(default=False) + is_dimensionality_reference: bool = Field(default=False) + has_default_value: bool = Field(default=False) + dimensionality_offset: int = Field(default=0, ge=-1, le=1) + selector_types: List[SelectorType] = Field(default_factory=list) + selector_data_kind: Dict[SelectorType, List[str]] = Field(default_factory=dict) + value_types: List[ValueType] = Field(default_factory=lambda: [ValueType.ANY]) + default_value: Any = Field(default=None) + + +class DynamicOutputDefinition(BaseModel): + kind: List[str] = Field(default_factory=list) + + +class DynamicBlockManifest(BaseModel): + inputs: Dict[str, DynamicInputDefinition] + outputs: Dict[str, DynamicOutputDefinition] = Field(default_factory=dict) + output_dimensionality_offset: int = Field(default=0, ge=-1, le=1) + accepts_batch_input: bool = Field(default=False) + accepts_empty_values: bool = Field(default=False) + diff --git a/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py b/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py new file mode 100644 index 0000000000..6c5d729649 --- /dev/null +++ b/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py @@ -0,0 +1,193 @@ +from typing import Literal, Dict, Tuple, Any, Union, Optional, List + +from pydantic import create_model, ConfigDict, Field + +from inference.core.workflows.core_steps.common.dynamic_blocks.entities import DynamicBlockManifest, \ + DynamicInputDefinition, \ + SelectorType, ValueType, 
DynamicOutputDefinition +from inference.core.workflows.entities.base import OutputDefinition +from inference.core.workflows.entities.types import Kind, WorkflowImageSelector, WorkflowParameterSelector, \ + StepOutputSelector, WILDCARD_KIND +from inference.core.workflows.prototypes.block import WorkflowBlockManifest + + +def assembly_dynamic_block_manifest( + block_name: str, + block_type: str, + dynamic_manifest: DynamicBlockManifest, + kinds_lookup: Dict[str, Kind] +) -> WorkflowBlockManifest: + model_name = f"DynamicBlock{block_name}Type{block_type}" + inputs_definitions = build_inputs(inputs=dynamic_manifest.inputs, kinds_lookup=kinds_lookup) + model = create_model( + model_name, + __config__=ConfigDict(extra="allow"), + name=(str, ...), + type=(Literal[block_type], ...), + **inputs_definitions, + ) + outputs_definitions = build_outputs_definitions( + outputs=dynamic_manifest.outputs, + kinds_lookup=kinds_lookup, + ) + describe_outputs = lambda cls: outputs_definitions + setattr(model, "describe_outputs", classmethod(describe_outputs)) + setattr(model, "get_actual_outputs", describe_outputs) + accepts_batch_input = lambda cls: dynamic_manifest.accepts_batch_input + setattr(model, "accepts_batch_input", classmethod(accepts_batch_input)) + input_dimensionality_offsets = collect_input_dimensionality_offsets(inputs=dynamic_manifest.inputs) + get_input_dimensionality_offsets = lambda cls: input_dimensionality_offsets + setattr(model, "get_input_dimensionality_offsets", classmethod(get_input_dimensionality_offsets)) + dimensionality_reference = pick_dimensionality_referencE_property(inputs=dynamic_manifest.inputs) + get_dimensionality_reference_property = lambda cls: dimensionality_reference + setattr(model, "get_dimensionality_reference_property", classmethod(get_dimensionality_reference_property)) + get_output_dimensionality_offset = lambda cls: dynamic_manifest.output_dimensionality_offset + setattr(model, "get_output_dimensionality_offset", classmethod(get_output_dimensionality_offset)) + accepts_batch_input = lambda cls: dynamic_manifest.accepts_batch_input + setattr(model, "accepts_batch_input", classmethod(accepts_batch_input)) + accepts_empty_values = lambda cls: dynamic_manifest.accepts_empty_values + setattr(model, "accepts_empty_values", classmethod(accepts_empty_values)) + return model + + +PYTHON_TYPES_MAPPING = { + ValueType.ANY: Any, + ValueType.INTEGER: int, + ValueType.FLOAT: float, + ValueType.BOOLEAN: bool, + ValueType.DICT: dict, + ValueType.LIST: list, + ValueType.STRING: str, +} + + +def build_inputs( + inputs: Dict[str, DynamicInputDefinition], + kinds_lookup: Dict[str, Kind], +) -> Dict[str, Tuple[type, Field]]: + result = {} + for input_name, input_definition in inputs.items(): + input_type_union_elements = [] + for selector_type in input_definition.selector_types: + selector_kind_names = input_definition.selector_data_kind.get(selector_type, ["*"]) + selector_kind = [] + for kind_name in selector_kind_names: + selector_kind.append(kinds_lookup[kind_name]) + if selector_type is SelectorType.INPUT_IMAGE: + input_type_union_elements.append(WorkflowImageSelector) + elif selector_type is SelectorType.INPUT_PARAMETER: + input_type_union_elements.append(WorkflowParameterSelector(kind=selector_kind)) + else: + input_type_union_elements.append(StepOutputSelector(kind=selector_kind)) + for value_type_name in input_definition.value_types: + value_type = PYTHON_TYPES_MAPPING[value_type_name] + input_type_union_elements.append(value_type) + if not input_type_union_elements: + 
input_type_union_elements.append(Any) + if len(input_type_union_elements) > 1: + input_type = Union[tuple(input_type_union_elements)] + else: + input_type = input_type_union_elements[0] + if input_definition.is_optional: + input_type = Optional[input_type] + field_metadata = Field() + if input_definition.has_default_value: + default_value = input_definition.default_value + field_metadata_params = {} + if isinstance(default_value, list) or isinstance(default_value, dict) or isinstance(default_value, set): + field_metadata_params["default_factory"] = lambda: default_value + else: + field_metadata_params["default"] = default_value + field_metadata = Field(**field_metadata_params) + result[input_name] = input_type, field_metadata + return result + + +def build_outputs_definitions( + outputs: Dict[str, DynamicOutputDefinition], + kinds_lookup: Dict[str, Kind], +) -> List[OutputDefinition]: + result = [] + for name, definition in outputs.items(): + if not definition.kind: + result.append(OutputDefinition(name=name, kind=[WILDCARD_KIND])) + else: + actual_kinds = [kinds_lookup[kind_name] for kind_name in definition.kind] + result.append(OutputDefinition(name=name, kind=actual_kinds)) + return result + + +def collect_input_dimensionality_offsets( + inputs: Dict[str, DynamicInputDefinition], +) -> Dict[str, int]: + result = {} + for name, definition in inputs.items(): + if definition.dimensionality_offset != 0: + result[name] = definition.dimensionality_offset + return result + + +def pick_dimensionality_referencE_property(inputs: Dict[str, DynamicInputDefinition]) -> Optional[str]: + references = [] + for name, definition in inputs.items(): + if definition.is_dimensionality_reference: + references.append(name) + if not references: + return None + if len(references) == 1: + return references[0] + raise ValueError("Not expected to have multiple dimensionality references") + + +if __name__ == '__main__': + lookup = {"image": Kind(name="image"), "predictions": Kind(name="predictions")} + dynamic_manifest = DynamicBlockManifest( + inputs={ + "images": DynamicInputDefinition( + is_dimensionality_reference=True, + selector_types=[SelectorType.INPUT_IMAGE, SelectorType.STEP_OUTPUT], + selector_data_kind={ + SelectorType.INPUT_IMAGE: ["image"], + SelectorType.STEP_OUTPUT: ["image"], + }, + ), + "predictions": DynamicInputDefinition( + selector_types=[SelectorType.STEP_OUTPUT], + selector_data_kind={ + SelectorType.STEP_OUTPUT: ["predictions"], + }, + dimensionality_offset=1, + ), + "param": DynamicInputDefinition( + is_optional=True, + has_default_value=True, + value_types=[ValueType.STRING, ValueType.FLOAT], + default_value=None, + ) + }, + outputs={"result": DynamicOutputDefinition()}, + ) + + result = assembly_dynamic_block_manifest( + block_name="my_block", + block_type="custom_block", + dynamic_manifest=dynamic_manifest, + kinds_lookup=lookup, + ) + + result_instance = result( + name="a", + type="custom_block", + images="$inputs.image", + predictions="$steps.step.predictions", + ) + print(result_instance) + print(result_instance.describe_outputs()) + print(result_instance.get_actual_outputs()) + print(result_instance.get_input_dimensionality_offsets()) + print(result_instance.get_dimensionality_reference_property()) + print(result_instance.get_output_dimensionality_offset()) + print(result_instance.accepts_batch_input()) + print(result_instance.accepts_empty_values()) + + diff --git a/inference/core/workflows/core_steps/dynamic_blocs/__init__.py 
b/inference/core/workflows/core_steps/dynamic_blocs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/inference/core/workflows/core_steps/dynamic_blocs/python_code.py b/inference/core/workflows/core_steps/dynamic_blocs/python_code.py new file mode 100644 index 0000000000..c8a6ce3e34 --- /dev/null +++ b/inference/core/workflows/core_steps/dynamic_blocs/python_code.py @@ -0,0 +1,24 @@ +import types + + +def create_dynamic_module(code: str, module_name: str) -> types.ModuleType: + dynamic_module = types.ModuleType(module_name) + exec(code, dynamic_module.__dict__) + return dynamic_module + + +MY_CODE = """ +SOME = 31 + +def function(a, b): +return a + b +""" + + + +if __name__ == '__main__': + module = create_dynamic_module(code=MY_CODE, module_name="dynamic_module") + + print(module.function(a=[1, 2], c=[3, 4])) + + SyntaxError \ No newline at end of file diff --git a/inference/core/workflows/prototypes/block.py b/inference/core/workflows/prototypes/block.py index d143465d78..667c55c3df 100644 --- a/inference/core/workflows/prototypes/block.py +++ b/inference/core/workflows/prototypes/block.py @@ -1,10 +1,9 @@ from abc import ABC, abstractmethod -from typing import Any, Dict, List, Literal, Optional, Tuple, Type, Union +from typing import Any, Dict, List, Optional, Type, Union from openai import BaseModel from pydantic import ConfigDict, Field -from inference.core import logger from inference.core.workflows.entities.base import OutputDefinition from inference.core.workflows.entities.types import FlowControl from inference.core.workflows.errors import BlockInterfaceError From 52b2e6f6056498228ee6a5327ee964573194b83e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Mon, 8 Jul 2024 14:33:38 +0200 Subject: [PATCH 02/15] Add first scratch of implementation for python code block --- docs/workflows/blocks.md | 1 + docs/workflows/kinds.md | 28 +- .../common/dynamic_blocks/entities.py | 8 +- .../dynamic_blocks/manifest_assembler.py | 293 ++++++++++-------- .../core_steps/dynamic_blocs/python_code.py | 95 +++++- inference/core/workflows/core_steps/loader.py | 22 +- .../execution_engine/compiler/core.py | 6 + .../execution_engine/compiler/entities.py | 6 +- .../compiler/steps_initialiser.py | 45 ++- .../introspection/blocks_loader.py | 12 +- .../introspection/entities.py | 7 +- inference_cli/lib/cloud_adapter.py | 4 + 12 files changed, 364 insertions(+), 163 deletions(-) diff --git a/docs/workflows/blocks.md b/docs/workflows/blocks.md index adc30dd5ab..bf4095caf8 100644 --- a/docs/workflows/blocks.md +++ b/docs/workflows/blocks.md @@ -36,6 +36,7 @@ hide:

+

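(For orientation before the regenerated kinds listing below: the `kinds_lookup` argument threaded through `assembly_dynamic_block_manifest` in this patch is a plain mapping from kind name to `Kind` object. The following is a minimal sketch of how such a lookup could be assembled — `build_kinds_lookup` is an illustrative helper, not part of the patch; names missing from the lookup are handled inside the assembler itself via `kinds_lookup.get(kind_name, Kind(name=kind_name))`.)

from typing import Dict, List

from inference.core.workflows.entities.types import Kind


def build_kinds_lookup(declared_kinds: List[Kind]) -> Dict[str, Kind]:
    # Index declared kinds by their unique names so that dynamic manifests
    # can resolve the string identifiers used in `selector_data_kind` and
    # in output `kind` lists.
    return {kind.name: kind for kind in declared_kinds}
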
diff --git a/docs/workflows/kinds.md b/docs/workflows/kinds.md index faeef32b67..3dbc94c961 100644 --- a/docs/workflows/kinds.md +++ b/docs/workflows/kinds.md @@ -8,26 +8,26 @@ resolved we need a simple type system - that's what we call `kinds`. ## List of `workflows` kinds -* [`roboflow_project`](/workflows/kinds/roboflow_project): Roboflow project name -* [`dictionary`](/workflows/kinds/dictionary): Dictionary -* [`string`](/workflows/kinds/string): String value +* [`list_of_values`](/workflows/kinds/list_of_values): List of values of any types +* [`*`](/workflows/kinds/*): Equivalent of any element * [`Batch[dictionary]`](/workflows/kinds/batch_dictionary): Batch of dictionaries -* [`Batch[keypoint_detection_prediction]`](/workflows/kinds/batch_keypoint_detection_prediction): `'predictions'` key from Keypoint Detection Model output -* [`Batch[parent_id]`](/workflows/kinds/batch_parent_id): Identifier of parent for step output -* [`Batch[classification_prediction]`](/workflows/kinds/batch_classification_prediction): `'predictions'` key from Classification Model outputs -* [`roboflow_model_id`](/workflows/kinds/roboflow_model_id): Roboflow model id * [`Batch[top_class]`](/workflows/kinds/batch_top_class): Batch of string values representing top class predicted by classification model +* [`integer`](/workflows/kinds/integer): Integer value +* [`dictionary`](/workflows/kinds/dictionary): Dictionary +* [`Batch[classification_prediction]`](/workflows/kinds/batch_classification_prediction): `'predictions'` key from Classification Model outputs +* [`Batch[boolean]`](/workflows/kinds/batch_boolean): Boolean values batch * [`boolean`](/workflows/kinds/boolean): Boolean flag +* [`Batch[prediction_type]`](/workflows/kinds/batch_prediction_type): String value with type of prediction +* [`Batch[parent_id]`](/workflows/kinds/batch_parent_id): Identifier of parent for step output +* [`string`](/workflows/kinds/string): String value * [`Batch[instance_segmentation_prediction]`](/workflows/kinds/batch_instance_segmentation_prediction): `'predictions'` key from Instance Segmentation Model outputs -* [`*`](/workflows/kinds/*): Equivalent of any element -* [`integer`](/workflows/kinds/integer): Integer value +* [`float_zero_to_one`](/workflows/kinds/float_zero_to_one): `float` value in range `[0.0, 1.0]` * [`Batch[image_metadata]`](/workflows/kinds/batch_image_metadata): Dictionary with image metadata required by supervision -* [`Batch[bar_code_detection]`](/workflows/kinds/batch_bar_code_detection): Prediction with barcode detection * [`Batch[image]`](/workflows/kinds/batch_image): Image in workflows +* [`roboflow_project`](/workflows/kinds/roboflow_project): Roboflow project name * [`Batch[string]`](/workflows/kinds/batch_string): Batch of string values -* [`list_of_values`](/workflows/kinds/list_of_values): List of values of any types -* [`Batch[boolean]`](/workflows/kinds/batch_boolean): Boolean values batch * [`Batch[object_detection_prediction]`](/workflows/kinds/batch_object_detection_prediction): `'predictions'` key from Object Detection Model output -* [`float_zero_to_one`](/workflows/kinds/float_zero_to_one): `float` value in range `[0.0, 1.0]` -* [`Batch[prediction_type]`](/workflows/kinds/batch_prediction_type): String value with type of prediction +* [`Batch[keypoint_detection_prediction]`](/workflows/kinds/batch_keypoint_detection_prediction): `'predictions'` key from Keypoint Detection Model output +* [`Batch[bar_code_detection]`](/workflows/kinds/batch_bar_code_detection): Prediction with 
barcode detection +* [`roboflow_model_id`](/workflows/kinds/roboflow_model_id): Roboflow model id diff --git a/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py b/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py index 4ed1169fde..1a17fad034 100644 --- a/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py +++ b/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Dict, List, Any +from typing import Any, Dict, List from pydantic import BaseModel, Field @@ -21,24 +21,22 @@ class ValueType(Enum): class DynamicInputDefinition(BaseModel): + default_value: Any is_optional: bool = Field(default=False) is_dimensionality_reference: bool = Field(default=False) - has_default_value: bool = Field(default=False) dimensionality_offset: int = Field(default=0, ge=-1, le=1) selector_types: List[SelectorType] = Field(default_factory=list) selector_data_kind: Dict[SelectorType, List[str]] = Field(default_factory=dict) value_types: List[ValueType] = Field(default_factory=lambda: [ValueType.ANY]) - default_value: Any = Field(default=None) class DynamicOutputDefinition(BaseModel): kind: List[str] = Field(default_factory=list) -class DynamicBlockManifest(BaseModel): +class ManifestDescription(BaseModel): inputs: Dict[str, DynamicInputDefinition] outputs: Dict[str, DynamicOutputDefinition] = Field(default_factory=dict) output_dimensionality_offset: int = Field(default=0, ge=-1, le=1) accepts_batch_input: bool = Field(default=False) accepts_empty_values: bool = Field(default=False) - diff --git a/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py b/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py index 6c5d729649..4f43084680 100644 --- a/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py +++ b/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py @@ -1,25 +1,37 @@ -from typing import Literal, Dict, Tuple, Any, Union, Optional, List +from typing import Any, Dict, List, Literal, Optional, Tuple, Type, Union -from pydantic import create_model, ConfigDict, Field +from pydantic import BaseModel, ConfigDict, Field, create_model -from inference.core.workflows.core_steps.common.dynamic_blocks.entities import DynamicBlockManifest, \ - DynamicInputDefinition, \ - SelectorType, ValueType, DynamicOutputDefinition +from inference.core.workflows.core_steps.common.dynamic_blocks.entities import ( + DynamicInputDefinition, + DynamicOutputDefinition, + ManifestDescription, + SelectorType, + ValueType, +) from inference.core.workflows.entities.base import OutputDefinition -from inference.core.workflows.entities.types import Kind, WorkflowImageSelector, WorkflowParameterSelector, \ - StepOutputSelector, WILDCARD_KIND -from inference.core.workflows.prototypes.block import WorkflowBlockManifest +from inference.core.workflows.entities.types import ( + WILDCARD_KIND, + Kind, + StepOutputSelector, + WorkflowImageSelector, + WorkflowParameterSelector, +) def assembly_dynamic_block_manifest( block_name: str, block_type: str, - dynamic_manifest: DynamicBlockManifest, - kinds_lookup: Dict[str, Kind] -) -> WorkflowBlockManifest: - model_name = f"DynamicBlock{block_name}Type{block_type}" - inputs_definitions = build_inputs(inputs=dynamic_manifest.inputs, kinds_lookup=kinds_lookup) - model = create_model( + manifest_description: ManifestDescription, + kinds_lookup: Optional[Dict[str, Kind]] = None, +) 
-> Type[BaseModel]: + if not kinds_lookup: + kinds_lookup = {} + model_name = create_block_type_name(block_name=block_name) + inputs_definitions = build_inputs( + inputs=manifest_description.inputs, kinds_lookup=kinds_lookup + ) + manifest_class = create_model( model_name, __config__=ConfigDict(extra="allow"), name=(str, ...), @@ -27,27 +39,21 @@ def assembly_dynamic_block_manifest( **inputs_definitions, ) outputs_definitions = build_outputs_definitions( - outputs=dynamic_manifest.outputs, + outputs=manifest_description.outputs, kinds_lookup=kinds_lookup, ) - describe_outputs = lambda cls: outputs_definitions - setattr(model, "describe_outputs", classmethod(describe_outputs)) - setattr(model, "get_actual_outputs", describe_outputs) - accepts_batch_input = lambda cls: dynamic_manifest.accepts_batch_input - setattr(model, "accepts_batch_input", classmethod(accepts_batch_input)) - input_dimensionality_offsets = collect_input_dimensionality_offsets(inputs=dynamic_manifest.inputs) - get_input_dimensionality_offsets = lambda cls: input_dimensionality_offsets - setattr(model, "get_input_dimensionality_offsets", classmethod(get_input_dimensionality_offsets)) - dimensionality_reference = pick_dimensionality_referencE_property(inputs=dynamic_manifest.inputs) - get_dimensionality_reference_property = lambda cls: dimensionality_reference - setattr(model, "get_dimensionality_reference_property", classmethod(get_dimensionality_reference_property)) - get_output_dimensionality_offset = lambda cls: dynamic_manifest.output_dimensionality_offset - setattr(model, "get_output_dimensionality_offset", classmethod(get_output_dimensionality_offset)) - accepts_batch_input = lambda cls: dynamic_manifest.accepts_batch_input - setattr(model, "accepts_batch_input", classmethod(accepts_batch_input)) - accepts_empty_values = lambda cls: dynamic_manifest.accepts_empty_values - setattr(model, "accepts_empty_values", classmethod(accepts_empty_values)) - return model + return assembly_manifest_class_methods( + manifest_class=manifest_class, + outputs_definitions=outputs_definitions, + manifest_description=manifest_description, + ) + + +def create_block_type_name(block_name: str) -> str: + block_title = ( + block_name.strip().replace("-", " ").replace("_", " ").title().replace(" ", "") + ) + return f"DynamicBlock{block_title}" PYTHON_TYPES_MAPPING = { @@ -67,42 +73,95 @@ def build_inputs( ) -> Dict[str, Tuple[type, Field]]: result = {} for input_name, input_definition in inputs.items(): - input_type_union_elements = [] - for selector_type in input_definition.selector_types: - selector_kind_names = input_definition.selector_data_kind.get(selector_type, ["*"]) - selector_kind = [] - for kind_name in selector_kind_names: - selector_kind.append(kinds_lookup[kind_name]) - if selector_type is SelectorType.INPUT_IMAGE: - input_type_union_elements.append(WorkflowImageSelector) - elif selector_type is SelectorType.INPUT_PARAMETER: - input_type_union_elements.append(WorkflowParameterSelector(kind=selector_kind)) - else: - input_type_union_elements.append(StepOutputSelector(kind=selector_kind)) - for value_type_name in input_definition.value_types: - value_type = PYTHON_TYPES_MAPPING[value_type_name] - input_type_union_elements.append(value_type) - if not input_type_union_elements: - input_type_union_elements.append(Any) - if len(input_type_union_elements) > 1: - input_type = Union[tuple(input_type_union_elements)] + result[input_name] = build_input( + input_definition=input_definition, kinds_lookup=kinds_lookup + ) + return result + + 
+def build_input( + input_definition: DynamicInputDefinition, + kinds_lookup: Dict[str, Kind], +) -> Tuple[type, Field]: + input_type = build_input_field_type( + input_definition=input_definition, kinds_lookup=kinds_lookup + ) + field_metadata = build_input_field_metadata(input_definition=input_definition) + return input_type, field_metadata + + +def build_input_field_type( + input_definition: DynamicInputDefinition, + kinds_lookup: Dict[str, Kind], +) -> type: + input_type_union_elements = collect_python_types_for_selectors( + input_definition=input_definition, + kinds_lookup=kinds_lookup, + ) + input_type_union_elements += collect_python_types_for_values( + input_definition=input_definition + ) + if not input_type_union_elements: + input_type_union_elements.append(Any) + if len(input_type_union_elements) > 1: + input_type = Union[tuple(input_type_union_elements)] + else: + input_type = input_type_union_elements[0] + if input_definition.is_optional: + input_type = Optional[input_type] + return input_type + + +def collect_python_types_for_selectors( + input_definition: DynamicInputDefinition, + kinds_lookup: Dict[str, Kind], +) -> List[type]: + result = [] + for selector_type in input_definition.selector_types: + selector_kind_names = input_definition.selector_data_kind.get( + selector_type, ["*"] + ) + selector_kind = [] + for kind_name in selector_kind_names: + selector_kind.append(kinds_lookup.get(kind_name, Kind(name=kind_name))) + if selector_type is SelectorType.INPUT_IMAGE: + result.append(WorkflowImageSelector) + elif selector_type is SelectorType.INPUT_PARAMETER: + result.append(WorkflowParameterSelector(kind=selector_kind)) else: - input_type = input_type_union_elements[0] - if input_definition.is_optional: - input_type = Optional[input_type] - field_metadata = Field() - if input_definition.has_default_value: - default_value = input_definition.default_value - field_metadata_params = {} - if isinstance(default_value, list) or isinstance(default_value, dict) or isinstance(default_value, set): - field_metadata_params["default_factory"] = lambda: default_value - else: - field_metadata_params["default"] = default_value - field_metadata = Field(**field_metadata_params) - result[input_name] = input_type, field_metadata + result.append(StepOutputSelector(kind=selector_kind)) + return result + + +def collect_python_types_for_values( + input_definition: DynamicInputDefinition, +) -> List[type]: + result = [] + for value_type_name in input_definition.value_types: + value_type = PYTHON_TYPES_MAPPING[value_type_name] + result.append(value_type) return result +def build_input_field_metadata(input_definition: DynamicInputDefinition) -> Field: + default_value = input_definition.default_value + field_metadata_params = {} + if default_holds_compound_object(default_value=default_value): + field_metadata_params["default_factory"] = lambda: default_value + else: + field_metadata_params["default"] = default_value + field_metadata = Field(**field_metadata_params) + return field_metadata + + +def default_holds_compound_object(default_value: Any) -> bool: + return ( + isinstance(default_value, list) + or isinstance(default_value, dict) + or isinstance(default_value, set) + ) + + def build_outputs_definitions( outputs: Dict[str, DynamicOutputDefinition], kinds_lookup: Dict[str, Kind], @@ -112,7 +171,10 @@ def build_outputs_definitions( if not definition.kind: result.append(OutputDefinition(name=name, kind=[WILDCARD_KIND])) else: - actual_kinds = [kinds_lookup[kind_name] for kind_name in definition.kind] + 
actual_kinds = [ + kinds_lookup.get(kind_name, Kind(name=kind_name)) + for kind_name in definition.kind + ] result.append(OutputDefinition(name=name, kind=actual_kinds)) return result @@ -127,7 +189,50 @@ def collect_input_dimensionality_offsets( return result -def pick_dimensionality_referencE_property(inputs: Dict[str, DynamicInputDefinition]) -> Optional[str]: +def assembly_manifest_class_methods( + manifest_class: Type[BaseModel], + outputs_definitions: List[OutputDefinition], + manifest_description: ManifestDescription, +) -> Type[BaseModel]: + describe_outputs = lambda cls: outputs_definitions + setattr(manifest_class, "describe_outputs", classmethod(describe_outputs)) + setattr(manifest_class, "get_actual_outputs", describe_outputs) + accepts_batch_input = lambda cls: manifest_description.accepts_batch_input + setattr(manifest_class, "accepts_batch_input", classmethod(accepts_batch_input)) + input_dimensionality_offsets = collect_input_dimensionality_offsets( + inputs=manifest_description.inputs + ) + get_input_dimensionality_offsets = lambda cls: input_dimensionality_offsets + setattr( + manifest_class, + "get_input_dimensionality_offsets", + classmethod(get_input_dimensionality_offsets), + ) + dimensionality_reference = pick_dimensionality_reference_property( + inputs=manifest_description.inputs + ) + get_dimensionality_reference_property = lambda cls: dimensionality_reference + setattr( + manifest_class, + "get_dimensionality_reference_property", + classmethod(get_dimensionality_reference_property), + ) + get_output_dimensionality_offset = ( + lambda cls: manifest_description.output_dimensionality_offset + ) + setattr( + manifest_class, + "get_output_dimensionality_offset", + classmethod(get_output_dimensionality_offset), + ) + accepts_empty_values = lambda cls: manifest_description.accepts_empty_values + setattr(manifest_class, "accepts_empty_values", classmethod(accepts_empty_values)) + return manifest_class + + +def pick_dimensionality_reference_property( + inputs: Dict[str, DynamicInputDefinition] +) -> Optional[str]: references = [] for name, definition in inputs.items(): if definition.is_dimensionality_reference: @@ -137,57 +242,3 @@ def pick_dimensionality_referencE_property(inputs: Dict[str, DynamicInputDefinit if len(references) == 1: return references[0] raise ValueError("Not expected to have multiple dimensionality references") - - -if __name__ == '__main__': - lookup = {"image": Kind(name="image"), "predictions": Kind(name="predictions")} - dynamic_manifest = DynamicBlockManifest( - inputs={ - "images": DynamicInputDefinition( - is_dimensionality_reference=True, - selector_types=[SelectorType.INPUT_IMAGE, SelectorType.STEP_OUTPUT], - selector_data_kind={ - SelectorType.INPUT_IMAGE: ["image"], - SelectorType.STEP_OUTPUT: ["image"], - }, - ), - "predictions": DynamicInputDefinition( - selector_types=[SelectorType.STEP_OUTPUT], - selector_data_kind={ - SelectorType.STEP_OUTPUT: ["predictions"], - }, - dimensionality_offset=1, - ), - "param": DynamicInputDefinition( - is_optional=True, - has_default_value=True, - value_types=[ValueType.STRING, ValueType.FLOAT], - default_value=None, - ) - }, - outputs={"result": DynamicOutputDefinition()}, - ) - - result = assembly_dynamic_block_manifest( - block_name="my_block", - block_type="custom_block", - dynamic_manifest=dynamic_manifest, - kinds_lookup=lookup, - ) - - result_instance = result( - name="a", - type="custom_block", - images="$inputs.image", - predictions="$steps.step.predictions", - ) - print(result_instance) - 
print(result_instance.describe_outputs()) - print(result_instance.get_actual_outputs()) - print(result_instance.get_input_dimensionality_offsets()) - print(result_instance.get_dimensionality_reference_property()) - print(result_instance.get_output_dimensionality_offset()) - print(result_instance.accepts_batch_input()) - print(result_instance.accepts_empty_values()) - - diff --git a/inference/core/workflows/core_steps/dynamic_blocs/python_code.py b/inference/core/workflows/core_steps/dynamic_blocs/python_code.py index c8a6ce3e34..5fdcf96ef7 100644 --- a/inference/core/workflows/core_steps/dynamic_blocs/python_code.py +++ b/inference/core/workflows/core_steps/dynamic_blocs/python_code.py @@ -1,24 +1,93 @@ import types +from typing import List, Literal, Type +from uuid import uuid4 +from inference.core.workflows.core_steps.common.dynamic_blocks.entities import ( + ManifestDescription, +) +from inference.core.workflows.core_steps.common.dynamic_blocks.manifest_assembler import ( + assembly_dynamic_block_manifest, +) +from inference.core.workflows.entities.base import OutputDefinition +from inference.core.workflows.entities.types import WILDCARD_KIND +from inference.core.workflows.prototypes.block import ( + BlockResult, + WorkflowBlock, + WorkflowBlockManifest, +) -def create_dynamic_module(code: str, module_name: str) -> types.ModuleType: - dynamic_module = types.ModuleType(module_name) - exec(code, dynamic_module.__dict__) - return dynamic_module +IMPORTS_LINES = [ + "import supervision as sv", + "import numpy as np", + "import math", + "from inference.core.workflows.entities.base import Batch, WorkflowImageData", + "from inference.core.workflows.prototypes.block import BlockResult", +] + + +class CustomPythonDeclaredManifest(WorkflowBlockManifest): + name: str + type: Literal["CustomPython"] + manifest_description: ManifestDescription + python_code: str + function_name: str + @classmethod + def describe_outputs(cls) -> List[OutputDefinition]: + return [OutputDefinition(name="*", kind=[WILDCARD_KIND])] -MY_CODE = """ -SOME = 31 -def function(a, b): -return a + b -""" +def assembly_custom_python_block( + declared_manifest: CustomPythonDeclaredManifest, +) -> Type[WorkflowBlock]: + actual_manifest = assembly_dynamic_block_manifest( + block_name=declared_manifest.name, + block_type=declared_manifest.type, + manifest_description=declared_manifest.manifest_description, + ) + code_module = create_dynamic_module( + code=declared_manifest.python_code, + module_name=f"dynamic_module_{uuid4()}", + ) + if not hasattr(code_module, declared_manifest.function_name): + raise ValueError( + f"Cannot find function: {declared_manifest.function_name} in declared code." + ) + run_function = getattr(code_module, declared_manifest.function_name) + async def run(self, *args, **kwargs) -> BlockResult: + if not self._allow_custom_python_execution: + raise RuntimeError( + "It is not possible to execute CustomPython block in that configuration of `inference`. 
Set " + "ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=true" + ) + return run_function(*args, **kwargs) + def constructor(self, allow_custom_python_execution: bool): + self._allow_custom_python_execution = allow_custom_python_execution -if __name__ == '__main__': - module = create_dynamic_module(code=MY_CODE, module_name="dynamic_module") + @classmethod + def get_init_parameters(cls) -> List[str]: + return ["allow_custom_python_execution"] - print(module.function(a=[1, 2], c=[3, 4])) + @classmethod + def get_manifest(cls) -> Type[WorkflowBlockManifest]: + return actual_manifest - SyntaxError \ No newline at end of file + return type( + f"CustomPythonBlock-{uuid4()}", + (WorkflowBlock,), + { + "__init__": constructor, + "get_init_parameters": get_init_parameters, + "get_manifest": get_manifest, + "run": run, + }, + ) + + +def create_dynamic_module(code: str, module_name: str) -> types.ModuleType: + dynamic_module = types.ModuleType(module_name) + imports = "\n".join(IMPORTS_LINES) + "\n\n\n\n" + exec(imports + code, dynamic_module.__dict__) + return dynamic_module diff --git a/inference/core/workflows/core_steps/loader.py b/inference/core/workflows/core_steps/loader.py index e05a058bcf..8ae104e2ac 100644 --- a/inference/core/workflows/core_steps/loader.py +++ b/inference/core/workflows/core_steps/loader.py @@ -1,5 +1,9 @@ -from typing import List, Type +from typing import Callable, List, Tuple, Type, Union +from inference.core.workflows.core_steps.dynamic_blocs.python_code import ( + CustomPythonDeclaredManifest, + assembly_custom_python_block, +) from inference.core.workflows.core_steps.flow_control.continue_if import ContinueIfBlock from inference.core.workflows.core_steps.fusion.detections_consensus import ( DetectionsConsensusBlock, @@ -63,10 +67,21 @@ from inference.core.workflows.core_steps.transformations.relative_static_crop import ( RelativeStaticCropBlock, ) -from inference.core.workflows.prototypes.block import WorkflowBlock +from inference.core.workflows.prototypes.block import ( + WorkflowBlock, + WorkflowBlockManifest, +) -def load_blocks() -> List[Type[WorkflowBlock]]: +def load_blocks() -> List[ + Union[ + Type[WorkflowBlock], + Tuple[ + Type[WorkflowBlockManifest], + Callable[[Type[WorkflowBlockManifest]], WorkflowBlock], + ], + ] +]: return [ DetectionsConsensusBlock, ClipComparisonBlock, @@ -91,4 +106,5 @@ def load_blocks() -> List[Type[WorkflowBlock]]: ContinueIfBlock, PerspectiveCorrectionBlock, DynamicZonesBlock, + (CustomPythonDeclaredManifest, assembly_custom_python_block), ] diff --git a/inference/core/workflows/execution_engine/compiler/core.py b/inference/core/workflows/execution_engine/compiler/core.py index 2744cb88cd..2aaa2dd516 100644 --- a/inference/core/workflows/execution_engine/compiler/core.py +++ b/inference/core/workflows/execution_engine/compiler/core.py @@ -10,6 +10,7 @@ prepare_execution_graph, ) from inference.core.workflows.execution_engine.compiler.steps_initialiser import ( + initialise_dynamic_blocks, initialise_steps, ) from inference.core.workflows.execution_engine.compiler.syntactic_parser import ( @@ -38,6 +39,10 @@ def compile_workflow( parsed_workflow_definition = parse_workflow_definition( raw_workflow_definition=workflow_definition, ) + parsed_workflow_definition, dynamic_blocks_classes = initialise_dynamic_blocks( + available_blocks=available_blocks, + parsed_workflow_definition=parsed_workflow_definition, + ) validate_workflow_specification(workflow_definition=parsed_workflow_definition) execution_graph = prepare_execution_graph( 
workflow_definition=parsed_workflow_definition, @@ -45,6 +50,7 @@ def compile_workflow( steps = initialise_steps( steps_manifest=parsed_workflow_definition.steps, available_bocks=available_blocks, + dynamic_blocks_classes=dynamic_blocks_classes, explicit_init_parameters=init_parameters, initializers=initializers, ) diff --git a/inference/core/workflows/execution_engine/compiler/entities.py b/inference/core/workflows/execution_engine/compiler/entities.py index 4bb52d7dd6..f663c9b4fd 100644 --- a/inference/core/workflows/execution_engine/compiler/entities.py +++ b/inference/core/workflows/execution_engine/compiler/entities.py @@ -1,7 +1,7 @@ from abc import abstractmethod from dataclasses import dataclass, field from enum import Enum -from typing import Any, Dict, Generator, List, Optional, Set, Type, Union +from typing import Any, Callable, Dict, Generator, List, Optional, Set, Type, Union import networkx as nx @@ -19,7 +19,9 @@ class BlockSpecification: block_source: str identifier: str - block_class: Type[WorkflowBlock] + block_class: Union[ + Type[WorkflowBlock], Callable[[WorkflowBlockManifest], Type[WorkflowBlock]] + ] manifest_class: Type[WorkflowBlockManifest] diff --git a/inference/core/workflows/execution_engine/compiler/steps_initialiser.py b/inference/core/workflows/execution_engine/compiler/steps_initialiser.py index c9285867b8..27aa0982c2 100644 --- a/inference/core/workflows/execution_engine/compiler/steps_initialiser.py +++ b/inference/core/workflows/execution_engine/compiler/steps_initialiser.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Dict, List, Union +from dataclasses import replace +from typing import Any, Callable, Dict, List, Tuple, Type, Union from inference.core.workflows.errors import ( BlockInitParameterNotProvidedError, @@ -8,19 +9,61 @@ from inference.core.workflows.execution_engine.compiler.entities import ( BlockSpecification, InitialisedStep, + ParsedWorkflowDefinition, ) from inference.core.workflows.prototypes.block import WorkflowBlockManifest +def initialise_dynamic_blocks( + available_blocks: List[BlockSpecification], + parsed_workflow_definition: ParsedWorkflowDefinition, +) -> Tuple[ + ParsedWorkflowDefinition, Dict[Type[WorkflowBlockManifest], BlockSpecification] +]: + dynamic_blocks = { + block.manifest_class: block.block_class + for block in available_blocks + if not isinstance(block.block_class, type) + } + block_specification_by_class = { + block.manifest_class: block for block in available_blocks + } + dynamic_blocks_classes = {} + new_steps = [] + for step in parsed_workflow_definition.steps: + if type(step) not in dynamic_blocks: + new_steps.append(step) + continue + dynamic_block = dynamic_blocks[type(step)](step) + dynamic_block_manifest = dynamic_block.get_manifest() + manifest_instance = dynamic_block_manifest( + name=step.name, + type=step.type, + ) + new_steps.append(manifest_instance) + reference_specification = block_specification_by_class[type(step)] + dynamic_blocks_classes[dynamic_block_manifest] = BlockSpecification( + block_source=reference_specification.block_source, + identifier=reference_specification.identifier, + block_class=dynamic_block, + manifest_class=dynamic_block_manifest, + ) + updated_definition = replace(parsed_workflow_definition, steps=new_steps) + return updated_definition, dynamic_blocks_classes + + def initialise_steps( steps_manifest: List[WorkflowBlockManifest], available_bocks: List[BlockSpecification], + dynamic_blocks_classes: Dict[Type[WorkflowBlockManifest], BlockSpecification], 
explicit_init_parameters: Dict[str, Union[Any, Callable[[None], Any]]], initializers: Dict[str, Union[Any, Callable[[None], Any]]], ) -> List[InitialisedStep]: available_blocks_by_manifest_class = { block.manifest_class: block for block in available_bocks } + for manifest_class, specification in dynamic_blocks_classes.items(): + available_blocks_by_manifest_class[manifest_class] = specification initialised_steps = [] for step_manifest in steps_manifest: if type(step_manifest) not in available_blocks_by_manifest_class: diff --git a/inference/core/workflows/execution_engine/introspection/blocks_loader.py b/inference/core/workflows/execution_engine/introspection/blocks_loader.py index ca29730a5e..cc6cd376ec 100644 --- a/inference/core/workflows/execution_engine/introspection/blocks_loader.py +++ b/inference/core/workflows/execution_engine/introspection/blocks_loader.py @@ -110,14 +110,22 @@ def load_core_workflow_blocks() -> List[BlockSpecification]: already_spotted_blocks = set() result = [] for block in core_blocks: + if isinstance(block, tuple): + manifest_class = block[0] + block = block[1] + identifier = get_full_type_name(selected_type=manifest_class) + identifier = f"{identifier}DynamicBlock" + else: + manifest_class = block.get_manifest() + identifier = get_full_type_name(selected_type=block) if block in already_spotted_blocks: continue result.append( BlockSpecification( block_source="workflows_core", - identifier=get_full_type_name(selected_type=block), + identifier=identifier, block_class=block, - manifest_class=block.get_manifest(), + manifest_class=manifest_class, ) ) already_spotted_blocks.add(block) diff --git a/inference/core/workflows/execution_engine/introspection/entities.py b/inference/core/workflows/execution_engine/introspection/entities.py index f906d9514f..32849ca436 100644 --- a/inference/core/workflows/execution_engine/introspection/entities.py +++ b/inference/core/workflows/execution_engine/introspection/entities.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Dict, List, Optional, Set, Type +from typing import Callable, Dict, List, Optional, Set, Type, Union from pydantic import BaseModel, Field @@ -91,7 +91,10 @@ class DiscoveredConnections: class BlockDescription(BaseModel): manifest_class: Type[WorkflowBlockManifest] = Field(exclude=True) - block_class: Type[WorkflowBlock] = Field(exclude=True) + block_class: Union[ + Type[WorkflowBlock], + Callable[[Type[WorkflowBlockManifest]], Type[WorkflowBlock]], + ] = Field(exclude=True) block_schema: dict = Field( description="OpenAPI specification of block manifest that " "can be used to create workflow step in JSON definition." diff --git a/inference_cli/lib/cloud_adapter.py b/inference_cli/lib/cloud_adapter.py index 06ac95c2be..b546ea3c01 100644 --- a/inference_cli/lib/cloud_adapter.py +++ b/inference_cli/lib/cloud_adapter.py @@ -90,24 +90,28 @@ def _random_char(y): def cloud_status(): import sky + print("Getting status from skypilot...") print(sky.status()) def cloud_stop(cluster_name): import sky + print(f"Stopping skypilot deployment {cluster_name}...") print(sky.stop(cluster_name)) def cloud_start(cluster_name): import sky + print(f"Starting skypilot deployment {cluster_name}") print(sky.start(cluster_name)) def cloud_undeploy(cluster_name): import sky + print( f"Undeploying Roboflow Inference and deleting {cluster_name}, this may take a few minutes." 
) From 987802f4fe48f29ba388397e2e657dc7cd36aff1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Mon, 8 Jul 2024 15:18:45 +0200 Subject: [PATCH 03/15] Add tests for custom python blocks --- .../core_steps/dynamic_blocs/python_code.py | 1 + .../test_workflow_with_custom_python_block.py | 183 ++++++++++++++++++ 2 files changed, 184 insertions(+) create mode 100644 tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py diff --git a/inference/core/workflows/core_steps/dynamic_blocs/python_code.py b/inference/core/workflows/core_steps/dynamic_blocs/python_code.py index 5fdcf96ef7..d1ec3c40c1 100644 --- a/inference/core/workflows/core_steps/dynamic_blocs/python_code.py +++ b/inference/core/workflows/core_steps/dynamic_blocs/python_code.py @@ -17,6 +17,7 @@ ) IMPORTS_LINES = [ + "from typing import Any, List, Dict, Set", "import supervision as sv", "import numpy as np", "import math", diff --git a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py new file mode 100644 index 0000000000..50b978d764 --- /dev/null +++ b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py @@ -0,0 +1,183 @@ +import numpy as np +import pytest + +from inference.core.env import WORKFLOWS_MAX_CONCURRENT_STEPS +from inference.core.managers.base import ModelManager +from inference.core.workflows.core_steps.common.entities import StepExecutionMode +from inference.core.workflows.execution_engine.core import ExecutionEngine + +FUNCTION_TO_GET_OVERLAP_OF_BBOXES = """ +def function(predictions: sv.Detections, class_x: str, class_y: str) -> BlockResult: + bboxes_class_x = predictions[predictions.data["class_name"] == class_x] + bboxes_class_y = predictions[predictions.data["class_name"] == class_y] + overlap = [] + for bbox_x in bboxes_class_x: + bbox_x_coords = bbox_x[0] + bbox_overlaps = [] + for bbox_y in bboxes_class_y: + if bbox_y[-1]["detection_id"] == bbox_x[-1]["detection_id"]: + continue + bbox_y_coords = bbox_y[0] + x_min = max(bbox_x_coords[0], bbox_y_coords[0]) + y_min = max(bbox_x_coords[1], bbox_y_coords[1]) + x_max = min(bbox_x_coords[2], bbox_y_coords[2]) + y_max = min(bbox_x_coords[3], bbox_y_coords[3]) + # compute the area of intersection rectangle + intersection_area = max(0, x_max - x_min + 1) * max(0, y_max - y_min + 1) + box_x_area = (bbox_x_coords[2] - bbox_x_coords[0] + 1) * (bbox_x_coords[3] - bbox_x_coords[1] + 1) + local_overlap = intersection_area / (box_x_area + 1e-5) + bbox_overlaps.append(local_overlap) + overlap.append(bbox_overlaps) + return {"overlap": overlap} +""" + + +FUNCTION_TO_GET_MAXIMUM_OVERLAP = """ +def function(overlaps: List[List[float]]) -> BlockResult: + max_value = -1 + for overlap in overlaps: + for overlap_value in overlap: + if not max_value: + max_value = overlap_value + else: + max_value = max(max_value, overlap_value) + return {"max_value": max_value} +""" + +WORKFLOW_WITH_OVERLAP_MEASUREMENT = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "steps": [ + { + "type": "RoboflowObjectDetectionModel", + "name": "model", + "image": "$inputs.image", + "model_id": "yolov8n-640", + }, + { + "type": "CustomPython", + "name": "overlap_measurement", + "manifest_description": { + "inputs": { + "predictions": { + "selector_types": ["step_output"], + "default_value": "$steps.model.predictions", + }, + "class_x": { + "default_value": "dog", + 
}, + "class_y": { + "default_value": "dog", + }, + }, + "outputs": {"overlap": {"kind": []}}, + }, + "python_code": FUNCTION_TO_GET_OVERLAP_OF_BBOXES, + "function_name": "function", + }, + { + "type": "ContinueIf", + "name": "continue_if", + "condition_statement": { + "type": "StatementGroup", + "statements": [ + { + "type": "BinaryStatement", + "left_operand": { + "type": "DynamicOperand", + "operand_name": "overlaps", + "operations": [{"type": "SequenceLength"}], + }, + "comparator": {"type": "(Number) >="}, + "right_operand": { + "type": "StaticOperand", + "value": 1, + }, + } + ], + }, + "evaluation_parameters": {"overlaps": "$steps.overlap_measurement.overlap"}, + "next_steps": ["$steps.maximum_overlap"], + }, + { + "type": "CustomPython", + "name": "maximum_overlap", + "manifest_description": { + "inputs": { + "overlaps": { + "selector_types": ["step_output"], + "default_value": "$steps.overlap_measurement.overlap", + }, + }, + "outputs": {"max_value": {"kind": []}}, + }, + "python_code": FUNCTION_TO_GET_MAXIMUM_OVERLAP, + "function_name": "function", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "overlaps", + "selector": "$steps.overlap_measurement.overlap", + }, + { + "type": "JsonField", + "name": "max_overlap", + "selector": "$steps.maximum_overlap.max_value", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_blocks_measuring_overlap( + model_manager: ModelManager, + dogs_image: np.ndarray, + crowd_image: np.ndarray, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "workflows_core.allow_custom_python_execution": True, + } + execution_engine = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_OVERLAP_MEASUREMENT, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + # when + result = await execution_engine.run_async( + runtime_parameters={ + "image": [dogs_image, crowd_image], + } + ) + + # then + assert isinstance(result, list), "Expected list to be delivered" + assert len(result) == 2, "Expected 2 elements in the output for two input images" + assert set(result[0].keys()) == { + "overlaps", + "max_overlap", + }, "Expected all declared outputs to be delivered" + assert set(result[1].keys()) == { + "overlaps", + "max_overlap", + }, "Expected all declared outputs to be delivered" + assert ( + len(result[0]["overlaps"]) == 2 + ), "Expected 2 instances of dogs found, each overlap with another for first image" + assert ( + abs(result[0]["max_overlap"] - 0.177946) < 1e-3 + ), "Expected max overlap to be calculated properly" + assert ( + len(result[1]["overlaps"]) == 0 + ), "Expected no instances of dogs found for second image" + assert ( + result[1]["max_overlap"] is None + ), "Expected `max_overlap` not to be calculated for second image due to conditional execution" From 38e1e90ab0b0f100341587c883f329d39830b6af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Thu, 11 Jul 2024 14:56:28 +0200 Subject: [PATCH 04/15] WIP - this is safe commit, will not work --- inference/core/interfaces/http/http_api.py | 3 - .../interfaces/stream/inference_pipeline.py | 4 - inference/core/workflows/core_steps/loader.py | 61 +++++++- .../execution_engine/compiler/entities.py | 4 +- .../introspection/blocks_loader.py | 137 +++++++++++++----- 5 files changed, 154 insertions(+), 55 deletions(-) diff --git 
a/inference/core/interfaces/http/http_api.py b/inference/core/interfaces/http/http_api.py index 6663c196f4..aa9bbe08a9 100644 --- a/inference/core/interfaces/http/http_api.py +++ b/inference/core/interfaces/http/http_api.py @@ -461,13 +461,10 @@ async def process_workflow_inference_request( workflow_specification: dict, background_tasks: Optional[BackgroundTasks], ) -> WorkflowInferenceResponse: - step_execution_mode = StepExecutionMode(WORKFLOWS_STEP_EXECUTION_MODE) workflow_init_parameters = { "workflows_core.model_manager": model_manager, "workflows_core.api_key": workflow_request.api_key, "workflows_core.background_tasks": background_tasks, - "workflows_core.cache": cache, - "workflows_core.step_execution_mode": step_execution_mode, } execution_engine = ExecutionEngine.init( workflow_definition=workflow_specification, diff --git a/inference/core/interfaces/stream/inference_pipeline.py b/inference/core/interfaces/stream/inference_pipeline.py index 7e85939a93..95cefd6849 100644 --- a/inference/core/interfaces/stream/inference_pipeline.py +++ b/inference/core/interfaces/stream/inference_pipeline.py @@ -555,10 +555,6 @@ def init_with_workflow( workflow_init_parameters["workflows_core.background_tasks"] = ( background_tasks ) - workflow_init_parameters["workflows_core.cache"] = cache - workflow_init_parameters["workflows_core.step_execution_mode"] = ( - StepExecutionMode.LOCAL - ) execution_engine = ExecutionEngine.init( workflow_definition=workflow_specification, init_parameters=workflow_init_parameters, diff --git a/inference/core/workflows/core_steps/loader.py b/inference/core/workflows/core_steps/loader.py index 8ae104e2ac..f76c8e4bb8 100644 --- a/inference/core/workflows/core_steps/loader.py +++ b/inference/core/workflows/core_steps/loader.py @@ -1,9 +1,8 @@ from typing import Callable, List, Tuple, Type, Union -from inference.core.workflows.core_steps.dynamic_blocs.python_code import ( - CustomPythonDeclaredManifest, - assembly_custom_python_block, -) +from inference.core.cache import cache +from inference.core.env import API_KEY, WORKFLOWS_STEP_EXECUTION_MODE +from inference.core.workflows.core_steps.common.entities import StepExecutionMode from inference.core.workflows.core_steps.flow_control.continue_if import ContinueIfBlock from inference.core.workflows.core_steps.fusion.detections_consensus import ( DetectionsConsensusBlock, @@ -67,11 +66,26 @@ from inference.core.workflows.core_steps.transformations.relative_static_crop import ( RelativeStaticCropBlock, ) +from inference.core.workflows.entities.types import Kind, WILDCARD_KIND, IMAGE_KIND, BATCH_OF_IMAGES_KIND, \ + ROBOFLOW_MODEL_ID_KIND, ROBOFLOW_PROJECT_KIND, ROBOFLOW_API_KEY_KIND, FLOAT_ZERO_TO_ONE_KIND, LIST_OF_VALUES_KIND, \ + BATCH_OF_SERIALISED_PAYLOADS_KIND, BOOLEAN_KIND, BATCH_OF_BOOLEAN_KIND, INTEGER_KIND, STRING_KIND, \ + BATCH_OF_STRING_KIND, BATCH_OF_TOP_CLASS_KIND, FLOAT_KIND, DICTIONARY_KIND, BATCH_OF_DICTIONARY_KIND, \ + BATCH_OF_CLASSIFICATION_PREDICTION_KIND, DETECTION_KIND, POINT_KIND, ZONE_KIND, OBJECT_DETECTION_PREDICTION_KIND, \ + BATCH_OF_OBJECT_DETECTION_PREDICTION_KIND, INSTANCE_SEGMENTATION_PREDICTION_KIND, \ + BATCH_OF_INSTANCE_SEGMENTATION_PREDICTION_KIND, KEYPOINT_DETECTION_PREDICTION_KIND, \ + BATCH_OF_KEYPOINT_DETECTION_PREDICTION_KIND, BATCH_OF_QR_CODE_DETECTION_KIND, BATCH_OF_BAR_CODE_DETECTION_KIND, \ + BATCH_OF_PREDICTION_TYPE_KIND, BATCH_OF_PARENT_ID_KIND, BATCH_OF_IMAGE_METADATA_KIND from inference.core.workflows.prototypes.block import ( WorkflowBlock, WorkflowBlockManifest, ) 
+REGISTERED_INITIALIZERS = { + "api_key": lambda: API_KEY, + "cache": cache, + "step_execution_mode": StepExecutionMode(WORKFLOWS_STEP_EXECUTION_MODE), +} + def load_blocks() -> List[ Union[ @@ -106,5 +120,42 @@ def load_blocks() -> List[ ContinueIfBlock, PerspectiveCorrectionBlock, DynamicZonesBlock, - (CustomPythonDeclaredManifest, assembly_custom_python_block), + ] + + +def load_kinds() -> List[Kind]: + return [ + WILDCARD_KIND, + IMAGE_KIND, + BATCH_OF_IMAGES_KIND, + ROBOFLOW_MODEL_ID_KIND, + ROBOFLOW_PROJECT_KIND, + ROBOFLOW_API_KEY_KIND, + FLOAT_ZERO_TO_ONE_KIND, + LIST_OF_VALUES_KIND, + BATCH_OF_SERIALISED_PAYLOADS_KIND, + BOOLEAN_KIND, + BATCH_OF_BOOLEAN_KIND, + INTEGER_KIND, + STRING_KIND, + BATCH_OF_STRING_KIND, + BATCH_OF_TOP_CLASS_KIND, + FLOAT_KIND, + DICTIONARY_KIND, + BATCH_OF_DICTIONARY_KIND, + BATCH_OF_CLASSIFICATION_PREDICTION_KIND, + DETECTION_KIND, + POINT_KIND, + ZONE_KIND, + OBJECT_DETECTION_PREDICTION_KIND, + BATCH_OF_OBJECT_DETECTION_PREDICTION_KIND, + INSTANCE_SEGMENTATION_PREDICTION_KIND, + BATCH_OF_INSTANCE_SEGMENTATION_PREDICTION_KIND, + KEYPOINT_DETECTION_PREDICTION_KIND, + BATCH_OF_KEYPOINT_DETECTION_PREDICTION_KIND, + BATCH_OF_QR_CODE_DETECTION_KIND, + BATCH_OF_BAR_CODE_DETECTION_KIND, + BATCH_OF_PREDICTION_TYPE_KIND, + BATCH_OF_PARENT_ID_KIND, + BATCH_OF_IMAGE_METADATA_KIND, ] diff --git a/inference/core/workflows/execution_engine/compiler/entities.py b/inference/core/workflows/execution_engine/compiler/entities.py index f663c9b4fd..c0ccbe04c3 100644 --- a/inference/core/workflows/execution_engine/compiler/entities.py +++ b/inference/core/workflows/execution_engine/compiler/entities.py @@ -19,9 +19,7 @@ class BlockSpecification: block_source: str identifier: str - block_class: Union[ - Type[WorkflowBlock], Callable[[WorkflowBlockManifest], Type[WorkflowBlock]] - ] + block_class: Union[Type[WorkflowBlock]] manifest_class: Type[WorkflowBlockManifest] diff --git a/inference/core/workflows/execution_engine/introspection/blocks_loader.py b/inference/core/workflows/execution_engine/introspection/blocks_loader.py index cc6cd376ec..e9f805c709 100644 --- a/inference/core/workflows/execution_engine/introspection/blocks_loader.py +++ b/inference/core/workflows/execution_engine/introspection/blocks_loader.py @@ -4,7 +4,7 @@ from collections import Counter from typing import Any, Callable, Dict, List, Union -from inference.core.workflows.core_steps.loader import load_blocks +from inference.core.workflows.core_steps.loader import load_blocks, load_kinds, REGISTERED_INITIALIZERS from inference.core.workflows.entities.types import Kind from inference.core.workflows.errors import PluginInterfaceError, PluginLoadingError from inference.core.workflows.execution_engine.compiler.entities import ( @@ -21,31 +21,18 @@ build_human_friendly_block_name, get_full_type_name, ) +from inference.core.workflows.prototypes.block import WorkflowBlock WORKFLOWS_PLUGINS_ENV = "WORKFLOWS_PLUGINS" +WORKFLOWS_CORE_PLUGIN_NAME = "workflows_core" def describe_available_blocks() -> BlocksDescription: blocks = load_workflow_blocks() - declared_kinds = [] result = [] for block in blocks: block_schema = block.manifest_class.model_json_schema() outputs_manifest = block.manifest_class.describe_outputs() - schema_selectors = retrieve_selectors_from_schema( - schema=block_schema, - inputs_dimensionality_offsets=block.manifest_class.get_input_dimensionality_offsets(), - dimensionality_reference_property=block.manifest_class.get_dimensionality_reference_property(), - ) - block_kinds = [ - k - for s in 
schema_selectors.values() - for r in s.allowed_references - for k in r.kind - ] - declared_kinds.extend(block_kinds) - for output in outputs_manifest: - declared_kinds.extend(output.kind) manifest_type_identifiers = get_manifest_type_identifiers( block_schema=block_schema, block_source=block.block_source, @@ -68,8 +55,7 @@ def describe_available_blocks() -> BlocksDescription: ) _validate_loaded_blocks_names_uniqueness(blocks=result) _validate_loaded_blocks_manifest_type_identifiers(blocks=result) - declared_kinds = list(set(declared_kinds)) - _validate_used_kinds_uniqueness(declared_kinds=declared_kinds) + declared_kinds = load_all_defined_kinds() return BlocksDescription(blocks=result, declared_kinds=declared_kinds) @@ -110,19 +96,13 @@ def load_core_workflow_blocks() -> List[BlockSpecification]: already_spotted_blocks = set() result = [] for block in core_blocks: - if isinstance(block, tuple): - manifest_class = block[0] - block = block[1] - identifier = get_full_type_name(selected_type=manifest_class) - identifier = f"{identifier}DynamicBlock" - else: - manifest_class = block.get_manifest() - identifier = get_full_type_name(selected_type=block) + manifest_class = block.get_manifest() + identifier = get_full_type_name(selected_type=block) if block in already_spotted_blocks: continue result.append( BlockSpecification( - block_source="workflows_core", + block_source=WORKFLOWS_CORE_PLUGIN_NAME, identifier=identifier, block_class=block, manifest_class=manifest_class, @@ -140,13 +120,6 @@ def load_plugins_blocks() -> List[BlockSpecification]: return custom_blocks -def get_plugin_modules() -> List[str]: - plugins_to_load = os.environ.get(WORKFLOWS_PLUGINS_ENV) - if plugins_to_load is None: - return [] - return plugins_to_load.split(",") - - def load_blocks_from_plugin(plugin_name: str) -> List[BlockSpecification]: try: return _load_blocks_from_plugin(plugin_name=plugin_name) @@ -171,7 +144,21 @@ def _load_blocks_from_plugin(plugin_name: str) -> List[BlockSpecification]: blocks = module.load_blocks() already_spotted_blocks = set() result = [] - for block in blocks: + if not isinstance(blocks, list): + raise PluginInterfaceError( + public_message=f"Provided workflow plugin `{plugin_name}` implement `load_blocks()` function " + f"incorrectly. Expected to return list of entries being subclass of `WorkflowBlock`, " + f"but entry of different characteristics found: {type(blocks)}.", + context="workflow_compilation | blocks_loading", + ) + for i, block in enumerate(blocks): + if not isinstance(block, type) or not issubclass(block, WorkflowBlock): + raise PluginInterfaceError( + public_message=f"Provided workflow plugin `{plugin_name}` implement `load_blocks()` function " + f"incorrectly. 
Expected to return list of entries being subclass of `WorkflowBlock`, " + f"but entry of different characteristics found: {block} at position: {i}.", + context="workflow_compilation | blocks_loading", + ) if block in already_spotted_blocks: continue result.append( @@ -187,15 +174,20 @@ def _load_blocks_from_plugin(plugin_name: str) -> List[BlockSpecification]: def load_initializers() -> Dict[str, Union[Any, Callable[[None], Any]]]: - plugins_to_load = os.environ.get(WORKFLOWS_PLUGINS_ENV) - if plugins_to_load is None: - return {} - result = {} - for plugin_name in plugins_to_load.split(","): + plugins_to_load = get_plugin_modules() + result = load_core_blocks_initializers() + for plugin_name in plugins_to_load: result.update(load_initializers_from_plugin(plugin_name=plugin_name)) return result +def load_core_blocks_initializers() -> Dict[str, Union[Any, Callable[[None], Any]]]: + return { + f"{WORKFLOWS_CORE_PLUGIN_NAME}.{parameter_name}": initializer + for parameter_name, initializer in REGISTERED_INITIALIZERS.items() + } + + def load_initializers_from_plugin( plugin_name: str, ) -> Dict[str, Union[Any, Callable[[None], Any]]]: @@ -273,3 +265,68 @@ def _validate_used_kinds_uniqueness(declared_kinds: List[Kind]) -> None: f"the same name.", context="workflow_compilation | blocks_loading", ) + + +def load_all_defined_kinds() -> List[Kind]: + core_blocks_kinds = load_kinds() + plugins_kinds = load_plugins_kinds() + declared_kinds = core_blocks_kinds + plugins_kinds + declared_kinds = list(set(declared_kinds)) + _validate_used_kinds_uniqueness(declared_kinds=declared_kinds) + return declared_kinds + + +def load_plugins_kinds() -> List[Kind]: + plugins_to_load = get_plugin_modules() + result = [] + for plugin_name in plugins_to_load: + result.extend(load_plugin_kinds(plugin_name=plugin_name)) + return result + + +def load_plugin_kinds(plugin_name: str) -> List[Kind]: + try: + return _load_plugin_kinds(plugin_name=plugin_name) + except ImportError as e: + raise PluginLoadingError( + public_message=f"It is not possible to load kinds from workflow plugin `{plugin_name}`. " + f"Make sure the library providing custom step is correctly installed in Python environment.", + context="workflow_compilation | blocks_loading", + inner_error=e, + ) from e + except AttributeError as e: + raise PluginInterfaceError( + public_message=f"Provided workflow plugin `{plugin_name}` do not implement blocks loading " + f"interface correctly and cannot be loaded.", + context="workflow_compilation | blocks_loading", + inner_error=e, + ) from e + + +def _load_plugin_kinds(plugin_name: str) -> List[Kind]: + module = importlib.import_module(plugin_name) + if not hasattr(module, "load_kinds"): + return [] + kinds_extractor = getattr(module, "load_kinds") + if not callable(kinds_extractor): + logging.warning( + f"Found `load_kinds` symbol in plugin `{plugin_name}` module init, but it is not callable. " + f"Not importing kinds from that plugin." + ) + return [] + kinds = kinds_extractor() + if not isinstance(kinds, list) or not all(isinstance(e, Kind) for e in kinds): + raise PluginInterfaceError( + public_message=f"Provided workflow plugin `{plugin_name}` do not implement blocks loading " + f"interface correctly and cannot be loaded. 
Return value of `load_kinds()` "
+            f"is not list of objects `Kind`.",
+            context="workflow_compilation | blocks_loading",
+        )
+    return kinds
+
+
+def get_plugin_modules() -> List[str]:
+    plugins_to_load = os.environ.get(WORKFLOWS_PLUGINS_ENV)
+    if plugins_to_load is None:
+        return []
+    return plugins_to_load.split(",")

From 92160637ee450919ce40d84362c75005287c71e1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?=
Date: Mon, 15 Jul 2024 18:05:14 +0200
Subject: [PATCH 05/15] Change how the Python block works, following the
 suggested direction

---
 inference/core/env.py                         |   3 +
 inference/core/interfaces/http/http_api.py    |   3 +-
 .../common/dynamic_blocks/entities.py         |  42 -----
 .../core_steps/dynamic_blocs/__init__.py      |   0
 inference/core/workflows/core_steps/loader.py |  47 ++++-
 .../execution_engine/compiler/core.py         |  19 +-
 .../compiler/steps_initialiser.py             |  41 ----
 .../compiler/syntactic_parser.py              |  17 +-
 .../dynamic_blocs}/__init__.py                |   0
 .../dynamic_blocs/block_assembler.py}         | 128 ++++++++++---
 .../dynamic_blocs/block_scaffolding.py}       |  47 ++---
 .../dynamic_blocs/entities.py                 | 130 +++++++++++++
 .../execution_engine/dynamic_blocs/loader.py  |  12 ++
 .../introspection/blocks_loader.py            |  29 +--
 .../test_workflow_with_custom_python_block.py |  84 ++++++---
 .../formatters/test_property_extraction.py    |   2 +-
 .../test_detections_classes_replacement.py    | 176 ++++++++++++------
 .../fusion/test_domension_collapse.py         |   9 +-
 .../introspection/test_blocks_loader.py       |   4 +-
 19 files changed, 518 insertions(+), 275 deletions(-)
 delete mode 100644 inference/core/workflows/core_steps/common/dynamic_blocks/entities.py
 delete mode 100644 inference/core/workflows/core_steps/dynamic_blocs/__init__.py
 rename inference/core/workflows/{core_steps/common/dynamic_blocks => execution_engine/dynamic_blocs}/__init__.py (100%)
 rename inference/core/workflows/{core_steps/common/dynamic_blocks/manifest_assembler.py => execution_engine/dynamic_blocs/block_assembler.py} (67%)
 rename inference/core/workflows/{core_steps/dynamic_blocs/python_code.py => execution_engine/dynamic_blocs/block_scaffolding.py} (55%)
 create mode 100644 inference/core/workflows/execution_engine/dynamic_blocs/entities.py
 create mode 100644 inference/core/workflows/execution_engine/dynamic_blocs/loader.py

diff --git a/inference/core/env.py b/inference/core/env.py
index 84beea90fb..f0a9e9f7e5 100644
--- a/inference/core/env.py
+++ b/inference/core/env.py
@@ -393,6 +393,9 @@
 WORKFLOWS_REMOTE_EXECUTION_MAX_STEP_CONCURRENT_REQUESTS = int(
     os.getenv("WORKFLOWS_REMOTE_EXECUTION_MAX_STEP_CONCURRENT_REQUESTS", "8")
 )
+ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS = str2bool(
+    os.getenv("ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS", True)
+)
 
 MODEL_VALIDATION_DISABLED = str2bool(os.getenv("MODEL_VALIDATION_DISABLED", "False"))
 
diff --git a/inference/core/interfaces/http/http_api.py b/inference/core/interfaces/http/http_api.py
index aa9bbe08a9..085f90f9e3 100644
--- a/inference/core/interfaces/http/http_api.py
+++ b/inference/core/interfaces/http/http_api.py
@@ -957,7 +957,8 @@ async def get_dynamic_block_outputs(
                 "outputs": [],
             }
             parsed_definition = parse_workflow_definition(
-                raw_workflow_definition=dummy_workflow_definition
+                raw_workflow_definition=dummy_workflow_definition,
+                dynamic_blocks=[],
             )
             parsed_manifest = parsed_definition.steps[0]
             return parsed_manifest.get_actual_outputs()
diff --git a/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py b/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py
deleted file mode
100644 index 1a17fad034..0000000000 --- a/inference/core/workflows/core_steps/common/dynamic_blocks/entities.py +++ /dev/null @@ -1,42 +0,0 @@ -from enum import Enum -from typing import Any, Dict, List - -from pydantic import BaseModel, Field - - -class SelectorType(Enum): - INPUT_IMAGE = "input_image" - INPUT_PARAMETER = "input_parameter" - STEP_OUTPUT = "step_output" - - -class ValueType(Enum): - ANY = "any" - INTEGER = "integer" - FLOAT = "float" - BOOLEAN = "boolean" - DICT = "dict" - LIST = "list" - STRING = "string" - - -class DynamicInputDefinition(BaseModel): - default_value: Any - is_optional: bool = Field(default=False) - is_dimensionality_reference: bool = Field(default=False) - dimensionality_offset: int = Field(default=0, ge=-1, le=1) - selector_types: List[SelectorType] = Field(default_factory=list) - selector_data_kind: Dict[SelectorType, List[str]] = Field(default_factory=dict) - value_types: List[ValueType] = Field(default_factory=lambda: [ValueType.ANY]) - - -class DynamicOutputDefinition(BaseModel): - kind: List[str] = Field(default_factory=list) - - -class ManifestDescription(BaseModel): - inputs: Dict[str, DynamicInputDefinition] - outputs: Dict[str, DynamicOutputDefinition] = Field(default_factory=dict) - output_dimensionality_offset: int = Field(default=0, ge=-1, le=1) - accepts_batch_input: bool = Field(default=False) - accepts_empty_values: bool = Field(default=False) diff --git a/inference/core/workflows/core_steps/dynamic_blocs/__init__.py b/inference/core/workflows/core_steps/dynamic_blocs/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/inference/core/workflows/core_steps/loader.py b/inference/core/workflows/core_steps/loader.py index c34b2abaf2..116b209839 100644 --- a/inference/core/workflows/core_steps/loader.py +++ b/inference/core/workflows/core_steps/loader.py @@ -79,22 +79,49 @@ from inference.core.workflows.core_steps.transformations.relative_static_crop import ( RelativeStaticCropBlock, ) -from inference.core.workflows.entities.types import Kind, WILDCARD_KIND, IMAGE_KIND, BATCH_OF_IMAGES_KIND, \ - ROBOFLOW_MODEL_ID_KIND, ROBOFLOW_PROJECT_KIND, ROBOFLOW_API_KEY_KIND, FLOAT_ZERO_TO_ONE_KIND, LIST_OF_VALUES_KIND, \ - BATCH_OF_SERIALISED_PAYLOADS_KIND, BOOLEAN_KIND, BATCH_OF_BOOLEAN_KIND, INTEGER_KIND, STRING_KIND, \ - BATCH_OF_STRING_KIND, BATCH_OF_TOP_CLASS_KIND, FLOAT_KIND, DICTIONARY_KIND, BATCH_OF_DICTIONARY_KIND, \ - BATCH_OF_CLASSIFICATION_PREDICTION_KIND, DETECTION_KIND, POINT_KIND, ZONE_KIND, OBJECT_DETECTION_PREDICTION_KIND, \ - BATCH_OF_OBJECT_DETECTION_PREDICTION_KIND, INSTANCE_SEGMENTATION_PREDICTION_KIND, \ - BATCH_OF_INSTANCE_SEGMENTATION_PREDICTION_KIND, KEYPOINT_DETECTION_PREDICTION_KIND, \ - BATCH_OF_KEYPOINT_DETECTION_PREDICTION_KIND, BATCH_OF_QR_CODE_DETECTION_KIND, BATCH_OF_BAR_CODE_DETECTION_KIND, \ - BATCH_OF_PREDICTION_TYPE_KIND, BATCH_OF_PARENT_ID_KIND, BATCH_OF_IMAGE_METADATA_KIND +from inference.core.workflows.entities.types import ( + BATCH_OF_BAR_CODE_DETECTION_KIND, + BATCH_OF_BOOLEAN_KIND, + BATCH_OF_CLASSIFICATION_PREDICTION_KIND, + BATCH_OF_DICTIONARY_KIND, + BATCH_OF_IMAGE_METADATA_KIND, + BATCH_OF_IMAGES_KIND, + BATCH_OF_INSTANCE_SEGMENTATION_PREDICTION_KIND, + BATCH_OF_KEYPOINT_DETECTION_PREDICTION_KIND, + BATCH_OF_OBJECT_DETECTION_PREDICTION_KIND, + BATCH_OF_PARENT_ID_KIND, + BATCH_OF_PREDICTION_TYPE_KIND, + BATCH_OF_QR_CODE_DETECTION_KIND, + BATCH_OF_SERIALISED_PAYLOADS_KIND, + BATCH_OF_STRING_KIND, + BATCH_OF_TOP_CLASS_KIND, + BOOLEAN_KIND, + DETECTION_KIND, + DICTIONARY_KIND, + 
FLOAT_KIND, + FLOAT_ZERO_TO_ONE_KIND, + IMAGE_KIND, + INSTANCE_SEGMENTATION_PREDICTION_KIND, + INTEGER_KIND, + KEYPOINT_DETECTION_PREDICTION_KIND, + LIST_OF_VALUES_KIND, + OBJECT_DETECTION_PREDICTION_KIND, + POINT_KIND, + ROBOFLOW_API_KEY_KIND, + ROBOFLOW_MODEL_ID_KIND, + ROBOFLOW_PROJECT_KIND, + STRING_KIND, + WILDCARD_KIND, + ZONE_KIND, + Kind, +) from inference.core.workflows.prototypes.block import ( WorkflowBlock, WorkflowBlockManifest, ) REGISTERED_INITIALIZERS = { - "api_key": lambda: API_KEY, + "api_key": API_KEY, "cache": cache, "step_execution_mode": StepExecutionMode(WORKFLOWS_STEP_EXECUTION_MODE), } diff --git a/inference/core/workflows/execution_engine/compiler/core.py b/inference/core/workflows/execution_engine/compiler/core.py index 2aaa2dd516..2b0c53ec4f 100644 --- a/inference/core/workflows/execution_engine/compiler/core.py +++ b/inference/core/workflows/execution_engine/compiler/core.py @@ -10,7 +10,6 @@ prepare_execution_graph, ) from inference.core.workflows.execution_engine.compiler.steps_initialiser import ( - initialise_dynamic_blocks, initialise_steps, ) from inference.core.workflows.execution_engine.compiler.syntactic_parser import ( @@ -23,6 +22,9 @@ validate_workflow_specification, ) from inference.core.workflows.execution_engine.debugger.core import dump_execution_graph +from inference.core.workflows.execution_engine.dynamic_blocs.block_assembler import ( + compile_dynamic_blocks, +) from inference.core.workflows.execution_engine.introspection.blocks_loader import ( load_initializers, load_workflow_blocks, @@ -34,14 +36,16 @@ def compile_workflow( workflow_definition: dict, init_parameters: Dict[str, Union[Any, Callable[[None], Any]]], ) -> CompiledWorkflow: - available_blocks = load_workflow_blocks() + statically_defined_blocks = load_workflow_blocks() initializers = load_initializers() + dynamic_blocks = compile_dynamic_blocks( + dynamic_blocks_definitions=workflow_definition.get( + "dynamic_blocks_definitions", [] + ) + ) parsed_workflow_definition = parse_workflow_definition( raw_workflow_definition=workflow_definition, - ) - parsed_workflow_definition, dynamic_blocks_classes = initialise_dynamic_blocks( - available_blocks=available_blocks, - parsed_workflow_definition=parsed_workflow_definition, + dynamic_blocks=dynamic_blocks, ) validate_workflow_specification(workflow_definition=parsed_workflow_definition) execution_graph = prepare_execution_graph( @@ -49,8 +53,7 @@ def compile_workflow( ) steps = initialise_steps( steps_manifest=parsed_workflow_definition.steps, - available_bocks=available_blocks, - dynamic_blocks_classes=dynamic_blocks_classes, + available_bocks=statically_defined_blocks + dynamic_blocks, explicit_init_parameters=init_parameters, initializers=initializers, ) diff --git a/inference/core/workflows/execution_engine/compiler/steps_initialiser.py b/inference/core/workflows/execution_engine/compiler/steps_initialiser.py index f3d195f99e..b7ce38c3a0 100644 --- a/inference/core/workflows/execution_engine/compiler/steps_initialiser.py +++ b/inference/core/workflows/execution_engine/compiler/steps_initialiser.py @@ -14,56 +14,15 @@ from inference.core.workflows.prototypes.block import WorkflowBlockManifest -def initialise_dynamic_blocks( - available_blocks: List[BlockSpecification], - parsed_workflow_definition: ParsedWorkflowDefinition, -) -> Tuple[ - ParsedWorkflowDefinition, Dict[Type[WorkflowBlockManifest], BlockSpecification] -]: - dynamic_blocks = { - block.manifest_class: block.block_class - for block in available_blocks - if not 
isinstance(block.block_class, type) - } - block_specification_by_class = { - block.manifest_class: block for block in available_blocks - } - dynamic_blocks_classes = {} - new_steps = [] - for step in parsed_workflow_definition.steps: - if type(step) not in dynamic_blocks: - new_steps.append(step) - continue - dynamic_block = dynamic_blocks[type(step)](step) - dynamic_block_manifest = dynamic_block.get_manifest() - manifest_instance = dynamic_block_manifest( - name=step.name, - type=step.type, - ) - new_steps.append(manifest_instance) - reference_specification = block_specification_by_class[type(step)] - dynamic_blocks_classes[dynamic_block_manifest] = BlockSpecification( - block_source=reference_specification.block_source, - identifier=reference_specification.identifier, - block_class=dynamic_block, - manifest_class=dynamic_block_manifest, - ) - updated_definition = replace(parsed_workflow_definition, steps=new_steps) - return updated_definition, dynamic_blocks_classes - - def initialise_steps( steps_manifest: List[WorkflowBlockManifest], available_bocks: List[BlockSpecification], - dynamic_blocks_classes: Dict[Type[WorkflowBlockManifest], BlockSpecification], explicit_init_parameters: Dict[str, Union[Any, Callable[[None], Any]]], initializers: Dict[str, Union[Any, Callable[[None], Any]]], ) -> List[InitialisedStep]: available_blocks_by_manifest_class = { block.manifest_class: block for block in available_bocks } - for manifest_class, specification in dynamic_blocks_classes.items(): - available_blocks_by_manifest_class[manifest_class] = specification initialised_steps = [] for step_manifest in steps_manifest: if type(step_manifest) not in available_blocks_by_manifest_class: diff --git a/inference/core/workflows/execution_engine/compiler/syntactic_parser.py b/inference/core/workflows/execution_engine/compiler/syntactic_parser.py index 7a19799b56..859d4a90ec 100644 --- a/inference/core/workflows/execution_engine/compiler/syntactic_parser.py +++ b/inference/core/workflows/execution_engine/compiler/syntactic_parser.py @@ -7,17 +7,24 @@ from inference.core.workflows.entities.base import InputType, JsonField from inference.core.workflows.errors import WorkflowSyntaxError from inference.core.workflows.execution_engine.compiler.entities import ( + BlockSpecification, ParsedWorkflowDefinition, ) +from inference.core.workflows.execution_engine.dynamic_blocs.entities import ( + DynamicBlockDefinition, +) from inference.core.workflows.execution_engine.introspection.blocks_loader import ( + load_all_defined_kinds, load_workflow_blocks, ) def parse_workflow_definition( - raw_workflow_definition: dict, + raw_workflow_definition: dict, dynamic_blocks: List[BlockSpecification] ) -> ParsedWorkflowDefinition: - workflow_definition_class = build_workflow_definition_entity() + workflow_definition_class = build_workflow_definition_entity( + dynamic_blocks=dynamic_blocks, + ) try: workflow_definition = workflow_definition_class.model_validate( raw_workflow_definition @@ -36,8 +43,10 @@ def parse_workflow_definition( ) from e -def build_workflow_definition_entity() -> Type[BaseModel]: - blocks = load_workflow_blocks() +def build_workflow_definition_entity( + dynamic_blocks: List[BlockSpecification], +) -> Type[BaseModel]: + blocks = load_workflow_blocks() + dynamic_blocks steps_manifests = tuple(block.manifest_class for block in blocks) block_manifest_types_union = Union[steps_manifests] block_type = Annotated[block_manifest_types_union, Field(discriminator="type")] diff --git 
a/inference/core/workflows/core_steps/common/dynamic_blocks/__init__.py b/inference/core/workflows/execution_engine/dynamic_blocs/__init__.py similarity index 100% rename from inference/core/workflows/core_steps/common/dynamic_blocks/__init__.py rename to inference/core/workflows/execution_engine/dynamic_blocs/__init__.py diff --git a/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py b/inference/core/workflows/execution_engine/dynamic_blocs/block_assembler.py similarity index 67% rename from inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py rename to inference/core/workflows/execution_engine/dynamic_blocs/block_assembler.py index 4f43084680..51dcc174c8 100644 --- a/inference/core/workflows/core_steps/common/dynamic_blocks/manifest_assembler.py +++ b/inference/core/workflows/execution_engine/dynamic_blocs/block_assembler.py @@ -1,14 +1,8 @@ from typing import Any, Dict, List, Literal, Optional, Tuple, Type, Union +from uuid import uuid4 from pydantic import BaseModel, ConfigDict, Field, create_model -from inference.core.workflows.core_steps.common.dynamic_blocks.entities import ( - DynamicInputDefinition, - DynamicOutputDefinition, - ManifestDescription, - SelectorType, - ValueType, -) from inference.core.workflows.entities.base import OutputDefinition from inference.core.workflows.entities.types import ( WILDCARD_KIND, @@ -17,25 +11,87 @@ WorkflowImageSelector, WorkflowParameterSelector, ) +from inference.core.workflows.execution_engine.compiler.entities import ( + BlockSpecification, +) +from inference.core.workflows.execution_engine.dynamic_blocs.block_scaffolding import ( + assembly_custom_python_block, +) +from inference.core.workflows.execution_engine.dynamic_blocs.entities import ( + BLOCK_SOURCE, + DynamicBlockDefinition, + DynamicInputDefinition, + DynamicOutputDefinition, + ManifestDescription, + SelectorType, + ValueType, +) +from inference.core.workflows.execution_engine.introspection.blocks_loader import ( + load_all_defined_kinds, +) +from inference.core.workflows.execution_engine.introspection.utils import ( + get_full_type_name, +) +from inference.core.workflows.prototypes.block import WorkflowBlockManifest + + +def compile_dynamic_blocks( + dynamic_blocks_definitions: List[dict], +) -> List[BlockSpecification]: + all_defined_kinds = load_all_defined_kinds() + kinds_lookup = {kind.name: kind for kind in all_defined_kinds} + dynamic_blocks = [ + DynamicBlockDefinition.model_validate(dynamic_block) + for dynamic_block in dynamic_blocks_definitions + ] + compiled_blocks = [] + for dynamic_block in dynamic_blocks: + block_specification = create_dynamic_block_specification( + dynamic_block_definition=dynamic_block, + kinds_lookup=kinds_lookup, + ) + compiled_blocks.append(block_specification) + return compiled_blocks + + +def create_dynamic_block_specification( + dynamic_block_definition: DynamicBlockDefinition, + kinds_lookup: Dict[str, Kind], +) -> BlockSpecification: + unique_identifier = str(uuid4()) + block_manifest = assembly_dynamic_block_manifest( + unique_identifier=unique_identifier, + manifest_description=dynamic_block_definition.manifest, + kinds_lookup=kinds_lookup, + ) + block_class = assembly_custom_python_block( + unique_identifier=unique_identifier, + manifest=block_manifest, + python_code=dynamic_block_definition.code, + ) + return BlockSpecification( + block_source=BLOCK_SOURCE, + identifier=get_full_type_name(selected_type=block_class), + block_class=block_class, + manifest_class=block_manifest, + ) 
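For orientation, a single entry of `dynamic_blocks_definitions` that `compile_dynamic_blocks` can consume might look as follows. This is a sketch built from the entities introduced later in this patch; the block type name and the function body are illustrative only:

example_dynamic_block = {
    "type": "DynamicBlockDefinition",
    "manifest": {
        "type": "ManifestDescription",
        "block_type": "DetectionsCounter",  # illustrative block type name
        "inputs": {
            "predictions": {
                "type": "DynamicInputDefinition",
                "selector_types": ["step_output"],
            },
        },
        "outputs": {"count": {"type": "DynamicOutputDefinition", "kind": []}},
    },
    "code": {
        "type": "PythonCode",
        # BlockResult is made available by the standard imports that the
        # execution engine injects into the dynamic module
        "function_code": "def run(predictions) -> BlockResult:\n"
        '    return {"count": len(predictions)}\n',
        "function_name": "run",
    },
}

Each such entry is validated into a `DynamicBlockDefinition` and compiled into a regular `BlockSpecification`, so downstream compilation treats dynamic blocks exactly like statically defined ones.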
def assembly_dynamic_block_manifest( - block_name: str, - block_type: str, + unique_identifier: str, manifest_description: ManifestDescription, - kinds_lookup: Optional[Dict[str, Kind]] = None, -) -> Type[BaseModel]: - if not kinds_lookup: - kinds_lookup = {} - model_name = create_block_type_name(block_name=block_name) + kinds_lookup: Dict[str, Kind], +) -> Type[WorkflowBlockManifest]: inputs_definitions = build_inputs( - inputs=manifest_description.inputs, kinds_lookup=kinds_lookup + block_type=manifest_description.block_type, + inputs=manifest_description.inputs, + kinds_lookup=kinds_lookup, ) manifest_class = create_model( - model_name, + f"DynamicBlockManifest[{unique_identifier}]", __config__=ConfigDict(extra="allow"), name=(str, ...), - type=(Literal[block_type], ...), + type=(Literal[manifest_description.block_type], ...), **inputs_definitions, ) outputs_definitions = build_outputs_definitions( @@ -49,13 +105,6 @@ def assembly_dynamic_block_manifest( ) -def create_block_type_name(block_name: str) -> str: - block_title = ( - block_name.strip().replace("-", " ").replace("_", " ").title().replace(" ", "") - ) - return f"DynamicBlock{block_title}" - - PYTHON_TYPES_MAPPING = { ValueType.ANY: Any, ValueType.INTEGER: int, @@ -68,29 +117,40 @@ def create_block_type_name(block_name: str) -> str: def build_inputs( + block_type: str, inputs: Dict[str, DynamicInputDefinition], kinds_lookup: Dict[str, Kind], ) -> Dict[str, Tuple[type, Field]]: result = {} for input_name, input_definition in inputs.items(): result[input_name] = build_input( - input_definition=input_definition, kinds_lookup=kinds_lookup + block_type=block_type, + input_name=input_name, + input_definition=input_definition, + kinds_lookup=kinds_lookup, ) return result def build_input( + block_type: str, + input_name: str, input_definition: DynamicInputDefinition, kinds_lookup: Dict[str, Kind], ) -> Tuple[type, Field]: input_type = build_input_field_type( - input_definition=input_definition, kinds_lookup=kinds_lookup + block_type=block_type, + input_name=input_name, + input_definition=input_definition, + kinds_lookup=kinds_lookup, ) field_metadata = build_input_field_metadata(input_definition=input_definition) return input_type, field_metadata def build_input_field_type( + block_type: str, + input_name: str, input_definition: DynamicInputDefinition, kinds_lookup: Dict[str, Kind], ) -> type: @@ -102,7 +162,10 @@ def build_input_field_type( input_definition=input_definition ) if not input_type_union_elements: - input_type_union_elements.append(Any) + raise ValueError( + f"There is no definition of input type found for property: {input_name} of " + f"dynamic block {block_type}." + ) if len(input_type_union_elements) > 1: input_type = Union[tuple(input_type_union_elements)] else: @@ -123,7 +186,12 @@ def collect_python_types_for_selectors( ) selector_kind = [] for kind_name in selector_kind_names: - selector_kind.append(kinds_lookup.get(kind_name, Kind(name=kind_name))) + if kind_name not in kinds_lookup: + raise ValueError( + f"Could not find kind with name {kind_name} within kinds " + f"recognised by Execution Engine: {list(kinds_lookup.keys())}." 
+ ) + selector_kind.append(kinds_lookup[kind_name]) if selector_type is SelectorType.INPUT_IMAGE: result.append(WorkflowImageSelector) elif selector_type is SelectorType.INPUT_PARAMETER: @@ -144,6 +212,8 @@ def collect_python_types_for_values( def build_input_field_metadata(input_definition: DynamicInputDefinition) -> Field: + if not input_definition.has_default_value: + return Field() default_value = input_definition.default_value field_metadata_params = {} if default_holds_compound_object(default_value=default_value): @@ -193,7 +263,7 @@ def assembly_manifest_class_methods( manifest_class: Type[BaseModel], outputs_definitions: List[OutputDefinition], manifest_description: ManifestDescription, -) -> Type[BaseModel]: +) -> Type[WorkflowBlockManifest]: describe_outputs = lambda cls: outputs_definitions setattr(manifest_class, "describe_outputs", classmethod(describe_outputs)) setattr(manifest_class, "get_actual_outputs", describe_outputs) diff --git a/inference/core/workflows/core_steps/dynamic_blocs/python_code.py b/inference/core/workflows/execution_engine/dynamic_blocs/block_scaffolding.py similarity index 55% rename from inference/core/workflows/core_steps/dynamic_blocs/python_code.py rename to inference/core/workflows/execution_engine/dynamic_blocs/block_scaffolding.py index d1ec3c40c1..bbd02f28ce 100644 --- a/inference/core/workflows/core_steps/dynamic_blocs/python_code.py +++ b/inference/core/workflows/execution_engine/dynamic_blocs/block_scaffolding.py @@ -1,15 +1,7 @@ import types -from typing import List, Literal, Type -from uuid import uuid4 +from typing import List, Type -from inference.core.workflows.core_steps.common.dynamic_blocks.entities import ( - ManifestDescription, -) -from inference.core.workflows.core_steps.common.dynamic_blocks.manifest_assembler import ( - assembly_dynamic_block_manifest, -) -from inference.core.workflows.entities.base import OutputDefinition -from inference.core.workflows.entities.types import WILDCARD_KIND +from inference.core.workflows.execution_engine.dynamic_blocs.entities import PythonCode from inference.core.workflows.prototypes.block import ( BlockResult, WorkflowBlock, @@ -26,35 +18,20 @@ ] -class CustomPythonDeclaredManifest(WorkflowBlockManifest): - name: str - type: Literal["CustomPython"] - manifest_description: ManifestDescription - python_code: str - function_name: str - - @classmethod - def describe_outputs(cls) -> List[OutputDefinition]: - return [OutputDefinition(name="*", kind=[WILDCARD_KIND])] - - def assembly_custom_python_block( - declared_manifest: CustomPythonDeclaredManifest, + unique_identifier: str, + manifest: Type[WorkflowBlockManifest], + python_code: PythonCode, ) -> Type[WorkflowBlock]: - actual_manifest = assembly_dynamic_block_manifest( - block_name=declared_manifest.name, - block_type=declared_manifest.type, - manifest_description=declared_manifest.manifest_description, - ) code_module = create_dynamic_module( - code=declared_manifest.python_code, - module_name=f"dynamic_module_{uuid4()}", + code=python_code.function_code, + module_name=f"dynamic_module_{unique_identifier}", ) - if not hasattr(code_module, declared_manifest.function_name): + if not hasattr(code_module, python_code.function_name): raise ValueError( - f"Cannot find function: {declared_manifest.function_name} in declared code." + f"Cannot find function: {python_code.function_name} in declared code." 
        )
-    run_function = getattr(code_module, declared_manifest.function_name)
+    run_function = getattr(code_module, python_code.function_name)
 
     async def run(self, *args, **kwargs) -> BlockResult:
         if not self._allow_custom_python_execution:
@@ -73,10 +50,10 @@ def get_init_parameters(cls) -> List[str]:
 
     @classmethod
     def get_manifest(cls) -> Type[WorkflowBlockManifest]:
-        return actual_manifest
+        return manifest
 
     return type(
-        f"CustomPythonBlock-{uuid4()}",
+        f"DynamicBlock[{unique_identifier}]",
         (WorkflowBlock,),
         {
             "__init__": constructor,
diff --git a/inference/core/workflows/execution_engine/dynamic_blocs/entities.py b/inference/core/workflows/execution_engine/dynamic_blocs/entities.py
new file mode 100644
index 0000000000..0e8ed2771c
--- /dev/null
+++ b/inference/core/workflows/execution_engine/dynamic_blocs/entities.py
@@ -0,0 +1,130 @@
+from enum import Enum
+from typing import Any, Dict, List, Literal
+
+from pydantic import BaseModel, Field
+
+
+class SelectorType(Enum):
+    INPUT_IMAGE = "input_image"
+    INPUT_PARAMETER = "input_parameter"
+    STEP_OUTPUT = "step_output"
+
+
+class ValueType(Enum):
+    ANY = "any"
+    INTEGER = "integer"
+    FLOAT = "float"
+    BOOLEAN = "boolean"
+    DICT = "dict"
+    LIST = "list"
+    STRING = "string"
+
+
+class DynamicInputDefinition(BaseModel):
+    type: Literal["DynamicInputDefinition"]
+    has_default_value: bool = Field(
+        default=False,
+        description="Flag to decide if default value is provided for input",
+    )
+    default_value: Any = Field(
+        description="Definition of default value for a field. Use in combination with "
+        "`has_default_value` to decide on default value if field is optional.",
+        default=None,
+    )
+    is_optional: bool = Field(
+        description="Flag deciding if `default_value` will be added for manifest field annotation.",
+        default=False,
+    )
+    is_dimensionality_reference: bool = Field(
+        default=False,
+        description="Flag deciding if declared property holds dimensionality reference - see how "
+        "dimensionality works for statically defined blocks to discover meaning of the "
+        "parameter.",
+    )
+    dimensionality_offset: int = Field(
+        default=0,
+        ge=-1,
+        le=1,
+        description="Accepted dimensionality offset for parameter. Dimensionality works the same as for "
+        "traditional workflows blocks.",
+    )
+    selector_types: List[SelectorType] = Field(
+        default_factory=list,
+        description="Union of selector types accepted by input. Should be empty if field does not accept "
+        "selectors.",
+    )
+    selector_data_kind: Dict[SelectorType, List[str]] = Field(
+        default_factory=dict,
+        description="Mapping of `selector_types` into names of kinds to be compatible. "
+        "Empty dict (default value) means wildcard kind for all selectors. If name of kind given - "
+        "must be a valid kind, known to the workflow execution engine.",
+    )
+    value_types: List[ValueType] = Field(
+        default_factory=list,
+        description="List of types representing union of types for static values (non selectors) "
+        "that shall be accepted for input field. Empty list represents no value types allowed.",
+    )
+
+
+class DynamicOutputDefinition(BaseModel):
+    type: Literal["DynamicOutputDefinition"]
+    kind: List[str] = Field(
+        default_factory=list,
+        description="List representing union of kinds for defined output",
+    )
+
+
+class ManifestDescription(BaseModel):
+    type: Literal["ManifestDescription"]
+    block_type: str = Field(
+        description="Field holds type of the block to be dynamically created. Block can be initialised "
+        "as a step using the type declared in the field."
+    )
+    inputs: Dict[str, DynamicInputDefinition] = Field(
+        description="Mapping name -> input definition for block inputs (parameters for run() function of "
+        "dynamic block)"
+    )
+    outputs: Dict[str, DynamicOutputDefinition] = Field(
+        default_factory=dict,
+        description="Mapping name -> output kind for block outputs.",
+    )
+    output_dimensionality_offset: int = Field(
+        default=0, ge=-1, le=1, description="Definition of output dimensionality offset"
+    )
+    accepts_batch_input: bool = Field(
+        default=False,
+        description="Flag to decide if function will be provided with batch data as a whole or with singular "
+        "batch elements during execution",
+    )
+    accepts_empty_values: bool = Field(
+        default=False,
+        description="Flag to decide if empty (optional) values will be shipped as run() function parameters",
+    )
+
+
+class PythonCode(BaseModel):
+    type: Literal["PythonCode"]
+    function_code: str = Field(
+        description="Code of a Python function. Content should be properly formatted, including indentation. "
+        "The workflows execution engine will create a dynamic module with the provided function, ensuring "
+        "imports of the following symbols: [Any, List, Dict, Set, sv, np, math, Batch, "
+        "WorkflowImageData, BlockResult]"
+    )
+    function_name: str = Field(
+        default="run", description="Name of the function shipped in `function_code`."
+    )
+
+
+class DynamicBlockDefinition(BaseModel):
+    type: Literal["DynamicBlockDefinition"]
+    manifest: ManifestDescription = Field(
+        description="Definition of manifest for dynamic block to be created at runtime by "
+        "the workflows execution engine."
+    )
+    code: PythonCode = Field(
+        description="Code to be executed in run(...) method of block that will be dynamically "
+        "created."
+    )
+
+
+BLOCK_SOURCE = "dynamic_workflows_blocks"
diff --git a/inference/core/workflows/execution_engine/dynamic_blocs/loader.py b/inference/core/workflows/execution_engine/dynamic_blocs/loader.py
new file mode 100644
index 0000000000..957335911b
--- /dev/null
+++ b/inference/core/workflows/execution_engine/dynamic_blocs/loader.py
@@ -0,0 +1,12 @@
+from typing import Any, Callable, Dict, Union
+
+from inference.core.env import ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS
+from inference.core.workflows.execution_engine.dynamic_blocs.entities import (
+    BLOCK_SOURCE,
+)
+
+
+def load_dynamic_blocks_initializers() -> Dict[str, Union[Any, Callable[[None], Any]]]:
+    return {
+        f"{BLOCK_SOURCE}.allow_custom_python_execution": ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS
+    }
diff --git a/inference/core/workflows/execution_engine/introspection/blocks_loader.py b/inference/core/workflows/execution_engine/introspection/blocks_loader.py
index e9f805c709..99faa6f778 100644
--- a/inference/core/workflows/execution_engine/introspection/blocks_loader.py
+++ b/inference/core/workflows/execution_engine/introspection/blocks_loader.py
@@ -4,19 +4,23 @@
 from collections import Counter
 from typing import Any, Callable, Dict, List, Union
 
-from inference.core.workflows.core_steps.loader import load_blocks, load_kinds, REGISTERED_INITIALIZERS
+from inference.core.workflows.core_steps.loader import (
+    REGISTERED_INITIALIZERS,
+    load_blocks,
+    load_kinds,
+)
 from inference.core.workflows.entities.types import Kind
 from inference.core.workflows.errors import PluginInterfaceError, PluginLoadingError
 from inference.core.workflows.execution_engine.compiler.entities import (
     BlockSpecification,
 )
+from inference.core.workflows.execution_engine.dynamic_blocs.loader import (
+    load_dynamic_blocks_initializers,
+)
 from
inference.core.workflows.execution_engine.introspection.entities import ( BlockDescription, BlocksDescription, ) -from inference.core.workflows.execution_engine.introspection.schema_parser import ( - retrieve_selectors_from_schema, -) from inference.core.workflows.execution_engine.introspection.utils import ( build_human_friendly_block_name, get_full_type_name, @@ -147,16 +151,16 @@ def _load_blocks_from_plugin(plugin_name: str) -> List[BlockSpecification]: if not isinstance(blocks, list): raise PluginInterfaceError( public_message=f"Provided workflow plugin `{plugin_name}` implement `load_blocks()` function " - f"incorrectly. Expected to return list of entries being subclass of `WorkflowBlock`, " - f"but entry of different characteristics found: {type(blocks)}.", + f"incorrectly. Expected to return list of entries being subclass of `WorkflowBlock`, " + f"but entry of different characteristics found: {type(blocks)}.", context="workflow_compilation | blocks_loading", ) for i, block in enumerate(blocks): if not isinstance(block, type) or not issubclass(block, WorkflowBlock): raise PluginInterfaceError( public_message=f"Provided workflow plugin `{plugin_name}` implement `load_blocks()` function " - f"incorrectly. Expected to return list of entries being subclass of `WorkflowBlock`, " - f"but entry of different characteristics found: {block} at position: {i}.", + f"incorrectly. Expected to return list of entries being subclass of `WorkflowBlock`, " + f"but entry of different characteristics found: {block} at position: {i}.", context="workflow_compilation | blocks_loading", ) if block in already_spotted_blocks: @@ -176,6 +180,7 @@ def _load_blocks_from_plugin(plugin_name: str) -> List[BlockSpecification]: def load_initializers() -> Dict[str, Union[Any, Callable[[None], Any]]]: plugins_to_load = get_plugin_modules() result = load_core_blocks_initializers() + result.update(load_dynamic_blocks_initializers()) for plugin_name in plugins_to_load: result.update(load_initializers_from_plugin(plugin_name=plugin_name)) return result @@ -290,14 +295,14 @@ def load_plugin_kinds(plugin_name: str) -> List[Kind]: except ImportError as e: raise PluginLoadingError( public_message=f"It is not possible to load kinds from workflow plugin `{plugin_name}`. " - f"Make sure the library providing custom step is correctly installed in Python environment.", + f"Make sure the library providing custom step is correctly installed in Python environment.", context="workflow_compilation | blocks_loading", inner_error=e, ) from e except AttributeError as e: raise PluginInterfaceError( public_message=f"Provided workflow plugin `{plugin_name}` do not implement blocks loading " - f"interface correctly and cannot be loaded.", + f"interface correctly and cannot be loaded.", context="workflow_compilation | blocks_loading", inner_error=e, ) from e @@ -318,8 +323,8 @@ def _load_plugin_kinds(plugin_name: str) -> List[Kind]: if not isinstance(kinds, list) or not all(isinstance(e, Kind) for e in kinds): raise PluginInterfaceError( public_message=f"Provided workflow plugin `{plugin_name}` do not implement blocks loading " - f"interface correctly and cannot be loaded. Return value of `load_kinds()` " - f"is not list of objects `Kind`.", + f"interface correctly and cannot be loaded. 
Return value of `load_kinds()` " + f"is not list of objects `Kind`.", context="workflow_compilation | blocks_loading", ) return kinds diff --git a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py index 50b978d764..d5c841bf6e 100644 --- a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py +++ b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py @@ -7,7 +7,7 @@ from inference.core.workflows.execution_engine.core import ExecutionEngine FUNCTION_TO_GET_OVERLAP_OF_BBOXES = """ -def function(predictions: sv.Detections, class_x: str, class_y: str) -> BlockResult: +def run(predictions: sv.Detections, class_x: str, class_y: str) -> BlockResult: bboxes_class_x = predictions[predictions.data["class_name"] == class_x] bboxes_class_y = predictions[predictions.data["class_name"] == class_y] overlap = [] @@ -33,7 +33,7 @@ def function(predictions: sv.Detections, class_x: str, class_y: str) -> BlockRes FUNCTION_TO_GET_MAXIMUM_OVERLAP = """ -def function(overlaps: List[List[float]]) -> BlockResult: +def run(overlaps: List[List[float]]) -> BlockResult: max_value = -1 for overlap in overlaps: for overlap_value in overlap: @@ -49,33 +49,67 @@ def function(overlaps: List[List[float]]) -> BlockResult: "inputs": [ {"type": "WorkflowImage", "name": "image"}, ], - "steps": [ + "dynamic_blocks_definitions": [ { - "type": "RoboflowObjectDetectionModel", - "name": "model", - "image": "$inputs.image", - "model_id": "yolov8n-640", - }, - { - "type": "CustomPython", - "name": "overlap_measurement", - "manifest_description": { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "OverlapMeasurement", "inputs": { "predictions": { + "type": "DynamicInputDefinition", "selector_types": ["step_output"], - "default_value": "$steps.model.predictions", }, "class_x": { - "default_value": "dog", + "type": "DynamicInputDefinition", + "value_types": ["string"], }, "class_y": { - "default_value": "dog", + "type": "DynamicInputDefinition", + "value_types": ["string"], + }, + }, + "outputs": {"overlap": {"type": "DynamicOutputDefinition", "kind": []}}, + }, + "code": { + "type": "PythonCode", + "function_code": FUNCTION_TO_GET_OVERLAP_OF_BBOXES, + }, + }, + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "MaximumOverlap", + "inputs": { + "overlaps": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], }, }, - "outputs": {"overlap": {"kind": []}}, + "outputs": { + "max_value": {"type": "DynamicOutputDefinition", "kind": []} + }, }, - "python_code": FUNCTION_TO_GET_OVERLAP_OF_BBOXES, - "function_name": "function", + "code": { + "type": "PythonCode", + "function_code": FUNCTION_TO_GET_MAXIMUM_OVERLAP, + }, + }, + ], + "steps": [ + { + "type": "RoboflowObjectDetectionModel", + "name": "model", + "image": "$inputs.image", + "model_id": "yolov8n-640", + }, + { + "type": "OverlapMeasurement", + "name": "overlap_measurement", + "predictions": "$steps.model.predictions", + "class_x": "dog", + "class_y": "dog", }, { "type": "ContinueIf", @@ -102,19 +136,9 @@ def function(overlaps: List[List[float]]) -> BlockResult: "next_steps": ["$steps.maximum_overlap"], }, { - "type": "CustomPython", + "type": "MaximumOverlap", "name": "maximum_overlap", - "manifest_description": { - "inputs": { - "overlaps": { - "selector_types": 
["step_output"], - "default_value": "$steps.overlap_measurement.overlap", - }, - }, - "outputs": {"max_value": {"kind": []}}, - }, - "python_code": FUNCTION_TO_GET_MAXIMUM_OVERLAP, - "function_name": "function", + "overlaps": "$steps.overlap_measurement.overlap", }, ], "outputs": [ diff --git a/tests/workflows/unit_tests/core_steps/formatters/test_property_extraction.py b/tests/workflows/unit_tests/core_steps/formatters/test_property_extraction.py index 013bdb7ebc..db08c2cb9a 100644 --- a/tests/workflows/unit_tests/core_steps/formatters/test_property_extraction.py +++ b/tests/workflows/unit_tests/core_steps/formatters/test_property_extraction.py @@ -2,8 +2,8 @@ from inference.core.entities.responses.inference import ( ClassificationInferenceResponse, - InferenceResponseImage, ClassificationPrediction, + InferenceResponseImage, ) from inference.core.workflows.core_steps.common.query_language.entities.operations import ( OperationsChain, diff --git a/tests/workflows/unit_tests/core_steps/fusion/test_detections_classes_replacement.py b/tests/workflows/unit_tests/core_steps/fusion/test_detections_classes_replacement.py index 944a9ec1de..a4d9a9367a 100644 --- a/tests/workflows/unit_tests/core_steps/fusion/test_detections_classes_replacement.py +++ b/tests/workflows/unit_tests/core_steps/fusion/test_detections_classes_replacement.py @@ -1,15 +1,20 @@ import numpy as np import pytest - import supervision as sv from supervision.config import CLASS_NAME_DATA_FIELD -from inference.core.entities.responses.inference import MultiLabelClassificationInferenceResponse, \ - InferenceResponseImage, MultiLabelClassificationPrediction, ClassificationInferenceResponse, \ - ClassificationPrediction +from inference.core.entities.responses.inference import ( + ClassificationInferenceResponse, + ClassificationPrediction, + InferenceResponseImage, + MultiLabelClassificationInferenceResponse, + MultiLabelClassificationPrediction, +) from inference.core.workflows.constants import DETECTION_ID_KEY -from inference.core.workflows.core_steps.fusion.detections_classes_replacement import DetectionsClassesReplacementBlock, \ - extract_leading_class_from_prediction +from inference.core.workflows.core_steps.fusion.detections_classes_replacement import ( + DetectionsClassesReplacementBlock, + extract_leading_class_from_prediction, +) from inference.core.workflows.entities.base import Batch @@ -25,7 +30,9 @@ async def test_classes_replacement_when_object_detection_object_is_none() -> Non ) # then - assert result == {"predictions": None}, "object_detection_predictions is superior object so lack of value means lack of output" + assert result == { + "predictions": None + }, "object_detection_predictions is superior object so lack of value means lack of output" @pytest.mark.asyncio @@ -43,24 +50,30 @@ async def test_classes_replacement_when_there_are_no_predictions_is_none() -> No ) # then - assert result == {"predictions": sv.Detections.empty()}, "classification_predictions is inferior object so lack of value means empty output" + assert result == { + "predictions": sv.Detections.empty() + }, "classification_predictions is inferior object so lack of value means empty output" @pytest.mark.asyncio -async def test_classes_replacement_when_replacement_to_happen_without_filtering_for_multi_label_results() -> None: +async def test_classes_replacement_when_replacement_to_happen_without_filtering_for_multi_label_results() -> ( + None +): # given step = DetectionsClassesReplacementBlock() detections = sv.Detections( - xyxy=np.array([ - 
[10, 20, 30, 40], - [11, 21, 31, 41], - ]), + xyxy=np.array( + [ + [10, 20, 30, 40], + [11, 21, 31, 41], + ] + ), class_id=np.array([7, 7]), confidence=np.array([0.36, 0.91]), data={ "class_name": np.array(["animal", "animal"]), - "detection_id": np.array(["zero", "one"]) - } + "detection_id": np.array(["zero", "one"]), + }, ) first_cls_prediction = MultiLabelClassificationInferenceResponse( image=InferenceResponseImage(width=128, height=256), @@ -85,7 +98,7 @@ async def test_classes_replacement_when_replacement_to_happen_without_filtering_ first_cls_prediction, second_cls_prediction, ], - indices=[(0, 0), (0, 1)] + indices=[(0, 0), (0, 1)], ) # when @@ -95,28 +108,44 @@ async def test_classes_replacement_when_replacement_to_happen_without_filtering_ ) # then - assert np.allclose(result["predictions"].xyxy, np.array([[10, 20, 30, 40], [11, 21, 31, 41]])), "Expected coordinates not to be touched" - assert np.allclose(result["predictions"].confidence, np.array([0.6, 0.4])), "Expected to choose [cat, dog] confidences" - assert np.allclose(result["predictions"].class_id, np.array([0, 1])), "Expected to choose [cat, dog] class ids" - assert result["predictions"].data["class_name"].tolist() == ["cat", "dog"], "Expected cat class to be assigned" - assert result["predictions"].data["detection_id"].tolist() != ["zero", "one"], "Expected to generate new detection id" + assert np.allclose( + result["predictions"].xyxy, np.array([[10, 20, 30, 40], [11, 21, 31, 41]]) + ), "Expected coordinates not to be touched" + assert np.allclose( + result["predictions"].confidence, np.array([0.6, 0.4]) + ), "Expected to choose [cat, dog] confidences" + assert np.allclose( + result["predictions"].class_id, np.array([0, 1]) + ), "Expected to choose [cat, dog] class ids" + assert result["predictions"].data["class_name"].tolist() == [ + "cat", + "dog", + ], "Expected cat class to be assigned" + assert result["predictions"].data["detection_id"].tolist() != [ + "zero", + "one", + ], "Expected to generate new detection id" @pytest.mark.asyncio -async def test_classes_replacement_when_replacement_to_happen_without_filtering_for_multi_class_results() -> None: +async def test_classes_replacement_when_replacement_to_happen_without_filtering_for_multi_class_results() -> ( + None +): # given step = DetectionsClassesReplacementBlock() detections = sv.Detections( - xyxy=np.array([ - [10, 20, 30, 40], - [11, 21, 31, 41], - ]), + xyxy=np.array( + [ + [10, 20, 30, 40], + [11, 21, 31, 41], + ] + ), class_id=np.array([7, 7]), confidence=np.array([0.36, 0.91]), data={ "class_name": np.array(["animal", "animal"]), - "detection_id": np.array(["zero", "one"]) - } + "detection_id": np.array(["zero", "one"]), + }, ) first_cls_prediction = ClassificationInferenceResponse( image=InferenceResponseImage(width=128, height=256), @@ -153,7 +182,7 @@ async def test_classes_replacement_when_replacement_to_happen_without_filtering_ first_cls_prediction, second_cls_prediction, ], - indices=[(0, 0), (0, 1)] + indices=[(0, 0), (0, 1)], ) # when @@ -163,28 +192,44 @@ async def test_classes_replacement_when_replacement_to_happen_without_filtering_ ) # then - assert np.allclose(result["predictions"].xyxy, np.array([[10, 20, 30, 40], [11, 21, 31, 41]])), "Expected coordinates not to be touched" - assert np.allclose(result["predictions"].confidence, np.array([0.6, 0.6])), "Expected to choose [cat, dog] confidences" - assert np.allclose(result["predictions"].class_id, np.array([0, 1])), "Expected to choose [cat, dog] class ids" - assert 
result["predictions"].data["class_name"].tolist() == ["cat", "dog"], "Expected cat class to be assigned"
-    assert result["predictions"].data["detection_id"].tolist() != ["zero", "one"], "Expected to generate new detection id"
+    assert np.allclose(
+        result["predictions"].xyxy, np.array([[10, 20, 30, 40], [11, 21, 31, 41]])
+    ), "Expected coordinates not to be touched"
+    assert np.allclose(
+        result["predictions"].confidence, np.array([0.6, 0.6])
+    ), "Expected to choose [cat, dog] confidences"
+    assert np.allclose(
+        result["predictions"].class_id, np.array([0, 1])
+    ), "Expected to choose [cat, dog] class ids"
+    assert result["predictions"].data["class_name"].tolist() == [
+        "cat",
+        "dog",
+    ], "Expected cat class to be assigned"
+    assert result["predictions"].data["detection_id"].tolist() != [
+        "zero",
+        "one",
+    ], "Expected to generate new detection id"
 
 
 @pytest.mark.asyncio
-async def test_classes_replacement_when_replacement_to_happen_and_one_result_to_be_filtered_out() -> None:
+async def test_classes_replacement_when_replacement_to_happen_and_one_result_to_be_filtered_out() -> (
+    None
+):
     # given
     step = DetectionsClassesReplacementBlock()
     detections = sv.Detections(
-        xyxy=np.array([
-            [10, 20, 30, 40],
-            [11, 21, 31, 41],
-        ]),
+        xyxy=np.array(
+            [
+                [10, 20, 30, 40],
+                [11, 21, 31, 41],
+            ]
+        ),
         class_id=np.array([7, 7]),
         confidence=np.array([0.36, 0.91]),
         data={
             "class_name": np.array(["animal", "animal"]),
-            "detection_id": np.array(["zero", "one"])
-        }
+            "detection_id": np.array(["zero", "one"]),
+        },
     )
     first_cls_prediction = MultiLabelClassificationInferenceResponse(
         image=InferenceResponseImage(width=128, height=256),
@@ -200,7 +245,7 @@ async def test_classes_replacement_when_replacement_to_happen_and_one_result_to_
             first_cls_prediction,
             None,
         ],
-        indices=[(0, 0), (0, 1)]
+        indices=[(0, 0), (0, 1)],
     )
 
     # when
@@ -210,13 +255,27 @@ async def test_classes_replacement_when_replacement_to_happen_and_one_result_to_
     )
 
     # then
-    assert len(result["predictions"]) == 1, "Expected only one bbox left, as there was no cls result for second bbox"
-    assert np.allclose(result["predictions"].xyxy, np.array([[10, 20, 30, 40]])), "Expected first bbox to be left"
-    assert np.allclose(result["predictions"].confidence, np.array([0.6])), "Expected to choose cat confidence"
-    assert np.allclose(result["predictions"].class_id, np.array([0])), "Expected to choose cat class id"
-    assert result["predictions"].data["class_name"].tolist() == ["cat"], "Expected cat class to be assigned"
-    assert len(result["predictions"].data["detection_id"]) == 1, "Expected only single detection_id"
-    assert result["predictions"].data["detection_id"].tolist() != ["zero"], "Expected to generate new detection id"
+    assert (
+        len(result["predictions"]) == 1
+    ), "Expected only one bbox left, as there was no cls result for second bbox"
+    assert np.allclose(
+        result["predictions"].xyxy, np.array([[10, 20, 30, 40]])
+    ), "Expected first bbox to be left"
+    assert np.allclose(
+        result["predictions"].confidence, np.array([0.6])
+    ), "Expected to choose cat confidence"
+    assert np.allclose(
+        result["predictions"].class_id, np.array([0])
+    ), "Expected to choose cat class id"
+    assert result["predictions"].data["class_name"].tolist() == [
+        "cat"
+    ], "Expected cat class to be assigned"
+    assert (
+        len(result["predictions"].data["detection_id"]) == 1
+    ), "Expected only single detection_id"
+    assert result["predictions"].data["detection_id"].tolist() != [
+        "zero"
+    ], "Expected to generate new detection id"


def
test_extract_leading_class_from_prediction_when_prediction_is_multi_label() -> None: @@ -243,7 +302,9 @@ def test_extract_leading_class_from_prediction_when_prediction_is_multi_label() assert result == ("cat", 0, 0.6) -def test_extract_leading_class_from_prediction_when_prediction_is_faulty_multi_label() -> None: +def test_extract_leading_class_from_prediction_when_prediction_is_faulty_multi_label() -> ( + None +): # given prediction = ClassificationInferenceResponse( image=InferenceResponseImage(width=128, height=256), @@ -265,7 +326,9 @@ def test_extract_leading_class_from_prediction_when_prediction_is_faulty_multi_l _ = extract_leading_class_from_prediction(prediction=prediction) -def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_with_predicted_classes() -> None: +def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_with_predicted_classes() -> ( + None +): # given prediction = MultiLabelClassificationInferenceResponse( image=InferenceResponseImage(width=128, height=256), @@ -283,7 +346,9 @@ def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_wi assert result == ("cat", 0, 0.6) -def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_without_predicted_classes() -> None: +def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_without_predicted_classes() -> ( + None +): # given prediction = MultiLabelClassificationInferenceResponse( image=InferenceResponseImage(width=128, height=256), @@ -301,12 +366,13 @@ def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_wi assert result is None -def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_without_classes_defined() -> None: +def test_extract_leading_class_from_prediction_when_prediction_is_multi_class_without_classes_defined() -> ( + None +): # given prediction = MultiLabelClassificationInferenceResponse( image=InferenceResponseImage(width=128, height=256), - predictions={ - }, + predictions={}, predicted_classes=[], ).dict(by_alias=True, exclude_none=True) diff --git a/tests/workflows/unit_tests/core_steps/fusion/test_domension_collapse.py b/tests/workflows/unit_tests/core_steps/fusion/test_domension_collapse.py index 1b7907a829..209d6e4be2 100644 --- a/tests/workflows/unit_tests/core_steps/fusion/test_domension_collapse.py +++ b/tests/workflows/unit_tests/core_steps/fusion/test_domension_collapse.py @@ -1,6 +1,8 @@ import pytest -from inference.core.workflows.core_steps.fusion.dimension_collapse import DimensionCollapseBlock +from inference.core.workflows.core_steps.fusion.dimension_collapse import ( + DimensionCollapseBlock, +) from inference.core.workflows.entities.base import Batch @@ -8,10 +10,7 @@ async def test_dimension_collapse() -> None: # given step = DimensionCollapseBlock() - data = Batch( - content=[1, 2, 3, 4], - indices=[(0, 1), (0, 2), (0, 3), (0, 4)] - ) + data = Batch(content=[1, 2, 3, 4], indices=[(0, 1), (0, 2), (0, 3), (0, 4)]) # when result = await step.run(data=data) diff --git a/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py b/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py index 662e2a9fc4..d7fba8f6de 100644 --- a/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py +++ b/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py @@ -174,7 +174,7 @@ def test_load_initializers_when_plugin_exists_and_initializers_provided() -> Non result 
= load_initializers() # then - assert len(result) == 2 + assert len(result) == 6 assert ( result[ "tests.workflows.unit_tests.execution_engine.introspection.plugin_with_initializers.a" @@ -207,7 +207,7 @@ def test_describe_available_blocks_when_valid_plugins_are_loaded( assert result.blocks[0].manifest_class == plugin_with_valid_blocks.Block1Manifest assert result.blocks[1].block_class == plugin_with_valid_blocks.Block2 assert result.blocks[1].manifest_class == plugin_with_valid_blocks.Block2Manifest - assert len(result.declared_kinds) == 3 + assert len(result.declared_kinds) == 33 @mock.patch.object(blocks_loader, "load_workflow_blocks") From 883196329e0f75ffbe865895d5e6ff8e9f73f15e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Tue, 16 Jul 2024 16:26:19 +0200 Subject: [PATCH 06/15] Change Python block according to suggestions --- .../execution_engine/compiler/core.py | 2 +- .../compiler/syntactic_parser.py | 2 +- .../__init__.py | 0 .../block_assembler.py | 8 +- .../dynamic_blocks/block_scaffolding.py | 112 +++ .../entities.py | 27 +- .../loader.py | 2 +- .../dynamic_blocs/block_scaffolding.py | 71 -- .../introspection/blocks_loader.py | 2 +- .../test_workflow_with_custom_python_block.py | 736 +++++++++++++++++- 10 files changed, 875 insertions(+), 87 deletions(-) rename inference/core/workflows/execution_engine/{dynamic_blocs => dynamic_blocks}/__init__.py (100%) rename inference/core/workflows/execution_engine/{dynamic_blocs => dynamic_blocks}/block_assembler.py (96%) create mode 100644 inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py rename inference/core/workflows/execution_engine/{dynamic_blocs => dynamic_blocks}/entities.py (81%) rename inference/core/workflows/execution_engine/{dynamic_blocs => dynamic_blocks}/loader.py (81%) delete mode 100644 inference/core/workflows/execution_engine/dynamic_blocs/block_scaffolding.py diff --git a/inference/core/workflows/execution_engine/compiler/core.py b/inference/core/workflows/execution_engine/compiler/core.py index 2b0c53ec4f..1cb169b23b 100644 --- a/inference/core/workflows/execution_engine/compiler/core.py +++ b/inference/core/workflows/execution_engine/compiler/core.py @@ -22,7 +22,7 @@ validate_workflow_specification, ) from inference.core.workflows.execution_engine.debugger.core import dump_execution_graph -from inference.core.workflows.execution_engine.dynamic_blocs.block_assembler import ( +from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import ( compile_dynamic_blocks, ) from inference.core.workflows.execution_engine.introspection.blocks_loader import ( diff --git a/inference/core/workflows/execution_engine/compiler/syntactic_parser.py b/inference/core/workflows/execution_engine/compiler/syntactic_parser.py index 859d4a90ec..17898d7ccc 100644 --- a/inference/core/workflows/execution_engine/compiler/syntactic_parser.py +++ b/inference/core/workflows/execution_engine/compiler/syntactic_parser.py @@ -10,7 +10,7 @@ BlockSpecification, ParsedWorkflowDefinition, ) -from inference.core.workflows.execution_engine.dynamic_blocs.entities import ( +from inference.core.workflows.execution_engine.dynamic_blocks.entities import ( DynamicBlockDefinition, ) from inference.core.workflows.execution_engine.introspection.blocks_loader import ( diff --git a/inference/core/workflows/execution_engine/dynamic_blocs/__init__.py b/inference/core/workflows/execution_engine/dynamic_blocks/__init__.py similarity index 100% rename from 
inference/core/workflows/execution_engine/dynamic_blocs/__init__.py rename to inference/core/workflows/execution_engine/dynamic_blocks/__init__.py diff --git a/inference/core/workflows/execution_engine/dynamic_blocs/block_assembler.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py similarity index 96% rename from inference/core/workflows/execution_engine/dynamic_blocs/block_assembler.py rename to inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py index 51dcc174c8..03e6f4089a 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocs/block_assembler.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py @@ -7,6 +7,7 @@ from inference.core.workflows.entities.types import ( WILDCARD_KIND, Kind, + StepOutputImageSelector, StepOutputSelector, WorkflowImageSelector, WorkflowParameterSelector, @@ -14,10 +15,10 @@ from inference.core.workflows.execution_engine.compiler.entities import ( BlockSpecification, ) -from inference.core.workflows.execution_engine.dynamic_blocs.block_scaffolding import ( +from inference.core.workflows.execution_engine.dynamic_blocks.block_scaffolding import ( assembly_custom_python_block, ) -from inference.core.workflows.execution_engine.dynamic_blocs.entities import ( +from inference.core.workflows.execution_engine.dynamic_blocks.entities import ( BLOCK_SOURCE, DynamicBlockDefinition, DynamicInputDefinition, @@ -65,6 +66,7 @@ def create_dynamic_block_specification( kinds_lookup=kinds_lookup, ) block_class = assembly_custom_python_block( + block_type_name=dynamic_block_definition.manifest.block_type, unique_identifier=unique_identifier, manifest=block_manifest, python_code=dynamic_block_definition.code, @@ -194,6 +196,8 @@ def collect_python_types_for_selectors( selector_kind.append(kinds_lookup[kind_name]) if selector_type is SelectorType.INPUT_IMAGE: result.append(WorkflowImageSelector) + elif selector_type is SelectorType.STEP_OUTPUT_IMAGE: + result.append(StepOutputImageSelector) elif selector_type is SelectorType.INPUT_PARAMETER: result.append(WorkflowParameterSelector(kind=selector_kind)) else: diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py new file mode 100644 index 0000000000..49d258f1ca --- /dev/null +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py @@ -0,0 +1,112 @@ +import types +from typing import List, Type + +from inference.core.workflows.errors import BlockInterfaceError +from inference.core.workflows.execution_engine.dynamic_blocks.entities import PythonCode +from inference.core.workflows.prototypes.block import ( + BlockResult, + WorkflowBlock, + WorkflowBlockManifest, +) + +IMPORTS_LINES = [ + "from typing import Any, List, Dict, Set, Optional", + "import supervision as sv", + "import numpy as np", + "import math", + "import time", + "import json", + "import os", + "import requests", + "import cv2", + "import shapely", + "from inference.core.workflows.entities.base import Batch, WorkflowImageData", + "from inference.core.workflows.prototypes.block import BlockResult", +] + + +def assembly_custom_python_block( + block_type_name: str, + unique_identifier: str, + manifest: Type[WorkflowBlockManifest], + python_code: PythonCode, +) -> Type[WorkflowBlock]: + code_module = create_dynamic_module( + block_type_name=block_type_name, + python_code=python_code, + module_name=f"dynamic_module_{unique_identifier}", + ) 
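+    # NOTE: `code_module` now exposes every symbol defined in the user-supplied code.
+    # For instance, a hypothetical payload (illustrative only, not part of this patch)
+    # with run_function_code 'def run(self, a): return {"out": a}' would make that
+    # function reachable below as `code_module.run`.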
+    if not hasattr(code_module, python_code.run_function_name):
+        raise BlockInterfaceError(
+            public_message=f"Cannot find function: {python_code.run_function_name} in declared code for "
+            f"dynamic block: `{block_type_name}`",
+            context="workflow_compilation | dynamic_block_compilation | declared_symbols_fetching",
+        )
+    run_function = getattr(code_module, python_code.run_function_name)
+
+    async def run(self, *args, **kwargs) -> BlockResult:
+        if not self._allow_custom_python_execution:
+            raise RuntimeError(
+                "It is not possible to execute CustomPython block in that configuration of `inference`. Set "
+                "ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=true"
+            )
+        return run_function(self, *args, **kwargs)
+
+    if python_code.init_function_code is not None and not hasattr(
+        code_module, python_code.init_function_name
+    ):
+        raise BlockInterfaceError(
+            public_message=f"Cannot find function: {python_code.init_function_name} in declared code for "
+            f"dynamic block: `{block_type_name}`",
+            context="workflow_compilation | dynamic_block_compilation | declared_symbols_fetching",
+        )
+
+    init_function = getattr(code_module, python_code.init_function_name, dict)
+
+    def constructor(self, allow_custom_python_execution: bool):
+        self._allow_custom_python_execution = allow_custom_python_execution
+        self._init_results = init_function()
+
+    @classmethod
+    def get_init_parameters(cls) -> List[str]:
+        return ["allow_custom_python_execution"]
+
+    @classmethod
+    def get_manifest(cls) -> Type[WorkflowBlockManifest]:
+        return manifest
+
+    return type(
+        f"DynamicBlock[{unique_identifier}]",
+        (WorkflowBlock,),
+        {
+            "__init__": constructor,
+            "get_init_parameters": get_init_parameters,
+            "get_manifest": get_manifest,
+            "run": run,
+        },
+    )
+
+
+def create_dynamic_module(
+    block_type_name: str, python_code: PythonCode, module_name: str
+) -> types.ModuleType:
+    try:
+        dynamic_module = types.ModuleType(module_name)
+        imports = (
+            "\n".join(IMPORTS_LINES)
+            + "\n"
+            + "\n".join(python_code.imports)
+            + "\n\n\n\n"
+        )
+        code = python_code.run_function_code
+        if python_code.init_function_code:
+            code += "\n\n\n" + python_code.init_function_code
+        exec(imports + code, dynamic_module.__dict__)
+        return dynamic_module
+    except Exception as error:
+        raise BlockInterfaceError(
+            public_message=f"Error of type `{type(error).__name__}` encountered while attempting to "
+            f"create Python module with code for block: {block_type_name}. Error message: {error}",
+            context="workflow_compilation | dynamic_block_compilation | dynamic_module_creation",
+            inner_error=error,
+        ) from error
diff --git a/inference/core/workflows/execution_engine/dynamic_blocs/entities.py b/inference/core/workflows/execution_engine/dynamic_blocks/entities.py
similarity index 81%
rename from inference/core/workflows/execution_engine/dynamic_blocs/entities.py
rename to inference/core/workflows/execution_engine/dynamic_blocks/entities.py
index 0e8ed2771c..7daee0ae35 100644
--- a/inference/core/workflows/execution_engine/dynamic_blocs/entities.py
+++ b/inference/core/workflows/execution_engine/dynamic_blocks/entities.py
@@ -1,11 +1,12 @@
 from enum import Enum
-from typing import Any, Dict, List, Literal
+from typing import Any, Dict, List, Literal, Optional

 from pydantic import BaseModel, Field


 class SelectorType(Enum):
     INPUT_IMAGE = "input_image"
+    STEP_OUTPUT_IMAGE = "step_output_image"
     INPUT_PARAMETER = "input_parameter"
     STEP_OUTPUT = "step_output"

@@ -104,15 +105,31 @@ class ManifestDescription(BaseModel):

 class PythonCode(BaseModel):
     type: Literal["PythonCode"]
-    function_code: str = Field(
+    run_function_code: str = Field(
         description="Code of python function. Content should be properly formatted including indentations. "
         "Workflows execution engine is to create dynamic module with provided function - ensuring "
-        "imports of the following symbols: [Any, List, Dict, Set, sv, np, math, Batch, "
-        "WorkflowImageData, BlockResult]"
+        "imports of the following symbols: [Any, List, Dict, Set, sv, np, math, time, json, os, "
+        "requests, cv2, shapely, Batch, WorkflowImageData, BlockResult]. Expected signature is: "
+        "def run(self, ...), taking the manifest parameters apart from `name` and `type`. Through "
+        "self, one may access self._init_results, which is the dict returned by the function "
+        "shipped in `init_function_code`, if given."
     )
-    function_name: str = Field(
-        default="run", description="Name of the function shipped in `function_code`."
+    run_function_name: str = Field(
+        default="run", description="Name of the function shipped in `run_function_code`."
     )
+    init_function_code: Optional[str] = Field(
+        description="Code of the function that performs initialisation of the block. It must be a "
+        "parameter-free function with signature `def init() -> Dict[str, Any]`, whose result is "
+        "stored as `self._init_results` when the dynamic block class is initialised.",
+        default=None,
+    )
+    init_function_name: str = Field(
+        default="init",
+        description="Name of the function shipped in `init_function_code`.",
+    )
+    imports: List[str] = Field(
+        default_factory=list,
+        description="List of additional imports required to run the code.",
+    )


 class DynamicBlockDefinition(BaseModel):
diff --git a/inference/core/workflows/execution_engine/dynamic_blocs/loader.py b/inference/core/workflows/execution_engine/dynamic_blocks/loader.py
similarity index 81%
rename from inference/core/workflows/execution_engine/dynamic_blocs/loader.py
rename to inference/core/workflows/execution_engine/dynamic_blocks/loader.py
index 957335911b..0a1e435eeb 100644
--- a/inference/core/workflows/execution_engine/dynamic_blocs/loader.py
+++ b/inference/core/workflows/execution_engine/dynamic_blocks/loader.py
@@ -1,7 +1,7 @@
 from typing import Any, Callable, Dict, Union

 from inference.core.env import ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS
-from inference.core.workflows.execution_engine.dynamic_blocs.entities import (
+from inference.core.workflows.execution_engine.dynamic_blocks.entities import (
     BLOCK_SOURCE,
 )
diff --git a/inference/core/workflows/execution_engine/dynamic_blocs/block_scaffolding.py b/inference/core/workflows/execution_engine/dynamic_blocs/block_scaffolding.py
deleted file mode 100644
index bbd02f28ce..0000000000
--- a/inference/core/workflows/execution_engine/dynamic_blocs/block_scaffolding.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import types
-from typing import List, Type
-
-from inference.core.workflows.execution_engine.dynamic_blocs.entities import PythonCode
-from inference.core.workflows.prototypes.block import (
-    BlockResult,
-    WorkflowBlock,
-    WorkflowBlockManifest,
-)
-
-IMPORTS_LINES = [
-    "from typing import Any, List, Dict, Set",
-    "import supervision as sv",
-    "import numpy as np",
-    "import math",
-    "from inference.core.workflows.entities.base import Batch, WorkflowImageData",
-    "from inference.core.workflows.prototypes.block import BlockResult",
-]
-
-
-def assembly_custom_python_block(
-    unique_identifier: str,
-    manifest: Type[WorkflowBlockManifest],
-    python_code: PythonCode,
-) -> Type[WorkflowBlock]:
-    code_module = create_dynamic_module(
-        code=python_code.function_code,
-        module_name=f"dynamic_module_{unique_identifier}",
-    )
-    if not hasattr(code_module, python_code.function_name):
-        raise ValueError(
-            f"Cannot find function: {python_code.function_name} in declared code."
-        )
-    run_function = getattr(code_module, python_code.function_name)
-
-    async def run(self, *args, **kwargs) -> BlockResult:
-        if not self._allow_custom_python_execution:
-            raise RuntimeError(
-                "It is not possible to execute CustomPython block in that configuration of `inference`. 
Set " - "ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=true" - ) - return run_function(*args, **kwargs) - - def constructor(self, allow_custom_python_execution: bool): - self._allow_custom_python_execution = allow_custom_python_execution - - @classmethod - def get_init_parameters(cls) -> List[str]: - return ["allow_custom_python_execution"] - - @classmethod - def get_manifest(cls) -> Type[WorkflowBlockManifest]: - return manifest - - return type( - f"DynamicBlock[{unique_identifier}]", - (WorkflowBlock,), - { - "__init__": constructor, - "get_init_parameters": get_init_parameters, - "get_manifest": get_manifest, - "run": run, - }, - ) - - -def create_dynamic_module(code: str, module_name: str) -> types.ModuleType: - dynamic_module = types.ModuleType(module_name) - imports = "\n".join(IMPORTS_LINES) + "\n\n\n\n" - exec(imports + code, dynamic_module.__dict__) - return dynamic_module diff --git a/inference/core/workflows/execution_engine/introspection/blocks_loader.py b/inference/core/workflows/execution_engine/introspection/blocks_loader.py index 99faa6f778..c7ee6e2ac7 100644 --- a/inference/core/workflows/execution_engine/introspection/blocks_loader.py +++ b/inference/core/workflows/execution_engine/introspection/blocks_loader.py @@ -14,7 +14,7 @@ from inference.core.workflows.execution_engine.compiler.entities import ( BlockSpecification, ) -from inference.core.workflows.execution_engine.dynamic_blocs.loader import ( +from inference.core.workflows.execution_engine.dynamic_blocks.loader import ( load_dynamic_blocks_initializers, ) from inference.core.workflows.execution_engine.introspection.entities import ( diff --git a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py index d5c841bf6e..8fb61271e2 100644 --- a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py +++ b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py @@ -4,10 +4,11 @@ from inference.core.env import WORKFLOWS_MAX_CONCURRENT_STEPS from inference.core.managers.base import ModelManager from inference.core.workflows.core_steps.common.entities import StepExecutionMode +from inference.core.workflows.errors import BlockInterfaceError, StepExecutionError from inference.core.workflows.execution_engine.core import ExecutionEngine FUNCTION_TO_GET_OVERLAP_OF_BBOXES = """ -def run(predictions: sv.Detections, class_x: str, class_y: str) -> BlockResult: +def run(self, predictions: sv.Detections, class_x: str, class_y: str) -> BlockResult: bboxes_class_x = predictions[predictions.data["class_name"] == class_x] bboxes_class_y = predictions[predictions.data["class_name"] == class_y] overlap = [] @@ -33,7 +34,7 @@ def run(predictions: sv.Detections, class_x: str, class_y: str) -> BlockResult: FUNCTION_TO_GET_MAXIMUM_OVERLAP = """ -def run(overlaps: List[List[float]]) -> BlockResult: +def run(self, overlaps: List[List[float]]) -> BlockResult: max_value = -1 for overlap in overlaps: for overlap_value in overlap: @@ -73,7 +74,7 @@ def run(overlaps: List[List[float]]) -> BlockResult: }, "code": { "type": "PythonCode", - "function_code": FUNCTION_TO_GET_OVERLAP_OF_BBOXES, + "run_function_code": FUNCTION_TO_GET_OVERLAP_OF_BBOXES, }, }, { @@ -93,7 +94,7 @@ def run(overlaps: List[List[float]]) -> BlockResult: }, "code": { "type": "PythonCode", - "function_code": FUNCTION_TO_GET_MAXIMUM_OVERLAP, + "run_function_code": FUNCTION_TO_GET_MAXIMUM_OVERLAP, }, }, ], @@ 
-167,7 +168,7 @@ async def test_workflow_with_custom_python_blocks_measuring_overlap( "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "workflows_core.allow_custom_python_execution": True, + "dynamic_workflows_blocks.allow_custom_python_execution": True, } execution_engine = ExecutionEngine.init( workflow_definition=WORKFLOW_WITH_OVERLAP_MEASUREMENT, @@ -205,3 +206,728 @@ async def test_workflow_with_custom_python_blocks_measuring_overlap( assert ( result[1]["max_overlap"] is None ), "Expected `max_overlap` not to be calculated for second image due to conditional execution" + + +FUNCTION_TO_GET_MAXIMUM_CONFIDENCE_FROM_BATCH_OF_DETECTIONS = """ +def run(self, predictions: Batch[sv.Detections]) -> BlockResult: + result = [] + for prediction in predictions: + result.append({"max_confidence": np.max(prediction.confidence).item()}) + return result +""" + +WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "MaxConfidence", + "inputs": { + "predictions": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], + }, + }, + "outputs": { + "max_confidence": { + "type": "DynamicOutputDefinition", + "kind": ["float_zero_to_one"], + } + }, + "accepts_batch_input": True, + }, + "code": { + "type": "PythonCode", + "run_function_code": FUNCTION_TO_GET_MAXIMUM_CONFIDENCE_FROM_BATCH_OF_DETECTIONS, + }, + }, + ], + "steps": [ + { + "type": "RoboflowObjectDetectionModel", + "name": "model", + "image": "$inputs.image", + "model_id": "yolov8n-640", + }, + { + "type": "MaxConfidence", + "name": "confidence_aggregation", + "predictions": "$steps.model.predictions", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "max_confidence", + "selector": "$steps.confidence_aggregation.max_confidence", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_operating_on_batch( + model_manager: ModelManager, + dogs_image: np.ndarray, + crowd_image: np.ndarray, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": True, + } + execution_engine = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + # when + result = await execution_engine.run_async( + runtime_parameters={ + "image": [dogs_image, crowd_image], + } + ) + + # then + assert isinstance(result, list), "Expected list to be delivered" + assert len(result) == 2, "Expected 2 elements in the output for two input images" + assert set(result[0].keys()) == { + "max_confidence", + }, "Expected all declared outputs to be delivered" + assert set(result[1].keys()) == { + "max_confidence", + }, "Expected all declared outputs to be delivered" + assert ( + abs(result[0]["max_confidence"] - 0.85599) < 1e-3 + ), "Expected max confidence to be extracted" + assert ( + abs(result[1]["max_confidence"] - 0.84284) < 1e-3 + ), "Expected max confidence to be extracted" + + +FUNCTION_TO_ASSOCIATE_DETECTIONS_FOR_CROPS = """ +def my_function(self, prediction: sv.Detections, 
crops: Batch[WorkflowImageData]) -> BlockResult: + detection_id2bbox = { + detection_id.item(): i for i, detection_id in enumerate(prediction.data["detection_id"]) + } + results = [] + for crop in crops: + parent_id = crop.parent_metadata.parent_id + results.append({"associated_detections": prediction[detection_id2bbox[parent_id]]}) + return results +""" + + +WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_CROSS_DIMENSIONS = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "DetectionsToCropsAssociation", + "inputs": { + "prediction": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], + "selector_data_kind": { + "step_output": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ] + }, + }, + "crops": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output_image"], + "is_dimensionality_reference": True, + "dimensionality_offset": 1, + }, + }, + "outputs": { + "associated_detections": { + "type": "DynamicOutputDefinition", + "kind": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": FUNCTION_TO_ASSOCIATE_DETECTIONS_FOR_CROPS, + "run_function_name": "my_function", + }, + }, + ], + "steps": [ + { + "type": "RoboflowObjectDetectionModel", + "name": "model", + "image": "$inputs.image", + "model_id": "yolov8n-640", + }, + { + "type": "Crop", + "name": "crop", + "image": "$inputs.image", + "predictions": "$steps.model.predictions", + }, + { + "type": "DetectionsToCropsAssociation", + "name": "detections_associations", + "prediction": "$steps.model.predictions", + "crops": "$steps.crop.crops", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "associated_detections", + "selector": "$steps.detections_associations.associated_detections", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_operating_cross_dimensions( + model_manager: ModelManager, + dogs_image: np.ndarray, + crowd_image: np.ndarray, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": True, + } + execution_engine = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_CROSS_DIMENSIONS, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + # when + result = await execution_engine.run_async( + runtime_parameters={ + "image": [dogs_image, crowd_image], + } + ) + + # then + assert isinstance(result, list), "Expected list to be delivered" + assert len(result) == 2, "Expected 2 elements in the output for two input images" + assert set(result[0].keys()) == { + "associated_detections", + }, "Expected all declared outputs to be delivered" + assert set(result[1].keys()) == { + "associated_detections", + }, "Expected all declared outputs to be delivered" + assert len(result[1]["associated_detections"]) == 12 + class_names_first_image_crops = [ + e["class_name"].tolist() for e in result[0]["associated_detections"] + ] + for class_names in class_names_first_image_crops: + assert 
len(class_names) == 1, "Expected single bbox to be associated" + assert len(class_names_first_image_crops) == 2, "Expected 2 crops for first image" + class_names_second_image_crops = [ + e["class_name"].tolist() for e in result[1]["associated_detections"] + ] + for class_names in class_names_second_image_crops: + assert len(class_names) == 1, "Expected single bbox to be associated" + assert ( + len(class_names_second_image_crops) == 12 + ), "Expected 12 crops for second image" + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_when_custom_python_execution_forbidden( + model_manager: ModelManager, + dogs_image: np.ndarray, + crowd_image: np.ndarray, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": False, + } + execution_engine = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_CROSS_DIMENSIONS, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + # when + with pytest.raises(StepExecutionError): + _ = await execution_engine.run_async( + runtime_parameters={ + "image": [dogs_image, crowd_image], + } + ) + + +FUNCTION_TO_MERGE_CROPS_INTO_TILES = """ +def run(self, crops: Optional[Batch[Optional[WorkflowImageData]]]) -> BlockResult: + if crops is None: + return {"tiles": None} + black_image = np.zeros((192, 168, 3), dtype=np.uint8) + images = [crop.numpy_image if crop is not None else black_image for crop in crops] + return {"tiles": sv.create_tiles(images)} +""" + + +WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_DIMENSIONALITY_REDUCTION = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "DimensionalityReduction", + "inputs": { + "crops": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output_image"], + }, + }, + "outputs": {"tiles": {"type": "DynamicOutputDefinition", "kind": []}}, + "output_dimensionality_offset": -1, + "accepts_empty_values": True, + }, + "code": { + "type": "PythonCode", + "run_function_code": FUNCTION_TO_MERGE_CROPS_INTO_TILES, + }, + }, + ], + "steps": [ + { + "type": "RoboflowObjectDetectionModel", + "name": "model", + "image": "$inputs.image", + "model_id": "yolov8n-640", + "class_filter": ["person"], + }, + { + "type": "Crop", + "name": "crop", + "image": "$inputs.image", + "predictions": "$steps.model.predictions", + }, + { + "type": "DimensionalityReduction", + "name": "tile_creation", + "crops": "$steps.crop.crops", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "tiles", + "selector": "$steps.tile_creation.tiles", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_reducing_dimensionality( + model_manager: ModelManager, + dogs_image: np.ndarray, + crowd_image: np.ndarray, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": True, + } + execution_engine = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_DIMENSIONALITY_REDUCTION, + init_parameters=workflow_init_parameters, + 
max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + # when + result = await execution_engine.run_async( + runtime_parameters={ + "image": [dogs_image, crowd_image], + } + ) + + # then + assert isinstance(result, list), "Expected list to be delivered" + assert len(result) == 2, "Expected 2 elements in the output for two input images" + assert set(result[0].keys()) == { + "tiles", + }, "Expected all declared outputs to be delivered" + assert set(result[1].keys()) == { + "tiles", + }, "Expected all declared outputs to be delivered" + assert result[0]["tiles"] is None, "Expected no crops - hence empty output" + assert isinstance(result[1]["tiles"], np.ndarray), "Expected np array with tile" + + +MODEL_INIT_FUNCTION = """ +def init_model() -> Dict[str, Any]: + model = YOLOv8ObjectDetection(model_id="yolov8n-640") + return {"model": model} +""" + +MODEL_INFER_FUNCTION = """ +def infer(self, image: WorkflowImageData) -> BlockResult: + predictions = self._init_results["model"].infer(image.numpy_image) + return {"predictions": sv.Detections.from_inference(predictions[0].model_dump(by_alias=True, exclude_none=True))} +""" + +WORKFLOW_WITH_PYTHON_BLOCK_HOSTING_MODEL = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "CustomModel", + "inputs": { + "image": { + "type": "DynamicInputDefinition", + "selector_types": ["input_image"], + }, + }, + "outputs": { + "predictions": { + "type": "DynamicOutputDefinition", + "kind": [ + "Batch[object_detection_prediction]", + ], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": MODEL_INFER_FUNCTION, + "run_function_name": "infer", + "init_function_code": MODEL_INIT_FUNCTION, + "init_function_name": "init_model", + "imports": [ + "from inference.models.yolov8 import YOLOv8ObjectDetection", + ], + }, + }, + ], + "steps": [ + { + "type": "CustomModel", + "name": "model", + "image": "$inputs.image", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "predictions", + "selector": "$steps.model.predictions", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_running_custom_model( + model_manager: ModelManager, + dogs_image: np.ndarray, + crowd_image: np.ndarray, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": True, + } + execution_engine = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_HOSTING_MODEL, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + # when + result = await execution_engine.run_async( + runtime_parameters={ + "image": [dogs_image, crowd_image], + } + ) + + # then + assert isinstance(result, list), "Expected list to be delivered" + assert len(result) == 2, "Expected 2 elements in the output for two input images" + assert set(result[0].keys()) == { + "predictions", + }, "Expected all declared outputs to be delivered" + assert set(result[1].keys()) == { + "predictions", + }, "Expected all declared outputs to be delivered" + assert np.allclose( + result[0]["predictions"].confidence, + [0.85599, 0.50392], + atol=1e-3, + ), "Expected reproducible predictions for first image" + assert np.allclose( + 
result[1]["predictions"].confidence, + [ + 0.84284, + 0.83957, + 0.81555, + 0.80455, + 0.75804, + 0.75794, + 0.71715, + 0.71408, + 0.71003, + 0.56938, + 0.54092, + 0.43511, + ], + atol=1e-3, + ), "Expected reproducible predictions for second image" + + +BROKEN_RUN_FUNCTION = """ +def run(some: InvalidType): + pass +""" + + +WORKFLOW_WITH_CODE_THAT_DOES_NOT_COMPILE = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "CustomModel", + "inputs": { + "image": { + "type": "DynamicInputDefinition", + "selector_types": ["input_image"], + }, + }, + "outputs": { + "predictions": { + "type": "DynamicOutputDefinition", + "kind": [], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": BROKEN_RUN_FUNCTION, + }, + }, + ], + "steps": [ + { + "type": "CustomModel", + "name": "model", + "image": "$inputs.image", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "predictions", + "selector": "$steps.model.predictions", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_when_code_cannot_be_compiled( + model_manager: ModelManager, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": True, + } + + # when + with pytest.raises(BlockInterfaceError): + _ = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_CODE_THAT_DOES_NOT_COMPILE, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + +WORKFLOW_WITHOUT_RUN_FUNCTION = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "CustomModel", + "inputs": { + "image": { + "type": "DynamicInputDefinition", + "selector_types": ["input_image"], + }, + }, + "outputs": { + "predictions": { + "type": "DynamicOutputDefinition", + "kind": [], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": "", + }, + }, + ], + "steps": [ + { + "type": "CustomModel", + "name": "model", + "image": "$inputs.image", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "predictions", + "selector": "$steps.model.predictions", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_when_code_does_not_define_declared_run_function( + model_manager: ModelManager, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": True, + } + + # when + with pytest.raises(BlockInterfaceError): + _ = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITHOUT_RUN_FUNCTION, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) + + +WORKFLOW_WITHOUT_DECLARED_INIT_FUNCTION = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "CustomModel", + "inputs": { + 
"image": { + "type": "DynamicInputDefinition", + "selector_types": ["input_image"], + }, + }, + "outputs": { + "predictions": { + "type": "DynamicOutputDefinition", + "kind": [], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": MODEL_INFER_FUNCTION, + "run_function_name": "infer", + "init_function_code": "", + "init_function_name": "init_model", + "imports": [ + "from inference.models.yolov8 import YOLOv8ObjectDetection", + ], + }, + }, + ], + "steps": [ + { + "type": "CustomModel", + "name": "model", + "image": "$inputs.image", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "predictions", + "selector": "$steps.model.predictions", + }, + ], +} + + +@pytest.mark.asyncio +async def test_workflow_with_custom_python_block_when_code_does_not_define_declared_init_function( + model_manager: ModelManager, +) -> None: + # given + workflow_init_parameters = { + "workflows_core.model_manager": model_manager, + "workflows_core.api_key": None, + "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, + "dynamic_workflows_blocks.allow_custom_python_execution": True, + } + + # when + with pytest.raises(BlockInterfaceError): + _ = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITHOUT_DECLARED_INIT_FUNCTION, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, + ) From ad77990350c68432f614b66c999b094aea4fa073 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Tue, 16 Jul 2024 19:05:44 +0200 Subject: [PATCH 07/15] WIP - safe commit --- development/docs/build_block_docs.py | 2 +- inference/core/entities/requests/workflows.py | 9 + .../core/entities/responses/workflows.py | 3 + .../core/interfaces/http/handlers/__init__.py | 0 .../interfaces/http/handlers/workflows.py | 64 +++++++ inference/core/interfaces/http/http_api.py | 77 ++------ inference/core/workflows/errors.py | 4 + .../execution_engine/compiler/entities.py | 2 +- .../dynamic_blocks/block_assembler.py | 11 ++ .../dynamic_blocks/block_scaffolding.py | 18 +- .../execution_engine/dynamic_blocks/loader.py | 12 -- .../introspection/blocks_loader.py | 8 +- .../test_workflow_endpoints.py | 172 +++++++++++++++++- .../test_workflow_with_custom_python_block.py | 31 ++-- 14 files changed, 307 insertions(+), 106 deletions(-) create mode 100644 inference/core/interfaces/http/handlers/__init__.py create mode 100644 inference/core/interfaces/http/handlers/workflows.py delete mode 100644 inference/core/workflows/execution_engine/dynamic_blocks/loader.py diff --git a/development/docs/build_block_docs.py b/development/docs/build_block_docs.py index 2b64f77633..d5968849b6 100644 --- a/development/docs/build_block_docs.py +++ b/development/docs/build_block_docs.py @@ -104,7 +104,7 @@ def main() -> None: token=AUTOGENERATED_BLOCKS_LIST_TOKEN, ) block_card_lines = [] - blocks_description = describe_available_blocks() + blocks_description = describe_available_blocks(dynamic_blocks=[]) block_type2manifest_type_identifier = { block.block_class: block.manifest_type_identifier for block in blocks_description.blocks diff --git a/inference/core/entities/requests/workflows.py b/inference/core/entities/requests/workflows.py index ab5aa07642..efe6821a59 100644 --- a/inference/core/entities/requests/workflows.py +++ b/inference/core/entities/requests/workflows.py @@ -2,6 +2,8 @@ from pydantic import BaseModel, Field +from inference.core.workflows.execution_engine.dynamic_blocks.entities import DynamicBlockDefinition + class WorkflowInferenceRequest(BaseModel): 
api_key: str = Field( @@ -18,3 +20,10 @@ class WorkflowInferenceRequest(BaseModel): class WorkflowSpecificationInferenceRequest(WorkflowInferenceRequest): specification: dict + + +class DescribeBlocksRequest(BaseModel): + dynamic_blocks_definitions: List[DynamicBlockDefinition] = Field( + default_factory=list, + description="Dynamic blocks to be used." + ) diff --git a/inference/core/entities/responses/workflows.py b/inference/core/entities/responses/workflows.py index 6ecaf41216..95c5b3fc33 100644 --- a/inference/core/entities/responses/workflows.py +++ b/inference/core/entities/responses/workflows.py @@ -146,3 +146,6 @@ class WorkflowsBlocksDescription(BaseModel): universal_query_language_description: UniversalQueryLanguageDescription = Field( description="Definitions of Universal Query Language operations and operators" ) + dynamic_block_definition_schema: dict = Field( + description="Schema for dynamic block definition" + ) diff --git a/inference/core/interfaces/http/handlers/__init__.py b/inference/core/interfaces/http/handlers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/inference/core/interfaces/http/handlers/workflows.py b/inference/core/interfaces/http/handlers/workflows.py new file mode 100644 index 0000000000..cff91c1350 --- /dev/null +++ b/inference/core/interfaces/http/handlers/workflows.py @@ -0,0 +1,64 @@ +# TODO - for everyone: start migrating other handlers to bring relief to http_api.py +from typing import Optional, List + +from inference.core.entities.responses.workflows import ExternalWorkflowsBlockSelectorDefinition, \ + ExternalBlockPropertyPrimitiveDefinition, UniversalQueryLanguageDescription, WorkflowsBlocksDescription +from inference.core.workflows.core_steps.common.query_language.introspection.core import \ + prepare_operations_descriptions, prepare_operators_descriptions +from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import compile_dynamic_blocks +from inference.core.workflows.execution_engine.dynamic_blocks.entities import DynamicBlockDefinition +from inference.core.workflows.execution_engine.introspection.blocks_loader import describe_available_blocks +from inference.core.workflows.execution_engine.introspection.connections_discovery import discover_blocks_connections + + +def handle_describe_workflows_blocks_request( + dynamic_blocks_definitions: Optional[List[DynamicBlockDefinition]] = None +) -> WorkflowsBlocksDescription: + if dynamic_blocks_definitions is None: + dynamic_blocks_definitions = [] + dynamic_blocks = compile_dynamic_blocks( + dynamic_blocks_definitions=dynamic_blocks_definitions, + ) + blocks_description = describe_available_blocks(dynamic_blocks=dynamic_blocks) + blocks_connections = discover_blocks_connections( + blocks_description=blocks_description, + ) + kinds_connections = { + kind_name: [ + ExternalWorkflowsBlockSelectorDefinition( + manifest_type_identifier=c.manifest_type_identifier, + property_name=c.property_name, + property_description=c.property_description, + compatible_element=c.compatible_element, + is_list_element=c.is_list_element, + is_dict_element=c.is_dict_element, + ) + for c in connections + ] + for kind_name, connections in blocks_connections.kinds_connections.items() + } + primitives_connections = [ + ExternalBlockPropertyPrimitiveDefinition( + manifest_type_identifier=primitives_connection.manifest_type_identifier, + property_name=primitives_connection.property_name, + property_description=primitives_connection.property_description, + 
type_annotation=primitives_connection.type_annotation, + ) + for primitives_connection in blocks_connections.primitives_connections + ] + uql_operations_descriptions = prepare_operations_descriptions() + uql_operators_descriptions = prepare_operators_descriptions() + universal_query_language_description = ( + UniversalQueryLanguageDescription.from_internal_entities( + operations_descriptions=uql_operations_descriptions, + operators_descriptions=uql_operators_descriptions, + ) + ) + return WorkflowsBlocksDescription( + blocks=blocks_description.blocks, + declared_kinds=blocks_description.declared_kinds, + kinds_connections=kinds_connections, + primitives_connections=primitives_connections, + universal_query_language_description=universal_query_language_description, + dynamic_block_definition_schema=DynamicBlockDefinition.schema() + ) \ No newline at end of file diff --git a/inference/core/interfaces/http/http_api.py b/inference/core/interfaces/http/http_api.py index 085f90f9e3..d20b6effbc 100644 --- a/inference/core/interfaces/http/http_api.py +++ b/inference/core/interfaces/http/http_api.py @@ -13,7 +13,6 @@ from fastapi_cprofile.profiler import CProfileMiddleware from inference.core import logger -from inference.core.cache import cache from inference.core.devices.utils import GLOBAL_INFERENCE_SERVER_ID from inference.core.entities.requests.clip import ( ClipCompareRequest, @@ -43,7 +42,7 @@ ) from inference.core.entities.requests.workflows import ( WorkflowInferenceRequest, - WorkflowSpecificationInferenceRequest, + WorkflowSpecificationInferenceRequest, DescribeBlocksRequest, ) from inference.core.entities.requests.yolo_world import YOLOWorldInferenceRequest from inference.core.entities.responses.clip import ( @@ -73,9 +72,6 @@ ServerVersionInfo, ) from inference.core.entities.responses.workflows import ( - ExternalBlockPropertyPrimitiveDefinition, - ExternalWorkflowsBlockSelectorDefinition, - UniversalQueryLanguageDescription, WorkflowInferenceResponse, WorkflowsBlocksDescription, WorkflowValidationStatus, @@ -128,6 +124,7 @@ WorkspaceLoadError, ) from inference.core.interfaces.base import BaseInterface +from inference.core.interfaces.http.handlers.workflows import handle_describe_workflows_blocks_request from inference.core.interfaces.http.orjson_utils import ( orjson_response, serialise_workflow_result, @@ -140,10 +137,6 @@ InvalidInputTypeError, OperationTypeNotRecognisedError, ) -from inference.core.workflows.core_steps.common.query_language.introspection.core import ( - prepare_operations_descriptions, - prepare_operators_descriptions, -) from inference.core.workflows.entities.base import OutputDefinition from inference.core.workflows.errors import ( ExecutionGraphStructureError, @@ -157,12 +150,6 @@ parse_workflow_definition, ) from inference.core.workflows.execution_engine.core import ExecutionEngine -from inference.core.workflows.execution_engine.introspection.blocks_loader import ( - describe_available_blocks, -) -from inference.core.workflows.execution_engine.introspection.connections_discovery import ( - discover_blocks_connections, -) from inference.models.aliases import resolve_roboflow_model_alias if LAMBDA: @@ -889,54 +876,30 @@ async def infer_from_workflow( @app.get( "/workflows/blocks/describe", response_model=WorkflowsBlocksDescription, - summary="[EXPERIMENTAL] Endpoint to get definition of workflows blocks that are accessible", + summary="[LEGACY] Endpoint to get definition of workflows blocks that are accessible", description="Endpoint provides detailed information 
about workflows building blocks that are " "accessible in the inference server. This information could be used to programmatically " "build / display workflows.", + deprecated=True, ) @with_route_exceptions async def describe_workflows_blocks() -> WorkflowsBlocksDescription: - blocks_description = describe_available_blocks() - blocks_connections = discover_blocks_connections( - blocks_description=blocks_description, - ) - kinds_connections = { - kind_name: [ - ExternalWorkflowsBlockSelectorDefinition( - manifest_type_identifier=c.manifest_type_identifier, - property_name=c.property_name, - property_description=c.property_description, - compatible_element=c.compatible_element, - is_list_element=c.is_list_element, - is_dict_element=c.is_dict_element, - ) - for c in connections - ] - for kind_name, connections in blocks_connections.kinds_connections.items() - } - primitives_connections = [ - ExternalBlockPropertyPrimitiveDefinition( - manifest_type_identifier=primitives_connection.manifest_type_identifier, - property_name=primitives_connection.property_name, - property_description=primitives_connection.property_description, - type_annotation=primitives_connection.type_annotation, - ) - for primitives_connection in blocks_connections.primitives_connections - ] - uql_operations_descriptions = prepare_operations_descriptions() - uql_operators_descriptions = prepare_operators_descriptions() - universal_query_language_description = ( - UniversalQueryLanguageDescription.from_internal_entities( - operations_descriptions=uql_operations_descriptions, - operators_descriptions=uql_operators_descriptions, - ) - ) - return WorkflowsBlocksDescription( - blocks=blocks_description.blocks, - declared_kinds=blocks_description.declared_kinds, - kinds_connections=kinds_connections, - primitives_connections=primitives_connections, - universal_query_language_description=universal_query_language_description, + return handle_describe_workflows_blocks_request() + + @app.post( + "/workflows/blocks/describe", + response_model=WorkflowsBlocksDescription, + summary="[EXPERIMENTAL] Endpoint to get definition of workflows blocks that are accessible", + description="Endpoint provides detailed information about workflows building blocks that are " + "accessible in the inference server. This information could be used to programmatically " + "build / display workflows. 
Additionally, in the request body one can specify a list of "
+        "dynamic block definitions, which will be transformed into blocks and used to generate "
+        "schemas and definitions of connections",
+    )
+    @with_route_exceptions
+    async def describe_workflows_blocks(request: DescribeBlocksRequest) -> WorkflowsBlocksDescription:
+        return handle_describe_workflows_blocks_request(
+            dynamic_blocks_definitions=request.dynamic_blocks_definitions
         )

     @app.post(
diff --git a/inference/core/workflows/errors.py b/inference/core/workflows/errors.py
index 1d52905100..f6d378c947 100644
--- a/inference/core/workflows/errors.py
+++ b/inference/core/workflows/errors.py
@@ -33,6 +33,10 @@ def inner_error(self) -> Optional[Exception]:
         return self._inner_error


+class WorkflowEnvironmentConfigurationError(WorkflowError):
+    pass
+
+
 class WorkflowCompilerError(WorkflowError):
     pass

diff --git a/inference/core/workflows/execution_engine/compiler/entities.py b/inference/core/workflows/execution_engine/compiler/entities.py
index c0ccbe04c3..07052f8f99 100644
--- a/inference/core/workflows/execution_engine/compiler/entities.py
+++ b/inference/core/workflows/execution_engine/compiler/entities.py
@@ -19,7 +19,7 @@ class BlockSpecification:
     block_source: str
     identifier: str
-    block_class: Union[Type[WorkflowBlock]]
+    block_class: Type[WorkflowBlock]
     manifest_class: Type[WorkflowBlockManifest]

diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py
index 03e6f4089a..41279692b5 100644
--- a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py
+++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py
@@ -3,6 +3,7 @@

 from pydantic import BaseModel, ConfigDict, Field, create_model

+from inference.core.env import ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS
 from inference.core.workflows.entities.base import OutputDefinition
 from inference.core.workflows.entities.types import (
     WILDCARD_KIND,
@@ -12,6 +13,7 @@
     WorkflowImageSelector,
     WorkflowParameterSelector,
 )
+from inference.core.workflows.errors import WorkflowEnvironmentConfigurationError
 from inference.core.workflows.execution_engine.compiler.entities import (
     BlockSpecification,
 )
@@ -39,6 +41,15 @@

 def compile_dynamic_blocks(
     dynamic_blocks_definitions: List[dict],
 ) -> List[BlockSpecification]:
+    if not dynamic_blocks_definitions:
+        return []
+    if not ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS:
+        raise WorkflowEnvironmentConfigurationError(
+            public_message="Cannot use dynamic blocks with custom Python code in this installation of `workflows`. "
" + "This can be changed by setting environmental variable " + "`ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=True`", + context="workflow_compilation | dynamic_blocks_compilation", + ) all_defined_kinds = load_all_defined_kinds() kinds_lookup = {kind.name: kind for kind in all_defined_kinds} dynamic_blocks = [ diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py index 49d258f1ca..89fb3764c2 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py @@ -1,7 +1,8 @@ import types from typing import List, Type -from inference.core.workflows.errors import BlockInterfaceError +from inference.core.env import ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS +from inference.core.workflows.errors import BlockInterfaceError, WorkflowEnvironmentConfigurationError from inference.core.workflows.execution_engine.dynamic_blocks.entities import PythonCode from inference.core.workflows.prototypes.block import ( BlockResult, @@ -45,10 +46,12 @@ def assembly_custom_python_block( run_function = getattr(code_module, python_code.run_function_name) async def run(self, *args, **kwargs) -> BlockResult: - if not self._allow_custom_python_execution: - raise RuntimeError( - "It is not possible to execute CustomPython block in that configuration of `inference`. Set " - "ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=true" + if not ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS: + raise WorkflowEnvironmentConfigurationError( + public_message="Cannot use dynamic blocks with custom Python code in this installation of `workflows`. " + "This can be changed by setting environmental variable " + "`ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=True`", + context="workflow_execution | step_execution | dynamic_step", ) return run_function(self, *args, **kwargs) @@ -63,13 +66,12 @@ async def run(self, *args, **kwargs) -> BlockResult: init_function = getattr(code_module, python_code.init_function_name, dict) - def constructor(self, allow_custom_python_execution: bool): - self._allow_custom_python_execution = allow_custom_python_execution + def constructor(self): self._init_results = init_function() @classmethod def get_init_parameters(cls) -> List[str]: - return ["allow_custom_python_execution"] + return [] @classmethod def get_manifest(cls) -> Type[WorkflowBlockManifest]: diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/loader.py b/inference/core/workflows/execution_engine/dynamic_blocks/loader.py deleted file mode 100644 index 0a1e435eeb..0000000000 --- a/inference/core/workflows/execution_engine/dynamic_blocks/loader.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any, Callable, Dict, Union - -from inference.core.env import ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS -from inference.core.workflows.execution_engine.dynamic_blocks.entities import ( - BLOCK_SOURCE, -) - - -def load_dynamic_blocks_initializers() -> Dict[str, Union[Any, Callable[[None], Any]]]: - return { - f"{BLOCK_SOURCE}.allow_custom_python_execution": ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS - } diff --git a/inference/core/workflows/execution_engine/introspection/blocks_loader.py b/inference/core/workflows/execution_engine/introspection/blocks_loader.py index c7ee6e2ac7..1d46262339 100644 --- a/inference/core/workflows/execution_engine/introspection/blocks_loader.py +++ 
b/inference/core/workflows/execution_engine/introspection/blocks_loader.py @@ -14,9 +14,6 @@ from inference.core.workflows.execution_engine.compiler.entities import ( BlockSpecification, ) -from inference.core.workflows.execution_engine.dynamic_blocks.loader import ( - load_dynamic_blocks_initializers, -) from inference.core.workflows.execution_engine.introspection.entities import ( BlockDescription, BlocksDescription, @@ -31,8 +28,8 @@ WORKFLOWS_CORE_PLUGIN_NAME = "workflows_core" -def describe_available_blocks() -> BlocksDescription: - blocks = load_workflow_blocks() +def describe_available_blocks(dynamic_blocks: List[BlockSpecification]) -> BlocksDescription: + blocks = load_workflow_blocks() + dynamic_blocks result = [] for block in blocks: block_schema = block.manifest_class.model_json_schema() @@ -180,7 +177,6 @@ def _load_blocks_from_plugin(plugin_name: str) -> List[BlockSpecification]: def load_initializers() -> Dict[str, Union[Any, Callable[[None], Any]]]: plugins_to_load = get_plugin_modules() result = load_core_blocks_initializers() - result.update(load_dynamic_blocks_initializers()) for plugin_name in plugins_to_load: result.update(load_initializers_from_plugin(plugin_name=plugin_name)) return result diff --git a/tests/inference/integration_tests/test_workflow_endpoints.py b/tests/inference/integration_tests/test_workflow_endpoints.py index e1d8213c88..c40c2191b1 100644 --- a/tests/inference/integration_tests/test_workflow_endpoints.py +++ b/tests/inference/integration_tests/test_workflow_endpoints.py @@ -5,7 +5,7 @@ API_KEY = os.environ.get("API_KEY") -def test_getting_blocks_descriptions(server_url) -> None: +def test_getting_blocks_descriptions_using_legacy_get_endpoint(server_url) -> None: # when response = requests.get(f"{server_url}/workflows/blocks/describe") @@ -30,6 +30,96 @@ def test_getting_blocks_descriptions(server_url) -> None: assert ( len(response_data["primitives_connections"]) > 0 ), "Expected some primitive parameters for steps to be declared" + assert "universal_query_language_description" in response_data, "Expected universal_query_language_description key to be present in response" + assert "dynamic_block_definition_schema" in response_data, "Expected key `dynamic_block_definition_schema` to be present in response" + + +def test_getting_blocks_descriptions_using_new_post_endpoint_with_dynamic_steps(server_url) -> None: + # given + function_code = """ + def my_function(self, prediction: sv.Detections, crops: Batch[WorkflowImageData]) -> BlockResult: + detection_id2bbox = { + detection_id.item(): i for i, detection_id in enumerate(prediction.data["detection_id"]) + } + results = [] + for crop in crops: + parent_id = crop.parent_metadata.parent_id + results.append({"associated_detections": prediction[detection_id2bbox[parent_id]]}) + return results + """ + dynamic_blocks_definitions = [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "DetectionsToCropsAssociation", + "inputs": { + "prediction": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], + "selector_data_kind": { + "step_output": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ] + }, + }, + "crops": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output_image"], + "is_dimensionality_reference": True, + "dimensionality_offset": 1, + }, + }, + "outputs": { + "associated_detections": { + "type": "DynamicOutputDefinition", + 
"kind": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": function_code, + "run_function_name": "my_function", + }, + }, + ] + + # when + response = requests.post( + f"{server_url}/workflows/blocks/describe", + json={"dynamic_blocks_definitions": dynamic_blocks_definitions} + ) + + # then + response.raise_for_status() + response_data = response.json() + assert "blocks" in response_data, "Response expected to define blocks" + assert len(response_data["blocks"]) > 0, "Some blocs expected to be added" + assert ( + "declared_kinds" in response_data + ), "Declared kinds must be provided in output" + assert len(response_data["declared_kinds"]) > 0, "Some kinds must be declared" + assert ( + "kinds_connections" in response_data + ), "Kinds connections expected to be declared" + assert len(response_data["declared_kinds"]) >= len( + response_data["kinds_connections"] + ), "Kinds connections declared as inputs for blocks must be at most in number of all declared kinds" + assert ( + "primitives_connections" in response_data + ), "Primitives connections expected to be in response" + assert ( + len(response_data["primitives_connections"]) > 0 + ), "Expected some primitive parameters for steps to be declared" + assert "universal_query_language_description" in response_data, "Expected universal_query_language_description key to be present in response" + assert "dynamic_block_definition_schema" in response_data, "Expected key `dynamic_block_definition_schema` to be present in response" def test_getting_dynamic_outputs(server_url: str) -> None: @@ -100,6 +190,86 @@ def test_compilation_endpoint_when_compilation_succeeds( assert response_data["status"] == "ok" +def test_compilation_endpoint_when_compilation_succeeds_with_custom_block( + server_url: str, +) -> None: + # given + init_function = """ +def init_model() -> Dict[str, Any]: + model = YOLOv8ObjectDetection(model_id="yolov8n-640") + return {"model": model} +""" + infer_function = """ +def infer(self, image: WorkflowImageData) -> BlockResult: + predictions = self._init_results["model"].infer(image.numpy_image) + return {"predictions": sv.Detections.from_inference(predictions[0].model_dump(by_alias=True, exclude_none=True))} +""" + valid_workflow_definition = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "CustomModel", + "inputs": { + "image": { + "type": "DynamicInputDefinition", + "selector_types": ["input_image"], + }, + }, + "outputs": { + "predictions": { + "type": "DynamicOutputDefinition", + "kind": [ + "Batch[object_detection_prediction]", + ], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": infer_function, + "run_function_name": "infer", + "init_function_code": init_function, + "init_function_name": "init_model", + "imports": [ + "from inference.models.yolov8 import YOLOv8ObjectDetection", + ], + }, + }, + ], + "steps": [ + { + "type": "CustomModel", + "name": "model", + "image": "$inputs.image", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "predictions", + "selector": "$steps.model.predictions", + }, + ], + } + + # when + response = requests.post( + f"{server_url}/workflows/validate", + json=valid_workflow_definition, + ) + + # then + response.raise_for_status() + 
response_data = response.json() + assert response_data["status"] == "ok" + + def test_compilation_endpoint_when_compilation_fails( server_url: str, ) -> None: diff --git a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py index 8fb61271e2..29510dcd0b 100644 --- a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py +++ b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py @@ -1,11 +1,15 @@ +from unittest import mock + import numpy as np import pytest from inference.core.env import WORKFLOWS_MAX_CONCURRENT_STEPS from inference.core.managers.base import ModelManager from inference.core.workflows.core_steps.common.entities import StepExecutionMode -from inference.core.workflows.errors import BlockInterfaceError, StepExecutionError +from inference.core.workflows.errors import BlockInterfaceError, \ + WorkflowEnvironmentConfigurationError from inference.core.workflows.execution_engine.core import ExecutionEngine +from inference.core.workflows.execution_engine.dynamic_blocks import block_assembler FUNCTION_TO_GET_OVERLAP_OF_BBOXES = """ def run(self, predictions: sv.Detections, class_x: str, class_y: str) -> BlockResult: @@ -168,7 +172,6 @@ async def test_workflow_with_custom_python_blocks_measuring_overlap( "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } execution_engine = ExecutionEngine.init( workflow_definition=WORKFLOW_WITH_OVERLAP_MEASUREMENT, @@ -281,7 +284,6 @@ async def test_workflow_with_custom_python_block_operating_on_batch( "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } execution_engine = ExecutionEngine.init( workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH, @@ -415,7 +417,6 @@ async def test_workflow_with_custom_python_block_operating_cross_dimensions( "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } execution_engine = ExecutionEngine.init( workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_CROSS_DIMENSIONS, @@ -457,6 +458,7 @@ async def test_workflow_with_custom_python_block_operating_cross_dimensions( @pytest.mark.asyncio +@mock.patch.object(block_assembler, "ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS", False) async def test_workflow_with_custom_python_block_when_custom_python_execution_forbidden( model_manager: ModelManager, dogs_image: np.ndarray, @@ -467,20 +469,14 @@ async def test_workflow_with_custom_python_block_when_custom_python_execution_fo "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": False, } - execution_engine = ExecutionEngine.init( - workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_CROSS_DIMENSIONS, - init_parameters=workflow_init_parameters, - max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, - ) # when - with pytest.raises(StepExecutionError): - _ = await execution_engine.run_async( - runtime_parameters={ - "image": 
[dogs_image, crowd_image], - } + with pytest.raises(WorkflowEnvironmentConfigurationError): + _ = ExecutionEngine.init( + workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_CROSS_DIMENSIONS, + init_parameters=workflow_init_parameters, + max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS, ) @@ -562,7 +558,6 @@ async def test_workflow_with_custom_python_block_reducing_dimensionality( "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } execution_engine = ExecutionEngine.init( workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_DIMENSIONALITY_REDUCTION, @@ -668,7 +663,6 @@ async def test_workflow_with_custom_python_block_running_custom_model( "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } execution_engine = ExecutionEngine.init( workflow_definition=WORKFLOW_WITH_PYTHON_BLOCK_HOSTING_MODEL, @@ -779,7 +773,6 @@ async def test_workflow_with_custom_python_block_when_code_cannot_be_compiled( "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } # when @@ -847,7 +840,6 @@ async def test_workflow_with_custom_python_block_when_code_does_not_define_decla "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } # when @@ -921,7 +913,6 @@ async def test_workflow_with_custom_python_block_when_code_does_not_define_decla "workflows_core.model_manager": model_manager, "workflows_core.api_key": None, "workflows_core.step_execution_mode": StepExecutionMode.LOCAL, - "dynamic_workflows_blocks.allow_custom_python_execution": True, } # when From 487c9208b8149bbec320c3fba6a1b2d98f8d01ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Wed, 17 Jul 2024 11:57:48 +0200 Subject: [PATCH 08/15] Add integration tests --- docker/dockerfiles/Dockerfile.onnx.lambda | 1 + .../dockerfiles/Dockerfile.onnx.lambda.slim | 1 + inference/core/entities/requests/workflows.py | 7 +- .../interfaces/http/handlers/workflows.py | 38 ++- inference/core/interfaces/http/http_api.py | 21 +- inference/core/interfaces/stream/sinks.py | 13 +- inference/core/workflows/errors.py | 4 + .../dynamic_blocks/block_assembler.py | 50 +++- .../dynamic_blocks/block_scaffolding.py | 16 +- .../introspection/blocks_loader.py | 4 +- .../introspection/entities.py | 12 +- .../test_workflow_endpoints.py | 222 ++++++++++++++++-- .../test_workflow_with_custom_python_block.py | 12 +- .../introspection/test_blocks_loader.py | 8 +- 14 files changed, 332 insertions(+), 77 deletions(-) diff --git a/docker/dockerfiles/Dockerfile.onnx.lambda b/docker/dockerfiles/Dockerfile.onnx.lambda index f877b3a657..7927cb044f 100644 --- a/docker/dockerfiles/Dockerfile.onnx.lambda +++ b/docker/dockerfiles/Dockerfile.onnx.lambda @@ -70,6 +70,7 @@ ENV API_LOGGING_ENABLED=True ENV MODEL_VALIDATION_DISABLED=True ENV ALLOW_NON_HTTPS_URL_INPUT=False ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False +ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False WORKDIR ${LAMBDA_TASK_ROOT} RUN rm -rf /build diff --git 
a/docker/dockerfiles/Dockerfile.onnx.lambda.slim b/docker/dockerfiles/Dockerfile.onnx.lambda.slim index ccbbb5c0f7..a31efa51c7 100644 --- a/docker/dockerfiles/Dockerfile.onnx.lambda.slim +++ b/docker/dockerfiles/Dockerfile.onnx.lambda.slim @@ -64,6 +64,7 @@ ENV API_LOGGING_ENABLED=True ENV MODEL_VALIDATION_DISABLED=True ENV ALLOW_NON_HTTPS_URL_INPUT=False ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False +ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False WORKDIR ${LAMBDA_TASK_ROOT} diff --git a/inference/core/entities/requests/workflows.py b/inference/core/entities/requests/workflows.py index efe6821a59..a82a1448f2 100644 --- a/inference/core/entities/requests/workflows.py +++ b/inference/core/entities/requests/workflows.py @@ -2,7 +2,9 @@ from pydantic import BaseModel, Field -from inference.core.workflows.execution_engine.dynamic_blocks.entities import DynamicBlockDefinition +from inference.core.workflows.execution_engine.dynamic_blocks.entities import ( + DynamicBlockDefinition, +) class WorkflowInferenceRequest(BaseModel): @@ -24,6 +26,5 @@ class WorkflowSpecificationInferenceRequest(WorkflowInferenceRequest): class DescribeBlocksRequest(BaseModel): dynamic_blocks_definitions: List[DynamicBlockDefinition] = Field( - default_factory=list, - description="Dynamic blocks to be used." + default_factory=list, description="Dynamic blocks to be used." ) diff --git a/inference/core/interfaces/http/handlers/workflows.py b/inference/core/interfaces/http/handlers/workflows.py index cff91c1350..326e1e04e5 100644 --- a/inference/core/interfaces/http/handlers/workflows.py +++ b/inference/core/interfaces/http/handlers/workflows.py @@ -1,18 +1,32 @@ # TODO - for everyone: start migrating other handlers to bring relief to http_api.py -from typing import Optional, List +from typing import List, Optional -from inference.core.entities.responses.workflows import ExternalWorkflowsBlockSelectorDefinition, \ - ExternalBlockPropertyPrimitiveDefinition, UniversalQueryLanguageDescription, WorkflowsBlocksDescription -from inference.core.workflows.core_steps.common.query_language.introspection.core import \ - prepare_operations_descriptions, prepare_operators_descriptions -from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import compile_dynamic_blocks -from inference.core.workflows.execution_engine.dynamic_blocks.entities import DynamicBlockDefinition -from inference.core.workflows.execution_engine.introspection.blocks_loader import describe_available_blocks -from inference.core.workflows.execution_engine.introspection.connections_discovery import discover_blocks_connections +from inference.core.entities.responses.workflows import ( + ExternalBlockPropertyPrimitiveDefinition, + ExternalWorkflowsBlockSelectorDefinition, + UniversalQueryLanguageDescription, + WorkflowsBlocksDescription, +) +from inference.core.workflows.core_steps.common.query_language.introspection.core import ( + prepare_operations_descriptions, + prepare_operators_descriptions, +) +from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import ( + compile_dynamic_blocks, +) +from inference.core.workflows.execution_engine.dynamic_blocks.entities import ( + DynamicBlockDefinition, +) +from inference.core.workflows.execution_engine.introspection.blocks_loader import ( + describe_available_blocks, +) +from inference.core.workflows.execution_engine.introspection.connections_discovery import ( + discover_blocks_connections, +) def handle_describe_workflows_blocks_request( - dynamic_blocks_definitions: 
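Because `dynamic_blocks_definitions` defaults to an empty list, a bare POST to the describe endpoint behaves like the legacy GET. A sketch of the client side, with `server_url` standing in for an assumed locally running server:

    import requests

    server_url = "http://127.0.0.1:9001"  # assumed local inference server
    response = requests.post(f"{server_url}/workflows/blocks/describe", json={})
    response.raise_for_status()
    print(sorted(response.json().keys()))  # blocks, declared_kinds, ...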
Optional[List[DynamicBlockDefinition]] = None + dynamic_blocks_definitions: Optional[List[DynamicBlockDefinition]] = None, ) -> WorkflowsBlocksDescription: if dynamic_blocks_definitions is None: dynamic_blocks_definitions = [] @@ -60,5 +74,5 @@ def handle_describe_workflows_blocks_request( kinds_connections=kinds_connections, primitives_connections=primitives_connections, universal_query_language_description=universal_query_language_description, - dynamic_block_definition_schema=DynamicBlockDefinition.schema() - ) \ No newline at end of file + dynamic_block_definition_schema=DynamicBlockDefinition.schema(), + ) diff --git a/inference/core/interfaces/http/http_api.py b/inference/core/interfaces/http/http_api.py index d20b6effbc..a0762d7950 100644 --- a/inference/core/interfaces/http/http_api.py +++ b/inference/core/interfaces/http/http_api.py @@ -41,8 +41,9 @@ ClearModelRequest, ) from inference.core.entities.requests.workflows import ( + DescribeBlocksRequest, WorkflowInferenceRequest, - WorkflowSpecificationInferenceRequest, DescribeBlocksRequest, + WorkflowSpecificationInferenceRequest, ) from inference.core.entities.requests.yolo_world import YOLOWorldInferenceRequest from inference.core.entities.responses.clip import ( @@ -124,7 +125,9 @@ WorkspaceLoadError, ) from inference.core.interfaces.base import BaseInterface -from inference.core.interfaces.http.handlers.workflows import handle_describe_workflows_blocks_request +from inference.core.interfaces.http.handlers.workflows import ( + handle_describe_workflows_blocks_request, +) from inference.core.interfaces.http.orjson_utils import ( orjson_response, serialise_workflow_result, @@ -139,6 +142,7 @@ ) from inference.core.workflows.entities.base import OutputDefinition from inference.core.workflows.errors import ( + DynamicBlockError, ExecutionGraphStructureError, InvalidReferenceTargetError, ReferenceTypeError, @@ -230,6 +234,7 @@ async def wrapped_route(*args, **kwargs): RuntimeInputError, InvalidInputTypeError, OperationTypeNotRecognisedError, + DynamicBlockError, ) as error: resp = JSONResponse( status_code=400, @@ -891,13 +896,15 @@ async def describe_workflows_blocks() -> WorkflowsBlocksDescription: response_model=WorkflowsBlocksDescription, summary="[EXPERIMENTAL] Endpoint to get definition of workflows blocks that are accessible", description="Endpoint provides detailed information about workflows building blocks that are " - "accessible in the inference server. This information could be used to programmatically " - "build / display workflows. Additionally - in request body one can specify list of " - "dynamic blocks definitions which will be transformed into blocks and used to generate " - "schemas and definitions of connections", + "accessible in the inference server. This information could be used to programmatically " + "build / display workflows. 
Additionally - in request body one can specify list of " + "dynamic blocks definitions which will be transformed into blocks and used to generate " + "schemas and definitions of connections", ) @with_route_exceptions - async def describe_workflows_blocks(request: DescribeBlocksRequest) -> WorkflowsBlocksDescription: + async def describe_workflows_blocks( + request: DescribeBlocksRequest, + ) -> WorkflowsBlocksDescription: return handle_describe_workflows_blocks_request( dynamic_blocks_definitions=request.dynamic_blocks_definitions ) diff --git a/inference/core/interfaces/stream/sinks.py b/inference/core/interfaces/stream/sinks.py index 827c737cbb..3e9affbef8 100644 --- a/inference/core/interfaces/stream/sinks.py +++ b/inference/core/interfaces/stream/sinks.py @@ -80,8 +80,8 @@ def render_boxes( (for sequential input) or position in the batch (from 0 to batch_size-1). Returns: None - Side effects: on_frame_rendered() is called against the np.ndarray produced from video frame - and predictions. + Side effects: on_frame_rendered() is called against the tuple (stream_id, np.ndarray) produced from video + frame and predictions. Example: ```python @@ -92,7 +92,11 @@ def render_boxes( output_size = (640, 480) video_sink = cv2.VideoWriter("output.avi", cv2.VideoWriter_fourcc(*"MJPG"), 25.0, output_size) - on_prediction = partial(render_boxes, display_size=output_size, on_frame_rendered=video_sink.write) + on_prediction = partial( + render_boxes, + display_size=output_size, + on_frame_rendered=lambda frame_data: video_sink.write(frame_data[1]) + ) pipeline = InferencePipeline.init( model_id="your-model/3", @@ -105,7 +109,8 @@ def render_boxes( ``` In this example, `render_boxes()` is used as a sink for `InferencePipeline` predictions - making frames with - predictions displayed to be saved into video file. + predictions displayed to be saved into video file. Please note that this is oversimplified example of usage + which will not be robust against multiple streams - better implementation available in `VideoFileSink` class. 
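A tuple-aware sink matching the documented `(stream_id, np.ndarray)` signature; this is a deliberately simplified stand-in for what `VideoFileSink` does per stream, with a print-based writer in place of `cv2.VideoWriter`:

    import numpy as np

    class _PrintWriter:  # stand-in for cv2.VideoWriter
        def __init__(self, name: str):
            self._name = name

        def write(self, frame: np.ndarray) -> None:
            print(f"{self._name}: frame of shape {frame.shape}")

    def make_multi_stream_sink(writers: dict):
        def on_frame_rendered(frame_data) -> None:
            stream_id, frame = frame_data  # the documented tuple signature
            writers[stream_id].write(frame)
        return on_frame_rendered

    sink = make_multi_stream_sink({0: _PrintWriter("cam-0"), 1: _PrintWriter("cam-1")})
    sink((1, np.zeros((480, 640, 3), dtype=np.uint8)))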
""" sequential_input_provided = False if not isinstance(video_frame, list): diff --git a/inference/core/workflows/errors.py b/inference/core/workflows/errors.py index f6d378c947..1a7b9bda6f 100644 --- a/inference/core/workflows/errors.py +++ b/inference/core/workflows/errors.py @@ -57,6 +57,10 @@ class BlockInterfaceError(WorkflowCompilerError): pass +class DynamicBlockError(WorkflowCompilerError): + pass + + class WorkflowDefinitionError(WorkflowCompilerError): pass diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py index 41279692b5..dbd2397c0c 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py @@ -13,7 +13,10 @@ WorkflowImageSelector, WorkflowParameterSelector, ) -from inference.core.workflows.errors import WorkflowEnvironmentConfigurationError +from inference.core.workflows.errors import ( + DynamicBlockError, + WorkflowEnvironmentConfigurationError, +) from inference.core.workflows.execution_engine.compiler.entities import ( BlockSpecification, ) @@ -33,6 +36,7 @@ load_all_defined_kinds, ) from inference.core.workflows.execution_engine.introspection.utils import ( + build_human_friendly_block_name, get_full_type_name, ) from inference.core.workflows.prototypes.block import WorkflowBlockManifest @@ -46,8 +50,8 @@ def compile_dynamic_blocks( if not ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS: raise WorkflowEnvironmentConfigurationError( public_message="Cannot use dynamic blocks with custom Python code in this installation of `workflows`. " - "This can be changed by setting environmental variable " - "`ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=True`", + "This can be changed by setting environmental variable " + "`ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=True`", context="workflow_compilation | dynamic_blocks_compilation", ) all_defined_kinds = load_all_defined_kinds() @@ -102,7 +106,14 @@ def assembly_dynamic_block_manifest( ) manifest_class = create_model( f"DynamicBlockManifest[{unique_identifier}]", - __config__=ConfigDict(extra="allow"), + __config__=ConfigDict( + extra="allow", + json_schema_extra={ + "name": build_human_friendly_block_name( + fully_qualified_name=manifest_description.block_type + ) + }, + ), name=(str, ...), type=(Literal[manifest_description.block_type], ...), **inputs_definitions, @@ -112,6 +123,7 @@ def assembly_dynamic_block_manifest( kinds_lookup=kinds_lookup, ) return assembly_manifest_class_methods( + block_type=manifest_description.block_type, manifest_class=manifest_class, outputs_definitions=outputs_definitions, manifest_description=manifest_description, @@ -168,6 +180,7 @@ def build_input_field_type( kinds_lookup: Dict[str, Kind], ) -> type: input_type_union_elements = collect_python_types_for_selectors( + block_type=block_type, input_definition=input_definition, kinds_lookup=kinds_lookup, ) @@ -175,9 +188,10 @@ def build_input_field_type( input_definition=input_definition ) if not input_type_union_elements: - raise ValueError( - f"There is no definition of input type found for property: {input_name} of " - f"dynamic block {block_type}." 
+ raise DynamicBlockError( + public_message=f"There is no definition of input type found for property: {input_name} of " + f"dynamic block {block_type}.", + context="workflow_compilation | dynamic_block_compilation | manifest_compilation", ) if len(input_type_union_elements) > 1: input_type = Union[tuple(input_type_union_elements)] @@ -189,6 +203,7 @@ def build_input_field_type( def collect_python_types_for_selectors( + block_type: str, input_definition: DynamicInputDefinition, kinds_lookup: Dict[str, Kind], ) -> List[type]: @@ -200,9 +215,11 @@ def collect_python_types_for_selectors( selector_kind = [] for kind_name in selector_kind_names: if kind_name not in kinds_lookup: - raise ValueError( - f"Could not find kind with name {kind_name} within kinds " - f"recognised by Execution Engine: {list(kinds_lookup.keys())}." + raise DynamicBlockError( + public_message=f"Could not find kind with name {kind_name} declared for dynamic block " + f"`{block_type}` within kinds that would be recognised by Execution Engine knowing the " + f"following kinds: {list(kinds_lookup.keys())}.", + context="workflow_compilation | dynamic_block_compilation | manifest_compilation", ) selector_kind.append(kinds_lookup[kind_name]) if selector_type is SelectorType.INPUT_IMAGE: @@ -275,6 +292,7 @@ def collect_input_dimensionality_offsets( def assembly_manifest_class_methods( + block_type: str, manifest_class: Type[BaseModel], outputs_definitions: List[OutputDefinition], manifest_description: ManifestDescription, @@ -294,7 +312,8 @@ def assembly_manifest_class_methods( classmethod(get_input_dimensionality_offsets), ) dimensionality_reference = pick_dimensionality_reference_property( - inputs=manifest_description.inputs + block_type=block_type, + inputs=manifest_description.inputs, ) get_dimensionality_reference_property = lambda cls: dimensionality_reference setattr( @@ -316,7 +335,7 @@ def assembly_manifest_class_methods( def pick_dimensionality_reference_property( - inputs: Dict[str, DynamicInputDefinition] + block_type: str, inputs: Dict[str, DynamicInputDefinition] ) -> Optional[str]: references = [] for name, definition in inputs.items(): @@ -326,4 +345,9 @@ def pick_dimensionality_reference_property( return None if len(references) == 1: return references[0] - raise ValueError("Not expected to have multiple dimensionality references") + raise DynamicBlockError( + public_message=f"For dynamic block {block_type} detected multiple inputs declared to be " + f"dimensionality reference: {references}, whereas at max one should be declared " + f"to be reference.", + context="workflow_compilation | dynamic_block_compilation | manifest_compilation", + ) diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py index 89fb3764c2..97498030ce 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py @@ -2,7 +2,11 @@ from typing import List, Type from inference.core.env import ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS -from inference.core.workflows.errors import BlockInterfaceError, WorkflowEnvironmentConfigurationError +from inference.core.workflows.errors import ( + BlockInterfaceError, + DynamicBlockError, + WorkflowEnvironmentConfigurationError, +) from inference.core.workflows.execution_engine.dynamic_blocks.entities import PythonCode from inference.core.workflows.prototypes.block import ( 
BlockResult, @@ -38,7 +42,7 @@ def assembly_custom_python_block( module_name=f"dynamic_module_{unique_identifier}", ) if not hasattr(code_module, python_code.run_function_name): - raise BlockInterfaceError( + raise DynamicBlockError( public_message=f"Cannot find function: {python_code.run_function_name} in declared code for " f"dynamic block: `{block_type_name}`", context="workflow_compilation | dynamic_block_compilation | declared_symbols_fetching", @@ -49,8 +53,8 @@ async def run(self, *args, **kwargs) -> BlockResult: if not ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS: raise WorkflowEnvironmentConfigurationError( public_message="Cannot use dynamic blocks with custom Python code in this installation of `workflows`. " - "This can be changed by setting environmental variable " - "`ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=True`", + "This can be changed by setting environmental variable " + "`ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=True`", context="workflow_execution | step_execution | dynamic_step", ) return run_function(self, *args, **kwargs) @@ -58,7 +62,7 @@ async def run(self, *args, **kwargs) -> BlockResult: if python_code.init_function_code is not None and not hasattr( code_module, python_code.init_function_name ): - raise BlockInterfaceError( + raise DynamicBlockError( public_message=f"Cannot find function: {python_code.init_function_name} in declared code for " f"dynamic block: `{block_type_name}`", context="workflow_compilation | dynamic_block_compilation | declared_symbols_fetching", @@ -106,7 +110,7 @@ def create_dynamic_module( exec(imports + code, dynamic_module.__dict__) return dynamic_module except Exception as error: - raise BlockInterfaceError( + raise DynamicBlockError( public_message=f"Error of type `{type(error).__class__.__name__}` encountered while attempting to " f"create Python module with code for block: {block_type_name}. 
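`create_dynamic_module` builds a fresh module object and exec's the submitted code (with declared imports prepended) into its namespace, so the run and init functions can later be fetched with `getattr`. One detail worth flagging in the message above: `type(error).__class__.__name__` always evaluates to `'type'`; `type(error).__name__` yields the intended exception class name. A runnable reduction of the module trick:

    import types

    def make_dynamic_module(code: str, name: str = "dynamic_module_example") -> types.ModuleType:
        module = types.ModuleType(name)
        exec(code, module.__dict__)  # user code lands in the module namespace
        return module

    module = make_dynamic_module("def run(self, x):\n    return {'doubled': 2 * x}")
    print(module.run(None, 21)["doubled"])  # 42

    error = ValueError("boom")
    print(type(error).__class__.__name__)  # 'type' - what the message prints
    print(type(error).__name__)            # 'ValueError' - what was meant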
Error message: {error}", context="workflow_compilation | dynamic_block_compilation | dynamic_module_creation", diff --git a/inference/core/workflows/execution_engine/introspection/blocks_loader.py b/inference/core/workflows/execution_engine/introspection/blocks_loader.py index 1d46262339..f8b45e6433 100644 --- a/inference/core/workflows/execution_engine/introspection/blocks_loader.py +++ b/inference/core/workflows/execution_engine/introspection/blocks_loader.py @@ -28,7 +28,9 @@ WORKFLOWS_CORE_PLUGIN_NAME = "workflows_core" -def describe_available_blocks(dynamic_blocks: List[BlockSpecification]) -> BlocksDescription: +def describe_available_blocks( + dynamic_blocks: List[BlockSpecification], +) -> BlocksDescription: blocks = load_workflow_blocks() + dynamic_blocks result = [] for block in blocks: diff --git a/inference/core/workflows/execution_engine/introspection/entities.py b/inference/core/workflows/execution_engine/introspection/entities.py index 32849ca436..d86ae00e84 100644 --- a/inference/core/workflows/execution_engine/introspection/entities.py +++ b/inference/core/workflows/execution_engine/introspection/entities.py @@ -90,11 +90,13 @@ class DiscoveredConnections: class BlockDescription(BaseModel): - manifest_class: Type[WorkflowBlockManifest] = Field(exclude=True) - block_class: Union[ - Type[WorkflowBlock], - Callable[[Type[WorkflowBlockManifest]], Type[WorkflowBlock]], - ] = Field(exclude=True) + manifest_class: Union[Type[WorkflowBlockManifest], Type[BaseModel]] = Field( + exclude=True + ) + # Type[BaseModel] here is to let dynamic blocks being BaseModel to pass validation - but that should be + # the only case for using this type in this field. Dynamic blocks implements the same interface, yet due + # to dynamic nature of creation - cannot be initialised as abstract class WorkflowBlockManifest + block_class: Type[WorkflowBlock] = Field(exclude=True) block_schema: dict = Field( description="OpenAPI specification of block manifest that " "can be used to create workflow step in JSON definition." 
diff --git a/tests/inference/integration_tests/test_workflow_endpoints.py b/tests/inference/integration_tests/test_workflow_endpoints.py index c40c2191b1..b967ca21f6 100644 --- a/tests/inference/integration_tests/test_workflow_endpoints.py +++ b/tests/inference/integration_tests/test_workflow_endpoints.py @@ -30,22 +30,28 @@ def test_getting_blocks_descriptions_using_legacy_get_endpoint(server_url) -> No assert ( len(response_data["primitives_connections"]) > 0 ), "Expected some primitive parameters for steps to be declared" - assert "universal_query_language_description" in response_data, "Expected universal_query_language_description key to be present in response" - assert "dynamic_block_definition_schema" in response_data, "Expected key `dynamic_block_definition_schema` to be present in response" + assert ( + "universal_query_language_description" in response_data + ), "Expected universal_query_language_description key to be present in response" + assert ( + "dynamic_block_definition_schema" in response_data + ), "Expected key `dynamic_block_definition_schema` to be present in response" -def test_getting_blocks_descriptions_using_new_post_endpoint_with_dynamic_steps(server_url) -> None: +def test_getting_blocks_descriptions_using_new_post_endpoint_with_dynamic_steps( + server_url, +) -> None: # given function_code = """ - def my_function(self, prediction: sv.Detections, crops: Batch[WorkflowImageData]) -> BlockResult: - detection_id2bbox = { - detection_id.item(): i for i, detection_id in enumerate(prediction.data["detection_id"]) - } - results = [] - for crop in crops: - parent_id = crop.parent_metadata.parent_id - results.append({"associated_detections": prediction[detection_id2bbox[parent_id]]}) - return results +def my_function(self, prediction: sv.Detections, crops: Batch[WorkflowImageData]) -> BlockResult: + detection_id2bbox = { + detection_id.item(): i for i, detection_id in enumerate(prediction.data["detection_id"]) + } + results = [] + for crop in crops: + parent_id = crop.parent_metadata.parent_id + results.append({"associated_detections": prediction[detection_id2bbox[parent_id]]}) + return results """ dynamic_blocks_definitions = [ { @@ -94,7 +100,7 @@ def my_function(self, prediction: sv.Detections, crops: Batch[WorkflowImageData] # when response = requests.post( f"{server_url}/workflows/blocks/describe", - json={"dynamic_blocks_definitions": dynamic_blocks_definitions} + json={"dynamic_blocks_definitions": dynamic_blocks_definitions}, ) # then @@ -118,8 +124,89 @@ def my_function(self, prediction: sv.Detections, crops: Batch[WorkflowImageData] assert ( len(response_data["primitives_connections"]) > 0 ), "Expected some primitive parameters for steps to be declared" - assert "universal_query_language_description" in response_data, "Expected universal_query_language_description key to be present in response" - assert "dynamic_block_definition_schema" in response_data, "Expected key `dynamic_block_definition_schema` to be present in response" + assert ( + "universal_query_language_description" in response_data + ), "Expected universal_query_language_description key to be present in response" + assert ( + "dynamic_block_definition_schema" in response_data + ), "Expected key `dynamic_block_definition_schema` to be present in response" + types_compatible_with_object_detection_predictions = { + e["manifest_type_identifier"] + for e in response_data["kinds_connections"][ + "Batch[object_detection_prediction]" + ] + } + assert ( + "DetectionsToCropsAssociation" + in 
types_compatible_with_object_detection_predictions + ), "Expected dynamic block to be manifested in connections" + + +def test_getting_blocks_descriptions_using_new_post_endpoint_with_dynamic_steps_when_steps_are_malformed( + server_url, +) -> None: + # given + function_code = """ +def my_function(self, prediction: sv.Detections, crops: Batch[WorkflowImageData]) -> BlockResult: + pass + """ + dynamic_blocks_definitions = [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "DetectionsToCropsAssociation", + "inputs": { + "prediction": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], + "is_dimensionality_reference": True, + "selector_data_kind": { + "step_output": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ] + }, + }, + "crops": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output_image"], + "is_dimensionality_reference": True, + "dimensionality_offset": 1, + }, + }, + "outputs": { + "associated_detections": { + "type": "DynamicOutputDefinition", + "kind": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": function_code, + "run_function_name": "my_function", + }, + }, + ] + + # when + response = requests.post( + f"{server_url}/workflows/blocks/describe", + json={"dynamic_blocks_definitions": dynamic_blocks_definitions}, + ) + + # then + assert response.status_code == 400, "Expected bad request to be manifested" + response_data = response.json() + assert ( + "dimensionality reference" in response_data["message"] + ), "Expected the cause of problem being dimensionality reference declaration" def test_getting_dynamic_outputs(server_url: str) -> None: @@ -206,7 +293,7 @@ def infer(self, image: WorkflowImageData) -> BlockResult: """ valid_workflow_definition = { "version": "1.0", - "inputs": [ + "inputs": [ {"type": "WorkflowImage", "name": "image"}, ], "dynamic_blocks_definitions": [ @@ -312,7 +399,7 @@ def test_compilation_endpoint_when_compilation_fails( def test_workflow_run( server_url: str, - clean_loaded_models_fixture + clean_loaded_models_fixture, ) -> None: # given valid_workflow_definition = { @@ -374,3 +461,104 @@ def test_workflow_run( assert ( len(response_data["outputs"][1]["result"]["predictions"]) == 6 ), "Expected to see 6 predictions" + + +FUNCTION_TO_GET_MAXIMUM_CONFIDENCE_FROM_BATCH_OF_DETECTIONS = """ +def run(self, predictions: Batch[sv.Detections]) -> BlockResult: + result = [] + for prediction in predictions: + result.append({"max_confidence": np.max(prediction.confidence).item()}) + return result +""" + +WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "MaxConfidence", + "inputs": { + "predictions": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], + }, + }, + "outputs": { + "max_confidence": { + "type": "DynamicOutputDefinition", + "kind": ["float_zero_to_one"], + } + }, + "accepts_batch_input": True, + }, + "code": { + "type": "PythonCode", + "run_function_code": FUNCTION_TO_GET_MAXIMUM_CONFIDENCE_FROM_BATCH_OF_DETECTIONS, + }, + }, + ], + "steps": [ + { + "type": 
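Note that the submitted run functions reference `np`, `sv`, `Batch` and `WorkflowImageData` without importing them, which implies the scaffolding seeds the dynamic module with those common symbols before exec'ing user code, on top of whatever the optional `imports` list adds. A reduced sketch of such seeding (the exact seeded set is an assumption):

    import numpy as np

    seeded_namespace = {"np": np}  # assumed: inference also seeds sv, Batch, ...
    user_code = (
        "def run(self, confidences):\n"
        "    return {'max_confidence': float(np.max(confidences))}"
    )
    exec(user_code, seeded_namespace)
    print(seeded_namespace["run"](None, [0.3, 0.9, 0.5]))  # {'max_confidence': 0.9}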
"RoboflowObjectDetectionModel", + "name": "model", + "image": "$inputs.image", + "model_id": "yolov8n-640", + }, + { + "type": "MaxConfidence", + "name": "confidence_aggregation", + "predictions": "$steps.model.predictions", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "max_confidence", + "selector": "$steps.confidence_aggregation.max_confidence", + }, + ], +} + + +def test_workflow_run_when_dynamic_block_is_in_use( + server_url: str, + clean_loaded_models_fixture, +) -> None: + # when + response = requests.post( + f"{server_url}/workflows/run", + json={ + "specification": WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH, + "api_key": API_KEY, + "inputs": { + "image": [ + { + "type": "url", + "value": "https://media.roboflow.com/fruit.png", + } + ] + * 2, + }, + }, + ) + + # then + response.raise_for_status() + response_data = response.json() + assert isinstance( + response_data["outputs"], list + ), "Expected list of elements to be returned" + assert ( + len(response_data["outputs"]) == 2 + ), "Two images submitted - two responses expected" + assert set(response_data["outputs"][0].keys()) == { + "max_confidence" + }, "Expected only `max_confidence` output" + assert set(response_data["outputs"][1].keys()) == { + "max_confidence" + }, "Expected only `max_confidence` output" diff --git a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py index 29510dcd0b..d74642e2d9 100644 --- a/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py +++ b/tests/workflows/integration_tests/execution/test_workflow_with_custom_python_block.py @@ -6,8 +6,10 @@ from inference.core.env import WORKFLOWS_MAX_CONCURRENT_STEPS from inference.core.managers.base import ModelManager from inference.core.workflows.core_steps.common.entities import StepExecutionMode -from inference.core.workflows.errors import BlockInterfaceError, \ - WorkflowEnvironmentConfigurationError +from inference.core.workflows.errors import ( + DynamicBlockError, + WorkflowEnvironmentConfigurationError, +) from inference.core.workflows.execution_engine.core import ExecutionEngine from inference.core.workflows.execution_engine.dynamic_blocks import block_assembler @@ -776,7 +778,7 @@ async def test_workflow_with_custom_python_block_when_code_cannot_be_compiled( } # when - with pytest.raises(BlockInterfaceError): + with pytest.raises(DynamicBlockError): _ = ExecutionEngine.init( workflow_definition=WORKFLOW_WITH_CODE_THAT_DOES_NOT_COMPILE, init_parameters=workflow_init_parameters, @@ -843,7 +845,7 @@ async def test_workflow_with_custom_python_block_when_code_does_not_define_decla } # when - with pytest.raises(BlockInterfaceError): + with pytest.raises(DynamicBlockError): _ = ExecutionEngine.init( workflow_definition=WORKFLOW_WITHOUT_RUN_FUNCTION, init_parameters=workflow_init_parameters, @@ -916,7 +918,7 @@ async def test_workflow_with_custom_python_block_when_code_does_not_define_decla } # when - with pytest.raises(BlockInterfaceError): + with pytest.raises(DynamicBlockError): _ = ExecutionEngine.init( workflow_definition=WORKFLOW_WITHOUT_DECLARED_INIT_FUNCTION, init_parameters=workflow_init_parameters, diff --git a/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py b/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py index d7fba8f6de..68dd7202c7 100644 --- 
a/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py +++ b/tests/workflows/unit_tests/execution_engine/introspection/test_blocks_loader.py @@ -174,7 +174,7 @@ def test_load_initializers_when_plugin_exists_and_initializers_provided() -> Non result = load_initializers() # then - assert len(result) == 6 + assert len(result) == 5 assert ( result[ "tests.workflows.unit_tests.execution_engine.introspection.plugin_with_initializers.a" @@ -199,7 +199,7 @@ def test_describe_available_blocks_when_valid_plugins_are_loaded( ) # when - result = describe_available_blocks() + result = describe_available_blocks(dynamic_blocks=[]) # then assert len(result.blocks) == 2, "Expected 2 blocks to be loaded" @@ -224,7 +224,7 @@ def test_describe_available_blocks_when_plugins_duplicate_class_names( # when with pytest.raises(PluginLoadingError): - _ = describe_available_blocks() + _ = describe_available_blocks(dynamic_blocks=[]) @mock.patch.object(blocks_loader, "load_workflow_blocks") @@ -238,4 +238,4 @@ def test_describe_available_blocks_when_plugins_duplicate_type_identifiers( # when with pytest.raises(PluginLoadingError): - _ = describe_available_blocks() + _ = describe_available_blocks(dynamic_blocks=[]) From d43e75a98a9e6c49cad5609ea564ede883e44709 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Wed, 17 Jul 2024 11:58:34 +0200 Subject: [PATCH 09/15] Bump version --- inference/core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/inference/core/version.py b/inference/core/version.py index 9a9b0d4a7a..bed69e5243 100644 --- a/inference/core/version.py +++ b/inference/core/version.py @@ -1,4 +1,4 @@ -__version__ = "0.14.1" +__version__ = "0.14.2" if __name__ == "__main__": From 18690a40817aa13e1cae8d741e54b0389f3b19df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Wed, 17 Jul 2024 12:16:52 +0200 Subject: [PATCH 10/15] Add hosted platform tests --- inference/core/interfaces/http/http_api.py | 2 + .../hosted_platform_tests/conftest.py | 3 - .../hosted_platform_tests/test_workflows.py | 227 +++++++++++++++++- 3 files changed, 227 insertions(+), 5 deletions(-) diff --git a/inference/core/interfaces/http/http_api.py b/inference/core/interfaces/http/http_api.py index a0762d7950..0e771d5564 100644 --- a/inference/core/interfaces/http/http_api.py +++ b/inference/core/interfaces/http/http_api.py @@ -920,6 +920,8 @@ async def describe_workflows_blocks( async def get_dynamic_block_outputs( step_manifest: Dict[str, Any] ) -> List[OutputDefinition]: + # Potentially TODO: dynamic blocks do not support dynamic outputs, but if it changes + # we need to provide dynamic blocks manifests here dummy_workflow_definition = { "version": "1.0", "inputs": [], diff --git a/tests/inference/hosted_platform_tests/conftest.py b/tests/inference/hosted_platform_tests/conftest.py index 0d9ef9af0c..43bc043592 100644 --- a/tests/inference/hosted_platform_tests/conftest.py +++ b/tests/inference/hosted_platform_tests/conftest.py @@ -249,6 +249,3 @@ def retry_at_max_n_times(function: callable, n: int, function_description: str) return None attempts += 1 raise Exception(f"Could not achieve success of {function_description}") - - - diff --git a/tests/inference/hosted_platform_tests/test_workflows.py b/tests/inference/hosted_platform_tests/test_workflows.py index f5beeb4015..3a1081c99b 100644 --- a/tests/inference/hosted_platform_tests/test_workflows.py +++ b/tests/inference/hosted_platform_tests/test_workflows.py @@ -5,7 +5,9 @@ 
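The hosted-platform tests that follow expect HTTP 500 rather than 400: the lambda images bake `ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False`, and `WorkflowEnvironmentConfigurationError` is not listed in the 400 branch of `with_route_exceptions`, so it falls through to the generic handler. A toy sketch of that dispatch, an assumed simplification of the real route wrapper:

    class DynamicBlockError(Exception):
        """Malformed user-supplied definition - a client error."""

    class WorkflowEnvironmentConfigurationError(Exception):
        """Installation forbids the feature - not in the 400 branch."""

    def status_for(error: Exception) -> int:
        return 400 if isinstance(error, DynamicBlockError) else 500

    print(status_for(DynamicBlockError()))                      # 400
    print(status_for(WorkflowEnvironmentConfigurationError()))  # 500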
@pytest.mark.flaky(retries=4, delay=1) -def test_getting_schemas(object_detection_service_url: str) -> None: +def test_getting_schemas_from_legacy_get_endpoint( + object_detection_service_url: str, +) -> None: # when response = requests.get(f"{object_detection_service_url}/workflows/blocks/describe") @@ -27,6 +29,115 @@ def test_getting_schemas(object_detection_service_url: str) -> None: assert ( len(response_data["primitives_connections"]) > 0 ), "Expected some primitive parameters for steps to be declared" + assert ( + "dynamic_block_definition_schema" in response_data + ), "Expected key `dynamic_block_definition_schema` to be present in response" + + +@pytest.mark.flaky(retries=4, delay=1) +def test_getting_schemas_from_new_post_endpoint( + object_detection_service_url: str, +) -> None: + # when + response = requests.post( + f"{object_detection_service_url}/workflows/blocks/describe" + ) + + # then + response.raise_for_status() + response_data = response.json() + assert set(response_data.keys()) == { + "blocks", + "declared_kinds", + "kinds_connections", + "primitives_connections", + "universal_query_language_description", + } + assert len(response_data["blocks"]) > 0, "Some blocs expected to be added" + assert len(response_data["declared_kinds"]) > 0, "Some kinds must be declared" + assert len(response_data["declared_kinds"]) >= len( + response_data["kinds_connections"] + ), "Kinds connections declared as inputs for blocks must be at most in number of all declared kinds" + assert ( + len(response_data["primitives_connections"]) > 0 + ), "Expected some primitive parameters for steps to be declared" + assert ( + "dynamic_block_definition_schema" in response_data + ), "Expected key `dynamic_block_definition_schema` to be present in response" + + +FUNCTION = """ +def my_function(self, prediction: sv.Detections, crops: Batch[WorkflowImageData]) -> BlockResult: + detection_id2bbox = { + detection_id.item(): i for i, detection_id in enumerate(prediction.data["detection_id"]) + } + results = [] + for crop in crops: + parent_id = crop.parent_metadata.parent_id + results.append({"associated_detections": prediction[detection_id2bbox[parent_id]]}) + return results + """ +DYNAMIC_BLOCKS_DEFINITION = [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "DetectionsToCropsAssociation", + "inputs": { + "prediction": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], + "selector_data_kind": { + "step_output": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ] + }, + }, + "crops": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output_image"], + "is_dimensionality_reference": True, + "dimensionality_offset": 1, + }, + }, + "outputs": { + "associated_detections": { + "type": "DynamicOutputDefinition", + "kind": [ + "Batch[object_detection_prediction]", + "Batch[instance_segmentation_prediction]", + "Batch[keypoint_detection_prediction]", + ], + } + }, + }, + "code": { + "type": "PythonCode", + "run_function_code": FUNCTION, + "run_function_name": "my_function", + }, + }, +] + + +@pytest.mark.flaky(retries=4, delay=1) +def test_getting_schemas_from_new_post_endpoint_with_dynamic_blocks( + object_detection_service_url: str, +) -> None: + # when + response = requests.post( + f"{object_detection_service_url}/workflows/blocks/describe", + json={"dynamic_blocks_definitions": DYNAMIC_BLOCKS_DEFINITION}, + ) + + # then + assert 
response.status_code == 500 + response_data = response.json() + assert ( + "Cannot use dynamic blocks with custom Python code" in response_data["message"] + ), "Expected execution to be prevented" @pytest.mark.flaky(retries=4, delay=1) @@ -410,7 +521,7 @@ def test_ocr_workflow_run_when_run_expected_to_succeed( @pytest.mark.flaky(retries=4, delay=1) def test_yolo_world_workflow_run_when_run_expected_to_succeed( - object_detection_service_url: str, detection_model_id: str + object_detection_service_url: str, ) -> None: # when response = requests.post( @@ -440,3 +551,115 @@ def test_yolo_world_workflow_run_when_run_expected_to_succeed( assert ( len(response_data["outputs"]) == 2 ), "Two images submitted - two response expected" + + +FUNCTION_TO_GET_MAXIMUM_CONFIDENCE_FROM_BATCH_OF_DETECTIONS = """ +def run(self, predictions: Batch[sv.Detections]) -> BlockResult: + result = [] + for prediction in predictions: + result.append({"max_confidence": np.max(prediction.confidence).item()}) + return result +""" + +WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH = { + "version": "1.0", + "inputs": [ + {"type": "WorkflowImage", "name": "image"}, + {"type": "WorkflowParameter", "name": "model_id"}, + ], + "dynamic_blocks_definitions": [ + { + "type": "DynamicBlockDefinition", + "manifest": { + "type": "ManifestDescription", + "block_type": "MaxConfidence", + "inputs": { + "predictions": { + "type": "DynamicInputDefinition", + "selector_types": ["step_output"], + }, + }, + "outputs": { + "max_confidence": { + "type": "DynamicOutputDefinition", + "kind": ["float_zero_to_one"], + } + }, + "accepts_batch_input": True, + }, + "code": { + "type": "PythonCode", + "run_function_code": FUNCTION_TO_GET_MAXIMUM_CONFIDENCE_FROM_BATCH_OF_DETECTIONS, + }, + }, + ], + "steps": [ + { + "type": "RoboflowObjectDetectionModel", + "name": "model", + "image": "$inputs.image", + "model_id": "$inputs.model_id", + }, + { + "type": "MaxConfidence", + "name": "confidence_aggregation", + "predictions": "$steps.model.predictions", + }, + ], + "outputs": [ + { + "type": "JsonField", + "name": "max_confidence", + "selector": "$steps.confidence_aggregation.max_confidence", + }, + ], +} + + +@pytest.mark.flaky(retries=4, delay=1) +def test_workflow_run_with_dynamic_blocks( + object_detection_service_url: str, detection_model_id: str +) -> None: + # when + response = requests.post( + f"{object_detection_service_url}/workflows/run", + json={ + "specification": WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH, + "api_key": ROBOFLOW_API_KEY, + "inputs": { + "image": [ + { + "type": "url", + "value": "https://media.roboflow.com/fruit.png", + } + ] + * 2, + "model_id": detection_model_id, + }, + }, + ) + + # then + assert response.status_code == 500 + response_data = response.json() + assert ( + "Cannot use dynamic blocks with custom Python code" in response_data["message"] + ), "Expected execution to be prevented" + + +@pytest.mark.flaky(retries=4, delay=1) +def test_workflow_validate_with_dynamic_blocks( + object_detection_service_url: str, detection_model_id: str +) -> None: + # when + response = requests.post( + f"{object_detection_service_url}/workflows/validate", + json=WORKFLOW_WITH_PYTHON_BLOCK_RUNNING_ON_BATCH, + ) + + # then + assert response.status_code == 500 + response_data = response.json() + assert ( + "Cannot use dynamic blocks with custom Python code" in response_data["message"] + ), "Expected execution to be prevented" From 4003812b003afd777eaa5c824a80d6e4a39bff1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= 
Date: Wed, 17 Jul 2024 13:57:02 +0200 Subject: [PATCH 11/15] Add part of unit tests for functionalities --- inference/core/version.py | 2 +- .../dynamic_blocks/block_assembler.py | 63 ++- .../dynamic_blocks/block_scaffolding.py | 1 - .../dynamic_blocs/__init__.py | 0 .../dynamic_blocs/test_block_assembler.py | 441 ++++++++++++++++++ .../dynamic_blocs/test_block_scaffolding.py | 7 + 6 files changed, 502 insertions(+), 12 deletions(-) create mode 100644 tests/workflows/unit_tests/execution_engine/dynamic_blocs/__init__.py create mode 100644 tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py create mode 100644 tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py diff --git a/inference/core/version.py b/inference/core/version.py index bed69e5243..e77f099111 100644 --- a/inference/core/version.py +++ b/inference/core/version.py @@ -1,4 +1,4 @@ -__version__ = "0.14.2" +__version__ = "0.14.2rc1" if __name__ == "__main__": diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py index dbd2397c0c..a03cba1e6e 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py @@ -1,3 +1,4 @@ +from copy import deepcopy from typing import Any, Dict, List, Literal, Optional, Tuple, Type, Union from uuid import uuid4 @@ -119,6 +120,7 @@ def assembly_dynamic_block_manifest( **inputs_definitions, ) outputs_definitions = build_outputs_definitions( + block_type=manifest_description.block_type, outputs=manifest_description.outputs, kinds_lookup=kinds_lookup, ) @@ -181,11 +183,14 @@ def build_input_field_type( ) -> type: input_type_union_elements = collect_python_types_for_selectors( block_type=block_type, + input_name=input_name, input_definition=input_definition, kinds_lookup=kinds_lookup, ) input_type_union_elements += collect_python_types_for_values( - input_definition=input_definition + block_type=block_type, + input_name=input_name, + input_definition=input_definition, ) if not input_type_union_elements: raise DynamicBlockError( @@ -204,6 +209,7 @@ def build_input_field_type( def collect_python_types_for_selectors( block_type: str, + input_name: str, input_definition: DynamicInputDefinition, kinds_lookup: Dict[str, Kind], ) -> List[type]: @@ -216,9 +222,9 @@ def collect_python_types_for_selectors( for kind_name in selector_kind_names: if kind_name not in kinds_lookup: raise DynamicBlockError( - public_message=f"Could not find kind with name {kind_name} declared for dynamic block " - f"`{block_type}` within kinds that would be recognised by Execution Engine knowing the " - f"following kinds: {list(kinds_lookup.keys())}.", + public_message=f"Could not find kind with name `{kind_name}` declared for input `{input_name}` " + f"of dynamic block `{block_type}` within kinds that would be recognised by Execution " + f"Engine knowing the following kinds: {list(kinds_lookup.keys())}.", context="workflow_compilation | dynamic_block_compilation | manifest_compilation", ) selector_kind.append(kinds_lookup[kind_name]) @@ -228,16 +234,31 @@ def collect_python_types_for_selectors( result.append(StepOutputImageSelector) elif selector_type is SelectorType.INPUT_PARAMETER: result.append(WorkflowParameterSelector(kind=selector_kind)) - else: + elif selector_type is SelectorType.STEP_OUTPUT: result.append(StepOutputSelector(kind=selector_kind)) + else: + 
raise DynamicBlockError( + public_message=f"Could not recognise selector type `{selector_type}` declared for input `{input_name}` " + f"of dynamic block `{block_type}`.", + context="workflow_compilation | dynamic_block_compilation | manifest_compilation", + ) return result def collect_python_types_for_values( + block_type: str, + input_name: str, input_definition: DynamicInputDefinition, ) -> List[type]: result = [] for value_type_name in input_definition.value_types: + if value_type_name not in PYTHON_TYPES_MAPPING: + raise DynamicBlockError( + public_message=f"Could not resolve Python type `{value_type_name}` declared for input `{input_name}` " + f"of dynamic block `{block_type}` within types that would be recognised by Execution " + f"Engine knowing the following types: {list(PYTHON_TYPES_MAPPING.keys())}.", + context="workflow_compilation | dynamic_block_compilation | manifest_compilation", + ) value_type = PYTHON_TYPES_MAPPING[value_type_name] result.append(value_type) return result @@ -249,7 +270,7 @@ def build_input_field_metadata(input_definition: DynamicInputDefinition) -> Fiel default_value = input_definition.default_value field_metadata_params = {} if default_holds_compound_object(default_value=default_value): - field_metadata_params["default_factory"] = lambda: default_value + field_metadata_params["default_factory"] = lambda: deepcopy(default_value) else: field_metadata_params["default"] = default_value field_metadata = Field(**field_metadata_params) @@ -265,6 +286,7 @@ def default_holds_compound_object(default_value: Any) -> bool: def build_outputs_definitions( + block_type: str, outputs: Dict[str, DynamicOutputDefinition], kinds_lookup: Dict[str, Kind], ) -> List[OutputDefinition]: @@ -273,14 +295,35 @@ def build_outputs_definitions( if not definition.kind: result.append(OutputDefinition(name=name, kind=[WILDCARD_KIND])) else: - actual_kinds = [ - kinds_lookup.get(kind_name, Kind(name=kind_name)) - for kind_name in definition.kind - ] + actual_kinds = collect_actual_kinds_for_output( + block_type=block_type, + output_name=name, + output=definition, + kinds_lookup=kinds_lookup, + ) result.append(OutputDefinition(name=name, kind=actual_kinds)) return result +def collect_actual_kinds_for_output( + block_type: str, + output_name: str, + output: DynamicOutputDefinition, + kinds_lookup: Dict[str, Kind], +) -> List[Kind]: + actual_kinds = [] + for kind_name in output.kind: + if kind_name not in kinds_lookup: + raise DynamicBlockError( + public_message=f"Could not find kind with name `{kind_name}` declared for output `{output_name}` " + f"of dynamic block `{block_type}` within kinds that would be recognised by Execution " + f"Engine knowing the following kinds: {list(kinds_lookup.keys())}.", + context="workflow_compilation | dynamic_block_compilation | manifest_compilation", + ) + actual_kinds.append(kinds_lookup[kind_name]) + return actual_kinds + + def collect_input_dimensionality_offsets( inputs: Dict[str, DynamicInputDefinition], ) -> Dict[str, int]: diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py index 97498030ce..277969d9bd 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py @@ -3,7 +3,6 @@ from inference.core.env import ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS from inference.core.workflows.errors import ( - BlockInterfaceError, 
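The switch to `default_factory=lambda: deepcopy(default_value)` above closes a subtle sharing bug: without the copy, every manifest instance would hand out the same mutable default object. A self-contained illustration:

    from copy import deepcopy
    from pydantic import BaseModel, Field

    default_value = {"thresholds": [0.5]}

    class Manifest(BaseModel):
        params: dict = Field(default_factory=lambda: deepcopy(default_value))

    first, second = Manifest(), Manifest()
    first.params["thresholds"].append(0.9)
    print(second.params)  # {'thresholds': [0.5]} - unaffected thanks to the copy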
DynamicBlockError, WorkflowEnvironmentConfigurationError, ) diff --git a/tests/workflows/unit_tests/execution_engine/dynamic_blocs/__init__.py b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py new file mode 100644 index 0000000000..f47841a2b2 --- /dev/null +++ b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py @@ -0,0 +1,441 @@ +from typing import Union, Type +from unittest import mock + +import pytest +from pydantic import ValidationError +from pydantic_core import PydanticUndefinedType + +from inference.core.workflows.entities.base import OutputDefinition +from inference.core.workflows.entities.types import Kind, WILDCARD_KIND, WorkflowParameterSelector, \ + WorkflowImageSelector, StepOutputImageSelector, StepOutputSelector +from inference.core.workflows.errors import DynamicBlockError +from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import \ + pick_dimensionality_reference_property, build_outputs_definitions, collect_input_dimensionality_offsets, \ + build_input_field_metadata, collect_python_types_for_values, collect_python_types_for_selectors, \ + create_dynamic_block_specification +from inference.core.workflows.execution_engine.dynamic_blocks.entities import DynamicInputDefinition, SelectorType, \ + ValueType, DynamicOutputDefinition, DynamicBlockDefinition, ManifestDescription, PythonCode +from inference.core.workflows.execution_engine.dynamic_blocks import block_assembler + + +def test_pick_dimensionality_reference_property_when_there_is_no_reference_property() -> None: + # given + inputs = { + "a": DynamicInputDefinition(type="DynamicInputDefinition", selector_types=[SelectorType.INPUT_PARAMETER]), + "b": DynamicInputDefinition(type="DynamicInputDefinition", value_types=[ValueType.INTEGER]), + } + + # when + result = pick_dimensionality_reference_property( + block_type="some", + inputs=inputs, + ) + + # then + assert result is None + + +def test_pick_dimensionality_reference_property_when_there_is_single_reference_property() -> None: + # given + inputs = { + "a": DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[SelectorType.INPUT_PARAMETER], + is_dimensionality_reference=True, + ), + "b": DynamicInputDefinition(type="DynamicInputDefinition", value_types=[ValueType.INTEGER]), + } + + # when + result = pick_dimensionality_reference_property( + block_type="some", + inputs=inputs, + ) + + # then + assert result == "a", "Expected `a` to be picked as dimensionality reference" + + +def test_pick_dimensionality_reference_property_when_there_are_multiple_reference_properties() -> None: + # given + inputs = { + "a": DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[SelectorType.INPUT_PARAMETER], + is_dimensionality_reference=True, + ), + "b": DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.INTEGER], + is_dimensionality_reference=True, + ), + } + + # when + with pytest.raises(DynamicBlockError): + _ = pick_dimensionality_reference_property( + block_type="some", + inputs=inputs, + ) + + +def test_build_outputs_definitions_when_build_should_succeed() -> None: + # given + outputs = { + "a": DynamicOutputDefinition(type="DynamicOutputDefinition"), + "b": 
DynamicOutputDefinition(type="DynamicOutputDefinition", kind=["string", "integer"]), + } + kinds_lookup = { + "*": WILDCARD_KIND, + "string": Kind(name="string"), + "integer": Kind(name="integer") + } + + # when + result = build_outputs_definitions( + block_type="some", + outputs=outputs, + kinds_lookup=kinds_lookup, + ) + + # then + assert result == [ + OutputDefinition(name="a", kind=[WILDCARD_KIND]), + OutputDefinition(name="b", kind=[kinds_lookup["string"], kinds_lookup["integer"]]) + ], "Expected outputs to be built such that `a` has * kind and `b` has exactly the kinds that were defined" + + +def test_build_outputs_definitions_when_build_should_fail_on_not_recognised_kind() -> None: + # given + outputs = { + "a": DynamicOutputDefinition(type="DynamicOutputDefinition"), + "b": DynamicOutputDefinition(type="DynamicOutputDefinition", kind=["string", "integer"]), + } + kinds_lookup = { + "*": WILDCARD_KIND, + "string": Kind(name="string"), + } + + # when + with pytest.raises(DynamicBlockError): + _ = build_outputs_definitions( + block_type="some", + outputs=outputs, + kinds_lookup=kinds_lookup, + ) + + +def test_collect_input_dimensionality_offsets() -> None: + # given + inputs = { + "a": DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[SelectorType.INPUT_PARAMETER], + dimensionality_offset=1, + ), + "b": DynamicInputDefinition(type="DynamicInputDefinition", selector_types=[SelectorType.INPUT_PARAMETER],), + "c": DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[SelectorType.INPUT_PARAMETER], + dimensionality_offset=-1, + ), + } + + # when + result = collect_input_dimensionality_offsets(inputs=inputs) + + # then + assert result == {"a": 1, "c": -1}, "Expected only entries with non-default value be given in results" + + +def test_build_input_field_metadata_for_field_without_default_value() -> None: + # given + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[SelectorType.INPUT_PARAMETER], + dimensionality_offset=1, + ) + + # when + result = build_input_field_metadata(input_definition=input_definition) + + # then + assert isinstance(result.default, PydanticUndefinedType) + + +def test_build_input_field_metadata_for_field_without_default_being_none() -> None: + # given + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.INTEGER], + is_optional=True, + has_default_value=True, + ) + + # when + result = build_input_field_metadata(input_definition=input_definition) + + # then + assert result.default is None + + +def test_build_input_field_metadata_for_field_without_default_being_primitive() -> None: + # given + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.INTEGER], + is_optional=True, + has_default_value=True, + default_value=3. 
+ ) + + # when + result = build_input_field_metadata(input_definition=input_definition) + + # then + assert result.default == 3 + + +@pytest.mark.parametrize( + "default_type", [list, set, dict] +) +def test_build_input_field_metadata_for_field_without_default_being_compound( + default_type: Union[Type[list], Type[set], Type[dict]], +) -> None: + # given + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.LIST], + has_default_value=True, + default_value=default_type() + ) + + # when + result = build_input_field_metadata(input_definition=input_definition) + + # then + assert result.default_factory() == default_type(), "Expected default_factory used creates new instance of compound element" + + +@pytest.mark.parametrize( + "default_value", [ + [2, 3, 4], + {"a", "b", "c"}, + {"a": 1, "b": 2} + ] +) +def test_build_input_field_metadata_for_field_without_default_being_non_empty_compound( + default_value: Union[set, list, dict], +) -> None: + # given + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.LIST], + has_default_value=True, + default_value=default_value + ) + + # when + result = build_input_field_metadata(input_definition=input_definition) + + # then + assert result.default_factory() == default_value, "Expected default_factory to create identical instance of compound data" + assert id(result.default_factory()) != id(default_value), "Expected default_factory to create new instance of compound data" + + +def test_collect_python_types_for_values_when_types_can_be_resolved() -> None: + # given + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.LIST, ValueType.INTEGER], + ) + + # when + result = collect_python_types_for_values( + block_type="some", + input_name="a", + input_definition=input_definition, + ) + + # then + assert result == [list, int], "Expected python types to be resolved properly" + + +@mock.patch.object(block_assembler, "PYTHON_TYPES_MAPPING", {}) +def test_collect_python_types_for_values_when_type_cannot_be_resolved() -> None: + # given + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.LIST, ValueType.INTEGER], + ) + + # when + with pytest.raises(DynamicBlockError): + _ = collect_python_types_for_values( + block_type="some", + input_name="a", + input_definition=input_definition, + ) + + +def test_collect_python_types_for_selectors_when_collection_should_succeed() -> None: + # given + kinds_lookup = { + "*": WILDCARD_KIND, + "string": Kind(name="string"), + "integer": Kind(name="integer") + } + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[ + SelectorType.INPUT_PARAMETER, SelectorType.INPUT_IMAGE, + SelectorType.STEP_OUTPUT_IMAGE, SelectorType.STEP_OUTPUT, + ], + selector_data_kind={ + SelectorType.STEP_OUTPUT: ["string", "integer"] + } + ) + + # when + result = collect_python_types_for_selectors( + block_type="some", + input_name="a", + input_definition=input_definition, + kinds_lookup=kinds_lookup, + ) + + # then + + assert len(result) == 4, "Expected union of 4 types" + assert repr(result[0]) == repr(WorkflowParameterSelector(kind=[WILDCARD_KIND])), "First element of union is to be input param of kind *" + assert repr(result[1]) == repr(WorkflowImageSelector), "Second element of union is to be input image selector" + assert repr(result[2]) == repr(StepOutputImageSelector), "Third element of union is to be step output 
image selector" + assert repr(result[3]) == repr(StepOutputSelector(kind=[kinds_lookup["string"], kinds_lookup["integer"]])), "Last element of union is to be step output selector of kinds string integer" + + +def test_collect_python_types_for_selectors_when_collection_should_fail_on_unknown_kind() -> None: + # given + kinds_lookup = { + "*": WILDCARD_KIND, + "string": Kind(name="string"), + } + input_definition = DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[ + SelectorType.INPUT_PARAMETER, SelectorType.INPUT_IMAGE, + SelectorType.STEP_OUTPUT_IMAGE, SelectorType.STEP_OUTPUT, + ], + selector_data_kind={ + SelectorType.STEP_OUTPUT: ["string", "integer"] + } + ) + + # when + with pytest.raises(DynamicBlockError): + _ = collect_python_types_for_selectors( + block_type="some", + input_name="a", + input_definition=input_definition, + kinds_lookup=kinds_lookup, + ) + + +PYTHON_CODE = """ +def run(self, a, b): + return {"output": b[::-1]} +""" + + +@pytest.mark.asyncio +async def test_create_dynamic_block_specification() -> None: + # given + kinds_lookup = { + "*": WILDCARD_KIND, + "string": Kind(name="string"), + "integer": Kind(name="integer") + } + dynamic_block_definition = DynamicBlockDefinition( + type="DynamicBlockDefinition", + manifest=ManifestDescription( + type="ManifestDescription", + block_type="MyBlock", + inputs={ + "a": DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[ + SelectorType.INPUT_PARAMETER, SelectorType.STEP_OUTPUT, + ], + selector_data_kind={ + SelectorType.STEP_OUTPUT: ["string", "integer"] + } + ), + "b": DynamicInputDefinition( + type="DynamicInputDefinition", + value_types=[ValueType.LIST], + has_default_value=True, + default_value=[1, 2, 3], + ) + }, + outputs={ + "a": DynamicOutputDefinition(type="DynamicOutputDefinition"), + "b": DynamicOutputDefinition(type="DynamicOutputDefinition", kind=["string", "integer"]), + }, + output_dimensionality_offset=1, + accepts_batch_input=True, + ), + code=PythonCode( + type="PythonCode", + run_function_code=PYTHON_CODE, + ) + ) + + # when + result = create_dynamic_block_specification( + dynamic_block_definition=dynamic_block_definition, + kinds_lookup=kinds_lookup, + ) + + # then + assert result.block_source == "dynamic_workflows_blocks" + assert result.manifest_class.describe_outputs() == [ + OutputDefinition(name="a", kind=[WILDCARD_KIND]), + OutputDefinition(name="b", kind=[kinds_lookup["string"], kinds_lookup["integer"]]) + ], "Expected outputs to be built such that `a` has * kind and `b` has exactly the kinds that were defined" + assert result.manifest_class.accepts_batch_input() is True, "Manifest defined to accept batch input" + assert result.manifest_class.accepts_empty_values() is False, "Manifest defined not to accept empty input" + assert result.manifest_class.get_input_dimensionality_offsets() == {}, "No explicit offsets defined" + assert result.manifest_class.get_dimensionality_reference_property() is None, "No dimensionality reference property expected" + assert result.manifest_class.get_output_dimensionality_offset() == 1, "Expected output dimensionality offset announced" + + block_instance = result.block_class() + code_run_result = await block_instance.run(a="some", b=[1, 2, 3]) + assert code_run_result == {"output": [3, 2, 1]}, "Expected code to work properly and revert second param" + + _ = result.manifest_class.model_validate({ + "name": "some", + "type": "MyBlock", + "a": "$steps.some.a", + "b": [1, 2, 3, 4, 5] + }) # no error expected + + _ = 
result.manifest_class.model_validate({ + "name": "some", + "type": "MyBlock", + "a": "$steps.some.a", + }) # no error expected, default value for "b" defined + + with pytest.raises(ValidationError): + _ = result.manifest_class.model_validate({ + "name": "some", + "type": "MyBlock", + "a": "some", + "b": [1, 2, 3, 4, 5] + }) # error expected - value "a" without selector + + with pytest.raises(ValidationError): + _ = result.manifest_class.model_validate({ + "name": "some", + "type": "MyBlock", + "a": "$steps.some.a", + "b": 1 + }) # error expected - value "b" not a list diff --git a/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py new file mode 100644 index 0000000000..d313686b19 --- /dev/null +++ b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py @@ -0,0 +1,7 @@ +def test_create_dynamic_module_when_syntax_error_happens() -> None: + pass + + +def test_create_dynamic_module_when_creation_should_succeed() -> None: + pass + From d88a9458018ed1472ae6905f864e5a415d8aa5fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Wed, 17 Jul 2024 13:59:18 +0200 Subject: [PATCH 12/15] Make linters happy --- .../execution_engine/dynamic_blocks/block_assembler.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py index a03cba1e6e..07c55e7390 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_assembler.py @@ -239,7 +239,7 @@ def collect_python_types_for_selectors( else: raise DynamicBlockError( public_message=f"Could not recognise selector type `{selector_type}` declared for input `{input_name}` " - f"of dynamic block `{block_type}`.", + f"of dynamic block `{block_type}`.", context="workflow_compilation | dynamic_block_compilation | manifest_compilation", ) return result @@ -255,8 +255,8 @@ def collect_python_types_for_values( if value_type_name not in PYTHON_TYPES_MAPPING: raise DynamicBlockError( public_message=f"Could not resolve Python type `{value_type_name}` declared for input `{input_name}` " - f"of dynamic block `{block_type}` within types that would be recognised by Execution " - f"Engine knowing the following types: {list(PYTHON_TYPES_MAPPING.keys())}.", + f"of dynamic block `{block_type}` within types that would be recognised by Execution " + f"Engine knowing the following types: {list(PYTHON_TYPES_MAPPING.keys())}.", context="workflow_compilation | dynamic_block_compilation | manifest_compilation", ) value_type = PYTHON_TYPES_MAPPING[value_type_name] @@ -316,8 +316,8 @@ def collect_actual_kinds_for_output( if kind_name not in kinds_lookup: raise DynamicBlockError( public_message=f"Could not find kind with name `{kind_name}` declared for output `{output_name}` " - f"of dynamic block `{block_type}` within kinds that would be recognised by Execution " - f"Engine knowing the following kinds: {list(kinds_lookup.keys())}.", + f"of dynamic block `{block_type}` within kinds that would be recognised by Execution " + f"Engine knowing the following kinds: {list(kinds_lookup.keys())}.", context="workflow_compilation | dynamic_block_compilation | manifest_compilation", ) actual_kinds.append(kinds_lookup[kind_name]) From 
13dbc86da82ddcd11f0c422e6634dc77c908e16b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?=
Date: Wed, 17 Jul 2024 14:40:30 +0200
Subject: [PATCH 13/15] Fix bug spotted during tests

---
 inference/core/interfaces/http/http_api.py    |  7 ++--
 inference/core/version.py                     |  2 +-
 .../hosted_platform_tests/test_workflows.py   |  8 ++---
 .../test_workflow_endpoints.py                | 33 +++++++++++++++++++
 4 files changed, 41 insertions(+), 9 deletions(-)

diff --git a/inference/core/interfaces/http/http_api.py b/inference/core/interfaces/http/http_api.py
index 0e771d5564..ea2271e075 100644
--- a/inference/core/interfaces/http/http_api.py
+++ b/inference/core/interfaces/http/http_api.py
@@ -903,10 +903,13 @@ async def describe_workflows_blocks() -> WorkflowsBlocksDescription:
     )
     @with_route_exceptions
     async def describe_workflows_blocks(
-        request: DescribeBlocksRequest,
+        request: Optional[DescribeBlocksRequest] = None,
     ) -> WorkflowsBlocksDescription:
+        dynamic_blocks_definitions = None
+        if request is not None:
+            dynamic_blocks_definitions = request.dynamic_blocks_definitions
         return handle_describe_workflows_blocks_request(
-            dynamic_blocks_definitions=request.dynamic_blocks_definitions
+            dynamic_blocks_definitions=dynamic_blocks_definitions
         )
 
     @app.post(
diff --git a/inference/core/version.py b/inference/core/version.py
index e77f099111..c2861530de 100644
--- a/inference/core/version.py
+++ b/inference/core/version.py
@@ -1,4 +1,4 @@
-__version__ = "0.14.2rc1"
+__version__ = "0.14.2rc2"
 
 
 if __name__ == "__main__":
diff --git a/tests/inference/hosted_platform_tests/test_workflows.py b/tests/inference/hosted_platform_tests/test_workflows.py
index 3a1081c99b..0a34381f27 100644
--- a/tests/inference/hosted_platform_tests/test_workflows.py
+++ b/tests/inference/hosted_platform_tests/test_workflows.py
@@ -20,6 +20,7 @@ def test_getting_schemas_from_legacy_get_endpoint(
         "kinds_connections",
         "primitives_connections",
         "universal_query_language_description",
+        "dynamic_block_definition_schema",
     }
     assert len(response_data["blocks"]) > 0, "Some blocs expected to be added"
     assert len(response_data["declared_kinds"]) > 0, "Some kinds must be declared"
@@ -29,9 +30,6 @@ def test_getting_schemas_from_legacy_get_endpoint(
     assert (
         len(response_data["primitives_connections"]) > 0
     ), "Expected some primitive parameters for steps to be declared"
-    assert (
-        "dynamic_block_definition_schema" in response_data
-    ), "Expected key `dynamic_block_definition_schema` to be present in response"
 
 
 @pytest.mark.flaky(retries=4, delay=1)
@@ -52,6 +50,7 @@ def test_getting_schemas_from_new_post_endpoint(
         "kinds_connections",
         "primitives_connections",
         "universal_query_language_description",
+        "dynamic_block_definition_schema",
     }
     assert len(response_data["blocks"]) > 0, "Some blocs expected to be added"
     assert len(response_data["declared_kinds"]) > 0, "Some kinds must be declared"
@@ -61,9 +60,6 @@ def test_getting_schemas_from_new_post_endpoint(
     assert (
         len(response_data["primitives_connections"]) > 0
     ), "Expected some primitive parameters for steps to be declared"
-    assert (
-        "dynamic_block_definition_schema" in response_data
-    ), "Expected key `dynamic_block_definition_schema` to be present in response"
 
 
 FUNCTION = """
diff --git a/tests/inference/integration_tests/test_workflow_endpoints.py b/tests/inference/integration_tests/test_workflow_endpoints.py
index b967ca21f6..fe402c98b2 100644
--- a/tests/inference/integration_tests/test_workflow_endpoints.py
+++ b/tests/inference/integration_tests/test_workflow_endpoints.py
@@ -38,6 
+38,39 @@ def test_getting_blocks_descriptions_using_legacy_get_endpoint(server_url) -> No ), "Expected key `dynamic_block_definition_schema` to be present in response" +def test_getting_blocks_descriptions_using_new_post_endpoint(server_url) -> None: + # when + response = requests.post(f"{server_url}/workflows/blocks/describe") + + # then + response.raise_for_status() + response_data = response.json() + assert "blocks" in response_data, "Response expected to define blocks" + assert len(response_data["blocks"]) > 0, "Some blocs expected to be added" + assert ( + "declared_kinds" in response_data + ), "Declared kinds must be provided in output" + assert len(response_data["declared_kinds"]) > 0, "Some kinds must be declared" + assert ( + "kinds_connections" in response_data + ), "Kinds connections expected to be declared" + assert len(response_data["declared_kinds"]) >= len( + response_data["kinds_connections"] + ), "Kinds connections declared as inputs for blocks must be at most in number of all declared kinds" + assert ( + "primitives_connections" in response_data + ), "Primitives connections expected to be in response" + assert ( + len(response_data["primitives_connections"]) > 0 + ), "Expected some primitive parameters for steps to be declared" + assert ( + "universal_query_language_description" in response_data + ), "Expected universal_query_language_description key to be present in response" + assert ( + "dynamic_block_definition_schema" in response_data + ), "Expected key `dynamic_block_definition_schema` to be present in response" + + def test_getting_blocks_descriptions_using_new_post_endpoint_with_dynamic_steps( server_url, ) -> None: From 9994a5ab0dd959e8441c530a22f579103424673f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Wed, 17 Jul 2024 15:15:21 +0200 Subject: [PATCH 14/15] Add remaining unit tests --- .../dynamic_blocks/block_scaffolding.py | 20 +- .../dynamic_blocs/test_block_assembler.py | 244 +++++++++++------- .../dynamic_blocs/test_block_scaffolding.py | 208 ++++++++++++++- 3 files changed, 368 insertions(+), 104 deletions(-) diff --git a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py index 277969d9bd..1b1c8477c2 100644 --- a/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py +++ b/inference/core/workflows/execution_engine/dynamic_blocks/block_scaffolding.py @@ -95,23 +95,19 @@ def get_manifest(cls) -> Type[WorkflowBlockManifest]: def create_dynamic_module( block_type_name: str, python_code: PythonCode, module_name: str ) -> types.ModuleType: + imports = "\n".join(IMPORTS_LINES) + "\n" + "\n".join(python_code.imports) + "\n\n" + code = python_code.run_function_code + if python_code.init_function_code: + code += "\n\n" + python_code.init_function_code + code = imports + code try: dynamic_module = types.ModuleType(module_name) - imports = ( - "\n".join(IMPORTS_LINES) - + "\n" - + "\n".join(python_code.imports) - + "\n\n\n\n" - ) - code = python_code.run_function_code - if python_code.init_function_code: - code += "\n\n\n" + python_code.init_function_code - exec(imports + code, dynamic_module.__dict__) + exec(code, dynamic_module.__dict__) return dynamic_module except Exception as error: raise DynamicBlockError( - public_message=f"Error of type `{type(error).__class__.__name__}` encountered while attempting to " - f"create Python module with code for block: {block_type_name}. 
Error message: {error}", + public_message=f"Error of type `{error.__class__.__name__}` encountered while attempting to " + f"create Python module with code for block: {block_type_name}. Error message: {error}. Full code:\n{code}", context="workflow_compilation | dynamic_block_compilation | dynamic_module_creation", inner_error=error, ) from error diff --git a/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py index f47841a2b2..1c76126b93 100644 --- a/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py +++ b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_assembler.py @@ -1,4 +1,4 @@ -from typing import Union, Type +from typing import Type, Union from unittest import mock import pytest @@ -6,23 +6,47 @@ from pydantic_core import PydanticUndefinedType from inference.core.workflows.entities.base import OutputDefinition -from inference.core.workflows.entities.types import Kind, WILDCARD_KIND, WorkflowParameterSelector, \ - WorkflowImageSelector, StepOutputImageSelector, StepOutputSelector +from inference.core.workflows.entities.types import ( + WILDCARD_KIND, + Kind, + StepOutputImageSelector, + StepOutputSelector, + WorkflowImageSelector, + WorkflowParameterSelector, +) from inference.core.workflows.errors import DynamicBlockError -from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import \ - pick_dimensionality_reference_property, build_outputs_definitions, collect_input_dimensionality_offsets, \ - build_input_field_metadata, collect_python_types_for_values, collect_python_types_for_selectors, \ - create_dynamic_block_specification -from inference.core.workflows.execution_engine.dynamic_blocks.entities import DynamicInputDefinition, SelectorType, \ - ValueType, DynamicOutputDefinition, DynamicBlockDefinition, ManifestDescription, PythonCode from inference.core.workflows.execution_engine.dynamic_blocks import block_assembler +from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import ( + build_input_field_metadata, + build_outputs_definitions, + collect_input_dimensionality_offsets, + collect_python_types_for_selectors, + collect_python_types_for_values, + create_dynamic_block_specification, + pick_dimensionality_reference_property, +) +from inference.core.workflows.execution_engine.dynamic_blocks.entities import ( + DynamicBlockDefinition, + DynamicInputDefinition, + DynamicOutputDefinition, + ManifestDescription, + PythonCode, + SelectorType, + ValueType, +) -def test_pick_dimensionality_reference_property_when_there_is_no_reference_property() -> None: +def test_pick_dimensionality_reference_property_when_there_is_no_reference_property() -> ( + None +): # given inputs = { - "a": DynamicInputDefinition(type="DynamicInputDefinition", selector_types=[SelectorType.INPUT_PARAMETER]), - "b": DynamicInputDefinition(type="DynamicInputDefinition", value_types=[ValueType.INTEGER]), + "a": DynamicInputDefinition( + type="DynamicInputDefinition", selector_types=[SelectorType.INPUT_PARAMETER] + ), + "b": DynamicInputDefinition( + type="DynamicInputDefinition", value_types=[ValueType.INTEGER] + ), } # when @@ -35,7 +59,9 @@ def test_pick_dimensionality_reference_property_when_there_is_no_reference_prope assert result is None -def test_pick_dimensionality_reference_property_when_there_is_single_reference_property() -> None: +def 
test_pick_dimensionality_reference_property_when_there_is_single_reference_property() -> ( + None +): # given inputs = { "a": DynamicInputDefinition( @@ -43,7 +69,9 @@ def test_pick_dimensionality_reference_property_when_there_is_single_reference_p selector_types=[SelectorType.INPUT_PARAMETER], is_dimensionality_reference=True, ), - "b": DynamicInputDefinition(type="DynamicInputDefinition", value_types=[ValueType.INTEGER]), + "b": DynamicInputDefinition( + type="DynamicInputDefinition", value_types=[ValueType.INTEGER] + ), } # when @@ -56,7 +84,9 @@ def test_pick_dimensionality_reference_property_when_there_is_single_reference_p assert result == "a", "Expected `a` to be picked as dimensionality reference" -def test_pick_dimensionality_reference_property_when_there_are_multiple_reference_properties() -> None: +def test_pick_dimensionality_reference_property_when_there_are_multiple_reference_properties() -> ( + None +): # given inputs = { "a": DynamicInputDefinition( @@ -83,12 +113,14 @@ def test_build_outputs_definitions_when_build_should_succeed() -> None: # given outputs = { "a": DynamicOutputDefinition(type="DynamicOutputDefinition"), - "b": DynamicOutputDefinition(type="DynamicOutputDefinition", kind=["string", "integer"]), + "b": DynamicOutputDefinition( + type="DynamicOutputDefinition", kind=["string", "integer"] + ), } kinds_lookup = { "*": WILDCARD_KIND, "string": Kind(name="string"), - "integer": Kind(name="integer") + "integer": Kind(name="integer"), } # when @@ -101,15 +133,21 @@ def test_build_outputs_definitions_when_build_should_succeed() -> None: # then assert result == [ OutputDefinition(name="a", kind=[WILDCARD_KIND]), - OutputDefinition(name="b", kind=[kinds_lookup["string"], kinds_lookup["integer"]]) + OutputDefinition( + name="b", kind=[kinds_lookup["string"], kinds_lookup["integer"]] + ), ], "Expected outputs to be built such that `a` has * kind and `b` has exactly the kinds that were defined" -def test_build_outputs_definitions_when_build_should_fail_on_not_recognised_kind() -> None: +def test_build_outputs_definitions_when_build_should_fail_on_not_recognised_kind() -> ( + None +): # given outputs = { "a": DynamicOutputDefinition(type="DynamicOutputDefinition"), - "b": DynamicOutputDefinition(type="DynamicOutputDefinition", kind=["string", "integer"]), + "b": DynamicOutputDefinition( + type="DynamicOutputDefinition", kind=["string", "integer"] + ), } kinds_lookup = { "*": WILDCARD_KIND, @@ -133,7 +171,10 @@ def test_collect_input_dimensionality_offsets() -> None: selector_types=[SelectorType.INPUT_PARAMETER], dimensionality_offset=1, ), - "b": DynamicInputDefinition(type="DynamicInputDefinition", selector_types=[SelectorType.INPUT_PARAMETER],), + "b": DynamicInputDefinition( + type="DynamicInputDefinition", + selector_types=[SelectorType.INPUT_PARAMETER], + ), "c": DynamicInputDefinition( type="DynamicInputDefinition", selector_types=[SelectorType.INPUT_PARAMETER], @@ -145,7 +186,10 @@ def test_collect_input_dimensionality_offsets() -> None: result = collect_input_dimensionality_offsets(inputs=inputs) # then - assert result == {"a": 1, "c": -1}, "Expected only entries with non-default value be given in results" + assert result == { + "a": 1, + "c": -1, + }, "Expected only entries with non-default value be given in results" def test_build_input_field_metadata_for_field_without_default_value() -> None: @@ -186,7 +230,7 @@ def test_build_input_field_metadata_for_field_without_default_being_primitive() value_types=[ValueType.INTEGER], is_optional=True, 
has_default_value=True, - default_value=3. + default_value=3.0, ) # when @@ -196,9 +240,7 @@ def test_build_input_field_metadata_for_field_without_default_being_primitive() assert result.default == 3 -@pytest.mark.parametrize( - "default_type", [list, set, dict] -) +@pytest.mark.parametrize("default_type", [list, set, dict]) def test_build_input_field_metadata_for_field_without_default_being_compound( default_type: Union[Type[list], Type[set], Type[dict]], ) -> None: @@ -207,22 +249,20 @@ def test_build_input_field_metadata_for_field_without_default_being_compound( type="DynamicInputDefinition", value_types=[ValueType.LIST], has_default_value=True, - default_value=default_type() + default_value=default_type(), ) # when result = build_input_field_metadata(input_definition=input_definition) # then - assert result.default_factory() == default_type(), "Expected default_factory used creates new instance of compound element" + assert ( + result.default_factory() == default_type() + ), "Expected default_factory used creates new instance of compound element" @pytest.mark.parametrize( - "default_value", [ - [2, 3, 4], - {"a", "b", "c"}, - {"a": 1, "b": 2} - ] + "default_value", [[2, 3, 4], {"a", "b", "c"}, {"a": 1, "b": 2}] ) def test_build_input_field_metadata_for_field_without_default_being_non_empty_compound( default_value: Union[set, list, dict], @@ -232,15 +272,19 @@ def test_build_input_field_metadata_for_field_without_default_being_non_empty_co type="DynamicInputDefinition", value_types=[ValueType.LIST], has_default_value=True, - default_value=default_value + default_value=default_value, ) # when result = build_input_field_metadata(input_definition=input_definition) # then - assert result.default_factory() == default_value, "Expected default_factory to create identical instance of compound data" - assert id(result.default_factory()) != id(default_value), "Expected default_factory to create new instance of compound data" + assert ( + result.default_factory() == default_value + ), "Expected default_factory to create identical instance of compound data" + assert id(result.default_factory()) != id( + default_value + ), "Expected default_factory to create new instance of compound data" def test_collect_python_types_for_values_when_types_can_be_resolved() -> None: @@ -283,17 +327,17 @@ def test_collect_python_types_for_selectors_when_collection_should_succeed() -> kinds_lookup = { "*": WILDCARD_KIND, "string": Kind(name="string"), - "integer": Kind(name="integer") + "integer": Kind(name="integer"), } input_definition = DynamicInputDefinition( type="DynamicInputDefinition", selector_types=[ - SelectorType.INPUT_PARAMETER, SelectorType.INPUT_IMAGE, - SelectorType.STEP_OUTPUT_IMAGE, SelectorType.STEP_OUTPUT, + SelectorType.INPUT_PARAMETER, + SelectorType.INPUT_IMAGE, + SelectorType.STEP_OUTPUT_IMAGE, + SelectorType.STEP_OUTPUT, ], - selector_data_kind={ - SelectorType.STEP_OUTPUT: ["string", "integer"] - } + selector_data_kind={SelectorType.STEP_OUTPUT: ["string", "integer"]}, ) # when @@ -307,13 +351,23 @@ def test_collect_python_types_for_selectors_when_collection_should_succeed() -> # then assert len(result) == 4, "Expected union of 4 types" - assert repr(result[0]) == repr(WorkflowParameterSelector(kind=[WILDCARD_KIND])), "First element of union is to be input param of kind *" - assert repr(result[1]) == repr(WorkflowImageSelector), "Second element of union is to be input image selector" - assert repr(result[2]) == repr(StepOutputImageSelector), "Third element of union is to be step output image 
selector" - assert repr(result[3]) == repr(StepOutputSelector(kind=[kinds_lookup["string"], kinds_lookup["integer"]])), "Last element of union is to be step output selector of kinds string integer" - - -def test_collect_python_types_for_selectors_when_collection_should_fail_on_unknown_kind() -> None: + assert repr(result[0]) == repr( + WorkflowParameterSelector(kind=[WILDCARD_KIND]) + ), "First element of union is to be input param of kind *" + assert repr(result[1]) == repr( + WorkflowImageSelector + ), "Second element of union is to be input image selector" + assert repr(result[2]) == repr( + StepOutputImageSelector + ), "Third element of union is to be step output image selector" + assert repr(result[3]) == repr( + StepOutputSelector(kind=[kinds_lookup["string"], kinds_lookup["integer"]]) + ), "Last element of union is to be step output selector of kinds string integer" + + +def test_collect_python_types_for_selectors_when_collection_should_fail_on_unknown_kind() -> ( + None +): # given kinds_lookup = { "*": WILDCARD_KIND, @@ -322,12 +376,12 @@ def test_collect_python_types_for_selectors_when_collection_should_fail_on_unkno input_definition = DynamicInputDefinition( type="DynamicInputDefinition", selector_types=[ - SelectorType.INPUT_PARAMETER, SelectorType.INPUT_IMAGE, - SelectorType.STEP_OUTPUT_IMAGE, SelectorType.STEP_OUTPUT, + SelectorType.INPUT_PARAMETER, + SelectorType.INPUT_IMAGE, + SelectorType.STEP_OUTPUT_IMAGE, + SelectorType.STEP_OUTPUT, ], - selector_data_kind={ - SelectorType.STEP_OUTPUT: ["string", "integer"] - } + selector_data_kind={SelectorType.STEP_OUTPUT: ["string", "integer"]}, ) # when @@ -352,7 +406,7 @@ async def test_create_dynamic_block_specification() -> None: kinds_lookup = { "*": WILDCARD_KIND, "string": Kind(name="string"), - "integer": Kind(name="integer") + "integer": Kind(name="integer"), } dynamic_block_definition = DynamicBlockDefinition( type="DynamicBlockDefinition", @@ -363,22 +417,25 @@ async def test_create_dynamic_block_specification() -> None: "a": DynamicInputDefinition( type="DynamicInputDefinition", selector_types=[ - SelectorType.INPUT_PARAMETER, SelectorType.STEP_OUTPUT, + SelectorType.INPUT_PARAMETER, + SelectorType.STEP_OUTPUT, ], selector_data_kind={ SelectorType.STEP_OUTPUT: ["string", "integer"] - } + }, ), "b": DynamicInputDefinition( type="DynamicInputDefinition", value_types=[ValueType.LIST], has_default_value=True, default_value=[1, 2, 3], - ) + ), }, outputs={ "a": DynamicOutputDefinition(type="DynamicOutputDefinition"), - "b": DynamicOutputDefinition(type="DynamicOutputDefinition", kind=["string", "integer"]), + "b": DynamicOutputDefinition( + type="DynamicOutputDefinition", kind=["string", "integer"] + ), }, output_dimensionality_offset=1, accepts_batch_input=True, @@ -386,7 +443,7 @@ async def test_create_dynamic_block_specification() -> None: code=PythonCode( type="PythonCode", run_function_code=PYTHON_CODE, - ) + ), ) # when @@ -399,43 +456,50 @@ async def test_create_dynamic_block_specification() -> None: assert result.block_source == "dynamic_workflows_blocks" assert result.manifest_class.describe_outputs() == [ OutputDefinition(name="a", kind=[WILDCARD_KIND]), - OutputDefinition(name="b", kind=[kinds_lookup["string"], kinds_lookup["integer"]]) + OutputDefinition( + name="b", kind=[kinds_lookup["string"], kinds_lookup["integer"]] + ), ], "Expected outputs to be built such that `a` has * kind and `b` has exactly the kinds that were defined" - assert result.manifest_class.accepts_batch_input() is True, "Manifest defined to 
accept batch input" - assert result.manifest_class.accepts_empty_values() is False, "Manifest defined not to accept empty input" - assert result.manifest_class.get_input_dimensionality_offsets() == {}, "No explicit offsets defined" - assert result.manifest_class.get_dimensionality_reference_property() is None, "No dimensionality reference property expected" - assert result.manifest_class.get_output_dimensionality_offset() == 1, "Expected output dimensionality offset announced" + assert ( + result.manifest_class.accepts_batch_input() is True + ), "Manifest defined to accept batch input" + assert ( + result.manifest_class.accepts_empty_values() is False + ), "Manifest defined not to accept empty input" + assert ( + result.manifest_class.get_input_dimensionality_offsets() == {} + ), "No explicit offsets defined" + assert ( + result.manifest_class.get_dimensionality_reference_property() is None + ), "No dimensionality reference property expected" + assert ( + result.manifest_class.get_output_dimensionality_offset() == 1 + ), "Expected output dimensionality offset announced" block_instance = result.block_class() code_run_result = await block_instance.run(a="some", b=[1, 2, 3]) - assert code_run_result == {"output": [3, 2, 1]}, "Expected code to work properly and revert second param" + assert code_run_result == { + "output": [3, 2, 1] + }, "Expected code to work properly and revert second param" - _ = result.manifest_class.model_validate({ - "name": "some", - "type": "MyBlock", - "a": "$steps.some.a", - "b": [1, 2, 3, 4, 5] - }) # no error expected + _ = result.manifest_class.model_validate( + {"name": "some", "type": "MyBlock", "a": "$steps.some.a", "b": [1, 2, 3, 4, 5]} + ) # no error expected - _ = result.manifest_class.model_validate({ - "name": "some", - "type": "MyBlock", - "a": "$steps.some.a", - }) # no error expected, default value for "b" defined - - with pytest.raises(ValidationError): - _ = result.manifest_class.model_validate({ + _ = result.manifest_class.model_validate( + { "name": "some", "type": "MyBlock", - "a": "some", - "b": [1, 2, 3, 4, 5] - }) # error expected - value "a" without selector + "a": "$steps.some.a", + } + ) # no error expected, default value for "b" defined with pytest.raises(ValidationError): - _ = result.manifest_class.model_validate({ - "name": "some", - "type": "MyBlock", - "a": "$steps.some.a", - "b": 1 - }) # error expected - value "b" not a list + _ = result.manifest_class.model_validate( + {"name": "some", "type": "MyBlock", "a": "some", "b": [1, 2, 3, 4, 5]} + ) # error expected - value "a" without selector + + with pytest.raises(ValidationError): + _ = result.manifest_class.model_validate( + {"name": "some", "type": "MyBlock", "a": "$steps.some.a", "b": 1} + ) # error expected - value "b" not a list diff --git a/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py index d313686b19..4411be7336 100644 --- a/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py +++ b/tests/workflows/unit_tests/execution_engine/dynamic_blocs/test_block_scaffolding.py @@ -1,7 +1,211 @@ +from unittest import mock + +import pytest + +from inference.core.workflows.core_steps.formatters.expression import BlockManifest +from inference.core.workflows.errors import ( + DynamicBlockError, + WorkflowEnvironmentConfigurationError, +) +from inference.core.workflows.execution_engine.dynamic_blocks import block_scaffolding +from 
inference.core.workflows.execution_engine.dynamic_blocks.block_scaffolding import ( + assembly_custom_python_block, + create_dynamic_module, +) +from inference.core.workflows.execution_engine.dynamic_blocks.entities import PythonCode + + def test_create_dynamic_module_when_syntax_error_happens() -> None: - pass + # given + init_function = """ +def init_fun() -> Dict[str, Any]: + return {"a": 35} +""" + run_function = """ +def run_function( -> BlockResult: + return {"result": a + b} +""" + python_code = PythonCode( + type="PythonCode", + run_function_code=run_function, + run_function_name="run_function", + init_function_code=init_function, + init_function_name="init_fun", + imports=["import math"], + ) + + # when + with pytest.raises(DynamicBlockError): + _ = create_dynamic_module( + block_type_name="some", python_code=python_code, module_name="my_module" + ) def test_create_dynamic_module_when_creation_should_succeed() -> None: - pass + # given + init_function = """ +def init_fun() -> Dict[str, Any]: + return {"a": 35} +""" + run_function = """ +def run_function(a, b) -> BlockResult: + return {"result": a + b} +""" + python_code = PythonCode( + type="PythonCode", + run_function_code=run_function, + run_function_name="run_function", + init_function_code=init_function, + init_function_name="init_fun", + imports=["import math"], + ) + + # when + module = create_dynamic_module( + block_type_name="some", python_code=python_code, module_name="my_module" + ) + + # then + assert module.init_fun() == {"a": 35} + assert module.run_function(3, 5) == {"result": 8} + + +@pytest.mark.asyncio +async def test_assembly_custom_python_block() -> None: + # given + manifest = BlockManifest + init_function = """ +def init_fun() -> Dict[str, Any]: + return {"a": 6} +""" + run_function = """ +def run_function(self, a, b) -> BlockResult: + return {"result": a + b + self._init_results["a"]} + """ + python_code = PythonCode( + type="PythonCode", + run_function_code=run_function, + run_function_name="run_function", + init_function_code=init_function, + init_function_name="init_fun", + imports=["import math"], + ) + + # when + workflow_block_class = assembly_custom_python_block( + block_type_name="some", + unique_identifier="unique-id", + manifest=manifest, + python_code=python_code, + ) + workflow_block_instance = workflow_block_class() + execution_result = await workflow_block_instance.run(a=3, b=5) + + # then + assert ( + workflow_block_class.get_init_parameters() == [] + ), "Expected no init parameters defined" + assert ( + workflow_block_class.get_manifest() == BlockManifest + ), "Expected manifest to be returned" + assert execution_result == { + "result": 14 + }, "Expected result of 3 + 5 + 6 (last value from init)" + + +@pytest.mark.asyncio +async def test_assembly_custom_python_block_when_run_function_not_found() -> None: + # given + manifest = BlockManifest + init_function = """ +def init_fun() -> Dict[str, Any]: + return {"a": 6} +""" + run_function = """ +def run_function(self, a, b) -> BlockResult: + return {"result": a + b + self._init_results["a"]} + """ + python_code = PythonCode( + type="PythonCode", + run_function_code=run_function, + run_function_name="invalid", + init_function_code=init_function, + init_function_name="init_fun", + imports=["import math"], + ) + + # when + with pytest.raises(DynamicBlockError): + _ = assembly_custom_python_block( + block_type_name="some", + unique_identifier="unique-id", + manifest=manifest, + python_code=python_code, + ) + + +@pytest.mark.asyncio +async def 
test_assembly_custom_python_block_when_init_function_not_found() -> None: + # given + manifest = BlockManifest + init_function = """ +def init_fun() -> Dict[str, Any]: + return {"a": 6} +""" + run_function = """ +def run_function(self, a, b) -> BlockResult: + return {"result": a + b + self._init_results["a"]} + """ + python_code = PythonCode( + type="PythonCode", + run_function_code=run_function, + run_function_name="run_function", + init_function_code=init_function, + init_function_name="invalid", + imports=["import math"], + ) + + # when + with pytest.raises(DynamicBlockError): + _ = assembly_custom_python_block( + block_type_name="some", + unique_identifier="unique-id", + manifest=manifest, + python_code=python_code, + ) + + +@pytest.mark.asyncio +@mock.patch.object( + block_scaffolding, "ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS", False +) +async def test_run_assembled_custom_python_block_when_custom_python_forbidden() -> None: + # given + manifest = BlockManifest + init_function = """ +def init_fun() -> Dict[str, Any]: + return {"a": 6} +""" + run_function = """ +def run_function(self, a, b) -> BlockResult: + return {"result": a + b + self._init_results["a"]} + """ + python_code = PythonCode( + type="PythonCode", + run_function_code=run_function, + run_function_name="run_function", + init_function_code=init_function, + init_function_name="init_fun", + imports=["import math"], + ) + # when + workflow_block_class = assembly_custom_python_block( + block_type_name="some", + unique_identifier="unique-id", + manifest=manifest, + python_code=python_code, + ) + workflow_block_instance = workflow_block_class() + with pytest.raises(WorkflowEnvironmentConfigurationError): + _ = await workflow_block_instance.run(a=3, b=5) From f43fcf767fe063cd3051a0a202b67e5c73870b41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20P=C4=99czek?= Date: Thu, 18 Jul 2024 17:25:31 +0200 Subject: [PATCH 15/15] Bringing version back --- inference/core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/inference/core/version.py b/inference/core/version.py index f197a7558d..82410ed6dd 100644 --- a/inference/core/version.py +++ b/inference/core/version.py @@ -1,4 +1,4 @@ -__version__ = "0.15.1" +__version__ = "0.15.0" if __name__ == "__main__":
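
To see how the pieces introduced across these patches fit together, here is a minimal end-to-end sketch distilled from `test_create_dynamic_block_specification` above. The block type name, the kind names and the `run` body are illustrative placeholders, and the sketch assumes custom Python execution is enabled in the environment (`ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS`):

import asyncio

from inference.core.workflows.entities.types import WILDCARD_KIND, Kind
from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import (
    create_dynamic_block_specification,
)
from inference.core.workflows.execution_engine.dynamic_blocks.entities import (
    DynamicBlockDefinition,
    DynamicInputDefinition,
    DynamicOutputDefinition,
    ManifestDescription,
    PythonCode,
    SelectorType,
    ValueType,
)

# Kinds assumed to be known to the Execution Engine; "*" denotes the wildcard kind.
KINDS_LOOKUP = {"*": WILDCARD_KIND, "string": Kind(name="string")}

definition = DynamicBlockDefinition(
    type="DynamicBlockDefinition",
    manifest=ManifestDescription(
        type="ManifestDescription",
        block_type="ReverseList",  # illustrative block type name
        inputs={
            # "a" may only be fed via selectors: a workflow parameter
            # or a step output of kind `string`
            "a": DynamicInputDefinition(
                type="DynamicInputDefinition",
                selector_types=[
                    SelectorType.INPUT_PARAMETER,
                    SelectorType.STEP_OUTPUT,
                ],
                selector_data_kind={SelectorType.STEP_OUTPUT: ["string"]},
            ),
            # "b" takes a plain list value; the compound default is deep-copied
            # for every manifest instance by build_input_field_metadata
            "b": DynamicInputDefinition(
                type="DynamicInputDefinition",
                value_types=[ValueType.LIST],
                has_default_value=True,
                default_value=[1, 2, 3],
            ),
        },
        outputs={"reversed": DynamicOutputDefinition(type="DynamicOutputDefinition")},
    ),
    code=PythonCode(
        type="PythonCode",
        run_function_code='def run(self, a, b):\n    return {"reversed": b[::-1]}\n',
    ),
)

specification = create_dynamic_block_specification(
    dynamic_block_definition=definition,
    kinds_lookup=KINDS_LOOKUP,
)

# The assembled manifest validates step definitions like any static block manifest
_ = specification.manifest_class.model_validate(
    {"name": "my_step", "type": "ReverseList", "a": "$steps.other.output"}
)

# The assembled block runs asynchronously, like every other workflow block
result = asyncio.run(specification.block_class().run(a="some", b=[1, 2, 3]))
assert result == {"reversed": [3, 2, 1]}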
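
PATCH 13 also changed the contract of the blocks description endpoint: the POST variant of `/workflows/blocks/describe` now treats the request body as optional instead of requiring a `DescribeBlocksRequest` payload. Below is a short sketch of both calling styles; the server URL is a placeholder, and the `dynamic_blocks_definitions` field name is inferred from the handler shown in that diff:

import requests

SERVER_URL = "http://localhost:9001"  # placeholder address of an inference server

# After PATCH 13, posting without a body is valid and returns the blocks schema
response = requests.post(f"{SERVER_URL}/workflows/blocks/describe")
response.raise_for_status()
assert "dynamic_block_definition_schema" in response.json()

# Dynamic block definitions can still be submitted so that they are described
# alongside the statically registered blocks (empty list shown for illustration)
response = requests.post(
    f"{SERVER_URL}/workflows/blocks/describe",
    json={"dynamic_blocks_definitions": []},
)
response.raise_for_status()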