Skip to content

Commit

Permalink
Merge pull request #509 from roboflow/feature/add_python_code_block
Browse files Browse the repository at this point in the history
Add python code block to `workflows`
  • Loading branch information
PawelPeczek-Roboflow authored Jul 18, 2024
2 parents f0d6356 + f43fcf7 commit fd7fe31
Show file tree
Hide file tree
Showing 37 changed files with 3,445 additions and 210 deletions.
2 changes: 1 addition & 1 deletion development/docs/build_block_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def main() -> None:
token=AUTOGENERATED_BLOCKS_LIST_TOKEN,
)
block_card_lines = []
blocks_description = describe_available_blocks()
blocks_description = describe_available_blocks(dynamic_blocks=[])
block_type2manifest_type_identifier = {
block.block_class: block.manifest_type_identifier
for block in blocks_description.blocks
Expand Down
1 change: 1 addition & 0 deletions docker/dockerfiles/Dockerfile.onnx.lambda
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ ENV API_LOGGING_ENABLED=True
ENV MODEL_VALIDATION_DISABLED=True
ENV ALLOW_NON_HTTPS_URL_INPUT=False
ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False

WORKDIR ${LAMBDA_TASK_ROOT}
RUN rm -rf /build
Expand Down
1 change: 1 addition & 0 deletions docker/dockerfiles/Dockerfile.onnx.lambda.slim
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ ENV API_LOGGING_ENABLED=True
ENV MODEL_VALIDATION_DISABLED=True
ENV ALLOW_NON_HTTPS_URL_INPUT=False
ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False

WORKDIR ${LAMBDA_TASK_ROOT}

Expand Down
1 change: 1 addition & 0 deletions docs/workflows/blocks.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ hide:
<p class="card block-card" data-url="continue_if" data-name="ContinueIf" data-desc="Stops execution of processing branch under certain condition" data-labels="FLOW_CONTROL, APACHE-2.0" data-author=""></p>
<p class="card block-card" data-url="perspective_correction" data-name="PerspectiveCorrection" data-desc="Correct coordinates of detections from plane defined by given polygon to straight rectangular plane of given width and height" data-labels="TRANSFORMATION, APACHE-2.0" data-author=""></p>
<p class="card block-card" data-url="dynamic_zone" data-name="DynamicZone" data-desc="Simplify polygons so they are geometrically convex and contain only the requested number of vertices" data-labels="TRANSFORMATION, APACHE-2.0" data-author=""></p>
<p class="card block-card" data-url="custom_python" data-name="CustomPython" data-desc="" data-labels=", " data-author=""></p>
<!--- AUTOGENERATED_BLOCKS_LIST -->
</div>
</div>
Expand Down
28 changes: 14 additions & 14 deletions docs/workflows/kinds.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,26 +8,26 @@ resolved we need a simple type system - that's what we call `kinds`.

## List of `workflows` kinds
<!--- AUTOGENERATED_KINDS_LIST -->
* [`roboflow_project`](/workflows/kinds/roboflow_project): Roboflow project name
* [`dictionary`](/workflows/kinds/dictionary): Dictionary
* [`string`](/workflows/kinds/string): String value
* [`list_of_values`](/workflows/kinds/list_of_values): List of values of any types
* [`*`](/workflows/kinds/*): Equivalent of any element
* [`Batch[dictionary]`](/workflows/kinds/batch_dictionary): Batch of dictionaries
* [`Batch[keypoint_detection_prediction]`](/workflows/kinds/batch_keypoint_detection_prediction): `'predictions'` key from Keypoint Detection Model output
* [`Batch[parent_id]`](/workflows/kinds/batch_parent_id): Identifier of parent for step output
* [`Batch[classification_prediction]`](/workflows/kinds/batch_classification_prediction): `'predictions'` key from Classification Model outputs
* [`roboflow_model_id`](/workflows/kinds/roboflow_model_id): Roboflow model id
* [`Batch[top_class]`](/workflows/kinds/batch_top_class): Batch of string values representing top class predicted by classification model
* [`integer`](/workflows/kinds/integer): Integer value
* [`dictionary`](/workflows/kinds/dictionary): Dictionary
* [`Batch[classification_prediction]`](/workflows/kinds/batch_classification_prediction): `'predictions'` key from Classification Model outputs
* [`Batch[boolean]`](/workflows/kinds/batch_boolean): Boolean values batch
* [`boolean`](/workflows/kinds/boolean): Boolean flag
* [`Batch[prediction_type]`](/workflows/kinds/batch_prediction_type): String value with type of prediction
* [`Batch[parent_id]`](/workflows/kinds/batch_parent_id): Identifier of parent for step output
* [`string`](/workflows/kinds/string): String value
* [`Batch[instance_segmentation_prediction]`](/workflows/kinds/batch_instance_segmentation_prediction): `'predictions'` key from Instance Segmentation Model outputs
* [`*`](/workflows/kinds/*): Equivalent of any element
* [`integer`](/workflows/kinds/integer): Integer value
* [`float_zero_to_one`](/workflows/kinds/float_zero_to_one): `float` value in range `[0.0, 1.0]`
* [`Batch[image_metadata]`](/workflows/kinds/batch_image_metadata): Dictionary with image metadata required by supervision
* [`Batch[bar_code_detection]`](/workflows/kinds/batch_bar_code_detection): Prediction with barcode detection
* [`Batch[image]`](/workflows/kinds/batch_image): Image in workflows
* [`roboflow_project`](/workflows/kinds/roboflow_project): Roboflow project name
* [`Batch[string]`](/workflows/kinds/batch_string): Batch of string values
* [`list_of_values`](/workflows/kinds/list_of_values): List of values of any types
* [`Batch[boolean]`](/workflows/kinds/batch_boolean): Boolean values batch
* [`Batch[object_detection_prediction]`](/workflows/kinds/batch_object_detection_prediction): `'predictions'` key from Object Detection Model output
* [`float_zero_to_one`](/workflows/kinds/float_zero_to_one): `float` value in range `[0.0, 1.0]`
* [`Batch[prediction_type]`](/workflows/kinds/batch_prediction_type): String value with type of prediction
* [`Batch[keypoint_detection_prediction]`](/workflows/kinds/batch_keypoint_detection_prediction): `'predictions'` key from Keypoint Detection Model output
* [`Batch[bar_code_detection]`](/workflows/kinds/batch_bar_code_detection): Prediction with barcode detection
* [`roboflow_model_id`](/workflows/kinds/roboflow_model_id): Roboflow model id
<!--- AUTOGENERATED_KINDS_LIST -->
10 changes: 10 additions & 0 deletions inference/core/entities/requests/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,10 @@

from pydantic import BaseModel, Field

from inference.core.workflows.execution_engine.dynamic_blocks.entities import (
DynamicBlockDefinition,
)


class WorkflowInferenceRequest(BaseModel):
api_key: str = Field(
Expand All @@ -18,3 +22,9 @@ class WorkflowInferenceRequest(BaseModel):

class WorkflowSpecificationInferenceRequest(WorkflowInferenceRequest):
    # Raw workflow definition supplied inline with the request body.
    # NOTE(review): schema of the dict is validated downstream by the
    # workflow parser, not by this model — confirm against ExecutionEngine.
    specification: dict


class DescribeBlocksRequest(BaseModel):
    """Request body for the POST /workflows/blocks/describe endpoint.

    Carries user-supplied dynamic block definitions that the server compiles
    and merges with the statically registered blocks before describing them.
    Defaults to an empty list, which yields only the static blocks.
    """

    dynamic_blocks_definitions: List[DynamicBlockDefinition] = Field(
        default_factory=list, description="Dynamic blocks to be used."
    )
3 changes: 3 additions & 0 deletions inference/core/entities/responses/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,3 +146,6 @@ class WorkflowsBlocksDescription(BaseModel):
universal_query_language_description: UniversalQueryLanguageDescription = Field(
description="Definitions of Universal Query Language operations and operators"
)
dynamic_block_definition_schema: dict = Field(
description="Schema for dynamic block definition"
)
3 changes: 3 additions & 0 deletions inference/core/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -393,6 +393,9 @@
WORKFLOWS_REMOTE_EXECUTION_MAX_STEP_CONCURRENT_REQUESTS = int(
os.getenv("WORKFLOWS_REMOTE_EXECUTION_MAX_STEP_CONCURRENT_REQUESTS", "8")
)
ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS = str2bool(
os.getenv("ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS", True)
)

MODEL_VALIDATION_DISABLED = str2bool(os.getenv("MODEL_VALIDATION_DISABLED", "False"))

Expand Down
Empty file.
78 changes: 78 additions & 0 deletions inference/core/interfaces/http/handlers/workflows.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# TODO - for everyone: start migrating other handlers to bring relief to http_api.py
from typing import List, Optional

from inference.core.entities.responses.workflows import (
ExternalBlockPropertyPrimitiveDefinition,
ExternalWorkflowsBlockSelectorDefinition,
UniversalQueryLanguageDescription,
WorkflowsBlocksDescription,
)
from inference.core.workflows.core_steps.common.query_language.introspection.core import (
prepare_operations_descriptions,
prepare_operators_descriptions,
)
from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import (
compile_dynamic_blocks,
)
from inference.core.workflows.execution_engine.dynamic_blocks.entities import (
DynamicBlockDefinition,
)
from inference.core.workflows.execution_engine.introspection.blocks_loader import (
describe_available_blocks,
)
from inference.core.workflows.execution_engine.introspection.connections_discovery import (
discover_blocks_connections,
)


def handle_describe_workflows_blocks_request(
    dynamic_blocks_definitions: Optional[List[DynamicBlockDefinition]] = None,
) -> WorkflowsBlocksDescription:
    """Build the complete blocks-description payload for the describe endpoint.

    Compiles any user-supplied dynamic block definitions into regular blocks,
    merges them with the statically registered ones, and packages block
    schemas, kind/primitive connection information and the Universal Query
    Language description into a single response model.
    """
    definitions = dynamic_blocks_definitions or []
    compiled_dynamic_blocks = compile_dynamic_blocks(
        dynamic_blocks_definitions=definitions,
    )
    description = describe_available_blocks(dynamic_blocks=compiled_dynamic_blocks)
    connections = discover_blocks_connections(blocks_description=description)
    # Re-shape internal connection entities into the external (response) models.
    kinds_connections = {}
    for kind_name, kind_links in connections.kinds_connections.items():
        kinds_connections[kind_name] = [
            ExternalWorkflowsBlockSelectorDefinition(
                manifest_type_identifier=link.manifest_type_identifier,
                property_name=link.property_name,
                property_description=link.property_description,
                compatible_element=link.compatible_element,
                is_list_element=link.is_list_element,
                is_dict_element=link.is_dict_element,
            )
            for link in kind_links
        ]
    primitives_connections = []
    for primitive in connections.primitives_connections:
        primitives_connections.append(
            ExternalBlockPropertyPrimitiveDefinition(
                manifest_type_identifier=primitive.manifest_type_identifier,
                property_name=primitive.property_name,
                property_description=primitive.property_description,
                type_annotation=primitive.type_annotation,
            )
        )
    uql_description = UniversalQueryLanguageDescription.from_internal_entities(
        operations_descriptions=prepare_operations_descriptions(),
        operators_descriptions=prepare_operators_descriptions(),
    )
    return WorkflowsBlocksDescription(
        blocks=description.blocks,
        declared_kinds=description.declared_kinds,
        kinds_connections=kinds_connections,
        primitives_connections=primitives_connections,
        universal_query_language_description=uql_description,
        dynamic_block_definition_schema=DynamicBlockDefinition.schema(),
    )
93 changes: 33 additions & 60 deletions inference/core/interfaces/http/http_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
from starlette.middleware.base import BaseHTTPMiddleware

from inference.core import logger
from inference.core.cache import cache
from inference.core.devices.utils import GLOBAL_INFERENCE_SERVER_ID
from inference.core.entities.requests.clip import (
ClipCompareRequest,
Expand Down Expand Up @@ -43,6 +42,7 @@
ClearModelRequest,
)
from inference.core.entities.requests.workflows import (
DescribeBlocksRequest,
WorkflowInferenceRequest,
WorkflowSpecificationInferenceRequest,
)
Expand Down Expand Up @@ -74,9 +74,6 @@
ServerVersionInfo,
)
from inference.core.entities.responses.workflows import (
ExternalBlockPropertyPrimitiveDefinition,
ExternalWorkflowsBlockSelectorDefinition,
UniversalQueryLanguageDescription,
WorkflowInferenceResponse,
WorkflowsBlocksDescription,
WorkflowValidationStatus,
Expand Down Expand Up @@ -129,6 +126,9 @@
WorkspaceLoadError,
)
from inference.core.interfaces.base import BaseInterface
from inference.core.interfaces.http.handlers.workflows import (
handle_describe_workflows_blocks_request,
)
from inference.core.interfaces.http.orjson_utils import (
orjson_response,
serialise_workflow_result,
Expand All @@ -141,12 +141,9 @@
InvalidInputTypeError,
OperationTypeNotRecognisedError,
)
from inference.core.workflows.core_steps.common.query_language.introspection.core import (
prepare_operations_descriptions,
prepare_operators_descriptions,
)
from inference.core.workflows.entities.base import OutputDefinition
from inference.core.workflows.errors import (
DynamicBlockError,
ExecutionGraphStructureError,
InvalidReferenceTargetError,
ReferenceTypeError,
Expand All @@ -158,12 +155,6 @@
parse_workflow_definition,
)
from inference.core.workflows.execution_engine.core import ExecutionEngine
from inference.core.workflows.execution_engine.introspection.blocks_loader import (
describe_available_blocks,
)
from inference.core.workflows.execution_engine.introspection.connections_discovery import (
discover_blocks_connections,
)
from inference.models.aliases import resolve_roboflow_model_alias
from inference.usage_tracking.collector import usage_collector

Expand Down Expand Up @@ -245,6 +236,7 @@ async def wrapped_route(*args, **kwargs):
RuntimeInputError,
InvalidInputTypeError,
OperationTypeNotRecognisedError,
DynamicBlockError,
) as error:
resp = JSONResponse(
status_code=400,
Expand Down Expand Up @@ -473,13 +465,10 @@ async def process_workflow_inference_request(
workflow_specification: dict,
background_tasks: Optional[BackgroundTasks],
) -> WorkflowInferenceResponse:
step_execution_mode = StepExecutionMode(WORKFLOWS_STEP_EXECUTION_MODE)
workflow_init_parameters = {
"workflows_core.model_manager": model_manager,
"workflows_core.api_key": workflow_request.api_key,
"workflows_core.background_tasks": background_tasks,
"workflows_core.cache": cache,
"workflows_core.step_execution_mode": step_execution_mode,
}
execution_engine = ExecutionEngine.init(
workflow_definition=workflow_specification,
Expand Down Expand Up @@ -904,54 +893,35 @@ async def infer_from_workflow(
@app.get(
"/workflows/blocks/describe",
response_model=WorkflowsBlocksDescription,
summary="[EXPERIMENTAL] Endpoint to get definition of workflows blocks that are accessible",
summary="[LEGACY] Endpoint to get definition of workflows blocks that are accessible",
description="Endpoint provides detailed information about workflows building blocks that are "
"accessible in the inference server. This information could be used to programmatically "
"build / display workflows.",
deprecated=True,
)
@with_route_exceptions
async def describe_workflows_blocks() -> WorkflowsBlocksDescription:
blocks_description = describe_available_blocks()
blocks_connections = discover_blocks_connections(
blocks_description=blocks_description,
)
kinds_connections = {
kind_name: [
ExternalWorkflowsBlockSelectorDefinition(
manifest_type_identifier=c.manifest_type_identifier,
property_name=c.property_name,
property_description=c.property_description,
compatible_element=c.compatible_element,
is_list_element=c.is_list_element,
is_dict_element=c.is_dict_element,
)
for c in connections
]
for kind_name, connections in blocks_connections.kinds_connections.items()
}
primitives_connections = [
ExternalBlockPropertyPrimitiveDefinition(
manifest_type_identifier=primitives_connection.manifest_type_identifier,
property_name=primitives_connection.property_name,
property_description=primitives_connection.property_description,
type_annotation=primitives_connection.type_annotation,
)
for primitives_connection in blocks_connections.primitives_connections
]
uql_operations_descriptions = prepare_operations_descriptions()
uql_operators_descriptions = prepare_operators_descriptions()
universal_query_language_description = (
UniversalQueryLanguageDescription.from_internal_entities(
operations_descriptions=uql_operations_descriptions,
operators_descriptions=uql_operators_descriptions,
)
)
return WorkflowsBlocksDescription(
blocks=blocks_description.blocks,
declared_kinds=blocks_description.declared_kinds,
kinds_connections=kinds_connections,
primitives_connections=primitives_connections,
universal_query_language_description=universal_query_language_description,
return handle_describe_workflows_blocks_request()

@app.post(
"/workflows/blocks/describe",
response_model=WorkflowsBlocksDescription,
summary="[EXPERIMENTAL] Endpoint to get definition of workflows blocks that are accessible",
description="Endpoint provides detailed information about workflows building blocks that are "
"accessible in the inference server. This information could be used to programmatically "
"build / display workflows. Additionally - in request body one can specify list of "
"dynamic blocks definitions which will be transformed into blocks and used to generate "
"schemas and definitions of connections",
)
@with_route_exceptions
async def describe_workflows_blocks(
    request: Optional[DescribeBlocksRequest] = None,
) -> WorkflowsBlocksDescription:
    """POST variant of the blocks-describe endpoint.

    Accepts an optional request body carrying dynamic block definitions;
    when no body is supplied, only statically registered blocks are
    described (same result as the legacy GET endpoint).
    """
    dynamic_blocks_definitions = None
    if request is not None:
        dynamic_blocks_definitions = request.dynamic_blocks_definitions
    return handle_describe_workflows_blocks_request(
        dynamic_blocks_definitions=dynamic_blocks_definitions
    )

@app.post(
Expand All @@ -965,14 +935,17 @@ async def describe_workflows_blocks() -> WorkflowsBlocksDescription:
async def get_dynamic_block_outputs(
    step_manifest: Dict[str, Any]
) -> List[OutputDefinition]:
    """Resolve the declared outputs of a single step manifest.

    Wraps the manifest in a minimal one-step workflow definition so the
    regular workflow parser can be reused to validate the step and
    materialise its output definitions.
    """
    # NOTE: dynamic blocks do not support dynamic outputs today; if that
    # changes, dynamic block manifests must be passed to the parser here.
    wrapper_definition = {
        "version": "1.0",
        "inputs": [],
        "steps": [step_manifest],
        "outputs": [],
    }
    parsed = parse_workflow_definition(
        raw_workflow_definition=wrapper_definition,
        dynamic_blocks=[],
    )
    return parsed.steps[0].get_actual_outputs()
Expand Down
4 changes: 0 additions & 4 deletions inference/core/interfaces/stream/inference_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -556,10 +556,6 @@ def init_with_workflow(
workflow_init_parameters["workflows_core.background_tasks"] = (
background_tasks
)
workflow_init_parameters["workflows_core.cache"] = cache
workflow_init_parameters["workflows_core.step_execution_mode"] = (
StepExecutionMode.LOCAL
)
execution_engine = ExecutionEngine.init(
workflow_definition=workflow_specification,
init_parameters=workflow_init_parameters,
Expand Down
Loading

0 comments on commit fd7fe31

Please sign in to comment.