Commit
Merge pull request #1020 from roboflow/feature/add_option_to_specify_…bp_webhook

Add the option to register a webhook URL for batch job state notifications
PawelPeczek-Roboflow authored Feb 12, 2025
2 parents 84f6b62 + 43b75fb commit 0f2364d
Showing 3 changed files with 24 additions and 0 deletions.
@@ -337,6 +337,7 @@ def trigger_job_with_workflows_images_processing(
     max_parallel_tasks: Optional[int],
     aggregation_format: Optional[AggregationFormat],
     job_id: Optional[str],
+    notifications_url: Optional[str],
     api_key: Optional[str],
 ) -> str:
     workspace = get_workspace(api_key=api_key)
@@ -369,6 +370,7 @@ def trigger_job_with_workflows_images_processing(
         processing_timeout_seconds=max_runtime_seconds,
         max_parallel_tasks=max_parallel_tasks,
         processing_specification=processing_specification,
+        notifications_url=notifications_url,
     )
     create_batch_job(
         workspace=workspace,
@@ -394,6 +396,7 @@ def trigger_job_with_workflows_videos_processing(
     aggregation_format: Optional[AggregationFormat],
     max_video_fps: Optional[Union[float, int]],
     job_id: Optional[str],
+    notifications_url: Optional[str],
     api_key: Optional[str],
 ) -> str:
     workspace = get_workspace(api_key=api_key)
@@ -427,6 +430,7 @@ def trigger_job_with_workflows_videos_processing(
         processing_timeout_seconds=max_runtime_seconds,
         max_parallel_tasks=max_parallel_tasks,
         processing_specification=processing_specification,
+        notifications_url=notifications_url,
     )
     create_batch_job(
         workspace=workspace,
16 changes: 16 additions & 0 deletions inference_cli/lib/roboflow_cloud/batch_processing/core.py
@@ -193,6 +193,13 @@ def process_images_with_workflow(
             help="Flag enabling errors stack traces to be displayed (helpful for debugging)",
         ),
     ] = False,
+    notifications_url: Annotated[
+        Optional[str],
+        typer.Option(
+            "--notifications-url",
+            help="URL of the Webhook to be used for job state notifications.",
+        ),
+    ] = None,
 ) -> None:
     if api_key is None:
         api_key = ROBOFLOW_API_KEY
@@ -211,6 +218,7 @@ def process_images_with_workflow(
         max_parallel_tasks=max_parallel_tasks,
         aggregation_format=aggregation_format,
         job_id=job_id,
+        notifications_url=notifications_url,
         api_key=api_key,
     )
     print(f"Triggered job with ID: {job_id}")
@@ -324,6 +332,13 @@ def process_videos_with_workflow(
             help="Flag enabling errors stack traces to be displayed (helpful for debugging)",
         ),
     ] = False,
+    notifications_url: Annotated[
+        Optional[str],
+        typer.Option(
+            "--notifications-url",
+            help="URL of the Webhook to be used for job state notifications.",
+        ),
+    ] = None,
 ) -> None:
     if api_key is None:
         api_key = ROBOFLOW_API_KEY
@@ -343,6 +358,7 @@ def process_videos_with_workflow(
         aggregation_format=aggregation_format,
         max_video_fps=max_video_fps,
         job_id=job_id,
+        notifications_url=notifications_url,
         api_key=api_key,
     )
     print(f"Triggered job with ID: {job_id}")
4 changes: 4 additions & 0 deletions inference_cli/lib/roboflow_cloud/batch_processing/entities.py
@@ -139,3 +139,7 @@ class WorkflowProcessingJobV1(BaseModel):
     processing_specification: WorkflowsProcessingSpecificationV1 = Field(
         serialization_alias="processingSpecification"
     )
+    notifications_url: Optional[str] = Field(
+        serialization_alias="notificationsURL",
+        default=None,
+    )
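With `serialization_alias`, the new field is emitted as `notificationsURL` when the job specification is dumped with aliases enabled (Pydantic v2). A trimmed-down sketch, not the full `WorkflowProcessingJobV1` model, illustrating that behaviour:

```python
# Illustrative sketch only: shows how serialization_alias renames the field
# in the serialized payload (requires pydantic v2).
from typing import Optional

from pydantic import BaseModel, Field


class JobSpecSketch(BaseModel):
    notifications_url: Optional[str] = Field(
        serialization_alias="notificationsURL",
        default=None,
    )


spec = JobSpecSketch(notifications_url="https://example.com/webhooks/batch-jobs")
print(spec.model_dump(by_alias=True))  # {'notificationsURL': 'https://example.com/webhooks/batch-jobs'}
print(spec.model_dump())               # {'notifications_url': 'https://example.com/webhooks/batch-jobs'}
```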
