From aaf30769b4517a143441d73da93df1e4a7c7dff3 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Mon, 17 Jun 2024 14:45:17 +0200 Subject: [PATCH 01/15] pin pydantic=1.10.16 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 42ef0e143..2e4e520b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ numpy = "<2" pandas = ">=1.2.0,<2" defusedxml = "^0.7.1" lxml = "^4.9.1" -pydantic = "<2" +pydantic = "==1.10.16" docstring-parser = "^0.15" anndata = ">=0.8.0,<0.11.0" filelock = "3.13.*" From 0a76cafdbc97fb35e549bedee32e313a99cf8a7c Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Mon, 17 Jun 2024 14:50:05 +0200 Subject: [PATCH 02/15] update pydantic imports to .v1 --- fractal_tasks_core/cellvoyager/metadata.py | 66 ++++---------- fractal_tasks_core/channels.py | 85 +++++-------------- fractal_tasks_core/dev/lib_args_schemas.py | 43 +++------- .../dev/lib_signature_constraints.py | 38 +++------ fractal_tasks_core/dev/task_models.py | 2 +- fractal_tasks_core/labels.py | 2 +- fractal_tasks_core/ngff/specs.py | 6 +- fractal_tasks_core/tables/v1.py | 16 ++-- .../tasks/cellpose_transforms.py | 6 +- fractal_tasks_core/tasks/io_models.py | 14 ++- .../my_package/lib_custom_models.py | 2 +- 11 files changed, 81 insertions(+), 199 deletions(-) diff --git a/fractal_tasks_core/cellvoyager/metadata.py b/fractal_tasks_core/cellvoyager/metadata.py index 4d9a008b4..9cd9e6593 100644 --- a/fractal_tasks_core/cellvoyager/metadata.py +++ b/fractal_tasks_core/cellvoyager/metadata.py @@ -48,17 +48,13 @@ def parse_yokogawa_metadata( mrf_str = Path(mrf_path).as_posix() mlf_str = Path(mlf_path).as_posix() - mrf_frame, mlf_frame, error_count = read_metadata_files( - mrf_str, mlf_str, filename_patterns - ) + mrf_frame, mlf_frame, error_count = read_metadata_files(mrf_str, mlf_str, filename_patterns) # Aggregate information from the mlf file per_site_parameters = ["X", "Y"] grouping_params = ["well_id", "FieldIndex"] - grouped_sites = mlf_frame.loc[ - :, grouping_params + per_site_parameters - ].groupby(by=grouping_params) + grouped_sites = mlf_frame.loc[:, grouping_params + per_site_parameters].groupby(by=grouping_params) check_group_consistency(grouped_sites, message="X & Y stage positions") site_metadata = grouped_sites.mean() @@ -82,9 +78,7 @@ def parse_yokogawa_metadata( "vert_pixels", "bit_depth", ] - check_group_consistency( - mrf_frame.loc[:, mrf_columns], message="Image dimensions" - ) + check_group_consistency(mrf_frame.loc[:, mrf_columns], message="Image dimensions") site_metadata["pixel_size_x"] = mrf_frame.loc[:, "horiz_pixel_dim"].max() site_metadata["pixel_size_y"] = mrf_frame.loc[:, "vert_pixel_dim"].max() site_metadata["x_pixel"] = int(mrf_frame.loc[:, "horiz_pixels"].max()) @@ -102,17 +96,13 @@ def parse_yokogawa_metadata( number_of_files = {} for this_well_id in list_of_wells: num_images = (mlf_frame.well_id == this_well_id).sum() - logger.info( - f"Expected number of images for well {this_well_id}: {num_images}" - ) + logger.info(f"Expected number of images for well {this_well_id}: {num_images}") number_of_files[this_well_id] = num_images # Check that the sum of per-well file numbers correspond to the total # file number if not sum(number_of_files.values()) == len(mlf_frame): raise ValueError( - "Error while counting the number of image files per well.\n" - f"{len(mlf_frame)=}\n" - f"{number_of_files=}" + "Error while counting the number of image files per well.\n" f"{len(mlf_frame)=}\n" f"{number_of_files=}" ) 
return site_metadata, number_of_files @@ -148,9 +138,7 @@ def read_metadata_files( # processed further. Figure out how to save them as relevant metadata for # use e.g. during illumination correction - mlf_frame, error_count = read_mlf_file( - mlf_path, plate_type, filename_patterns - ) + mlf_frame, error_count = read_mlf_file(mlf_path, plate_type, filename_patterns) # Time points are parsed as part of the mlf_frame, but currently not # processed further. Once we tackle time-resolved data, parse from here. @@ -250,9 +238,7 @@ def _create_well_ids( col_sub = [(x - 1) % 4 + 1 for x in col_series] well_ids = [] for i in range(len(row_base)): - well_ids.append( - f"{row_base[i]}{col_base[i]:02}.{row_sub[i]}{col_sub[i]}" - ) + well_ids.append(f"{row_base[i]}{col_base[i]:02}.{row_sub[i]}{col_sub[i]}") else: row_str = [chr(x) for x in (row_series + 64)] well_ids = [f"{a}{b:02}" for a, b in zip(row_str, col_series)] @@ -284,10 +270,7 @@ def read_mlf_file( mlf_frame_raw = pd.read_xml(mlf_path) # Remove all rows that do not match the given patterns - logger.info( - f"Read {mlf_path}, and apply following patterns to " - f"image filenames: {filename_patterns}" - ) + logger.info(f"Read {mlf_path}, and apply following patterns to " f"image filenames: {filename_patterns}") if filename_patterns: filenames = mlf_frame_raw.MeasurementRecord keep_row = None @@ -295,19 +278,13 @@ def read_mlf_file( actual_pattern = fnmatch.translate(pattern) new_matches = filenames.str.fullmatch(actual_pattern) if new_matches.sum() == 0: - raise ValueError( - f"In {mlf_path} there is no image filename " - f'matching "{actual_pattern}".' - ) + raise ValueError(f"In {mlf_path} there is no image filename " f'matching "{actual_pattern}".') if keep_row is None: keep_row = new_matches.copy() else: keep_row = keep_row & new_matches if keep_row.sum() == 0: - raise ValueError( - f"In {mlf_path} there is no image filename " - f"matching {filename_patterns}." - ) + raise ValueError(f"In {mlf_path} there is no image filename " f"matching {filename_patterns}.") mlf_frame_matching = mlf_frame_raw[keep_row.values].copy() else: mlf_frame_matching = mlf_frame_raw.copy() @@ -378,31 +355,20 @@ def get_z_steps(mlf_frame: pd.DataFrame) -> pd.DataFrame: else: # Group the whole site (combine channels), because Z steps need to be # consistent between channels for OME-Zarr. - z_data = grouped_sites_z.apply(calculate_steps).groupby( - ["well_id", "FieldIndex"] - ) + z_data = grouped_sites_z.apply(calculate_steps).groupby(["well_id", "FieldIndex"]) - check_group_consistency( - z_data, message="Comparing Z steps between channels" - ) + check_group_consistency(z_data, message="Comparing Z steps between channels") # Ensure that channels have the same number of z planes and # reduce it to one value. 
# Only check if there is more than one channel available - if any( - grouped_sites_z.count().groupby(["well_id", "FieldIndex"]).count() > 1 - ): + if any(grouped_sites_z.count().groupby(["well_id", "FieldIndex"]).count() > 1): check_group_consistency( grouped_sites_z.count().groupby(["well_id", "FieldIndex"]), message="Checking number of Z steps between channels", ) - z_steps = ( - grouped_sites_z.count() - .groupby(["well_id", "FieldIndex"]) - .mean() - .astype(int) - ) + z_steps = grouped_sites_z.count().groupby(["well_id", "FieldIndex"]).mean().astype(int) # Combine the two dataframes z_frame = pd.concat([z_data.mean(), z_steps], axis=1) @@ -422,9 +388,7 @@ def get_earliest_time_per_site(mlf_frame: pd.DataFrame) -> pd.DataFrame: # Because a site will contain time information for each plane # of each channel, we just return the earliest time infromation # per site. - return pd.to_datetime( - mlf_frame.groupby(["well_id", "FieldIndex"]).min()["Time"], utc=True - ) + return pd.to_datetime(mlf_frame.groupby(["well_id", "FieldIndex"]).min()["Time"], utc=True) def check_group_consistency(grouped_df: pd.DataFrame, message: str = ""): diff --git a/fractal_tasks_core/channels.py b/fractal_tasks_core/channels.py index 0ed7f6d37..7b6bb0cbf 100644 --- a/fractal_tasks_core/channels.py +++ b/fractal_tasks_core/channels.py @@ -18,16 +18,14 @@ from typing import Union import zarr -from pydantic import BaseModel -from pydantic import validator +from pydantic.v1 import BaseModel +from pydantic.v1 import validator from fractal_tasks_core import __OME_NGFF_VERSION__ if __OME_NGFF_VERSION__ != "0.4": - NotImplementedError( - f"OME NGFF {__OME_NGFF_VERSION__} is not supported " "in `channels.py`" - ) + NotImplementedError(f"OME NGFF {__OME_NGFF_VERSION__} is not supported " "in `channels.py`") class Window(BaseModel): @@ -93,10 +91,7 @@ def valid_hex_color(cls, v, values): allowed_characters = "abcdefABCDEF0123456789" for character in v: if character not in allowed_characters: - raise ValueError( - "color must only include characters from " - f'"{allowed_characters}" (given: "{v}")' - ) + raise ValueError("color must only include characters from " f'"{allowed_characters}" (given: "{v}")') return v @@ -124,13 +119,10 @@ def mutually_exclusive_channel_attributes(cls, v, values): label = v if wavelength_id and v: raise ValueError( - "`wavelength_id` and `label` cannot be both set " - f"(given {wavelength_id=} and {label=})." + "`wavelength_id` and `label` cannot be both set " f"(given {wavelength_id=} and {label=})." 
) if wavelength_id is None and v is None: - raise ValueError( - "`wavelength_id` and `label` cannot be both `None`" - ) + raise ValueError("`wavelength_id` and `label` cannot be both `None`") return v @@ -152,9 +144,7 @@ def check_unique_wavelength_ids(channels: list[OmeroChannel]): """ wavelength_ids = [c.wavelength_id for c in channels] if len(set(wavelength_ids)) < len(wavelength_ids): - raise ValueError( - f"Non-unique wavelength_id's in {wavelength_ids}\n" f"{channels=}" - ) + raise ValueError(f"Non-unique wavelength_id's in {wavelength_ids}\n" f"{channels=}") def check_well_channel_labels(*, well_zarr_path: str) -> None: @@ -173,9 +163,7 @@ def check_well_channel_labels(*, well_zarr_path: str) -> None: image_paths = [image["path"] for image in group.attrs["well"]["images"]] list_of_channel_lists = [] for image_path in image_paths: - channels = get_omero_channel_list( - image_zarr_path=f"{well_zarr_path}/{image_path}" - ) + channels = get_omero_channel_list(image_zarr_path=f"{well_zarr_path}/{image_path}") list_of_channel_lists.append(channels[:]) # For each pair of channel-labels lists, verify they do not overlap @@ -191,10 +179,7 @@ def check_well_channel_labels(*, well_zarr_path: str) -> None: "images? This could lead to non-unique channel labels, " "and then could be the reason of the error" ) - raise ValueError( - "Non-unique channel labels\n" - f"{labels_1=}\n{labels_2=}\n{hint}" - ) + raise ValueError("Non-unique channel labels\n" f"{labels_1=}\n{labels_2=}\n{hint}") def get_channel_from_image_zarr( @@ -219,9 +204,7 @@ def get_channel_from_image_zarr( A single channel dictionary. """ omero_channels = get_omero_channel_list(image_zarr_path=image_zarr_path) - channel = get_channel_from_list( - channels=omero_channels, label=label, wavelength_id=wavelength_id - ) + channel = get_channel_from_list(channels=omero_channels, label=label, wavelength_id=wavelength_id) return channel @@ -268,36 +251,24 @@ def get_channel_from_list( if label: if wavelength_id: # Both label and wavelength_id are specified - matching_channels = [ - c - for c in channels - if (c.label == label and c.wavelength_id == wavelength_id) - ] + matching_channels = [c for c in channels if (c.label == label and c.wavelength_id == wavelength_id)] else: # Only label is specified matching_channels = [c for c in channels if c.label == label] else: if wavelength_id: # Only wavelength_id is specified - matching_channels = [ - c for c in channels if c.wavelength_id == wavelength_id - ] + matching_channels = [c for c in channels if c.wavelength_id == wavelength_id] else: # Neither label or wavelength_id are specified - raise ValueError( - "get_channel requires at least one in {label,wavelength_id} " - "arguments" - ) + raise ValueError("get_channel requires at least one in {label,wavelength_id} " "arguments") # Verify that there is one and only one matching channel if len(matching_channels) == 0: required_match = [f"{label=}", f"{wavelength_id=}"] - required_match_string = " and ".join( - [x for x in required_match if "None" not in x] - ) + required_match_string = " and ".join([x for x in required_match if "None" not in x]) raise ChannelNotFoundError( - f"ChannelNotFoundError: No channel found in {channels}" - f" for {required_match_string}" + f"ChannelNotFoundError: No channel found in {channels}" f" for {required_match_string}" ) if len(matching_channels) > 1: raise ValueError(f"Inconsistent set of channels: {channels}") @@ -346,9 +317,7 @@ def define_omero_channels( default_label = wavelength_id if label_prefix: 
default_label = f"{label_prefix}_{default_label}" - logging.warning( - f"Missing label for {channel=}, using {default_label=}" - ) + logging.warning(f"Missing label for {channel=}, using {default_label=}") channel.label = default_label # If channel.color is None, set it to a default value (use the default @@ -369,9 +338,7 @@ def define_omero_channels( if len(set(labels)) < len(labels): raise ValueError(f"Non-unique labels in {new_channels=}") - new_channels_dictionaries = [ - c.dict(exclude={"index"}, exclude_unset=True) for c in new_channels - ] + new_channels_dictionaries = [c.dict(exclude={"index"}, exclude_unset=True) for c in new_channels] return new_channels_dictionaries @@ -401,9 +368,7 @@ def _get_new_unique_value( return new_value -def update_omero_channels( - old_channels: list[dict[str, Any]] -) -> list[dict[str, Any]]: +def update_omero_channels(old_channels: list[dict[str, Any]]) -> list[dict[str, Any]]: """ Make an existing list of Omero channels Fractal-compatible @@ -490,19 +455,11 @@ def _get_next_color() -> str: label = old_channel.get("label") color = old_channel.get("color") wavelength_id = old_channel.get("wavelength_id") - old_attributes = ( - f"Old attributes: {label=}, {wavelength_id=}, {color=}" - ) + old_attributes = f"Old attributes: {label=}, {wavelength_id=}, {color=}" label = new_channels[ind]["label"] wavelength_id = new_channels[ind]["wavelength_id"] color = new_channels[ind]["color"] - new_attributes = ( - f"New attributes: {label=}, {wavelength_id=}, {color=}" - ) - logging.info( - "Omero channel update:\n" - f" {old_attributes}\n" - f" {new_attributes}" - ) + new_attributes = f"New attributes: {label=}, {wavelength_id=}, {color=}" + logging.info("Omero channel update:\n" f" {old_attributes}\n" f" {new_attributes}") return new_channels diff --git a/fractal_tasks_core/dev/lib_args_schemas.py b/fractal_tasks_core/dev/lib_args_schemas.py index f3846de68..0ecf88ddb 100644 --- a/fractal_tasks_core/dev/lib_args_schemas.py +++ b/fractal_tasks_core/dev/lib_args_schemas.py @@ -21,11 +21,12 @@ from typing import Optional from docstring_parser import parse as docparse -from pydantic.decorator import ALT_V_ARGS -from pydantic.decorator import ALT_V_KWARGS -from pydantic.decorator import V_DUPLICATE_KWARGS -from pydantic.decorator import V_POSITIONAL_ONLY_NAME -from pydantic.decorator import ValidatedFunction +from pydantic.v1.decorator import ALT_V_ARGS +from pydantic.v1.decorator import ALT_V_ARGS +from pydantic.v1.decorator import ALT_V_KWARGS +from pydantic.v1.decorator import V_DUPLICATE_KWARGS +from pydantic.v1.decorator import V_POSITIONAL_ONLY_NAME +from pydantic.v1.decorator import ValidatedFunction from fractal_tasks_core.dev.lib_descriptions import ( _get_class_attrs_descriptions, @@ -96,14 +97,9 @@ def _remove_args_kwargs_properties(old_schema: _Schema) -> _Schema: expected_args_property = {"title": "Args", "type": "array", "items": {}} expected_kwargs_property = {"title": "Kwargs", "type": "object"} if args_property != expected_args_property: - raise ValueError( - f"{args_property=}\ndiffers from\n{expected_args_property=}" - ) + raise ValueError(f"{args_property=}\ndiffers from\n{expected_args_property=}") if kwargs_property != expected_kwargs_property: - raise ValueError( - f"{kwargs_property=}\ndiffers from\n" - f"{expected_kwargs_property=}" - ) + raise ValueError(f"{kwargs_property=}\ndiffers from\n" f"{expected_kwargs_property=}") logging.info("[_remove_args_kwargs_properties] END") return new_schema @@ -149,9 +145,7 @@ def 
_remove_attributes_from_descriptions(old_schema: _Schema) -> _Schema: if "definitions" in new_schema: for name, definition in new_schema["definitions"].items(): parsed_docstring = docparse(definition["description"]) - new_schema["definitions"][name][ - "description" - ] = parsed_docstring.short_description + new_schema["definitions"][name]["description"] = parsed_docstring.short_description logging.info("[_remove_attributes_from_descriptions] END") return new_schema @@ -182,21 +176,18 @@ def create_schema_for_single_task( # Usage 1 (standard) if package is None: raise ValueError( - "Cannot call `create_schema_for_single_task with " - f"{task_function=} and {package=}. Exit." + "Cannot call `create_schema_for_single_task with " f"{task_function=} and {package=}. Exit." ) if os.path.isabs(executable): raise ValueError( - "Cannot call `create_schema_for_single_task with " - f"{task_function=} and absolute {executable=}. Exit." + "Cannot call `create_schema_for_single_task with " f"{task_function=} and absolute {executable=}. Exit." ) else: usage = "2" # Usage 2 (testing) if package is not None: raise ValueError( - "Cannot call `create_schema_for_single_task with " - f"{task_function=} and non-None {package=}. Exit." + "Cannot call `create_schema_for_single_task with " f"{task_function=} and non-None {package=}. Exit." ) if not os.path.isabs(executable): raise ValueError( @@ -245,9 +236,7 @@ def create_schema_for_single_task( function_name=function_name, verbose=verbose, ) - schema = _insert_function_args_descriptions( - schema=schema, descriptions=function_args_descriptions, verbose=verbose - ) + schema = _insert_function_args_descriptions(schema=schema, descriptions=function_args_descriptions, verbose=verbose) # Merge lists of fractal-tasks-core and user-provided Pydantic models user_provided_models = custom_pydantic_models or [] @@ -255,11 +244,7 @@ def create_schema_for_single_task( # Check that model names are unique pydantic_models_names = [item[2] for item in pydantic_models] - duplicate_class_names = [ - name - for name, count in Counter(pydantic_models_names).items() - if count > 1 - ] + duplicate_class_names = [name for name, count in Counter(pydantic_models_names).items() if count > 1] if duplicate_class_names: pydantic_models_str = " " + "\n ".join(map(str, pydantic_models)) raise ValueError( diff --git a/fractal_tasks_core/dev/lib_signature_constraints.py b/fractal_tasks_core/dev/lib_signature_constraints.py index 9a4d7abf6..468136bb0 100644 --- a/fractal_tasks_core/dev/lib_signature_constraints.py +++ b/fractal_tasks_core/dev/lib_signature_constraints.py @@ -15,10 +15,10 @@ from pathlib import Path from typing import Callable -from pydantic.decorator import ALT_V_ARGS -from pydantic.decorator import ALT_V_KWARGS -from pydantic.decorator import V_DUPLICATE_KWARGS -from pydantic.decorator import V_POSITIONAL_ONLY_NAME +from pydantic.v1.decorator import ALT_V_ARGS +from pydantic.v1.decorator import ALT_V_KWARGS +from pydantic.v1.decorator import V_DUPLICATE_KWARGS +from pydantic.v1.decorator import V_POSITIONAL_ONLY_NAME FORBIDDEN_PARAM_NAMES = ( "args", @@ -47,23 +47,13 @@ def _extract_function( """ if not module_relative_path.endswith(".py"): raise ValueError(f"{module_relative_path=} must end with '.py'") - module_relative_path_no_py = str( - Path(module_relative_path).with_suffix("") - ) + module_relative_path_no_py = str(Path(module_relative_path).with_suffix("")) module_relative_path_dots = module_relative_path_no_py.replace("/", ".") if verbose: - logging.info( - f"Now 
calling `import_module` for " - f"{package_name}.{module_relative_path_dots}" - ) - imported_module = import_module( - f"{package_name}.{module_relative_path_dots}" - ) + logging.info(f"Now calling `import_module` for " f"{package_name}.{module_relative_path_dots}") + imported_module = import_module(f"{package_name}.{module_relative_path_dots}") if verbose: - logging.info( - f"Now getting attribute {function_name} from " - f"imported module {imported_module}." - ) + logging.info(f"Now getting attribute {function_name} from " f"imported module {imported_module}.") task_function = getattr(imported_module, function_name) return task_function @@ -84,9 +74,7 @@ def _validate_function_signature(function: Callable): # CASE 1: Check that name is not forbidden if param.name in FORBIDDEN_PARAM_NAMES: - raise ValueError( - f"Function {function} has argument with name {param.name}" - ) + raise ValueError(f"Function {function} has argument with name {param.name}") # CASE 2: Raise an error for unions if str(param.annotation).startswith(("typing.Union[", "Union[")): @@ -98,12 +86,8 @@ def _validate_function_signature(function: Callable): # CASE 4: Raise an error for optional parameter with given (non-None) # default, e.g. Optional[str] = "asd" - is_annotation_optional = str(param.annotation).startswith( - ("typing.Optional[", "Optional[") - ) - default_given = (param.default is not None) and ( - param.default != inspect._empty - ) + is_annotation_optional = str(param.annotation).startswith(("typing.Optional[", "Optional[")) + default_given = (param.default is not None) and (param.default != inspect._empty) if default_given and is_annotation_optional: raise ValueError("Optional parameter has non-None default value") diff --git a/fractal_tasks_core/dev/task_models.py b/fractal_tasks_core/dev/task_models.py index 89a650866..2d2ff9835 100644 --- a/fractal_tasks_core/dev/task_models.py +++ b/fractal_tasks_core/dev/task_models.py @@ -18,7 +18,7 @@ from typing import Any from typing import Optional -from pydantic import BaseModel +from pydantic.v1 import BaseModel class _BaseTask(BaseModel): diff --git a/fractal_tasks_core/labels.py b/fractal_tasks_core/labels.py index 2dff5ac88..f15f82f91 100644 --- a/fractal_tasks_core/labels.py +++ b/fractal_tasks_core/labels.py @@ -16,7 +16,7 @@ from typing import Optional import zarr.hierarchy -from pydantic.error_wrappers import ValidationError +from pydantic.v1.error_wrappers import ValidationError from fractal_tasks_core.ngff import NgffImageMeta from fractal_tasks_core.zarr_utils import OverwriteNotAllowedError diff --git a/fractal_tasks_core/ngff/specs.py b/fractal_tasks_core/ngff/specs.py index 3c5700c34..9d65cc62e 100644 --- a/fractal_tasks_core/ngff/specs.py +++ b/fractal_tasks_core/ngff/specs.py @@ -7,9 +7,9 @@ from typing import Optional from typing import Union -from pydantic import BaseModel -from pydantic import Field -from pydantic import validator +from pydantic.v1 import BaseModel +from pydantic.v1 import Field +from pydantic.v1 import validator logger = logging.getLogger(__name__) diff --git a/fractal_tasks_core/tables/v1.py b/fractal_tasks_core/tables/v1.py index 67d26f73e..80a214ff0 100644 --- a/fractal_tasks_core/tables/v1.py +++ b/fractal_tasks_core/tables/v1.py @@ -2,6 +2,7 @@ Functions and classes related to table specifications V1 (see https://fractal-analytics-platform.github.io/fractal-tasks-core/tables). 
""" + import logging import warnings from typing import Any @@ -11,8 +12,8 @@ import anndata as ad import zarr.hierarchy from anndata.experimental import write_elem -from pydantic import BaseModel -from pydantic import validator +from pydantic.v1 import BaseModel +from pydantic.v1 import validator from pydantic.error_wrappers import ValidationError from fractal_tasks_core.zarr_utils import OverwriteNotAllowedError @@ -206,8 +207,7 @@ def _write_table_v1( if table_type is not None: if table_type_from_attrs is not None: logger.warning( - f"Setting table type to '{table_type}' (and overriding " - f"'{table_type_from_attrs}' attribute)." + f"Setting table type to '{table_type}' (and overriding " f"'{table_type_from_attrs}' attribute)." ) table_attrs["type"] = table_type else: @@ -266,9 +266,7 @@ def _write_table_v1( return table_group -def get_tables_list_v1( - zarr_url: str, table_type: str = None, strict: bool = False -) -> list[str]: +def get_tables_list_v1(zarr_url: str, table_type: str = None, strict: bool = False) -> list[str]: """ Find the list of tables in the Zarr file @@ -294,9 +292,7 @@ def get_tables_list_v1( if not table_type: return all_tables else: - return _filter_tables_by_type_v1( - zarr_url, all_tables, table_type, strict - ) + return _filter_tables_by_type_v1(zarr_url, all_tables, table_type, strict) def _filter_tables_by_type_v1( diff --git a/fractal_tasks_core/tasks/cellpose_transforms.py b/fractal_tasks_core/tasks/cellpose_transforms.py index f74ed0ef7..ed104c115 100644 --- a/fractal_tasks_core/tasks/cellpose_transforms.py +++ b/fractal_tasks_core/tasks/cellpose_transforms.py @@ -16,9 +16,9 @@ from typing import Optional import numpy as np -from pydantic import BaseModel -from pydantic import Field -from pydantic import root_validator +from pydantic.v1 import BaseModel +from pydantic.v1 import Field +from pydantic.v1 import root_validator logger = logging.getLogger(__name__) diff --git a/fractal_tasks_core/tasks/io_models.py b/fractal_tasks_core/tasks/io_models.py index 75c7c9cfe..db2bb7f81 100644 --- a/fractal_tasks_core/tasks/io_models.py +++ b/fractal_tasks_core/tasks/io_models.py @@ -1,9 +1,9 @@ from typing import Literal from typing import Optional -from pydantic import BaseModel -from pydantic import Field -from pydantic import validator +from pydantic.v1 import BaseModel +from pydantic.v1 import Field +from pydantic.v1 import validator from fractal_tasks_core.channels import ChannelInputModel from fractal_tasks_core.channels import OmeroChannel @@ -126,9 +126,7 @@ def table_name_only_for_dataframe_type(cls, v, values): """ _type = values.get("type") if (_type == "dataframe" and (not v)) or (_type != "dataframe" and v): - raise ValueError( - f"Output item has type={_type} but table_name={v}." - ) + raise ValueError(f"Output item has type={_type} but table_name={v}.") return v @@ -153,9 +151,7 @@ def label_name_is_present(cls, v, values): """ _type = values.get("type") if _type == "label" and not v: - raise ValueError( - f"Input item has type={_type} but label_name={v}." 
- ) + raise ValueError(f"Input item has type={_type} but label_name={v}.") return v @validator("channel", always=True) diff --git a/tests/data/fake_package_for_schema_experiments/my_package/lib_custom_models.py b/tests/data/fake_package_for_schema_experiments/my_package/lib_custom_models.py index a242589bc..0fba2d6df 100644 --- a/tests/data/fake_package_for_schema_experiments/my_package/lib_custom_models.py +++ b/tests/data/fake_package_for_schema_experiments/my_package/lib_custom_models.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel +from pydantic.v1 import BaseModel class CustomModel(BaseModel): From 558ad79848430e6443579e0a839fe3e0f2c761e4 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Mon, 17 Jun 2024 14:50:37 +0200 Subject: [PATCH 03/15] update all pydantic imports to .v1 --- fractal_tasks_core/roi/v1_checks.py | 2 +- fractal_tasks_core/tables/v1.py | 2 +- .../tasks/apply_registration_to_image.py | 46 ++---- .../calculate_registration_image_based.py | 46 ++---- .../tasks/cellpose_segmentation.py | 70 +++------ .../tasks/cellvoyager_to_ome_zarr_compute.py | 20 +-- .../tasks/cellvoyager_to_ome_zarr_init.py | 63 +++----- .../cellvoyager_to_ome_zarr_init_multiplex.py | 92 ++++-------- .../tasks/copy_ome_zarr_hcs_plate.py | 46 ++---- .../tasks/find_registration_consensus.py | 20 +-- .../tasks/illumination_correction.py | 39 ++--- .../image_based_registration_hcs_init.py | 6 +- fractal_tasks_core/tasks/import_ome_zarr.py | 16 +-- .../init_group_by_well_for_multiplexing.py | 6 +- .../tasks/maximum_intensity_projection.py | 24 +--- .../tasks/napari_workflows_wrapper.py | 135 +++++------------- .../dev/test_create_schema_for_single_task.py | 2 +- tests/dev/test_enum_arguments.py | 2 +- .../test_unit_napari_workflows_wrapper.py | 14 +- 19 files changed, 167 insertions(+), 484 deletions(-) diff --git a/fractal_tasks_core/roi/v1_checks.py b/fractal_tasks_core/roi/v1_checks.py index aa46366b7..fc3aa4672 100644 --- a/fractal_tasks_core/roi/v1_checks.py +++ b/fractal_tasks_core/roi/v1_checks.py @@ -17,7 +17,7 @@ import anndata as ad import zarr -from pydantic.error_wrappers import ValidationError +from pydantic.v1.error_wrappers import ValidationError from fractal_tasks_core.tables.v1 import MaskingROITableAttrs diff --git a/fractal_tasks_core/tables/v1.py b/fractal_tasks_core/tables/v1.py index 80a214ff0..070d2b53f 100644 --- a/fractal_tasks_core/tables/v1.py +++ b/fractal_tasks_core/tables/v1.py @@ -14,7 +14,7 @@ from anndata.experimental import write_elem from pydantic.v1 import BaseModel from pydantic.v1 import validator -from pydantic.error_wrappers import ValidationError +from pydantic.v1.error_wrappers import ValidationError from fractal_tasks_core.zarr_utils import OverwriteNotAllowedError diff --git a/fractal_tasks_core/tasks/apply_registration_to_image.py b/fractal_tasks_core/tasks/apply_registration_to_image.py index a8e3a718b..14bd35223 100644 --- a/fractal_tasks_core/tasks/apply_registration_to_image.py +++ b/fractal_tasks_core/tasks/apply_registration_to_image.py @@ -21,7 +21,7 @@ import dask.array as da import numpy as np import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.ngff import load_NgffImageMeta from fractal_tasks_core.ngff.zarr_utils import load_NgffWellMeta @@ -102,13 +102,10 @@ def apply_registration_to_image( acq_dict = load_NgffWellMeta(well_url).get_acquisition_paths() if reference_acquisition not in acq_dict: raise ValueError( - f"{reference_acquisition=} was not one of the 
available " - f"acquisitions in {acq_dict=} for well {well_url}" + f"{reference_acquisition=} was not one of the available " f"acquisitions in {acq_dict=} for well {well_url}" ) elif len(acq_dict[reference_acquisition]) > 1: - ref_path = _get_matching_ref_acquisition_path_heuristic( - acq_dict[reference_acquisition], old_img_path - ) + ref_path = _get_matching_ref_acquisition_path_heuristic(acq_dict[reference_acquisition], old_img_path) logger.warning( "Running registration when there are multiple images of the same " "acquisition in a well. Using a heuristic to match the reference " @@ -118,9 +115,7 @@ def apply_registration_to_image( ref_path = acq_dict[reference_acquisition][0] reference_zarr_url = f"{well_url}/{ref_path}" - ROI_table_ref = ad.read_zarr( - f"{reference_zarr_url}/tables/{registered_roi_table}" - ) + ROI_table_ref = ad.read_zarr(f"{reference_zarr_url}/tables/{registered_roi_table}") ROI_table_acq = ad.read_zarr(f"{zarr_url}/tables/{registered_roi_table}") ngff_image_meta = load_NgffImageMeta(zarr_url) @@ -209,9 +204,7 @@ def apply_registration_to_image( current_round = 0 while current_round < max_retries: try: - old_table_group = zarr.open_group( - table_dict[table], mode="r" - ) + old_table_group = zarr.open_group(table_dict[table], mode="r") current_round = max_retries except zarr.errors.GroupNotFoundError: logger.debug( @@ -234,9 +227,7 @@ def apply_registration_to_image( # Clean up Zarr file #################### if overwrite_input: - logger.info( - "Replace original zarr image with the newly created Zarr image" - ) + logger.info("Replace original zarr image with the newly created Zarr image") # Potential for race conditions: Every acquisition reads the # reference acquisition, but the reference acquisition also gets # modified @@ -246,9 +237,7 @@ def apply_registration_to_image( shutil.rmtree(f"{zarr_url}_tmp") image_list_updates = dict(image_list_updates=[dict(zarr_url=zarr_url)]) else: - image_list_updates = dict( - image_list_updates=[dict(zarr_url=new_zarr_url, origin=zarr_url)] - ) + image_list_updates = dict(image_list_updates=[dict(zarr_url=new_zarr_url, origin=zarr_url)]) # Update the metadata of the the well well_url, new_img_path = _split_well_path_image_path(new_zarr_url) _update_well_metadata( @@ -336,32 +325,23 @@ def write_registered_zarr( num_channels = data_array.shape[0] # Loop over channels for ind_ch in range(num_channels): - idx = tuple( - [slice(ind_ch, ind_ch + 1)] + list(reference_region) - ) - new_array[idx] = load_region( - data_zyx=data_array[ind_ch], region=region, compute=False - ) + idx = tuple([slice(ind_ch, ind_ch + 1)] + list(reference_region)) + new_array[idx] = load_region(data_zyx=data_array[ind_ch], region=region, compute=False) elif axes_list == ["z", "y", "x"]: - new_array[reference_region] = load_region( - data_zyx=data_array, region=region, compute=False - ) + new_array[reference_region] = load_region(data_zyx=data_array, region=region, compute=False) elif axes_list == ["c", "y", "x"]: # TODO: Implement cyx case (based on looping over xy case) raise NotImplementedError( - "`write_registered_zarr` has not been implemented for " - f"a zarr with {axes_list=}" + "`write_registered_zarr` has not been implemented for " f"a zarr with {axes_list=}" ) elif axes_list == ["y", "x"]: # TODO: Implement yx case raise NotImplementedError( - "`write_registered_zarr` has not been implemented for " - f"a zarr with {axes_list=}" + "`write_registered_zarr` has not been implemented for " f"a zarr with {axes_list=}" ) else: raise NotImplementedError( 
- "`write_registered_zarr` has not been implemented for " - f"a zarr with {axes_list=}" + "`write_registered_zarr` has not been implemented for " f"a zarr with {axes_list=}" ) new_array.to_zarr( diff --git a/fractal_tasks_core/tasks/calculate_registration_image_based.py b/fractal_tasks_core/tasks/calculate_registration_image_based.py index 5829eefd1..88ea75489 100644 --- a/fractal_tasks_core/tasks/calculate_registration_image_based.py +++ b/fractal_tasks_core/tasks/calculate_registration_image_based.py @@ -18,7 +18,7 @@ import dask.array as da import numpy as np import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from skimage.registration import phase_cross_correlation from fractal_tasks_core.channels import get_channel_from_image_zarr @@ -82,9 +82,7 @@ def calculate_registration_image_based( """ logger.info( - f"Running for {zarr_url=}.\n" - f"Calculating translation registration per {roi_table=} for " - f"{wavelength_id=}." + f"Running for {zarr_url=}.\n" f"Calculating translation registration per {roi_table=} for " f"{wavelength_id=}." ) init_args.reference_zarr_url = init_args.reference_zarr_url @@ -108,21 +106,13 @@ def calculate_registration_image_based( channel_index_align = channel_align.index # Lazily load zarr array - data_reference_zyx = da.from_zarr( - f"{init_args.reference_zarr_url}/{level}" - )[channel_index_ref] - data_alignment_zyx = da.from_zarr(f"{zarr_url}/{level}")[ - channel_index_align - ] + data_reference_zyx = da.from_zarr(f"{init_args.reference_zarr_url}/{level}")[channel_index_ref] + data_alignment_zyx = da.from_zarr(f"{zarr_url}/{level}")[channel_index_align] # Read ROIs - ROI_table_ref = ad.read_zarr( - f"{init_args.reference_zarr_url}/tables/{roi_table}" - ) + ROI_table_ref = ad.read_zarr(f"{init_args.reference_zarr_url}/tables/{roi_table}") ROI_table_x = ad.read_zarr(f"{zarr_url}/tables/{roi_table}") - logger.info( - f"Found {len(ROI_table_x)} ROIs in {roi_table=} to be processed." - ) + logger.info(f"Found {len(ROI_table_x)} ROIs in {roi_table=} to be processed.") # Check that table type of ROI_table_ref is valid. Note that # "ngff:region_table" and None are accepted for backwards compatibility @@ -139,12 +129,7 @@ def calculate_registration_image_based( ref_table_attrs = ROI_table_ref_group.attrs.asdict() ref_table_type = ref_table_attrs.get("type") if ref_table_type not in valid_table_types: - raise ValueError( - ( - f"Table '{roi_table}' (with type '{ref_table_type}') is " - "not a valid ROI table." - ) - ) + raise ValueError((f"Table '{roi_table}' (with type '{ref_table_type}') is " "not a valid ROI table.")) # For each acquisition, get the relevant info # TODO: Add additional checks on ROIs? @@ -167,10 +152,7 @@ def calculate_registration_image_based( pxl_sizes_zyx_acq_x = ngff_image_meta_acq_x.get_pixel_sizes_zyx(level=0) if pxl_sizes_zyx != pxl_sizes_zyx_acq_x: - raise ValueError( - "Pixel sizes need to be equal between acquisitions for " - "registration." - ) + raise ValueError("Pixel sizes need to be equal between acquisitions for " "registration.") # Create list of indices for 3D ROIs spanning the entire Z direction list_indices_ref = convert_ROI_table_to_indices( @@ -193,10 +175,7 @@ def calculate_registration_image_based( compute = True new_shifts = {} for i_ROI in range(num_ROIs): - logger.info( - f"Now processing ROI {i_ROI+1}/{num_ROIs} " - f"for channel {channel_align}." 
- ) + logger.info(f"Now processing ROI {i_ROI+1}/{num_ROIs} " f"for channel {channel_align}.") img_ref = load_region( data_zyx=data_reference_zyx, region=convert_indices_to_regions(list_indices_ref[i_ROI]), @@ -214,12 +193,9 @@ def calculate_registration_image_based( # Basic version (no padding, no internal binning) if img_ref.shape != img_acq_x.shape: raise NotImplementedError( - "This registration is not implemented for ROIs with " - "different shapes between acquisitions." + "This registration is not implemented for ROIs with " "different shapes between acquisitions." ) - shifts = phase_cross_correlation( - np.squeeze(img_ref), np.squeeze(img_acq_x) - )[0] + shifts = phase_cross_correlation(np.squeeze(img_ref), np.squeeze(img_acq_x))[0] # Registration based on scmultiplex, image-based # shifts, _, _ = calculate_shift(np.squeeze(img_ref), diff --git a/fractal_tasks_core/tasks/cellpose_segmentation.py b/fractal_tasks_core/tasks/cellpose_segmentation.py index 6664dc920..9bf542d6d 100644 --- a/fractal_tasks_core/tasks/cellpose_segmentation.py +++ b/fractal_tasks_core/tasks/cellpose_segmentation.py @@ -26,7 +26,7 @@ import pandas as pd import zarr from cellpose import models -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments import fractal_tasks_core from fractal_tasks_core.channels import ChannelInputModel @@ -329,13 +329,8 @@ def cellpose_segmentation( actual_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=level) logger.info(f"NGFF image has {num_levels=}") logger.info(f"NGFF image has {coarsening_xy=}") - logger.info( - f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}" - ) - logger.info( - f"NGFF image has level-{level} pixel sizes " - f"{actual_res_pxl_sizes_zyx}" - ) + logger.info(f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}") + logger.info(f"NGFF image has level-{level} pixel sizes " f"{actual_res_pxl_sizes_zyx}") # Find channel index try: @@ -345,10 +340,7 @@ def cellpose_segmentation( label=channel.label, ) except ChannelNotFoundError as e: - logger.warning( - "Channel not found, exit from the task.\n" - f"Original error: {str(e)}" - ) + logger.warning("Channel not found, exit from the task.\n" f"Original error: {str(e)}") return None ind_channel = tmp_channel.index @@ -389,14 +381,9 @@ def cellpose_segmentation( ROI_table = ad.read_zarr(ROI_table_path) # Perform some checks on the ROI table - valid_ROI_table = is_ROI_table_valid( - table_path=ROI_table_path, use_masks=use_masks - ) + valid_ROI_table = is_ROI_table_valid(table_path=ROI_table_path, use_masks=use_masks) if use_masks and not valid_ROI_table: - logger.info( - f"ROI table at {ROI_table_path} cannot be used for masked " - "loading. Set use_masks=False." - ) + logger.info(f"ROI table at {ROI_table_path} cannot be used for masked " "loading. 
Set use_masks=False.") use_masks = False logger.info(f"{use_masks=}") @@ -423,9 +410,7 @@ def cellpose_segmentation( if do_3D: if anisotropy is None: # Compute anisotropy as pixel_size_z/pixel_size_x - anisotropy = ( - actual_res_pxl_sizes_zyx[0] / actual_res_pxl_sizes_zyx[2] - ) + anisotropy = actual_res_pxl_sizes_zyx[0] / actual_res_pxl_sizes_zyx[2] logger.info(f"Anisotropy: {anisotropy}") # Rescale datasets (only relevant for level>0) @@ -451,11 +436,7 @@ def cellpose_segmentation( { "name": output_label_name, "version": __OME_NGFF_VERSION__, - "axes": [ - ax.dict() - for ax in ngff_image_meta.multiscale.axes - if ax.type != "channel" - ], + "axes": [ax.dict() for ax in ngff_image_meta.multiscale.axes if ax.type != "channel"], "datasets": new_datasets, } ], @@ -470,9 +451,7 @@ def cellpose_segmentation( logger=logger, ) - logger.info( - f"Helper function `prepare_label_group` returned {label_group=}" - ) + logger.info(f"Helper function `prepare_label_group` returned {label_group=}") logger.info(f"Output label path: {zarr_url}/labels/{output_label_name}/0") store = zarr.storage.FSStore(f"{zarr_url}/labels/{output_label_name}/0") label_dtype = np.uint32 @@ -494,17 +473,12 @@ def cellpose_segmentation( dimension_separator="/", ) - logger.info( - f"mask will have shape {data_zyx.shape} " - f"and chunks {data_zyx.chunks}" - ) + logger.info(f"mask will have shape {data_zyx.shape} " f"and chunks {data_zyx.chunks}") # Initialize cellpose gpu = use_gpu and cellpose.core.use_gpu() if pretrained_model: - model = models.CellposeModel( - gpu=gpu, pretrained_model=pretrained_model - ) + model = models.CellposeModel(gpu=gpu, pretrained_model=pretrained_model) else: model = models.CellposeModel(gpu=gpu, model_type=model_type) @@ -626,9 +600,7 @@ def cellpose_segmentation( # Check that total number of labels is under control if num_labels_tot > np.iinfo(label_dtype).max: raise ValueError( - "ERROR in re-labeling:" - f"Reached {num_labels_tot} labels, " - f"but dtype={label_dtype}" + "ERROR in re-labeling:" f"Reached {num_labels_tot} labels, " f"but dtype={label_dtype}" ) if output_ROI_table: @@ -642,13 +614,9 @@ def cellpose_segmentation( overlap_list = [] for df in bbox_dataframe_list: - overlap_list.extend( - get_overlapping_pairs_3D(df, full_res_pxl_sizes_zyx) - ) + overlap_list.extend(get_overlapping_pairs_3D(df, full_res_pxl_sizes_zyx)) if len(overlap_list) > 0: - logger.warning( - f"{len(overlap_list)} bounding-box pairs overlap" - ) + logger.warning(f"{len(overlap_list)} bounding-box pairs overlap") # Compute and store 0-th level to disk da.array(new_label_img).to_zarr( @@ -657,10 +625,7 @@ def cellpose_segmentation( compute=True, ) - logger.info( - f"End cellpose_segmentation task for {zarr_url}, " - "now building pyramids." 
- ) + logger.info(f"End cellpose_segmentation task for {zarr_url}, " "now building pyramids.") # Starting from on-disk highest-resolution data, build and write to disk a # pyramid of coarser levels @@ -695,10 +660,7 @@ def cellpose_segmentation( # Write to zarr group image_group = zarr.group(zarr_url) - logger.info( - "Now writing bounding-box ROI table to " - f"{zarr_url}/tables/{output_ROI_table}" - ) + logger.info("Now writing bounding-box ROI table to " f"{zarr_url}/tables/{output_ROI_table}") table_attrs = { "type": "masking_roi_table", "region": {"path": f"../labels/{output_label_name}"}, diff --git a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py index 11a8aba90..f82ad20ce 100644 --- a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py +++ b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py @@ -18,7 +18,7 @@ import zarr from anndata import read_zarr from dask.array.image import imread -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.cellvoyager.filenames import ( glob_with_multiple_patterns, @@ -85,13 +85,9 @@ def cellvoyager_to_ome_zarr_compute( full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) logger.info(f"NGFF image has {num_levels=}") logger.info(f"NGFF image has {coarsening_xy=}") - logger.info( - f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}" - ) + logger.info(f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}") - channels: list[OmeroChannel] = get_omero_channel_list( - image_zarr_path=zarr_url - ) + channels: list[OmeroChannel] = get_omero_channel_list(image_zarr_path=zarr_url) wavelength_ids = [c.wavelength_id for c in channels] # Read useful information from ROI table @@ -115,10 +111,7 @@ def cellvoyager_to_ome_zarr_compute( max_x = well_indices[0][5] # Load a single image, to retrieve useful information - patterns = [ - f"{init_args.plate_prefix}_{init_args.well_ID}_*." - f"{init_args.image_extension}" - ] + patterns = [f"{init_args.plate_prefix}_{init_args.well_ID}_*." f"{init_args.image_extension}"] if init_args.image_glob_patterns: patterns.extend(init_args.image_glob_patterns) @@ -143,10 +136,7 @@ def cellvoyager_to_ome_zarr_compute( for i_c, wavelength_id in enumerate(wavelength_ids): A, C = wavelength_id.split("_") - patterns = [ - f"{init_args.plate_prefix}_{init_args.well_ID}_*{A}*{C}*." - f"{init_args.image_extension}" - ] + patterns = [f"{init_args.plate_prefix}_{init_args.well_ID}_*{A}*{C}*." f"{init_args.image_extension}"] if init_args.image_glob_patterns: patterns.extend(init_args.image_glob_patterns) filenames_set = glob_with_multiple_patterns( diff --git a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py index ec1e8a098..4e88f70ee 100644 --- a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py +++ b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py @@ -17,7 +17,7 @@ from typing import Optional import pandas as pd -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments import fractal_tasks_core from fractal_tasks_core.cellvoyager.filenames import ( @@ -160,9 +160,7 @@ def cellvoyager_to_ome_zarr_init( C = filename_metadata["C"] tmp_wavelength_ids.append(f"A{A}_C{C}") except ValueError as e: - logger.warning( - f'Skipping "{Path(fn).name}". 
Original error: ' + str(e) - ) + logger.warning(f'Skipping "{Path(fn).name}". Original error: ' + str(e)) tmp_plates = sorted(list(set(tmp_plates))) tmp_wavelength_ids = sorted(list(set(tmp_wavelength_ids))) @@ -188,9 +186,7 @@ def cellvoyager_to_ome_zarr_init( while new_plate in plates: new_plate = f"{plate}_{ind}" ind += 1 - logger.info( - f"WARNING: {plate} already exists, renaming it as {new_plate}" - ) + logger.info(f"WARNING: {plate} already exists, renaming it as {new_plate}") plates.append(new_plate) dict_plate_prefixes[new_plate] = dict_plate_prefixes[plate] plate = new_plate @@ -202,18 +198,13 @@ def cellvoyager_to_ome_zarr_init( actual_wavelength_ids = tmp_wavelength_ids[:] else: if actual_wavelength_ids != tmp_wavelength_ids: - raise ValueError( - f"ERROR\n{info}\nERROR:" - f" expected channels {actual_wavelength_ids}" - ) + raise ValueError(f"ERROR\n{info}\nERROR:" f" expected channels {actual_wavelength_ids}") # Update dict_plate_paths dict_plate_paths[plate] = image_dir # Check that all channels are in the allowed_channels - allowed_wavelength_ids = [ - channel.wavelength_id for channel in allowed_channels - ] + allowed_wavelength_ids = [channel.wavelength_id for channel in allowed_channels] if not set(actual_wavelength_ids).issubset(set(allowed_wavelength_ids)): msg = "ERROR in create_ome_zarr\n" msg += f"actual_wavelength_ids: {actual_wavelength_ids}\n" @@ -222,11 +213,7 @@ def cellvoyager_to_ome_zarr_init( # Create actual_channels, i.e. a list of the channel dictionaries which are # present - actual_channels = [ - channel - for channel in allowed_channels - if channel.wavelength_id in actual_wavelength_ids - ] + actual_channels = [channel for channel in allowed_channels if channel.wavelength_id in actual_wavelength_ids] ################################################################ # Create well/image OME-Zarr folders on disk, and prepare output @@ -280,13 +267,9 @@ def cellvoyager_to_ome_zarr_init( patterns = [f"{plate_prefix}_*.{image_extension}"] if image_glob_patterns: patterns.extend(image_glob_patterns) - plate_images = glob_with_multiple_patterns( - folder=str(in_path), patterns=patterns - ) + plate_images = glob_with_multiple_patterns(folder=str(in_path), patterns=patterns) - wells = [ - parse_filename(os.path.basename(fn))["well"] for fn in plate_images - ] + wells = [parse_filename(os.path.basename(fn))["well"] for fn in plate_images] wells = sorted(list(set(wells))) # Verify that all wells have all channels @@ -294,9 +277,7 @@ def cellvoyager_to_ome_zarr_init( patterns = [f"{plate_prefix}_{well}_*.{image_extension}"] if image_glob_patterns: patterns.extend(image_glob_patterns) - well_images = glob_with_multiple_patterns( - folder=str(in_path), patterns=patterns - ) + well_images = glob_with_multiple_patterns(folder=str(in_path), patterns=patterns) # Check number of images matches with expected one if metadata_table_file is None: @@ -316,9 +297,7 @@ def cellvoyager_to_ome_zarr_init( for fpath in well_images: try: filename_metadata = parse_filename(os.path.basename(fpath)) - well_wavelength_ids.append( - f"A{filename_metadata['A']}_C{filename_metadata['C']}" - ) + well_wavelength_ids.append(f"A{filename_metadata['A']}_C{filename_metadata['C']}") except IndexError: logger.info(f"Skipping {fpath}") well_wavelength_ids = sorted(list(set(well_wavelength_ids))) @@ -332,12 +311,8 @@ def cellvoyager_to_ome_zarr_init( well_rows_columns = generate_row_col_split(wells) - row_list = [ - well_row_column[0] for well_row_column in well_rows_columns - ] - col_list = [ - 
well_row_column[1] for well_row_column in well_rows_columns - ] + row_list = [well_row_column[0] for well_row_column in well_rows_columns] + col_list = [well_row_column[1] for well_row_column in well_rows_columns] row_list = sorted(list(set(row_list))) col_list = sorted(list(set(col_list))) @@ -416,10 +391,8 @@ def cellvoyager_to_ome_zarr_init( "scale": [ 1, pixel_size_z, - pixel_size_y - * coarsening_xy**ind_level, - pixel_size_x - * coarsening_xy**ind_level, + pixel_size_y * coarsening_xy**ind_level, + pixel_size_x * coarsening_xy**ind_level, ], } ], @@ -433,9 +406,7 @@ def cellvoyager_to_ome_zarr_init( "id": 1, # TODO does this depend on the plate number? "name": "TBD", "version": __OME_NGFF_VERSION__, - "channels": define_omero_channels( - channels=actual_channels, bit_depth=bit_depth - ), + "channels": define_omero_channels(channels=actual_channels, bit_depth=bit_depth), } # Validate Image attrs @@ -444,9 +415,7 @@ def cellvoyager_to_ome_zarr_init( # Prepare AnnData tables for FOV/well ROIs well_id = get_filename_well_id(row, column) FOV_ROIs_table = prepare_FOV_ROI_table(site_metadata.loc[well_id]) - well_ROIs_table = prepare_well_ROI_table( - site_metadata.loc[well_id] - ) + well_ROIs_table = prepare_well_ROI_table(site_metadata.loc[well_id]) # Write AnnData tables into the `tables` zarr group write_table( diff --git a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py index 8e7f9b890..8ce0864ac 100644 --- a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py +++ b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py @@ -18,7 +18,7 @@ import pandas as pd import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from zarr.errors import ContainsGroupError import fractal_tasks_core @@ -122,19 +122,13 @@ def cellvoyager_to_ome_zarr_init_multiplex( # 3. Files exist. if set(acquisitions.keys()) != set(metadata_table_files.keys()): raise ValueError( - "Mismatch in acquisition keys between " - f"{acquisitions.keys()=} and " - f"{metadata_table_files.keys()=}" + "Mismatch in acquisition keys between " f"{acquisitions.keys()=} and " f"{metadata_table_files.keys()=}" ) for f in metadata_table_files.values(): if not f.endswith(".csv"): - raise ValueError( - f"{f} (in metadata_table_file) is not a csv file." - ) + raise ValueError(f"{f} (in metadata_table_file) is not a csv file.") if not os.path.isfile(f): - raise ValueError( - f"{f} (in metadata_table_file) does not exist." - ) + raise ValueError(f"{f} (in metadata_table_file) does not exist.") # Preliminary checks on acquisitions # Note that in metadata the keys of dictionary arguments should be @@ -172,9 +166,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( C = filename_metadata["C"] actual_wavelength_ids.append(f"A{A}_C{C}") except ValueError as e: - logger.warning( - f'Skipping "{Path(fn).name}". Original error: ' + str(e) - ) + logger.warning(f'Skipping "{Path(fn).name}". 
Original error: ' + str(e)) plates = sorted(list(set(plates))) actual_wavelength_ids = sorted(list(set(actual_wavelength_ids))) @@ -197,17 +189,12 @@ def cellvoyager_to_ome_zarr_init_multiplex( if int(acquisition) > 0: plate = dict_acquisitions["0"]["plate"] logger.warning( - f"For {acquisition=}, we replace {original_plate=} with " - f"{plate=} (the one for acquisition 0)" + f"For {acquisition=}, we replace {original_plate=} with " f"{plate=} (the one for acquisition 0)" ) # Check that all channels are in the allowed_channels - allowed_wavelength_ids = [ - c.wavelength_id for c in acq_input.allowed_channels - ] - if not set(actual_wavelength_ids).issubset( - set(allowed_wavelength_ids) - ): + allowed_wavelength_ids = [c.wavelength_id for c in acq_input.allowed_channels] + if not set(actual_wavelength_ids).issubset(set(allowed_wavelength_ids)): msg = "ERROR in create_ome_zarr\n" msg += f"actual_wavelength_ids: {actual_wavelength_ids}\n" msg += f"allowed_wavelength_ids: {allowed_wavelength_ids}\n" @@ -216,9 +203,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( # Create actual_channels, i.e. a list of the channel dictionaries which # are present actual_channels = [ - channel - for channel in acq_input.allowed_channels - if channel.wavelength_id in actual_wavelength_ids + channel for channel in acq_input.allowed_channels if channel.wavelength_id in actual_wavelength_ids ] logger.info(f"plate: {plate}") @@ -229,13 +214,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( dict_acquisitions[acquisition]["original_plate"] = original_plate dict_acquisitions[acquisition]["plate_prefix"] = plate_prefix dict_acquisitions[acquisition]["image_folder"] = acq_input.image_dir - dict_acquisitions[acquisition]["original_paths"] = [ - acq_input.image_dir - ] + dict_acquisitions[acquisition]["original_paths"] = [acq_input.image_dir] dict_acquisitions[acquisition]["actual_channels"] = actual_channels - dict_acquisitions[acquisition][ - "actual_wavelength_ids" - ] = actual_wavelength_ids + dict_acquisitions[acquisition]["actual_wavelength_ids"] = actual_wavelength_ids parallelization_list = [] acquisitions_sorted = sorted(list(acquisitions.keys())) @@ -248,9 +229,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( full_zarrurl = str(Path(zarr_dir) / zarrurl) logger.info(f"Creating {full_zarrurl=}") # Call zarr.open_group wrapper, which handles overwrite=True/False - group_plate = open_zarr_group_with_overwrite( - full_zarrurl, overwrite=overwrite - ) + group_plate = open_zarr_group_with_overwrite(full_zarrurl, overwrite=overwrite) group_plate.attrs["plate"] = { "acquisitions": [ { @@ -304,9 +283,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( patterns=patterns, ) - wells = [ - parse_filename(os.path.basename(fn))["well"] for fn in plate_images - ] + wells = [parse_filename(os.path.basename(fn))["well"] for fn in plate_images] wells = sorted(list(set(wells))) logger.info(f"{wells=}") @@ -331,9 +308,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( except IndexError: logger.info(f"Skipping {fpath}") well_wavelength_ids = sorted(list(set(well_wavelength_ids))) - actual_wavelength_ids = dict_acquisitions[acquisition][ - "actual_wavelength_ids" - ] + actual_wavelength_ids = dict_acquisitions[acquisition]["actual_wavelength_ids"] if well_wavelength_ids != actual_wavelength_ids: raise ValueError( f"ERROR: well {well} in plate {plate} (prefix: " @@ -343,12 +318,8 @@ def cellvoyager_to_ome_zarr_init_multiplex( ) well_rows_columns = generate_row_col_split(wells) - row_list = [ - well_row_column[0] for well_row_column in 
well_rows_columns - ] - col_list = [ - well_row_column[1] for well_row_column in well_rows_columns - ] + row_list = [well_row_column[0] for well_row_column in well_rows_columns] + col_list = [well_row_column[1] for well_row_column in well_rows_columns] row_list = sorted(list(set(row_list))) col_list = sorted(list(set(col_list))) @@ -371,10 +342,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( for row, column in well_rows_columns: parallelization_list.append( { - "zarr_url": ( - f"{zarr_dir}/{plate}.zarr/{row}/{column}/" - f"{acquisition}/" - ), + "zarr_url": (f"{zarr_dir}/{plate}.zarr/{row}/{column}/" f"{acquisition}/"), "init_args": InitArgsCellVoyager( image_dir=acquisitions[acquisition].image_dir, plate_prefix=plate_prefix, @@ -402,12 +370,8 @@ def cellvoyager_to_ome_zarr_init_multiplex( group_well.attrs["well"] = well_attrs zarrurls["well"].append(f"{plate}.zarr/{row}/{column}") except ContainsGroupError: - group_well = zarr.open_group( - f"{full_zarrurl}/{row}/{column}/", mode="r+" - ) - logging.info( - f"Loaded group_well from {full_zarrurl}/{row}/{column}" - ) + group_well = zarr.open_group(f"{full_zarrurl}/{row}/{column}/", mode="r+") + logging.info(f"Loaded group_well from {full_zarrurl}/{row}/{column}") current_images = group_well.attrs["well"]["images"] + [ {"path": f"{acquisition}", "acquisition": int(acquisition)} ] @@ -419,9 +383,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( Well(**well_attrs) group_well.attrs["well"] = well_attrs - group_image = group_well.create_group( - f"{acquisition}/" - ) # noqa: F841 + group_image = group_well.create_group(f"{acquisition}/") # noqa: F841 logging.info(f"Created image group {row}/{column}/{acquisition}") image = f"{plate}.zarr/{row}/{column}/{acquisition}" zarrurls["image"].append(image) @@ -456,10 +418,8 @@ def cellvoyager_to_ome_zarr_init_multiplex( "scale": [ 1, pixel_size_z, - pixel_size_y - * coarsening_xy**ind_level, - pixel_size_x - * coarsening_xy**ind_level, + pixel_size_y * coarsening_xy**ind_level, + pixel_size_x * coarsening_xy**ind_level, ], } ], @@ -485,9 +445,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( # Prepare AnnData tables for FOV/well ROIs well_id = get_filename_well_id(row, column) FOV_ROIs_table = prepare_FOV_ROI_table(site_metadata.loc[well_id]) - well_ROIs_table = prepare_well_ROI_table( - site_metadata.loc[well_id] - ) + well_ROIs_table = prepare_well_ROI_table(site_metadata.loc[well_id]) # Write AnnData tables into the `tables` zarr group write_table( @@ -508,9 +466,7 @@ def cellvoyager_to_ome_zarr_init_multiplex( # Check that the different images (e.g. 
different acquisitions) in the each # well have unique labels for well_path in zarrurls["well"]: - check_well_channel_labels( - well_zarr_path=str(Path(zarr_dir) / well_path) - ) + check_well_channel_labels(well_zarr_path=str(Path(zarr_dir) / well_path)) return dict(parallelization_list=parallelization_list) diff --git a/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py b/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py index 6d6be9cc4..09bbeb143 100644 --- a/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py +++ b/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py @@ -16,7 +16,7 @@ from typing import Any import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments import fractal_tasks_core from fractal_tasks_core.ngff.specs import NgffPlateMeta @@ -153,36 +153,26 @@ def _generate_plate_well_metadata( # Find images of the current well with name matching the current image # TODO: clarify whether this list must always have length 1 curr_well_image_list = [ - img - for img in well_image_attrs[old_plate_url][well_sub_url].images - if img.path == curr_img_sub_url + img for img in well_image_attrs[old_plate_url][well_sub_url].images if img.path == curr_img_sub_url ] - new_well_image_attrs[old_plate_url][ - well_sub_url - ] += curr_well_image_list + new_well_image_attrs[old_plate_url][well_sub_url] += curr_well_image_list # Fill in the plate metadata based on all available wells for old_plate_url in plate_metadata_dicts: - well_list, row_list, column_list = _generate_wells_rows_columns( - plate_wells[old_plate_url] - ) + well_list, row_list, column_list = _generate_wells_rows_columns(plate_wells[old_plate_url]) plate_metadata_dicts[old_plate_url]["plate"]["columns"] = [] for column in column_list: - plate_metadata_dicts[old_plate_url]["plate"]["columns"].append( - {"name": column} - ) + plate_metadata_dicts[old_plate_url]["plate"]["columns"].append({"name": column}) plate_metadata_dicts[old_plate_url]["plate"]["rows"] = [] for row in row_list: - plate_metadata_dicts[old_plate_url]["plate"]["rows"].append( - {"name": row} - ) + plate_metadata_dicts[old_plate_url]["plate"]["rows"].append({"name": row}) plate_metadata_dicts[old_plate_url]["plate"]["wells"] = well_list # Validate with NgffPlateMeta model - plate_metadata_dicts[old_plate_url] = NgffPlateMeta( - **plate_metadata_dicts[old_plate_url] - ).dict(exclude_none=True) + plate_metadata_dicts[old_plate_url] = NgffPlateMeta(**plate_metadata_dicts[old_plate_url]).dict( + exclude_none=True + ) return plate_metadata_dicts, new_well_image_attrs, well_image_attrs @@ -231,8 +221,7 @@ def copy_ome_zarr_hcs_plate( # Preliminary check if suffix is None or suffix == "": raise ValueError( - "Running copy_ome_zarr_hcs_plate without a suffix would lead to" - "overwriting of the existing HCS plates." + "Running copy_ome_zarr_hcs_plate without a suffix would lead to" "overwriting of the existing HCS plates." 
) parallelization_list = [] @@ -267,9 +256,7 @@ def copy_ome_zarr_hcs_plate( zarrurl_new = f"{zarr_dir}/{new_plate_name}.zarr" logger.info(f"{old_plate_url=}") logger.info(f"{zarrurl_new=}") - new_plate_group = open_zarr_group_with_overwrite( - zarrurl_new, overwrite=overwrite - ) + new_plate_group = open_zarr_group_with_overwrite(zarrurl_new, overwrite=overwrite) new_plate_group.attrs.put(plate_attrs) # Write well groups: @@ -277,15 +264,8 @@ def copy_ome_zarr_hcs_plate( new_well_group = zarr.group(f"{zarrurl_new}/{well_sub_url}") well_attrs = dict( well=dict( - images=[ - img.dict(exclude_none=True) - for img in new_well_image_attrs[old_plate_url][ - well_sub_url - ] - ], - version=well_image_attrs[old_plate_url][ - well_sub_url - ].version, + images=[img.dict(exclude_none=True) for img in new_well_image_attrs[old_plate_url][well_sub_url]], + version=well_image_attrs[old_plate_url][well_sub_url].version, ) ) new_well_group.attrs.put(well_attrs) diff --git a/fractal_tasks_core/tasks/find_registration_consensus.py b/fractal_tasks_core/tasks/find_registration_consensus.py index d93b0d6b9..543888f68 100644 --- a/fractal_tasks_core/tasks/find_registration_consensus.py +++ b/fractal_tasks_core/tasks/find_registration_consensus.py @@ -17,7 +17,7 @@ import anndata as ad import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.roi import ( are_ROI_table_columns_valid, @@ -87,9 +87,7 @@ def find_registration_consensus( roi_tables_attrs = {} for acq_zarr_url in init_args.zarr_url_list: curr_ROI_table = ad.read_zarr(f"{acq_zarr_url}/tables/{roi_table}") - curr_ROI_table_group = zarr.open_group( - f"{acq_zarr_url}/tables/{roi_table}", mode="r" - ) + curr_ROI_table_group = zarr.open_group(f"{acq_zarr_url}/tables/{roi_table}", mode="r") curr_ROI_table_attrs = curr_ROI_table_group.attrs.asdict() # For reference_acquisition, handle the fact that it doesn't @@ -123,26 +121,18 @@ def find_registration_consensus( f"{zarr_url}: {rois}" ) - roi_table_dfs = [ - roi_table.to_df().loc[:, translation_columns] - for roi_table in roi_tables.values() - ] + roi_table_dfs = [roi_table.to_df().loc[:, translation_columns] for roi_table in roi_tables.values()] logger.info("Calculating min & max translation across acquisitions.") max_df, min_df = calculate_min_max_across_dfs(roi_table_dfs) shifted_rois = {} # Loop over acquisitions for acq_zarr_url in init_args.zarr_url_list: - shifted_rois[acq_zarr_url] = apply_registration_to_single_ROI_table( - roi_tables[acq_zarr_url], max_df, min_df - ) + shifted_rois[acq_zarr_url] = apply_registration_to_single_ROI_table(roi_tables[acq_zarr_url], max_df, min_df) # TODO: Drop translation columns from this table? 
- logger.info( - f"Write the registered ROI table {new_roi_table} for " - "{acq_zarr_url=}" - ) + logger.info(f"Write the registered ROI table {new_roi_table} for " "{acq_zarr_url=}") # Save the shifted ROI table as a new table image_group = zarr.group(acq_zarr_url) write_table( diff --git a/fractal_tasks_core/tasks/illumination_correction.py b/fractal_tasks_core/tasks/illumination_correction.py index 06e056181..dbfe00edc 100644 --- a/fractal_tasks_core/tasks/illumination_correction.py +++ b/fractal_tasks_core/tasks/illumination_correction.py @@ -22,7 +22,7 @@ import dask.array as da import numpy as np import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from skimage.io import imread from fractal_tasks_core.channels import get_omero_channel_list @@ -59,10 +59,7 @@ def correct( # Check shapes if corr_img.shape != img_stack.shape[2:] or img_stack.shape[0] != 1: - raise ValueError( - "Error in illumination_correction:\n" - f"{img_stack.shape=}\n{corr_img.shape=}" - ) + raise ValueError("Error in illumination_correction:\n" f"{img_stack.shape=}\n{corr_img.shape=}") # Store info about dtype dtype = img_stack.dtype @@ -106,7 +103,6 @@ def illumination_correction( # Advanced parameters suffix: str = "_illum_corr", ) -> dict[str, Any]: - """ Applies illumination correction to the images in the OME-Zarr. @@ -161,14 +157,10 @@ def illumination_correction( full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) logger.info(f"NGFF image has {num_levels=}") logger.info(f"NGFF image has {coarsening_xy=}") - logger.info( - f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}" - ) + logger.info(f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}") # Read channels from .zattrs - channels: list[OmeroChannel] = get_omero_channel_list( - image_zarr_path=zarr_url - ) + channels: list[OmeroChannel] = get_omero_channel_list(image_zarr_path=zarr_url) num_channels = len(channels) # Read FOV ROIs @@ -192,9 +184,7 @@ def illumination_correction( ref_img_size = img_size else: if img_size != ref_img_size: - raise ValueError( - "ERROR: inconsistent image sizes in list_indices" - ) + raise ValueError("ERROR: inconsistent image sizes in list_indices") img_size_y, img_size_x = img_size[:] # Assemble dictionary of matrices and check their shapes @@ -202,16 +192,10 @@ def illumination_correction( for channel in channels: wavelength_id = channel.wavelength_id corrections[wavelength_id] = imread( - ( - Path(illumination_profiles_folder) - / illumination_profiles[wavelength_id] - ).as_posix() + (Path(illumination_profiles_folder) / illumination_profiles[wavelength_id]).as_posix() ) if corrections[wavelength_id].shape != (img_size_y, img_size_x): - raise ValueError( - "Error in illumination_correction, " - "correction matrix has wrong shape." 
- ) + raise ValueError("Error in illumination_correction, " "correction matrix has wrong shape.") # Lazily load highest-res level from original zarr array data_czyx = da.from_zarr(f"{zarr_url}/0") @@ -245,10 +229,7 @@ def illumination_correction( slice(s_y, e_y), slice(s_x, e_x), ) - logger.info( - f"Now processing ROI {i_ROI+1}/{num_ROIs} " - f"for channel {i_c+1}/{num_channels}" - ) + logger.info(f"Now processing ROI {i_ROI+1}/{num_ROIs} " f"for channel {i_c+1}/{num_channels}") # Execute illumination correction corrected_fov = correct( data_czyx[region].compute(), @@ -278,9 +259,7 @@ def illumination_correction( if overwrite_input: image_list_updates = dict(image_list_updates=[dict(zarr_url=zarr_url)]) else: - image_list_updates = dict( - image_list_updates=[dict(zarr_url=zarr_url_new, origin=zarr_url)] - ) + image_list_updates = dict(image_list_updates=[dict(zarr_url=zarr_url_new, origin=zarr_url)]) return image_list_updates diff --git a/fractal_tasks_core/tasks/image_based_registration_hcs_init.py b/fractal_tasks_core/tasks/image_based_registration_hcs_init.py index f1496e0e1..9f553cb95 100644 --- a/fractal_tasks_core/tasks/image_based_registration_hcs_init.py +++ b/fractal_tasks_core/tasks/image_based_registration_hcs_init.py @@ -15,7 +15,7 @@ import logging from typing import Any -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.tasks._registration_utils import ( create_well_acquisition_dict, @@ -59,9 +59,7 @@ def image_based_registration_hcs_init( task_output: Dictionary for Fractal server that contains a parallelization list. """ - logger.info( - f"Running `image_based_registration_hcs_init` for {zarr_urls=}" - ) + logger.info(f"Running `image_based_registration_hcs_init` for {zarr_urls=}") image_groups = create_well_acquisition_dict(zarr_urls) # Create the parallelization list diff --git a/fractal_tasks_core/tasks/import_ome_zarr.py b/fractal_tasks_core/tasks/import_ome_zarr.py index 1213147e7..55ef34ce3 100644 --- a/fractal_tasks_core/tasks/import_ome_zarr.py +++ b/fractal_tasks_core/tasks/import_ome_zarr.py @@ -17,7 +17,7 @@ import dask.array as da import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.channels import update_omero_channels from fractal_tasks_core.ngff import detect_ome_ngff_type @@ -66,10 +66,7 @@ def _process_single_image( # Preliminary checks if add_grid_ROI_table and (grid_YX_shape is None): - raise ValueError( - f"_process_single_image called with {add_grid_ROI_table=}, " - f"but {grid_YX_shape=}." 
- ) + raise ValueError(f"_process_single_image called with {add_grid_ROI_table=}, " f"but {grid_YX_shape=}.") pixels_ZYX = image_meta.get_pixel_sizes_zyx(level=0) @@ -121,17 +118,12 @@ def _process_single_image( logger.info(f"Existing axes: {image_meta.axes_names}") logger.info(f"Channel-axis index: {channel_axis_index}") num_channels_zarr = array.shape[channel_axis_index] - logger.info( - f"{num_channels_zarr} channel(s) found in Zarr array " - f"at {image_path}/{dataset_subpath}" - ) + logger.info(f"{num_channels_zarr} channel(s) found in Zarr array " f"at {image_path}/{dataset_subpath}") # Update or create omero channels metadata old_omero = image_group.attrs.get("omero", {}) old_channels = old_omero.get("channels", []) if len(old_channels) > 0: - logger.info( - f"{len(old_channels)} channel(s) found in NGFF omero metadata" - ) + logger.info(f"{len(old_channels)} channel(s) found in NGFF omero metadata") if len(old_channels) != num_channels_zarr: error_msg = ( "Channels-number mismatch: Number of channels in the " diff --git a/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py b/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py index 75ba7c96b..92e498459 100644 --- a/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py +++ b/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py @@ -14,7 +14,7 @@ """ import logging -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.tasks._registration_utils import ( create_well_acquisition_dict, @@ -48,9 +48,7 @@ def init_group_by_well_for_multiplexing( OME-NGFF HCS well metadata acquisition keys to find the reference acquisition. """ - logger.info( - f"Running `init_group_by_well_for_multiplexing` for {zarr_urls=}" - ) + logger.info(f"Running `init_group_by_well_for_multiplexing` for {zarr_urls=}") image_groups = create_well_acquisition_dict(zarr_urls) # Create the parallelization list diff --git a/fractal_tasks_core/tasks/maximum_intensity_projection.py b/fractal_tasks_core/tasks/maximum_intensity_projection.py index 6c3adb924..7454cf63d 100644 --- a/fractal_tasks_core/tasks/maximum_intensity_projection.py +++ b/fractal_tasks_core/tasks/maximum_intensity_projection.py @@ -18,7 +18,7 @@ import anndata as ad import dask.array as da import zarr -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from zarr.errors import ContainsArrayError from fractal_tasks_core.ngff import load_NgffImageMeta @@ -129,15 +129,11 @@ def maximum_intensity_projection( "write it back to the new zarr file." ) new_ROI_table = ad.read_zarr(f"{init_args.origin_url}/tables/{table}") - old_ROI_table_attrs = zarr.open_group( - f"{init_args.origin_url}/tables/{table}" - ).attrs.asdict() + old_ROI_table_attrs = zarr.open_group(f"{init_args.origin_url}/tables/{table}").attrs.asdict() # Convert 3D ROIs to 2D pxl_sizes_zyx = ngff_image.get_pixel_sizes_zyx(level=0) - new_ROI_table = convert_ROIs_from_3D_to_2D( - new_ROI_table, pixel_size_z=pxl_sizes_zyx[0] - ) + new_ROI_table = convert_ROIs_from_3D_to_2D(new_ROI_table, pixel_size_z=pxl_sizes_zyx[0]) # Write new table write_table( new_image_group, @@ -148,17 +144,9 @@ def maximum_intensity_projection( ) for table in non_roi_tables: - logger.info( - f"Reading {table} from " - f"{init_args.origin_url=}, and " - "write it back to the new zarr file." 
- ) - new_non_ROI_table = ad.read_zarr( - f"{init_args.origin_url}/tables/{table}" - ) - old_non_ROI_table_attrs = zarr.open_group( - f"{init_args.origin_url}/tables/{table}" - ).attrs.asdict() + logger.info(f"Reading {table} from " f"{init_args.origin_url=}, and " "write it back to the new zarr file.") + new_non_ROI_table = ad.read_zarr(f"{init_args.origin_url}/tables/{table}") + old_non_ROI_table_attrs = zarr.open_group(f"{init_args.origin_url}/tables/{table}").attrs.asdict() # Write new table write_table( diff --git a/fractal_tasks_core/tasks/napari_workflows_wrapper.py b/fractal_tasks_core/tasks/napari_workflows_wrapper.py index d0060bcaa..3347cf1c9 100644 --- a/fractal_tasks_core/tasks/napari_workflows_wrapper.py +++ b/fractal_tasks_core/tasks/napari_workflows_wrapper.py @@ -22,7 +22,7 @@ import pandas as pd import zarr from napari_workflows._io_yaml_v1 import load_workflow -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments import fractal_tasks_core from fractal_tasks_core.channels import get_channel_from_image_zarr @@ -141,9 +141,7 @@ def napari_workflows_wrapper( # Characterization of workflow and scope restriction input_types = [in_params.type for (name, in_params) in input_specs.items()] - output_types = [ - out_params.type for (name, out_params) in output_specs.items() - ] + output_types = [out_params.type for (name, out_params) in output_specs.items()] are_inputs_all_images = set(input_types) == {"image"} are_outputs_all_labels = set(output_types) == {"label"} are_outputs_all_dataframes = set(output_types) == {"dataframe"} @@ -151,9 +149,7 @@ def napari_workflows_wrapper( is_measurement_only_workflow = are_outputs_all_dataframes # Level-related constraint logger.info(f"This workflow acts at {level=}") - logger.info( - f"Is the current workflow a labeling one? {is_labeling_workflow}" - ) + logger.info(f"Is the current workflow a labeling one? {is_labeling_workflow}") if level > 0 and not is_labeling_workflow: msg = ( f"{level=}>0 is currently only accepted for labeling workflows, " @@ -163,10 +159,7 @@ def napari_workflows_wrapper( raise OutOfTaskScopeError(msg) # Relabeling-related (soft) constraint if is_measurement_only_workflow and relabeling: - logger.warning( - "This is a measurement-output-only workflow, setting " - "relabeling=False." - ) + logger.warning("This is a measurement-output-only workflow, setting " "relabeling=False.") relabeling = False if relabeling: max_label_for_relabeling = 0 @@ -193,22 +186,15 @@ def napari_workflows_wrapper( ) check_valid_ROI_indices(list_indices, input_ROI_table) num_ROIs = len(list_indices) - logger.info( - f"Completed reading ROI table {input_ROI_table}," - f" found {num_ROIs} ROIs." 
- ) + logger.info(f"Completed reading ROI table {input_ROI_table}," f" found {num_ROIs} ROIs.") # Input preparation: "image" type - image_inputs = [ - (name, in_params) - for (name, in_params) in input_specs.items() - if in_params.type == "image" - ] + image_inputs = [(name, in_params) for (name, in_params) in input_specs.items() if in_params.type == "image"] input_image_arrays = {} if image_inputs: img_array = da.from_zarr(f"{zarr_url}/{level}") # Loop over image inputs and assign corresponding channel of the image - for (name, params) in image_inputs: + for name, params in image_inputs: channel = get_channel_from_image_zarr( image_zarr_path=zarr_url, wavelength_id=params.channel.wavelength_id, @@ -220,74 +206,49 @@ def napari_workflows_wrapper( # Handle dimensions shape = input_image_arrays[name].shape if expected_dimensions == 3 and shape[0] == 1: - logger.warning( - f"Input {name} has shape {shape} " - f"but {expected_dimensions=}" - ) + logger.warning(f"Input {name} has shape {shape} " f"but {expected_dimensions=}") if expected_dimensions == 2: if len(shape) == 2: # We already load the data as a 2D array pass elif shape[0] == 1: - input_image_arrays[name] = input_image_arrays[name][ - 0, :, : - ] + input_image_arrays[name] = input_image_arrays[name][0, :, :] else: - msg = ( - f"Input {name} has shape {shape} " - f"but {expected_dimensions=}" - ) + msg = f"Input {name} has shape {shape} " f"but {expected_dimensions=}" logger.error(msg) raise ValueError(msg) logger.info(f"Prepared input with {name=} and {params=}") logger.info(f"{input_image_arrays=}") # Input preparation: "label" type - label_inputs = [ - (name, in_params) - for (name, in_params) in input_specs.items() - if in_params.type == "label" - ] + label_inputs = [(name, in_params) for (name, in_params) in input_specs.items() if in_params.type == "label"] if label_inputs: # Set target_shape for upscaling labels if not image_inputs: - logger.warning( - f"{len(label_inputs)=} but num_image_inputs=0. " - "Label array(s) will not be upscaled." - ) + logger.warning(f"{len(label_inputs)=} but num_image_inputs=0. 
" "Label array(s) will not be upscaled.") upscale_labels = False else: target_shape = list(input_image_arrays.values())[0].shape upscale_labels = True # Loop over label inputs and load corresponding (upscaled) image input_label_arrays = {} - for (name, params) in label_inputs: + for name, params in label_inputs: label_name = params.label_name - label_array_raw = da.from_zarr( - f"{zarr_url}/labels/{label_name}/{level}" - ) + label_array_raw = da.from_zarr(f"{zarr_url}/labels/{label_name}/{level}") input_label_arrays[name] = label_array_raw # Handle dimensions shape = input_label_arrays[name].shape if expected_dimensions == 3 and shape[0] == 1: - logger.warning( - f"Input {name} has shape {shape} " - f"but {expected_dimensions=}" - ) + logger.warning(f"Input {name} has shape {shape} " f"but {expected_dimensions=}") if expected_dimensions == 2: if len(shape) == 2: # We already load the data as a 2D array pass elif shape[0] == 1: - input_label_arrays[name] = input_label_arrays[name][ - 0, :, : - ] + input_label_arrays[name] = input_label_arrays[name][0, :, :] else: - msg = ( - f"Input {name} has shape {shape} " - f"but {expected_dimensions=}" - ) + msg = f"Input {name} has shape {shape} " f"but {expected_dimensions=}" logger.error(msg) raise ValueError(msg) @@ -317,17 +278,12 @@ def napari_workflows_wrapper( logger.info(f"{input_label_arrays=}") # Output preparation: "label" type - label_outputs = [ - (name, out_params) - for (name, out_params) in output_specs.items() - if out_params.type == "label" - ] + label_outputs = [(name, out_params) for (name, out_params) in output_specs.items() if out_params.type == "label"] if label_outputs: # Preliminary scope checks if len(label_outputs) > 1: raise OutOfTaskScopeError( - "Multiple label outputs would break label-inputs-only " - f"workflows (found {len(label_outputs)=})." + "Multiple label outputs would break label-inputs-only " f"workflows (found {len(label_outputs)=})." ) if len(label_outputs) > 1 and relabeling: raise OutOfTaskScopeError( @@ -347,12 +303,8 @@ def napari_workflows_wrapper( reference_array = list(input_label_arrays.values())[0] # Re-load pixel size, matching to the correct level input_label_name = label_inputs[0][1].label_name - ngff_label_image_meta = load_NgffImageMeta( - f"{zarr_url}/labels/{input_label_name}" - ) - full_res_pxl_sizes_zyx = ngff_label_image_meta.get_pixel_sizes_zyx( - level=0 - ) + ngff_label_image_meta = load_NgffImageMeta(f"{zarr_url}/labels/{input_label_name}") + full_res_pxl_sizes_zyx = ngff_label_image_meta.get_pixel_sizes_zyx(level=0) # Create list of indices for 3D FOVs spanning the whole Z direction list_indices = convert_ROI_table_to_indices( ROI_table, @@ -370,10 +322,7 @@ def napari_workflows_wrapper( "are not upscaled." 
) else: - msg = ( - "Missing image_inputs and label_inputs, we cannot assign" - " label output properties" - ) + msg = "Missing image_inputs and label_inputs, we cannot assign" " label output properties" raise OutOfTaskScopeError(msg) # Extract label properties from reference_array, and make sure they are @@ -382,10 +331,7 @@ def napari_workflows_wrapper( label_chunksize = reference_array.chunksize if len(label_shape) == 2 and len(label_chunksize) == 2: if expected_dimensions == 3: - raise ValueError( - f"Something wrong: {label_shape=} but " - f"{expected_dimensions=}" - ) + raise ValueError(f"Something wrong: {label_shape=} but " f"{expected_dimensions=}") label_shape = (1, label_shape[0], label_shape[1]) label_chunksize = (1, label_chunksize[0], label_chunksize[1]) logger.info(f"{label_shape=}") @@ -393,7 +339,7 @@ def napari_workflows_wrapper( # Loop over label outputs and (1) set zattrs, (2) create zarr group output_label_zarr_groups: dict[str, Any] = {} - for (name, out_params) in label_outputs: + for name, out_params in label_outputs: # (1a) Rescale OME-NGFF datasets (relevant for level>0) if not ngff_image_meta.multiscale.axes[0].name == "c": @@ -403,9 +349,7 @@ def napari_workflows_wrapper( 'First axis should have name "c".' ) new_datasets = rescale_datasets( - datasets=[ - ds.dict() for ds in ngff_image_meta.multiscale.datasets - ], + datasets=[ds.dict() for ds in ngff_image_meta.multiscale.datasets], coarsening_xy=coarsening_xy, reference_level=level, remove_channel_axis=True, @@ -422,11 +366,7 @@ def napari_workflows_wrapper( { "name": label_name, "version": __OME_NGFF_VERSION__, - "axes": [ - ax.dict() - for ax in ngff_image_meta.multiscale.axes - if ax.type != "channel" - ], + "axes": [ax.dict() for ax in ngff_image_meta.multiscale.axes if ax.type != "channel"], "datasets": new_datasets, } ], @@ -441,10 +381,7 @@ def napari_workflows_wrapper( label_attrs=label_attrs, logger=logger, ) - logger.info( - "Helper function `prepare_label_group` returned " - f"{label_group=}" - ) + logger.info("Helper function `prepare_label_group` returned " f"{label_group=}") # (3) Create zarr group at level=0 store = zarr.storage.FSStore(f"{zarr_url}/labels/{label_name}/0") @@ -462,12 +399,10 @@ def napari_workflows_wrapper( # Output preparation: "dataframe" type dataframe_outputs = [ - (name, out_params) - for (name, out_params) in output_specs.items() - if out_params.type == "dataframe" + (name, out_params) for (name, out_params) in output_specs.items() if out_params.type == "dataframe" ] output_dataframe_lists: dict[str, list] = {} - for (name, out_params) in dataframe_outputs: + for name, out_params in dataframe_outputs: output_dataframe_lists[name] = [] logger.info(f"Prepared output with {name=} and {out_params=}") logger.info(f"{output_dataframe_lists=}") @@ -536,10 +471,7 @@ def napari_workflows_wrapper( # Check dimensions if len(mask.shape) != expected_dimensions: - msg = ( - f"Output {output_name} has shape {mask.shape} " - f"but {expected_dimensions=}" - ) + msg = f"Output {output_name} has shape {mask.shape} " f"but {expected_dimensions=}" logger.error(msg) raise ValueError(msg) elif expected_dimensions == 2: @@ -561,8 +493,7 @@ def napari_workflows_wrapper( if relabeling: mask[mask > 0] += max_label_for_relabeling logger.info( - f'ROI {i_ROI+1}/{num_ROIs}: Relabeling "{name}" label ' - f"output, with {max_label_for_relabeling=}" + f'ROI {i_ROI+1}/{num_ROIs}: Relabeling "{name}" label ' f"output, with {max_label_for_relabeling=}" ) max_label_for_relabeling += num_labels_in_this_ROI 
logger.info( @@ -581,7 +512,7 @@ def napari_workflows_wrapper( # Output handling: "dataframe" type (for each output, concatenate ROI # dataframes, clean up, and store in a AnnData table on-disk) - for (name, out_params) in dataframe_outputs: + for name, out_params in dataframe_outputs: table_name = out_params.table_name # Concatenate all FOV dataframes list_dfs = output_dataframe_lists[name] @@ -617,7 +548,7 @@ def napari_workflows_wrapper( # Output handling: "label" type (for each output, build and write to disk # pyramid of coarser levels) - for (name, out_params) in label_outputs: + for name, out_params in label_outputs: label_name = out_params.label_name build_pyramid( zarrurl=f"{zarr_url}/labels/{label_name}", diff --git a/tests/dev/test_create_schema_for_single_task.py b/tests/dev/test_create_schema_for_single_task.py index 1a7b17057..afc4c307a 100644 --- a/tests/dev/test_create_schema_for_single_task.py +++ b/tests/dev/test_create_schema_for_single_task.py @@ -2,7 +2,7 @@ import pytest from devtools import debug -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.dev.lib_args_schemas import ( create_schema_for_single_task, diff --git a/tests/dev/test_enum_arguments.py b/tests/dev/test_enum_arguments.py index 5b1262e97..a003217b1 100644 --- a/tests/dev/test_enum_arguments.py +++ b/tests/dev/test_enum_arguments.py @@ -2,7 +2,7 @@ from enum import Enum from devtools import debug -from pydantic.decorator import validate_arguments +from pydantic.v1.decorator import validate_arguments from fractal_tasks_core.dev.lib_args_schemas import ( create_schema_for_single_task, diff --git a/tests/tasks/test_unit_napari_workflows_wrapper.py b/tests/tasks/test_unit_napari_workflows_wrapper.py index 93e2e8b46..e13d676ab 100644 --- a/tests/tasks/test_unit_napari_workflows_wrapper.py +++ b/tests/tasks/test_unit_napari_workflows_wrapper.py @@ -2,7 +2,7 @@ import pytest from devtools import debug -from pydantic.error_wrappers import ValidationError +from pydantic.v1.error_wrappers import ValidationError from fractal_tasks_core.tasks.napari_workflows_wrapper import ( napari_workflows_wrapper, @@ -16,13 +16,9 @@ def test_input_specs(tmp_path, testdata_path): """ # napari-workflows - workflow_file = str( - testdata_path / "napari_workflows/wf_5-labeling_only.yaml" - ) + workflow_file = str(testdata_path / "napari_workflows/wf_5-labeling_only.yaml") input_specs = {"asd": "asd"} - output_specs = { - "output_label": {"type": "label", "label_name": "label_DAPI"} - } + output_specs = {"output_label": {"type": "label", "label_name": "label_DAPI"}} zarr_url = str(tmp_path / "component") with pytest.raises(ValidationError): napari_workflows_wrapper( @@ -44,9 +40,7 @@ def test_output_specs(tmp_path, testdata_path, caplog): caplog.set_level(logging.WARNING) # napari-workflows - workflow_file = str( - testdata_path / "napari_workflows/wf_5-labeling_only.yaml" - ) + workflow_file = str(testdata_path / "napari_workflows/wf_5-labeling_only.yaml") input_specs = { "input_image": { "type": "image", From 15cd508a8d2f54fced83535b43af11546def13e7 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Mon, 17 Jun 2024 15:04:25 +0200 Subject: [PATCH 04/15] fix missing pydantic.v1.error in stderr --- tests/tasks/test_valid_task_interface.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/tests/tasks/test_valid_task_interface.py b/tests/tasks/test_valid_task_interface.py index 260b6b726..3dff80c2b 100644 --- 
a/tests/tasks/test_valid_task_interface.py +++ b/tests/tasks/test_valid_task_interface.py @@ -24,9 +24,9 @@ def validate_command(cmd: str): assert result.returncode == 1 stderr = result.stderr.decode() debug(stderr) - # Valid stderr includes pydantic.error_wrappers.ValidationError (type + # Valid stderr includes pydantic.v1.error_wrappers.ValidationError (type # match between model and function, but tmp_file_args has wrong arguments) - assert "pydantic.error_wrappers.ValidationError" in stderr + assert "pydantic.v1.error_wrappers.ValidationError" in stderr # Valid stderr must include a mention of "unexpected keyword arguments", # because we are including some invalid arguments assert "unexpected keyword arguments" in stderr @@ -53,9 +53,5 @@ def test_task_interface(task, tmp_path): if value is None: continue task_path = (module_dir / value).as_posix() - cmd = ( - f"python {task_path} " - f"--args-json {tmp_file_args} " - f"--out-json {tmp_file_metadiff}" - ) + cmd = f"python {task_path} " f"--args-json {tmp_file_args} " f"--out-json {tmp_file_metadiff}" validate_command(cmd) From 8af43a595769cc36c5f276d70c8c287a6b05ad80 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Mon, 17 Jun 2024 15:05:04 +0200 Subject: [PATCH 05/15] relax pydantic to ==1.10.16 || >=2.6.3 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 2e4e520b8..7b77d8cde 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ numpy = "<2" pandas = ">=1.2.0,<2" defusedxml = "^0.7.1" lxml = "^4.9.1" -pydantic = "==1.10.16" +pydantic = "==1.10.16 || >=2.6.3" docstring-parser = "^0.15" anndata = ">=0.8.0,<0.11.0" filelock = "3.13.*" From 17b3fff477a9c1adedfdcc29c6b7c2077f730aa3 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Mon, 17 Jun 2024 15:13:08 +0200 Subject: [PATCH 06/15] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ef820e57d..04683570a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ # 1.0.3 (unreleased) +* Switch to transitional pydantic.v1 imports and relax pydantic requirement to `1.10.16 || >=2.6.3' (\#760). * Support JSON-Schema generation for `Enum` task arguments (\#749). * Make JSON-Schema generation tools more flexible, to simplify testing (\#749). * Update documentation (\#751). 
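Note on the pattern the patches above converge on: every import now goes through the transitional `pydantic.v1` namespace, and the requirement is relaxed to `==1.10.16 || >=2.6.3`. The `pydantic.v1` module is provided natively by pydantic 2.x and appears to be shipped as a forward-compatibility shim in late 1.10.x releases as well, which would explain the 1.10.16 floor. A minimal sketch of the import style, for illustration only (`Point` and `shift` are hypothetical names, not part of fractal-tasks-core):

from pydantic.v1 import BaseModel
from pydantic.v1.decorator import validate_arguments


class Point(BaseModel):
    # hypothetical model, used only to demonstrate the transitional imports
    x: float
    y: float


@validate_arguments
def shift(point: Point, dx: float = 0.0) -> Point:
    # validate_arguments validates/coerces the call arguments against the
    # type hints before the function body runs
    return Point(x=point.x + dx, y=point.y)

Under either side of the version constraint these imports resolve to the pydantic v1 API, so code written this way runs unchanged before and after a later migration to pydantic 2.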
From 48b6ad79531422a206fe27f5fa2f63204e18ddf8 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Mon, 17 Jun 2024 15:33:55 +0200 Subject: [PATCH 07/15] run pre-commit --- fractal_tasks_core/cellvoyager/metadata.py | 66 +++++++--- fractal_tasks_core/channels.py | 81 +++++++++--- fractal_tasks_core/dev/lib_args_schemas.py | 33 +++-- .../dev/lib_signature_constraints.py | 30 ++++- fractal_tasks_core/tables/v1.py | 12 +- .../tasks/apply_registration_to_image.py | 44 +++++-- .../calculate_registration_image_based.py | 44 +++++-- .../tasks/cellpose_segmentation.py | 68 +++++++--- .../tasks/cellvoyager_to_ome_zarr_compute.py | 18 ++- .../tasks/cellvoyager_to_ome_zarr_init.py | 61 ++++++--- .../cellvoyager_to_ome_zarr_init_multiplex.py | 90 +++++++++---- .../tasks/copy_ome_zarr_hcs_plate.py | 44 +++++-- .../tasks/find_registration_consensus.py | 18 ++- .../tasks/illumination_correction.py | 36 ++++-- .../image_based_registration_hcs_init.py | 4 +- fractal_tasks_core/tasks/import_ome_zarr.py | 14 +- .../init_group_by_well_for_multiplexing.py | 4 +- fractal_tasks_core/tasks/io_models.py | 8 +- .../tasks/maximum_intensity_projection.py | 22 +++- .../tasks/napari_workflows_wrapper.py | 121 ++++++++++++++---- .../test_unit_napari_workflows_wrapper.py | 12 +- tests/tasks/test_valid_task_interface.py | 6 +- 22 files changed, 637 insertions(+), 199 deletions(-) diff --git a/fractal_tasks_core/cellvoyager/metadata.py b/fractal_tasks_core/cellvoyager/metadata.py index 9cd9e6593..4d9a008b4 100644 --- a/fractal_tasks_core/cellvoyager/metadata.py +++ b/fractal_tasks_core/cellvoyager/metadata.py @@ -48,13 +48,17 @@ def parse_yokogawa_metadata( mrf_str = Path(mrf_path).as_posix() mlf_str = Path(mlf_path).as_posix() - mrf_frame, mlf_frame, error_count = read_metadata_files(mrf_str, mlf_str, filename_patterns) + mrf_frame, mlf_frame, error_count = read_metadata_files( + mrf_str, mlf_str, filename_patterns + ) # Aggregate information from the mlf file per_site_parameters = ["X", "Y"] grouping_params = ["well_id", "FieldIndex"] - grouped_sites = mlf_frame.loc[:, grouping_params + per_site_parameters].groupby(by=grouping_params) + grouped_sites = mlf_frame.loc[ + :, grouping_params + per_site_parameters + ].groupby(by=grouping_params) check_group_consistency(grouped_sites, message="X & Y stage positions") site_metadata = grouped_sites.mean() @@ -78,7 +82,9 @@ def parse_yokogawa_metadata( "vert_pixels", "bit_depth", ] - check_group_consistency(mrf_frame.loc[:, mrf_columns], message="Image dimensions") + check_group_consistency( + mrf_frame.loc[:, mrf_columns], message="Image dimensions" + ) site_metadata["pixel_size_x"] = mrf_frame.loc[:, "horiz_pixel_dim"].max() site_metadata["pixel_size_y"] = mrf_frame.loc[:, "vert_pixel_dim"].max() site_metadata["x_pixel"] = int(mrf_frame.loc[:, "horiz_pixels"].max()) @@ -96,13 +102,17 @@ def parse_yokogawa_metadata( number_of_files = {} for this_well_id in list_of_wells: num_images = (mlf_frame.well_id == this_well_id).sum() - logger.info(f"Expected number of images for well {this_well_id}: {num_images}") + logger.info( + f"Expected number of images for well {this_well_id}: {num_images}" + ) number_of_files[this_well_id] = num_images # Check that the sum of per-well file numbers correspond to the total # file number if not sum(number_of_files.values()) == len(mlf_frame): raise ValueError( - "Error while counting the number of image files per well.\n" f"{len(mlf_frame)=}\n" f"{number_of_files=}" + "Error while counting the number of image files per well.\n" + 
f"{len(mlf_frame)=}\n" + f"{number_of_files=}" ) return site_metadata, number_of_files @@ -138,7 +148,9 @@ def read_metadata_files( # processed further. Figure out how to save them as relevant metadata for # use e.g. during illumination correction - mlf_frame, error_count = read_mlf_file(mlf_path, plate_type, filename_patterns) + mlf_frame, error_count = read_mlf_file( + mlf_path, plate_type, filename_patterns + ) # Time points are parsed as part of the mlf_frame, but currently not # processed further. Once we tackle time-resolved data, parse from here. @@ -238,7 +250,9 @@ def _create_well_ids( col_sub = [(x - 1) % 4 + 1 for x in col_series] well_ids = [] for i in range(len(row_base)): - well_ids.append(f"{row_base[i]}{col_base[i]:02}.{row_sub[i]}{col_sub[i]}") + well_ids.append( + f"{row_base[i]}{col_base[i]:02}.{row_sub[i]}{col_sub[i]}" + ) else: row_str = [chr(x) for x in (row_series + 64)] well_ids = [f"{a}{b:02}" for a, b in zip(row_str, col_series)] @@ -270,7 +284,10 @@ def read_mlf_file( mlf_frame_raw = pd.read_xml(mlf_path) # Remove all rows that do not match the given patterns - logger.info(f"Read {mlf_path}, and apply following patterns to " f"image filenames: {filename_patterns}") + logger.info( + f"Read {mlf_path}, and apply following patterns to " + f"image filenames: {filename_patterns}" + ) if filename_patterns: filenames = mlf_frame_raw.MeasurementRecord keep_row = None @@ -278,13 +295,19 @@ def read_mlf_file( actual_pattern = fnmatch.translate(pattern) new_matches = filenames.str.fullmatch(actual_pattern) if new_matches.sum() == 0: - raise ValueError(f"In {mlf_path} there is no image filename " f'matching "{actual_pattern}".') + raise ValueError( + f"In {mlf_path} there is no image filename " + f'matching "{actual_pattern}".' + ) if keep_row is None: keep_row = new_matches.copy() else: keep_row = keep_row & new_matches if keep_row.sum() == 0: - raise ValueError(f"In {mlf_path} there is no image filename " f"matching {filename_patterns}.") + raise ValueError( + f"In {mlf_path} there is no image filename " + f"matching {filename_patterns}." + ) mlf_frame_matching = mlf_frame_raw[keep_row.values].copy() else: mlf_frame_matching = mlf_frame_raw.copy() @@ -355,20 +378,31 @@ def get_z_steps(mlf_frame: pd.DataFrame) -> pd.DataFrame: else: # Group the whole site (combine channels), because Z steps need to be # consistent between channels for OME-Zarr. - z_data = grouped_sites_z.apply(calculate_steps).groupby(["well_id", "FieldIndex"]) + z_data = grouped_sites_z.apply(calculate_steps).groupby( + ["well_id", "FieldIndex"] + ) - check_group_consistency(z_data, message="Comparing Z steps between channels") + check_group_consistency( + z_data, message="Comparing Z steps between channels" + ) # Ensure that channels have the same number of z planes and # reduce it to one value. 
# Only check if there is more than one channel available - if any(grouped_sites_z.count().groupby(["well_id", "FieldIndex"]).count() > 1): + if any( + grouped_sites_z.count().groupby(["well_id", "FieldIndex"]).count() > 1 + ): check_group_consistency( grouped_sites_z.count().groupby(["well_id", "FieldIndex"]), message="Checking number of Z steps between channels", ) - z_steps = grouped_sites_z.count().groupby(["well_id", "FieldIndex"]).mean().astype(int) + z_steps = ( + grouped_sites_z.count() + .groupby(["well_id", "FieldIndex"]) + .mean() + .astype(int) + ) # Combine the two dataframes z_frame = pd.concat([z_data.mean(), z_steps], axis=1) @@ -388,7 +422,9 @@ def get_earliest_time_per_site(mlf_frame: pd.DataFrame) -> pd.DataFrame: # Because a site will contain time information for each plane # of each channel, we just return the earliest time infromation # per site. - return pd.to_datetime(mlf_frame.groupby(["well_id", "FieldIndex"]).min()["Time"], utc=True) + return pd.to_datetime( + mlf_frame.groupby(["well_id", "FieldIndex"]).min()["Time"], utc=True + ) def check_group_consistency(grouped_df: pd.DataFrame, message: str = ""): diff --git a/fractal_tasks_core/channels.py b/fractal_tasks_core/channels.py index 7b6bb0cbf..bf50e2213 100644 --- a/fractal_tasks_core/channels.py +++ b/fractal_tasks_core/channels.py @@ -25,7 +25,9 @@ if __OME_NGFF_VERSION__ != "0.4": - NotImplementedError(f"OME NGFF {__OME_NGFF_VERSION__} is not supported " "in `channels.py`") + NotImplementedError( + f"OME NGFF {__OME_NGFF_VERSION__} is not supported " "in `channels.py`" + ) class Window(BaseModel): @@ -91,7 +93,10 @@ def valid_hex_color(cls, v, values): allowed_characters = "abcdefABCDEF0123456789" for character in v: if character not in allowed_characters: - raise ValueError("color must only include characters from " f'"{allowed_characters}" (given: "{v}")') + raise ValueError( + "color must only include characters from " + f'"{allowed_characters}" (given: "{v}")' + ) return v @@ -119,10 +124,13 @@ def mutually_exclusive_channel_attributes(cls, v, values): label = v if wavelength_id and v: raise ValueError( - "`wavelength_id` and `label` cannot be both set " f"(given {wavelength_id=} and {label=})." + "`wavelength_id` and `label` cannot be both set " + f"(given {wavelength_id=} and {label=})." ) if wavelength_id is None and v is None: - raise ValueError("`wavelength_id` and `label` cannot be both `None`") + raise ValueError( + "`wavelength_id` and `label` cannot be both `None`" + ) return v @@ -144,7 +152,9 @@ def check_unique_wavelength_ids(channels: list[OmeroChannel]): """ wavelength_ids = [c.wavelength_id for c in channels] if len(set(wavelength_ids)) < len(wavelength_ids): - raise ValueError(f"Non-unique wavelength_id's in {wavelength_ids}\n" f"{channels=}") + raise ValueError( + f"Non-unique wavelength_id's in {wavelength_ids}\n" f"{channels=}" + ) def check_well_channel_labels(*, well_zarr_path: str) -> None: @@ -163,7 +173,9 @@ def check_well_channel_labels(*, well_zarr_path: str) -> None: image_paths = [image["path"] for image in group.attrs["well"]["images"]] list_of_channel_lists = [] for image_path in image_paths: - channels = get_omero_channel_list(image_zarr_path=f"{well_zarr_path}/{image_path}") + channels = get_omero_channel_list( + image_zarr_path=f"{well_zarr_path}/{image_path}" + ) list_of_channel_lists.append(channels[:]) # For each pair of channel-labels lists, verify they do not overlap @@ -179,7 +191,10 @@ def check_well_channel_labels(*, well_zarr_path: str) -> None: "images? 
This could lead to non-unique channel labels, " "and then could be the reason of the error" ) - raise ValueError("Non-unique channel labels\n" f"{labels_1=}\n{labels_2=}\n{hint}") + raise ValueError( + "Non-unique channel labels\n" + f"{labels_1=}\n{labels_2=}\n{hint}" + ) def get_channel_from_image_zarr( @@ -204,7 +219,9 @@ def get_channel_from_image_zarr( A single channel dictionary. """ omero_channels = get_omero_channel_list(image_zarr_path=image_zarr_path) - channel = get_channel_from_list(channels=omero_channels, label=label, wavelength_id=wavelength_id) + channel = get_channel_from_list( + channels=omero_channels, label=label, wavelength_id=wavelength_id + ) return channel @@ -251,24 +268,36 @@ def get_channel_from_list( if label: if wavelength_id: # Both label and wavelength_id are specified - matching_channels = [c for c in channels if (c.label == label and c.wavelength_id == wavelength_id)] + matching_channels = [ + c + for c in channels + if (c.label == label and c.wavelength_id == wavelength_id) + ] else: # Only label is specified matching_channels = [c for c in channels if c.label == label] else: if wavelength_id: # Only wavelength_id is specified - matching_channels = [c for c in channels if c.wavelength_id == wavelength_id] + matching_channels = [ + c for c in channels if c.wavelength_id == wavelength_id + ] else: # Neither label or wavelength_id are specified - raise ValueError("get_channel requires at least one in {label,wavelength_id} " "arguments") + raise ValueError( + "get_channel requires at least one in {label,wavelength_id} " + "arguments" + ) # Verify that there is one and only one matching channel if len(matching_channels) == 0: required_match = [f"{label=}", f"{wavelength_id=}"] - required_match_string = " and ".join([x for x in required_match if "None" not in x]) + required_match_string = " and ".join( + [x for x in required_match if "None" not in x] + ) raise ChannelNotFoundError( - f"ChannelNotFoundError: No channel found in {channels}" f" for {required_match_string}" + f"ChannelNotFoundError: No channel found in {channels}" + f" for {required_match_string}" ) if len(matching_channels) > 1: raise ValueError(f"Inconsistent set of channels: {channels}") @@ -317,7 +346,9 @@ def define_omero_channels( default_label = wavelength_id if label_prefix: default_label = f"{label_prefix}_{default_label}" - logging.warning(f"Missing label for {channel=}, using {default_label=}") + logging.warning( + f"Missing label for {channel=}, using {default_label=}" + ) channel.label = default_label # If channel.color is None, set it to a default value (use the default @@ -338,7 +369,9 @@ def define_omero_channels( if len(set(labels)) < len(labels): raise ValueError(f"Non-unique labels in {new_channels=}") - new_channels_dictionaries = [c.dict(exclude={"index"}, exclude_unset=True) for c in new_channels] + new_channels_dictionaries = [ + c.dict(exclude={"index"}, exclude_unset=True) for c in new_channels + ] return new_channels_dictionaries @@ -368,7 +401,9 @@ def _get_new_unique_value( return new_value -def update_omero_channels(old_channels: list[dict[str, Any]]) -> list[dict[str, Any]]: +def update_omero_channels( + old_channels: list[dict[str, Any]] +) -> list[dict[str, Any]]: """ Make an existing list of Omero channels Fractal-compatible @@ -455,11 +490,19 @@ def _get_next_color() -> str: label = old_channel.get("label") color = old_channel.get("color") wavelength_id = old_channel.get("wavelength_id") - old_attributes = f"Old attributes: {label=}, {wavelength_id=}, {color=}" + 
old_attributes = ( + f"Old attributes: {label=}, {wavelength_id=}, {color=}" + ) label = new_channels[ind]["label"] wavelength_id = new_channels[ind]["wavelength_id"] color = new_channels[ind]["color"] - new_attributes = f"New attributes: {label=}, {wavelength_id=}, {color=}" - logging.info("Omero channel update:\n" f" {old_attributes}\n" f" {new_attributes}") + new_attributes = ( + f"New attributes: {label=}, {wavelength_id=}, {color=}" + ) + logging.info( + "Omero channel update:\n" + f" {old_attributes}\n" + f" {new_attributes}" + ) return new_channels diff --git a/fractal_tasks_core/dev/lib_args_schemas.py b/fractal_tasks_core/dev/lib_args_schemas.py index 0ecf88ddb..c84c4ed12 100644 --- a/fractal_tasks_core/dev/lib_args_schemas.py +++ b/fractal_tasks_core/dev/lib_args_schemas.py @@ -22,7 +22,6 @@ from docstring_parser import parse as docparse from pydantic.v1.decorator import ALT_V_ARGS -from pydantic.v1.decorator import ALT_V_ARGS from pydantic.v1.decorator import ALT_V_KWARGS from pydantic.v1.decorator import V_DUPLICATE_KWARGS from pydantic.v1.decorator import V_POSITIONAL_ONLY_NAME @@ -97,9 +96,14 @@ def _remove_args_kwargs_properties(old_schema: _Schema) -> _Schema: expected_args_property = {"title": "Args", "type": "array", "items": {}} expected_kwargs_property = {"title": "Kwargs", "type": "object"} if args_property != expected_args_property: - raise ValueError(f"{args_property=}\ndiffers from\n{expected_args_property=}") + raise ValueError( + f"{args_property=}\ndiffers from\n{expected_args_property=}" + ) if kwargs_property != expected_kwargs_property: - raise ValueError(f"{kwargs_property=}\ndiffers from\n" f"{expected_kwargs_property=}") + raise ValueError( + f"{kwargs_property=}\ndiffers from\n" + f"{expected_kwargs_property=}" + ) logging.info("[_remove_args_kwargs_properties] END") return new_schema @@ -145,7 +149,9 @@ def _remove_attributes_from_descriptions(old_schema: _Schema) -> _Schema: if "definitions" in new_schema: for name, definition in new_schema["definitions"].items(): parsed_docstring = docparse(definition["description"]) - new_schema["definitions"][name]["description"] = parsed_docstring.short_description + new_schema["definitions"][name][ + "description" + ] = parsed_docstring.short_description logging.info("[_remove_attributes_from_descriptions] END") return new_schema @@ -176,18 +182,21 @@ def create_schema_for_single_task( # Usage 1 (standard) if package is None: raise ValueError( - "Cannot call `create_schema_for_single_task with " f"{task_function=} and {package=}. Exit." + "Cannot call `create_schema_for_single_task with " + f"{task_function=} and {package=}. Exit." ) if os.path.isabs(executable): raise ValueError( - "Cannot call `create_schema_for_single_task with " f"{task_function=} and absolute {executable=}. Exit." + "Cannot call `create_schema_for_single_task with " + f"{task_function=} and absolute {executable=}. Exit." ) else: usage = "2" # Usage 2 (testing) if package is not None: raise ValueError( - "Cannot call `create_schema_for_single_task with " f"{task_function=} and non-None {package=}. Exit." + "Cannot call `create_schema_for_single_task with " + f"{task_function=} and non-None {package=}. Exit." 
) if not os.path.isabs(executable): raise ValueError( @@ -236,7 +245,9 @@ def create_schema_for_single_task( function_name=function_name, verbose=verbose, ) - schema = _insert_function_args_descriptions(schema=schema, descriptions=function_args_descriptions, verbose=verbose) + schema = _insert_function_args_descriptions( + schema=schema, descriptions=function_args_descriptions, verbose=verbose + ) # Merge lists of fractal-tasks-core and user-provided Pydantic models user_provided_models = custom_pydantic_models or [] @@ -244,7 +255,11 @@ def create_schema_for_single_task( # Check that model names are unique pydantic_models_names = [item[2] for item in pydantic_models] - duplicate_class_names = [name for name, count in Counter(pydantic_models_names).items() if count > 1] + duplicate_class_names = [ + name + for name, count in Counter(pydantic_models_names).items() + if count > 1 + ] if duplicate_class_names: pydantic_models_str = " " + "\n ".join(map(str, pydantic_models)) raise ValueError( diff --git a/fractal_tasks_core/dev/lib_signature_constraints.py b/fractal_tasks_core/dev/lib_signature_constraints.py index 468136bb0..a16efae47 100644 --- a/fractal_tasks_core/dev/lib_signature_constraints.py +++ b/fractal_tasks_core/dev/lib_signature_constraints.py @@ -47,13 +47,23 @@ def _extract_function( """ if not module_relative_path.endswith(".py"): raise ValueError(f"{module_relative_path=} must end with '.py'") - module_relative_path_no_py = str(Path(module_relative_path).with_suffix("")) + module_relative_path_no_py = str( + Path(module_relative_path).with_suffix("") + ) module_relative_path_dots = module_relative_path_no_py.replace("/", ".") if verbose: - logging.info(f"Now calling `import_module` for " f"{package_name}.{module_relative_path_dots}") - imported_module = import_module(f"{package_name}.{module_relative_path_dots}") + logging.info( + f"Now calling `import_module` for " + f"{package_name}.{module_relative_path_dots}" + ) + imported_module = import_module( + f"{package_name}.{module_relative_path_dots}" + ) if verbose: - logging.info(f"Now getting attribute {function_name} from " f"imported module {imported_module}.") + logging.info( + f"Now getting attribute {function_name} from " + f"imported module {imported_module}." + ) task_function = getattr(imported_module, function_name) return task_function @@ -74,7 +84,9 @@ def _validate_function_signature(function: Callable): # CASE 1: Check that name is not forbidden if param.name in FORBIDDEN_PARAM_NAMES: - raise ValueError(f"Function {function} has argument with name {param.name}") + raise ValueError( + f"Function {function} has argument with name {param.name}" + ) # CASE 2: Raise an error for unions if str(param.annotation).startswith(("typing.Union[", "Union[")): @@ -86,8 +98,12 @@ def _validate_function_signature(function: Callable): # CASE 4: Raise an error for optional parameter with given (non-None) # default, e.g. 
Optional[str] = "asd" - is_annotation_optional = str(param.annotation).startswith(("typing.Optional[", "Optional[")) - default_given = (param.default is not None) and (param.default != inspect._empty) + is_annotation_optional = str(param.annotation).startswith( + ("typing.Optional[", "Optional[") + ) + default_given = (param.default is not None) and ( + param.default != inspect._empty + ) if default_given and is_annotation_optional: raise ValueError("Optional parameter has non-None default value") diff --git a/fractal_tasks_core/tables/v1.py b/fractal_tasks_core/tables/v1.py index 070d2b53f..12aa78132 100644 --- a/fractal_tasks_core/tables/v1.py +++ b/fractal_tasks_core/tables/v1.py @@ -2,7 +2,6 @@ Functions and classes related to table specifications V1 (see https://fractal-analytics-platform.github.io/fractal-tasks-core/tables). """ - import logging import warnings from typing import Any @@ -207,7 +206,8 @@ def _write_table_v1( if table_type is not None: if table_type_from_attrs is not None: logger.warning( - f"Setting table type to '{table_type}' (and overriding " f"'{table_type_from_attrs}' attribute)." + f"Setting table type to '{table_type}' (and overriding " + f"'{table_type_from_attrs}' attribute)." ) table_attrs["type"] = table_type else: @@ -266,7 +266,9 @@ def _write_table_v1( return table_group -def get_tables_list_v1(zarr_url: str, table_type: str = None, strict: bool = False) -> list[str]: +def get_tables_list_v1( + zarr_url: str, table_type: str = None, strict: bool = False +) -> list[str]: """ Find the list of tables in the Zarr file @@ -292,7 +294,9 @@ def get_tables_list_v1(zarr_url: str, table_type: str = None, strict: bool = Fal if not table_type: return all_tables else: - return _filter_tables_by_type_v1(zarr_url, all_tables, table_type, strict) + return _filter_tables_by_type_v1( + zarr_url, all_tables, table_type, strict + ) def _filter_tables_by_type_v1( diff --git a/fractal_tasks_core/tasks/apply_registration_to_image.py b/fractal_tasks_core/tasks/apply_registration_to_image.py index 14bd35223..d7b435ecc 100644 --- a/fractal_tasks_core/tasks/apply_registration_to_image.py +++ b/fractal_tasks_core/tasks/apply_registration_to_image.py @@ -102,10 +102,13 @@ def apply_registration_to_image( acq_dict = load_NgffWellMeta(well_url).get_acquisition_paths() if reference_acquisition not in acq_dict: raise ValueError( - f"{reference_acquisition=} was not one of the available " f"acquisitions in {acq_dict=} for well {well_url}" + f"{reference_acquisition=} was not one of the available " + f"acquisitions in {acq_dict=} for well {well_url}" ) elif len(acq_dict[reference_acquisition]) > 1: - ref_path = _get_matching_ref_acquisition_path_heuristic(acq_dict[reference_acquisition], old_img_path) + ref_path = _get_matching_ref_acquisition_path_heuristic( + acq_dict[reference_acquisition], old_img_path + ) logger.warning( "Running registration when there are multiple images of the same " "acquisition in a well. 
Using a heuristic to match the reference " @@ -115,7 +118,9 @@ def apply_registration_to_image( ref_path = acq_dict[reference_acquisition][0] reference_zarr_url = f"{well_url}/{ref_path}" - ROI_table_ref = ad.read_zarr(f"{reference_zarr_url}/tables/{registered_roi_table}") + ROI_table_ref = ad.read_zarr( + f"{reference_zarr_url}/tables/{registered_roi_table}" + ) ROI_table_acq = ad.read_zarr(f"{zarr_url}/tables/{registered_roi_table}") ngff_image_meta = load_NgffImageMeta(zarr_url) @@ -204,7 +209,9 @@ def apply_registration_to_image( current_round = 0 while current_round < max_retries: try: - old_table_group = zarr.open_group(table_dict[table], mode="r") + old_table_group = zarr.open_group( + table_dict[table], mode="r" + ) current_round = max_retries except zarr.errors.GroupNotFoundError: logger.debug( @@ -227,7 +234,9 @@ def apply_registration_to_image( # Clean up Zarr file #################### if overwrite_input: - logger.info("Replace original zarr image with the newly created Zarr image") + logger.info( + "Replace original zarr image with the newly created Zarr image" + ) # Potential for race conditions: Every acquisition reads the # reference acquisition, but the reference acquisition also gets # modified @@ -237,7 +246,9 @@ def apply_registration_to_image( shutil.rmtree(f"{zarr_url}_tmp") image_list_updates = dict(image_list_updates=[dict(zarr_url=zarr_url)]) else: - image_list_updates = dict(image_list_updates=[dict(zarr_url=new_zarr_url, origin=zarr_url)]) + image_list_updates = dict( + image_list_updates=[dict(zarr_url=new_zarr_url, origin=zarr_url)] + ) # Update the metadata of the the well well_url, new_img_path = _split_well_path_image_path(new_zarr_url) _update_well_metadata( @@ -325,23 +336,32 @@ def write_registered_zarr( num_channels = data_array.shape[0] # Loop over channels for ind_ch in range(num_channels): - idx = tuple([slice(ind_ch, ind_ch + 1)] + list(reference_region)) - new_array[idx] = load_region(data_zyx=data_array[ind_ch], region=region, compute=False) + idx = tuple( + [slice(ind_ch, ind_ch + 1)] + list(reference_region) + ) + new_array[idx] = load_region( + data_zyx=data_array[ind_ch], region=region, compute=False + ) elif axes_list == ["z", "y", "x"]: - new_array[reference_region] = load_region(data_zyx=data_array, region=region, compute=False) + new_array[reference_region] = load_region( + data_zyx=data_array, region=region, compute=False + ) elif axes_list == ["c", "y", "x"]: # TODO: Implement cyx case (based on looping over xy case) raise NotImplementedError( - "`write_registered_zarr` has not been implemented for " f"a zarr with {axes_list=}" + "`write_registered_zarr` has not been implemented for " + f"a zarr with {axes_list=}" ) elif axes_list == ["y", "x"]: # TODO: Implement yx case raise NotImplementedError( - "`write_registered_zarr` has not been implemented for " f"a zarr with {axes_list=}" + "`write_registered_zarr` has not been implemented for " + f"a zarr with {axes_list=}" ) else: raise NotImplementedError( - "`write_registered_zarr` has not been implemented for " f"a zarr with {axes_list=}" + "`write_registered_zarr` has not been implemented for " + f"a zarr with {axes_list=}" ) new_array.to_zarr( diff --git a/fractal_tasks_core/tasks/calculate_registration_image_based.py b/fractal_tasks_core/tasks/calculate_registration_image_based.py index 88ea75489..50560c120 100644 --- a/fractal_tasks_core/tasks/calculate_registration_image_based.py +++ b/fractal_tasks_core/tasks/calculate_registration_image_based.py @@ -82,7 +82,9 @@ def 
calculate_registration_image_based( """ logger.info( - f"Running for {zarr_url=}.\n" f"Calculating translation registration per {roi_table=} for " f"{wavelength_id=}." + f"Running for {zarr_url=}.\n" + f"Calculating translation registration per {roi_table=} for " + f"{wavelength_id=}." ) init_args.reference_zarr_url = init_args.reference_zarr_url @@ -106,13 +108,21 @@ def calculate_registration_image_based( channel_index_align = channel_align.index # Lazily load zarr array - data_reference_zyx = da.from_zarr(f"{init_args.reference_zarr_url}/{level}")[channel_index_ref] - data_alignment_zyx = da.from_zarr(f"{zarr_url}/{level}")[channel_index_align] + data_reference_zyx = da.from_zarr( + f"{init_args.reference_zarr_url}/{level}" + )[channel_index_ref] + data_alignment_zyx = da.from_zarr(f"{zarr_url}/{level}")[ + channel_index_align + ] # Read ROIs - ROI_table_ref = ad.read_zarr(f"{init_args.reference_zarr_url}/tables/{roi_table}") + ROI_table_ref = ad.read_zarr( + f"{init_args.reference_zarr_url}/tables/{roi_table}" + ) ROI_table_x = ad.read_zarr(f"{zarr_url}/tables/{roi_table}") - logger.info(f"Found {len(ROI_table_x)} ROIs in {roi_table=} to be processed.") + logger.info( + f"Found {len(ROI_table_x)} ROIs in {roi_table=} to be processed." + ) # Check that table type of ROI_table_ref is valid. Note that # "ngff:region_table" and None are accepted for backwards compatibility @@ -129,7 +139,12 @@ def calculate_registration_image_based( ref_table_attrs = ROI_table_ref_group.attrs.asdict() ref_table_type = ref_table_attrs.get("type") if ref_table_type not in valid_table_types: - raise ValueError((f"Table '{roi_table}' (with type '{ref_table_type}') is " "not a valid ROI table.")) + raise ValueError( + ( + f"Table '{roi_table}' (with type '{ref_table_type}') is " + "not a valid ROI table." + ) + ) # For each acquisition, get the relevant info # TODO: Add additional checks on ROIs? @@ -152,7 +167,10 @@ def calculate_registration_image_based( pxl_sizes_zyx_acq_x = ngff_image_meta_acq_x.get_pixel_sizes_zyx(level=0) if pxl_sizes_zyx != pxl_sizes_zyx_acq_x: - raise ValueError("Pixel sizes need to be equal between acquisitions for " "registration.") + raise ValueError( + "Pixel sizes need to be equal between acquisitions for " + "registration." + ) # Create list of indices for 3D ROIs spanning the entire Z direction list_indices_ref = convert_ROI_table_to_indices( @@ -175,7 +193,10 @@ def calculate_registration_image_based( compute = True new_shifts = {} for i_ROI in range(num_ROIs): - logger.info(f"Now processing ROI {i_ROI+1}/{num_ROIs} " f"for channel {channel_align}.") + logger.info( + f"Now processing ROI {i_ROI+1}/{num_ROIs} " + f"for channel {channel_align}." + ) img_ref = load_region( data_zyx=data_reference_zyx, region=convert_indices_to_regions(list_indices_ref[i_ROI]), @@ -193,9 +214,12 @@ def calculate_registration_image_based( # Basic version (no padding, no internal binning) if img_ref.shape != img_acq_x.shape: raise NotImplementedError( - "This registration is not implemented for ROIs with " "different shapes between acquisitions." + "This registration is not implemented for ROIs with " + "different shapes between acquisitions." 
) - shifts = phase_cross_correlation(np.squeeze(img_ref), np.squeeze(img_acq_x))[0] + shifts = phase_cross_correlation( + np.squeeze(img_ref), np.squeeze(img_acq_x) + )[0] # Registration based on scmultiplex, image-based # shifts, _, _ = calculate_shift(np.squeeze(img_ref), diff --git a/fractal_tasks_core/tasks/cellpose_segmentation.py b/fractal_tasks_core/tasks/cellpose_segmentation.py index 9bf542d6d..a323d8262 100644 --- a/fractal_tasks_core/tasks/cellpose_segmentation.py +++ b/fractal_tasks_core/tasks/cellpose_segmentation.py @@ -329,8 +329,13 @@ def cellpose_segmentation( actual_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=level) logger.info(f"NGFF image has {num_levels=}") logger.info(f"NGFF image has {coarsening_xy=}") - logger.info(f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}") - logger.info(f"NGFF image has level-{level} pixel sizes " f"{actual_res_pxl_sizes_zyx}") + logger.info( + f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}" + ) + logger.info( + f"NGFF image has level-{level} pixel sizes " + f"{actual_res_pxl_sizes_zyx}" + ) # Find channel index try: @@ -340,7 +345,10 @@ def cellpose_segmentation( label=channel.label, ) except ChannelNotFoundError as e: - logger.warning("Channel not found, exit from the task.\n" f"Original error: {str(e)}") + logger.warning( + "Channel not found, exit from the task.\n" + f"Original error: {str(e)}" + ) return None ind_channel = tmp_channel.index @@ -381,9 +389,14 @@ def cellpose_segmentation( ROI_table = ad.read_zarr(ROI_table_path) # Perform some checks on the ROI table - valid_ROI_table = is_ROI_table_valid(table_path=ROI_table_path, use_masks=use_masks) + valid_ROI_table = is_ROI_table_valid( + table_path=ROI_table_path, use_masks=use_masks + ) if use_masks and not valid_ROI_table: - logger.info(f"ROI table at {ROI_table_path} cannot be used for masked " "loading. Set use_masks=False.") + logger.info( + f"ROI table at {ROI_table_path} cannot be used for masked " + "loading. Set use_masks=False." 
+ ) use_masks = False logger.info(f"{use_masks=}") @@ -410,7 +423,9 @@ def cellpose_segmentation( if do_3D: if anisotropy is None: # Compute anisotropy as pixel_size_z/pixel_size_x - anisotropy = actual_res_pxl_sizes_zyx[0] / actual_res_pxl_sizes_zyx[2] + anisotropy = ( + actual_res_pxl_sizes_zyx[0] / actual_res_pxl_sizes_zyx[2] + ) logger.info(f"Anisotropy: {anisotropy}") # Rescale datasets (only relevant for level>0) @@ -436,7 +451,11 @@ def cellpose_segmentation( { "name": output_label_name, "version": __OME_NGFF_VERSION__, - "axes": [ax.dict() for ax in ngff_image_meta.multiscale.axes if ax.type != "channel"], + "axes": [ + ax.dict() + for ax in ngff_image_meta.multiscale.axes + if ax.type != "channel" + ], "datasets": new_datasets, } ], @@ -451,7 +470,9 @@ def cellpose_segmentation( logger=logger, ) - logger.info(f"Helper function `prepare_label_group` returned {label_group=}") + logger.info( + f"Helper function `prepare_label_group` returned {label_group=}" + ) logger.info(f"Output label path: {zarr_url}/labels/{output_label_name}/0") store = zarr.storage.FSStore(f"{zarr_url}/labels/{output_label_name}/0") label_dtype = np.uint32 @@ -473,12 +494,17 @@ def cellpose_segmentation( dimension_separator="/", ) - logger.info(f"mask will have shape {data_zyx.shape} " f"and chunks {data_zyx.chunks}") + logger.info( + f"mask will have shape {data_zyx.shape} " + f"and chunks {data_zyx.chunks}" + ) # Initialize cellpose gpu = use_gpu and cellpose.core.use_gpu() if pretrained_model: - model = models.CellposeModel(gpu=gpu, pretrained_model=pretrained_model) + model = models.CellposeModel( + gpu=gpu, pretrained_model=pretrained_model + ) else: model = models.CellposeModel(gpu=gpu, model_type=model_type) @@ -600,7 +626,9 @@ def cellpose_segmentation( # Check that total number of labels is under control if num_labels_tot > np.iinfo(label_dtype).max: raise ValueError( - "ERROR in re-labeling:" f"Reached {num_labels_tot} labels, " f"but dtype={label_dtype}" + "ERROR in re-labeling:" + f"Reached {num_labels_tot} labels, " + f"but dtype={label_dtype}" ) if output_ROI_table: @@ -614,9 +642,13 @@ def cellpose_segmentation( overlap_list = [] for df in bbox_dataframe_list: - overlap_list.extend(get_overlapping_pairs_3D(df, full_res_pxl_sizes_zyx)) + overlap_list.extend( + get_overlapping_pairs_3D(df, full_res_pxl_sizes_zyx) + ) if len(overlap_list) > 0: - logger.warning(f"{len(overlap_list)} bounding-box pairs overlap") + logger.warning( + f"{len(overlap_list)} bounding-box pairs overlap" + ) # Compute and store 0-th level to disk da.array(new_label_img).to_zarr( @@ -625,7 +657,10 @@ def cellpose_segmentation( compute=True, ) - logger.info(f"End cellpose_segmentation task for {zarr_url}, " "now building pyramids.") + logger.info( + f"End cellpose_segmentation task for {zarr_url}, " + "now building pyramids." 
+ ) # Starting from on-disk highest-resolution data, build and write to disk a # pyramid of coarser levels @@ -660,7 +695,10 @@ def cellpose_segmentation( # Write to zarr group image_group = zarr.group(zarr_url) - logger.info("Now writing bounding-box ROI table to " f"{zarr_url}/tables/{output_ROI_table}") + logger.info( + "Now writing bounding-box ROI table to " + f"{zarr_url}/tables/{output_ROI_table}" + ) table_attrs = { "type": "masking_roi_table", "region": {"path": f"../labels/{output_label_name}"}, diff --git a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py index f82ad20ce..fc1ae6380 100644 --- a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py +++ b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_compute.py @@ -85,9 +85,13 @@ def cellvoyager_to_ome_zarr_compute( full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) logger.info(f"NGFF image has {num_levels=}") logger.info(f"NGFF image has {coarsening_xy=}") - logger.info(f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}") + logger.info( + f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}" + ) - channels: list[OmeroChannel] = get_omero_channel_list(image_zarr_path=zarr_url) + channels: list[OmeroChannel] = get_omero_channel_list( + image_zarr_path=zarr_url + ) wavelength_ids = [c.wavelength_id for c in channels] # Read useful information from ROI table @@ -111,7 +115,10 @@ def cellvoyager_to_ome_zarr_compute( max_x = well_indices[0][5] # Load a single image, to retrieve useful information - patterns = [f"{init_args.plate_prefix}_{init_args.well_ID}_*." f"{init_args.image_extension}"] + patterns = [ + f"{init_args.plate_prefix}_{init_args.well_ID}_*." + f"{init_args.image_extension}" + ] if init_args.image_glob_patterns: patterns.extend(init_args.image_glob_patterns) @@ -136,7 +143,10 @@ def cellvoyager_to_ome_zarr_compute( for i_c, wavelength_id in enumerate(wavelength_ids): A, C = wavelength_id.split("_") - patterns = [f"{init_args.plate_prefix}_{init_args.well_ID}_*{A}*{C}*." f"{init_args.image_extension}"] + patterns = [ + f"{init_args.plate_prefix}_{init_args.well_ID}_*{A}*{C}*." + f"{init_args.image_extension}" + ] if init_args.image_glob_patterns: patterns.extend(init_args.image_glob_patterns) filenames_set = glob_with_multiple_patterns( diff --git a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py index 4e88f70ee..831dde3c2 100644 --- a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py +++ b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init.py @@ -160,7 +160,9 @@ def cellvoyager_to_ome_zarr_init( C = filename_metadata["C"] tmp_wavelength_ids.append(f"A{A}_C{C}") except ValueError as e: - logger.warning(f'Skipping "{Path(fn).name}". Original error: ' + str(e)) + logger.warning( + f'Skipping "{Path(fn).name}". 
Original error: ' + str(e) + ) tmp_plates = sorted(list(set(tmp_plates))) tmp_wavelength_ids = sorted(list(set(tmp_wavelength_ids))) @@ -186,7 +188,9 @@ def cellvoyager_to_ome_zarr_init( while new_plate in plates: new_plate = f"{plate}_{ind}" ind += 1 - logger.info(f"WARNING: {plate} already exists, renaming it as {new_plate}") + logger.info( + f"WARNING: {plate} already exists, renaming it as {new_plate}" + ) plates.append(new_plate) dict_plate_prefixes[new_plate] = dict_plate_prefixes[plate] plate = new_plate @@ -198,13 +202,18 @@ def cellvoyager_to_ome_zarr_init( actual_wavelength_ids = tmp_wavelength_ids[:] else: if actual_wavelength_ids != tmp_wavelength_ids: - raise ValueError(f"ERROR\n{info}\nERROR:" f" expected channels {actual_wavelength_ids}") + raise ValueError( + f"ERROR\n{info}\nERROR:" + f" expected channels {actual_wavelength_ids}" + ) # Update dict_plate_paths dict_plate_paths[plate] = image_dir # Check that all channels are in the allowed_channels - allowed_wavelength_ids = [channel.wavelength_id for channel in allowed_channels] + allowed_wavelength_ids = [ + channel.wavelength_id for channel in allowed_channels + ] if not set(actual_wavelength_ids).issubset(set(allowed_wavelength_ids)): msg = "ERROR in create_ome_zarr\n" msg += f"actual_wavelength_ids: {actual_wavelength_ids}\n" @@ -213,7 +222,11 @@ def cellvoyager_to_ome_zarr_init( # Create actual_channels, i.e. a list of the channel dictionaries which are # present - actual_channels = [channel for channel in allowed_channels if channel.wavelength_id in actual_wavelength_ids] + actual_channels = [ + channel + for channel in allowed_channels + if channel.wavelength_id in actual_wavelength_ids + ] ################################################################ # Create well/image OME-Zarr folders on disk, and prepare output @@ -267,9 +280,13 @@ def cellvoyager_to_ome_zarr_init( patterns = [f"{plate_prefix}_*.{image_extension}"] if image_glob_patterns: patterns.extend(image_glob_patterns) - plate_images = glob_with_multiple_patterns(folder=str(in_path), patterns=patterns) + plate_images = glob_with_multiple_patterns( + folder=str(in_path), patterns=patterns + ) - wells = [parse_filename(os.path.basename(fn))["well"] for fn in plate_images] + wells = [ + parse_filename(os.path.basename(fn))["well"] for fn in plate_images + ] wells = sorted(list(set(wells))) # Verify that all wells have all channels @@ -277,7 +294,9 @@ def cellvoyager_to_ome_zarr_init( patterns = [f"{plate_prefix}_{well}_*.{image_extension}"] if image_glob_patterns: patterns.extend(image_glob_patterns) - well_images = glob_with_multiple_patterns(folder=str(in_path), patterns=patterns) + well_images = glob_with_multiple_patterns( + folder=str(in_path), patterns=patterns + ) # Check number of images matches with expected one if metadata_table_file is None: @@ -297,7 +316,9 @@ def cellvoyager_to_ome_zarr_init( for fpath in well_images: try: filename_metadata = parse_filename(os.path.basename(fpath)) - well_wavelength_ids.append(f"A{filename_metadata['A']}_C{filename_metadata['C']}") + well_wavelength_ids.append( + f"A{filename_metadata['A']}_C{filename_metadata['C']}" + ) except IndexError: logger.info(f"Skipping {fpath}") well_wavelength_ids = sorted(list(set(well_wavelength_ids))) @@ -311,8 +332,12 @@ def cellvoyager_to_ome_zarr_init( well_rows_columns = generate_row_col_split(wells) - row_list = [well_row_column[0] for well_row_column in well_rows_columns] - col_list = [well_row_column[1] for well_row_column in well_rows_columns] + row_list = [ + 
well_row_column[0] for well_row_column in well_rows_columns + ] + col_list = [ + well_row_column[1] for well_row_column in well_rows_columns + ] row_list = sorted(list(set(row_list))) col_list = sorted(list(set(col_list))) @@ -391,8 +416,10 @@ def cellvoyager_to_ome_zarr_init( "scale": [ 1, pixel_size_z, - pixel_size_y * coarsening_xy**ind_level, - pixel_size_x * coarsening_xy**ind_level, + pixel_size_y + * coarsening_xy**ind_level, + pixel_size_x + * coarsening_xy**ind_level, ], } ], @@ -406,7 +433,9 @@ def cellvoyager_to_ome_zarr_init( "id": 1, # TODO does this depend on the plate number? "name": "TBD", "version": __OME_NGFF_VERSION__, - "channels": define_omero_channels(channels=actual_channels, bit_depth=bit_depth), + "channels": define_omero_channels( + channels=actual_channels, bit_depth=bit_depth + ), } # Validate Image attrs @@ -415,7 +444,9 @@ def cellvoyager_to_ome_zarr_init( # Prepare AnnData tables for FOV/well ROIs well_id = get_filename_well_id(row, column) FOV_ROIs_table = prepare_FOV_ROI_table(site_metadata.loc[well_id]) - well_ROIs_table = prepare_well_ROI_table(site_metadata.loc[well_id]) + well_ROIs_table = prepare_well_ROI_table( + site_metadata.loc[well_id] + ) # Write AnnData tables into the `tables` zarr group write_table( diff --git a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py index 8ce0864ac..723e4f9c9 100644 --- a/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py +++ b/fractal_tasks_core/tasks/cellvoyager_to_ome_zarr_init_multiplex.py @@ -122,13 +122,19 @@ def cellvoyager_to_ome_zarr_init_multiplex( # 3. Files exist. if set(acquisitions.keys()) != set(metadata_table_files.keys()): raise ValueError( - "Mismatch in acquisition keys between " f"{acquisitions.keys()=} and " f"{metadata_table_files.keys()=}" + "Mismatch in acquisition keys between " + f"{acquisitions.keys()=} and " + f"{metadata_table_files.keys()=}" ) for f in metadata_table_files.values(): if not f.endswith(".csv"): - raise ValueError(f"{f} (in metadata_table_file) is not a csv file.") + raise ValueError( + f"{f} (in metadata_table_file) is not a csv file." + ) if not os.path.isfile(f): - raise ValueError(f"{f} (in metadata_table_file) does not exist.") + raise ValueError( + f"{f} (in metadata_table_file) does not exist." + ) # Preliminary checks on acquisitions # Note that in metadata the keys of dictionary arguments should be @@ -166,7 +172,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( C = filename_metadata["C"] actual_wavelength_ids.append(f"A{A}_C{C}") except ValueError as e: - logger.warning(f'Skipping "{Path(fn).name}". Original error: ' + str(e)) + logger.warning( + f'Skipping "{Path(fn).name}". 
Original error: ' + str(e) + ) plates = sorted(list(set(plates))) actual_wavelength_ids = sorted(list(set(actual_wavelength_ids))) @@ -189,12 +197,17 @@ def cellvoyager_to_ome_zarr_init_multiplex( if int(acquisition) > 0: plate = dict_acquisitions["0"]["plate"] logger.warning( - f"For {acquisition=}, we replace {original_plate=} with " f"{plate=} (the one for acquisition 0)" + f"For {acquisition=}, we replace {original_plate=} with " + f"{plate=} (the one for acquisition 0)" ) # Check that all channels are in the allowed_channels - allowed_wavelength_ids = [c.wavelength_id for c in acq_input.allowed_channels] - if not set(actual_wavelength_ids).issubset(set(allowed_wavelength_ids)): + allowed_wavelength_ids = [ + c.wavelength_id for c in acq_input.allowed_channels + ] + if not set(actual_wavelength_ids).issubset( + set(allowed_wavelength_ids) + ): msg = "ERROR in create_ome_zarr\n" msg += f"actual_wavelength_ids: {actual_wavelength_ids}\n" msg += f"allowed_wavelength_ids: {allowed_wavelength_ids}\n" @@ -203,7 +216,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( # Create actual_channels, i.e. a list of the channel dictionaries which # are present actual_channels = [ - channel for channel in acq_input.allowed_channels if channel.wavelength_id in actual_wavelength_ids + channel + for channel in acq_input.allowed_channels + if channel.wavelength_id in actual_wavelength_ids ] logger.info(f"plate: {plate}") @@ -214,9 +229,13 @@ def cellvoyager_to_ome_zarr_init_multiplex( dict_acquisitions[acquisition]["original_plate"] = original_plate dict_acquisitions[acquisition]["plate_prefix"] = plate_prefix dict_acquisitions[acquisition]["image_folder"] = acq_input.image_dir - dict_acquisitions[acquisition]["original_paths"] = [acq_input.image_dir] + dict_acquisitions[acquisition]["original_paths"] = [ + acq_input.image_dir + ] dict_acquisitions[acquisition]["actual_channels"] = actual_channels - dict_acquisitions[acquisition]["actual_wavelength_ids"] = actual_wavelength_ids + dict_acquisitions[acquisition][ + "actual_wavelength_ids" + ] = actual_wavelength_ids parallelization_list = [] acquisitions_sorted = sorted(list(acquisitions.keys())) @@ -229,7 +248,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( full_zarrurl = str(Path(zarr_dir) / zarrurl) logger.info(f"Creating {full_zarrurl=}") # Call zarr.open_group wrapper, which handles overwrite=True/False - group_plate = open_zarr_group_with_overwrite(full_zarrurl, overwrite=overwrite) + group_plate = open_zarr_group_with_overwrite( + full_zarrurl, overwrite=overwrite + ) group_plate.attrs["plate"] = { "acquisitions": [ { @@ -283,7 +304,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( patterns=patterns, ) - wells = [parse_filename(os.path.basename(fn))["well"] for fn in plate_images] + wells = [ + parse_filename(os.path.basename(fn))["well"] for fn in plate_images + ] wells = sorted(list(set(wells))) logger.info(f"{wells=}") @@ -308,7 +331,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( except IndexError: logger.info(f"Skipping {fpath}") well_wavelength_ids = sorted(list(set(well_wavelength_ids))) - actual_wavelength_ids = dict_acquisitions[acquisition]["actual_wavelength_ids"] + actual_wavelength_ids = dict_acquisitions[acquisition][ + "actual_wavelength_ids" + ] if well_wavelength_ids != actual_wavelength_ids: raise ValueError( f"ERROR: well {well} in plate {plate} (prefix: " @@ -318,8 +343,12 @@ def cellvoyager_to_ome_zarr_init_multiplex( ) well_rows_columns = generate_row_col_split(wells) - row_list = [well_row_column[0] for well_row_column in 
well_rows_columns] - col_list = [well_row_column[1] for well_row_column in well_rows_columns] + row_list = [ + well_row_column[0] for well_row_column in well_rows_columns + ] + col_list = [ + well_row_column[1] for well_row_column in well_rows_columns + ] row_list = sorted(list(set(row_list))) col_list = sorted(list(set(col_list))) @@ -342,7 +371,10 @@ def cellvoyager_to_ome_zarr_init_multiplex( for row, column in well_rows_columns: parallelization_list.append( { - "zarr_url": (f"{zarr_dir}/{plate}.zarr/{row}/{column}/" f"{acquisition}/"), + "zarr_url": ( + f"{zarr_dir}/{plate}.zarr/{row}/{column}/" + f"{acquisition}/" + ), "init_args": InitArgsCellVoyager( image_dir=acquisitions[acquisition].image_dir, plate_prefix=plate_prefix, @@ -370,8 +402,12 @@ def cellvoyager_to_ome_zarr_init_multiplex( group_well.attrs["well"] = well_attrs zarrurls["well"].append(f"{plate}.zarr/{row}/{column}") except ContainsGroupError: - group_well = zarr.open_group(f"{full_zarrurl}/{row}/{column}/", mode="r+") - logging.info(f"Loaded group_well from {full_zarrurl}/{row}/{column}") + group_well = zarr.open_group( + f"{full_zarrurl}/{row}/{column}/", mode="r+" + ) + logging.info( + f"Loaded group_well from {full_zarrurl}/{row}/{column}" + ) current_images = group_well.attrs["well"]["images"] + [ {"path": f"{acquisition}", "acquisition": int(acquisition)} ] @@ -383,7 +419,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( Well(**well_attrs) group_well.attrs["well"] = well_attrs - group_image = group_well.create_group(f"{acquisition}/") # noqa: F841 + group_image = group_well.create_group( + f"{acquisition}/" + ) # noqa: F841 logging.info(f"Created image group {row}/{column}/{acquisition}") image = f"{plate}.zarr/{row}/{column}/{acquisition}" zarrurls["image"].append(image) @@ -418,8 +456,10 @@ def cellvoyager_to_ome_zarr_init_multiplex( "scale": [ 1, pixel_size_z, - pixel_size_y * coarsening_xy**ind_level, - pixel_size_x * coarsening_xy**ind_level, + pixel_size_y + * coarsening_xy**ind_level, + pixel_size_x + * coarsening_xy**ind_level, ], } ], @@ -445,7 +485,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( # Prepare AnnData tables for FOV/well ROIs well_id = get_filename_well_id(row, column) FOV_ROIs_table = prepare_FOV_ROI_table(site_metadata.loc[well_id]) - well_ROIs_table = prepare_well_ROI_table(site_metadata.loc[well_id]) + well_ROIs_table = prepare_well_ROI_table( + site_metadata.loc[well_id] + ) # Write AnnData tables into the `tables` zarr group write_table( @@ -466,7 +508,9 @@ def cellvoyager_to_ome_zarr_init_multiplex( # Check that the different images (e.g. 
different acquisitions) in the each # well have unique labels for well_path in zarrurls["well"]: - check_well_channel_labels(well_zarr_path=str(Path(zarr_dir) / well_path)) + check_well_channel_labels( + well_zarr_path=str(Path(zarr_dir) / well_path) + ) return dict(parallelization_list=parallelization_list) diff --git a/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py b/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py index 09bbeb143..9f35db5f2 100644 --- a/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py +++ b/fractal_tasks_core/tasks/copy_ome_zarr_hcs_plate.py @@ -153,26 +153,36 @@ def _generate_plate_well_metadata( # Find images of the current well with name matching the current image # TODO: clarify whether this list must always have length 1 curr_well_image_list = [ - img for img in well_image_attrs[old_plate_url][well_sub_url].images if img.path == curr_img_sub_url + img + for img in well_image_attrs[old_plate_url][well_sub_url].images + if img.path == curr_img_sub_url ] - new_well_image_attrs[old_plate_url][well_sub_url] += curr_well_image_list + new_well_image_attrs[old_plate_url][ + well_sub_url + ] += curr_well_image_list # Fill in the plate metadata based on all available wells for old_plate_url in plate_metadata_dicts: - well_list, row_list, column_list = _generate_wells_rows_columns(plate_wells[old_plate_url]) + well_list, row_list, column_list = _generate_wells_rows_columns( + plate_wells[old_plate_url] + ) plate_metadata_dicts[old_plate_url]["plate"]["columns"] = [] for column in column_list: - plate_metadata_dicts[old_plate_url]["plate"]["columns"].append({"name": column}) + plate_metadata_dicts[old_plate_url]["plate"]["columns"].append( + {"name": column} + ) plate_metadata_dicts[old_plate_url]["plate"]["rows"] = [] for row in row_list: - plate_metadata_dicts[old_plate_url]["plate"]["rows"].append({"name": row}) + plate_metadata_dicts[old_plate_url]["plate"]["rows"].append( + {"name": row} + ) plate_metadata_dicts[old_plate_url]["plate"]["wells"] = well_list # Validate with NgffPlateMeta model - plate_metadata_dicts[old_plate_url] = NgffPlateMeta(**plate_metadata_dicts[old_plate_url]).dict( - exclude_none=True - ) + plate_metadata_dicts[old_plate_url] = NgffPlateMeta( + **plate_metadata_dicts[old_plate_url] + ).dict(exclude_none=True) return plate_metadata_dicts, new_well_image_attrs, well_image_attrs @@ -221,7 +231,8 @@ def copy_ome_zarr_hcs_plate( # Preliminary check if suffix is None or suffix == "": raise ValueError( - "Running copy_ome_zarr_hcs_plate without a suffix would lead to" "overwriting of the existing HCS plates." + "Running copy_ome_zarr_hcs_plate without a suffix would lead to" + "overwriting of the existing HCS plates." 
) parallelization_list = [] @@ -256,7 +267,9 @@ def copy_ome_zarr_hcs_plate( zarrurl_new = f"{zarr_dir}/{new_plate_name}.zarr" logger.info(f"{old_plate_url=}") logger.info(f"{zarrurl_new=}") - new_plate_group = open_zarr_group_with_overwrite(zarrurl_new, overwrite=overwrite) + new_plate_group = open_zarr_group_with_overwrite( + zarrurl_new, overwrite=overwrite + ) new_plate_group.attrs.put(plate_attrs) # Write well groups: @@ -264,8 +277,15 @@ def copy_ome_zarr_hcs_plate( new_well_group = zarr.group(f"{zarrurl_new}/{well_sub_url}") well_attrs = dict( well=dict( - images=[img.dict(exclude_none=True) for img in new_well_image_attrs[old_plate_url][well_sub_url]], - version=well_image_attrs[old_plate_url][well_sub_url].version, + images=[ + img.dict(exclude_none=True) + for img in new_well_image_attrs[old_plate_url][ + well_sub_url + ] + ], + version=well_image_attrs[old_plate_url][ + well_sub_url + ].version, ) ) new_well_group.attrs.put(well_attrs) diff --git a/fractal_tasks_core/tasks/find_registration_consensus.py b/fractal_tasks_core/tasks/find_registration_consensus.py index 543888f68..b2825e7be 100644 --- a/fractal_tasks_core/tasks/find_registration_consensus.py +++ b/fractal_tasks_core/tasks/find_registration_consensus.py @@ -87,7 +87,9 @@ def find_registration_consensus( roi_tables_attrs = {} for acq_zarr_url in init_args.zarr_url_list: curr_ROI_table = ad.read_zarr(f"{acq_zarr_url}/tables/{roi_table}") - curr_ROI_table_group = zarr.open_group(f"{acq_zarr_url}/tables/{roi_table}", mode="r") + curr_ROI_table_group = zarr.open_group( + f"{acq_zarr_url}/tables/{roi_table}", mode="r" + ) curr_ROI_table_attrs = curr_ROI_table_group.attrs.asdict() # For reference_acquisition, handle the fact that it doesn't @@ -121,18 +123,26 @@ def find_registration_consensus( f"{zarr_url}: {rois}" ) - roi_table_dfs = [roi_table.to_df().loc[:, translation_columns] for roi_table in roi_tables.values()] + roi_table_dfs = [ + roi_table.to_df().loc[:, translation_columns] + for roi_table in roi_tables.values() + ] logger.info("Calculating min & max translation across acquisitions.") max_df, min_df = calculate_min_max_across_dfs(roi_table_dfs) shifted_rois = {} # Loop over acquisitions for acq_zarr_url in init_args.zarr_url_list: - shifted_rois[acq_zarr_url] = apply_registration_to_single_ROI_table(roi_tables[acq_zarr_url], max_df, min_df) + shifted_rois[acq_zarr_url] = apply_registration_to_single_ROI_table( + roi_tables[acq_zarr_url], max_df, min_df + ) # TODO: Drop translation columns from this table? 
- logger.info(f"Write the registered ROI table {new_roi_table} for " "{acq_zarr_url=}") + logger.info( + f"Write the registered ROI table {new_roi_table} for " + "{acq_zarr_url=}" + ) # Save the shifted ROI table as a new table image_group = zarr.group(acq_zarr_url) write_table( diff --git a/fractal_tasks_core/tasks/illumination_correction.py b/fractal_tasks_core/tasks/illumination_correction.py index dbfe00edc..646b47570 100644 --- a/fractal_tasks_core/tasks/illumination_correction.py +++ b/fractal_tasks_core/tasks/illumination_correction.py @@ -59,7 +59,10 @@ def correct( # Check shapes if corr_img.shape != img_stack.shape[2:] or img_stack.shape[0] != 1: - raise ValueError("Error in illumination_correction:\n" f"{img_stack.shape=}\n{corr_img.shape=}") + raise ValueError( + "Error in illumination_correction:\n" + f"{img_stack.shape=}\n{corr_img.shape=}" + ) # Store info about dtype dtype = img_stack.dtype @@ -157,10 +160,14 @@ def illumination_correction( full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) logger.info(f"NGFF image has {num_levels=}") logger.info(f"NGFF image has {coarsening_xy=}") - logger.info(f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}") + logger.info( + f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}" + ) # Read channels from .zattrs - channels: list[OmeroChannel] = get_omero_channel_list(image_zarr_path=zarr_url) + channels: list[OmeroChannel] = get_omero_channel_list( + image_zarr_path=zarr_url + ) num_channels = len(channels) # Read FOV ROIs @@ -184,7 +191,9 @@ def illumination_correction( ref_img_size = img_size else: if img_size != ref_img_size: - raise ValueError("ERROR: inconsistent image sizes in list_indices") + raise ValueError( + "ERROR: inconsistent image sizes in list_indices" + ) img_size_y, img_size_x = img_size[:] # Assemble dictionary of matrices and check their shapes @@ -192,10 +201,16 @@ def illumination_correction( for channel in channels: wavelength_id = channel.wavelength_id corrections[wavelength_id] = imread( - (Path(illumination_profiles_folder) / illumination_profiles[wavelength_id]).as_posix() + ( + Path(illumination_profiles_folder) + / illumination_profiles[wavelength_id] + ).as_posix() ) if corrections[wavelength_id].shape != (img_size_y, img_size_x): - raise ValueError("Error in illumination_correction, " "correction matrix has wrong shape.") + raise ValueError( + "Error in illumination_correction, " + "correction matrix has wrong shape." 
+ ) # Lazily load highest-res level from original zarr array data_czyx = da.from_zarr(f"{zarr_url}/0") @@ -229,7 +244,10 @@ def illumination_correction( slice(s_y, e_y), slice(s_x, e_x), ) - logger.info(f"Now processing ROI {i_ROI+1}/{num_ROIs} " f"for channel {i_c+1}/{num_channels}") + logger.info( + f"Now processing ROI {i_ROI+1}/{num_ROIs} " + f"for channel {i_c+1}/{num_channels}" + ) # Execute illumination correction corrected_fov = correct( data_czyx[region].compute(), @@ -259,7 +277,9 @@ def illumination_correction( if overwrite_input: image_list_updates = dict(image_list_updates=[dict(zarr_url=zarr_url)]) else: - image_list_updates = dict(image_list_updates=[dict(zarr_url=zarr_url_new, origin=zarr_url)]) + image_list_updates = dict( + image_list_updates=[dict(zarr_url=zarr_url_new, origin=zarr_url)] + ) return image_list_updates diff --git a/fractal_tasks_core/tasks/image_based_registration_hcs_init.py b/fractal_tasks_core/tasks/image_based_registration_hcs_init.py index 9f553cb95..47989280d 100644 --- a/fractal_tasks_core/tasks/image_based_registration_hcs_init.py +++ b/fractal_tasks_core/tasks/image_based_registration_hcs_init.py @@ -59,7 +59,9 @@ def image_based_registration_hcs_init( task_output: Dictionary for Fractal server that contains a parallelization list. """ - logger.info(f"Running `image_based_registration_hcs_init` for {zarr_urls=}") + logger.info( + f"Running `image_based_registration_hcs_init` for {zarr_urls=}" + ) image_groups = create_well_acquisition_dict(zarr_urls) # Create the parallelization list diff --git a/fractal_tasks_core/tasks/import_ome_zarr.py b/fractal_tasks_core/tasks/import_ome_zarr.py index 55ef34ce3..c720654e3 100644 --- a/fractal_tasks_core/tasks/import_ome_zarr.py +++ b/fractal_tasks_core/tasks/import_ome_zarr.py @@ -66,7 +66,10 @@ def _process_single_image( # Preliminary checks if add_grid_ROI_table and (grid_YX_shape is None): - raise ValueError(f"_process_single_image called with {add_grid_ROI_table=}, " f"but {grid_YX_shape=}.") + raise ValueError( + f"_process_single_image called with {add_grid_ROI_table=}, " + f"but {grid_YX_shape=}." + ) pixels_ZYX = image_meta.get_pixel_sizes_zyx(level=0) @@ -118,12 +121,17 @@ def _process_single_image( logger.info(f"Existing axes: {image_meta.axes_names}") logger.info(f"Channel-axis index: {channel_axis_index}") num_channels_zarr = array.shape[channel_axis_index] - logger.info(f"{num_channels_zarr} channel(s) found in Zarr array " f"at {image_path}/{dataset_subpath}") + logger.info( + f"{num_channels_zarr} channel(s) found in Zarr array " + f"at {image_path}/{dataset_subpath}" + ) # Update or create omero channels metadata old_omero = image_group.attrs.get("omero", {}) old_channels = old_omero.get("channels", []) if len(old_channels) > 0: - logger.info(f"{len(old_channels)} channel(s) found in NGFF omero metadata") + logger.info( + f"{len(old_channels)} channel(s) found in NGFF omero metadata" + ) if len(old_channels) != num_channels_zarr: error_msg = ( "Channels-number mismatch: Number of channels in the " diff --git a/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py b/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py index 92e498459..6f6bff63a 100644 --- a/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py +++ b/fractal_tasks_core/tasks/init_group_by_well_for_multiplexing.py @@ -48,7 +48,9 @@ def init_group_by_well_for_multiplexing( OME-NGFF HCS well metadata acquisition keys to find the reference acquisition. 
""" - logger.info(f"Running `init_group_by_well_for_multiplexing` for {zarr_urls=}") + logger.info( + f"Running `init_group_by_well_for_multiplexing` for {zarr_urls=}" + ) image_groups = create_well_acquisition_dict(zarr_urls) # Create the parallelization list diff --git a/fractal_tasks_core/tasks/io_models.py b/fractal_tasks_core/tasks/io_models.py index db2bb7f81..93e4425c4 100644 --- a/fractal_tasks_core/tasks/io_models.py +++ b/fractal_tasks_core/tasks/io_models.py @@ -126,7 +126,9 @@ def table_name_only_for_dataframe_type(cls, v, values): """ _type = values.get("type") if (_type == "dataframe" and (not v)) or (_type != "dataframe" and v): - raise ValueError(f"Output item has type={_type} but table_name={v}.") + raise ValueError( + f"Output item has type={_type} but table_name={v}." + ) return v @@ -151,7 +153,9 @@ def label_name_is_present(cls, v, values): """ _type = values.get("type") if _type == "label" and not v: - raise ValueError(f"Input item has type={_type} but label_name={v}.") + raise ValueError( + f"Input item has type={_type} but label_name={v}." + ) return v @validator("channel", always=True) diff --git a/fractal_tasks_core/tasks/maximum_intensity_projection.py b/fractal_tasks_core/tasks/maximum_intensity_projection.py index 7454cf63d..bec0f72cf 100644 --- a/fractal_tasks_core/tasks/maximum_intensity_projection.py +++ b/fractal_tasks_core/tasks/maximum_intensity_projection.py @@ -129,11 +129,15 @@ def maximum_intensity_projection( "write it back to the new zarr file." ) new_ROI_table = ad.read_zarr(f"{init_args.origin_url}/tables/{table}") - old_ROI_table_attrs = zarr.open_group(f"{init_args.origin_url}/tables/{table}").attrs.asdict() + old_ROI_table_attrs = zarr.open_group( + f"{init_args.origin_url}/tables/{table}" + ).attrs.asdict() # Convert 3D ROIs to 2D pxl_sizes_zyx = ngff_image.get_pixel_sizes_zyx(level=0) - new_ROI_table = convert_ROIs_from_3D_to_2D(new_ROI_table, pixel_size_z=pxl_sizes_zyx[0]) + new_ROI_table = convert_ROIs_from_3D_to_2D( + new_ROI_table, pixel_size_z=pxl_sizes_zyx[0] + ) # Write new table write_table( new_image_group, @@ -144,9 +148,17 @@ def maximum_intensity_projection( ) for table in non_roi_tables: - logger.info(f"Reading {table} from " f"{init_args.origin_url=}, and " "write it back to the new zarr file.") - new_non_ROI_table = ad.read_zarr(f"{init_args.origin_url}/tables/{table}") - old_non_ROI_table_attrs = zarr.open_group(f"{init_args.origin_url}/tables/{table}").attrs.asdict() + logger.info( + f"Reading {table} from " + f"{init_args.origin_url=}, and " + "write it back to the new zarr file." 
+ ) + new_non_ROI_table = ad.read_zarr( + f"{init_args.origin_url}/tables/{table}" + ) + old_non_ROI_table_attrs = zarr.open_group( + f"{init_args.origin_url}/tables/{table}" + ).attrs.asdict() # Write new table write_table( diff --git a/fractal_tasks_core/tasks/napari_workflows_wrapper.py b/fractal_tasks_core/tasks/napari_workflows_wrapper.py index 3347cf1c9..c37b1a7a4 100644 --- a/fractal_tasks_core/tasks/napari_workflows_wrapper.py +++ b/fractal_tasks_core/tasks/napari_workflows_wrapper.py @@ -141,7 +141,9 @@ def napari_workflows_wrapper( # Characterization of workflow and scope restriction input_types = [in_params.type for (name, in_params) in input_specs.items()] - output_types = [out_params.type for (name, out_params) in output_specs.items()] + output_types = [ + out_params.type for (name, out_params) in output_specs.items() + ] are_inputs_all_images = set(input_types) == {"image"} are_outputs_all_labels = set(output_types) == {"label"} are_outputs_all_dataframes = set(output_types) == {"dataframe"} @@ -149,7 +151,9 @@ def napari_workflows_wrapper( is_measurement_only_workflow = are_outputs_all_dataframes # Level-related constraint logger.info(f"This workflow acts at {level=}") - logger.info(f"Is the current workflow a labeling one? {is_labeling_workflow}") + logger.info( + f"Is the current workflow a labeling one? {is_labeling_workflow}" + ) if level > 0 and not is_labeling_workflow: msg = ( f"{level=}>0 is currently only accepted for labeling workflows, " @@ -159,7 +163,10 @@ def napari_workflows_wrapper( raise OutOfTaskScopeError(msg) # Relabeling-related (soft) constraint if is_measurement_only_workflow and relabeling: - logger.warning("This is a measurement-output-only workflow, setting " "relabeling=False.") + logger.warning( + "This is a measurement-output-only workflow, setting " + "relabeling=False." + ) relabeling = False if relabeling: max_label_for_relabeling = 0 @@ -186,10 +193,17 @@ def napari_workflows_wrapper( ) check_valid_ROI_indices(list_indices, input_ROI_table) num_ROIs = len(list_indices) - logger.info(f"Completed reading ROI table {input_ROI_table}," f" found {num_ROIs} ROIs.") + logger.info( + f"Completed reading ROI table {input_ROI_table}," + f" found {num_ROIs} ROIs." 
+ ) # Input preparation: "image" type - image_inputs = [(name, in_params) for (name, in_params) in input_specs.items() if in_params.type == "image"] + image_inputs = [ + (name, in_params) + for (name, in_params) in input_specs.items() + if in_params.type == "image" + ] input_image_arrays = {} if image_inputs: img_array = da.from_zarr(f"{zarr_url}/{level}") @@ -206,26 +220,41 @@ def napari_workflows_wrapper( # Handle dimensions shape = input_image_arrays[name].shape if expected_dimensions == 3 and shape[0] == 1: - logger.warning(f"Input {name} has shape {shape} " f"but {expected_dimensions=}") + logger.warning( + f"Input {name} has shape {shape} " + f"but {expected_dimensions=}" + ) if expected_dimensions == 2: if len(shape) == 2: # We already load the data as a 2D array pass elif shape[0] == 1: - input_image_arrays[name] = input_image_arrays[name][0, :, :] + input_image_arrays[name] = input_image_arrays[name][ + 0, :, : + ] else: - msg = f"Input {name} has shape {shape} " f"but {expected_dimensions=}" + msg = ( + f"Input {name} has shape {shape} " + f"but {expected_dimensions=}" + ) logger.error(msg) raise ValueError(msg) logger.info(f"Prepared input with {name=} and {params=}") logger.info(f"{input_image_arrays=}") # Input preparation: "label" type - label_inputs = [(name, in_params) for (name, in_params) in input_specs.items() if in_params.type == "label"] + label_inputs = [ + (name, in_params) + for (name, in_params) in input_specs.items() + if in_params.type == "label" + ] if label_inputs: # Set target_shape for upscaling labels if not image_inputs: - logger.warning(f"{len(label_inputs)=} but num_image_inputs=0. " "Label array(s) will not be upscaled.") + logger.warning( + f"{len(label_inputs)=} but num_image_inputs=0. " + "Label array(s) will not be upscaled." + ) upscale_labels = False else: target_shape = list(input_image_arrays.values())[0].shape @@ -234,21 +263,31 @@ def napari_workflows_wrapper( input_label_arrays = {} for name, params in label_inputs: label_name = params.label_name - label_array_raw = da.from_zarr(f"{zarr_url}/labels/{label_name}/{level}") + label_array_raw = da.from_zarr( + f"{zarr_url}/labels/{label_name}/{level}" + ) input_label_arrays[name] = label_array_raw # Handle dimensions shape = input_label_arrays[name].shape if expected_dimensions == 3 and shape[0] == 1: - logger.warning(f"Input {name} has shape {shape} " f"but {expected_dimensions=}") + logger.warning( + f"Input {name} has shape {shape} " + f"but {expected_dimensions=}" + ) if expected_dimensions == 2: if len(shape) == 2: # We already load the data as a 2D array pass elif shape[0] == 1: - input_label_arrays[name] = input_label_arrays[name][0, :, :] + input_label_arrays[name] = input_label_arrays[name][ + 0, :, : + ] else: - msg = f"Input {name} has shape {shape} " f"but {expected_dimensions=}" + msg = ( + f"Input {name} has shape {shape} " + f"but {expected_dimensions=}" + ) logger.error(msg) raise ValueError(msg) @@ -278,12 +317,17 @@ def napari_workflows_wrapper( logger.info(f"{input_label_arrays=}") # Output preparation: "label" type - label_outputs = [(name, out_params) for (name, out_params) in output_specs.items() if out_params.type == "label"] + label_outputs = [ + (name, out_params) + for (name, out_params) in output_specs.items() + if out_params.type == "label" + ] if label_outputs: # Preliminary scope checks if len(label_outputs) > 1: raise OutOfTaskScopeError( - "Multiple label outputs would break label-inputs-only " f"workflows (found {len(label_outputs)=})." 
+ "Multiple label outputs would break label-inputs-only " + f"workflows (found {len(label_outputs)=})." ) if len(label_outputs) > 1 and relabeling: raise OutOfTaskScopeError( @@ -303,8 +347,12 @@ def napari_workflows_wrapper( reference_array = list(input_label_arrays.values())[0] # Re-load pixel size, matching to the correct level input_label_name = label_inputs[0][1].label_name - ngff_label_image_meta = load_NgffImageMeta(f"{zarr_url}/labels/{input_label_name}") - full_res_pxl_sizes_zyx = ngff_label_image_meta.get_pixel_sizes_zyx(level=0) + ngff_label_image_meta = load_NgffImageMeta( + f"{zarr_url}/labels/{input_label_name}" + ) + full_res_pxl_sizes_zyx = ngff_label_image_meta.get_pixel_sizes_zyx( + level=0 + ) # Create list of indices for 3D FOVs spanning the whole Z direction list_indices = convert_ROI_table_to_indices( ROI_table, @@ -322,7 +370,10 @@ def napari_workflows_wrapper( "are not upscaled." ) else: - msg = "Missing image_inputs and label_inputs, we cannot assign" " label output properties" + msg = ( + "Missing image_inputs and label_inputs, we cannot assign" + " label output properties" + ) raise OutOfTaskScopeError(msg) # Extract label properties from reference_array, and make sure they are @@ -331,7 +382,10 @@ def napari_workflows_wrapper( label_chunksize = reference_array.chunksize if len(label_shape) == 2 and len(label_chunksize) == 2: if expected_dimensions == 3: - raise ValueError(f"Something wrong: {label_shape=} but " f"{expected_dimensions=}") + raise ValueError( + f"Something wrong: {label_shape=} but " + f"{expected_dimensions=}" + ) label_shape = (1, label_shape[0], label_shape[1]) label_chunksize = (1, label_chunksize[0], label_chunksize[1]) logger.info(f"{label_shape=}") @@ -349,7 +403,9 @@ def napari_workflows_wrapper( 'First axis should have name "c".' 
) new_datasets = rescale_datasets( - datasets=[ds.dict() for ds in ngff_image_meta.multiscale.datasets], + datasets=[ + ds.dict() for ds in ngff_image_meta.multiscale.datasets + ], coarsening_xy=coarsening_xy, reference_level=level, remove_channel_axis=True, @@ -366,7 +422,11 @@ def napari_workflows_wrapper( { "name": label_name, "version": __OME_NGFF_VERSION__, - "axes": [ax.dict() for ax in ngff_image_meta.multiscale.axes if ax.type != "channel"], + "axes": [ + ax.dict() + for ax in ngff_image_meta.multiscale.axes + if ax.type != "channel" + ], "datasets": new_datasets, } ], @@ -381,7 +441,10 @@ def napari_workflows_wrapper( label_attrs=label_attrs, logger=logger, ) - logger.info("Helper function `prepare_label_group` returned " f"{label_group=}") + logger.info( + "Helper function `prepare_label_group` returned " + f"{label_group=}" + ) # (3) Create zarr group at level=0 store = zarr.storage.FSStore(f"{zarr_url}/labels/{label_name}/0") @@ -399,7 +462,9 @@ def napari_workflows_wrapper( # Output preparation: "dataframe" type dataframe_outputs = [ - (name, out_params) for (name, out_params) in output_specs.items() if out_params.type == "dataframe" + (name, out_params) + for (name, out_params) in output_specs.items() + if out_params.type == "dataframe" ] output_dataframe_lists: dict[str, list] = {} for name, out_params in dataframe_outputs: @@ -471,7 +536,10 @@ def napari_workflows_wrapper( # Check dimensions if len(mask.shape) != expected_dimensions: - msg = f"Output {output_name} has shape {mask.shape} " f"but {expected_dimensions=}" + msg = ( + f"Output {output_name} has shape {mask.shape} " + f"but {expected_dimensions=}" + ) logger.error(msg) raise ValueError(msg) elif expected_dimensions == 2: @@ -493,7 +561,8 @@ def napari_workflows_wrapper( if relabeling: mask[mask > 0] += max_label_for_relabeling logger.info( - f'ROI {i_ROI+1}/{num_ROIs}: Relabeling "{name}" label ' f"output, with {max_label_for_relabeling=}" + f'ROI {i_ROI+1}/{num_ROIs}: Relabeling "{name}" label ' + f"output, with {max_label_for_relabeling=}" ) max_label_for_relabeling += num_labels_in_this_ROI logger.info( diff --git a/tests/tasks/test_unit_napari_workflows_wrapper.py b/tests/tasks/test_unit_napari_workflows_wrapper.py index e13d676ab..616ba9a1d 100644 --- a/tests/tasks/test_unit_napari_workflows_wrapper.py +++ b/tests/tasks/test_unit_napari_workflows_wrapper.py @@ -16,9 +16,13 @@ def test_input_specs(tmp_path, testdata_path): """ # napari-workflows - workflow_file = str(testdata_path / "napari_workflows/wf_5-labeling_only.yaml") + workflow_file = str( + testdata_path / "napari_workflows/wf_5-labeling_only.yaml" + ) input_specs = {"asd": "asd"} - output_specs = {"output_label": {"type": "label", "label_name": "label_DAPI"}} + output_specs = { + "output_label": {"type": "label", "label_name": "label_DAPI"} + } zarr_url = str(tmp_path / "component") with pytest.raises(ValidationError): napari_workflows_wrapper( @@ -40,7 +44,9 @@ def test_output_specs(tmp_path, testdata_path, caplog): caplog.set_level(logging.WARNING) # napari-workflows - workflow_file = str(testdata_path / "napari_workflows/wf_5-labeling_only.yaml") + workflow_file = str( + testdata_path / "napari_workflows/wf_5-labeling_only.yaml" + ) input_specs = { "input_image": { "type": "image", diff --git a/tests/tasks/test_valid_task_interface.py b/tests/tasks/test_valid_task_interface.py index 3dff80c2b..3a75f2e23 100644 --- a/tests/tasks/test_valid_task_interface.py +++ b/tests/tasks/test_valid_task_interface.py @@ -53,5 +53,9 @@ def 
test_task_interface(task, tmp_path): if value is None: continue task_path = (module_dir / value).as_posix() - cmd = f"python {task_path} " f"--args-json {tmp_file_args} " f"--out-json {tmp_file_metadiff}" + cmd = ( + f"python {task_path} " + f"--args-json {tmp_file_args} " + f"--out-json {tmp_file_metadiff}" + ) validate_command(cmd) From 69d7107200a931a0f8665d9fb119ef29541b0efb Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Tue, 18 Jun 2024 13:13:54 +0200 Subject: [PATCH 08/15] set pydantic version to >=1.10.16 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7b77d8cde..20b33ce42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ numpy = "<2" pandas = ">=1.2.0,<2" defusedxml = "^0.7.1" lxml = "^4.9.1" -pydantic = "==1.10.16 || >=2.6.3" +pydantic = ">=1.10.16" docstring-parser = "^0.15" anndata = ">=0.8.0,<0.11.0" filelock = "3.13.*" From 2b2a07390437a3dc6e6e49714b401d150d299d22 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Tue, 18 Jun 2024 13:15:15 +0200 Subject: [PATCH 09/15] add runtime version check for pydantic --- fractal_tasks_core/__init__.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/fractal_tasks_core/__init__.py b/fractal_tasks_core/__init__.py index 4b38628ea..b45e18c80 100644 --- a/fractal_tasks_core/__init__.py +++ b/fractal_tasks_core/__init__.py @@ -1,6 +1,26 @@ import logging +def _check_pydantic_version(): + """ + Temporary check for pydantic version. + To be removed after moving to pydantic v2 is complete. + """ + import importlib.metadata + from packaging import version + + pydantic_version = version.parse(importlib.metadata.version("pydantic")) + pydantic_v1 = version.parse("1.10.16") + pydantic_v2 = version.parse("2.6.3") + if pydantic_version != pydantic_v1 and pydantic_version < pydantic_v2: + raise ImportError( + f"Pydantic version {pydantic_version} is not supported. " + f"Please use version =={pydantic_v1} or >={pydantic_v2}." + ) + + +_check_pydantic_version() + logging.basicConfig( level=logging.INFO, format="%(asctime)s; %(levelname)s; %(message)s" ) From 37f416ee51d6a794e8bf44f7181c1de1064ca72b Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Tue, 18 Jun 2024 13:18:05 +0200 Subject: [PATCH 10/15] update lock file --- poetry.lock | 557 +++++++++++++++++++++++++++++----------------------- 1 file changed, 309 insertions(+), 248 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7ac71ec1e..4f82d58d2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -38,6 +38,17 @@ doc = ["awkward (>=2.0.7)", "ipython", "myst-parser", "nbsphinx", "readthedocs-s gpu = ["cupy"] test = ["awkward (>=2.3)", "boltons", "dask[array,distributed] (>=2022.09.2)", "httpx", "joblib", "loompy (>=3.0.5)", "matplotlib", "openpyxl", "pyarrow", "pytest (>=7.3)", "pytest-cov (>=2.10)", "pytest-memray", "pytest-mock", "scanpy", "scikit-learn", "zarr"] +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "anyio" version = "4.4.0" @@ -265,17 +276,17 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "autopep8" -version = "2.2.0" +version = "2.3.0" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" optional = true python-versions = ">=3.8" files = [ - {file = "autopep8-2.2.0-py2.py3-none-any.whl", hash = "sha256:05418a981f038969d8bdcd5636bf15948db7555ae944b9f79b5a34b35f1370d4"}, - {file = "autopep8-2.2.0.tar.gz", hash = "sha256:d306a0581163ac29908280ad557773a95a9bede072c0fafed6f141f5311f43c1"}, + {file = "autopep8-2.3.0-py2.py3-none-any.whl", hash = "sha256:b716efa70cbafbf4a2c9c5ec1cabfa037a68f9e30b04c74ffa5864dd49b8f7d2"}, + {file = "autopep8-2.3.0.tar.gz", hash = "sha256:5cfe45eb3bef8662f6a3c7e28b7c0310c7310d340074b7f0f28f9810b44b7ef4"}, ] [package.dependencies] -pycodestyle = ">=2.11.0" +pycodestyle = ">=2.12.0" tomli = {version = "*", markers = "python_version < \"3.11\""} [[package]] @@ -632,28 +643,28 @@ files = [ [[package]] name = "cmake" -version = "3.29.3" +version = "3.29.5.1" description = "CMake is an open-source, cross-platform family of tools designed to build, test and package software" optional = true python-versions = ">=3.7" files = [ - {file = "cmake-3.29.3-py3-none-macosx_10_10_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:355f515826023338094514a2181724e297ed2145bc0792dacaa9ed3772b98733"}, - {file = "cmake-3.29.3-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ab5eb91e7f5bbfc2f0e23c964c3a3e74c6e6a26e9b59b57b87192d249b1b7162"}, - {file = "cmake-3.29.3-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ae9e5dcd77822f89e042ad820ef25a52327bb0d15fd7a492ad4886edb31fae52"}, - {file = "cmake-3.29.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b09d1f0f46a880fdfc50374917fd4c850d9428b244535343bb5411658a36e202"}, - {file = "cmake-3.29.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d05cf16a6fb370cc344b3552ab321524cba1f067da240876c09cab571bf6ec0"}, - {file = "cmake-3.29.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c0a23fbb3daeecdc42d233c1a2df233714c2db59e75ab154e2af469c1c308a5"}, - {file = "cmake-3.29.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1037218e135302f396eca444e24ca892d8a440589f1a859313e06484f10c350f"}, - {file = "cmake-3.29.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c84eead2ea6f596fe5ac58beedbfc9bc1f460c410c481348b3783b4794f4b1a2"}, - {file = 
"cmake-3.29.3-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:e1fd53ca2f24dc0aad54934c2472cb83e273b94b4bad23fcdbd438515881f5a7"}, - {file = "cmake-3.29.3-py3-none-musllinux_1_1_i686.whl", hash = "sha256:00225a2be8422d4b6f2ad2da10d7dfe2ad844748bd1defa94f236bfabb0d2d44"}, - {file = "cmake-3.29.3-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:28fe371f1865943118a0f669af87344c799751f85a5be084197c006ee6329d89"}, - {file = "cmake-3.29.3-py3-none-musllinux_1_1_s390x.whl", hash = "sha256:ad184528fa9560bf4167279e8e4e7168a5fa1cc87a9f0b4b99ffbc79588b0cf9"}, - {file = "cmake-3.29.3-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:40cd0ec1310e52fa29b4e2b07829d56ae95f01ea0b2479ece359259849269f86"}, - {file = "cmake-3.29.3-py3-none-win32.whl", hash = "sha256:a2c15ab9e4922d71d98a6495a5fd661dd00b3d4ada79a3d183f996fff45db011"}, - {file = "cmake-3.29.3-py3-none-win_amd64.whl", hash = "sha256:dd8aaffe5d8dc2dd41421dc63c39b64df30a7109392e276e2b6d021805b770e9"}, - {file = "cmake-3.29.3-py3-none-win_arm64.whl", hash = "sha256:6672a873855e9a8f954390d0352c1d09b034a36b5f4cc5da012ae292f28623f7"}, - {file = "cmake-3.29.3.tar.gz", hash = "sha256:d04adb1a8b878e92a734742cb0db9c59e3828abcf8ec9c930eb8a01faa00c9df"}, + {file = "cmake-3.29.5.1-py3-none-macosx_11_0_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.whl", hash = "sha256:fae935a9500e82897df18c47c9cf18c9d7dda39f85da23ab17f5e9a4224d0380"}, + {file = "cmake-3.29.5.1-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aae85f3ef965baac4fee26d4264e31184e02f53cbfa13a62479efdace5553acd"}, + {file = "cmake-3.29.5.1-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58a4b69d2677cd2694d00ab1c24b348dd11946deb3102be102bad2b9b4fd1b86"}, + {file = "cmake-3.29.5.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bdf8f0b629d1adb63d273c79a6b9a775a0f1e31cd2c030f8ae777958c5b70ae"}, + {file = "cmake-3.29.5.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33ce2cd40f77b59a61ffbc53d26474a5f9e1d51dbd4246792795cbd4dad596a4"}, + {file = "cmake-3.29.5.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ec908f111f3833891e8c6763a6532cf5d29b293c71a144ffb4bf8ca5c6fb469"}, + {file = "cmake-3.29.5.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11a8ebc34367a0070fc1bcf5e68cb84fbe9ac4b2a16c3991e08aaea7c502f97f"}, + {file = "cmake-3.29.5.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:290d100c768e2e637c8ba6054693fa18d4fd087f782f14fff0bb0c4262569a8f"}, + {file = "cmake-3.29.5.1-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:13d39eb27db7af21dfe35565f327975002e59dabcd742bc0115eff3d1d7c2364"}, + {file = "cmake-3.29.5.1-py3-none-musllinux_1_1_i686.whl", hash = "sha256:db20b4824ddc54698d778f8e1138316b6849068f5e5e116c101ded19aeb07a1d"}, + {file = "cmake-3.29.5.1-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:247629be51051d2fc8ae07e00c86e7acbfb41336a2e43b1da86ed8014ec617ac"}, + {file = "cmake-3.29.5.1-py3-none-musllinux_1_1_s390x.whl", hash = "sha256:350f2e8ae25d7cdc8ccc6d8a943c0b8df23083a32edec5b6a7d2035cfe2eb1b5"}, + {file = "cmake-3.29.5.1-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:ad86a514b116426796e6ae48cc561a5a154787acc49894d8e5ef85e7b289191e"}, + {file = "cmake-3.29.5.1-py3-none-win32.whl", hash = "sha256:137d7d5ed6539b1cb28bf67b39ee4b36738c8c4a5789fe1804a16e43993ab8de"}, + {file = "cmake-3.29.5.1-py3-none-win_amd64.whl", hash = "sha256:b4154ed968a54a9f5b2e00212297f7b6b6351b23e54c1b674ae55233e8a6f0ab"}, + 
{file = "cmake-3.29.5.1-py3-none-win_arm64.whl", hash = "sha256:e6236d54b9575104887a62117a32ebe08ad9ba364df4afe7c21794ecc36a1e8b"}, + {file = "cmake-3.29.5.1.tar.gz", hash = "sha256:dce57b4a4439b3955036ef0b6050b7f796480e5d3965b4160b6eee2ac9500891"}, ] [package.extras] @@ -832,13 +843,13 @@ tests = ["pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "dask" -version = "2024.5.2" +version = "2024.6.0" description = "Parallel PyData with Task Scheduling" optional = false python-versions = ">=3.9" files = [ - {file = "dask-2024.5.2-py3-none-any.whl", hash = "sha256:acc2cfe41d9e0151c216ac40396dbe34df13bc3d8c51dfece190349e4f2243af"}, - {file = "dask-2024.5.2.tar.gz", hash = "sha256:5c9722c44d0195e78b6e54197aa3302e6fcaaac2310fd3014560bcb86253dcb3"}, + {file = "dask-2024.6.0-py3-none-any.whl", hash = "sha256:de0ced6cd46dbc6c01120c8870457af46d667940805a4be063a74dd467466804"}, + {file = "dask-2024.6.0.tar.gz", hash = "sha256:6882ce7e485336d707e540080ed48e01f9c09485d52a2928ea05f9a9e44bb433"}, ] [package.dependencies] @@ -857,7 +868,7 @@ array = ["numpy (>=1.21)"] complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] dataframe = ["dask-expr (>=1.1,<1.2)", "dask[array]", "pandas (>=1.3)"] diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] -distributed = ["distributed (==2024.5.2)"] +distributed = ["distributed (==2024.6.0)"] test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] [[package]] @@ -1002,13 +1013,13 @@ files = [ [[package]] name = "fastjsonschema" -version = "2.19.1" +version = "2.20.0" description = "Fastest Python implementation of JSON schema" optional = true python-versions = "*" files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, + {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, + {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, ] [package.extras] @@ -1016,46 +1027,41 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "fastremap" -version = "1.14.1" +version = "1.14.2" description = "Remap, mask, renumber, unique, and in-place transposition of 3D labeled images. Point cloud too." 
optional = true -python-versions = ">=3.7,<4.0" -files = [ - {file = "fastremap-1.14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:36cf8e90dd6000c4cdefba353659f0a5bf0de23bda918edded4dca58e627994a"}, - {file = "fastremap-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0b2723cee9ffd034479cab10662245faf256396a30a129f8a24bb7468fcdeba"}, - {file = "fastremap-1.14.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccd70c1e2ec83ebd9ae8681f9610f44f8766238e3550563318e09e20b5420aec"}, - {file = "fastremap-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7562dc6ad1757615fb976a45b4dae8dd60d2058d33863e4b5779c1e9afa0896b"}, - {file = "fastremap-1.14.1-cp310-cp310-win32.whl", hash = "sha256:6f15e8387cf0278be892300b9654a4ccae88e2d086712af1c889cd689129f778"}, - {file = "fastremap-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:4333c0ddd210fada5e5544e047d544fb53c48e8e7cc6ea7d8bea633409323d1a"}, - {file = "fastremap-1.14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:90525811a7f142a75e94c356dba7759e10b49fa37deece8abac49e7fae6f53d0"}, - {file = "fastremap-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dcb8df5b4f7d5c37e436bdb55493558477a77c07d2dba46c87eeaba9667a31a"}, - {file = "fastremap-1.14.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d83c02626b9cc5da3b34abf879dc07a33c02c4ae8234e1085ed0f2114034bda"}, - {file = "fastremap-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ed8b0252419404a8fc08d5f4e1d6e6a9d4109fbb6cd0975cbe50abd4f6274d5"}, - {file = "fastremap-1.14.1-cp311-cp311-win32.whl", hash = "sha256:fd1ded92afc008166483b77881bcc537ef8fe9b373c705b2d89556a4d2ced0b0"}, - {file = "fastremap-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e856bfe126aa0447cd22142b2f8ead38500cf43b6ffe56e4e5337c236ec73003"}, - {file = "fastremap-1.14.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e8d4ddcf17d688df975e3076d108d02ddc9ecf17c7139b3efa55ee496a8e257"}, - {file = "fastremap-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:907026ab7cc365fb6d947a2f4e32e64bd086e48186ed708d85d84cd64e3b2aa5"}, - {file = "fastremap-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbced671b0d4024810c9cef04f99601a69fd4cda757dc4eae7ed1e16227035f"}, - {file = "fastremap-1.14.1-cp312-cp312-win32.whl", hash = "sha256:3318537edcee3b1b781fadd5106591e16b0b8e405e638216cfe29506fa2bc4b7"}, - {file = "fastremap-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:c047f8270fa1ed9db385e88cb1576a940ecc63e44ccbc70ec1b902dde395326f"}, - {file = "fastremap-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bedd51db0d9d93897e9e7e1deabfe9370169b5cd69e60fc99deae9e1390494d9"}, - {file = "fastremap-1.14.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ae47cbc4f7783a505e60b4c515d9985ae2b79b88a18ee6e138fe27e9bbdd2e3"}, - {file = "fastremap-1.14.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df4ae6e92dd67fbc77721e1db9631a4acfd27ea120cbf1ed6251bf91c4088ac1"}, - {file = "fastremap-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:c722c4c61054bae95e22cf82f85472025585ba8ace1386bdf905529e87a88351"}, - {file = "fastremap-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:bfd4ed7f0c436348ef815e9aecedf314c145f55328b32feea65b60b3ca43cf46"}, - {file = 
"fastremap-1.14.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:fd0605b4da0fecc5a09da9714d074948b5f729bbcfb2144b50908f0369b42fbf"}, - {file = "fastremap-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17fcf3e4abd8577df5619726f8c8bce7e407f1e435e25acd4a4442dd6a62c553"}, - {file = "fastremap-1.14.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:102849233601e0dcb92952f91ea6e54d90107c6e62642ccc6d563bb727fe70af"}, - {file = "fastremap-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c87ec9442ec47ee29d4d1ba6bdc1258d9563567ec851590508133849a38be97d"}, - {file = "fastremap-1.14.1-cp38-cp38-win32.whl", hash = "sha256:20aea5c0baac25b6532514627bdec12651062a01d903652fb6787a8551ee1f02"}, - {file = "fastremap-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:b83202dcf6b61d42b90dfcf39253d979e6200c96cd86a2672b2add7e532b2924"}, - {file = "fastremap-1.14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f2cb17ebf607d0473d487422e9d8f856de94736fb77b6a4a44adfc39e8793049"}, - {file = "fastremap-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20b53fca2dcbef0956ac9362ff1b487717532f5ad97a439876f5e2b3a1f4768b"}, - {file = "fastremap-1.14.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fdc45f2609304ceba67ff088e925570cbcd84b54b0049813badc1fb6cfc0422"}, - {file = "fastremap-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ee473b948e58df8dfe15107d99d6dcfa54c3ce0eb9ce0b0cfe98446ef0b6de"}, - {file = "fastremap-1.14.1-cp39-cp39-win32.whl", hash = "sha256:9430813cf0af429054923eedef608653a34bcdac7066568895b946eae7b69bbd"}, - {file = "fastremap-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:964aad28d612614b84403d6da1406c2c9bb84dea043aba2ec152421a88429259"}, - {file = "fastremap-1.14.1.tar.gz", hash = "sha256:067d42d6cb3b1b0789889efd1d7fae58006c82ada4a8446d40e9e838b358ee7c"}, +python-versions = "<4.0,>=3.8" +files = [ + {file = "fastremap-1.14.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb671467d52ca054661c9c60a064626e55bbfc6e39af79ee2935ad15df5fdf1b"}, + {file = "fastremap-1.14.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3b9df2ad1ace2de68c53a43e2912823fd5e27c581e4dd25aafb3087d5b26ecc"}, + {file = "fastremap-1.14.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62a953e099aa68b08b58e04f02ceccca75d6fddeab09c913c6dbfe894182bfb6"}, + {file = "fastremap-1.14.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d406a0cbd343fccaa6bfa88b2e2fe3888b060d320df4a320a90e11475af820"}, + {file = "fastremap-1.14.2-cp310-cp310-win32.whl", hash = "sha256:8000a91ca1159feacf99f872e2e59d993667e684463964d8572b2feb307c19dc"}, + {file = "fastremap-1.14.2-cp310-cp310-win_amd64.whl", hash = "sha256:6df19994e6f771d25c1b74ad2ef376781d613582d75c97d07ae4b58ad2396c06"}, + {file = "fastremap-1.14.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d24030e1a7a204df98211234588ab324cce212097df5d2a2f3ddb3e02aeb755c"}, + {file = "fastremap-1.14.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a16074bbdeaf5c66c62d80c4a464640bd8209ee7247aa014811b5772b56a7785"}, + {file = "fastremap-1.14.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc351a7e7d479f5e24791b78c45242615cf7bce7f0adbce54395311c746a014"}, + {file = "fastremap-1.14.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:de8f122071ac506b9ca22e1ecd0bb157e8ed4c479b90ad567056f2a2a4bc96b6"}, + {file = "fastremap-1.14.2-cp311-cp311-win32.whl", hash = "sha256:aa76b0edfed8ed7c457bf497f133b48181d535c63ca81be4fd2e3b0cfa359b5a"}, + {file = "fastremap-1.14.2-cp311-cp311-win_amd64.whl", hash = "sha256:a5bab4b7bb3f983fc2b01c6ff10cc56207340f4ebb495d6c556707310250ac79"}, + {file = "fastremap-1.14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4e5e6dc12aec00aeacccfc6f4f974bd2db65a6ae97eceaf8b52948b8fbbe54"}, + {file = "fastremap-1.14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d087bb86d48925fcd27d941702646aaec318be671f9ae7e61d85e09c154e18ec"}, + {file = "fastremap-1.14.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35e420ea8ba4af45abbc8df228ce67e6ce662b2d1ccfc074504eff410e994f96"}, + {file = "fastremap-1.14.2-cp312-cp312-win32.whl", hash = "sha256:a9bce3685867b809a8b6f386681b97ccf82c2ab8d5804aceee026c827e4baf37"}, + {file = "fastremap-1.14.2-cp312-cp312-win_amd64.whl", hash = "sha256:418bcccd97393f13992f516a8a72eedeb8684e1e496d0cec21ae6ee0eabb7bbe"}, + {file = "fastremap-1.14.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87f764ed78b8709fe8c7271fc1a1fd012baa8eaf5a46ffae39b7ac02e8666b16"}, + {file = "fastremap-1.14.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1984b81dc2c67c18094b9fd2e8608b3f6c6b54f5da05dfeeedd53d7c2a992dd4"}, + {file = "fastremap-1.14.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781c1fe055b30166f1c9da3187446b1477ab9cc59cc978dca5293d952d5e0c9a"}, + {file = "fastremap-1.14.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8d0c31bfffef1883030bf9d59be0bceee78affd2de4b9a9447efb6a951592f0"}, + {file = "fastremap-1.14.2-cp38-cp38-win32.whl", hash = "sha256:0978571738deaec51ecb26fea98af7d9b3e1258f91d457c5b1f59ac106007926"}, + {file = "fastremap-1.14.2-cp38-cp38-win_amd64.whl", hash = "sha256:39840888baaaa67e2ab5b85bfddb7e5cc3c85427b21cc7d4d3923ab497653f5a"}, + {file = "fastremap-1.14.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b959f5d61a84bfd00dad2e2d0b4d1a0985639fd66cb900a99654d05911605c01"}, + {file = "fastremap-1.14.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44c89c6e0c610c024a956cb4dc389ad6f36596d089319f8d7786f69b581fa57d"}, + {file = "fastremap-1.14.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:613d31c103f21169d829615cfaf99b019f2d5420988d699b00ff370244e28ac6"}, + {file = "fastremap-1.14.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a23634d5cd3183946b126dbd556a94570d4c92f0f3518ed592aa39fbfbb7544"}, + {file = "fastremap-1.14.2-cp39-cp39-win32.whl", hash = "sha256:4fa52fb69dbe27edf3dcd672e35d5fb0ca72f2fa7dd9db04bc410dadb41002ad"}, + {file = "fastremap-1.14.2-cp39-cp39-win_amd64.whl", hash = "sha256:bedacfab5abf891280c4212177b4e01dcbaba40b3eb41473210adff8a983a20a"}, + {file = "fastremap-1.14.2.tar.gz", hash = "sha256:6f6ed313a44c8214fa97576338fb4af691eb9e3f3153b121f5f345abece7b8b2"}, ] [package.dependencies] @@ -1227,13 +1233,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.45.2" +version = "0.47.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.45.2-py3-none-any.whl", hash = "sha256:297ec8530d0c68e5b98ff86fb588ebc3aa3559bb5dc21f3caea8d9542a350133"}, - {file = "griffe-0.45.2.tar.gz", hash = "sha256:83ce7dcaafd8cb7f43cbf1a455155015a1eb624b1ffd93249e5e1c4a22b2fdb2"}, + {file = "griffe-0.47.0-py3-none-any.whl", hash = "sha256:07a2fd6a8c3d21d0bbb0decf701d62042ccc8a576645c7f8799fe1f10de2b2de"}, + {file = "griffe-0.47.0.tar.gz", hash = "sha256:95119a440a3c932b13293538bdbc405bee4c36428547553dc6b327e7e7d35e5a"}, ] [package.dependencies] @@ -1731,13 +1737,13 @@ files = [ [[package]] name = "jsonpointer" -version = "2.4" +version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +python-versions = ">=3.7" files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, ] [[package]] @@ -1967,13 +1973,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.2.1" +version = "4.2.2" description = "JupyterLab computational environment" optional = true python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.2.1-py3-none-any.whl", hash = "sha256:6ac6e3827b3c890e6e549800e8a4f4aaea6a69321e2240007902aa7a0c56a8e4"}, - {file = "jupyterlab-4.2.1.tar.gz", hash = "sha256:a10fb71085a6900820c62d43324005046402ffc8f0fde696103e37238a839507"}, + {file = "jupyterlab-4.2.2-py3-none-any.whl", hash = "sha256:59ee9b839f43308c3dfd55d72d1f1a299ed42a7f91f2d1afe9c12a783f9e525f"}, + {file = "jupyterlab-4.2.2.tar.gz", hash = "sha256:a534b6a25719a92a40d514fb133a9fe8f0d9981b0bbce5d8a5fcaa33344a3038"}, ] [package.dependencies] @@ -1988,6 +1994,7 @@ jupyter-server = ">=2.4.0,<3" jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2" packaging = "*" +setuptools = ">=40.1.0" tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} tornado = ">=6.2.0" traitlets = "*" @@ -2221,43 +2228,43 @@ files = [ [[package]] name = "lit" -version = "18.1.6" +version = "18.1.7" description = "A Software Testing Tool" optional = true python-versions = "*" files = [ - {file = "lit-18.1.6-py3-none-any.whl", hash = "sha256:58fc0bb1912f7e2a692d598e34c3b1675826e25bdc8078098025fb0fa28784a9"}, - {file = "lit-18.1.6.tar.gz", hash = "sha256:70878fb0a2eee81c95898ed59605b0ee5e41565f8fd382322bca769a2bc3d4e5"}, + {file = "lit-18.1.7-py3-none-any.whl", hash = "sha256:684629e3af788bd0a61ca253a2dbb46bda8fd40c9022e3925d8ff067b67549f7"}, + {file = "lit-18.1.7.tar.gz", hash = "sha256:2ddd9be26bdcc6da03aea3ec456c6945eb5a09dbde548d3500bff9b8ed4763bb"}, ] [[package]] name = "llvmlite" -version = "0.42.0" +version = "0.43.0" description = "lightweight wrapper around basic LLVM functionality" optional = true python-versions = ">=3.9" files = [ - {file = "llvmlite-0.42.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3366938e1bf63d26c34fbfb4c8e8d2ded57d11e0567d5bb243d89aab1eb56098"}, - {file = "llvmlite-0.42.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c35da49666a21185d21b551fc3caf46a935d54d66969d32d72af109b5e7d2b6f"}, - {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70f44ccc3c6220bd23e0ba698a63ec2a7d3205da0d848804807f37fc243e3f77"}, - {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f8d8717a9073b9e0246998de89929071d15b47f254c10eef2310b9aac033d"}, - {file = "llvmlite-0.42.0-cp310-cp310-win_amd64.whl", hash = "sha256:8d90edf400b4ceb3a0e776b6c6e4656d05c7187c439587e06f86afceb66d2be5"}, - {file = "llvmlite-0.42.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ae511caed28beaf1252dbaf5f40e663f533b79ceb408c874c01754cafabb9cbf"}, - {file = "llvmlite-0.42.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81e674c2fe85576e6c4474e8c7e7aba7901ac0196e864fe7985492b737dbab65"}, - {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb3975787f13eb97629052edb5017f6c170eebc1c14a0433e8089e5db43bcce6"}, - {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5bece0cdf77f22379f19b1959ccd7aee518afa4afbd3656c6365865f84903f9"}, - {file = "llvmlite-0.42.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e0c4c11c8c2aa9b0701f91b799cb9134a6a6de51444eff5a9087fc7c1384275"}, - {file = "llvmlite-0.42.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:08fa9ab02b0d0179c688a4216b8939138266519aaa0aa94f1195a8542faedb56"}, - {file = "llvmlite-0.42.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b2fce7d355068494d1e42202c7aff25d50c462584233013eb4470c33b995e3ee"}, - {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebe66a86dc44634b59a3bc860c7b20d26d9aaffcd30364ebe8ba79161a9121f4"}, - {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d47494552559e00d81bfb836cf1c4d5a5062e54102cc5767d5aa1e77ccd2505c"}, - {file = "llvmlite-0.42.0-cp312-cp312-win_amd64.whl", hash = "sha256:05cb7e9b6ce69165ce4d1b994fbdedca0c62492e537b0cc86141b6e2c78d5888"}, - {file = "llvmlite-0.42.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdd3888544538a94d7ec99e7c62a0cdd8833609c85f0c23fcb6c5c591aec60ad"}, - {file = "llvmlite-0.42.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0936c2067a67fb8816c908d5457d63eba3e2b17e515c5fe00e5ee2bace06040"}, - {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a78ab89f1924fc11482209f6799a7a3fc74ddc80425a7a3e0e8174af0e9e2301"}, - {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7599b65c7af7abbc978dbf345712c60fd596aa5670496561cc10e8a71cebfb2"}, - {file = "llvmlite-0.42.0-cp39-cp39-win_amd64.whl", hash = "sha256:43d65cc4e206c2e902c1004dd5418417c4efa6c1d04df05c6c5675a27e8ca90e"}, - {file = "llvmlite-0.42.0.tar.gz", hash = "sha256:f92b09243c0cc3f457da8b983f67bd8e1295d0f5b3746c7a1861d7a99403854a"}, + {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, + {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, + {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, + {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, + {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, + {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, + {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, ] [[package]] @@ -2399,13 +2406,13 @@ source = ["Cython (==0.29.37)"] [[package]] name = "magicgui" -version = "0.8.2" +version = "0.8.3" description = "build GUIs from python types" optional = true python-versions = ">=3.8" files = [ - {file = "magicgui-0.8.2-py3-none-any.whl", hash = "sha256:23ad789a058e6f5b4c9f9c7b804cbb73633c2d44c5141a10b71fabcd4847ae2e"}, - {file = "magicgui-0.8.2.tar.gz", hash = "sha256:470176d4864007f42af2f1a64a1224a665b8afec14e5c8efa1e6c644b4100096"}, + {file = "magicgui-0.8.3-py3-none-any.whl", hash = "sha256:ee763f3908a344cc73e1ed0981885114937ce3077e60e4feb8148e64f0e762a3"}, + {file = "magicgui-0.8.3.tar.gz", hash = "sha256:862b02e472f4cc2081ccfb9e8e1d91ab30ce91b88f9d9ff8a44e0d27f317f4ab"}, ] [package.dependencies] @@ -2413,7 +2420,7 @@ docstring-parser = ">=0.7" psygnal = ">=0.6.1" qtpy = ">=1.7.0" superqt = {version = ">=0.6.1", extras = ["iconify"]} 
-typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6" [package.extras] dev = ["ipython", "mypy", "pdbpp", "pre-commit", "pyqt6", "rich", "ruff"] @@ -3286,24 +3293,24 @@ test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "nodeenv" -version = "1.9.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, - {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] [[package]] name = "notebook" -version = "7.2.0" +version = "7.2.1" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = true python-versions = ">=3.8" files = [ - {file = "notebook-7.2.0-py3-none-any.whl", hash = "sha256:b4752d7407d6c8872fc505df0f00d3cae46e8efb033b822adacbaa3f1f3ce8f5"}, - {file = "notebook-7.2.0.tar.gz", hash = "sha256:34a2ba4b08ad5d19ec930db7484fb79746a1784be9e1a5f8218f9af8656a141f"}, + {file = "notebook-7.2.1-py3-none-any.whl", hash = "sha256:f45489a3995746f2195a137e0773e2130960b51c9ac3ce257dbc2705aab3a6ca"}, + {file = "notebook-7.2.1.tar.gz", hash = "sha256:4287b6da59740b32173d01d641f763d292f49c30e7a51b89c46ba8473126341e"}, ] [package.dependencies] @@ -3365,37 +3372,37 @@ testing = ["jsonschema", "magicgui", "napari-plugin-engine", "napari-svg (==0.1. 
[[package]] name = "numba" -version = "0.59.1" +version = "0.60.0" description = "compiling Python code using LLVM" optional = true python-versions = ">=3.9" files = [ - {file = "numba-0.59.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97385a7f12212c4f4bc28f648720a92514bee79d7063e40ef66c2d30600fd18e"}, - {file = "numba-0.59.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b77aecf52040de2a1eb1d7e314497b9e56fba17466c80b457b971a25bb1576d"}, - {file = "numba-0.59.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3476a4f641bfd58f35ead42f4dcaf5f132569c4647c6f1360ccf18ee4cda3990"}, - {file = "numba-0.59.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:525ef3f820931bdae95ee5379c670d5c97289c6520726bc6937a4a7d4230ba24"}, - {file = "numba-0.59.1-cp310-cp310-win_amd64.whl", hash = "sha256:990e395e44d192a12105eca3083b61307db7da10e093972ca285c85bef0963d6"}, - {file = "numba-0.59.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43727e7ad20b3ec23ee4fc642f5b61845c71f75dd2825b3c234390c6d8d64051"}, - {file = "numba-0.59.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:411df625372c77959570050e861981e9d196cc1da9aa62c3d6a836b5cc338966"}, - {file = "numba-0.59.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2801003caa263d1e8497fb84829a7ecfb61738a95f62bc05693fcf1733e978e4"}, - {file = "numba-0.59.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dd2842fac03be4e5324ebbbd4d2d0c8c0fc6e0df75c09477dd45b288a0777389"}, - {file = "numba-0.59.1-cp311-cp311-win_amd64.whl", hash = "sha256:0594b3dfb369fada1f8bb2e3045cd6c61a564c62e50cf1f86b4666bc721b3450"}, - {file = "numba-0.59.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1cce206a3b92836cdf26ef39d3a3242fec25e07f020cc4feec4c4a865e340569"}, - {file = "numba-0.59.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8c8b4477763cb1fbd86a3be7050500229417bf60867c93e131fd2626edb02238"}, - {file = "numba-0.59.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d80bce4ef7e65bf895c29e3889ca75a29ee01da80266a01d34815918e365835"}, - {file = "numba-0.59.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7ad1d217773e89a9845886401eaaab0a156a90aa2f179fdc125261fd1105096"}, - {file = "numba-0.59.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bf68f4d69dd3a9f26a9b23548fa23e3bcb9042e2935257b471d2a8d3c424b7f"}, - {file = "numba-0.59.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e0318ae729de6e5dbe64c75ead1a95eb01fabfe0e2ebed81ebf0344d32db0ae"}, - {file = "numba-0.59.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f68589740a8c38bb7dc1b938b55d1145244c8353078eea23895d4f82c8b9ec1"}, - {file = "numba-0.59.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:649913a3758891c77c32e2d2a3bcbedf4a69f5fea276d11f9119677c45a422e8"}, - {file = "numba-0.59.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9712808e4545270291d76b9a264839ac878c5eb7d8b6e02c970dc0ac29bc8187"}, - {file = "numba-0.59.1-cp39-cp39-win_amd64.whl", hash = "sha256:8d51ccd7008a83105ad6a0082b6a2b70f1142dc7cfd76deb8c5a862367eb8c86"}, - {file = "numba-0.59.1.tar.gz", hash = "sha256:76f69132b96028d2774ed20415e8c528a34e3299a40581bae178f0994a2f370b"}, -] - -[package.dependencies] -llvmlite = "==0.42.*" -numpy = ">=1.22,<1.27" + {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, + {file = 
"numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, + {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, + {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, + {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, + {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, + {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, + {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, + {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, + {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, + {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, + {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, + {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, +] + +[package.dependencies] +llvmlite = "==0.43.*" +numpy = ">=1.22,<2.1" [[package]] name = "numcodecs" @@ -3663,18 +3670,18 @@ wheel = "*" [[package]] name = "opencv-python-headless" -version = "4.10.0.82" +version = "4.10.0.84" description = "Wrapper package for OpenCV python bindings." 
optional = true python-versions = ">=3.6" files = [ - {file = "opencv-python-headless-4.10.0.82.tar.gz", hash = "sha256:de9e742c1b9540816fbd115b0b03841d41ed0c65566b0d7a5371f98b131b7e6d"}, - {file = "opencv_python_headless-4.10.0.82-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:a09ed50ba21cc5bf5d436cb0e784ad09c692d6b1d1454252772f6c8f2c7b4088"}, - {file = "opencv_python_headless-4.10.0.82-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:977a5fd21e1fe0d3d2134887db4441f8725abeae95150126302f31fcd9f548fa"}, - {file = "opencv_python_headless-4.10.0.82-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4ec6755838b0be12510bfc9ffb014779c612418f11f4f7e6f505c36124a3aa"}, - {file = "opencv_python_headless-4.10.0.82-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a37fa5276967ecf6eb297295b16b28b7a2eb3b568ca0ee469fb1a5954de298"}, - {file = "opencv_python_headless-4.10.0.82-cp37-abi3-win32.whl", hash = "sha256:94736e9b322d13db4768fd35588ad5e8995e78e207263076bfbee18aac835ad5"}, - {file = "opencv_python_headless-4.10.0.82-cp37-abi3-win_amd64.whl", hash = "sha256:c1822fa23d1641c0249ed5eb906f4c385f7959ff1bd601a776d56b0c18914af4"}, + {file = "opencv-python-headless-4.10.0.84.tar.gz", hash = "sha256:f2017c6101d7c2ef8d7bc3b414c37ff7f54d64413a1847d89970b6b7069b4e1a"}, + {file = "opencv_python_headless-4.10.0.84-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:a4f4bcb07d8f8a7704d9c8564c224c8b064c63f430e95b61ac0bffaa374d330e"}, + {file = "opencv_python_headless-4.10.0.84-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:5ae454ebac0eb0a0b932e3406370aaf4212e6a3fdb5038cc86c7aea15a6851da"}, + {file = "opencv_python_headless-4.10.0.84-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46071015ff9ab40fccd8a163da0ee14ce9846349f06c6c8c0f2870856ffa45db"}, + {file = "opencv_python_headless-4.10.0.84-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:377d08a7e48a1405b5e84afcbe4798464ce7ee17081c1c23619c8b398ff18295"}, + {file = "opencv_python_headless-4.10.0.84-cp37-abi3-win32.whl", hash = "sha256:9092404b65458ed87ce932f613ffbb1106ed2c843577501e5768912360fc50ec"}, + {file = "opencv_python_headless-4.10.0.84-cp37-abi3-win_amd64.whl", hash = "sha256:afcf28bd1209dd58810d33defb622b325d3cbe49dcd7a43a902982c33e5fad05"}, ] [package.dependencies] @@ -3700,13 +3707,13 @@ files = [ [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -4038,13 +4045,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.45" +version = "3.0.47" description = "Library for building powerful interactive command lines in Python" optional = true python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.45-py3-none-any.whl", hash = "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a"}, - {file = "prompt_toolkit-3.0.45.tar.gz", hash = 
"sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089"}, + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, ] [package.dependencies] @@ -4144,13 +4151,13 @@ tests = ["pytest"] [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" description = "Python style guide checker" optional = true python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] @@ -4184,55 +4191,22 @@ files = [ [[package]] name = "pydantic" -version = "1.10.15" -description = "Data validation and settings management using python type hints" +version = "2.7.4" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, - {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, - {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, - {file = 
"pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, - {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, - {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, - {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, - {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, - {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, - {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, + {file = 
"pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, + {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.18.4" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-compat" @@ -4252,6 +4226,97 @@ pydantic = "*" dev = ["black", "ipython", "mypy", "pdbpp", "pre-commit", "pytest", "pytest-cov", "rich", "ruff"] test = ["pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "pydantic-core" +version = "2.18.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, + {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, + {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, + {file = 
"pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, + {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, + {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, + {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, + {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, + {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, + {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, + {file = 
"pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, + {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, + {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, 
+ {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, + {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, + {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pygments" version = "2.18.0" @@ -5299,13 +5364,13 @@ toolz = "*" [[package]] name = "superqt" -version = "0.6.6" +version = "0.6.7" description = "Missing widgets and components for PyQt/PySide" optional = true python-versions = ">=3.8" files = [ - {file = "superqt-0.6.6-py3-none-any.whl", hash = "sha256:636feb8ef587fe82c87bb979dc896855c9ad6fbded9fcb0b94c896d163851bb8"}, - {file = 
"superqt-0.6.6.tar.gz", hash = "sha256:792e09165c8a788ee245bdb784e018f9077fb309253354d86793cdf1d092f99f"}, + {file = "superqt-0.6.7-py3-none-any.whl", hash = "sha256:4300a0e38c4dd36ae06d1e33dc75e33d238252251c43311cc45da52888476223"}, + {file = "superqt-0.6.7.tar.gz", hash = "sha256:af80d5687ec75df6c3e54119ee895e42f79742ed326684c79425414b5c20f1e3"}, ] [package.dependencies] @@ -5517,7 +5582,7 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "tornado" version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, @@ -5635,13 +5700,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.1" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.1-py3-none-any.whl", hash = "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, - {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -5660,13 +5725,13 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -5697,37 +5762,32 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "vispy" -version = "0.14.2" +version = "0.14.3" description = "Interactive visualization in Python" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "vispy-0.14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be9cafc28da544692e4377168448b0901c06eef62057deeab861e7c38fe5f042"}, - {file = "vispy-0.14.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bed0fefc54c0b63cbc326b4fca043cbb490a329a14edb2e24f5d714afc56d8e2"}, - {file = "vispy-0.14.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b178cd8921c02de0405a5c4af7a4d48185849810bccc30d404ac4bac0f36846"}, - {file = "vispy-0.14.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2db2a4487ede8356eea4c29a189b345ca7f904bb507d0193356e8c3924a85ac8"}, - {file = "vispy-0.14.2-cp310-cp310-win_amd64.whl", hash = "sha256:0738c0b606939978309280c5528dc08db368d6b3a88e8eee532e5df9050d98cb"}, - {file = "vispy-0.14.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:15bb4e80e533651b9f9132839a62cabf053988f2a87f066d492bdba6a807a3f0"}, - {file = "vispy-0.14.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3230929b11e2515f59396f8972dd0864e6d4133297a29b30581173a59b8fc30"}, - {file = "vispy-0.14.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:492eba503d1831fd55f16c8d17a543a3f811c2acee22fb212b9402622685a165"}, - {file = "vispy-0.14.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2a0b26219e6b307b039a925da00def4926b9255adf88fd24beeb3301e120e6"}, - {file = "vispy-0.14.2-cp311-cp311-win_amd64.whl", hash = "sha256:8bf14394d0cd612751e8df29ac530538185ae963b0969a19fc648da04413c71a"}, - {file = "vispy-0.14.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b93616278cbcfb5a529c06d278085cf57d93ce57dc063078998564fbcca3a57"}, - {file = "vispy-0.14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f6af62ff8f5c3723bf15fddd8deb0057eb750e665af5698920c46e2351997df8"}, - {file = "vispy-0.14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50eeca053fdcd7f2f1f93204f327116018269453ef99a092f79308eab76ddd3"}, - {file = "vispy-0.14.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b151f038b09829eddeffd1138e10a5cf98cdd3ef5f76427abd04718c33e0492"}, - {file = "vispy-0.14.2-cp312-cp312-win_amd64.whl", hash = "sha256:6f493f37db6130ae2a60fb97ad714429dd4b4fa8551883a3a6aa374efab7e04f"}, - {file = "vispy-0.14.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e389673720aaff3ef647c9bbf15ebb0d50cfb7d959b59a321056087eec8ab7de"}, - {file = "vispy-0.14.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:721e076169075af80ae000b691a7d8b568a297deb9c3b781f6840b8e60c9514e"}, - {file = "vispy-0.14.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:95f1e6463ffc8aca6fdb4101cb65196b2725ca9f677a267acf2c675c660d12dd"}, - {file = "vispy-0.14.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9951832b2bc1f964d9fc916c207f7771357ca34747863cfbd4a7a34cbed76550"}, - {file = "vispy-0.14.2-cp38-cp38-win_amd64.whl", hash = "sha256:6d944ccd0d7fb1b8fa694781cb036cb1011853e6d3e1038f5b4da4d0094ed9a1"}, - {file = "vispy-0.14.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:deb724e8af31d3d6bd135b88cf7a17fc457af02a27796fcade9a14b9747c36c0"}, - {file = "vispy-0.14.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:98405adc58b9fb119dceb7c6606b05304cf1e21826f7877e19c43c750b03386b"}, - {file = "vispy-0.14.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ab02e56e655a0e53c60f2b3b4fbc87361fbd6126d28fc9ad11e32313eab9a3"}, - {file = "vispy-0.14.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:679d151bd767f9b04d5d8cb73caf46f5ffbd73437ac707e1ed703172e7496fcd"}, - {file = "vispy-0.14.2-cp39-cp39-win_amd64.whl", hash = "sha256:cf5882d996e31c94d67a678ffa41575c14c23cba856baf2f048a4bf5c2bbaa37"}, - {file = "vispy-0.14.2.tar.gz", hash = "sha256:eed8b44d6f5c87bd295b24aa9a2e0e4e6dc6a905ccee01b2d41c8fbf0a767b3d"}, + {file = "vispy-0.14.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df35d09a5638392875e605008e3efaebc91238d169bda1fadd74851eb0762cbc"}, + {file = "vispy-0.14.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69f32f914bbb42c029e9eead272418a3a29c3d52d413a479c8ba32eab34ccab8"}, + {file = "vispy-0.14.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b244bdfb70aebf1d8d926cd16408fd32bb204a5e1aa55813368f75f90c09389"}, + {file = "vispy-0.14.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bdc2bdf1b7aef27ca1a744ea7de7333e81e5ec4dc6bb532977fef8fed703cf8"}, + {file = "vispy-0.14.3-cp310-cp310-win_amd64.whl", hash = "sha256:f211440444edea428c9d0ffb70447e945015071efb3332408c6335b07c47574e"}, + {file = "vispy-0.14.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fe3ae49fbc6fd7f53fa34a5bbe693eb7fb6b69316fb7fe60c5e2d352afafe278"}, + {file = "vispy-0.14.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f474f415d280e5ed71f5a513c4d42d59049710b11f144fa85c312fd639c08a9b"}, + {file = "vispy-0.14.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af4863e7ba8ec4985ab8772d86b11dc71b3ab20f29c7e044fb35a1a009da5f98"}, + {file = "vispy-0.14.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66bcb62a004bb97544fd14b9035c8194d8074a8dbc3eea6a6f9a3a9f5fc1ff08"}, + {file = "vispy-0.14.3-cp311-cp311-win_amd64.whl", hash = "sha256:12d8e23ffb865e6d491d71cbf0dc54f53ca41b9167f5de99cdb08921a111f585"}, + {file = "vispy-0.14.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f624b58c9a62e68aeb279678f9ae042cf875c24f650b042e2a7005fde9f2f3e2"}, + {file = "vispy-0.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31b1fdd1e1924ca04fce250fb958412fbcefe4f1e4e6fffa12eb4040c00b0963"}, + {file = "vispy-0.14.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca7aebb4280e3754ae60c673dafb2f5acc26d6182761215281b07e696962e013"}, + {file = "vispy-0.14.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a90896898b10b31760634a955031dc048fda41fd6e21ee4ff3e12ebf16970b09"}, + {file = "vispy-0.14.3-cp312-cp312-win_amd64.whl", hash = "sha256:2b39304dae410fde21723cdcf50cae71ba611479f01cb8e30116493ce318fcab"}, + {file = 
"vispy-0.14.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3a7e467e07b5e1be38233c70dab81de43bc3a9221a8fc309dec1b084d5676abf"}, + {file = "vispy-0.14.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fef00a20b1e040b70d869f0e4aea7e4e301a82a97bd2a5253730ef1b9664d21"}, + {file = "vispy-0.14.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d95977c3f6b7a42761e6e5349a27846f00e17d83891c3d8a19da12115f0e2a"}, + {file = "vispy-0.14.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da3a3b05a72916e9cb39b013e67d7608db32e47943ede7adb85b7d5e085ee015"}, + {file = "vispy-0.14.3-cp39-cp39-win_amd64.whl", hash = "sha256:9778390a5df31bf19e1cec15ba477f24720708d3d6febe678e4e7ca82e031f84"}, + {file = "vispy-0.14.3.tar.gz", hash = "sha256:efbbb847a908baf7e7169ab9bf296138a39364f367e6cb0a8ec03ad71699d31d"}, ] [package.dependencies] @@ -5739,6 +5799,7 @@ packaging = "*" [package.extras] doc = ["myst-parser", "numpydoc", "pillow", "pydata-sphinx-theme", "pyopengl", "pytest", "sphinx-gallery", "sphinxcontrib-apidoc"] +glfw = ["glfw"] io = ["Pillow", "meshio"] ipython-static = ["ipython"] pyglet = ["pyglet (>=1.2)"] @@ -5808,18 +5869,18 @@ files = [ [[package]] name = "webcolors" -version = "1.13" +version = "24.6.0" description = "A library for working with the color formats defined by HTML and CSS." optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, - {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, + {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"}, + {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"}, ] [package.extras] docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] +tests = ["coverage[toml]"] [[package]] name = "webencodings" @@ -5989,18 +6050,18 @@ jupyter = ["ipytree (>=0.2.2)", "ipywidgets (>=8.0.0)", "notebook"] [[package]] name = "zipp" -version = "3.19.1" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, - {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", 
"pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] fractal-tasks = ["Pillow", "cellpose", "imageio-ffmpeg", "napari-segment-blobs-and-things-with-membranes", "napari-skimage-regionprops", "napari-tools-menu", "napari-workflows", "scikit-image", "torch"] @@ -6008,4 +6069,4 @@ fractal-tasks = ["Pillow", "cellpose", "imageio-ffmpeg", "napari-segment-blobs-a [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "87d9d741e2ebb500d19a7188d654d9551739fec26c2446aa2bc7b060b60e1765" +content-hash = "223fdf04096424ba29e3f8f91564aacdbc7009fbbaf24365351532dda71f6c0d" From 57c8684c02bd0baf82bf3beed70a5a0e92114b8e Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Tue, 18 Jun 2024 13:19:26 +0200 Subject: [PATCH 11/15] update changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 04683570a..2317f5886 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ # 1.0.3 (unreleased) -* Switch to transitional pydantic.v1 imports and relax pydantic requirement to `1.10.16 || >=2.6.3' (\#760). +* Switch to transitional pydantic.v1 imports, changes pydantic requirement to `>=1.10.16' (\#760). * Support JSON-Schema generation for `Enum` task arguments (\#749). * Make JSON-Schema generation tools more flexible, to simplify testing (\#749). * Update documentation (\#751). From 87deb993dce6529ea2cf95b58da43ab746a49f25 Mon Sep 17 00:00:00 2001 From: lorenzocerrone Date: Tue, 18 Jun 2024 13:31:52 +0200 Subject: [PATCH 12/15] fix pre-commit --- .../tasks/cellpose_segmentation.py | 71 ++++++++++++++----- 1 file changed, 55 insertions(+), 16 deletions(-) diff --git a/fractal_tasks_core/tasks/cellpose_segmentation.py b/fractal_tasks_core/tasks/cellpose_segmentation.py index 2240fd083..637ee32ed 100644 --- a/fractal_tasks_core/tasks/cellpose_segmentation.py +++ b/fractal_tasks_core/tasks/cellpose_segmentation.py @@ -174,7 +174,9 @@ def cellpose_segmentation( # Core parameters level: int, channel: CellposeChannel1InputModel, - channel2: CellposeChannel2InputModel = Field(default_factory=CellposeChannel2InputModel), + channel2: CellposeChannel2InputModel = Field( + default_factory=CellposeChannel2InputModel + ), input_ROI_table: str = "FOV_ROI_table", output_ROI_table: Optional[str] = None, output_label_name: Optional[str] = None, @@ -185,7 +187,9 @@ def cellpose_segmentation( pretrained_model: Optional[str] = None, relabeling: bool = True, use_masks: bool = True, - advanced_cellpose_model_params: CellposeModelParams = Field(default_factory=CellposeModelParams), + advanced_cellpose_model_params: CellposeModelParams = Field( + default_factory=CellposeModelParams + ), overwrite: bool = True, ) -> None: """ @@ -254,8 +258,13 @@ def cellpose_segmentation( actual_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=level) logger.info(f"NGFF image has {num_levels=}") logger.info(f"NGFF image has {coarsening_xy=}") - logger.info(f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}") - logger.info(f"NGFF image has level-{level} pixel sizes " f"{actual_res_pxl_sizes_zyx}") + logger.info( + f"NGFF image has full-res pixel sizes {full_res_pxl_sizes_zyx}" + ) + logger.info( + f"NGFF image has level-{level} pixel sizes " + f"{actual_res_pxl_sizes_zyx}" + ) # Find channel index omero_channel = channel.get_omero_channel(zarr_url) @@ -292,9 +301,14 @@ def cellpose_segmentation( ROI_table = ad.read_zarr(ROI_table_path) # Perform some checks on the ROI table - valid_ROI_table = is_ROI_table_valid(table_path=ROI_table_path, 
use_masks=use_masks) + valid_ROI_table = is_ROI_table_valid( + table_path=ROI_table_path, use_masks=use_masks + ) if use_masks and not valid_ROI_table: - logger.info(f"ROI table at {ROI_table_path} cannot be used for masked " "loading. Set use_masks=False.") + logger.info( + f"ROI table at {ROI_table_path} cannot be used for masked " + "loading. Set use_masks=False." + ) use_masks = False logger.info(f"{use_masks=}") @@ -321,7 +335,9 @@ def cellpose_segmentation( if do_3D: if advanced_cellpose_model_params.anisotropy is None: # Compute anisotropy as pixel_size_z/pixel_size_x - advanced_cellpose_model_params.anisotropy = actual_res_pxl_sizes_zyx[0] / actual_res_pxl_sizes_zyx[2] + advanced_cellpose_model_params.anisotropy = ( + actual_res_pxl_sizes_zyx[0] / actual_res_pxl_sizes_zyx[2] + ) logger.info(f"Anisotropy: {advanced_cellpose_model_params.anisotropy}") # Rescale datasets (only relevant for level>0) @@ -347,7 +363,11 @@ def cellpose_segmentation( { "name": output_label_name, "version": __OME_NGFF_VERSION__, - "axes": [ax.dict() for ax in ngff_image_meta.multiscale.axes if ax.type != "channel"], + "axes": [ + ax.dict() + for ax in ngff_image_meta.multiscale.axes + if ax.type != "channel" + ], "datasets": new_datasets, } ], @@ -362,7 +382,9 @@ def cellpose_segmentation( logger=logger, ) - logger.info(f"Helper function `prepare_label_group` returned {label_group=}") + logger.info( + f"Helper function `prepare_label_group` returned {label_group=}" + ) logger.info(f"Output label path: {zarr_url}/labels/{output_label_name}/0") store = zarr.storage.FSStore(f"{zarr_url}/labels/{output_label_name}/0") label_dtype = np.uint32 @@ -384,12 +406,17 @@ def cellpose_segmentation( dimension_separator="/", ) - logger.info(f"mask will have shape {data_zyx.shape} " f"and chunks {data_zyx.chunks}") + logger.info( + f"mask will have shape {data_zyx.shape} " + f"and chunks {data_zyx.chunks}" + ) # Initialize cellpose gpu = advanced_cellpose_model_params.use_gpu and cellpose.core.use_gpu() if pretrained_model: - model = models.CellposeModel(gpu=gpu, pretrained_model=pretrained_model) + model = models.CellposeModel( + gpu=gpu, pretrained_model=pretrained_model + ) else: model = models.CellposeModel(gpu=gpu, model_type=model_type) @@ -500,7 +527,9 @@ def cellpose_segmentation( # Check that total number of labels is under control if num_labels_tot > np.iinfo(label_dtype).max: raise ValueError( - "ERROR in re-labeling:" f"Reached {num_labels_tot} labels, " f"but dtype={label_dtype}" + "ERROR in re-labeling:" + f"Reached {num_labels_tot} labels, " + f"but dtype={label_dtype}" ) if output_ROI_table: @@ -514,9 +543,13 @@ def cellpose_segmentation( overlap_list = [] for df in bbox_dataframe_list: - overlap_list.extend(get_overlapping_pairs_3D(df, full_res_pxl_sizes_zyx)) + overlap_list.extend( + get_overlapping_pairs_3D(df, full_res_pxl_sizes_zyx) + ) if len(overlap_list) > 0: - logger.warning(f"{len(overlap_list)} bounding-box pairs overlap") + logger.warning( + f"{len(overlap_list)} bounding-box pairs overlap" + ) # Compute and store 0-th level to disk da.array(new_label_img).to_zarr( @@ -525,7 +558,10 @@ def cellpose_segmentation( compute=True, ) - logger.info(f"End cellpose_segmentation task for {zarr_url}, " "now building pyramids.") + logger.info( + f"End cellpose_segmentation task for {zarr_url}, " + "now building pyramids." 
+    )

     # Starting from on-disk highest-resolution data, build and write to disk a
     # pyramid of coarser levels
@@ -560,7 +596,10 @@ def cellpose_segmentation(

     # Write to zarr group
     image_group = zarr.group(zarr_url)
-    logger.info("Now writing bounding-box ROI table to " f"{zarr_url}/tables/{output_ROI_table}")
+    logger.info(
+        "Now writing bounding-box ROI table to "
+        f"{zarr_url}/tables/{output_ROI_table}"
+    )
     table_attrs = {
         "type": "masking_roi_table",
         "region": {"path": f"../labels/{output_label_name}"},

From 5c152a8931cb5754789d93de9a3dc2e2ccc0eb32 Mon Sep 17 00:00:00 2001
From: lorenzocerrone
Date: Tue, 18 Jun 2024 13:36:49 +0200
Subject: [PATCH 13/15] fix missing v1 refactor from incoming #738

---
 fractal_tasks_core/tasks/cellpose_utils.py | 56 ++++++----------------
 1 file changed, 15 insertions(+), 41 deletions(-)

diff --git a/fractal_tasks_core/tasks/cellpose_utils.py b/fractal_tasks_core/tasks/cellpose_utils.py
index b764b0882..d63f628fb 100644
--- a/fractal_tasks_core/tasks/cellpose_utils.py
+++ b/fractal_tasks_core/tasks/cellpose_utils.py
@@ -16,10 +16,10 @@
 from typing import Optional

 import numpy as np
-from pydantic import BaseModel
-from pydantic import Field
-from pydantic import root_validator
-from pydantic import validator
+from pydantic.v1 import BaseModel
+from pydantic.v1 import Field
+from pydantic.v1 import root_validator
+from pydantic.v1 import validator

 from fractal_tasks_core.channels import ChannelInputModel
 from fractal_tasks_core.channels import ChannelNotFoundError
@@ -82,25 +82,13 @@ def validate_conditions(cls, values):
         # Verify that custom parameters are only provided when type="custom"
         if type != "custom":
             if lower_percentile is not None:
-                raise ValueError(
-                    f"Type='{type}' but {lower_percentile=}. "
-                    "Hint: set type='custom'."
-                )
+                raise ValueError(f"Type='{type}' but {lower_percentile=}. " "Hint: set type='custom'.")
             if upper_percentile is not None:
-                raise ValueError(
-                    f"Type='{type}' but {upper_percentile=}. "
-                    "Hint: set type='custom'."
-                )
+                raise ValueError(f"Type='{type}' but {upper_percentile=}. " "Hint: set type='custom'.")
             if lower_bound is not None:
-                raise ValueError(
-                    f"Type='{type}' but {lower_bound=}. "
-                    "Hint: set type='custom'."
-                )
+                raise ValueError(f"Type='{type}' but {lower_bound=}. " "Hint: set type='custom'.")
             if upper_bound is not None:
-                raise ValueError(
-                    f"Type='{type}' but {upper_bound=}. "
-                    "Hint: set type='custom'."
-                )
+                raise ValueError(f"Type='{type}' but {upper_bound=}. " "Hint: set type='custom'.")

         # The only valid options are:
         # 1. Both percentiles are set and both bounds are unset
@@ -114,14 +102,9 @@ def validate_conditions(cls, values):
             upper_bound is not None,
         )
         if len(set(are_percentiles_set)) != 1:
-            raise ValueError(
-                "Both lower_percentile and upper_percentile must be set "
-                "together."
-            )
+            raise ValueError("Both lower_percentile and upper_percentile must be set " "together.")
         if len(set(are_bounds_set)) != 1:
-            raise ValueError(
-                "Both lower_bound and upper_bound must be set together"
-            )
+            raise ValueError("Both lower_bound and upper_bound must be set together")
         if lower_percentile is not None and lower_bound is not None:
             raise ValueError(
                 "You cannot set both explicit bounds and percentile bounds "
@@ -213,9 +196,7 @@ class CellposeChannel1InputModel(ChannelInputModel):
         Cellpose models
     """

-    normalize: CellposeCustomNormalizer = Field(
-        default_factory=CellposeCustomNormalizer
-    )
+    normalize: CellposeCustomNormalizer = Field(default_factory=CellposeCustomNormalizer)

     def get_omero_channel(self, zarr_url) -> OmeroChannel:
         try:
@@ -252,9 +233,7 @@ class CellposeChannel2InputModel(BaseModel):
     wavelength_id: Optional[str] = None
     label: Optional[str] = None
-    normalize: CellposeCustomNormalizer = Field(
-        default_factory=CellposeCustomNormalizer
-    )
+    normalize: CellposeCustomNormalizer = Field(default_factory=CellposeCustomNormalizer)

     @validator("label", always=True)
     def mutually_exclusive_channel_attributes(cls, v, values):
@@ -265,8 +244,7 @@ def mutually_exclusive_channel_attributes(cls, v, values):
         label = v
         if wavelength_id and v:
             raise ValueError(
-                "`wavelength_id` and `label` cannot be both set "
-                f"(given {wavelength_id=} and {label=})."
+                "`wavelength_id` and `label` cannot be both set " f"(given {wavelength_id=} and {label=})."
             )
         return v
@@ -415,18 +393,14 @@ def normalized_img(
             i99 = np.percentile(img[k], upper_p)
             i1 = np.percentile(img[k], lower_p)
             if i99 - i1 > +1e-3:  # np.ptp(img[k]) > 1e-3:
-                img[k] = normalize_percentile(
-                    img[k], lower=lower_p, upper=upper_p
-                )
+                img[k] = normalize_percentile(img[k], lower=lower_p, upper=upper_p)
                 if invert:
                     img[k] = -1 * img[k] + 1
             else:
                 img[k] = 0
         elif lower_bound is not None:
             if upper_bound - lower_bound > +1e-3:
-                img[k] = normalize_bounds(
-                    img[k], lower=lower_bound, upper=upper_bound
-                )
+                img[k] = normalize_bounds(img[k], lower=lower_bound, upper=upper_bound)
                 if invert:
                     img[k] = -1 * img[k] + 1
             else:

From ac8a453d19d13b180f27eee4aa0eeef0ea4c7f5f Mon Sep 17 00:00:00 2001
From: lorenzocerrone
Date: Tue, 18 Jun 2024 13:39:16 +0200
Subject: [PATCH 14/15] fix pre-commit

---
 fractal_tasks_core/tasks/cellpose_utils.py | 48 +++++++++++++++++-----
 1 file changed, 37 insertions(+), 11 deletions(-)

diff --git a/fractal_tasks_core/tasks/cellpose_utils.py b/fractal_tasks_core/tasks/cellpose_utils.py
index d63f628fb..42726496a 100644
--- a/fractal_tasks_core/tasks/cellpose_utils.py
+++ b/fractal_tasks_core/tasks/cellpose_utils.py
@@ -82,13 +82,25 @@ def validate_conditions(cls, values):
         # Verify that custom parameters are only provided when type="custom"
         if type != "custom":
             if lower_percentile is not None:
-                raise ValueError(f"Type='{type}' but {lower_percentile=}. " "Hint: set type='custom'.")
+                raise ValueError(
+                    f"Type='{type}' but {lower_percentile=}. "
+                    "Hint: set type='custom'."
+                )
             if upper_percentile is not None:
-                raise ValueError(f"Type='{type}' but {upper_percentile=}. " "Hint: set type='custom'.")
+                raise ValueError(
+                    f"Type='{type}' but {upper_percentile=}. "
+                    "Hint: set type='custom'."
+                )
             if lower_bound is not None:
-                raise ValueError(f"Type='{type}' but {lower_bound=}. " "Hint: set type='custom'.")
+                raise ValueError(
+                    f"Type='{type}' but {lower_bound=}. "
+                    "Hint: set type='custom'."
+                )
             if upper_bound is not None:
-                raise ValueError(f"Type='{type}' but {upper_bound=}. " "Hint: set type='custom'.")
+                raise ValueError(
+                    f"Type='{type}' but {upper_bound=}. "
+                    "Hint: set type='custom'."
+                )

         # The only valid options are:
         # 1. Both percentiles are set and both bounds are unset
@@ -102,9 +114,14 @@ def validate_conditions(cls, values):
             upper_bound is not None,
         )
         if len(set(are_percentiles_set)) != 1:
-            raise ValueError("Both lower_percentile and upper_percentile must be set " "together.")
+            raise ValueError(
+                "Both lower_percentile and upper_percentile must be set "
+                "together."
+            )
         if len(set(are_bounds_set)) != 1:
-            raise ValueError("Both lower_bound and upper_bound must be set together")
+            raise ValueError(
+                "Both lower_bound and upper_bound must be set together"
+            )
         if lower_percentile is not None and lower_bound is not None:
             raise ValueError(
                 "You cannot set both explicit bounds and percentile bounds "
@@ -196,7 +213,9 @@ class CellposeChannel1InputModel(ChannelInputModel):
         Cellpose models
     """

-    normalize: CellposeCustomNormalizer = Field(default_factory=CellposeCustomNormalizer)
+    normalize: CellposeCustomNormalizer = Field(
+        default_factory=CellposeCustomNormalizer
+    )

     def get_omero_channel(self, zarr_url) -> OmeroChannel:
         try:
@@ -233,7 +252,9 @@ class CellposeChannel2InputModel(BaseModel):
     wavelength_id: Optional[str] = None
     label: Optional[str] = None
-    normalize: CellposeCustomNormalizer = Field(default_factory=CellposeCustomNormalizer)
+    normalize: CellposeCustomNormalizer = Field(
+        default_factory=CellposeCustomNormalizer
+    )

     @validator("label", always=True)
     def mutually_exclusive_channel_attributes(cls, v, values):
@@ -244,7 +265,8 @@ def mutually_exclusive_channel_attributes(cls, v, values):
         label = v
         if wavelength_id and v:
             raise ValueError(
-                "`wavelength_id` and `label` cannot be both set " f"(given {wavelength_id=} and {label=})."
+                "`wavelength_id` and `label` cannot be both set "
+                f"(given {wavelength_id=} and {label=})."
             )
         return v
@@ -393,14 +415,18 @@ def normalized_img(
             i99 = np.percentile(img[k], upper_p)
             i1 = np.percentile(img[k], lower_p)
             if i99 - i1 > +1e-3:  # np.ptp(img[k]) > 1e-3:
-                img[k] = normalize_percentile(img[k], lower=lower_p, upper=upper_p)
+                img[k] = normalize_percentile(
+                    img[k], lower=lower_p, upper=upper_p
+                )
                 if invert:
                     img[k] = -1 * img[k] + 1
             else:
                 img[k] = 0
         elif lower_bound is not None:
             if upper_bound - lower_bound > +1e-3:
-                img[k] = normalize_bounds(img[k], lower=lower_bound, upper=upper_bound)
+                img[k] = normalize_bounds(
+                    img[k], lower=lower_bound, upper=upper_bound
+                )
                 if invert:
                     img[k] = -1 * img[k] + 1
             else:

From 600be9a9df9c8d17ed6891ac5909be2d6c15035b Mon Sep 17 00:00:00 2001
From: lorenzocerrone
Date: Tue, 18 Jun 2024 16:16:56 +0200
Subject: [PATCH 15/15] update changelog

---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8afb7af2c..6f822cbdc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,7 +8,7 @@
 * Refactor Cellpose Task inputs: Support independent normalization of 2 input channels in the Cellpose task (\#738).
 * Rename `task.cellpose_transforms` into `tasks.cellpose_utils` (\#738).
 * Development:
-    * Switch to transitional pydantic.v1 imports, changes pydantic requirement to `>=1.10.16' (\#760).
+    * Switch to transitional pydantic.v1 imports, changes pydantic requirement to `==1.10.16` or `>=2.6.3` (\#760).
     * Support JSON-Schema generation for `Enum` task arguments (\#749).
     * Make JSON-Schema generation tools more flexible, to simplify testing (\#749).
     * Update documentation (\#751).