Merge pull request #821 from NeurodataWithoutBorders/backend-configuration-uniform

Uniform Conversion with Backend Configuration
CodyCBakerPhD authored Jun 4, 2024
2 parents 336e040 + 8060c4d commit 0024572
Showing 3 changed files with 87 additions and 109 deletions.
@@ -19,8 +19,6 @@ import { getResourceUsageBytes } from "../../../../validation/backend-configurat
import { resolveBackendResults, updateSchema } from "../../../../../../../schemas/backend-configuration.schema";
import { getInfoFromId } from "./utils.js";

const getBackendConfigurations = (info, options = {}) => run(`neuroconv/configuration`, info, options);

const itemIgnore = {
full_shape: true,
buffer_shape: true,
@@ -39,6 +37,12 @@ export class GuidedBackendConfigurationPage extends ManagedPage {
this.style.height = "100%"; // Fix main section
}

getBackendConfigurations = (info, options = {}) =>
run(`neuroconv/configuration`, info, options).catch((e) => {
this.notify(e.message, "error");
throw e;
});

beforeSave = () => {
merge(this.localState, this.info.globalState);
};
@@ -227,13 +231,18 @@ export class GuidedBackendConfigurationPage extends ManagedPage {
{
title: "Getting backend options",
},
getBackendConfigurations
this.getBackendConfigurations
);
};

validate = (toRun) => {
if (!toRun)
return this.runConversions({}, true, { title: "Validating backend options" }, getBackendConfigurations);
return this.runConversions(
{},
true,
{ title: "Validating backend options" },
this.getBackendConfigurations
);

const { subject, session } = toRun;
return this.runConversions(
Expand All @@ -243,7 +252,7 @@ export class GuidedBackendConfigurationPage extends ManagedPage {
title: "Validating backend options",
showCancelButton: false,
},
getBackendConfigurations
this.getBackendConfigurations
);
};

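The frontend change above replaces the module-level `getBackendConfigurations` helper with an instance method so that a failed `neuroconv/configuration` request is surfaced to the user via `this.notify` before it propagates to the caller. Below is a minimal Python transcription of that notify-and-rethrow pattern (Python being the language of the backend code later in this diff); `PageSketch`, `notify`, and `run_request` are illustrative stand-ins, not GUIDE APIs:

```python
class PageSketch:
    """Illustrative stand-in for the GUIDE page object, not the real class."""

    def notify(self, message: str, kind: str) -> None:
        print(f"[{kind}] {message}")  # the real page renders a UI notification

    def run_request(self, endpoint: str, info: dict, options: dict) -> dict:
        raise RuntimeError("backend unavailable")  # simulate a failed request

    def get_backend_configurations(self, info: dict, options: dict | None = None) -> dict:
        try:
            return self.run_request("neuroconv/configuration", info, options or {})
        except Exception as error:
            self.notify(str(error), "error")  # surface the failure to the user
            raise  # re-raise so callers can still abort the conversion flow
```

Binding the helper to the class is what gives it access to `notify`, which is why the diff converts the free function into a property of `GuidedBackendConfigurationPage`.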
1 change: 1 addition & 0 deletions src/pyflask/app.py
@@ -115,6 +115,7 @@ def post(self):
message += f"\n{traceback}\n"

selected_logger = getattr(api.logger, type)
api.logger.info(f"Logging {type} message: {header}")
selected_logger(message)


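The one-line addition to app.py logs the message header before dispatching to a level-specific logger that is resolved dynamically with `getattr`. Here is a self-contained sketch of that dispatch using the standard-library logger (the real code goes through the Flask-RESTX `api.logger`); the parameter is named `type` to mirror the diff even though it shadows the builtin:

```python
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("api")

def log_message(type: str, header: str, message: str) -> None:
    # Resolve the level name ("info", "warning", "error", ...) to the
    # matching bound method on the logger, as app.py does with api.logger.
    selected_logger = getattr(logger, type)
    logger.info(f"Logging {type} message: {header}")
    selected_logger(message)

log_message("error", "Conversion failed", "Traceback: ...")
```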
176 changes: 72 additions & 104 deletions src/pyflask/manageNeuroconv/manage_neuroconv.py
@@ -827,58 +827,27 @@ def get_interface_alignment(info: dict) -> dict:
)


def configure_dataset_backends(nwbfile, backend_configuration, configuration=None):
from neuroconv.tools.nwb_helpers import (
configure_backend,
get_default_backend_configuration,
)

PROPS_TO_AVOID = ["full_shape"]

# Default to HDF5 backend configuration
if configuration is None:
configuration = get_default_backend_configuration(nwbfile=nwbfile, backend="hdf5")

# Ensure the configuration is a dictionary
elif isinstance(configuration, str):
configuration = get_default_backend_configuration(nwbfile=nwbfile, backend=configuration)

for name, item in backend_configuration.items():
for key, value in item.items():

# Avoid setting compression options if unspecified
if key == "compression_options" and (value is None or len(value) == 0):
setattr(configuration.dataset_configurations[name], key, None)

# Avoid certain properties passed to the GUIDE
elif key not in PROPS_TO_AVOID:
setattr(configuration.dataset_configurations[name], key, value)

configure_backend(nwbfile=nwbfile, backend_configuration=configuration)


def create_file(
info: dict,
log_url: Optional[str] = None,
) -> dict:

import requests
from neuroconv.tools.nwb_helpers import (
get_default_backend_configuration,
make_or_load_nwbfile,
)
from tqdm_publisher import TQDMProgressSubscriber

project_name = info.get("project_name")

run_stub_test = info.get("stub_test", False)
backend_configuration = info.get("configuration")

overwrite = info.get("overwrite", False)

# Progress update info
url = info.get("url")
request_id = info.get("request_id")

will_configure_backend = backend_configuration is not None and run_stub_test is False
# Backend configuration info
backend_configuration = info.get("configuration")
backend = backend_configuration.get("backend", "hdf5")

converter, metadata, path_info = get_conversion_info(info)
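The removed `configure_dataset_backends` helper resolved a default configuration and applied the frontend's overrides to an already-built NWB file by calling neuroconv's `configure_backend` directly; after this PR that resolution lives in `update_backend_configuration` (later in this file) and the result is handed to `run_conversion` instead. A condensed sketch of the retired approach for contrast, assuming an in-memory `nwbfile`:

```python
from neuroconv.tools.nwb_helpers import (
    configure_backend,
    get_default_backend_configuration,
)

def apply_overrides_in_place(nwbfile, overrides: dict, backend: str = "hdf5") -> None:
    # Start from neuroconv's defaults, then overwrite per-dataset values.
    configuration = get_default_backend_configuration(nwbfile=nwbfile, backend=backend)
    for name, item in overrides.items():
        for key, value in item.items():
            if key == "compression_options" and not value:
                # Treat empty/missing compression options as "use the default"
                setattr(configuration.dataset_configurations[name], key, None)
            elif key != "full_shape":  # shape reported to the GUIDE is immutable
                setattr(configuration.dataset_configurations[name], key, value)
    configure_backend(nwbfile=nwbfile, backend_configuration=configuration)
```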

@@ -894,60 +863,50 @@ def create_file(
else:
nwbfile_path.unlink()

if will_configure_backend:

backend = backend_configuration.get("backend", "hdf5")
configuration_values = backend_configuration.get("results", {}).get(backend, {})
def update_conversion_progress(message):
update_dict = dict(request_id=request_id, **message)
if url or not run_stub_test:
requests.post(url=url, json=update_dict)
else:
progress_handler.announce(update_dict)

# Create NWB file with appropriate backend configuration
with make_or_load_nwbfile(
nwbfile_path=nwbfile_path, metadata=metadata, overwrite=overwrite, backend=backend
) as nwbfile:
converter.add_to_nwbfile(nwbfile, metadata=metadata)
configuration = get_default_backend_configuration(nwbfile=nwbfile, backend=backend)
configure_dataset_backends(nwbfile, configuration_values, configuration)
progress_bar_options = dict(
mininterval=0,
on_progress_update=update_conversion_progress,
)

else:
# Assume all interfaces have the same conversion options for now
available_options = converter.get_conversion_options_schema()
options = {interface: {} for interface in info["source_data"]}

def update_conversion_progress(message):
update_dict = dict(request_id=request_id, **message)
if url or not run_stub_test:
requests.post(url=url, json=update_dict)
else:
progress_handler.announce(update_dict)
for interface in options:
available_opts = available_options.get("properties").get(interface).get("properties", {})

progress_bar_options = dict(
mininterval=0,
on_progress_update=update_conversion_progress,
)
# Specify if stub test
if run_stub_test:
if available_opts.get("stub_test"):
options[interface]["stub_test"] = True

# Assume all interfaces have the same conversion options for now
available_options = converter.get_conversion_options_schema()
options = {interface: {} for interface in info["source_data"]}

for interface in options:
available_opts = available_options.get("properties").get(interface).get("properties", {})
# Specify if iterator options are available
elif available_opts.get("iterator_opts"):
options[interface]["iterator_opts"] = dict(
display_progress=True,
progress_bar_class=TQDMProgressSubscriber,
progress_bar_options=progress_bar_options,
)

# Specify if stub test
if run_stub_test:
if available_opts.get("stub_test"):
options[interface]["stub_test"] = True
run_conversion_kwargs = dict(
metadata=metadata,
nwbfile_path=nwbfile_path,
overwrite=overwrite,
conversion_options=options,
backend=backend,
)

# Specify if iterator options are available
elif available_opts.get("iterator_opts"):
options[interface]["iterator_opts"] = dict(
display_progress=True,
progress_bar_class=TQDMProgressSubscriber,
progress_bar_options=progress_bar_options,
)
if not run_stub_test:
run_conversion_kwargs.update(dict(backend_configuration=update_backend_configuration(info)))

# Actually run the conversion
converter.run_conversion(
metadata=metadata,
nwbfile_path=nwbfile_path,
overwrite=overwrite,
conversion_options=options,
)
converter.run_conversion(**run_conversion_kwargs)

except Exception as e:
if log_url:
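The rewritten `create_file` above hoists the progress plumbing out of the non-stub branch: `TQDMProgressSubscriber` from tqdm_publisher drives a callback that either POSTs each update to a listener URL or announces it through the local progress handler. A self-contained sketch of that forwarding pattern follows; it simplifies the diff's branch condition to a plain URL check, and the URL and handler object are illustrative stand-ins:

```python
import requests

def make_progress_callback(request_id: str, url: str | None = None, progress_handler=None):
    # Returns a closure suitable for tqdm_publisher's on_progress_update hook.
    def update_conversion_progress(message: dict) -> None:
        update_dict = dict(request_id=request_id, **message)
        if url:
            requests.post(url=url, json=update_dict)  # forward to a remote listener
        else:
            progress_handler.announce(update_dict)  # requires a real handler object

    return update_conversion_progress

progress_bar_options = dict(
    mininterval=0,  # emit every update so the GUIDE's progress bar stays live
    on_progress_update=make_progress_callback("request-123", url="http://localhost:3434/progress"),
)
```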
Expand All @@ -964,42 +923,51 @@ def update_conversion_progress(message):
raise e


def get_backend_configuration(info: dict) -> dict:

import numpy as np

PROPS_TO_REMOVE = [
# Immutable
"object_id",
"dataset_name",
"location_in_file",
"dtype",
]

PROPS_TO_IGNORE = ["full_shape"]

info["overwrite"] = True # Always overwrite the file
def update_backend_configuration(info: dict) -> dict:

from neuroconv.tools.nwb_helpers import (
get_default_backend_configuration,
make_nwbfile_from_metadata,
)

backend_configuration = info.get("configuration", {})
backend = backend_configuration.get("backend", "hdf5")
results = backend_configuration.get("results", {}).get(backend, {})
PROPS_TO_IGNORE = ["full_shape"]

# raise ValueError(f"This function is not currently supported. {results}")
info_from_frontend = info.get("configuration", {})
backend = info_from_frontend.get("backend", "hdf5")
backend_configuration_from_frontend = info_from_frontend.get("results", {}).get(backend, {})

converter, metadata, __ = get_conversion_info(info)

nwbfile = make_nwbfile_from_metadata(metadata=metadata)
converter.add_to_nwbfile(nwbfile, metadata=metadata)
configuration = get_default_backend_configuration(nwbfile=nwbfile, backend=backend)
for dataset_name, dataset_configuration in results.items():

backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend=backend)

for location_in_file, dataset_configuration in backend_configuration_from_frontend.items():
for key, value in dataset_configuration.items():
if key not in PROPS_TO_IGNORE:
setattr(configuration.dataset_configurations[dataset_name], key, value)
# Pydantic models only allow setting of attributes
setattr(backend_configuration.dataset_configurations[location_in_file], key, value)

return backend_configuration


def get_backend_configuration(info: dict) -> dict:

import numpy as np

PROPS_TO_REMOVE = [
# Immutable
"object_id",
"dataset_name",
"location_in_file",
"dtype",
]

info["overwrite"] = True # Always overwrite the file

backend = info.get("backend", "hdf5")
configuration = update_backend_configuration(info)

def custom_encoder(obj):
if isinstance(obj, np.ndarray):
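The view cuts off `custom_encoder` right after the `np.ndarray` check. A common completion of that JSON-encoder pattern, offered as an assumption rather than the commit's actual body, converts arrays (and numpy scalars) to native Python types so the configuration survives `json.dumps`:

```python
import json

import numpy as np

def custom_encoder(obj):
    # Assumed completion: the commit's real body is truncated in this view.
    if isinstance(obj, np.ndarray):
        return obj.tolist()  # arrays become plain lists for JSON
    if isinstance(obj, np.generic):
        return obj.item()  # unwrap numpy scalars (e.g., np.int64)
    raise TypeError(f"Object of type {type(obj)} is not JSON serializable")

payload = json.dumps({"full_shape": np.array([10_000, 64])}, default=custom_encoder)
```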

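The centerpiece of the PR is the new `update_backend_configuration`: build the NWB file in memory, ask neuroconv for the default configuration of the chosen backend, then overwrite each dataset's tunable properties with the values the frontend sent. A condensed sketch of that core loop using the same neuroconv helpers the diff imports; the function name and signature here are illustrative:

```python
from neuroconv.tools.nwb_helpers import (
    get_default_backend_configuration,
    make_nwbfile_from_metadata,
)

PROPS_TO_IGNORE = ["full_shape"]  # reported to the GUIDE, but immutable here

def resolve_backend_configuration(converter, metadata: dict, frontend_results: dict, backend: str = "hdf5"):
    # Materialize the file in memory so neuroconv can enumerate its datasets.
    nwbfile = make_nwbfile_from_metadata(metadata=metadata)
    converter.add_to_nwbfile(nwbfile, metadata=metadata)
    backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend=backend)
    for location_in_file, dataset_configuration in frontend_results.items():
        for key, value in dataset_configuration.items():
            if key not in PROPS_TO_IGNORE:
                # Dataset configurations are Pydantic models, so values are
                # applied with setattr rather than item assignment.
                setattr(backend_configuration.dataset_configurations[location_in_file], key, value)
    return backend_configuration
```

`create_file` then passes the resolved object straight to `converter.run_conversion(backend_configuration=...)` for full (non-stub) conversions, which is what makes the conversion uniform across backends.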