From 88eb7aca65b6701df22deb8c1be70bbbedba32da Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Mon, 18 Jul 2022 14:02:35 -0400 Subject: [PATCH 01/38] branch from dev --- src/pynwb/__init__.py | 11 +-- src/pynwb/validate.py | 137 -------------------------- src/pynwb/validation.py | 209 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 210 insertions(+), 147 deletions(-) delete mode 100644 src/pynwb/validate.py create mode 100644 src/pynwb/validation.py diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 57a508eec..44f7ebf2b 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -186,16 +186,7 @@ def get_sum(self, a, b): return __TYPE_MAP.get_dt_container_cls(neurodata_type, namespace) -@docval({'name': 'io', 'type': HDMFIO, 'doc': 'the HDMFIO object to read from'}, - {'name': 'namespace', 'type': str, 'doc': 'the namespace to validate against', 'default': CORE_NAMESPACE}, - returns="errors in the file", rtype=list, - is_method=False) -def validate(**kwargs): - """Validate an NWB file against a namespace""" - io, namespace = getargs('io', 'namespace', kwargs) - builder = io.read_builder() - validator = ValidatorMap(io.manager.namespace_catalog.get_namespace(name=namespace)) - return validator.validate(builder) +from .validation import validate # keeping here as placeholder for import order class NWBHDF5IO(_HDF5IO): diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py deleted file mode 100644 index a6ad3e546..000000000 --- a/src/pynwb/validate.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Command line tool to Validate an NWB file against a namespace""" -import os -import sys -from argparse import ArgumentParser - -from hdmf.spec import NamespaceCatalog -from hdmf.build import BuildManager -from hdmf.build import TypeMap as TypeMap - -from pynwb import validate, CORE_NAMESPACE, NWBHDF5IO -from pynwb.spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace - - -def _print_errors(validation_errors): - if validation_errors: - print(' - found the following errors:', file=sys.stderr) - for err in validation_errors: - print(str(err), file=sys.stderr) - else: - print(' - no errors found.') - - -def _validate_helper(**kwargs): - errors = validate(**kwargs) - _print_errors(errors) - - return (errors is not None and len(errors) > 0) - - -def main(): # noqa: C901 - - ep = """ - If --ns is not specified, validate against all namespaces in the NWB file. - """ - - parser = ArgumentParser(description="Validate an NWB file", epilog=ep) - parser.add_argument("paths", type=str, nargs='+', help="NWB file paths") - # parser.add_argument('-p', '--nspath', type=str, help="the path to the namespace YAML file") - parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against") - parser.add_argument("-lns", "--list-namespaces", dest="list_namespaces", - action='store_true', help="List the available namespaces and exit.") - - feature_parser = parser.add_mutually_exclusive_group(required=False) - feature_parser.add_argument("--cached-namespace", dest="cached_namespace", action='store_true', - help="Use the cached namespace (default).") - feature_parser.add_argument('--no-cached-namespace', dest="cached_namespace", action='store_false', - help="Don't use the cached namespace.") - parser.set_defaults(cached_namespace=True) - - args = parser.parse_args() - ret = 0 - - # TODO Validation against a specific namespace file is currently broken. 
See pynwb#1396
-    # if args.nspath:
-    #     if not os.path.isfile(args.nspath):
-    #         print("The namespace file {} is not a valid file.".format(args.nspath), file=sys.stderr)
-    #         sys.exit(1)
-    #
-    #     if args.cached_namespace:
-    #         print("Turning off validation against cached namespace information "
-    #               "as --nspath was passed.", file=sys.stderr)
-    #         args.cached_namespace = False
-
-    for path in args.paths:
-
-        if not os.path.isfile(path):
-            print("The file {} does not exist.".format(path), file=sys.stderr)
-            ret = 1
-            continue
-
-        if args.cached_namespace:
-            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
-            ns_deps = NWBHDF5IO.load_namespaces(catalog, path)
-            s = set(ns_deps.keys())        # determine which namespaces are the most
-            for k in ns_deps:              # specific (i.e. extensions) and validate
-                s -= ns_deps[k].keys()     # against those
-            # TODO remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357
-            if 'hdmf-experimental' in s:
-                s.remove('hdmf-experimental')  # remove validation of hdmf-experimental for now
-            namespaces = list(sorted(s))
-            if len(namespaces) > 0:
-                tm = TypeMap(catalog)
-                manager = BuildManager(tm)
-                specloc = "cached namespace information"
-            else:
-                manager = None
-                namespaces = [CORE_NAMESPACE]
-                specloc = "pynwb namespace information"
-                print("The file {} has no cached namespace information. "
-                      "Falling back to {}.".format(path, specloc), file=sys.stderr)
-        # elif args.nspath:
-        #     catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
-        #     namespaces = catalog.load_namespaces(args.nspath)
-        #
-        #     if len(namespaces) == 0:
-        #         print("Could not load namespaces from file {}.".format(args.nspath), file=sys.stderr)
-        #         sys.exit(1)
-        #
-        #     tm = TypeMap(catalog)
-        #     manager = BuildManager(tm)
-        #     specloc = "--nspath namespace information"
-        else:
-            manager = None
-            namespaces = [CORE_NAMESPACE]
-            specloc = "pynwb namespace information"
-
-        if args.list_namespaces:
-            print("\n".join(namespaces))
-            ret = 0
-            continue
-
-        if args.ns:
-            if args.ns in namespaces:
-                namespaces = [args.ns]
-            elif args.cached_namespace and args.ns in ns_deps:  # validating against a dependency
-                for k in ns_deps:
-                    if args.ns in ns_deps[k]:
-                        print(("The namespace '{}' is included by the namespace '{}'. 
Please validate against " - "that namespace instead.").format(args.ns, k), file=sys.stderr) - ret = 1 - continue - else: - print("The namespace '{}' could not be found in {} as only {} is present.".format( - args.ns, specloc, namespaces), file=sys.stderr) - ret = 1 - continue - - with NWBHDF5IO(path, mode='r', manager=manager) as io: - for ns in namespaces: - print("Validating {} against {} using namespace '{}'.".format(path, specloc, ns)) - ret = ret or _validate_helper(io=io, namespace=ns) - - sys.exit(ret) - - -if __name__ == '__main__': # pragma: no cover - main() diff --git a/src/pynwb/validation.py b/src/pynwb/validation.py new file mode 100644 index 000000000..357f55e24 --- /dev/null +++ b/src/pynwb/validation.py @@ -0,0 +1,209 @@ +"""Command line tool to Validate an NWB file against a namespace.""" +import os +import sys +from argparse import ArgumentParser +from typing import Optional, Tuple, List, Dict + +from hdmf.spec import NamespaceCatalog +from hdmf.build import BuildManager +from hdmf.build import TypeMap as TypeMap +from hdmf.utils import docval, getargs +from hdmf.backends.io import HDMFIO +from hdmf.validate import ValidatorMap + +from pynwb import CORE_NAMESPACE, NWBHDF5IO +from pynwb.spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace + + +def _print_errors(validation_errors): + if validation_errors: + print(' - found the following errors:', file=sys.stderr) + for err in validation_errors: + print(str(err), file=sys.stderr) + else: + print(' - no errors found.') + + +def _validate_helper(**kwargs): + errors = validate(**kwargs) + _print_errors(errors) + + return (errors is not None and len(errors) > 0) + + +@docval({'name': 'io', 'type': HDMFIO, 'doc': 'the HDMFIO object to read from'}, + {'name': 'namespace', 'type': str, 'doc': 'the namespace to validate against', 'default': CORE_NAMESPACE}, + returns="errors in the file", rtype=list, + is_method=False) +def validate(**kwargs): + """Validate the io of an open NWBHDF5IO against a namespace.""" + io, namespace = getargs('io', 'namespace', kwargs) + builder = io.read_builder() + validator = ValidatorMap(io.manager.namespace_catalog.get_namespace(name=namespace)) + return validator.validate(builder) + + +def get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManager, Dict[str, str]]: + """ + Determine the most specific namespace(s) that are cached in the given NWBFile that can be used for validation. + + Example + ------- + The following example illustrates how we can use this function to validate against namespaces + cached in a file. This is useful, e.g., when a file was created using an extension + >>> from pynwb import validate + >>> from pynwb.validate import get_cached_namespaces_to_validate + >>> path = "my_nwb_file.nwb" + >>> validate_namespaces, manager, cached_namespaces = get_cached_namespaces_to_validate(path) + >>> with NWBHDF5IO(path, "r", manager=manager) as reader: + >>> errors = [] + >>> for ns in validate_namespaces: + >>> errors += validate(io=reader, namespace=ns) + :param path: Path for the NWB file + :return: Tuple with: + - List of strings with the most specific namespace(s) to use for validation. + - BuildManager object for opening the file for validation + - Dict with the full result from NWBHDF5IO.load_namespaces + """ + catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace) + namespace_dependencies = NWBHDF5IO.load_namespaces(catalog, path) + + # determine which namespaces are the most specific (i.e. 
extensions) and validate against those + candidate_namespaces = set(namespace_dependencies.keys()) + for namespace_dependency in namespace_dependencies: + candidate_namespaces -= namespace_dependencies[namespace_dependency].keys() + + # TODO remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357 + candidate_namespaces.discard('hdmf-experimental') # remove validation of hdmf-experimental for now + namespaces = sorted(candidate_namespaces) + + if len(namespaces) > 0: + tm = TypeMap(catalog) + manager = BuildManager(tm) + else: + manager = None + + return namespaces, manager, namespace_dependencies + + +@docval({'name': 'path', 'type': HDMFIO, 'doc': 'the HDMFIO object to read from'}, + { + 'name': 'use_cached_namespaces', + 'type': str, 'doc': 'Whether to use namespaces cached within the file for validation.', + 'default': True + }, + { + 'name': 'namespace', + 'type': Optional[str], 'doc': 'Whether to use namespaces cached within the file for validation.', + 'default': None + }, + returns="errors in the file", rtype=list, + is_method=False) +def validate_file(**kwargs): + """Validate an NWB file against a namespace or its cached namespaces.""" + path, use_cached_namespaces, namespace = getargs("path", "use_cached_namespaces", "namespace", kwargs) + + if use_cached_namespaces: + catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace) + ns_deps = NWBHDF5IO.load_namespaces(catalog, path) + s = set(ns_deps.keys()) # determine which namespaces are the most + for k in ns_deps: # specific (i.e. extensions) and validate + s -= ns_deps[k].keys() # against those + # TODO remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357 + if 'hdmf-experimental' in s: + s.remove('hdmf-experimental') # remove validation of hdmf-experimental for now + namespaces = list(sorted(s)) + if len(namespaces) > 0: + tm = TypeMap(catalog) + manager = BuildManager(tm) + specloc = "cached namespace information" + else: + manager = None + namespaces = [CORE_NAMESPACE] + specloc = "pynwb namespace information" + print("The file {} has no cached namespace information. " + "Falling back to {}.".format(path, specloc), file=sys.stderr) + + cached_namespaces, manager, namespace_dependencies = get_cached_namespaces_to_validate(path=path) + + if namespace: + if namespace in namespaces: + namespaces = [namespace] + elif use_cached_namespaces and namespaces in namespace_dependencies: # validating against a dependency + for namespace_dependency in namespace_dependencies: + if namespace in namespace_dependencies[namespace_dependency]: + print( + f"The namespace '{namespace}' is included by the namespace '{namespace_dependency}'. " + "Please validate against that namespace instead.", + file=sys.stderr + ) + ret = 1 + else: + print( + f"The namespace '{namespace}' could not be found in {specloc} as only {namespaces} is present.", + file=sys.stderr + ) + ret = 1 + + with NWBHDF5IO(path, mode='r', manager=manager) as io: + for namespace in namespaces: + print(f"Validating {path} against {specloc} using namespace '{namespace}'.") + ret = _validate_helper(io=io, namespace=namespace) or ret + return ret + + +def validate_cli(): # noqa: C901 + + ep = """ + If --ns is not specified, validate against all namespaces in the NWB file. 
+ """ + + parser = ArgumentParser(description="Validate an NWB file", epilog=ep) + parser.add_argument("paths", type=str, nargs='+', help="NWB file paths") + # parser.add_argument('-p', '--nspath', type=str, help="the path to the namespace YAML file") + parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against") + parser.add_argument("-lns", "--list-namespaces", dest="list_namespaces", + action='store_true', help="List the available namespaces and exit.") + + feature_parser = parser.add_mutually_exclusive_group(required=False) + feature_parser.add_argument("--cached-namespace", dest="cached_namespace", action='store_true', + help="Use the cached namespace (default).") + parser.set_defaults(cached_namespace=True) + + args = parser.parse_args() + ret = 0 + + # TODO Validation against a specific namespace file is currently broken. See pynwb#1396 + # if args.nspath: + # if not os.path.isfile(args.nspath): + # print("The namespace file {} is not a valid file.".format(args.nspath), file=sys.stderr) + # sys.exit(1) + # + # if args.cached_namespace: + # print("Turning off validation against cached namespace information " + # "as --nspath was passed.", file=sys.stderr) + # args.cached_namespace = False + + for path in args.paths: + + if not os.path.isfile(path): + print(f"The file {path} does not exist.", file=sys.stderr) + ret = 1 + continue + + cached_namespaces, manager, namespace_dependencies = get_cached_namespaces_to_validate(path=path) + if args.list_namespaces: + namespaces = cached_namespaces or [CORE_NAMESPACE] + print("\n".join(namespaces)) + ret = 0 + continue + + ret = validate_file(path=path, use_cached_namespaces=args.cached_namepsaces, namespace=args.ns) + if ret == 1: + continue + + sys.exit(ret) + + +if __name__ == '__main__': # pragma: no cover + validate_cli() From c7c892e33eb48e723b58b16a06ed3b9532e0d16a Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Thu, 1 Sep 2022 17:28:54 +0000 Subject: [PATCH 02/38] major work --- src/pynwb/validation.py | 301 +++++++++++---------- tests/back_compat/test_import_structure.py | 85 ++++++ 2 files changed, 242 insertions(+), 144 deletions(-) create mode 100644 tests/back_compat/test_import_structure.py diff --git a/src/pynwb/validation.py b/src/pynwb/validation.py index 357f55e24..922a2d220 100644 --- a/src/pynwb/validation.py +++ b/src/pynwb/validation.py @@ -1,8 +1,7 @@ """Command line tool to Validate an NWB file against a namespace.""" -import os import sys from argparse import ArgumentParser -from typing import Optional, Tuple, List, Dict +from typing import Tuple, List, Dict from hdmf.spec import NamespaceCatalog from hdmf.build import BuildManager @@ -11,39 +10,30 @@ from hdmf.backends.io import HDMFIO from hdmf.validate import ValidatorMap -from pynwb import CORE_NAMESPACE, NWBHDF5IO +from pynwb import CORE_NAMESPACE from pynwb.spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace -def _print_errors(validation_errors): +def _check_errors(validation_errors: list) -> bool: + return validation_errors is not None and len(validation_errors) > 0 + + +def _print_errors(validation_errors: list): if validation_errors: - print(' - found the following errors:', file=sys.stderr) + print(" - found the following errors:", file=sys.stderr) for err in validation_errors: print(str(err), file=sys.stderr) else: - print(' - no errors found.') - + print(" - no errors found.") -def _validate_helper(**kwargs): - errors = validate(**kwargs) - _print_errors(errors) - return (errors is not None and len(errors) > 0) - - 
-@docval({'name': 'io', 'type': HDMFIO, 'doc': 'the HDMFIO object to read from'}, - {'name': 'namespace', 'type': str, 'doc': 'the namespace to validate against', 'default': CORE_NAMESPACE}, - returns="errors in the file", rtype=list, - is_method=False) -def validate(**kwargs): - """Validate the io of an open NWBHDF5IO against a namespace.""" - io, namespace = getargs('io', 'namespace', kwargs) +def _validate_helper(io: HDMFIO, namespace: str = CORE_NAMESPACE) -> list: builder = io.read_builder() validator = ValidatorMap(io.manager.namespace_catalog.get_namespace(name=namespace)) return validator.validate(builder) -def get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManager, Dict[str, str]]: +def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManager, Dict[str, str]]: """ Determine the most specific namespace(s) that are cached in the given NWBFile that can be used for validation. @@ -52,9 +42,9 @@ def get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManage The following example illustrates how we can use this function to validate against namespaces cached in a file. This is useful, e.g., when a file was created using an extension >>> from pynwb import validate - >>> from pynwb.validate import get_cached_namespaces_to_validate + >>> from pynwb.validate import _get_cached_namespaces_to_validate >>> path = "my_nwb_file.nwb" - >>> validate_namespaces, manager, cached_namespaces = get_cached_namespaces_to_validate(path) + >>> validate_namespaces, manager, cached_namespaces = _get_cached_namespaces_to_validate(path) >>> with NWBHDF5IO(path, "r", manager=manager) as reader: >>> errors = [] >>> for ns in validate_namespaces: @@ -65,145 +55,168 @@ def get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManage - BuildManager object for opening the file for validation - Dict with the full result from NWBHDF5IO.load_namespaces """ - catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace) - namespace_dependencies = NWBHDF5IO.load_namespaces(catalog, path) + from . import NWBHDF5IO # TODO: modularize to avoid circular import - # determine which namespaces are the most specific (i.e. extensions) and validate against those + catalog = NamespaceCatalog( + group_spec=NWBGroupSpec, dataset_spec_cls=NWBDatasetSpec, spec_namespace_cls=NWBNamespace + ) + namespace_dependencies = NWBHDF5IO.load_namespaces(namepsace_catalog=catalog, path=path) + + # Determine which namespaces are the most specific (i.e. 
extensions) and validate against those candidate_namespaces = set(namespace_dependencies.keys()) for namespace_dependency in namespace_dependencies: candidate_namespaces -= namespace_dependencies[namespace_dependency].keys() - # TODO remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357 - candidate_namespaces.discard('hdmf-experimental') # remove validation of hdmf-experimental for now - namespaces = sorted(candidate_namespaces) + # TODO: remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357 + candidate_namespaces.discard("hdmf-experimental") # remove validation of hdmf-experimental for now + cached_namespaces = sorted(candidate_namespaces) - if len(namespaces) > 0: - tm = TypeMap(catalog) - manager = BuildManager(tm) + if len(cached_namespaces) > 0: + type_map = TypeMap(namespaces=catalog) + manager = BuildManager(type_map=type_map) else: manager = None - return namespaces, manager, namespace_dependencies - - -@docval({'name': 'path', 'type': HDMFIO, 'doc': 'the HDMFIO object to read from'}, - { - 'name': 'use_cached_namespaces', - 'type': str, 'doc': 'Whether to use namespaces cached within the file for validation.', - 'default': True - }, - { - 'name': 'namespace', - 'type': Optional[str], 'doc': 'Whether to use namespaces cached within the file for validation.', - 'default': None - }, - returns="errors in the file", rtype=list, - is_method=False) -def validate_file(**kwargs): - """Validate an NWB file against a namespace or its cached namespaces.""" - path, use_cached_namespaces, namespace = getargs("path", "use_cached_namespaces", "namespace", kwargs) - - if use_cached_namespaces: - catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace) - ns_deps = NWBHDF5IO.load_namespaces(catalog, path) - s = set(ns_deps.keys()) # determine which namespaces are the most - for k in ns_deps: # specific (i.e. extensions) and validate - s -= ns_deps[k].keys() # against those - # TODO remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357 - if 'hdmf-experimental' in s: - s.remove('hdmf-experimental') # remove validation of hdmf-experimental for now - namespaces = list(sorted(s)) - if len(namespaces) > 0: - tm = TypeMap(catalog) - manager = BuildManager(tm) - specloc = "cached namespace information" - else: - manager = None - namespaces = [CORE_NAMESPACE] - specloc = "pynwb namespace information" - print("The file {} has no cached namespace information. " - "Falling back to {}.".format(path, specloc), file=sys.stderr) - - cached_namespaces, manager, namespace_dependencies = get_cached_namespaces_to_validate(path=path) - - if namespace: - if namespace in namespaces: - namespaces = [namespace] - elif use_cached_namespaces and namespaces in namespace_dependencies: # validating against a dependency - for namespace_dependency in namespace_dependencies: - if namespace in namespace_dependencies[namespace_dependency]: + return cached_namespaces, manager, namespace_dependencies + + +@docval( + { + "name": "io", + "type": HDMFIO, # want to do Optional[HDMFIO] but docval complains? + "doc": "An open IO to an NWB file.", + "default": False, + }, # For back-compatability + { + "name": "namespace", + "type": str, # want to do Optional[str] but docval complains? + "doc": "A specific namespace to validate against.", + "default": CORE_NAMESPACE, + }, # Argument order is for back-compatability + { + "name": "paths", + "type": list, # want to do Optional[List[str]] but docval complains? 
+ "doc": "List of NWB file paths.", + "default": None, + }, + { + "name": "use_cached_namespaces", + "type": bool, + "doc": "Whether to use namespaces cached within the file for validation.", + "default": True, + }, + { + "name": "verbose", + "type": bool, + "doc": "Whether or not to print messages to stdout.", + "default": False, + }, + returns="Validation errors in the file.", + rtype=(list, bool), + is_method=False, +) +def validate(**kwargs): + """Validate NWB file(s) against a namespace or its cached namespaces.""" + from . import NWBHDF5IO # TODO: modularize to avoid circular import + + io, paths, use_cached_namespaces, chosen_namespace, verbose = getargs( + "io", "paths", "use_cached_namespaces", "namespace", "verbose", kwargs + ) + + if io is not None: + validation_errors = _validate_helper(io=io, namespace=chosen_namespace) + return validation_errors + + status = 0 + validation_errors = list() + for path in paths: + if use_cached_namespaces: + cached_namespaces, manager, namespace_dependencies = _get_cached_namespaces_to_validate(path=path) + + if any(cached_namespaces): + namespaces = cached_namespaces + namespace_message = "cached namespace information" + else: + namespace_message = "PyNWB namespace information" + if verbose: print( - f"The namespace '{namespace}' is included by the namespace '{namespace_dependency}'. " - "Please validate against that namespace instead.", - file=sys.stderr + f"The file {path} has no cached namespace information. " "Falling back to {namespace_source}.", + file=sys.stderr, + ) + namespaces = [CORE_NAMESPACE] + + if chosen_namespace: + if chosen_namespace in namespaces: + namespaces = [chosen_namespace] + elif ( + use_cached_namespaces and chosen_namespace in namespace_dependencies + ): # validating against a dependency + for namespace_dependency in namespace_dependencies: + if chosen_namespace in namespace_dependencies[namespace_dependency]: + status = 1 + if verbose: + print( + f"The namespace '{chosen_namespace}' is included by the namespace " + "'{namespace_dependency}'. Please validate against that namespace instead.", + file=sys.stderr, + ) + else: + status = 1 + if verbose: + print( + f"The namespace '{chosen_namespace}' could not be found in {namespace_message} as only " + f"{namespaces} is present.", + file=sys.stderr, ) - ret = 1 - else: - print( - f"The namespace '{namespace}' could not be found in {specloc} as only {namespaces} is present.", - file=sys.stderr - ) - ret = 1 - - with NWBHDF5IO(path, mode='r', manager=manager) as io: - for namespace in namespaces: - print(f"Validating {path} against {specloc} using namespace '{namespace}'.") - ret = _validate_helper(io=io, namespace=namespace) or ret - return ret - - -def validate_cli(): # noqa: C901 - - ep = """ - If --ns is not specified, validate against all namespaces in the NWB file. 
- """ - parser = ArgumentParser(description="Validate an NWB file", epilog=ep) - parser.add_argument("paths", type=str, nargs='+', help="NWB file paths") - # parser.add_argument('-p', '--nspath', type=str, help="the path to the namespace YAML file") + with NWBHDF5IO(path, mode="r", manager=manager) as io: + for namespace in namespaces: + if verbose: + print(f"Validating {path} against {namespace_message} using namespace '{namespace}'.") + validation_errors += _validate_helper(io=io, namespace=namespace) + return validation_errors, status + + +def validate_cli(): + """CLI wrapper around pynwb.validate.""" + parser = ArgumentParser( + description="Validate an NWB file", + epilog="If --ns is not specified, validate against all namespaces in the NWB file.", + ) + + # Special arg specific to CLI + parser.add_argument( + "-lns", + "--list-namespaces", + dest="list_namespaces", + action="store_true", + help="List the available namespaces and exit.", + ) + + # Common args to the API validate + parser.add_argument("paths", type=str, nargs="+", help="NWB file paths") parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against") - parser.add_argument("-lns", "--list-namespaces", dest="list_namespaces", - action='store_true', help="List the available namespaces and exit.") - feature_parser = parser.add_mutually_exclusive_group(required=False) - feature_parser.add_argument("--cached-namespace", dest="cached_namespace", action='store_true', - help="Use the cached namespace (default).") + feature_parser.add_argument( + "--cached-namespace", dest="cached_namespace", action="store_true", help="Use the cached namespace (default)." + ) parser.set_defaults(cached_namespace=True) - args = parser.parse_args() - ret = 0 + status = 0 - # TODO Validation against a specific namespace file is currently broken. 
See pynwb#1396 - # if args.nspath: - # if not os.path.isfile(args.nspath): - # print("The namespace file {} is not a valid file.".format(args.nspath), file=sys.stderr) - # sys.exit(1) - # - # if args.cached_namespace: - # print("Turning off validation against cached namespace information " - # "as --nspath was passed.", file=sys.stderr) - # args.cached_namespace = False - - for path in args.paths: - - if not os.path.isfile(path): - print(f"The file {path} does not exist.", file=sys.stderr) - ret = 1 - continue - - cached_namespaces, manager, namespace_dependencies = get_cached_namespaces_to_validate(path=path) - if args.list_namespaces: - namespaces = cached_namespaces or [CORE_NAMESPACE] - print("\n".join(namespaces)) - ret = 0 - continue - - ret = validate_file(path=path, use_cached_namespaces=args.cached_namepsaces, namespace=args.ns) - if ret == 1: - continue + if args.list_namespaces: + for path in args.paths: + cached_namespaces, _, _, specloc = _get_cached_namespaces_to_validate(path=path) + print("\n".join(cached_namespaces)) + else: + validation_errors, validation_status = validate( + paths=args.paths, use_cached_namespace=args.cached_namespace, namespace=args.ns, verbose=True + ) + _print_errors(validation_errors=validation_errors) + status = status or validation_status or (validation_errors is not None and len(validation_errors) > 0) - sys.exit(ret) + sys.exit(status) -if __name__ == '__main__': # pragma: no cover +if __name__ == "__main__": # pragma: no cover validate_cli() diff --git a/tests/back_compat/test_import_structure.py b/tests/back_compat/test_import_structure.py new file mode 100644 index 000000000..56b34b50b --- /dev/null +++ b/tests/back_compat/test_import_structure.py @@ -0,0 +1,85 @@ +from unittest import TestCase + +import pynwb + + +class TestImportStructure(TestCase): + def test_outer_import_structure(self): + current_structure = dir(pynwb) + expected_structure = [ + "BuildManager", + "CORE_NAMESPACE", + "DataChunkIterator", + "H5DataIO", + "HDMFIO", + "NWBContainer", + "NWBData", + "NWBDatasetSpec", + "NWBFile", + "NWBGroupSpec", + "NWBHDF5IO", + "NWBNamespace", + "NamespaceCatalog", + "Path", + "ProcessingModule", + "TimeSeries", + "TypeMap", + "ValidatorMap", + "_HDF5IO", + "__NS_CATALOG", + "__TYPE_MAP", + "__builtins__", + "__cached__", + "__core_ns_file_name", + "__doc__", + "__file__", + "__get_resources", + "__io", + "__loader__", + "__name__", + "__package__", + "__path__", + "__resources", + "__spec__", + "__version__", + "_due", + "_get_resources", + "_version", + "available_namespaces", + "base", + "behavior", + "core", + "deepcopy", + "device", + "docval", + "ecephys", + "epoch", + "file", + "get_class", + "get_docval", + "get_manager", + "get_type_map", + "getargs", + "h5py", + "hdmf", + "hdmf_typemap", + "icephys", + "image", + "io", + "legacy", + "load_namespaces", + "misc", + "ogen", + "ophys", + "os", + "popargs", + "register_class", + "register_map", + "retinotopy", + "spec", + "validation", + "validate", + "warn", + ] + for member in expected_structure: + self.assertIn(member=member, container=current_structure) From 624659890484fcbd6ba3329db09d8c63153baf91 Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Thu, 1 Sep 2022 17:34:04 +0000 Subject: [PATCH 03/38] fix flake8 at least --- src/pynwb/__init__.py | 197 +++++++++++++++++++++++++----------------- 1 file changed, 119 insertions(+), 78 deletions(-) diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 1e3483cec..3d98e87b5 100644 --- a/src/pynwb/__init__.py +++ 
b/src/pynwb/__init__.py @@ -1,6 +1,6 @@ -'''This package will contain functions, classes, and objects +"""This package will contain functions, classes, and objects for reading and writing data in NWB format -''' +""" import os.path from pathlib import Path from copy import deepcopy @@ -11,21 +11,22 @@ from hdmf.utils import docval, getargs, popargs, get_docval from hdmf.backends.io import HDMFIO from hdmf.backends.hdf5 import HDF5IO as _HDF5IO -from hdmf.validate import ValidatorMap from hdmf.build import BuildManager, TypeMap import hdmf.common +from .validation import validate # noqa: F401 -CORE_NAMESPACE = 'core' -__core_ns_file_name = 'nwb.namespace.yaml' +CORE_NAMESPACE = "core" +__core_ns_file_name = "nwb.namespace.yaml" from .spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace # noqa E402 def __get_resources(): from pkg_resources import resource_filename + ret = dict() - ret['namespace_path'] = os.path.join(resource_filename(__name__, 'nwb-schema/core'), __core_ns_file_name) + ret["namespace_path"] = os.path.join(resource_filename(__name__, "nwb-schema/core"), __core_ns_file_name) return ret @@ -45,17 +46,23 @@ def _get_resources(): __TYPE_MAP.merge(hdmf_typemap, ns_catalog=True) -@docval({'name': 'extensions', 'type': (str, TypeMap, list), - 'doc': 'a path to a namespace, a TypeMap, or a list consisting of paths to namespaces and TypeMaps', - 'default': None}, - returns="TypeMap loaded for the given extension or NWB core namespace", rtype=tuple, - is_method=False) +@docval( + { + "name": "extensions", + "type": (str, TypeMap, list), + "doc": "a path to a namespace, a TypeMap, or a list consisting of paths to namespaces and TypeMaps", + "default": None, + }, + returns="TypeMap loaded for the given extension or NWB core namespace", + rtype=tuple, + is_method=False, +) def get_type_map(**kwargs): - ''' + """ Get the TypeMap for the given extensions. If no extensions are provided, return the TypeMap for the core namespace - ''' - extensions = getargs('extensions', kwargs) + """ + extensions = getargs("extensions", kwargs) type_map = None if extensions is None: type_map = deepcopy(__TYPE_MAP) @@ -71,7 +78,7 @@ def get_type_map(**kwargs): elif isinstance(ext, TypeMap): type_map.merge(ext) else: - raise ValueError('extensions must be a list of paths to namespace specs or a TypeMaps') + raise ValueError("extensions must be a list of paths to namespace specs or a TypeMaps") elif isinstance(extensions, str): type_map.load_namespaces(extensions) elif isinstance(extensions, TypeMap): @@ -79,33 +86,34 @@ def get_type_map(**kwargs): return type_map -@docval(*get_docval(get_type_map), - returns="the namespaces loaded from the given file", rtype=tuple, - is_method=False) +@docval(*get_docval(get_type_map), returns="the namespaces loaded from the given file", rtype=tuple, is_method=False) def get_manager(**kwargs): - ''' + """ Get a BuildManager to use for I/O using the given extensions. 
If no extensions are provided, return a BuildManager that uses the core namespace - ''' + """ type_map = get_type_map(**kwargs) return BuildManager(type_map) -@docval({'name': 'namespace_path', 'type': str, 'doc': 'the path to the YAML with the namespace definition'}, - returns="the namespaces loaded from the given file", rtype=tuple, - is_method=False) +@docval( + {"name": "namespace_path", "type": str, "doc": "the path to the YAML with the namespace definition"}, + returns="the namespaces loaded from the given file", + rtype=tuple, + is_method=False, +) def load_namespaces(**kwargs): - ''' + """ Load namespaces from file - ''' - namespace_path = getargs('namespace_path', kwargs) + """ + namespace_path = getargs("namespace_path", kwargs) return __TYPE_MAP.load_namespaces(namespace_path) # load the core namespace i.e. base NWB specification __resources = __get_resources() -if os.path.exists(__resources['namespace_path']): - load_namespaces(__resources['namespace_path']) +if os.path.exists(__resources["namespace_path"]): + load_namespaces(__resources["namespace_path"]) def available_namespaces(): @@ -114,21 +122,23 @@ def available_namespaces(): # a function to register a container classes with the global map -@docval({'name': 'neurodata_type', 'type': str, 'doc': 'the neurodata_type to get the spec for'}, - {'name': 'namespace', 'type': str, 'doc': 'the name of the namespace'}, - {"name": "container_cls", "type": type, "doc": "the class to map to the specified neurodata_type", - 'default': None}, - is_method=False) +@docval( + {"name": "neurodata_type", "type": str, "doc": "the neurodata_type to get the spec for"}, + {"name": "namespace", "type": str, "doc": "the name of the namespace"}, + {"name": "container_cls", "type": type, "doc": "the class to map to the specified neurodata_type", "default": None}, + is_method=False, +) def register_class(**kwargs): """Register an NWBContainer class to use for reading and writing a neurodata_type from a specification If container_cls is not specified, returns a decorator for registering an NWBContainer subclass as the class for neurodata_type in namespace. """ - neurodata_type, namespace, container_cls = getargs('neurodata_type', 'namespace', 'container_cls', kwargs) + neurodata_type, namespace, container_cls = getargs("neurodata_type", "namespace", "container_cls", kwargs) def _dec(cls): __TYPE_MAP.register_container_type(namespace, neurodata_type, cls) return cls + if container_cls is None: return _dec else: @@ -136,29 +146,37 @@ def _dec(cls): # a function to register an object mapper for a container class -@docval({"name": "container_cls", "type": type, - "doc": "the Container class for which the given ObjectMapper class gets used"}, - {"name": "mapper_cls", "type": type, "doc": "the ObjectMapper class to use to map", 'default': None}, - is_method=False) +@docval( + { + "name": "container_cls", + "type": type, + "doc": "the Container class for which the given ObjectMapper class gets used", + }, + {"name": "mapper_cls", "type": type, "doc": "the ObjectMapper class to use to map", "default": None}, + is_method=False, +) def register_map(**kwargs): """Register an ObjectMapper to use for a Container class type If mapper_cls is not specified, returns a decorator for registering an ObjectMapper class as the mapper for container_cls. 
If mapper_cls is specified, register the class as the mapper for container_cls """ - container_cls, mapper_cls = getargs('container_cls', 'mapper_cls', kwargs) + container_cls, mapper_cls = getargs("container_cls", "mapper_cls", kwargs) def _dec(cls): __TYPE_MAP.register_map(container_cls, cls) return cls + if mapper_cls is None: return _dec else: _dec(mapper_cls) -@docval({'name': 'neurodata_type', 'type': str, 'doc': 'the neurodata_type to get the NWBContainer class for'}, - {'name': 'namespace', 'type': str, 'doc': 'the namespace the neurodata_type is defined in'}, - is_method=False) +@docval( + {"name": "neurodata_type", "type": str, "doc": "the neurodata_type to get the NWBContainer class for"}, + {"name": "namespace", "type": str, "doc": "the namespace the neurodata_type is defined in"}, + is_method=False, +) def get_class(**kwargs): """ Parse the YAML file for a given neurodata_type that is a subclass of NWBContainer and automatically generate its @@ -182,40 +200,47 @@ def get_sum(self, a, b): MyClass.get_sum = get_sum """ - neurodata_type, namespace = getargs('neurodata_type', 'namespace', kwargs) + neurodata_type, namespace = getargs("neurodata_type", "namespace", kwargs) return __TYPE_MAP.get_dt_container_cls(neurodata_type, namespace) -from .validation import validate # keeping here as placeholder for import order - - class NWBHDF5IO(_HDF5IO): - - @docval({'name': 'path', 'type': (str, Path), 'doc': 'the path to the HDF5 file', 'default': None}, - {'name': 'mode', 'type': str, - 'doc': 'the mode to open the HDF5 file with, one of ("w", "r", "r+", "a", "w-", "x")', - 'default': 'r'}, - {'name': 'load_namespaces', 'type': bool, - 'doc': 'whether or not to load cached namespaces from given path - not applicable in write mode', - 'default': False}, - {'name': 'manager', 'type': BuildManager, 'doc': 'the BuildManager to use for I/O', 'default': None}, - {'name': 'extensions', 'type': (str, TypeMap, list), - 'doc': 'a path to a namespace, a TypeMap, or a list consisting paths to namespaces and TypeMaps', - 'default': None}, - {'name': 'file', 'type': [h5py.File, 'S3File'], 'doc': 'a pre-existing h5py.File object', 'default': None}, - {'name': 'comm', 'type': "Intracomm", 'doc': 'the MPI communicator to use for parallel I/O', - 'default': None}, - {'name': 'driver', 'type': str, 'doc': 'driver for h5py to use when opening HDF5 file', 'default': None}) + @docval( + {"name": "path", "type": (str, Path), "doc": "the path to the HDF5 file", "default": None}, + { + "name": "mode", + "type": str, + "doc": 'the mode to open the HDF5 file with, one of ("w", "r", "r+", "a", "w-", "x")', + "default": "r", + }, + { + "name": "load_namespaces", + "type": bool, + "doc": "whether or not to load cached namespaces from given path - not applicable in write mode", + "default": False, + }, + {"name": "manager", "type": BuildManager, "doc": "the BuildManager to use for I/O", "default": None}, + { + "name": "extensions", + "type": (str, TypeMap, list), + "doc": "a path to a namespace, a TypeMap, or a list consisting paths to namespaces and TypeMaps", + "default": None, + }, + {"name": "file", "type": [h5py.File, "S3File"], "doc": "a pre-existing h5py.File object", "default": None}, + {"name": "comm", "type": "Intracomm", "doc": "the MPI communicator to use for parallel I/O", "default": None}, + {"name": "driver", "type": str, "doc": "driver for h5py to use when opening HDF5 file", "default": None}, + ) def __init__(self, **kwargs): - path, mode, manager, extensions, load_namespaces, file_obj, comm, driver 
=\ - popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces', 'file', 'comm', 'driver', kwargs) + path, mode, manager, extensions, load_namespaces, file_obj, comm, driver = popargs( + "path", "mode", "manager", "extensions", "load_namespaces", "file", "comm", "driver", kwargs + ) if load_namespaces: if manager is not None: warn("loading namespaces from file - ignoring 'manager'") if extensions is not None: warn("loading namespaces from file - ignoring 'extensions' argument") # namespaces are not loaded when creating an NWBHDF5IO object in write mode - if 'w' in mode or mode == 'x': + if "w" in mode or mode == "x": raise ValueError("cannot load namespaces from file when writing to it") tm = get_type_map() @@ -237,13 +262,20 @@ def __init__(self, **kwargs): manager = get_manager() super().__init__(path, manager=manager, mode=mode, file=file_obj, comm=comm, driver=driver) - @docval({'name': 'src_io', 'type': HDMFIO, - 'doc': 'the HDMFIO object (such as NWBHDF5IO) that was used to read the data to export'}, - {'name': 'nwbfile', 'type': 'NWBFile', - 'doc': 'the NWBFile object to export. If None, then the entire contents of src_io will be exported', - 'default': None}, - {'name': 'write_args', 'type': dict, 'doc': 'arguments to pass to :py:meth:`write_builder`', - 'default': None}) + @docval( + { + "name": "src_io", + "type": HDMFIO, + "doc": "the HDMFIO object (such as NWBHDF5IO) that was used to read the data to export", + }, + { + "name": "nwbfile", + "type": "NWBFile", + "doc": "the NWBFile object to export. If None, then the entire contents of src_io will be exported", + "default": None, + }, + {"name": "write_args", "type": dict, "doc": "arguments to pass to :py:meth:`write_builder`", "default": None}, + ) def export(self, **kwargs): """Export an NWB file to a new NWB file using the HDF5 backend. @@ -277,8 +309,8 @@ def export(self, **kwargs): See :ref:`export` and :ref:`modifying_data` for more information and examples. """ - nwbfile = popargs('nwbfile', kwargs) - kwargs['container'] = nwbfile + nwbfile = popargs("nwbfile", kwargs) + kwargs["container"] = nwbfile super().export(**kwargs) @@ -302,11 +334,15 @@ def export(self, **kwargs): from hdmf.backends.hdf5 import H5DataIO # noqa: F401,E402 from ._version import get_versions # noqa: E402 -__version__ = get_versions()['version'] + +__version__ = get_versions()["version"] del get_versions from ._due import due, BibTeX # noqa: E402 -due.cite(BibTeX(""" + +due.cite( + BibTeX( + """ @article {R{\"u}bel2021.03.13.435173, author = {R{\"u}bel, Oliver and Tritt, Andrew and Ly, Ryan and Dichter, Benjamin K. 
and Ghosh, Satrajit and Niu, Lawrence and Soltesz, Ivan and Svoboda, Karel and Frank, Loren and Bouchard, Kristofer E.}, title = {The Neurodata Without Borders ecosystem for neurophysiological data science}, @@ -319,6 +355,11 @@ def export(self, **kwargs): eprint = {https://www.biorxiv.org/content/early/2021/03/15/2021.03.13.435173.full.pdf}, journal = {bioRxiv} } -"""), description="The Neurodata Without Borders ecosystem for neurophysiological data science", # noqa: E501 - path="pynwb/", version=__version__, cite_module=True) +""" + ), + description="The Neurodata Without Borders ecosystem for neurophysiological data science", # noqa: E501 + path="pynwb/", + version=__version__, + cite_module=True, +) del due, BibTeX From 8b233ab4952c4f63e05c52945493729e37ac47eb Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Thu, 1 Sep 2022 17:35:10 +0000 Subject: [PATCH 04/38] fix flake8 at least --- src/pynwb/__init__.py | 192 ++++++++++++++++-------------------------- 1 file changed, 74 insertions(+), 118 deletions(-) diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 3d98e87b5..cd2f91aea 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -1,6 +1,6 @@ -"""This package will contain functions, classes, and objects +'''This package will contain functions, classes, and objects for reading and writing data in NWB format -""" +''' import os.path from pathlib import Path from copy import deepcopy @@ -16,17 +16,16 @@ from .validation import validate # noqa: F401 -CORE_NAMESPACE = "core" -__core_ns_file_name = "nwb.namespace.yaml" +CORE_NAMESPACE = 'core' +__core_ns_file_name = 'nwb.namespace.yaml' from .spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace # noqa E402 def __get_resources(): from pkg_resources import resource_filename - ret = dict() - ret["namespace_path"] = os.path.join(resource_filename(__name__, "nwb-schema/core"), __core_ns_file_name) + ret['namespace_path'] = os.path.join(resource_filename(__name__, 'nwb-schema/core'), __core_ns_file_name) return ret @@ -46,23 +45,17 @@ def _get_resources(): __TYPE_MAP.merge(hdmf_typemap, ns_catalog=True) -@docval( - { - "name": "extensions", - "type": (str, TypeMap, list), - "doc": "a path to a namespace, a TypeMap, or a list consisting of paths to namespaces and TypeMaps", - "default": None, - }, - returns="TypeMap loaded for the given extension or NWB core namespace", - rtype=tuple, - is_method=False, -) +@docval({'name': 'extensions', 'type': (str, TypeMap, list), + 'doc': 'a path to a namespace, a TypeMap, or a list consisting of paths to namespaces and TypeMaps', + 'default': None}, + returns="TypeMap loaded for the given extension or NWB core namespace", rtype=tuple, + is_method=False) def get_type_map(**kwargs): - """ + ''' Get the TypeMap for the given extensions. 
If no extensions are provided, return the TypeMap for the core namespace - """ - extensions = getargs("extensions", kwargs) + ''' + extensions = getargs('extensions', kwargs) type_map = None if extensions is None: type_map = deepcopy(__TYPE_MAP) @@ -78,7 +71,7 @@ def get_type_map(**kwargs): elif isinstance(ext, TypeMap): type_map.merge(ext) else: - raise ValueError("extensions must be a list of paths to namespace specs or a TypeMaps") + raise ValueError('extensions must be a list of paths to namespace specs or a TypeMaps') elif isinstance(extensions, str): type_map.load_namespaces(extensions) elif isinstance(extensions, TypeMap): @@ -86,34 +79,33 @@ def get_type_map(**kwargs): return type_map -@docval(*get_docval(get_type_map), returns="the namespaces loaded from the given file", rtype=tuple, is_method=False) +@docval(*get_docval(get_type_map), + returns="the namespaces loaded from the given file", rtype=tuple, + is_method=False) def get_manager(**kwargs): - """ + ''' Get a BuildManager to use for I/O using the given extensions. If no extensions are provided, return a BuildManager that uses the core namespace - """ + ''' type_map = get_type_map(**kwargs) return BuildManager(type_map) -@docval( - {"name": "namespace_path", "type": str, "doc": "the path to the YAML with the namespace definition"}, - returns="the namespaces loaded from the given file", - rtype=tuple, - is_method=False, -) +@docval({'name': 'namespace_path', 'type': str, 'doc': 'the path to the YAML with the namespace definition'}, + returns="the namespaces loaded from the given file", rtype=tuple, + is_method=False) def load_namespaces(**kwargs): - """ + ''' Load namespaces from file - """ - namespace_path = getargs("namespace_path", kwargs) + ''' + namespace_path = getargs('namespace_path', kwargs) return __TYPE_MAP.load_namespaces(namespace_path) # load the core namespace i.e. base NWB specification __resources = __get_resources() -if os.path.exists(__resources["namespace_path"]): - load_namespaces(__resources["namespace_path"]) +if os.path.exists(__resources['namespace_path']): + load_namespaces(__resources['namespace_path']) def available_namespaces(): @@ -122,23 +114,21 @@ def available_namespaces(): # a function to register a container classes with the global map -@docval( - {"name": "neurodata_type", "type": str, "doc": "the neurodata_type to get the spec for"}, - {"name": "namespace", "type": str, "doc": "the name of the namespace"}, - {"name": "container_cls", "type": type, "doc": "the class to map to the specified neurodata_type", "default": None}, - is_method=False, -) +@docval({'name': 'neurodata_type', 'type': str, 'doc': 'the neurodata_type to get the spec for'}, + {'name': 'namespace', 'type': str, 'doc': 'the name of the namespace'}, + {"name": "container_cls", "type": type, "doc": "the class to map to the specified neurodata_type", + 'default': None}, + is_method=False) def register_class(**kwargs): """Register an NWBContainer class to use for reading and writing a neurodata_type from a specification If container_cls is not specified, returns a decorator for registering an NWBContainer subclass as the class for neurodata_type in namespace. 
""" - neurodata_type, namespace, container_cls = getargs("neurodata_type", "namespace", "container_cls", kwargs) + neurodata_type, namespace, container_cls = getargs('neurodata_type', 'namespace', 'container_cls', kwargs) def _dec(cls): __TYPE_MAP.register_container_type(namespace, neurodata_type, cls) return cls - if container_cls is None: return _dec else: @@ -146,37 +136,29 @@ def _dec(cls): # a function to register an object mapper for a container class -@docval( - { - "name": "container_cls", - "type": type, - "doc": "the Container class for which the given ObjectMapper class gets used", - }, - {"name": "mapper_cls", "type": type, "doc": "the ObjectMapper class to use to map", "default": None}, - is_method=False, -) +@docval({"name": "container_cls", "type": type, + "doc": "the Container class for which the given ObjectMapper class gets used"}, + {"name": "mapper_cls", "type": type, "doc": "the ObjectMapper class to use to map", 'default': None}, + is_method=False) def register_map(**kwargs): """Register an ObjectMapper to use for a Container class type If mapper_cls is not specified, returns a decorator for registering an ObjectMapper class as the mapper for container_cls. If mapper_cls is specified, register the class as the mapper for container_cls """ - container_cls, mapper_cls = getargs("container_cls", "mapper_cls", kwargs) + container_cls, mapper_cls = getargs('container_cls', 'mapper_cls', kwargs) def _dec(cls): __TYPE_MAP.register_map(container_cls, cls) return cls - if mapper_cls is None: return _dec else: _dec(mapper_cls) -@docval( - {"name": "neurodata_type", "type": str, "doc": "the neurodata_type to get the NWBContainer class for"}, - {"name": "namespace", "type": str, "doc": "the namespace the neurodata_type is defined in"}, - is_method=False, -) +@docval({'name': 'neurodata_type', 'type': str, 'doc': 'the neurodata_type to get the NWBContainer class for'}, + {'name': 'namespace', 'type': str, 'doc': 'the namespace the neurodata_type is defined in'}, + is_method=False) def get_class(**kwargs): """ Parse the YAML file for a given neurodata_type that is a subclass of NWBContainer and automatically generate its @@ -200,47 +182,37 @@ def get_sum(self, a, b): MyClass.get_sum = get_sum """ - neurodata_type, namespace = getargs("neurodata_type", "namespace", kwargs) + neurodata_type, namespace = getargs('neurodata_type', 'namespace', kwargs) return __TYPE_MAP.get_dt_container_cls(neurodata_type, namespace) class NWBHDF5IO(_HDF5IO): - @docval( - {"name": "path", "type": (str, Path), "doc": "the path to the HDF5 file", "default": None}, - { - "name": "mode", - "type": str, - "doc": 'the mode to open the HDF5 file with, one of ("w", "r", "r+", "a", "w-", "x")', - "default": "r", - }, - { - "name": "load_namespaces", - "type": bool, - "doc": "whether or not to load cached namespaces from given path - not applicable in write mode", - "default": False, - }, - {"name": "manager", "type": BuildManager, "doc": "the BuildManager to use for I/O", "default": None}, - { - "name": "extensions", - "type": (str, TypeMap, list), - "doc": "a path to a namespace, a TypeMap, or a list consisting paths to namespaces and TypeMaps", - "default": None, - }, - {"name": "file", "type": [h5py.File, "S3File"], "doc": "a pre-existing h5py.File object", "default": None}, - {"name": "comm", "type": "Intracomm", "doc": "the MPI communicator to use for parallel I/O", "default": None}, - {"name": "driver", "type": str, "doc": "driver for h5py to use when opening HDF5 file", "default": None}, - ) + + 
@docval({'name': 'path', 'type': (str, Path), 'doc': 'the path to the HDF5 file', 'default': None}, + {'name': 'mode', 'type': str, + 'doc': 'the mode to open the HDF5 file with, one of ("w", "r", "r+", "a", "w-", "x")', + 'default': 'r'}, + {'name': 'load_namespaces', 'type': bool, + 'doc': 'whether or not to load cached namespaces from given path - not applicable in write mode', + 'default': False}, + {'name': 'manager', 'type': BuildManager, 'doc': 'the BuildManager to use for I/O', 'default': None}, + {'name': 'extensions', 'type': (str, TypeMap, list), + 'doc': 'a path to a namespace, a TypeMap, or a list consisting paths to namespaces and TypeMaps', + 'default': None}, + {'name': 'file', 'type': [h5py.File, 'S3File'], 'doc': 'a pre-existing h5py.File object', 'default': None}, + {'name': 'comm', 'type': "Intracomm", 'doc': 'the MPI communicator to use for parallel I/O', + 'default': None}, + {'name': 'driver', 'type': str, 'doc': 'driver for h5py to use when opening HDF5 file', 'default': None}) def __init__(self, **kwargs): - path, mode, manager, extensions, load_namespaces, file_obj, comm, driver = popargs( - "path", "mode", "manager", "extensions", "load_namespaces", "file", "comm", "driver", kwargs - ) + path, mode, manager, extensions, load_namespaces, file_obj, comm, driver =\ + popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces', 'file', 'comm', 'driver', kwargs) if load_namespaces: if manager is not None: warn("loading namespaces from file - ignoring 'manager'") if extensions is not None: warn("loading namespaces from file - ignoring 'extensions' argument") # namespaces are not loaded when creating an NWBHDF5IO object in write mode - if "w" in mode or mode == "x": + if 'w' in mode or mode == 'x': raise ValueError("cannot load namespaces from file when writing to it") tm = get_type_map() @@ -262,20 +234,13 @@ def __init__(self, **kwargs): manager = get_manager() super().__init__(path, manager=manager, mode=mode, file=file_obj, comm=comm, driver=driver) - @docval( - { - "name": "src_io", - "type": HDMFIO, - "doc": "the HDMFIO object (such as NWBHDF5IO) that was used to read the data to export", - }, - { - "name": "nwbfile", - "type": "NWBFile", - "doc": "the NWBFile object to export. If None, then the entire contents of src_io will be exported", - "default": None, - }, - {"name": "write_args", "type": dict, "doc": "arguments to pass to :py:meth:`write_builder`", "default": None}, - ) + @docval({'name': 'src_io', 'type': HDMFIO, + 'doc': 'the HDMFIO object (such as NWBHDF5IO) that was used to read the data to export'}, + {'name': 'nwbfile', 'type': 'NWBFile', + 'doc': 'the NWBFile object to export. If None, then the entire contents of src_io will be exported', + 'default': None}, + {'name': 'write_args', 'type': dict, 'doc': 'arguments to pass to :py:meth:`write_builder`', + 'default': None}) def export(self, **kwargs): """Export an NWB file to a new NWB file using the HDF5 backend. @@ -309,8 +274,8 @@ def export(self, **kwargs): See :ref:`export` and :ref:`modifying_data` for more information and examples. 
""" - nwbfile = popargs("nwbfile", kwargs) - kwargs["container"] = nwbfile + nwbfile = popargs('nwbfile', kwargs) + kwargs['container'] = nwbfile super().export(**kwargs) @@ -334,15 +299,11 @@ def export(self, **kwargs): from hdmf.backends.hdf5 import H5DataIO # noqa: F401,E402 from ._version import get_versions # noqa: E402 - -__version__ = get_versions()["version"] +__version__ = get_versions()['version'] del get_versions from ._due import due, BibTeX # noqa: E402 - -due.cite( - BibTeX( - """ +due.cite(BibTeX(""" @article {R{\"u}bel2021.03.13.435173, author = {R{\"u}bel, Oliver and Tritt, Andrew and Ly, Ryan and Dichter, Benjamin K. and Ghosh, Satrajit and Niu, Lawrence and Soltesz, Ivan and Svoboda, Karel and Frank, Loren and Bouchard, Kristofer E.}, title = {The Neurodata Without Borders ecosystem for neurophysiological data science}, @@ -355,11 +316,6 @@ def export(self, **kwargs): eprint = {https://www.biorxiv.org/content/early/2021/03/15/2021.03.13.435173.full.pdf}, journal = {bioRxiv} } -""" - ), - description="The Neurodata Without Borders ecosystem for neurophysiological data science", # noqa: E501 - path="pynwb/", - version=__version__, - cite_module=True, -) +"""), description="The Neurodata Without Borders ecosystem for neurophysiological data science", # noqa: E501 + path="pynwb/", version=__version__, cite_module=True) del due, BibTeX From 1fc72c157ceb811acd854bb86e6c74e10005970c Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Thu, 1 Sep 2022 17:37:37 +0000 Subject: [PATCH 05/38] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9b61988d7..9dde8f3b4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ - Created a GitHub Actions workflow to generate test files for testing backward compatibility. @rly [#1548](https://github.com/NeurodataWithoutBorders/pynwb/pull/1548) - Enhanced docs for ``LabMetaData`` to clarify its usage @oruebel [#1546](https://github.com/NeurodataWithoutBorders/pynwb/pull/1546) +- Enhanced `pynwb.validate` API function to accept a list of file paths as well as the ability to operate on cached namespaces. Also adjusted the validate CLI to directly use the API function. 
@CodyCBakerPhD [#1511](https://github.com/NeurodataWithoutBorders/pynwb/pull/1511) ## PyNWB 2.1.0 (July 6, 2022) From 200b87106dddd74aca9fd75b77ddcbc8e48d8a82 Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Thu, 1 Sep 2022 17:44:26 +0000 Subject: [PATCH 06/38] try moving order --- src/pynwb/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index cd2f91aea..503a45b99 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -14,12 +14,11 @@ from hdmf.build import BuildManager, TypeMap import hdmf.common -from .validation import validate # noqa: F401 - CORE_NAMESPACE = 'core' __core_ns_file_name = 'nwb.namespace.yaml' from .spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace # noqa E402 +from .validation import validate # noqa: F401 def __get_resources(): From ebbb0c8ce0a0aaa3897a4ce627b29c1bd4f47599 Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Thu, 1 Sep 2022 14:01:20 -0400 Subject: [PATCH 07/38] restore validate for -m cli --- src/pynwb/__init__.py | 2 +- src/pynwb/{validation.py => validate.py} | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) rename src/pynwb/{validation.py => validate.py} (94%) diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 503a45b99..e36341cdc 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -18,7 +18,7 @@ __core_ns_file_name = 'nwb.namespace.yaml' from .spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace # noqa E402 -from .validation import validate # noqa: F401 +from .validate import validate # noqa: F401 def __get_resources(): diff --git a/src/pynwb/validation.py b/src/pynwb/validate.py similarity index 94% rename from src/pynwb/validation.py rename to src/pynwb/validate.py index 922a2d220..15028eae0 100644 --- a/src/pynwb/validation.py +++ b/src/pynwb/validate.py @@ -83,19 +83,19 @@ def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManag @docval( { "name": "io", - "type": HDMFIO, # want to do Optional[HDMFIO] but docval complains? + "type": (HDMFIO, type(None)), # want to do Optional[HDMFIO] but docval complains? "doc": "An open IO to an NWB file.", - "default": False, + "default": None, }, # For back-compatability { "name": "namespace", - "type": str, # want to do Optional[str] but docval complains? + "type": (str, type(None)), # want to do Optional[str] but docval complains? "doc": "A specific namespace to validate against.", "default": CORE_NAMESPACE, }, # Argument order is for back-compatability { "name": "paths", - "type": list, # want to do Optional[List[str]] but docval complains? + "type": (list, type(None)), # want to do Optional[List[str]] but docval complains? "doc": "List of NWB file paths.", "default": None, }, @@ -122,6 +122,7 @@ def validate(**kwargs): io, paths, use_cached_namespaces, chosen_namespace, verbose = getargs( "io", "paths", "use_cached_namespaces", "namespace", "verbose", kwargs ) + assert io != paths, "Both 'io' and 'paths' were specified! Please choose only one." 
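The assertion above keeps the two entry points mutually exclusive: callers either hand in an already open IO object (the pre-existing behaviour) or a list of file paths (the new behaviour). A minimal sketch of both calling conventions, using the keyword names and return shapes that the rest of this patch series settles on (the exact signatures may still shift in later commits):

from pynwb import NWBHDF5IO, validate

# Back-compatible form: an open IO object yields a plain list of errors.
with NWBHDF5IO("tests/back_compat/1.1.2_nwbfile.nwb", "r") as io:
    errors = validate(io=io, namespace="core")

# New form: one or more file paths yield (errors, exit-style status).
errors, status = validate(
    paths=["tests/back_compat/1.1.2_nwbfile.nwb"],
    use_cached_namespaces=True,
    verbose=True,
)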
if io is not None: validation_errors = _validate_helper(io=io, namespace=chosen_namespace) @@ -210,7 +211,7 @@ def validate_cli(): print("\n".join(cached_namespaces)) else: validation_errors, validation_status = validate( - paths=args.paths, use_cached_namespace=args.cached_namespace, namespace=args.ns, verbose=True + paths=args.paths, use_cached_namespaces=args.cached_namespace, namespace=args.ns, verbose=True ) _print_errors(validation_errors=validation_errors) status = status or validation_status or (validation_errors is not None and len(validation_errors) > 0) From cac3836c0b9bf25bbdf7b0e3e57436345970e5d0 Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Fri, 2 Sep 2022 12:35:42 -0400 Subject: [PATCH 08/38] fixed --- src/pynwb/validate.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 15028eae0..f2f615dcd 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -58,9 +58,9 @@ def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManag from . import NWBHDF5IO # TODO: modularize to avoid circular import catalog = NamespaceCatalog( - group_spec=NWBGroupSpec, dataset_spec_cls=NWBDatasetSpec, spec_namespace_cls=NWBNamespace + group_spec_cls=NWBGroupSpec, dataset_spec_cls=NWBDatasetSpec, spec_namespace_cls=NWBNamespace ) - namespace_dependencies = NWBHDF5IO.load_namespaces(namepsace_catalog=catalog, path=path) + namespace_dependencies = NWBHDF5IO.load_namespaces(namespace_catalog=catalog, path=path) # Determine which namespaces are the most specific (i.e. extensions) and validate against those candidate_namespaces = set(namespace_dependencies.keys()) @@ -83,19 +83,20 @@ def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManag @docval( { "name": "io", - "type": (HDMFIO, type(None)), # want to do Optional[HDMFIO] but docval complains? + "type": HDMFIO, # want to do Optional[HDMFIO] but docval complains? "doc": "An open IO to an NWB file.", "default": None, }, # For back-compatability { "name": "namespace", - "type": (str, type(None)), # want to do Optional[str] but docval complains? + "type": str, # want to do Optional[str] but docval complains? "doc": "A specific namespace to validate against.", "default": CORE_NAMESPACE, + "allow_none": True, }, # Argument order is for back-compatability { "name": "paths", - "type": (list, type(None)), # want to do Optional[List[str]] but docval complains? + "type": list, # want to do Optional[List[str]] but docval complains? "doc": "List of NWB file paths.", "default": None, }, From 7e55ba9e4be871a97256e30a5be6dd82f78c01b9 Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Fri, 2 Sep 2022 12:36:28 -0400 Subject: [PATCH 09/38] removed comments --- src/pynwb/validate.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index f2f615dcd..07e86d9b0 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -83,20 +83,20 @@ def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManag @docval( { "name": "io", - "type": HDMFIO, # want to do Optional[HDMFIO] but docval complains? + "type": HDMFIO, "doc": "An open IO to an NWB file.", "default": None, }, # For back-compatability { "name": "namespace", - "type": str, # want to do Optional[str] but docval complains? 
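For the cached-namespace path, the helper above is meant to pick out only the most specific cached namespaces (i.e. extensions) and validate against those. A toy illustration of that selection, with hypothetical namespace names and dependencies (not taken from any real file):

namespace_dependencies = {
    "ndx-myextension": {"core": ("NWBFile",), "hdmf-common": ("DynamicTable",)},
    "core": {"hdmf-common": ("DynamicTable",)},
    "hdmf-common": {},
}
# Start from every cached namespace, then drop anything that some other cached
# namespace already depends on; only the leaves (extensions) remain.
candidate_namespaces = set(namespace_dependencies)
for dependencies in namespace_dependencies.values():
    candidate_namespaces -= set(dependencies)
print(sorted(candidate_namespaces))  # ['ndx-myextension']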
+ "type": str, "doc": "A specific namespace to validate against.", "default": CORE_NAMESPACE, "allow_none": True, }, # Argument order is for back-compatability { "name": "paths", - "type": list, # want to do Optional[List[str]] but docval complains? + "type": list, "doc": "List of NWB file paths.", "default": None, }, From 8bf4a3b0977ac0e52cfeebd09dc1da029f44450f Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Fri, 2 Sep 2022 12:41:05 -0400 Subject: [PATCH 10/38] adjust CLI flag --- src/pynwb/validate.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 07e86d9b0..4cf9fffc9 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -200,9 +200,12 @@ def validate_cli(): parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against") feature_parser = parser.add_mutually_exclusive_group(required=False) feature_parser.add_argument( - "--cached-namespace", dest="cached_namespace", action="store_true", help="Use the cached namespace (default)." + "--no-cached-namespace", + dest="no_cached_namespace", + action="store_true", + help="Use the cached namespace (default)." ) - parser.set_defaults(cached_namespace=True) + parser.set_defaults(no_cached_namespace=False) args = parser.parse_args() status = 0 @@ -212,7 +215,7 @@ def validate_cli(): print("\n".join(cached_namespaces)) else: validation_errors, validation_status = validate( - paths=args.paths, use_cached_namespaces=args.cached_namespace, namespace=args.ns, verbose=True + paths=args.paths, use_cached_namespaces=not args.no_cached_namespace, namespace=args.ns, verbose=True ) _print_errors(validation_errors=validation_errors) status = status or validation_status or (validation_errors is not None and len(validation_errors) > 0) From fb5c674a6dc283cf9921e43d76f5bc66d437a755 Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Fri, 2 Sep 2022 12:52:28 -0400 Subject: [PATCH 11/38] debugs --- src/pynwb/validate.py | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 4cf9fffc9..3de1a5a4b 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -92,7 +92,6 @@ def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManag "type": str, "doc": "A specific namespace to validate against.", "default": CORE_NAMESPACE, - "allow_none": True, }, # Argument order is for back-compatability { "name": "paths", @@ -120,62 +119,65 @@ def validate(**kwargs): """Validate NWB file(s) against a namespace or its cached namespaces.""" from . import NWBHDF5IO # TODO: modularize to avoid circular import - io, paths, use_cached_namespaces, chosen_namespace, verbose = getargs( + io, paths, use_cached_namespaces, namespace, verbose = getargs( "io", "paths", "use_cached_namespaces", "namespace", "verbose", kwargs ) assert io != paths, "Both 'io' and 'paths' were specified! Please choose only one." 
if io is not None: - validation_errors = _validate_helper(io=io, namespace=chosen_namespace) + validation_errors = _validate_helper(io=io, namespace=namespace) return validation_errors status = 0 validation_errors = list() + namespaces = [CORE_NAMESPACE] + namespace_message = "PyNWB namespace information" for path in paths: + io_kwargs = dict(path=path, mode="r") + if use_cached_namespaces: cached_namespaces, manager, namespace_dependencies = _get_cached_namespaces_to_validate(path=path) + io_kwargs.update(manager=manager) if any(cached_namespaces): namespaces = cached_namespaces namespace_message = "cached namespace information" else: - namespace_message = "PyNWB namespace information" if verbose: print( - f"The file {path} has no cached namespace information. " "Falling back to {namespace_source}.", + f"The file {path} has no cached namespace information. Falling back to {namespace_message}.", file=sys.stderr, ) - namespaces = [CORE_NAMESPACE] - if chosen_namespace: - if chosen_namespace in namespaces: - namespaces = [chosen_namespace] + if namespace: + if namespace in namespaces: + namespaces = [namespace] elif ( - use_cached_namespaces and chosen_namespace in namespace_dependencies + use_cached_namespaces and namespace in namespace_dependencies ): # validating against a dependency for namespace_dependency in namespace_dependencies: - if chosen_namespace in namespace_dependencies[namespace_dependency]: + if namespace in namespace_dependencies[namespace_dependency]: status = 1 if verbose: print( - f"The namespace '{chosen_namespace}' is included by the namespace " - "'{namespace_dependency}'. Please validate against that namespace instead.", + f"The namespace '{namespace}' is included by the namespace " + f"'{namespace_dependency}'. Please validate against that namespace instead.", file=sys.stderr, ) else: status = 1 if verbose: print( - f"The namespace '{chosen_namespace}' could not be found in {namespace_message} as only " + f"The namespace '{namespace}' could not be found in {namespace_message} as only " f"{namespaces} is present.", file=sys.stderr, ) - with NWBHDF5IO(path, mode="r", manager=manager) as io: - for namespace in namespaces: + with NWBHDF5IO(**io_kwargs) as io: + for validation_namespace in namespaces: if verbose: - print(f"Validating {path} against {namespace_message} using namespace '{namespace}'.") - validation_errors += _validate_helper(io=io, namespace=namespace) + print(f"Validating {path} against {namespace_message} using namespace '{validation_namespace}'.") + validation_errors += _validate_helper(io=io, namespace=validation_namespace) return validation_errors, status @@ -206,6 +208,7 @@ def validate_cli(): help="Use the cached namespace (default)." 
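The same flags can be exercised end to end through the module entry point, which is how the tests later in this series drive it. A rough sketch (file path reused from the existing back-compat fixtures; flag names as defined in this argparse block):

import subprocess
import sys

# Default behaviour: validate against whatever namespaces are cached in the file.
subprocess.run([sys.executable, "-m", "pynwb.validate",
                "tests/back_compat/1.1.2_nwbfile.nwb"], check=False)

# Ignore the cached namespaces and pin validation to the core namespace instead.
subprocess.run([sys.executable, "-m", "pynwb.validate",
                "--no-cached-namespace", "--ns", "core",
                "tests/back_compat/1.1.2_nwbfile.nwb"], check=False)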
) parser.set_defaults(no_cached_namespace=False) + parser.set_defaults(ns=CORE_NAMESPACE) args = parser.parse_args() status = 0 From d6f029fdae6ea863691be32d041c40329f1a301b Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Fri, 2 Sep 2022 16:55:40 +0000 Subject: [PATCH 12/38] fix flake8 --- src/pynwb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index e36341cdc..1c60ff8e6 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -18,7 +18,7 @@ __core_ns_file_name = 'nwb.namespace.yaml' from .spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace # noqa E402 -from .validate import validate # noqa: F401 +from .validate import validate # noqa: F401, E402 def __get_resources(): From d949296bb87ff650b435e56b1d4304de8aadae66 Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Fri, 2 Sep 2022 17:01:32 +0000 Subject: [PATCH 13/38] fix import structure for tox --- tests/back_compat/test_import_structure.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/back_compat/test_import_structure.py b/tests/back_compat/test_import_structure.py index 56b34b50b..fd95c9856 100644 --- a/tests/back_compat/test_import_structure.py +++ b/tests/back_compat/test_import_structure.py @@ -24,7 +24,6 @@ def test_outer_import_structure(self): "ProcessingModule", "TimeSeries", "TypeMap", - "ValidatorMap", "_HDF5IO", "__NS_CATALOG", "__TYPE_MAP", @@ -77,7 +76,7 @@ def test_outer_import_structure(self): "register_map", "retinotopy", "spec", - "validation", + "testing", "validate", "warn", ] From f40352592f44d38c891e32c713581d19c46cdf0a Mon Sep 17 00:00:00 2001 From: rly Date: Wed, 14 Sep 2022 16:47:27 -0700 Subject: [PATCH 14/38] Fix tests --- CHANGELOG.md | 13 +++++++++---- src/pynwb/validate.py | 4 ++-- tests/validation/test_validate.py | 25 ++++++++++++++++++------- 3 files changed, 29 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6131046bc..a6d740f0d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,17 @@ # PyNWB Changelog +## PyNWB 2.2.0 (Upcoming) + +### Enhancements and minor changes + +- Enhanced `pynwb.validate` API function to accept a list of file paths as well as the ability to operate on cached + namespaces. Also adjusted the validate CLI to directly use the API function. @CodyCBakerPhD + [#1511](https://github.com/NeurodataWithoutBorders/pynwb/pull/1511) + ## PyNWB 2.1.1 (September 1, 2022) ### Documentation and tutorial enhancements: -- Added support for explicit ordering of sphinx gallery tutorials in the docs. @oruebel +- Added support for explicit ordering of sphinx gallery tutorials in the docs. @oruebel [#1504](https://github.com/NeurodataWithoutBorders/pynwb/pull/1504), @bdichter [#1495](https://github.com/NeurodataWithoutBorders/pynwb/pull/1495) - Added developer guide on how to create a new tutorial. @oruebel @@ -33,9 +41,6 @@ - Enhanced docs for ``LabMetaData`` to clarify its usage @oruebel [#1546](https://github.com/NeurodataWithoutBorders/pynwb/pull/1546) - Updated requirements, including allowing numpy 1.23. @rly [#1550](https://github.com/NeurodataWithoutBorders/pynwb/pull/1550) -- Enhanced docs for ``LabMetaData`` to clarify its usage. @oruebel - [#1546](https://github.com/NeurodataWithoutBorders/pynwb/pull/1546) -- Enhanced `pynwb.validate` API function to accept a list of file paths as well as the ability to operate on cached namespaces. Also adjusted the validate CLI to directly use the API function. 
@CodyCBakerPhD [#1511](https://github.com/NeurodataWithoutBorders/pynwb/pull/1511) ## PyNWB 2.1.0 (July 6, 2022) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 3de1a5a4b..c7220d830 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -130,9 +130,9 @@ def validate(**kwargs): status = 0 validation_errors = list() - namespaces = [CORE_NAMESPACE] - namespace_message = "PyNWB namespace information" for path in paths: + namespaces = [CORE_NAMESPACE] + namespace_message = "PyNWB namespace information" io_kwargs = dict(path=path, mode="r") if use_cached_namespaces: diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 327034e59..a14741d96 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -28,14 +28,15 @@ def test_validate_file_no_cache(self): r".*UserWarning: No cached namespaces found in tests/back_compat/1\.0\.2_nwbfile\.nwb\s*" r"warnings.warn\(msg\)\s*" r"The file tests/back_compat/1\.0\.2_nwbfile\.nwb has no cached namespace information\. " - r"Falling back to pynwb namespace information\.\s*" + r"Falling back to PyNWB namespace information\.\s*" ) self.assertRegex(result.stderr.decode('utf-8'), stderr_regex) stdout_regex = re.compile( - r"Validating tests/back_compat/1\.0\.2_nwbfile\.nwb against pynwb namespace information using namespace " + r"Validating tests/back_compat/1\.0\.2_nwbfile\.nwb against PyNWB namespace information using namespace " r"'core'\.\s* - no errors found\.\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + # self.assertEqual(result.stdout.decode('utf-8'), "") def test_validate_file_no_cache_bad_ns(self): """Test that validating a file with no cached spec against a specified, unknown namespace fails.""" @@ -46,13 +47,18 @@ def test_validate_file_no_cache_bad_ns(self): r".*UserWarning: No cached namespaces found in tests/back_compat/1\.0\.2_nwbfile\.nwb\s*" r"warnings.warn\(msg\)\s*" r"The file tests/back_compat/1\.0\.2_nwbfile\.nwb has no cached namespace information\. 
" - r"Falling back to pynwb namespace information\.\s*" - r"The namespace 'notfound' could not be found in pynwb namespace information as only " + r"Falling back to PyNWB namespace information\.\s*" + r"The namespace 'notfound' could not be found in PyNWB namespace information as only " r"\['core'\] is present\.\s*" ) self.assertRegex(result.stderr.decode('utf-8'), stderr_regex) - self.assertEqual(result.stdout.decode('utf-8'), '') + stdout_regex = re.compile( + r"Validating tests/back_compat/1\.0\.2_nwbfile\.nwb against PyNWB namespace information using namespace " + r"'core'\.\s*- no errors found\.\s*" + ) + + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) def test_validate_file_cached(self): """Test that validating a file with cached spec against its cached namespace succeeds.""" @@ -77,7 +83,12 @@ def test_validate_file_cached_bad_ns(self): ) self.assertRegex(result.stderr.decode('utf-8'), stderr_regex) - self.assertEqual(result.stdout.decode('utf-8'), '') + stdout_regex = re.compile( + r"Validating tests/back_compat/1\.1\.2_nwbfile\.nwb against cached namespace information using namespace " + r"'core'\.\s*- no errors found\.\s*" + ) + + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) def test_validate_file_cached_hdmf_common(self): """Test that validating a file with cached spec against the hdmf-common namespace fails.""" @@ -98,7 +109,7 @@ def test_validate_file_cached_ignore(self): self.assertEqual(result.stderr.decode('utf-8'), '') stdout_regex = re.compile( - r"Validating tests/back_compat/1\.1\.2_nwbfile\.nwb against pynwb namespace information using namespace " + r"Validating tests/back_compat/1\.1\.2_nwbfile\.nwb against PyNWB namespace information using namespace " r"'core'\.\s* - no errors found\.\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) From ccf22823a7efdec93eb3af1c99b0f1da84d73ee7 Mon Sep 17 00:00:00 2001 From: rly Date: Wed, 14 Sep 2022 16:48:12 -0700 Subject: [PATCH 15/38] Undo changelog change --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a6d740f0d..b6fae5bbd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,9 +38,10 @@ [#1516](https://github.com/NeurodataWithoutBorders/pynwb/pull/1516) - Created a GitHub Actions workflow to generate test files for testing backward compatibility. @rly [#1548](https://github.com/NeurodataWithoutBorders/pynwb/pull/1548) -- Enhanced docs for ``LabMetaData`` to clarify its usage @oruebel [#1546](https://github.com/NeurodataWithoutBorders/pynwb/pull/1546) - Updated requirements, including allowing numpy 1.23. 
@rly [#1550](https://github.com/NeurodataWithoutBorders/pynwb/pull/1550) +- Enhanced docs for ``LabMetaData`` to clarify its usage @oruebel + [#1546](https://github.com/NeurodataWithoutBorders/pynwb/pull/1546) ## PyNWB 2.1.0 (July 6, 2022) From e82ac2c719b8de79f55b6a721e552ed3d1b8cd2c Mon Sep 17 00:00:00 2001 From: rly Date: Wed, 14 Sep 2022 17:04:50 -0700 Subject: [PATCH 16/38] Fix tests --- src/pynwb/validate.py | 18 +++++++++++------- tests/validation/test_validate.py | 15 ++------------- 2 files changed, 13 insertions(+), 20 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index c7220d830..c5b0171fc 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -131,7 +131,7 @@ def validate(**kwargs): status = 0 validation_errors = list() for path in paths: - namespaces = [CORE_NAMESPACE] + namespaces_to_validate = [CORE_NAMESPACE] namespace_message = "PyNWB namespace information" io_kwargs = dict(path=path, mode="r") @@ -140,7 +140,7 @@ def validate(**kwargs): io_kwargs.update(manager=manager) if any(cached_namespaces): - namespaces = cached_namespaces + namespaces_to_validate = cached_namespaces namespace_message = "cached namespace information" else: if verbose: @@ -150,8 +150,8 @@ def validate(**kwargs): ) if namespace: - if namespace in namespaces: - namespaces = [namespace] + if namespace in namespaces_to_validate: + namespaces_to_validate = [namespace] elif ( use_cached_namespaces and namespace in namespace_dependencies ): # validating against a dependency @@ -169,12 +169,15 @@ def validate(**kwargs): if verbose: print( f"The namespace '{namespace}' could not be found in {namespace_message} as only " - f"{namespaces} is present.", + f"{namespaces_to_validate} is present.", file=sys.stderr, ) + if status == 1: + continue + with NWBHDF5IO(**io_kwargs) as io: - for validation_namespace in namespaces: + for validation_namespace in namespaces_to_validate: if verbose: print(f"Validating {path} against {namespace_message} using namespace '{validation_namespace}'.") validation_errors += _validate_helper(io=io, namespace=validation_namespace) @@ -220,7 +223,8 @@ def validate_cli(): validation_errors, validation_status = validate( paths=args.paths, use_cached_namespaces=not args.no_cached_namespace, namespace=args.ns, verbose=True ) - _print_errors(validation_errors=validation_errors) + if not validation_status: + _print_errors(validation_errors=validation_errors) status = status or validation_status or (validation_errors is not None and len(validation_errors) > 0) sys.exit(status) diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index a14741d96..c4b20c381 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -36,7 +36,6 @@ def test_validate_file_no_cache(self): r"Validating tests/back_compat/1\.0\.2_nwbfile\.nwb against PyNWB namespace information using namespace " r"'core'\.\s* - no errors found\.\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) - # self.assertEqual(result.stdout.decode('utf-8'), "") def test_validate_file_no_cache_bad_ns(self): """Test that validating a file with no cached spec against a specified, unknown namespace fails.""" @@ -53,12 +52,7 @@ def test_validate_file_no_cache_bad_ns(self): ) self.assertRegex(result.stderr.decode('utf-8'), stderr_regex) - stdout_regex = re.compile( - r"Validating tests/back_compat/1\.0\.2_nwbfile\.nwb against PyNWB namespace information using namespace " - r"'core'\.\s*- no errors found\.\s*" - ) - - 
self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + self.assertEqual(result.stdout.decode('utf-8'), '') def test_validate_file_cached(self): """Test that validating a file with cached spec against its cached namespace succeeds.""" @@ -83,12 +77,7 @@ def test_validate_file_cached_bad_ns(self): ) self.assertRegex(result.stderr.decode('utf-8'), stderr_regex) - stdout_regex = re.compile( - r"Validating tests/back_compat/1\.1\.2_nwbfile\.nwb against cached namespace information using namespace " - r"'core'\.\s*- no errors found\.\s*" - ) - - self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + self.assertEqual(result.stdout.decode('utf-8'), '') def test_validate_file_cached_hdmf_common(self): """Test that validating a file with cached spec against the hdmf-common namespace fails.""" From be4c65e5ff79dbefe937970a778bff21bb80dc66 Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Tue, 20 Sep 2022 11:29:12 -0400 Subject: [PATCH 17/38] small debug --- src/pynwb/validate.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index c5b0171fc..88771fe1f 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -152,11 +152,9 @@ def validate(**kwargs): if namespace: if namespace in namespaces_to_validate: namespaces_to_validate = [namespace] - elif ( - use_cached_namespaces and namespace in namespace_dependencies - ): # validating against a dependency + elif use_cached_namespaces and namespace in namespace_dependencies: # validating against a dependency for namespace_dependency in namespace_dependencies: - if namespace in namespace_dependencies[namespace_dependency]: + if namespace != "core" and namespace in namespace_dependencies[namespace_dependency]: status = 1 if verbose: print( @@ -208,7 +206,7 @@ def validate_cli(): "--no-cached-namespace", dest="no_cached_namespace", action="store_true", - help="Use the cached namespace (default)." 
+ help="Use the cached namespace (default).", ) parser.set_defaults(no_cached_namespace=False) parser.set_defaults(ns=CORE_NAMESPACE) From 881e44d92df19d6ad9244d337e49c27c2f11beb1 Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 20 Sep 2022 15:58:23 -0700 Subject: [PATCH 18/38] Add test to validate against file with extension --- .github/workflows/generate_test_files.yml | 1 + src/pynwb/testing/make_test_files.py | 57 +++++++++++++++++++-- tests/validation/test_validate.py | 62 ++++++++++++++++++++++- 3 files changed, 116 insertions(+), 4 deletions(-) diff --git a/.github/workflows/generate_test_files.yml b/.github/workflows/generate_test_files.yml index db8a0521b..52f078c29 100644 --- a/.github/workflows/generate_test_files.yml +++ b/.github/workflows/generate_test_files.yml @@ -1,6 +1,7 @@ name: Generate test files on: workflow_dispatch: + pull_request: # TEMP REMOVE ME jobs: gen-test-files: diff --git a/src/pynwb/testing/make_test_files.py b/src/pynwb/testing/make_test_files.py index 052b8501e..301381688 100644 --- a/src/pynwb/testing/make_test_files.py +++ b/src/pynwb/testing/make_test_files.py @@ -1,9 +1,10 @@ +from datetime import datetime import numpy as np from pathlib import Path - -from datetime import datetime -from pynwb import NWBFile, NWBHDF5IO, __version__, TimeSeries +from pynwb import NWBFile, NWBHDF5IO, __version__, TimeSeries, get_class, load_namespaces from pynwb.image import ImageSeries +from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBGroupSpec, NWBAttributeSpec + # pynwb 1.0.2 should be installed with hdmf 1.0.3 # pynwb 1.0.3 should be installed with hdmf 1.0.5 @@ -149,12 +150,61 @@ def _make_imageseries_nonmatch_starting_frame(): _write(test_name, nwbfile) +def _make_empty_with_extension(): + ns_builder = NWBNamespaceBuilder( + doc="An NWB test extension", + name="ndx-testextension", + version="0.1.0", + author="PyNWB Test File Generator", + contact="my_email@example.com", + ) + + ns_builder.include_type('TimeSeries', namespace='core') + tetrode_series = NWBGroupSpec( + neurodata_type_def='TimeSeriesWithID', + neurodata_type_inc='TimeSeries', + doc=('An extension of TimeSeries to include an ID.'), + attributes=[ + NWBAttributeSpec( + name='id', + doc='The time series ID.', + dtype='int32' + ), + ], + ) + + new_data_types = [tetrode_series] + + # export the spec to yaml files in the current directory + export_spec(ns_builder, new_data_types, output_dir=".") + + nwbfile = NWBFile(session_description='ADDME', + identifier='ADDME', + session_start_time=datetime.now().astimezone()) + + load_namespaces("ndx-testextension.namespace.yaml") # load from the current directory + TimeSeriesWithID = get_class("TimeSeriesWithID", "ndx-testextension") + ts = TimeSeriesWithID( + name="test_ts", + data=[1., 2., 3.], + description="ADDME", + unit="ADDME", + rate=1., + id=1, + ) + nwbfile.add_acquisition(ts) + test_name = 'nwbfile_with_extension' + _write(test_name, nwbfile) + + if __name__ == '__main__': # install these versions of PyNWB and run this script to generate new files # python src/pynwb/testing/make_test_files.py # files will be made in src/pynwb/testing/ # files should be moved to tests/back_compat/ + # NOTE: this script is run in the GitHub Actions workflow generate_test_files.yml + if __version__ == '1.1.2': _make_empty() _make_str_experimenter() @@ -170,3 +220,4 @@ def _make_imageseries_nonmatch_starting_frame(): _make_imageseries_no_data() _make_imageseries_non_external_format() _make_imageseries_nonmatch_starting_frame() + _make_empty_with_extension() diff 
--git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index c4b20c381..38803b804 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -18,6 +18,10 @@ class TestValidateScript(TestCase): # simplify collecting and merging coverage data from multiple subprocesses. if "-p" # is not used, then each "coverage run" will overwrite the .coverage file from a # previous "coverage run". + # NOTE the run_coverage.yml GitHub Action runs "python -m coverage combine" to + # combine the individual coverage reprots into one .coverage file. + + # TODO test validation on files with cached extensions def test_validate_file_no_cache(self): """Test that validating a file with no cached spec against the core namespace succeeds.""" @@ -79,6 +83,44 @@ def test_validate_file_cached_bad_ns(self): self.assertEqual(result.stdout.decode('utf-8'), '') + def test_validate_file_cached_extension(self): + """Test that validating a file with cached spec against the cached namespaces succeeds.""" + result = subprocess.run(["coverage", "run", "-p", "-m", "pynwb.validate", + "tests/back_compat/2.1.0_nwbfile_with_extension.nwb"], capture_output=True) + + self.assertEqual(result.stderr.decode('utf-8'), '') + + stdout_regex = re.compile( + r"Validating tests/back_compat/2\.1\.0_nwbfile_with_extension\.nwb against cached namespace information " + r"using namespace 'ndx-testextension'\.\s* - no errors found\.\s*") + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + + def test_validate_file_cached_extension_pass_ns(self): + """Test that validating a file with cached spec against the extension namespace succeeds.""" + result = subprocess.run(["coverage", "run", "-p", "-m", "pynwb.validate", + "tests/back_compat/2.1.0_nwbfile_with_extension.nwb", + "--ns", "ndx-testextension"], capture_output=True) + + self.assertEqual(result.stderr.decode('utf-8'), '') + + stdout_regex = re.compile( + r"Validating tests/back_compat/2\.1\.0_nwbfile_with_extension\.nwb against cached namespace information " + r"using namespace 'ndx-testextension'\.\s* - no errors found\.\s*") + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + + def test_validate_file_cached_core(self): # TODO determine correct behavior + """Test that validating a file with cached spec against the core namespace succeeds.""" + result = subprocess.run(["coverage", "run", "-p", "-m", "pynwb.validate", + "tests/back_compat/2.1.0_nwbfile_with_extension.nwb", + "--ns", "core"], capture_output=True) + + self.assertEqual(result.stderr.decode('utf-8'), '') + + stdout_regex = re.compile( + r"Validating tests/back_compat/2\.1\.0_nwbfile_with_extension\.nwb against cached namespace information " + r"using namespace 'core'\.\s* - no errors found\.\s*") + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + def test_validate_file_cached_hdmf_common(self): """Test that validating a file with cached spec against the hdmf-common namespace fails.""" result = subprocess.run(["coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/1.1.2_nwbfile.nwb", @@ -107,7 +149,7 @@ class TestValidateFunction(TestCase): # 1.0.2_nwbfile.nwb has no cached specifications # 1.0.3_nwbfile.nwb has cached "core" specification - # 1.1.2_nwbfile.nwb has cached "core" and "hdmf-common" specifications + # 1.1.2_nwbfile.nwb has cached "core" and "hdmf-common" specificaitions def test_validate_file_no_cache(self): """Test that validating a file with no cached spec against the core namespace succeeds.""" @@ -127,6 +169,24 
@@ def test_validate_file_cached(self): errors = validate(io) self.assertEqual(errors, []) + def test_validate_file_cached_extension(self): + """Test that validating a file with cached spec against its cached namespaces succeeds.""" + with NWBHDF5IO('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'r', load_namespaces=True) as io: + errors = validate(io) + self.assertEqual(errors, []) + + def test_validate_file_cached_extension_pass_ns(self): + """Test that validating a file with cached extension spec against the extension namespace succeeds.""" + with NWBHDF5IO('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'r', load_namespaces=True) as io: + errors = validate(io, 'ndx-testextension') + self.assertEqual(errors, []) + + def test_validate_file_cached_core(self): # TODO determine correct behavior + """Test that validating a file with cached extension spec against the core namespace succeeds.""" + with NWBHDF5IO('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'r', load_namespaces=True) as io: + errors = validate(io, 'core') + self.assertEqual(errors, []) + def test_validate_file_cached_bad_ns(self): """Test that validating a file with cached spec against a specified, unknown namespace fails.""" with NWBHDF5IO('tests/back_compat/1.1.2_nwbfile.nwb', 'r') as io: From 6efe21fd936d44752d5b805cb41cefb65dc70fe8 Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 20 Sep 2022 16:09:41 -0700 Subject: [PATCH 19/38] Add test file --- .github/workflows/generate_test_files.yml | 1 - .../2.1.0_nwbfile_with_extension.nwb | Bin 0 -> 184152 bytes 2 files changed, 1 deletion(-) create mode 100644 tests/back_compat/2.1.0_nwbfile_with_extension.nwb diff --git a/.github/workflows/generate_test_files.yml b/.github/workflows/generate_test_files.yml index 52f078c29..db8a0521b 100644 --- a/.github/workflows/generate_test_files.yml +++ b/.github/workflows/generate_test_files.yml @@ -1,7 +1,6 @@ name: Generate test files on: workflow_dispatch: - pull_request: # TEMP REMOVE ME jobs: gen-test-files: diff --git a/tests/back_compat/2.1.0_nwbfile_with_extension.nwb b/tests/back_compat/2.1.0_nwbfile_with_extension.nwb new file mode 100644 index 0000000000000000000000000000000000000000..4471a57e6c3cdfdf5876dcf0f43e2ca356771447 GIT binary patch literal 184152 zcmeFaOKfD>nI^`PTHV#%>S?uSY^yysaKSM^W{aO?mXm~7mrHug@-qcdI=b^C;SPL5{cw?bir3*;Kl}F|*dH(1?~m;F$J%`1rG?+de+vs=x94B;{{NvqII`!v_io=_ zSoqR{_wT>$>xIAmivIPsJ-EB6e|+o>=D$DJ5P19c=3OrE14FdGwAKF2clD1yFuW?` z-&Yqlqyh2pW&Ce#*w34tUejJ*_z&^N%lOan_sHwLQn0|9tTv;Q0FkTe?%(j|_htBBOjb+8Yf6KECZ=T=MtMo%gn00#$ds%M5QF z-^+OHbb9#@bwF=-*FN|EzCM@3h(CW;IxppE`uxB5da=JF`~1<(@*AJc&o2RjDIM^8 z>}?Vd{QR{6>O4B|y{~F}-+%oy|M!amfifM)Y~eq)`@!MRYK$8EdpK$gM(yt5-l%psZ`!*;K$-`9=f`>d`0=DS~APz-u$;kTrp-oe2zAJxyZ_`deSe-*F36!2=} zcjI?ee>vTI{XeMs_KW_^xmnxfg_pnQ5XtbzNB@a_;u$8};GcP9)6W#&;KpZ>=*a&2Y$HMcYvS%{lACLe~gFz`S0TMf0umzd-(kSVG_SQ!5{eH z`+tRh{4+(tf3k>w{u}&8(-#-Oo z;otN91MnsO`8V+SufDag@U?%Ha|p}=ftQT$d+8;;vv|kw$A6dkzCk|h zb;hHM@_qkX?B^@O_l>)4aD594e~59(oAE!Lfl3Pccagu~GxCk{oi%XD!rNb-@BOLq zy)LO>F#JS*Rp&iujB-Aoo!|W)n`6!wTQ|ib=X;bpa1K_w3;(!Yuq;P?bpebDeyD#` ze*O43??S<2Kj*=nyqk|tT!{UBRr+h?!{(sf4~TGHe<`0xz9s5Mzl^_MUTC*;VqXLJ zbqBsD4}4$!cbv!X$UM4@<9yg}G-Xrp#(DD~&*|&poML~!ul?oYL61W4-sr54PS5L) zyW;)*W}!cwUvK}1d2_Vq%R9Hf|1{~3I8dhtF6+fq|Fcl52S!)(pD!>3%K9Itaz8e` zs)NSQ*j{Mp>vdiOSd>m9U!+gE>vIW$16Vc-*7{nG7a&0ccVkk<33+@F6YJ?2lhb-GF$ zM1DHTyHTyQK$^-Qo8H1M)R8;+pq&rD-yR)pZ~1&kH~I6;>-+g@tyd0KU%URz*H&-j zuf4w7cs*~f9&8+Z^OfeS2e0PWZ?K)?f5s2^xwdib+UmwPSFdgCUb}vCl@m?spl7FZEwsT~$>CyTr^p~@RB-h6-$z2@T0#m#Qk*dLAtjpiuh*BS5B 
z2cg`qo1AdCT~9ZmY9!n=%FYJI<6*6WO6$ROW6HLld-dSjjECLmiZX2*pHZ!iC;sKn zeg>CTFSsx(C6(G?oOec@EBkjM6WOGcBfOP5Ye}F5w-I6F8%bkfIcLM*Bp<8bhD;SC z;r8YyRFg{>2z3@zPd;jdH~Y+)(y& z;RvGutqyP5J7Sxx$ewzTk|c_WlR~uIATO=H^8?{*e%&|D9pU9kWVds+am-z7Vb1pQobdXIvC4A=}m{*cm&!()Pk(x}#XwmM{wf=2`2q~}0*s?2_CoJkO_BMBax%urkcGN4E3vVk& zvQlD<1Pnc;SWXRFI@EyccrOenvQnGTa|bw3L~zVaYK!s<`(nF3@Q zRH(byi3?V24gh$dc*6qA&BJZ>@u5R^yoSD*00jm_$J^foa~SdLqHF43XX-w z3#!`KsETW}lp1*|%**e*ti7@R`E0FRj$uFgImhEZG2|4&Th$l%*e$knBXTOk5OWUW zxyg!@*+9Q`8g-oI!>jvb|GY{@`AIESv~)4gGr@?m`Ppy-?HoVq@PTLD&ieMp+>ywHU;wLks^%mnm1}cznB(S+GuRLtg*K# zG};scP8Hl#qGAGpEJUfklPI8HFf00H>8n{2v{*CReai?&|e|YO@+X}tJ)YlZsogH z0oLR;zt#OfU@Lq@CJD!*qr$~_aH;OAr=XfvmE@}PXIOfLfM|@K_|}LU=;GGaqM6+V z&wDJXeT+IP6|WiwgUW%opw?d_7-{5pC!c#>{l$U5oVrlV)(F56R!m9KG$K6k1E(0( zhT^d2ba9MzgmYnIYsx-dJ>g{5(6jfPl_OS;x8P{x~;oXm1CY8?v*kf zYc?2vC2Ribx@UTpa2Ydo=ZNZWQlf^&t&0S2^1?AA#8Jw!AYtU$WC*TQSuxOh-bV~+ zDxFt%S(=S6bz5I{^P089pbtF#XX)xsZkq>G;CT{(o(W|BToum{AK}ua+=B8K3jg1{ z_}ycDDb_F)7lS!sW+JQ36&h0_P^e^%lKg{GoX}fed!O{q)eX*%a~|7h&VFawnPi<+ z_K>jfN)Tq}iGQ7$I^R8c_HajygtS^QpCYcrQ2@jQxt9ZURGG#M*s)2)XmFxaGPFWe zFs5ksprC)c&G!h9f>G2cw=PvO5H4A1!FjFBCTVw$2_w5+n*6N!w_M8>l z)&36SmWe=SnsR39I5Y@|Is*t6NmuF6H65Hv zUc*k~$NGbv0A04aTax{hVmi03v>(6^1y71sK)}>y1PMk!{)Mt@cO-Kj*-;$@c1(8C zg-3qv5kY5hW252cm3oq<_XRIUTpyOd)i?7ZsS!AlIY-Ndv{ zeDx%=05j6ta6q`~!AiziOS6l9zOvp~$zHJm&rVmK3rAf!Seaz~!NpX+@aS2#@;s3f z_96)mKm=pro4$kUp|H14ej<&=G^4M0`3=(}f~6~j^DM*z3%w>lOf@p6n@M4upe-Ha zR3f=pdGTnDMKA?XhaOudU{a%I>JQ;dUkG$`m z{ha8Ct{|6cZ>t&IRWo{2au{aMF}!G4R^41sJ7=0?9-^^O&IEoI*iRNzu$16CBDEY@D^c$q3ArB$Shx>BgE?1&@HC2go+vdTIupdXQisrZz?kqwQ{={k zur4B5Vg|YiES1er~&$3X=+~7@}AorQv3{+d@h&yKs8y=?&Jsqg*>=^esJ|% zK38!EvnQa?ZYyxe_(+w+<^^-g!5!kWk;@s+x}}a@vtjal(0q#@3b8#gIrOnJ;(ozo z5Zh#Y!y4N|G+U_%|B3?qXi~kGQeK!ZBxvXCkfJE{lZJ8XMeT4oMiTBU zb9!xei-s8%8H3+BSpMRyKY{I@sq#2KJQ)%nJL&9S+|{Ug&uzpR*!!^6rrtvx`~Y9Zyc^?$yUR9b0CKBs>YXQ zC=1r3F;R7C^{+N`cX4yK_~bc(i{aGv86yuftCU-1zHNU6lf{XQlOr&0%o%qGortrpx8=^e!gzxizo__aa_W^P%%B8J;@)q&FAN{NI<<{Id=KFcyzu(_} u_|xy@e&~Gl!P07AjIaMQ1a2*0l*oVo^Y1seTX6di_VWGl)z{xkzWRSU+`Yp9 literal 0 HcmV?d00001 From ce89646b19616bfcee35cc49f558e38db1062c26 Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 20 Sep 2022 16:14:17 -0700 Subject: [PATCH 20/38] Fix test read --- tests/back_compat/test_read.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/back_compat/test_read.py b/tests/back_compat/test_read.py index 568a53c81..65816488a 100644 --- a/tests/back_compat/test_read.py +++ b/tests/back_compat/test_read.py @@ -2,7 +2,7 @@ from pathlib import Path import warnings -from pynwb import NWBHDF5IO, validate, TimeSeries +from pynwb import NWBHDF5IO, load_namespaces, validate, TimeSeries from pynwb.image import ImageSeries from pynwb.testing import TestCase @@ -42,7 +42,7 @@ def test_read(self): for f in nwb_files: with self.subTest(file=f.name): with warnings.catch_warnings(record=True) as warnings_on_read: - with NWBHDF5IO(str(f), 'r') as io: + with NWBHDF5IO(str(f), 'r', load_namespaces=True) as io: errors = validate(io) io.read() for w in warnings_on_read: From 71d60e4c3299dd7986e9919ed1daf8c5888959ec Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 20 Sep 2022 16:16:08 -0700 Subject: [PATCH 21/38] Fix conda CircleCI issue --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ec86e28ee..99775dbda 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -97,7 +97,7 @@ references: name: Configure conda command: | pip install --upgrade pip - conda 
update -n base -c defaults conda + conda install -n base -c defaults conda conda config --set always_yes yes --set changeps1 no conda config --add channels conda-forge conda install python=$CONDA_PYTHON_VER From d19d951f1220c48c16c40a742dd98f5bf5dad495 Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 20 Sep 2022 16:18:40 -0700 Subject: [PATCH 22/38] Fix flake8 --- tests/back_compat/test_read.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/back_compat/test_read.py b/tests/back_compat/test_read.py index 65816488a..0d3b0d7c9 100644 --- a/tests/back_compat/test_read.py +++ b/tests/back_compat/test_read.py @@ -2,7 +2,7 @@ from pathlib import Path import warnings -from pynwb import NWBHDF5IO, load_namespaces, validate, TimeSeries +from pynwb import NWBHDF5IO, validate, TimeSeries from pynwb.image import ImageSeries from pynwb.testing import TestCase From c18279418ea2a3f437af2f2db4f1a5c1f8e7cc99 Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 20 Sep 2022 16:19:45 -0700 Subject: [PATCH 23/38] Fix conda --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 99775dbda..aa8a733a5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -97,7 +97,7 @@ references: name: Configure conda command: | pip install --upgrade pip - conda install -n base -c defaults conda + conda install -n base -c defaults conda=4.13.0 conda config --set always_yes yes --set changeps1 no conda config --add channels conda-forge conda install python=$CONDA_PYTHON_VER From 626b5d30c5e29ef05e81936379aaf103cc0ed400 Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Wed, 21 Sep 2022 14:29:31 +0000 Subject: [PATCH 24/38] revert to previous init structure for core --- src/pynwb/validate.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 88771fe1f..b48836041 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -131,7 +131,7 @@ def validate(**kwargs): status = 0 validation_errors = list() for path in paths: - namespaces_to_validate = [CORE_NAMESPACE] + namespaces_to_validate = [] namespace_message = "PyNWB namespace information" io_kwargs = dict(path=path, mode="r") @@ -143,6 +143,7 @@ def validate(**kwargs): namespaces_to_validate = cached_namespaces namespace_message = "cached namespace information" else: + namespaces_to_validate = [CORE_NAMESPACE] if verbose: print( f"The file {path} has no cached namespace information. 
Falling back to {namespace_message}.", @@ -154,7 +155,7 @@ def validate(**kwargs): namespaces_to_validate = [namespace] elif use_cached_namespaces and namespace in namespace_dependencies: # validating against a dependency for namespace_dependency in namespace_dependencies: - if namespace != "core" and namespace in namespace_dependencies[namespace_dependency]: + if namespace in namespace_dependencies[namespace_dependency]: status = 1 if verbose: print( From 90783d7a093deca6b9b1b183e44268593531bb9c Mon Sep 17 00:00:00 2001 From: Cody Baker Date: Wed, 21 Sep 2022 10:41:34 -0400 Subject: [PATCH 25/38] remaining debug --- src/pynwb/validate.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index b48836041..8eb227cc6 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -91,7 +91,7 @@ def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManag "name": "namespace", "type": str, "doc": "A specific namespace to validate against.", - "default": CORE_NAMESPACE, + "default": None, }, # Argument order is for back-compatability { "name": "paths", @@ -125,7 +125,7 @@ def validate(**kwargs): assert io != paths, "Both 'io' and 'paths' were specified! Please choose only one." if io is not None: - validation_errors = _validate_helper(io=io, namespace=namespace) + validation_errors = _validate_helper(io=io, namespace=namespace or CORE_NAMESPACE) return validation_errors status = 0 @@ -149,8 +149,10 @@ def validate(**kwargs): f"The file {path} has no cached namespace information. Falling back to {namespace_message}.", file=sys.stderr, ) + else: + namespaces_to_validate = [CORE_NAMESPACE] - if namespace: + if namespace is not None: if namespace in namespaces_to_validate: namespaces_to_validate = [namespace] elif use_cached_namespaces and namespace in namespace_dependencies: # validating against a dependency @@ -210,7 +212,6 @@ def validate_cli(): help="Use the cached namespace (default).", ) parser.set_defaults(no_cached_namespace=False) - parser.set_defaults(ns=CORE_NAMESPACE) args = parser.parse_args() status = 0 From ab9b789f1e356e2394cb38015d2057e9213daee0 Mon Sep 17 00:00:00 2001 From: CodyCBakerPhD Date: Wed, 21 Sep 2022 15:29:54 +0000 Subject: [PATCH 26/38] correct tests --- src/pynwb/validate.py | 2 +- tests/validation/test_validate.py | 46 +++++++++++++++++++++++-------- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 8eb227cc6..80505a292 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -112,7 +112,7 @@ def _get_cached_namespaces_to_validate(path: str) -> Tuple[List[str], BuildManag "default": False, }, returns="Validation errors in the file.", - rtype=(list, bool), + rtype=(list, (list, bool)), is_method=False, ) def validate(**kwargs): diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 38803b804..376bc5719 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -1,5 +1,7 @@ import subprocess import re +from unittest.mock import patch +from io import StringIO from pynwb.testing import TestCase from pynwb import validate, NWBHDF5IO @@ -108,18 +110,17 @@ def test_validate_file_cached_extension_pass_ns(self): r"using namespace 'ndx-testextension'\.\s* - no errors found\.\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) - def test_validate_file_cached_core(self): # TODO determine correct behavior + def 
test_validate_file_cached_core(self): """Test that validating a file with cached spec against the core namespace succeeds.""" result = subprocess.run(["coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/2.1.0_nwbfile_with_extension.nwb", "--ns", "core"], capture_output=True) - self.assertEqual(result.stderr.decode('utf-8'), '') - stdout_regex = re.compile( - r"Validating tests/back_compat/2\.1\.0_nwbfile_with_extension\.nwb against cached namespace information " - r"using namespace 'core'\.\s* - no errors found\.\s*") - self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + r"The namespace 'core' is included by the namespace 'ndx-testextension'. " + r"Please validate against that namespace instead\.\s*" + ) + self.assertRegex(result.stderr.decode('utf-8'), stdout_regex) def test_validate_file_cached_hdmf_common(self): """Test that validating a file with cached spec against the hdmf-common namespace fails.""" @@ -181,11 +182,34 @@ def test_validate_file_cached_extension_pass_ns(self): errors = validate(io, 'ndx-testextension') self.assertEqual(errors, []) - def test_validate_file_cached_core(self): # TODO determine correct behavior - """Test that validating a file with cached extension spec against the core namespace succeeds.""" - with NWBHDF5IO('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'r', load_namespaces=True) as io: - errors = validate(io, 'core') - self.assertEqual(errors, []) + def test_validate_file_cached_core_with_io(self): + """ + For back-compatability, test that validating a file with cached extension spec against the core + namespace succeeds when using the `io` + `namespace` keywords. + """ + with NWBHDF5IO( + path='tests/back_compat/2.1.0_nwbfile_with_extension.nwb', mode='r', load_namespaces=True + ) as io: + results = validate(io=io, namespace="core") + self.assertEqual(results, []) + + def test_validate_file_cached_core_with_paths(self): + """ + Test that validating a file with cached extension spec against the core + namespace raises an error with the new CLI-mimicing paths keyword. + """ + nwbfile_path = "tests/back_compat/2.1.0_nwbfile_with_extension.nwb" + with patch("sys.stderr", new=StringIO()) as fake_out: + results, status = validate(paths=[nwbfile_path], namespace="core", verbose=True) + self.assertEqual(results, []) + self.assertEqual(status, 1) + self.assertEqual( + first=fake_out.getvalue(), + second=( + "The namespace 'core' is included by the namespace 'ndx-testextension'. 
" + "Please validate against that namespace instead.\n" + ) + ) def test_validate_file_cached_bad_ns(self): """Test that validating a file with cached spec against a specified, unknown namespace fails.""" From b5529abb77e5b4035f4a09abbc53ec99a19c2d3c Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Tue, 27 Sep 2022 00:12:22 -0700 Subject: [PATCH 27/38] debugging miniconda37 error --- .circleci/config.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index aa8a733a5..040426f63 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -96,11 +96,11 @@ references: - run: name: Configure conda command: | - pip install --upgrade pip conda install -n base -c defaults conda=4.13.0 conda config --set always_yes yes --set changeps1 no conda config --add channels conda-forge conda install python=$CONDA_PYTHON_VER + pip install --upgrade pip # work around incompatibilities between virtualenv & importlib-metadata if [[ "${TEST_TOX_ENV}" == *"py37"* ]]; then conda install "importlib-metadata=1.7" @@ -108,6 +108,8 @@ references: conda install importlib-metadata fi conda install tox + conda list + pip list - run: name: Run the tests command: | From a8e457592c1e0db7fbf78be2730f784ce57fa7a4 Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Tue, 27 Sep 2022 15:30:49 -0700 Subject: [PATCH 28/38] Restore config.yml --- .circleci/config.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3fdfc4ea9..ca945579c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -99,7 +99,10 @@ references: conda install -n base -c defaults conda=4.13.0 conda config --set always_yes yes --set changeps1 no conda config --add channels conda-forge - conda install python=$CONDA_PYTHON_VER + conda create --name test python=$CONDA_PYTHON_VER --yes + conda init bash + source ~/.bashrc + conda activate test pip install --upgrade pip # work around incompatibilities between virtualenv & importlib-metadata if [[ "${TEST_TOX_ENV}" == *"py37"* ]]; then From 4e90651658ac43156af24ed6fd24e8fdf1d25ab5 Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Fri, 14 Oct 2022 09:40:49 -0700 Subject: [PATCH 29/38] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 78cfc686c..c46e2a179 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # PyNWB Changelog -## PyNWB 2.2.0 (October 11, 2022) +## PyNWB 2.2.0 (October 14, 2022) ### Enhancements and minor changes - Enhanced `pynwb.validate` API function to accept a list of file paths as well as the ability to operate on cached From 765312ab9e431c6973e3d68fe622a4659082aa2d Mon Sep 17 00:00:00 2001 From: rly Date: Mon, 17 Oct 2022 23:34:27 -0700 Subject: [PATCH 30/38] Cleanup, prep for invalid file test --- .github/workflows/generate_test_files.yml | 1 + CHANGELOG.md | 2 +- src/pynwb/testing/make_test_files.py | 20 ++++++++++++++++++++ src/pynwb/validate.py | 4 ---- test.py | 4 ++-- tests/validation/test_validate.py | 2 +- 6 files changed, 25 insertions(+), 8 deletions(-) diff --git a/.github/workflows/generate_test_files.yml b/.github/workflows/generate_test_files.yml index 5c3648c0f..f86e2ef30 100644 --- a/.github/workflows/generate_test_files.yml +++ b/.github/workflows/generate_test_files.yml @@ -1,6 +1,7 @@ name: Generate test files on: workflow_dispatch: + pull_request: # TODO comment jobs: gen-test-files: diff --git a/CHANGELOG.md b/CHANGELOG.md index c46e2a179..b579f7076 
100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # PyNWB Changelog -## PyNWB 2.2.0 (October 14, 2022) +## PyNWB 2.2.0 (October 19, 2022) ### Enhancements and minor changes - Enhanced `pynwb.validate` API function to accept a list of file paths as well as the ability to operate on cached diff --git a/src/pynwb/testing/make_test_files.py b/src/pynwb/testing/make_test_files.py index 301381688..0d3094712 100644 --- a/src/pynwb/testing/make_test_files.py +++ b/src/pynwb/testing/make_test_files.py @@ -1,4 +1,5 @@ from datetime import datetime +import h5py import numpy as np from pathlib import Path from pynwb import NWBFile, NWBHDF5IO, __version__, TimeSeries, get_class, load_namespaces @@ -46,6 +47,24 @@ def _make_str_pub(): _write(test_name, nwbfile) +def _make_timeseries_invalid(): + nwbfile = NWBFile(session_description='ADDME', + identifier='ADDME', + session_start_time=datetime.now().astimezone()) + ts = TimeSeries( + name='test_timeseries', + rate=1., + unit='unit', + ) + nwbfile.add_acquisition(ts) + + test_name = 'timeseries_no_data' + filename = _write(test_name, nwbfile) + + with h5py.File(filename, "a") as f: + del f["timeseries_no_data/starting_time"] + + def _make_timeseries_no_data(): nwbfile = NWBFile(session_description='ADDME', identifier='ADDME', @@ -211,6 +230,7 @@ def _make_empty_with_extension(): _make_str_pub() if __version__ == '1.5.1': + _make_timeseries_invalid() # NOTE: this is not specific to 1.5.1 _make_timeseries_no_data() _make_timeseries_no_unit() _make_imageseries_no_data() diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 80505a292..f215d524e 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -14,10 +14,6 @@ from pynwb.spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace -def _check_errors(validation_errors: list) -> bool: - return validation_errors is not None and len(validation_errors) > 0 - - def _print_errors(validation_errors: list): if validation_errors: print(" - found the following errors:", file=sys.stderr) diff --git a/test.py b/test.py index f218f49be..401a75e5c 100755 --- a/test.py +++ b/test.py @@ -228,7 +228,7 @@ def run_integration_tests(verbose=True): logging.info('all classes have integration tests') # also test the validation script - run_test_suite("tests/validation", "validation CLI tests", verbose=verbose) + run_test_suite("tests/validation", "validation tests", verbose=verbose) def clean_up_tests(): @@ -293,7 +293,7 @@ def main(): parser.add_argument('-b', '--backwards', action='append_const', const=flags['backwards'], dest='suites', help='run backwards compatibility tests') parser.add_argument('-w', '--validation', action='append_const', const=flags['validation'], dest='suites', - help='run validation tests') + help='run example tests and validation tests on example NWB files') parser.add_argument('-r', '--ros3', action='append_const', const=flags['ros3'], dest='suites', help='run ros3 streaming tests') args = parser.parse_args() diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 376bc5719..c2d4de319 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -7,7 +7,7 @@ from pynwb import validate, NWBHDF5IO -class TestValidateScript(TestCase): +class TestValidateCLI(TestCase): # 1.0.2_nwbfile.nwb has no cached specifications # 1.0.3_nwbfile.nwb has cached "core" specification From 2334dfd556cab1d143e49637e3255172a71f71be Mon Sep 17 00:00:00 2001 From: rly Date: Mon, 17 Oct 2022 23:37:40 -0700 Subject: [PATCH 31/38] Try again to 
make test file --- src/pynwb/testing/make_test_files.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pynwb/testing/make_test_files.py b/src/pynwb/testing/make_test_files.py index 0d3094712..ec1d51cfd 100644 --- a/src/pynwb/testing/make_test_files.py +++ b/src/pynwb/testing/make_test_files.py @@ -53,16 +53,17 @@ def _make_timeseries_invalid(): session_start_time=datetime.now().astimezone()) ts = TimeSeries( name='test_timeseries', + data=[0], rate=1., unit='unit', ) nwbfile.add_acquisition(ts) - test_name = 'timeseries_no_data' + test_name = 'timeseries' filename = _write(test_name, nwbfile) with h5py.File(filename, "a") as f: - del f["timeseries_no_data/starting_time"] + del f["timeseries"]["starting_time"] def _make_timeseries_no_data(): From 6164a78ccae4048694f32bf4c51c8e65dbbde4f6 Mon Sep 17 00:00:00 2001 From: rly Date: Mon, 17 Oct 2022 23:38:59 -0700 Subject: [PATCH 32/38] Try again --- src/pynwb/testing/make_test_files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pynwb/testing/make_test_files.py b/src/pynwb/testing/make_test_files.py index ec1d51cfd..4a0e4d611 100644 --- a/src/pynwb/testing/make_test_files.py +++ b/src/pynwb/testing/make_test_files.py @@ -63,7 +63,7 @@ def _make_timeseries_invalid(): filename = _write(test_name, nwbfile) with h5py.File(filename, "a") as f: - del f["timeseries"]["starting_time"] + del f["acquisition/timeseries"]["starting_time"] def _make_timeseries_no_data(): From a8849050ad646c2e426cae1c9cfe0f6862c6df8b Mon Sep 17 00:00:00 2001 From: rly Date: Mon, 17 Oct 2022 23:46:24 -0700 Subject: [PATCH 33/38] Fix --- src/pynwb/testing/make_test_files.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pynwb/testing/make_test_files.py b/src/pynwb/testing/make_test_files.py index 4a0e4d611..d73553341 100644 --- a/src/pynwb/testing/make_test_files.py +++ b/src/pynwb/testing/make_test_files.py @@ -59,11 +59,11 @@ def _make_timeseries_invalid(): ) nwbfile.add_acquisition(ts) - test_name = 'timeseries' + test_name = 'timeseries_invalid' filename = _write(test_name, nwbfile) with h5py.File(filename, "a") as f: - del f["acquisition/timeseries"]["starting_time"] + del f["acquisition/test_timeseries/starting_time"] def _make_timeseries_no_data(): From 6ec27bee81e7fd32b9c7d0c7efe0055ba66e23d3 Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 18 Oct 2022 00:19:00 -0700 Subject: [PATCH 34/38] Fix, cleanup --- .github/workflows/generate_test_files.yml | 2 +- src/pynwb/testing/make_test_files.py | 20 -------------------- src/pynwb/validate.py | 2 +- tests/validation/test_validate.py | 20 ++++++++++++++++++++ 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.github/workflows/generate_test_files.yml b/.github/workflows/generate_test_files.yml index f86e2ef30..49bc31b14 100644 --- a/.github/workflows/generate_test_files.yml +++ b/.github/workflows/generate_test_files.yml @@ -1,7 +1,7 @@ name: Generate test files on: workflow_dispatch: - pull_request: # TODO comment + # pull_request: jobs: gen-test-files: diff --git a/src/pynwb/testing/make_test_files.py b/src/pynwb/testing/make_test_files.py index d73553341..5f78ac54f 100644 --- a/src/pynwb/testing/make_test_files.py +++ b/src/pynwb/testing/make_test_files.py @@ -47,25 +47,6 @@ def _make_str_pub(): _write(test_name, nwbfile) -def _make_timeseries_invalid(): - nwbfile = NWBFile(session_description='ADDME', - identifier='ADDME', - session_start_time=datetime.now().astimezone()) - ts = TimeSeries( - name='test_timeseries', - data=[0], - 
rate=1., - unit='unit', - ) - nwbfile.add_acquisition(ts) - - test_name = 'timeseries_invalid' - filename = _write(test_name, nwbfile) - - with h5py.File(filename, "a") as f: - del f["acquisition/test_timeseries/starting_time"] - - def _make_timeseries_no_data(): nwbfile = NWBFile(session_description='ADDME', identifier='ADDME', @@ -231,7 +212,6 @@ def _make_empty_with_extension(): _make_str_pub() if __version__ == '1.5.1': - _make_timeseries_invalid() # NOTE: this is not specific to 1.5.1 _make_timeseries_no_data() _make_timeseries_no_unit() _make_imageseries_no_data() diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index f215d524e..e77e30c32 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -205,7 +205,7 @@ def validate_cli(): "--no-cached-namespace", dest="no_cached_namespace", action="store_true", - help="Use the cached namespace (default).", + help="Use the PyNWB loaded namespace (true) or use the cached namespace (false; default).", ) parser.set_defaults(no_cached_namespace=False) args = parser.parse_args() diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index c2d4de319..88b8e4697 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -145,6 +145,26 @@ def test_validate_file_cached_ignore(self): r"'core'\.\s* - no errors found\.\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + def test_validate_file_invalid(self): + """Test that validating a file with cached spec against the core namespace succeeds.""" + result = subprocess.run( + ["coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/1.0.2_str_experimenter.nwb", + "--no-cached-namespace"], + capture_output=True + ) + + stderr_regex = re.compile( + r" - found the following errors:\s*" + r"root/general/experimenter \(general/experimenter\): incorrect shape - expected an array of shape " + r"'\[None\]', got non-array data 'one experimenter'\s*" + ) + self.assertRegex(result.stderr.decode('utf-8'), stderr_regex) + + stdout_regex = re.compile( + r"Validating tests/back_compat/1\.0\.2_str_experimenter\.nwb against PyNWB namespace information using " + r"namespace 'core'\.\s*") + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + class TestValidateFunction(TestCase): From 5108f2cfd320228f0eaf0f9cbfde6db36a310b6d Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 18 Oct 2022 11:06:29 -0700 Subject: [PATCH 35/38] Fix error with --list-namespaces --- src/pynwb/testing/make_test_files.py | 1 - src/pynwb/validate.py | 2 +- tests/validation/test_validate.py | 25 +++++++++++++++++++++---- 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/src/pynwb/testing/make_test_files.py b/src/pynwb/testing/make_test_files.py index 5f78ac54f..301381688 100644 --- a/src/pynwb/testing/make_test_files.py +++ b/src/pynwb/testing/make_test_files.py @@ -1,5 +1,4 @@ from datetime import datetime -import h5py import numpy as np from pathlib import Path from pynwb import NWBFile, NWBHDF5IO, __version__, TimeSeries, get_class, load_namespaces diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index e77e30c32..66b4bac42 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -213,7 +213,7 @@ def validate_cli(): if args.list_namespaces: for path in args.paths: - cached_namespaces, _, _, specloc = _get_cached_namespaces_to_validate(path=path) + cached_namespaces, _, _ = _get_cached_namespaces_to_validate(path=path) print("\n".join(cached_namespaces)) else: validation_errors, validation_status = validate( 
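As an illustrative aside (not part of any patch in this series), the following minimal sketch shows how the command-line entry point touched by this changeset can be driven, mirroring the way the test suite below invokes it; "example.nwb" is a placeholder path, and the flags shown are the ones defined in `validate_cli` above.

```python
# Minimal sketch, not part of the patch series. "example.nwb" is a placeholder
# path; the flags match those defined in validate_cli above.
import subprocess

# List the namespaces cached in the file, as done by --list-namespaces.
listing = subprocess.run(
    ["python", "-m", "pynwb.validate", "example.nwb", "--list-namespaces"],
    capture_output=True, text=True,
)
print(listing.stdout)

# Validate against the PyNWB core namespace instead of any cached namespaces.
validation = subprocess.run(
    ["python", "-m", "pynwb.validate", "example.nwb", "--no-cached-namespace"],
    capture_output=True, text=True,
)
print(validation.stdout)
print(validation.stderr)  # error listing, if any validation errors were found
```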
diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 88b8e4697..d961ade03 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -146,11 +146,13 @@ def test_validate_file_cached_ignore(self): self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) def test_validate_file_invalid(self): - """Test that validating a file with cached spec against the core namespace succeeds.""" + """Test that validating an invalid file outputs errors.""" result = subprocess.run( - ["coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/1.0.2_str_experimenter.nwb", - "--no-cached-namespace"], - capture_output=True + [ + "coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/1.0.2_str_experimenter.nwb", + "--no-cached-namespace" + ], + capture_output=True ) stderr_regex = re.compile( @@ -165,6 +167,21 @@ def test_validate_file_invalid(self): r"namespace 'core'\.\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + def test_validate_file_list_namespaces(self): + """Test listing namespaces from a file""" + result = subprocess.run( + [ + "coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/2.1.0_nwbfile_with_extension.nwb", + "--list-namespaces" + ], + capture_output=True + ) + + self.assertEqual(result.stderr.decode('utf-8'), '') + + stdout_regex = re.compile(r"ndx-testextension\s*") + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + class TestValidateFunction(TestCase): From 78b063860d54b5d046319e44c9b8fd34b10fdcc1 Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 18 Oct 2022 11:41:33 -0700 Subject: [PATCH 36/38] Add test, retcode=1 should print error msg --- src/pynwb/validate.py | 22 ++++---- tests/validation/test_validate.py | 89 ++++++++++++++++++++++++------- 2 files changed, 79 insertions(+), 32 deletions(-) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py index 66b4bac42..a5a313481 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -155,20 +155,18 @@ def validate(**kwargs): for namespace_dependency in namespace_dependencies: if namespace in namespace_dependencies[namespace_dependency]: status = 1 - if verbose: - print( - f"The namespace '{namespace}' is included by the namespace " - f"'{namespace_dependency}'. Please validate against that namespace instead.", - file=sys.stderr, - ) + print( + f"The namespace '{namespace}' is included by the namespace " + f"'{namespace_dependency}'. 
Please validate against that namespace instead.", + file=sys.stderr, + ) else: status = 1 - if verbose: - print( - f"The namespace '{namespace}' could not be found in {namespace_message} as only " - f"{namespaces_to_validate} is present.", - file=sys.stderr, - ) + print( + f"The namespace '{namespace}' could not be found in {namespace_message} as only " + f"{namespaces_to_validate} is present.", + file=sys.stderr, + ) if status == 1: continue diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index d961ade03..1c7278539 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -167,7 +167,22 @@ def test_validate_file_invalid(self): r"namespace 'core'\.\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) - def test_validate_file_list_namespaces(self): + def test_validate_file_list_namespaces_core(self): + """Test listing namespaces from a file""" + result = subprocess.run( + [ + "coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/1.1.2_nwbfile.nwb", + "--list-namespaces" + ], + capture_output=True + ) + + self.assertEqual(result.stderr.decode('utf-8'), '') + + stdout_regex = re.compile(r"core\s*") + self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + + def test_validate_file_list_namespaces_extension(self): """Test listing namespaces from a file""" result = subprocess.run( [ @@ -189,37 +204,37 @@ class TestValidateFunction(TestCase): # 1.0.3_nwbfile.nwb has cached "core" specification # 1.1.2_nwbfile.nwb has cached "core" and "hdmf-common" specificaitions - def test_validate_file_no_cache(self): + def test_validate_io_no_cache(self): """Test that validating a file with no cached spec against the core namespace succeeds.""" with NWBHDF5IO('tests/back_compat/1.0.2_nwbfile.nwb', 'r') as io: errors = validate(io) self.assertEqual(errors, []) - def test_validate_file_no_cache_bad_ns(self): + def test_validate_io_no_cache_bad_ns(self): """Test that validating a file with no cached spec against a specified, unknown namespace fails.""" with NWBHDF5IO('tests/back_compat/1.0.2_nwbfile.nwb', 'r') as io: with self.assertRaisesWith(KeyError, "\"'notfound' not a namespace\""): validate(io, 'notfound') - def test_validate_file_cached(self): + def test_validate_io_cached(self): """Test that validating a file with cached spec against its cached namespace succeeds.""" with NWBHDF5IO('tests/back_compat/1.1.2_nwbfile.nwb', 'r') as io: errors = validate(io) self.assertEqual(errors, []) - def test_validate_file_cached_extension(self): + def test_validate_io_cached_extension(self): """Test that validating a file with cached spec against its cached namespaces succeeds.""" with NWBHDF5IO('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'r', load_namespaces=True) as io: errors = validate(io) self.assertEqual(errors, []) - def test_validate_file_cached_extension_pass_ns(self): + def test_validate_io_cached_extension_pass_ns(self): """Test that validating a file with cached extension spec against the extension namespace succeeds.""" with NWBHDF5IO('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'r', load_namespaces=True) as io: errors = validate(io, 'ndx-testextension') self.assertEqual(errors, []) - def test_validate_file_cached_core_with_io(self): + def test_validate_io_cached_core_with_io(self): """ For back-compatability, test that validating a file with cached extension spec against the core namespace succeeds when using the `io` + `namespace` keywords. 
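As an illustrative aside (not part of any patch in this series), a short sketch of the two calling conventions these tests exercise, assuming a file like the `2.1.0_nwbfile_with_extension.nwb` fixture that caches an extension namespace: the back-compatible `io` + `namespace` form returns only an error list, while the CLI-mimicking `paths` form also returns a status and reports when the requested namespace is only a dependency of a cached one.

```python
# Illustrative sketch only; the path is a placeholder for a file that caches
# an extension namespace (e.g. the ndx-testextension fixture used above).
from pynwb import NWBHDF5IO, validate

path = "tests/back_compat/2.1.0_nwbfile_with_extension.nwb"

# Back-compatible form: io + namespace returns just the list of errors.
with NWBHDF5IO(path, mode="r", load_namespaces=True) as io:
    errors = validate(io=io, namespace="core")  # expected: []

# New form: paths returns (errors, status); validating against 'core' here
# yields status 1 because 'core' is included by the cached extension namespace,
# and a hint is printed to stderr when verbose=True.
errors, status = validate(paths=[path], namespace="core", verbose=True)
```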
@@ -230,31 +245,65 @@ def test_validate_file_cached_core_with_io(self): results = validate(io=io, namespace="core") self.assertEqual(results, []) - def test_validate_file_cached_core_with_paths(self): + def test_validate_file_cached_extension(self): """ Test that validating a file with cached extension spec against the core namespace raises an error with the new CLI-mimicing paths keyword. """ nwbfile_path = "tests/back_compat/2.1.0_nwbfile_with_extension.nwb" - with patch("sys.stderr", new=StringIO()) as fake_out: - results, status = validate(paths=[nwbfile_path], namespace="core", verbose=True) - self.assertEqual(results, []) - self.assertEqual(status, 1) - self.assertEqual( - first=fake_out.getvalue(), - second=( - "The namespace 'core' is included by the namespace 'ndx-testextension'. " - "Please validate against that namespace instead.\n" + with patch("sys.stderr", new=StringIO()) as fake_err: + with patch("sys.stdout", new=StringIO()) as fake_out: + results, status = validate(paths=[nwbfile_path], namespace="core", verbose=True) + self.assertEqual(results, []) + self.assertEqual(status, 1) + self.assertEqual( + fake_err.getvalue(), + ( + "The namespace 'core' is included by the namespace 'ndx-testextension'. " + "Please validate against that namespace instead.\n" + ) ) - ) + self.assertEqual(fake_out.getvalue(), "") - def test_validate_file_cached_bad_ns(self): + def test_validate_file_cached_core(self): + """ + Test that validating a file with cached core spec with verbose=False. + """ + nwbfile_path = "tests/back_compat/1.1.2_nwbfile.nwb" + with patch("sys.stderr", new=StringIO()) as fake_err: + with patch("sys.stdout", new=StringIO()) as fake_out: + results, status = validate(paths=[nwbfile_path], namespace="core") + self.assertEqual(results, []) + self.assertEqual(status, 0) + self.assertEqual(fake_err.getvalue(), "") + self.assertEqual(fake_out.getvalue(), "") + + def test_validate_file_cached_no_cache_bad_ns(self): + """ + Test that validating a file with no cached namespace, a namespace that is not found, and verbose=False. 
+ """ + nwbfile_path = "tests/back_compat/1.0.2_nwbfile.nwb" + with patch("sys.stderr", new=StringIO()) as fake_err: + with patch("sys.stdout", new=StringIO()) as fake_out: + results, status = validate(paths=[nwbfile_path], namespace="notfound") + self.assertEqual(results, []) + self.assertEqual(status, 1) + self.assertEqual( + fake_err.getvalue(), + ( + "The namespace 'notfound' could not be found in PyNWB namespace information as only " + "['core'] is present.\n" + ) + ) + self.assertEqual(fake_out.getvalue(), "") + + def test_validate_io_cached_bad_ns(self): """Test that validating a file with cached spec against a specified, unknown namespace fails.""" with NWBHDF5IO('tests/back_compat/1.1.2_nwbfile.nwb', 'r') as io: with self.assertRaisesWith(KeyError, "\"'notfound' not a namespace\""): validate(io, 'notfound') - def test_validate_file_cached_hdmf_common(self): + def test_validate_io_cached_hdmf_common(self): """Test that validating a file with cached spec against the hdmf-common namespace fails.""" with NWBHDF5IO('tests/back_compat/1.1.2_nwbfile.nwb', 'r') as io: # TODO this error should not be different from the error when using the validate script above From effd64e21128272aa06944308c152a88c5c505fe Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 18 Oct 2022 16:53:45 -0700 Subject: [PATCH 37/38] Fix --- tests/validation/test_validate.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 1c7278539..8aa7a4c2a 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -288,13 +288,11 @@ def test_validate_file_cached_no_cache_bad_ns(self): results, status = validate(paths=[nwbfile_path], namespace="notfound") self.assertEqual(results, []) self.assertEqual(status, 1) - self.assertEqual( - fake_err.getvalue(), - ( - "The namespace 'notfound' could not be found in PyNWB namespace information as only " - "['core'] is present.\n" - ) + stderr_regex = ( + r"The namespace 'notfound' could not be found in PyNWB namespace information as only " + r"\['core'\] is present.\n" ) + self.assertRegex(fake_err.getvalue(), stderr_regex) self.assertEqual(fake_out.getvalue(), "") def test_validate_io_cached_bad_ns(self): From 06ca152dc994580ab6dc8438363c0327aac963ff Mon Sep 17 00:00:00 2001 From: rly Date: Tue, 18 Oct 2022 17:36:04 -0700 Subject: [PATCH 38/38] Clean up comments --- tests/back_compat/test_import_structure.py | 5 +++++ tests/validation/test_validate.py | 4 +--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/back_compat/test_import_structure.py b/tests/back_compat/test_import_structure.py index fd95c9856..dba11a48a 100644 --- a/tests/back_compat/test_import_structure.py +++ b/tests/back_compat/test_import_structure.py @@ -4,6 +4,11 @@ class TestImportStructure(TestCase): + """Test whether the classes/modules imported from pynwb in version 2.1.1 are still accessible. + + NOTE: this test was needed to ensure backward compatibility of "import pynwb" after changes to the package file + hierarchy in PyNWB 2.2.0 around validate.py (see https://github.com/NeurodataWithoutBorders/pynwb/pull/1511). 
+ """ def test_outer_import_structure(self): current_structure = dir(pynwb) expected_structure = [ diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 8aa7a4c2a..4cf9bea33 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -23,8 +23,6 @@ class TestValidateCLI(TestCase): # NOTE the run_coverage.yml GitHub Action runs "python -m coverage combine" to # combine the individual coverage reprots into one .coverage file. - # TODO test validation on files with cached extensions - def test_validate_file_no_cache(self): """Test that validating a file with no cached spec against the core namespace succeeds.""" result = subprocess.run(["coverage", "run", "-p", "-m", "pynwb.validate", @@ -183,7 +181,7 @@ def test_validate_file_list_namespaces_core(self): self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) def test_validate_file_list_namespaces_extension(self): - """Test listing namespaces from a file""" + """Test listing namespaces from a file with an extension""" result = subprocess.run( [ "coverage", "run", "-p", "-m", "pynwb.validate", "tests/back_compat/2.1.0_nwbfile_with_extension.nwb",