Add docstrings to public classes and a ruff check enforcing their presence #1034

Merged · 10 commits · Aug 30, 2024
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -51,6 +51,7 @@
* Replaced all interface annotations with Pydantic types. [PR #1017](https://github.com/catalystneuro/neuroconv/pull/1017)
* Changed typehint collections (e.g. `List`) to standard collections (e.g. `list`). [PR #1021](https://github.com/catalystneuro/neuroconv/pull/1021)
* Testing now is only one dataset per test [PR #1026](https://github.com/catalystneuro/neuroconv/pull/1026)
* Added a ruff rule to enforce the existence of docstrings for public classes [PR #1034](https://github.com/catalystneuro/neuroconv/pull/1034)



11 changes: 6 additions & 5 deletions pyproject.toml
@@ -123,20 +123,21 @@ extend-exclude = '''


[tool.ruff]
exclude = [
"*/__init__.py"
]

[tool.ruff.lint]
select = ["F401", "I"] # TODO: eventually, expand to other 'F' linting
select = ["F401", "I", "D101"] # TODO: eventually, expand to other 'F' linting
fixable = ["ALL"]

[tool.ruff.lint.per-file-ignores]
"**__init__.py" = ["F401", "I"]
"tests/**" = ["D"] # We are not enforcing docstrings in tests
"src/neuroconv/tools/testing/data_interface_mixins.py" = ["D"] # We are not enforcing docstrings in the interface mixings

[tool.ruff.lint.isort]
relative-imports-order = "closest-to-furthest"
known-first-party = ["neuroconv"]



[tool.codespell]
skip = '.git*,*.pdf,*.css'
check-hidden = true
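For context on the `D101` selection above: the rule reports any public class whose body does not start with a docstring. Below is a minimal sketch (a hypothetical module, not part of this PR) of what `ruff check` would report under this configuration, including the `# noqa: D101` escape hatch that appears later in this diff:

```python
# hypothetical_module.py -- illustration only, not part of the PR


class UndocumentedInterface:  # ruff reports: D101 Missing docstring in public class
    pass


class DocumentedInterface:
    """A public class with a docstring, so D101 is satisfied."""


class ExemptedInterface:  # noqa: D101
    pass  # the noqa comment suppresses the rule for this one class
```

Files matched by the per-file ignores above (`tests/**` and the interface mixins module) skip all `D` rules, so classes there are never reported.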
@@ -132,6 +132,9 @@ def __init__(self, file_path: FilePath, noise_std: float = 3.5):


class AxonaLFPDataInterface(BaseLFPExtractorInterface):
"""
Primary data interface class for converting Axona LFP data.
"""

display_name = "Axona LFP"
associated_suffixes = (".bin", ".set")
@@ -361,6 +361,14 @@ def get_original_timestamps(self):


class CellExplorerLFPInterface(CellExplorerRecordingInterface):
"""
Adds LFP data from binary files in the new CellExplorer format:

https://cellexplorer.org/

See the `CellExplorerRecordingInterface` class for more information.
"""

display_name = "CellExplorer LFP"
keywords = BaseRecordingExtractorInterface.keywords + (
"extracellular electrophysiology",
@@ -94,6 +94,11 @@ def get_metadata(self) -> dict:


class NeuralynxSortingInterface(BaseSortingExtractorInterface):
"""
Primary data interface for converting Neuralynx sorting data. Uses
:py:class:`~spikeinterface.extractors.NeuralynxSortingExtractor`.
"""

display_name = "Neuralynx Sorting"
associated_suffixes = (".nse", ".ntt", ".nse", ".nev")
info = "Interface for Neuralynx sorting data."
@@ -17,6 +17,10 @@


class SpikeGLXRecordingInterface(BaseRecordingExtractorInterface):
"""
Primary SpikeGLX interface for converting raw SpikeGLX data using a :py:class:`~spikeinterface.extractors.SpikeGLXRecordingExtractor`.
"""

display_name = "SpikeGLX Recording"
keywords = BaseRecordingExtractorInterface.keywords + ("Neuropixels",)
associated_suffixes = (".imec{probe_index}", ".ap", ".lf", ".meta", ".bin")
@@ -13,6 +13,10 @@


class BrukerTiffMultiPlaneConverter(NWBConverter):
"""
Converter class for Bruker imaging data with multiple channels and multiple planes.
"""

display_name = "Bruker TIFF Imaging (multiple channels, multiple planes)"
keywords = BrukerTiffMultiPlaneImagingInterface.keywords
associated_suffixes = BrukerTiffMultiPlaneImagingInterface.associated_suffixes
@@ -123,6 +127,10 @@ def run_conversion(


class BrukerTiffSinglePlaneConverter(NWBConverter):
"""
Converter class for Bruker imaging data with multiple channels and a single plane.
"""

display_name = "Bruker TIFF Imaging (multiple channels, single plane)"
keywords = BrukerTiffMultiPlaneImagingInterface.keywords
associated_suffixes = BrukerTiffMultiPlaneImagingInterface.associated_suffixes
4 changes: 3 additions & 1 deletion src/neuroconv/tools/hdmf.py
@@ -7,7 +7,9 @@
from hdmf.data_utils import GenericDataChunkIterator as HDMFGenericDataChunkIterator


class GenericDataChunkIterator(HDMFGenericDataChunkIterator):
class GenericDataChunkIterator(HDMFGenericDataChunkIterator): # noqa: D101
# TODO Should this be added to the API?

def _get_default_buffer_shape(self, buffer_gb: float = 1.0) -> tuple[int]:
return self.estimate_default_buffer_shape(
buffer_gb=buffer_gb, chunk_shape=self.chunk_shape, maxshape=self.maxshape, dtype=self.dtype
17 changes: 16 additions & 1 deletion src/neuroconv/tools/path_expansion.py
@@ -13,6 +13,15 @@


class AbstractPathExpander(abc.ABC):
"""
Abstract base class for expanding file paths and extracting metadata.

This class provides methods to extract metadata from file paths within a directory
and to expand paths based on a specified data specification. It is designed to be
subclassed, with the `list_directory` method needing to be implemented by any
subclass to provide the specific logic for listing files in a directory.
"""

def extract_metadata(self, base_directory: DirectoryPath, format_: str):
"""
Uses the parse library to extract metadata from file paths in the base_directory.
@@ -128,7 +137,13 @@ def expand_paths(self, source_data_spec: dict[str, dict]) -> list[DeepDict]:


class LocalPathExpander(AbstractPathExpander):
def list_directory(self, base_directory: DirectoryPath) -> Iterable[FilePath]:
"""
Class for expanding file paths and extracting metadata on a local filesystem.

See https://neuroconv.readthedocs.io/en/main/user_guide/expand_path.html for more information.
"""

def list_directory(self, base_directory: DirectoryPath) -> Iterable[FilePath]: # noqa: D101
base_directory = Path(base_directory)
assert base_directory.is_dir(), f"The specified 'base_directory' ({base_directory}) is not a directory!"
return (str(path.relative_to(base_directory)) for path in base_directory.rglob("*"))
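To make the new `LocalPathExpander` docstring concrete, here is a rough usage sketch; the directory layout, the interface key, and the `{subject_id}`/`{session_id}` placeholders are hypothetical, and the spec shape follows the `expand_paths(source_data_spec: dict[str, dict])` signature shown above:

```python
from neuroconv.tools.path_expansion import LocalPathExpander

# Hypothetical layout: /data/project/<subject_id>/<session_id>/recording.bin
source_data_spec = {
    "MyRecordingInterface": {  # hypothetical interface name used as the spec key
        "base_directory": "/data/project",
        "file_path": "{subject_id}/{session_id}/recording.bin",
    }
}

expander = LocalPathExpander()
for match in expander.expand_paths(source_data_spec):  # returns list[DeepDict]
    print(match)  # one DeepDict per file that matched the pattern
```

The user guide linked in the docstring covers the full structure returned for each match.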
30 changes: 30 additions & 0 deletions src/neuroconv/tools/testing/data_interface_mixins.py
@@ -767,6 +767,10 @@ def test_interface_alignment(self, setup_interface):


class AudioInterfaceTestMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing Audio interfaces.
"""

# Currently asserted in the downstream testing suite; could be refactored in future PR
def check_read_nwb(self, nwbfile_path: str):
pass
@@ -777,6 +781,10 @@ def test_interface_alignment(self):


class DeepLabCutInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing DeepLabCut interfaces.
"""

def check_interface_get_original_timestamps(self):
pass # TODO in separate PR

@@ -797,6 +805,10 @@ def check_nwbfile_temporal_alignment(self):


class VideoInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing Video interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
with NWBHDF5IO(path=nwbfile_path, mode="r", load_namespaces=True) as io:
nwbfile = io.read()
@@ -867,6 +879,10 @@ def check_interface_original_timestamps_inmutability(self):


class MedPCInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing MedPC interfaces.
"""

def check_no_metadata_mutation(self, metadata: dict):
"""Ensure the metadata object was not altered by `add_to_nwbfile` method."""

@@ -1101,6 +1117,10 @@ def test_interface_alignment(self, medpc_name_to_info_dict: dict):


class MiniscopeImagingInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing Miniscope Imaging interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
from ndx_miniscope import Miniscope

@@ -1129,6 +1149,10 @@ def check_read_nwb(self, nwbfile_path: str):


class ScanImageSinglePlaneImagingInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing ScanImage Single Plane Imaging interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
with NWBHDF5IO(nwbfile_path, "r") as io:
nwbfile = io.read()
@@ -1160,6 +1184,10 @@ def check_read_nwb(self, nwbfile_path: str):


class ScanImageMultiPlaneImagingInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing ScanImage MultiPlane Imaging interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
with NWBHDF5IO(nwbfile_path, "r") as io:
nwbfile = io.read()
@@ -1190,6 +1218,8 @@ def check_read_nwb(self, nwbfile_path: str):


class TDTFiberPhotometryInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""Mixin for testing TDT Fiber Photometry interfaces."""

def check_no_metadata_mutation(self, metadata: dict):
"""Ensure the metadata object was not altered by `add_to_nwbfile` method."""

12 changes: 12 additions & 0 deletions src/neuroconv/tools/testing/mock_interfaces.py
@@ -18,6 +18,10 @@


class MockBehaviorEventInterface(BaseTemporalAlignmentInterface):
"""
A mock behavior event interface for testing purposes.
"""

@classmethod
def get_source_schema(cls) -> dict:
source_schema = get_schema_from_method_signature(method=cls.__init__, exclude=["event_times"])
@@ -56,6 +60,10 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):


class MockSpikeGLXNIDQInterface(SpikeGLXNIDQInterface):
"""
A mock SpikeGLX interface for testing purposes.
"""

ExtractorName = "NumpyRecording"

@classmethod
@@ -150,6 +158,10 @@ def get_metadata(self) -> dict:


class MockImagingInterface(BaseImagingExtractorInterface):
"""
A mock imaging interface for testing purposes.
"""

def __init__(
self,
num_frames: int = 30,
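A minimal sketch of how the mock might be exercised in a test; only `num_frames` appears in the visible signature, and the other constructor arguments are assumed to keep their defaults:

```python
from neuroconv.tools.testing.mock_interfaces import MockImagingInterface

interface = MockImagingInterface(num_frames=30)  # wraps synthetic imaging data
metadata = interface.get_metadata()              # standard data interface API
```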
16 changes: 16 additions & 0 deletions src/neuroconv/utils/json_schema.py
@@ -17,7 +17,17 @@


class NWBMetaDataEncoder(json.JSONEncoder):
"""
Custom JSON encoder for NWB metadata.

This encoder extends the default JSONEncoder class and provides custom serialization
for certain data types commonly used in NWB metadata.
"""

def default(self, obj):
"""
Serialize custom data types to JSON. This overrides the default method of the JSONEncoder class.
"""
# Over-write behaviors for datetime object
if isinstance(obj, datetime):
return obj.isoformat()
@@ -34,6 +44,12 @@ def default(self, obj):


class NWBSourceDataEncoder(NWBMetaDataEncoder):
"""
Custom JSON encoder for data interface source data (i.e. kwargs).

This encoder extends the default JSONEncoder class and provides custom serialization
for certain data types commonly used in interface source data.
"""

def default(self, obj):

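Both encoders are ordinary `json.JSONEncoder` subclasses, so they plug into the standard `cls=` hook. A minimal sketch (the metadata dict is hypothetical; the import path is taken from the file location above):

```python
import json
from datetime import datetime

from neuroconv.utils.json_schema import NWBMetaDataEncoder

# Hypothetical metadata containing a datetime, which the stock encoder rejects
metadata = {"NWBFile": {"session_start_time": datetime(2024, 8, 30, 12, 0)}}

# The custom encoder serializes the datetime via .isoformat()
print(json.dumps(metadata, cls=NWBMetaDataEncoder))
# -> {"NWBFile": {"session_start_time": "2024-08-30T12:00:00"}}
```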