Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add pydoc with ruff check that public classes should have documentation. #1034

Merged
merged 10 commits into from
Aug 30, 2024
121 changes: 60 additions & 61 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,71 +8,71 @@ version = "0.5.1"
description = "Convert data from proprietary formats to NWB format."
readme = "README.md"
authors = [
{name = "Cody Baker"},
{name = "Szonja Weigl"},
{name = "Heberto Mayorquin"},
{name = "Luiz Tauffer"},
{name = "Ben Dichter", email = "[email protected]"}
{ name = "Cody Baker" },
{ name = "Szonja Weigl" },
{ name = "Heberto Mayorquin" },
{ name = "Luiz Tauffer" },
{ name = "Ben Dichter", email = "[email protected]" },
]
urls = { "Homepage" = "https://github.com/catalystneuro/neuroconv" }
license = {file = "license.txt"}
license = { file = "license.txt" }
keywords = ["nwb"]
classifiers = [
"Intended Audience :: Science/Research",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"License :: BSD-3-Clause ",
"Intended Audience :: Science/Research",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"License :: BSD-3-Clause ",
]
requires-python = ">=3.9"
dependencies = [
"numpy>=1.22.0, <2.0.0; python_version <= '3.11'",
"numpy>=1.26.0, <2.0.0; python_version >= '3.12'",
"jsonschema>=3.2.0",
"PyYAML>=5.4",
"scipy>=1.4.1",
"h5py>=3.9.0",
"hdmf>=3.13.0",
"hdmf_zarr>=0.7.0",
"pynwb>=2.7.0",
"pydantic>=2.0.0",
"typing_extensions>=4.1.0",
"psutil>=5.8.0",
"tqdm>=4.60.0",
"pandas",
"parse>=1.20.0",
"click",
"docstring-parser",
"packaging" # Issue 903
"numpy>=1.22.0, <2.0.0; python_version <= '3.11'",
"numpy>=1.26.0, <2.0.0; python_version >= '3.12'",
"jsonschema>=3.2.0",
"PyYAML>=5.4",
"scipy>=1.4.1",
"h5py>=3.9.0",
"hdmf>=3.13.0",
"hdmf_zarr>=0.7.0",
"pynwb>=2.7.0",
"pydantic>=2.0.0",
"typing_extensions>=4.1.0",
"psutil>=5.8.0",
"tqdm>=4.60.0",
"pandas",
"parse>=1.20.0",
"click",
"docstring-parser",
"packaging", # Issue 903
]


[project.optional-dependencies]
test = [
"pytest",
"pytest-cov",
"ndx-events>=0.2.0", # for special tests to ensure load_namespaces is set to allow NWBFile load at all times
"parameterized>=0.8.1",
"ndx-miniscope",
"spikeinterface[qualitymetrics]>=0.101.0",
"zarr<2.18.0", # Error with Blosc (read-only during decode) in numcodecs on May 7; check later if resolved
"pytest-xdist"
"pytest",
"pytest-cov",
"ndx-events>=0.2.0", # for special tests to ensure load_namespaces is set to allow NWBFile load at all times
"parameterized>=0.8.1",
"ndx-miniscope",
"spikeinterface[qualitymetrics]>=0.101.0",
"zarr<2.18.0", # Error with Blosc (read-only during decode) in numcodecs on May 7; check later if resolved
"pytest-xdist",
]

docs = [
"Jinja2<3.1",
"Sphinx==5.1.1",
"sphinx_rtd_theme==1.0.0",
"readthedocs-sphinx-search==0.1.2",
"sphinx-toggleprompt==0.2.0",
"sphinx-copybutton==0.5.0",
"roiextractors", # Needed for the API documentation
"spikeinterface>=0.101.0", # Needed for the API documentation
"pydata_sphinx_theme==0.12.0"
"Jinja2<3.1",
"Sphinx==5.1.1",
"sphinx_rtd_theme==1.0.0",
"readthedocs-sphinx-search==0.1.2",
"sphinx-toggleprompt==0.2.0",
"sphinx-copybutton==0.5.0",
"roiextractors", # Needed for the API documentation
"spikeinterface>=0.101.0", # Needed for the API documentation
"pydata_sphinx_theme==0.12.0",
]
dandi = ["dandi>=0.58.1"]
compressors = ["hdf5plugin"]
Expand All @@ -89,14 +89,10 @@ neuroconv = "neuroconv.tools.yaml_conversion_specification._yaml_conversion_spec
[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-ra --doctest-glob='*.rst'"
testpaths = [
"docs/conversion_examples_gallery/",
"tests"
]
testpaths = ["docs/conversion_examples_gallery/", "tests"]
doctest_optionflags = "ELLIPSIS"



[tool.black]
line-length = 120
target-version = ['py38', 'py39', 'py310']
Expand All @@ -119,21 +115,24 @@ extend-exclude = '''
'''



[tool.ruff]
exclude = [
"*/__init__.py"
]

[tool.ruff.lint]
select = ["F401", "I"] # TODO: eventually, expand to other 'F' linting
select = ["F401", "I", "D101"] # TODO: eventually, expand to other 'F' linting
ignore = ["D100"]
pauladkisson marked this conversation as resolved.
Show resolved Hide resolved
fixable = ["ALL"]

[tool.ruff.lint.per-file-ignores]
"**__init__.py" = ["F401", "I"]
"tests/**" = ["D"] # We are not enforcing docstrings in tests
"src/neuroconv/tools/testing/data_interface_mixins.py" = ["D"] # We are not enforcing docstrings in tests

[tool.ruff.lint.isort]
relative-imports-order = "closest-to-furthest"
known-first-party = ["neuroconv"]


[tool.ruff.lint.pydocstyle]
convention = "numpy"
pauladkisson marked this conversation as resolved.
Show resolved Hide resolved

[tool.codespell]
skip = '.git*,*.pdf,*.css'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,9 @@ def __init__(self, file_path: FilePath, noise_std: float = 3.5):


class AxonaLFPDataInterface(BaseLFPExtractorInterface):
"""
Primary data interface class for converting Axona LFP data.
"""

display_name = "Axona LFP"
associated_suffixes = (".bin", ".set")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -361,6 +361,14 @@ def get_original_timestamps(self):


class CellExplorerLFPInterface(CellExplorerRecordingInterface):
"""
Adds LFP data from binary files with the new CellExplorer format:

https://cellexplorer.org/

See the `CellExplorerRecordingInterface` class for more information.
"""

display_name = "CellExplorer LFP"
keywords = BaseRecordingExtractorInterface.keywords + (
"extracellular electrophysiology",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,11 @@ def get_metadata(self) -> dict:


class NeuralynxSortingInterface(BaseSortingExtractorInterface):
"""
Primary data interface for converting Neuralynx sorting data. Uses
:py:class:`~spikeinterface.extractors.NeuralynxSortingExtractor`.
"""

display_name = "Neuralynx Sorting"
associated_suffixes = (".nse", ".ntt", ".nse", ".nev")
info = "Interface for Neuralynx sorting data."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@


class SpikeGLXRecordingInterface(BaseRecordingExtractorInterface):
"""
Primary SpikeGLX interface for converting raw SpikeGLX data using a :py:class:`~spikeinterface.extractors.SpikeGLXRecordingExtractor`.
"""

display_name = "SpikeGLX Recording"
keywords = BaseRecordingExtractorInterface.keywords + ("Neuropixels",)
associated_suffixes = (".imec{probe_index}", ".ap", ".lf", ".meta", ".bin")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@


class BrukerTiffMultiPlaneConverter(NWBConverter):
"""
Converter class for Bruker imaging data with multiple channels and multiple planes.
"""

display_name = "Bruker TIFF Imaging (multiple channels, multiple planes)"
keywords = BrukerTiffMultiPlaneImagingInterface.keywords
associated_suffixes = BrukerTiffMultiPlaneImagingInterface.associated_suffixes
Expand Down Expand Up @@ -123,6 +127,10 @@ def run_conversion(


class BrukerTiffSinglePlaneConverter(NWBConverter):
"""
Primary data interface class for converting Bruker imaging data with multiple channels and a single plane.
"""

display_name = "Bruker TIFF Imaging (multiple channels, single plane)"
keywords = BrukerTiffMultiPlaneImagingInterface.keywords
associated_suffixes = BrukerTiffMultiPlaneImagingInterface.associated_suffixes
Expand Down
4 changes: 3 additions & 1 deletion src/neuroconv/tools/hdmf.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
from hdmf.data_utils import GenericDataChunkIterator as HDMFGenericDataChunkIterator


class GenericDataChunkIterator(HDMFGenericDataChunkIterator):
class GenericDataChunkIterator(HDMFGenericDataChunkIterator): # noqa: D101
# TODO Should this be added to the API?

def _get_default_buffer_shape(self, buffer_gb: float = 1.0) -> tuple[int]:
return self.estimate_default_buffer_shape(
buffer_gb=buffer_gb, chunk_shape=self.chunk_shape, maxshape=self.maxshape, dtype=self.dtype
Expand Down
17 changes: 16 additions & 1 deletion src/neuroconv/tools/path_expansion.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,15 @@


class AbstractPathExpander(abc.ABC):
"""
Abstract base class for expanding file paths and extracting metadata.

This class provides methods to extract metadata from file paths within a directory
and to expand paths based on a specified data specification. It is designed to be
subclassed, with the `list_directory` method needing to be implemented by any
subclass to provide the specific logic for listing files in a directory.
"""

def extract_metadata(self, base_directory: DirectoryPath, format_: str):
"""
Uses the parse library to extract metadata from file paths in the base_directory.
Expand Down Expand Up @@ -128,7 +137,13 @@ def expand_paths(self, source_data_spec: dict[str, dict]) -> list[DeepDict]:


class LocalPathExpander(AbstractPathExpander):
def list_directory(self, base_directory: DirectoryPath) -> Iterable[FilePath]:
"""
Class for expanding file paths and extracting metadata on a local filesystem.

See https://neuroconv.readthedocs.io/en/main/user_guide/expand_path.html for more information.
"""

def list_directory(self, base_directory: DirectoryPath) -> Iterable[FilePath]: # noqa: D101
base_directory = Path(base_directory)
assert base_directory.is_dir(), f"The specified 'base_directory' ({base_directory}) is not a directory!"
return (str(path.relative_to(base_directory)) for path in base_directory.rglob("*"))
Expand Down
30 changes: 30 additions & 0 deletions src/neuroconv/tools/testing/data_interface_mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -767,6 +767,10 @@ def test_interface_alignment(self, setup_interface):


class AudioInterfaceTestMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing Audio interfaces.
"""

# Currently asserted in the downstream testing suite; could be refactored in future PR
def check_read_nwb(self, nwbfile_path: str):
pass
Expand All @@ -777,6 +781,10 @@ def test_interface_alignment(self):


class DeepLabCutInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing DeepLabCut interfaces.
"""

def check_interface_get_original_timestamps(self):
pass # TODO in separate PR

Expand All @@ -797,6 +805,10 @@ def check_nwbfile_temporal_alignment(self):


class VideoInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing Video interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
with NWBHDF5IO(path=nwbfile_path, mode="r", load_namespaces=True) as io:
nwbfile = io.read()
Expand Down Expand Up @@ -867,6 +879,10 @@ def check_interface_original_timestamps_inmutability(self):


class MedPCInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing MedPC interfaces.
"""

def check_no_metadata_mutation(self, metadata: dict):
"""Ensure the metadata object was not altered by `add_to_nwbfile` method."""

Expand Down Expand Up @@ -1101,6 +1117,10 @@ def test_interface_alignment(self, medpc_name_to_info_dict: dict):


class MiniscopeImagingInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing Miniscope Imaging interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
from ndx_miniscope import Miniscope

Expand Down Expand Up @@ -1129,6 +1149,10 @@ def check_read_nwb(self, nwbfile_path: str):


class ScanImageSinglePlaneImagingInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing ScanImage Single Plane Imaging interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
with NWBHDF5IO(nwbfile_path, "r") as io:
nwbfile = io.read()
Expand Down Expand Up @@ -1160,6 +1184,10 @@ def check_read_nwb(self, nwbfile_path: str):


class ScanImageMultiPlaneImagingInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""
A mixin for testing ScanImage MultiPlane Imaging interfaces.
"""

def check_read_nwb(self, nwbfile_path: str):
with NWBHDF5IO(nwbfile_path, "r") as io:
nwbfile = io.read()
Expand Down Expand Up @@ -1190,6 +1218,8 @@ def check_read_nwb(self, nwbfile_path: str):


class TDTFiberPhotometryInterfaceMixin(DataInterfaceTestMixin, TemporalAlignmentMixin):
"""Mixin for testing TDT Fiber Photometry interfaces."""

def check_no_metadata_mutation(self, metadata: dict):
"""Ensure the metadata object was not altered by `add_to_nwbfile` method."""

Expand Down
12 changes: 12 additions & 0 deletions src/neuroconv/tools/testing/mock_interfaces.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@


class MockBehaviorEventInterface(BaseTemporalAlignmentInterface):
"""
A mock behavior event interface for testing purposes.
"""

@classmethod
def get_source_schema(cls) -> dict:
source_schema = get_schema_from_method_signature(method=cls.__init__, exclude=["event_times"])
Expand Down Expand Up @@ -56,6 +60,10 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):


class MockSpikeGLXNIDQInterface(SpikeGLXNIDQInterface):
"""
A mock SpikeGLX interface for testing purposes.
"""

ExtractorName = "NumpyRecording"

@classmethod
Expand Down Expand Up @@ -151,6 +159,10 @@ def get_metadata(self) -> dict:


class MockImagingInterface(BaseImagingExtractorInterface):
"""
A mock imaging interface for testing purposes.
"""

def __init__(
self,
num_frames: int = 30,
Expand Down
Loading
Loading