Skip to content

Commit

Permalink
Support datetime in conversion options (#1139)
Browse files Browse the repository at this point in the history
  • Loading branch information
h-mayorquin authored Nov 14, 2024
1 parent 6960872 commit e3cde1f
Show file tree
Hide file tree
Showing 6 changed files with 92 additions and 10 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
* Completely removed compression settings from most places[PR #1126](https://github.com/catalystneuro/neuroconv/pull/1126)

## Bug Fixes
* Datetime objects can now be validated as conversion options [#1139](https://github.com/catalystneuro/neuroconv/pull/1139)

## Features
* Imaging interfaces have a new conversion option `always_write_timestamps` that can be used to force writing timestamps even if neuroconv's heuristics indicates regular sampling rate [PR #1125](https://github.com/catalystneuro/neuroconv/pull/1125)
Expand Down
4 changes: 3 additions & 1 deletion src/neuroconv/basedatainterface.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def create_nwbfile(self, metadata: Optional[dict] = None, **conversion_options)
return nwbfile

@abstractmethod
def add_to_nwbfile(self, nwbfile: NWBFile, **conversion_options) -> None:
def add_to_nwbfile(self, nwbfile: NWBFile, metadata: Optional[dict], **conversion_options) -> None:
"""
Define a protocol for mapping the data from this interface to NWB neurodata objects.
Expand All @@ -136,6 +136,8 @@ def add_to_nwbfile(self, nwbfile: NWBFile, **conversion_options) -> None:
----------
nwbfile : pynwb.NWBFile
The in-memory object to add the data to.
metadata : dict
Metadata dictionary with information used to create the NWBFile.
**conversion_options
Additional keyword arguments to pass to the `.add_to_nwbfile` method.
"""
Expand Down
29 changes: 20 additions & 9 deletions src/neuroconv/nwbconverter.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,11 @@
unroot_schema,
)
from .utils.dict import DeepDict
from .utils.json_schema import _NWBMetaDataEncoder, _NWBSourceDataEncoder
from .utils.json_schema import (
_NWBConversionOptionsEncoder,
_NWBMetaDataEncoder,
_NWBSourceDataEncoder,
)


class NWBConverter:
Expand Down Expand Up @@ -63,11 +67,10 @@ def validate_source(cls, source_data: dict[str, dict], verbose: bool = True):

def _validate_source_data(self, source_data: dict[str, dict], verbose: bool = True):

# We do this to ensure that python objects are in string format for the JSON schema
encoder = _NWBSourceDataEncoder()
# The encoder produces a serialized object, so we deserialized it for comparison

serialized_source_data = encoder.encode(source_data)
decoded_source_data = json.loads(serialized_source_data)
encoded_source_data = encoder.encode(source_data)
decoded_source_data = json.loads(encoded_source_data)

validate(instance=decoded_source_data, schema=self.get_source_schema())
if verbose:
Expand Down Expand Up @@ -106,9 +109,10 @@ def get_metadata(self) -> DeepDict:
def validate_metadata(self, metadata: dict[str, dict], append_mode: bool = False):
"""Validate metadata against Converter metadata_schema."""
encoder = _NWBMetaDataEncoder()
# The encoder produces a serialized object, so we deserialized it for comparison
serialized_metadata = encoder.encode(metadata)
decoded_metadata = json.loads(serialized_metadata)

# We do this to ensure that python objects are in string format for the JSON schema
encoded_metadta = encoder.encode(metadata)
decoded_metadata = json.loads(encoded_metadta)

metadata_schema = self.get_metadata_schema()
if append_mode:
Expand Down Expand Up @@ -138,7 +142,14 @@ def get_conversion_options_schema(self) -> dict:

def validate_conversion_options(self, conversion_options: dict[str, dict]):
"""Validate conversion_options against Converter conversion_options_schema."""
validate(instance=conversion_options or {}, schema=self.get_conversion_options_schema())

conversion_options = conversion_options or dict()

# We do this to ensure that python objects are in string format for the JSON schema
encoded_conversion_options = _NWBConversionOptionsEncoder().encode(conversion_options)
decoded_conversion_options = json.loads(encoded_conversion_options)

validate(instance=decoded_conversion_options, schema=self.get_conversion_options_schema())
if self.verbose:
print("conversion_options is valid!")

Expand Down
21 changes: 21 additions & 0 deletions src/neuroconv/tools/testing/mock_interfaces.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from pynwb.base import DynamicTable

from .mock_ttl_signals import generate_mock_ttl_signal
from ...basedatainterface import BaseDataInterface
from ...basetemporalalignmentinterface import BaseTemporalAlignmentInterface
from ...datainterfaces import SpikeGLXNIDQInterface
from ...datainterfaces.ecephys.baserecordingextractorinterface import (
Expand All @@ -23,6 +24,26 @@
from ...utils import ArrayType, get_json_schema_from_method_signature


class MockInterface(BaseDataInterface):
    """Minimal data interface for testing call plumbing without side effects."""

    def __init__(self, verbose: bool = False, **source_data):
        # Forward everything to the base class; this mock keeps no state of its own.
        super().__init__(verbose=verbose, **source_data)

    def get_metadata(self) -> dict:
        # Stamp a timezone-aware current time so downstream NWBFile creation
        # has a valid session_start_time.
        metadata = super().get_metadata()
        metadata["NWBFile"]["session_start_time"] = datetime.now().astimezone()
        return metadata

    def add_to_nwbfile(self, nwbfile: NWBFile, metadata: Optional[dict], **conversion_options):
        # Intentionally a no-op: nothing is ever written to the NWBFile.
        return None


class MockBehaviorEventInterface(BaseTemporalAlignmentInterface):
"""
A mock behavior event interface for testing purposes.
Expand Down
17 changes: 17 additions & 0 deletions src/neuroconv/utils/json_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,23 @@ def default(self, obj):
return super().default(obj)


class _NWBConversionOptionsEncoder(_NWBMetaDataEncoder):
    """
    Custom JSON encoder for conversion options of the data interfaces and converters (i.e. kwargs).

    Extends ``_NWBMetaDataEncoder`` by additionally serializing ``pathlib.Path``
    objects as plain strings; every other type defers to the parent encoder.
    """

    def default(self, obj):
        # Paths become their string form; anything else is delegated upward.
        return str(obj) if isinstance(obj, Path) else super().default(obj)


def get_base_schema(
tag: Optional[str] = None,
root: bool = False,
Expand Down
30 changes: 30 additions & 0 deletions tests/test_minimal/test_interface_validation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
from datetime import datetime
from typing import Optional

from pynwb import NWBFile

from neuroconv import ConverterPipe
from neuroconv.tools.testing.mock_interfaces import (
MockInterface,
)


def test_conversion_options_validation(tmp_path):
    """Datetime-valued conversion options should pass schema validation both on a
    bare interface and when routed through a ConverterPipe."""

    class InterfaceWithDateTimeConversionOptions(MockInterface):
        """Mock interface whose add_to_nwbfile accepts a datetime conversion option."""

        def add_to_nwbfile(self, nwbfile: NWBFile, metadata: Optional[dict], datetime_option: datetime):
            pass

    dt_interface = InterfaceWithDateTimeConversionOptions()

    # Direct interface conversion with a datetime keyword argument.
    interface_path = tmp_path / "interface_test.nwb"
    dt_interface.run_conversion(nwbfile_path=interface_path, datetime_option=datetime.now(), overwrite=True)

    # The same option passed via a ConverterPipe's conversion_options mapping.
    pipe = ConverterPipe(data_interfaces={"InterfaceWithDateTimeConversionOptions": dt_interface})
    converter_path = tmp_path / "converter_test.nwb"
    pipe.run_conversion(
        nwbfile_path=converter_path,
        overwrite=True,
        conversion_options={"InterfaceWithDateTimeConversionOptions": {"datetime_option": datetime.now()}},
    )

0 comments on commit e3cde1f

Please sign in to comment.