diff --git a/CHANGELOG.md b/CHANGELOG.md index a06ddf300..cdc70223f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ * Completely removed compression settings from most places[PR #1126](https://github.com/catalystneuro/neuroconv/pull/1126) ## Bug Fixes +* datetime objects can now be validated as conversion options [PR #1139](https://github.com/catalystneuro/neuroconv/pull/1139) ## Features * Imaging interfaces have a new conversion option `always_write_timestamps` that can be used to force writing timestamps even if neuroconv's heuristics indicates regular sampling rate [PR #1125](https://github.com/catalystneuro/neuroconv/pull/1125) diff --git a/src/neuroconv/basedatainterface.py b/src/neuroconv/basedatainterface.py index adcec89b5..272abbd0c 100644 --- a/src/neuroconv/basedatainterface.py +++ b/src/neuroconv/basedatainterface.py @@ -126,7 +126,7 @@ def create_nwbfile(self, metadata: Optional[dict] = None, **conversion_options) return nwbfile @abstractmethod - def add_to_nwbfile(self, nwbfile: NWBFile, **conversion_options) -> None: + def add_to_nwbfile(self, nwbfile: NWBFile, metadata: Optional[dict], **conversion_options) -> None: """ Define a protocol for mapping the data from this interface to NWB neurodata objects. @@ -136,6 +136,8 @@ def add_to_nwbfile(self, nwbfile: NWBFile, **conversion_options) -> None: ---------- nwbfile : pynwb.NWBFile The in-memory object to add the data to. + metadata : dict + Metadata dictionary with information used to create the NWBFile. **conversion_options Additional keyword arguments to pass to the `.add_to_nwbfile` method. 
""" diff --git a/src/neuroconv/nwbconverter.py b/src/neuroconv/nwbconverter.py index 1f3e7c9f8..fe1b09915 100644 --- a/src/neuroconv/nwbconverter.py +++ b/src/neuroconv/nwbconverter.py @@ -29,7 +29,11 @@ unroot_schema, ) from .utils.dict import DeepDict -from .utils.json_schema import _NWBMetaDataEncoder, _NWBSourceDataEncoder +from .utils.json_schema import ( + _NWBConversionOptionsEncoder, + _NWBMetaDataEncoder, + _NWBSourceDataEncoder, +) class NWBConverter: @@ -63,11 +67,10 @@ def validate_source(cls, source_data: dict[str, dict], verbose: bool = True): def _validate_source_data(self, source_data: dict[str, dict], verbose: bool = True): + # We do this to ensure that python objects are in string format for the JSON schema encoder = _NWBSourceDataEncoder() - # The encoder produces a serialized object, so we deserialized it for comparison - - serialized_source_data = encoder.encode(source_data) - decoded_source_data = json.loads(serialized_source_data) + encoded_source_data = encoder.encode(source_data) + decoded_source_data = json.loads(encoded_source_data) validate(instance=decoded_source_data, schema=self.get_source_schema()) if verbose: @@ -106,9 +109,10 @@ def get_metadata(self) -> DeepDict: def validate_metadata(self, metadata: dict[str, dict], append_mode: bool = False): """Validate metadata against Converter metadata_schema.""" encoder = _NWBMetaDataEncoder() - # The encoder produces a serialized object, so we deserialized it for comparison - serialized_metadata = encoder.encode(metadata) - decoded_metadata = json.loads(serialized_metadata) + + # We do this to ensure that python objects are in string format for the JSON schema + encoded_metadta = encoder.encode(metadata) + decoded_metadata = json.loads(encoded_metadta) metadata_schema = self.get_metadata_schema() if append_mode: @@ -138,7 +142,14 @@ def get_conversion_options_schema(self) -> dict: def validate_conversion_options(self, conversion_options: dict[str, dict]): """Validate conversion_options 
against Converter conversion_options_schema.""" - validate(instance=conversion_options or {}, schema=self.get_conversion_options_schema()) + + conversion_options = conversion_options or dict() + + # We do this to ensure that python objects are in string format for the JSON schema + encoded_conversion_options = _NWBConversionOptionsEncoder().encode(conversion_options) + decoded_conversion_options = json.loads(encoded_conversion_options) + + validate(instance=decoded_conversion_options, schema=self.get_conversion_options_schema()) if self.verbose: print("conversion_options is valid!") diff --git a/src/neuroconv/tools/testing/mock_interfaces.py b/src/neuroconv/tools/testing/mock_interfaces.py index dd3ec12c2..0652284e7 100644 --- a/src/neuroconv/tools/testing/mock_interfaces.py +++ b/src/neuroconv/tools/testing/mock_interfaces.py @@ -6,6 +6,7 @@ from pynwb.base import DynamicTable from .mock_ttl_signals import generate_mock_ttl_signal +from ...basedatainterface import BaseDataInterface from ...basetemporalalignmentinterface import BaseTemporalAlignmentInterface from ...datainterfaces import SpikeGLXNIDQInterface from ...datainterfaces.ecephys.baserecordingextractorinterface import ( @@ -23,6 +24,26 @@ from ...utils import ArrayType, get_json_schema_from_method_signature +class MockInterface(BaseDataInterface): + """ + A mock interface for testing basic command passing without side effects. + """ + + def __init__(self, verbose: bool = False, **source_data): + + super().__init__(verbose=verbose, **source_data) + + def get_metadata(self) -> dict: + metadata = super().get_metadata() + session_start_time = datetime.now().astimezone() + metadata["NWBFile"]["session_start_time"] = session_start_time + return metadata + + def add_to_nwbfile(self, nwbfile: NWBFile, metadata: Optional[dict], **conversion_options): + + return None + + class MockBehaviorEventInterface(BaseTemporalAlignmentInterface): """ A mock behavior event interface for testing purposes. 
diff --git a/src/neuroconv/utils/json_schema.py b/src/neuroconv/utils/json_schema.py index 182558b98..07dc3321f 100644 --- a/src/neuroconv/utils/json_schema.py +++ b/src/neuroconv/utils/json_schema.py @@ -60,6 +60,23 @@ def default(self, obj): return super().default(obj) +class _NWBConversionOptionsEncoder(_NWBMetaDataEncoder): + """ + Custom JSON encoder for conversion options of the data interfaces and converters (i.e. kwargs). + + This encoder extends the default JSONEncoder class and provides custom serialization + for certain data types commonly used as conversion options. + """ + + def default(self, obj): + + # Override serialization for Path objects + if isinstance(obj, Path): + return str(obj) + + return super().default(obj) + + def get_base_schema( tag: Optional[str] = None, root: bool = False, diff --git a/tests/test_minimal/test_interface_validation.py b/tests/test_minimal/test_interface_validation.py new file mode 100644 index 000000000..1bc409b06 --- /dev/null +++ b/tests/test_minimal/test_interface_validation.py @@ -0,0 +1,30 @@ +from datetime import datetime +from typing import Optional + +from pynwb import NWBFile + +from neuroconv import ConverterPipe +from neuroconv.tools.testing.mock_interfaces import ( + MockInterface, +) + + +def test_conversion_options_validation(tmp_path): + + class InterfaceWithDateTimeConversionOptions(MockInterface): + "Class for testing how an interface with a datetime conversion option is validated." + + def add_to_nwbfile(self, nwbfile: NWBFile, metadata: Optional[dict], datetime_option: datetime): + pass + + interface = InterfaceWithDateTimeConversionOptions() + + nwbfile_path = tmp_path / "interface_test.nwb" + interface.run_conversion(nwbfile_path=nwbfile_path, datetime_option=datetime.now(), overwrite=True) + + data_interfaces = {"InterfaceWithDateTimeConversionOptions": interface} + conversion_options = {"InterfaceWithDateTimeConversionOptions": {"datetime_option": datetime.now()}} + converter = 
ConverterPipe(data_interfaces=data_interfaces) + + nwbfile_path = tmp_path / "converter_test.nwb" + converter.run_conversion(nwbfile_path=nwbfile_path, overwrite=True, conversion_options=conversion_options)