Use mixin tests for mocks #1136

Merged · 8 commits · Nov 15, 2024
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -11,6 +11,7 @@
 * Added .csv support to DeepLabCutInterface [PR #1140](https://github.com/catalystneuro/neuroconv/pull/1140)

 ## Improvements
+* Use mixing tests for ecephy's mocks [PR #1136](https://github.com/catalystneuro/neuroconv/pull/1136)

 # v0.6.5 (November 1, 2024)

1 change: 0 additions & 1 deletion src/neuroconv/tools/testing/data_interface_mixins.py
@@ -92,7 +92,6 @@ def test_metadata_schema_valid(self, setup_interface):
         Draft7Validator.check_schema(schema=schema)

     def test_metadata(self, setup_interface):
-        # Validate metadata now happens on the class itself
         metadata = self.interface.get_metadata()
         self.check_extracted_metadata(metadata)

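Note for reviewers unfamiliar with these mixins: the pattern used throughout this PR is that a concrete test class only declares data_interface_cls and interface_kwargs, while the mixin supplies a pytest setup_interface fixture (which instantiates the interface onto self.interface) plus a battery of shared checks such as test_metadata above. The sketch below is a simplified illustration of that mechanism, not neuroconv's actual implementation; only the names data_interface_cls, interface_kwargs, setup_interface, and self.interface are taken from the diff.

# Simplified sketch of the mixin mechanism (illustrative only; the real
# RecordingExtractorInterfaceTestMixin / SortingExtractorInterfaceTestMixin
# in neuroconv carry many more checks).
import pytest


class DataInterfaceTestMixinSketch:
    data_interface_cls = None  # concrete test classes override this
    interface_kwargs = dict()  # and this

    @pytest.fixture
    def setup_interface(self):
        # Build a fresh interface for each test and expose it on the test instance.
        self.interface = self.data_interface_cls(**self.interface_kwargs)
        return self.interface

    def test_metadata(self, setup_interface):
        # Shared check inherited by every concrete subclass.
        metadata = self.interface.get_metadata()
        assert "NWBFile" in metadata

A concrete suite then only sets data_interface_cls and interface_kwargs, exactly as the new TestSortingInterface and TestRecordingInterface classes in the diff below do.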
114 changes: 42 additions & 72 deletions tests/test_ecephys/test_ecephys_interfaces.py
@@ -27,42 +27,61 @@

 python_version = Version(get_python_version())

+from neuroconv.tools.testing.data_interface_mixins import (
+    RecordingExtractorInterfaceTestMixin,
+    SortingExtractorInterfaceTestMixin,
+)

-class TestRecordingInterface(TestCase):
-    @classmethod
-    def setUpClass(cls):
-        cls.single_segment_recording_interface = MockRecordingInterface(durations=[0.100])
-        cls.multi_segment_recording_interface = MockRecordingInterface(durations=[0.100, 0.100])
-
-    def test_stub_single_segment(self):
-        interface = self.single_segment_recording_interface
+
+class TestSortingInterface(SortingExtractorInterfaceTestMixin):
+
+    data_interface_cls = MockSortingInterface
+    interface_kwargs = dict(num_units=4, durations=[0.100])
+
+    def test_propagate_conversion_options(self, setup_interface):
+        interface = self.interface
         metadata = interface.get_metadata()
-        interface.create_nwbfile(stub_test=True, metadata=metadata)
+        nwbfile = interface.create_nwbfile(
+            stub_test=True,
+            metadata=metadata,
+            write_as="processing",
+            units_name="processed_units",
+            units_description="The processed units.",
+        )

-    def test_stub_multi_segment(self):
-        interface = self.multi_segment_recording_interface
+        ecephys = get_module(nwbfile, "ecephys")
+
+        assert nwbfile.units is None
+        assert "processed_units" in ecephys.data_interfaces
+
+
+class TestRecordingInterface(RecordingExtractorInterfaceTestMixin):
+    data_interface_cls = MockRecordingInterface
+    interface_kwargs = dict(durations=[0.100])
+
+    def test_stub(self, setup_interface):
+        interface = self.interface
         metadata = interface.get_metadata()
         interface.create_nwbfile(stub_test=True, metadata=metadata)

-    def test_no_slash_in_name(self):
-        interface = self.single_segment_recording_interface
+    def test_no_slash_in_name(self, setup_interface):
+        interface = self.interface
         metadata = interface.get_metadata()
         metadata["Ecephys"]["ElectricalSeries"]["name"] = "test/slash"
-        with self.assertRaises(jsonschema.exceptions.ValidationError):
+        with pytest.raises(jsonschema.exceptions.ValidationError):
             interface.validate_metadata(metadata)

+    def test_stub_multi_segment(self):
+
-class TestAlwaysWriteTimestamps:
+        interface = MockRecordingInterface(durations=[0.100, 0.100])
+        metadata = interface.get_metadata()
+        interface.create_nwbfile(stub_test=True, metadata=metadata)

-    def test_always_write_timestamps(self):
-        # By default the MockRecordingInterface has a uniform sampling rate
-        interface = MockRecordingInterface(durations=[1.0], sampling_frequency=30_000.0)
+    def test_always_write_timestamps(self, setup_interface):

-        nwbfile = interface.create_nwbfile(always_write_timestamps=True)
+        nwbfile = self.interface.create_nwbfile(always_write_timestamps=True)
         electrical_series = nwbfile.acquisition["ElectricalSeries"]

-        expected_timestamps = interface.recording_extractor.get_times()
+        expected_timestamps = self.interface.recording_extractor.get_times()
         np.testing.assert_array_equal(electrical_series.timestamps[:], expected_timestamps)


@@ -84,33 +103,9 @@ def test_spike2_import_assertions_3_11(self):
             Spike2RecordingInterface.get_all_channels_info(file_path="does_not_matter.smrx")


-class TestSortingInterface:
-
-    def test_run_conversion(self, tmp_path):
-
-        nwbfile_path = Path(tmp_path) / "test_sorting.nwb"
-        num_units = 4
-        interface = MockSortingInterface(num_units=num_units, durations=(1.0,))
-        interface.sorting_extractor = interface.sorting_extractor.rename_units(new_unit_ids=["a", "b", "c", "d"])
-
-        interface.run_conversion(nwbfile_path=nwbfile_path)
-        with NWBHDF5IO(nwbfile_path, "r") as io:
-            nwbfile = io.read()
-
-            units = nwbfile.units
-            assert len(units) == num_units
-            units_df = units.to_dataframe()
-            # Get index in units table
-            for unit_id in interface.sorting_extractor.unit_ids:
-                # In pynwb we write unit name as unit_id
-                row = units_df.query(f"unit_name == '{unit_id}'")
-                spike_times = interface.sorting_extractor.get_unit_spike_train(unit_id=unit_id, return_times=True)
-                written_spike_times = row["spike_times"].iloc[0]
-
-                np.testing.assert_array_equal(spike_times, written_spike_times)
-
-
 class TestSortingInterfaceOld(unittest.TestCase):
     """Old-style tests for the SortingInterface. Remove once we we are sure all the behaviors are covered by the mock."""

     @classmethod
     def setUpClass(cls) -> None:
         cls.test_dir = Path(mkdtemp())

@@ -194,28 +189,3 @@ def test_sorting_full(self):
             nwbfile = io.read()
             for i, start_times in enumerate(self.sorting_start_frames):
                 assert len(nwbfile.units["spike_times"][i]) == self.num_frames - start_times
-
-    def test_sorting_propagate_conversion_options(self):
-        minimal_nwbfile = self.test_dir / "temp2.nwb"
-        metadata = self.test_sorting_interface.get_metadata()
-        metadata["NWBFile"]["session_start_time"] = datetime.now().astimezone()
-        units_description = "The processed units."
-        conversion_options = dict(
-            TestSortingInterface=dict(
-                write_as="processing",
-                units_name="processed_units",
-                units_description=units_description,
-            )
-        )
-        self.test_sorting_interface.run_conversion(
-            nwbfile_path=minimal_nwbfile,
-            metadata=metadata,
-            conversion_options=conversion_options,
-        )
-
-        with NWBHDF5IO(minimal_nwbfile, "r") as io:
-            nwbfile = io.read()
-            ecephys = get_module(nwbfile, "ecephys")
-            self.assertIsNone(nwbfile.units)
-            self.assertIn("processed_units", ecephys.data_interfaces)
-            self.assertEqual(ecephys["processed_units"].description, units_description)

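The removed test_sorting_propagate_conversion_options and the new test_propagate_conversion_options exercise the same behavior: when units are written with write_as="processing", they land in the ecephys processing module under the given units_name instead of the top-level nwbfile.units table. A standalone, hedged sketch of that check follows; it only reuses calls visible in the diff, and the import path (as used elsewhere in this test suite) should be treated as an assumption.

# Hedged sketch of the behavior covered by test_propagate_conversion_options
# (assumes neuroconv is installed).
from neuroconv.tools.testing.mock_interfaces import MockSortingInterface

interface = MockSortingInterface(num_units=4, durations=[0.100])
metadata = interface.get_metadata()

# Writing units "as processing" should route them into processing/ecephys
# rather than the top-level nwbfile.units table.
nwbfile = interface.create_nwbfile(
    stub_test=True,
    metadata=metadata,
    write_as="processing",
    units_name="processed_units",
    units_description="The processed units.",
)

assert nwbfile.units is None
assert "processed_units" in nwbfile.processing["ecephys"].data_interfaces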
9 changes: 0 additions & 9 deletions tests/test_ecephys/test_mock_recording_interface.py

This file was deleted.
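The coverage this module provided (a minimal smoke test of MockRecordingInterface) now lives in TestRecordingInterface above. For reference, a standalone version of that smoke test looks roughly like the following sketch, assembled from calls shown in the diff rather than from the deleted file's exact contents.

# Standalone sketch of the MockRecordingInterface smoke test now covered by
# TestRecordingInterface; import path as used elsewhere in the test suite.
from neuroconv.tools.testing.mock_interfaces import MockRecordingInterface

interface = MockRecordingInterface(durations=[0.100])
metadata = interface.get_metadata()

# stub_test=True keeps the in-memory NWB file small.
nwbfile = interface.create_nwbfile(stub_test=True, metadata=metadata)

# The mock recording is written as an ElectricalSeries in acquisition.
assert "ElectricalSeries" in nwbfile.acquisition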
