Add standalone interfaces (#18)
* enhancements

* add pump probe imaging interface

* debugging

* debugging name collision

* saving state

* added pumpprobe segmentation

* added neuropal and debugged ogen

* added neuropal segmentation

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* typos

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
CodyCBakerPhD and pre-commit-ci[bot] authored Jun 26, 2024
1 parent 8040465 commit 1bb46f9
Showing 18 changed files with 789 additions and 304 deletions.
8 changes: 7 additions & 1 deletion README.md
@@ -7,8 +7,14 @@ Includes the publication Neural signal propagation atlas of Caenorhabditis elegans

## Installation

```
```bash
git clone https://github.com/catalystneuro/leifer-lab-to-nwb
cd leifer-lab-to-nwb
pip install -e .
```

Then to install the specific set of dependencies for a particular conversion, such as `randi_nature_2023`:

```bash
pip install --requirement src/leifer_lab_to_nwb/randi_nature_2023/requirements.txt
```
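
Not part of this diff, but as a usage note: once those dependencies are installed, the `randi_nature_2023` conversion can be run via its session script, assuming the data paths hard-coded near the top of `convert_session.py` point at a local copy of the raw data.

```bash
python src/leifer_lab_to_nwb/randi_nature_2023/convert_session.py
```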
@@ -1,26 +1,37 @@
import copy
from typing import Union

import ndx_multichannel_volume
import neuroconv
import pynwb
from pydantic import FilePath


class RandiNature2023Converter(neuroconv.ConverterPipe):
def get_metadata_schema(self) -> dict:
base_metadata_schema = super().get_metadata_schema()

# Suppress special Subject field validations
metadata_schema = copy.deepcopy(base_metadata_schema)
metadata_schema["properties"].pop("Subject")

return metadata_schema

def run_conversion(
self,
nwbfile_path: Union[str, None] = None,
nwbfile: Union[pynwb.NWBFile, None] = None,
metadata: Union[dict, None] = None,
nwbfile_path: FilePath | None = None,
nwbfile: pynwb.NWBFile | None = None,
metadata: dict | None = None,
overwrite: bool = False,
conversion_options: Union[dict, None] = None,
conversion_options: dict | None = None,
) -> pynwb.NWBFile:
if metadata is None:
metadata = self.get_metadata()
self.validate_metadata(metadata=metadata)

metadata_copy = dict(metadata)
subject_metadata = metadata_copy.pop("Subject") # Must remove from base metadata
ibl_subject = ndx_multichannel_volume.CElegansSubject(**subject_metadata)
subject = ndx_multichannel_volume.CElegansSubject(**subject_metadata)

conversion_options = conversion_options or dict()
self.validate_conversion_options(conversion_options=conversion_options)
@@ -32,7 +43,7 @@ def run_conversion(
overwrite=overwrite,
verbose=self.verbose,
) as nwbfile_out:
nwbfile_out.subject = ibl_subject
nwbfile_out.subject = subject
for interface_name, data_interface in self.data_interface_objects.items():
data_interface.add_to_nwbfile(
nwbfile=nwbfile_out, metadata=metadata_copy, **conversion_options.get(interface_name, dict())
33 changes: 18 additions & 15 deletions src/leifer_lab_to_nwb/randi_nature_2023/convert_session.py
@@ -9,8 +9,8 @@
from leifer_lab_to_nwb.randi_nature_2023 import RandiNature2023Converter
from leifer_lab_to_nwb.randi_nature_2023.interfaces import (
ExtraOphysMetadataInterface,
OnePhotonSeriesInterface,
OptogeneticStimulationInterface,
PumpProbeImagingInterface,
SubjectInterface,
)

@@ -30,38 +30,41 @@
raw_data_file_path = raw_pumpprobe_folder_path / "sCMOS_Frames_U16_1024x512.dat"
logbook_file_path = raw_pumpprobe_folder_path.parent / "logbook.txt"

nwbfile_path = base_folder_path / "nwbfiles" / f"{session_string}.nwb"
nwbfile_folder_path = base_folder_path / "nwbfiles"
nwbfile_folder_path.mkdir(exist_ok=True)
nwbfile_path = nwbfile_folder_path / f"{session_string}.nwb"

# Initialize interfaces
data_interfaces = list()

# TODO: pending logbook consistency across sessions (still uploading)
# subject_interface = SubjectInterface(file_path=logbook_file_path, session_id=session_string)
# data_interfaces.append(subject_interface)

# one_photon_series_interface = OnePhotonSeriesInterface(folder_path=raw_pumpprobe_folder_path)
# TODO: pending extension
# one_photon_series_interface = PumpProbeImagingInterface(folder_path=raw_pumpprobe_folder_path)
# data_interfaces.append(one_photon_series_interface)

extra_ophys_metadata_interface = ExtraOphysMetadataInterface(folder_path=raw_pumpprobe_folder_path)
data_interfaces.append(extra_ophys_metadata_interface)

optogenetic_stimulation_interface = OptogeneticStimulationInterface(folder_path=raw_pumpprobe_folder_path)
data_interfaces.append(optogenetic_stimulation_interface)

# Initialize converter
data_interfaces = [
# subject_interface, # TODO: pending logbook consistency across sessions (still uploading)
# one_photon_series_interface, # TODO: pending extension
extra_ophys_metadata_interface,
optogenetic_stimulation_interface,
]
converter = RandiNature2023Converter(data_interfaces=data_interfaces)

metadata = converter.get_metadata()

metadata["NWBFile"]["session_start_time"] = session_start_time

# metadata["Subject"]["subject_id"] = session_start_time.strftime("%y%m%d") # TODO: hopefully come up with better ID
# metadata["Subject"]["species"] = "C. elegans"
# metadata["Subject"]["sex"] = "XX" # TODO: pull from global listing by subject
# metadata["Subject"]["age"] = "P1D" # TODO: request
# TODO: shouldn't need most of this once logbook parsing is done
metadata["Subject"]["subject_id"] = session_start_time.strftime("%y%m%d")
metadata["Subject"]["species"] = "C. elegans"
metadata["Subject"]["sex"] = "XX"
metadata["Subject"]["age"] = "P1D"
# metadata["Subject"]["growth_stage_time"] = pandas.Timedelta(hours=2, minutes=30).isoformat() # TODO: request
# metadata["Subject"]["growth_stage"] = "YA" # TODO: request
# metadata["Subject"]["cultivation_temp"] = "20." # TODO: request, schema says in units Celsius
metadata["Subject"]["growth_stage"] = "YA"
metadata["Subject"]["cultivation_temp"] = 20.0

converter.run_conversion(nwbfile_path=nwbfile_path, metadata=metadata, overwrite=True)
12 changes: 8 additions & 4 deletions src/leifer_lab_to_nwb/randi_nature_2023/interfaces/__init__.py
@@ -1,13 +1,17 @@
"""Collection of interfaces for the conversion of data related to the Randi (Nature 2023) paper from the Leifer lab."""

from ._extra_ophys_metadata import ExtraOphysMetadataInterface
from ._logbook_metadata import SubjectInterface
from ._onephotonseries import OnePhotonSeriesInterface
from ._neuropal_imaging_interface import NeuroPALImagingInterface
from ._neuropal_segmentation_interface import NeuroPALSegmentationInterface
from ._optogenetic_stimulation import OptogeneticStimulationInterface
from ._pump_probe_imaging_interface import PumpProbeImagingInterface
from ._pump_probe_segmentation_interface import PumpProbeSegmentationInterface

__all__ = [
"ExtraOphysMetadataInterface",
"OnePhotonSeriesInterface",
"PumpProbeImagingInterface",
"PumpProbeSegmentationInterface",
"NeuroPALImagingInterface",
"NeuroPALSegmentationInterface",
"OptogeneticStimulationInterface",
"SubjectInterface",
]

This file was deleted.

@@ -8,46 +8,29 @@


class ExtraOphysMetadataInterface(neuroconv.BaseDataInterface):
"""A custom interface for adding extra table metadata for the ophys rig."""

def __init__(self, *, folder_path: DirectoryPath) -> None:
def __init__(self, *, pumpprobe_folder_path: DirectoryPath) -> None:
"""
A custom interface for adding extra table metadata for the ophys rig.
Parameters
----------
folder_path : DirectoryPath
pumpprobe_folder_path : DirectoryPath
Path to the raw pumpprobe folder.
"""
folder_path = pathlib.Path(folder_path)
pumpprobe_folder_path = pathlib.Path(pumpprobe_folder_path)

super().__init__(folder_path=folder_path)

self.z_scan_file_path = folder_path / "zScan.json"
with open(file=self.z_scan_file_path, mode="r") as fp:
z_scan_file_path = pumpprobe_folder_path / "zScan.json"
with open(file=z_scan_file_path, mode="r") as fp:
self.z_scan = json.load(fp=fp)

self.sync_table_file_path = folder_path / "other-frameSynchronous.txt"
self.sync_table = pandas.read_table(filepath_or_buffer=self.sync_table_file_path, index_col=False)
sync_table_file_path = pumpprobe_folder_path / "other-frameSynchronous.txt"
self.sync_table = pandas.read_table(filepath_or_buffer=sync_table_file_path, index_col=False)

def add_to_nwbfile(self, nwbfile: pynwb.NWBFile):
def add_to_nwbfile(self, nwbfile: pynwb.NWBFile, metadata: dict):
# Plane depths
volt_per_um = 0.125 # Hardcoded value by the lab
depth_in_um_per_pixel = 0.42 # Hardcoded value by the lab
frame_depth_table = pynwb.file.DynamicTable(
name="FrameDepths",
description=(
"Each frame was acquired at a different depth as tracked by the voltage supplied to an "
"Electrically Tunable Lense (ETL)."
),
columns=[
pynwb.file.VectorData(
name="depth_in_um",
# Referred to in file as 'piezo' but it's really the ETL
data=self.sync_table["Piezo position (V)"] / volt_per_um,
)
],
)

# zScan contents

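
The remainder of `add_to_nwbfile` is elided above, so where the frame-depth data now ends up is not shown. As a rough illustration only (not taken from this commit), the deleted table construction could be attached to an NWB file as a `DynamicTable` in acquisition along these lines, reusing the lab's hard-coded 0.125 V/µm conversion:

```python
from datetime import datetime, timezone

import pandas
import pynwb

# Hypothetical stand-in for the parsed "other-frameSynchronous.txt" sync table.
sync_table = pandas.DataFrame({"Piezo position (V)": [0.0, 0.125, 0.25]})
volt_per_um = 0.125  # hard-coded by the lab, per the interface above

frame_depth_table = pynwb.file.DynamicTable(
    name="FrameDepths",
    description=(
        "Each frame was acquired at a different depth as tracked by the voltage supplied to an "
        "Electrically Tunable Lens (ETL)."
    ),
    columns=[
        pynwb.file.VectorData(
            name="depth_in_um",
            description="Frame depth in micrometers, converted from the ETL control voltage.",
            data=(sync_table["Piezo position (V)"] / volt_per_um).to_numpy(),
        )
    ],
)

# One possible destination; the actual placement is not shown in this diff.
nwbfile = pynwb.NWBFile(
    session_description="demo", identifier="demo", session_start_time=datetime.now(timezone.utc)
)
nwbfile.add_acquisition(frame_depth_table)
```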
@@ -1,48 +1 @@
import json
import pathlib

import ndx_multichannel_volume
import neuroconv
import pandas
import pynwb
from pydantic import FilePath


class SubjectInterface(neuroconv.BaseDataInterface):
"""A custom interface for adding extra subject metadata from the logbook."""

def __init__(self, *, file_path: FilePath, session_id: str) -> None:
"""
A custom interface for adding extra subject metadata from the logbook.
Parameters
----------
file_path : FilePath
Path to the logbook for this session.
"""
file_path = pathlib.Path(file_path)

super().__init__(file_path=file_path, session_id=session_id)

with open(file=file_path, mode="r") as io:
self.logbook = io.readlines()

def add_to_nwbfile(self, nwbfile: pynwb.NWBFile):
session_id = self.source_data["session_id"]

logbook_growth_stage_mapping = {
"L4": "L4",
"young adult": "YA",
"L4/ya": "YA", # TODO: consult them on how to handle this case
}

subject_start_line = self.logbook

subject = ndx_multichannel_volume.CElegansSubject(
subject_id=session_id, # Sessions are effectively defined by the subject number on that day
description="", # TODO: find something from paper
species="Caenorhabditis elegans",
growth_stage=logbook_growth_stage_mapping[growth_stage],
strain=strain,
)
nwbfile.subject = subject
# TODO: write a simple function to read the logbook YAML file and lookup the information for this session
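
That closing TODO sketches the intended replacement for the deleted interface. A minimal sketch of such a lookup, assuming the logbook parses as YAML into a mapping keyed by session ID (the helper name and layout here are hypothetical, not part of this commit):

```python
import yaml  # PyYAML; assumed to be an available dependency


def lookup_logbook_entry(logbook_file_path: str, session_id: str) -> dict:
    """Hypothetical helper: load the logbook YAML and return the entry for one session.

    Assumes the logbook parses to a top-level mapping keyed by session ID; the real
    layout (and which fields, e.g. growth stage or strain, it records) is not shown here.
    """
    with open(file=logbook_file_path, mode="r") as io:
        logbook = yaml.safe_load(io)
    return logbook[session_id]
```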