From 326ab85146c787f23639c2765aadd95c8f5f791b Mon Sep 17 00:00:00 2001
From: Eric Perlman
Date: Thu, 26 Oct 2023 21:47:35 -0400
Subject: [PATCH 1/3] Rough version of NML export.

---
 navis/io/__init__.py |   5 ++-
 navis/io/nmx_io.py   | 101 ++++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 102 insertions(+), 4 deletions(-)

diff --git a/navis/io/__init__.py b/navis/io/__init__.py
index a7edfbfb..3dc6a28c 100644
--- a/navis/io/__init__.py
+++ b/navis/io/__init__.py
@@ -17,7 +17,7 @@
 from .precomputed_io import read_precomputed, write_precomputed
 from .hdf_io import read_h5, write_h5, inspect_h5
 from .rda_io import read_rda
-from .nmx_io import read_nmx
+from .nmx_io import read_nml, read_nmx, write_nml, write_nmx
 from .mesh_io import read_mesh, write_mesh
 from .tiff_io import read_tiff
 from .pq_io import read_parquet, write_parquet, scan_parquet
@@ -29,6 +29,7 @@
            'read_precomputed', 'write_precomputed',
            'read_tiff',
            'read_rda',
-           'read_nmx',
+           'read_nml', 'write_nml',
+           'read_nmx', 'write_nmx',
            'read_mesh', 'write_mesh',
            'read_parquet', 'write_parquet', 'scan_parquet']

diff --git a/navis/io/nmx_io.py b/navis/io/nmx_io.py
index cd5c3e18..eea6c8d6 100644
--- a/navis/io/nmx_io.py
+++ b/navis/io/nmx_io.py
@@ -17,14 +17,18 @@
 import pandas as pd
 import xml.etree.ElementTree as ET
 
-from typing import Union, Dict, Optional, Any, IO, Iterable
+from pathlib import Path
+
+
+from typing import Union, Dict, Optional, Any, IO, Iterable, List
 from zipfile import ZipFile
 
 from .. import config, core
 from . import base
+from .swc_io import make_swc_table
 
-__all__ = ["read_nmx", "read_nml"]
+__all__ = ["read_nmx", "read_nml", "write_nmx", "write_nml"]
 
 # Set up logging
 logger = config.get_logger(__name__)
@@ -286,3 +290,96 @@ def read_nml(f: Union[str, pd.DataFrame, Iterable],
                                include_subdirs=include_subdirs)
 
     return neurons
+
+
+def write_nml(x: 'core.NeuronObject',
+              filepath: Union[str, Path],
+              return_node_map: bool = False) -> None:
+    """Write TreeNeuron(s) to NML.
+
+    Follows the format described
+    `here `_.
+
+    Parameters
+    ----------
+    x :                 TreeNeuron | Dotprops | NeuronList
+                        If multiple neurons, will generate a single NML file
+                        for each neuron (see also ``filepath``).
+    filepath :          str | pathlib.Path | list thereof
+                        Destination for the NML files. See examples for options.
+                        If ``x`` is multiple neurons, ``filepath`` must either
+                        be a folder, a "formattable" filename, a filename ending
+                        in `.zip` or a list of filenames (one for each neuron
+                        in ``x``). Existing files will be overwritten!
+
+    return_node_map :   bool
+                        If True, will return a dictionary mapping the old node
+                        ID to the new reindexed node IDs in the file.
+
+    Returns
+    -------
+    node_map :          dict
+                        Only if ``return_node_map=True``.
+
+    See Also
+    --------
+    :func:`navis.read_nml`
+                        Import skeleton from NML files.
+
+    """
+
+    root = ET.Element('things')
+
+    # Parameters section
+    parameters = ET.SubElement(root, 'parameters')
+    offset = ET.SubElement(parameters, 'offset', x='0', y='0', z='0')
+    scale = ET.SubElement(parameters, 'scale', x='1', y='1', z='1')
+
+    # This neuron
+    thing = ET.SubElement(root, 'thing', id="1")
+    thing.attrib["name"] = x.name
+    thing.attrib.update({"color.r": '0.0',
+                         "color.g": '0.0',
+                         "color.b": '1.0',
+                         "color.a": '1.0'})
+
+    nodes = ET.SubElement(thing, 'nodes')
+    edges = ET.SubElement(thing, 'edges')
+
+    # Use the SWC table as the basis for nodes & edges
+    res = make_swc_table(x,
+                         labels=False,
+                         export_connectors=False,
+                         return_node_map=return_node_map)
+
+    if return_node_map:
+        swc, node_map = res[0], res[1]
+    else:
+        swc = res
+
+    for index, row in swc.iterrows():
+        node = ET.SubElement(nodes, 'node')
+        node.attrib.update({"id": str(int(row["PointNo"])),
+                            "radius": str(row["Radius"]),
+                            "x": str(row["X"]),
+                            "y": str(row["Y"]),
+                            "z": str(row["Z"])})
+        if row["Parent"] != -1:
+            edge = ET.SubElement(edges, 'edge', source=str(int(row["Parent"])),
+                                 target=str(int(row["PointNo"])))
+
+    with open(filepath, 'wb') as file:
+        tree = ET.ElementTree(root)
+        ET.indent(tree, space=" ", level=0)
+        tree.write(file)
+
+    if return_node_map:
+        return node_map
+
+
+
+def write_nmx():
+    """
+    TODO: Generate NMX files (collection of NML files)
+    """
+    raise NotImplementedError("Not yet implemented")
\ No newline at end of file

From d1b5f5ab9ad5e76e63a0a21c2128a43607173197 Mon Sep 17 00:00:00 2001
From: Connor Laughland
Date: Tue, 18 Jun 2024 13:42:41 -0700
Subject: [PATCH 2/3] Edited read_nml to handle NMLs with multiple skeletons;
 Edited write_nml to write either a single NML or multiple NMLs

---
 navis/core/neuronlist.py |  20 +++++
 navis/io/nmx_io.py       | 159 ++++++++++++++++++++-------------------
 2 files changed, 100 insertions(+), 79 deletions(-)

diff --git a/navis/core/neuronlist.py b/navis/core/neuronlist.py
index e510635c..0d4c288c 100644
--- a/navis/core/neuronlist.py
+++ b/navis/core/neuronlist.py
@@ -935,6 +935,26 @@ def unmix(self):
         """
         return {t: self.__class__([n for n in self.neurons if isinstance(n, t)])
                 for t in self.types}
+
+
+    def unique_nodes(self) -> 'NeuronList':
+        """Return NeuronList with unique node IDs."""
+        st_node = 1
+        for sk in self:
+            keys = list(sk.nodes['node_id'])
+            values = list(range(st_node, st_node + len(keys)))
+            res = {keys[i]: values[i] for i in range(len(keys))}
+            sk.nodes.replace({"node_id": res}, inplace=True)
+            sk.nodes.replace({"parent_id": res}, inplace=True)
+            st_node += len(keys)
+        return self
+
+    def to_TreeNeuron(self) -> core.TreeNeuron:
+        """Return a single TreeNeuron object."""
+        skels = self.unique_nodes()
+        skels = core.TreeNeuron(skels.nodes)
+        skels.nodes.drop(columns=['neuron'], inplace=True)
+        return skels
 
 
 class _IdIndexer():
diff --git a/navis/io/nmx_io.py b/navis/io/nmx_io.py
index b5fa1284..f3738b84 100644
--- a/navis/io/nmx_io.py
+++ b/navis/io/nmx_io.py
@@ -12,6 +12,7 @@
 # GNU General Public License for more details.
 
 import io
+import os
 
 import networkx as nx
 import pandas as pd
@@ -61,9 +62,7 @@ def __init__(
             'radius': float_,
         }
 
-    def read_buffer(
-        self, f: IO, attrs: Optional[Dict[str, Any]] = None
-    ) -> 'core.TreeNeuron':
+    def read_buffer(self, f: IO, attrs: Optional[Dict[str, Any]] = None) -> 'core.TreeNeuron':
         """Read .nml buffer into a TreeNeuron.
 
         NML files are XML-encoded files containing data for a single neuron.
@@ -81,10 +80,11 @@ def read_buffer(
         """
         return self.read_nml(f.read(), attrs=attrs)
 
+
     def read_nml(
         self, f: IO, attrs: Optional[Dict[str, Any]] = None
     ) -> 'core.TreeNeuron':
-        """Read .nml buffer into a TreeNeuron.
+        """Read .nml buffers into a NeuronList.
 
         NML files are XML files containing a single neuron.
@@ -105,6 +105,7 @@ def read_nml(
             f = io.StringIO(f)
 
         root = ET.parse(f).getroot()
+        nl = core.NeuronList(None)
         # Copy the attributes dict
         for element in root:
             if element.tag == 'thing':
@@ -115,19 +116,21 @@ def read_nml(
                 nodes.rename({'id': 'node_id'}, axis=1, inplace=True)
                 nodes = nodes.astype({k: v for k, v in self._dtypes.items() if k in nodes.columns})
 
-        G = nx.Graph()
-        G.add_edges_from(edges.values)
-        tree = nx.bfs_tree(G, list(G.nodes)[0])
-        edges = pd.DataFrame(list(tree.edges), columns=['source', 'target'])
-        nodes['parent_id'] = edges.set_index('target').reindex(nodes.node_id.values).source.values
-        nodes['parent_id'] = nodes.parent_id.fillna(-1).astype(self._dtypes['node_id'])
-        nodes.sort_values('node_id', inplace=True)
+                G = nx.Graph()
+                G.add_edges_from(edges.values)
+                tree = nx.bfs_tree(G, list(G.nodes)[0])
+                edges = pd.DataFrame(list(tree.edges), columns=['source', 'target'])
+                nodes['parent_id'] = edges.set_index('target').reindex(nodes.node_id.values).source.values
+                nodes['parent_id'] = nodes.parent_id.fillna(-1).astype(self._dtypes['node_id'])
+                nodes.sort_values('node_id', inplace=True)
 
-        return core.TreeNeuron(
-            nodes,
-            **(self._make_attributes({'name': 'NML', 'origin': 'nml'}, attrs))
-        )
+                nl.append(core.TreeNeuron(nodes))
+
+        for key, value in (attrs or {}).items():
+            nl.set_neuron_attributes(value, key)
+
+        return nl
 
 
 class NMXReader(NMLReader):
     """This is a version of the NML file reader that reads from zipped archives."""
@@ -302,89 +305,87 @@ def read_nml(f: Union[str, pd.DataFrame, Iterable],
                                include_subdirs=include_subdirs)
 
     return neurons
 
 
-def write_nml(x: 'core.NeuronObject',
-              filepath: Union[str, Path],
-              return_node_map: bool = False) -> None:
+def write_nml(x, filepath, return_node_map=False, single_file=True):
     """Write TreeNeuron(s) to NML.
 
-    Follows the format described
-    `here `_.
-
     Parameters
    ----------
     x :                 TreeNeuron | Dotprops | NeuronList
-                        If multiple neurons, will generate a single NML file
-                        for each neuron (see also ``filepath``).
     filepath :          str | pathlib.Path | list thereof
-                        Destination for the NML files. See examples for options.
-                        If ``x`` is multiple neurons, ``filepath`` must either
-                        be a folder, a "formattable" filename, a filename ending
-                        in `.zip` or a list of filenames (one for each neuron
-                        in ``x``). Existing files will be overwritten!
-
-    return_node_map :   bool
-                        If True, will return a dictionary mapping the old node
-                        ID to the new reindexed node IDs in the file.
-
-    Returns
-    -------
-    node_map :          dict
-                        Only if ``return_node_map=True``.
-
+                        If ``single_file=True`` (default), ``filepath`` must be a
+                        filename ending in ``.nml``; all neurons are written into
+                        that single file. If ``single_file=False``, ``filepath``
+                        must be an existing folder and one NML file is written
+                        per neuron.
     See Also
     --------
     :func:`navis.read_nml`
                         Import skeleton from NML files.
-
     """
+
+    if single_file:
+        if not filepath.endswith(".nml"):
+            raise ValueError('For a single nml file, the filepath needs to end with .nml')
+
+    if not single_file:
+        if not os.path.isdir(filepath):
+            raise ValueError('For multiple nml files, an existing directory must be provided')
+
+    # Format datatypes
+    x = core.NeuronList(x)
+    if x.type[0] == 'navis.Dotprops':
+        x = x.to_skeleton()
+    else:
+        x = x.unique_nodes()
 
     root = ET.Element('things')
-
-    # Parameters section
     parameters = ET.SubElement(root, 'parameters')
     offset = ET.SubElement(parameters, 'offset', x='0', y='0', z='0')
     scale = ET.SubElement(parameters, 'scale', x='1', y='1', z='1')
-
-    # This neuron
-    thing = ET.SubElement(root, 'thing', id="1")
-    thing.attrib["name"] = x.name
-    thing.attrib.update({"color.r": '0.0',
-                         "color.g": '0.0',
-                         "color.b": '1.0',
-                         "color.a": '1.0'})
-
-    nodes = ET.SubElement(thing, 'nodes')
-    edges = ET.SubElement(thing, 'edges')
-
-    # Use the SWC table as the basis for nodes & edges
-    res = make_swc_table(x,
-                         labels=False,
-                         export_connectors=False,
-                         return_node_map=return_node_map)
-
-    if return_node_map:
-        swc, node_map = res[0], res[1]
-    else:
-        swc = res
 
-    for index, row in swc.iterrows():
-        node = ET.SubElement(nodes, 'node')
-        node.attrib.update({"id": str(int(row["PointNo"])),
-                            "radius": str(row["Radius"]),
-                            "x": str(row["X"]),
-                            "y": str(row["Y"]),
-                            "z": str(row["Z"])})
-        if row["Parent"] != -1:
-            edge = ET.SubElement(edges, 'edge', source=str(int(row["Parent"])),
-                                 target=str(int(row["PointNo"])))
-
-    with open(filepath, 'wb') as file:
-        tree = ET.ElementTree(root)
-        ET.indent(tree, space=" ", level=0)
-        tree.write(file)
-
-    if return_node_map:
-        return node_map
+    for ind, sk in enumerate(x):
+
+        if not single_file:
+            root = ET.Element('things')
+            # Parameters section
+            parameters = ET.SubElement(root, 'parameters')
+            offset = ET.SubElement(parameters, 'offset', x='0', y='0', z='0')
+            scale = ET.SubElement(parameters, 'scale', x='1', y='1', z='1')
+
+        # This neuron
+        thing = ET.SubElement(root, 'thing', id=str(ind + 1))
+        thing.attrib["name"] = str(ind + 1)
+        thing.attrib.update({"color.r": '0.0',
+                             "color.g": '0.0',
+                             "color.b": '1.0',
+                             "color.a": '1.0'})
+
+        nodes = ET.SubElement(thing, 'nodes')
+        edges = ET.SubElement(thing, 'edges')
+
+        for index, row in sk.nodes.iterrows():
+            node = ET.SubElement(nodes, 'node')
+            node.attrib.update({"id": str(int(row["node_id"])),
+                                "radius": str(row["radius"]),
+                                "x": str(row["x"]),
+                                "y": str(row["y"]),
+                                "z": str(row["z"])})
+            if row["parent_id"] != -1:
+                edge = ET.SubElement(edges, 'edge', source=str(int(row["parent_id"])),
+                                     target=str(int(row["node_id"])))
+
+        if not single_file:
+            with open(os.path.join(filepath, str(ind) + ".nml"), 'wb') as file:
+                tree = ET.ElementTree(root)
+                ET.indent(tree, space=" ", level=0)
+                tree.write(file)
+                file.close()
+
+    if single_file:
+        with open(filepath, 'wb') as file:
+            tree = ET.ElementTree(root)
+            ET.indent(tree, space=" ", level=0)
+            tree.write(file)
+            file.close()

From 2b7099f02f42e837d418c4b7c9e052cd8847e1de Mon Sep 17 00:00:00 2001
From: Connor Laughland <71669181+conochur@users.noreply.github.com>
Date: Wed, 31 Jul 2024 15:18:15 -0400
Subject: [PATCH 3/3] Added swc <-> nml functions

---
 navis/io/nmx_io.py | 19 ++++++++++++++-----
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/navis/io/nmx_io.py b/navis/io/nmx_io.py
index f3738b84..825de312 100644
--- a/navis/io/nmx_io.py
+++ b/navis/io/nmx_io.py
@@ -26,10 +26,10 @@
 from .. import config, core
 from . import base
-from .swc_io import make_swc_table
+from .swc_io import make_swc_table, read_swc, write_swc
 
-__all__ = ["read_nmx", "read_nml", "write_nmx", "write_nml"]
+__all__ = ["read_nmx", "read_nml", "write_nmx", "write_nml", "swc_to_nml", "nml_to_swc"]
 
 # Set up logging
 logger = config.get_logger(__name__)
@@ -387,10 +387,19 @@ def write_nml(x, filepath, return_node_map=False, single_file=True):
             tree.write(file)
             file.close()
 
-
-
 def write_nmx():
     """
     TODO: Generate NMX files (collection of NML files)
     """
-    raise NotImplementedError("Not yet implemented")
\ No newline at end of file
+    raise NotImplementedError("Not yet implemented")
+
+def swc_to_nml(filepath: Union[str, Path],
+               outpath: Union[str, Path]):
+    skels = read_swc(filepath)
+    for sk in core.NeuronList(skels):
+        write_nml(sk, os.path.join(outpath, f"{sk.id}.nml"))
+
+def nml_to_swc(filepath: Union[str, Path],
+               outpath: Union[str, Path]):
+    sk = read_nml(filepath)
+    write_swc(sk, outpath)
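
A minimal usage sketch of the API added by patches 1 and 2 (illustrative only, not
part of the diffs above). It assumes the patches are applied on top of a recent
navis checkout, uses navis' bundled example_neurons(), and the file and folder
names are placeholders; the output folder is assumed to already exist.

    import navis

    # Three example skeletons shipped with navis (a NeuronList of TreeNeurons).
    nl = navis.example_neurons(3, kind='skeleton')

    # Patch 2: write all neurons into one NML file (path must end in ".nml") ...
    navis.write_nml(nl, 'neurons.nml', single_file=True)

    # ... or one NML file per neuron into an existing folder.
    navis.write_nml(nl, 'nml_out/', single_file=False)

    # Patch 2: read_nml now returns a NeuronList with one TreeNeuron per <thing>.
    back = navis.read_nml('neurons.nml')
    print(len(back))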
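
Similarly, a hypothetical round trip through patch 3's convenience converters.
They are only added to nmx_io.py's __all__ (patch 3 does not touch
navis/io/__init__.py), so they are imported from the module directly; the paths
below are placeholders and the output folders are assumed to exist.

    from navis.io.nmx_io import swc_to_nml, nml_to_swc

    # Read every SWC under 'skeletons/' and write one NML file per neuron into 'nml_out/'.
    swc_to_nml('skeletons/', 'nml_out/')

    # Read an NML file and write the contained skeleton(s) back out as SWC.
    nml_to_swc('neurons.nml', 'swc_out/')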