diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index e6c78076..72538faf 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -8,6 +8,10 @@ on:
branches:
- master
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
jobs:
deploy:
runs-on: ubuntu-latest
diff --git a/README.md b/README.md
index 9930c033..166919b9 100644
--- a/README.md
+++ b/README.md
@@ -8,13 +8,13 @@ NAVis is a Python 3 library for **N**euron **A**nalysis and **Vis**ualization.
Visit our documentation [here](https://navis-org.github.io/navis/ "NAVis Documentation")!
## Features
-* **polyglot**: work and convert between neuron skeletons, meshes, dotprops and images
-* **visualize**: 2D (matplotlib) and 3D (vispy, plotly or k3d)
+* **polyglot**: work with and convert between neuron skeletons, meshes, dotprops and images
+* **visualize**: 2D (matplotlib) and 3D (octarine, vispy, plotly or k3d) plots
* **process**: skeletonization, meshing, smoothing, repair, downsampling, etc.
* **morphometrics**: Strahler analysis, cable length, volume, tortuosity and more
* **similarity**: compare & cluster by morphology (e.g. NBLAST, persistence or form factor) or connectivity metrics
* **transform**: move data between template brains (built-in support for HDF5, CMTK, Elastix and landmark-based transforms)
-* **interface**: load neurons directly from [neuPrint](https://neuprint.janelia.org), [neuromorpho.org](http://neuromorpho.org) and other data sources
+* **interface**: load neurons directly from [neuPrint](https://neuprint.janelia.org), [neuromorpho.org](http://neuromorpho.org) and other remote data repositories
* **model** neurons and networks using the *NEURON* simulator
* **render**: use Blender 3D for high quality [visualizations](https://youtu.be/wl3sFG7WQJc)
* **R** neuron libraries: interfaces with [nat](https://github.com/jefferis/nat), [rcatmaid](https://github.com/jefferis/rcatmaid), [elmr](https://github.com/jefferis/elmr) and more
@@ -27,7 +27,7 @@ Visit our documentation [here](https://navis-org.github.io/navis/ "NAVis Documen
See the [documentation](http://navis.readthedocs.io/ "NAVis ReadTheDocs") for detailed installation instructions, tutorials and examples. For the impatient:
```sh
-pip3 install 'navis[all]'
+pip3 install "navis[all]"
```
which includes all optional extras providing features and/or performance improvements.
diff --git a/docs/_static/lm_tut/C1.gif b/docs/_static/lm_tut/C1.gif
new file mode 100644
index 00000000..3f72d34c
Binary files /dev/null and b/docs/_static/lm_tut/C1.gif differ
diff --git a/docs/_static/lm_tut/all_skeletons.png b/docs/_static/lm_tut/all_skeletons.png
new file mode 100644
index 00000000..c088e458
Binary files /dev/null and b/docs/_static/lm_tut/all_skeletons.png differ
diff --git a/docs/_static/lm_tut/download.png b/docs/_static/lm_tut/download.png
new file mode 100644
index 00000000..2b3ffe3b
Binary files /dev/null and b/docs/_static/lm_tut/download.png differ
diff --git a/docs/_static/lm_tut/image_stack.png b/docs/_static/lm_tut/image_stack.png
new file mode 100644
index 00000000..e59e05db
Binary files /dev/null and b/docs/_static/lm_tut/image_stack.png differ
diff --git a/docs/_static/lm_tut/labels.png b/docs/_static/lm_tut/labels.png
new file mode 100644
index 00000000..6eeb55fb
Binary files /dev/null and b/docs/_static/lm_tut/labels.png differ
diff --git a/docs/_static/lm_tut/mask.png b/docs/_static/lm_tut/mask.png
new file mode 100644
index 00000000..e04848cb
Binary files /dev/null and b/docs/_static/lm_tut/mask.png differ
diff --git a/docs/_static/lm_tut/stack.gif b/docs/_static/lm_tut/stack.gif
new file mode 100644
index 00000000..328b1478
Binary files /dev/null and b/docs/_static/lm_tut/stack.gif differ
diff --git a/docs/_static/lm_tut/z_stack.png b/docs/_static/lm_tut/z_stack.png
new file mode 100644
index 00000000..0a96f960
Binary files /dev/null and b/docs/_static/lm_tut/z_stack.png differ
diff --git a/docs/_static/lm_tut/zoom_in.png b/docs/_static/lm_tut/zoom_in.png
new file mode 100644
index 00000000..6b21c464
Binary files /dev/null and b/docs/_static/lm_tut/zoom_in.png differ
diff --git a/docs/_static/voxel.png b/docs/_static/voxel.png
new file mode 100644
index 00000000..783376d0
Binary files /dev/null and b/docs/_static/voxel.png differ
diff --git a/docs/api.md b/docs/api.md
index 0382bf81..5a970a8a 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -20,7 +20,7 @@ This API reference is a more or less complete account of the primary functions:
1. [Neuron- and NeuronList functions and methods](#neurons-neuronlists)
2. [Functions for visualization](#visualization)
-3. [Manipulate or analyze neuron morphology](#morphometrics)
+3. [Manipulate or analyze neuron morphology](#neuron-morphology)
4. [Transforming and mirroring data](#transforming-and-mirroring)
5. [Analyze connectivity](#connectivity)
6. [Import/Export](#importexport)
@@ -124,9 +124,12 @@ In addition, a [`navis.TreeNeuron`][] has a range of different properties:
| Method | Description |
|--------|-------------|
+| [`TreeNeuron.adjacency_matrix`][navis.TreeNeuron.adjacency_matrix] | {{ autosummary("navis.TreeNeuron.adjacency_matrix") }} |
| [`TreeNeuron.cable_length`][navis.TreeNeuron.cable_length] | {{ autosummary("navis.TreeNeuron.cable_length") }} |
| [`TreeNeuron.cycles`][navis.TreeNeuron.cycles] | {{ autosummary("navis.TreeNeuron.cycles") }} |
| [`TreeNeuron.downsample`][navis.TreeNeuron.downsample] | {{ autosummary("navis.TreeNeuron.downsample") }} |
+| [`TreeNeuron.edges`][navis.TreeNeuron.edges] | {{ autosummary("navis.TreeNeuron.edges") }} |
+| [`TreeNeuron.edge_coords`][navis.TreeNeuron.edge_coords] | {{ autosummary("navis.TreeNeuron.edge_coords") }} |
| [`TreeNeuron.igraph`][navis.TreeNeuron.igraph] | {{ autosummary("navis.TreeNeuron.igraph") }} |
| [`TreeNeuron.is_tree`][navis.TreeNeuron.is_tree] | {{ autosummary("navis.TreeNeuron.is_tree") }} |
| [`TreeNeuron.n_branches`][navis.TreeNeuron.n_branches] | {{ autosummary("navis.TreeNeuron.n_branches") }} |
@@ -140,9 +143,22 @@ In addition, a [`navis.TreeNeuron`][] has a range of different properties:
| [`TreeNeuron.simple`][navis.TreeNeuron.simple] | {{ autosummary("navis.TreeNeuron.simple") }} |
| [`TreeNeuron.soma_pos`][navis.TreeNeuron.soma_pos] | {{ autosummary("navis.TreeNeuron.soma_pos") }} |
| [`TreeNeuron.subtrees`][navis.TreeNeuron.subtrees] | {{ autosummary("navis.TreeNeuron.subtrees") }} |
+| [`TreeNeuron.vertices`][navis.TreeNeuron.vertices] | {{ autosummary("navis.TreeNeuron.vertices") }} |
| [`TreeNeuron.volume`][navis.TreeNeuron.volume] | {{ autosummary("navis.TreeNeuron.volume") }} |
+#### Skeleton utility functions
+
+| Function | Description |
+|----------|-------------|
+| [`navis.rewire_skeleton()`][navis.rewire_skeleton] | {{ autosummary("navis.rewire_skeleton") }} |
+| [`navis.insert_nodes()`][navis.insert_nodes] | {{ autosummary("navis.insert_nodes") }} |
+| [`navis.remove_nodes()`][navis.remove_nodes] | {{ autosummary("navis.remove_nodes") }} |
+| [`navis.graph.simplify_graph()`][navis.graph.simplify_graph] | {{ autosummary("navis.graph.simplify_graph") }} |
+| [`navis.graph.skeleton_adjacency_matrix()`][navis.graph.skeleton_adjacency_matrix] | {{ autosummary("navis.graph.skeleton_adjacency_matrix") }} |
+
+
+
### Mesh neurons
Properties specific to [`navis.MeshNeuron`][]:
@@ -173,10 +189,16 @@ These are methods and properties specific to [VoxelNeurons][navis.VoxelNeuron]:
| Property | Description |
|------|------|
+| [`VoxelNeuron.density`][navis.VoxelNeuron.density] | {{ autosummary("navis.VoxelNeuron.density") }} |
| [`VoxelNeuron.grid`][navis.VoxelNeuron.grid] | {{ autosummary("navis.VoxelNeuron.grid") }} |
-| [`VoxelNeuron.voxels`][navis.VoxelNeuron.voxels] | {{ autosummary("navis.VoxelNeuron.voxels") }} |
+| [`VoxelNeuron.max`][navis.VoxelNeuron.max] | {{ autosummary("navis.VoxelNeuron.max") }} |
+| [`VoxelNeuron.min`][navis.VoxelNeuron.min] | {{ autosummary("navis.VoxelNeuron.min") }} |
+| [`VoxelNeuron.nnz`][navis.VoxelNeuron.nnz] | {{ autosummary("navis.VoxelNeuron.nnz") }} |
+| [`VoxelNeuron.offset`][navis.VoxelNeuron.offset] | {{ autosummary("navis.VoxelNeuron.offset") }} |
| [`VoxelNeuron.shape`][navis.VoxelNeuron.shape] | {{ autosummary("navis.VoxelNeuron.shape") }} |
| [`VoxelNeuron.strip()`][navis.VoxelNeuron.strip] | {{ autosummary("navis.VoxelNeuron.strip") }} |
+| [`VoxelNeuron.threshold()`][navis.VoxelNeuron.threshold] | {{ autosummary("navis.VoxelNeuron.threshold") }} |
+| [`VoxelNeuron.voxels`][navis.VoxelNeuron.voxels] | {{ autosummary("navis.VoxelNeuron.voxels") }} |
### Dotprops
@@ -207,6 +229,8 @@ These functions will let you convert between neuron types:
| [`navis.conversion.voxels2mesh()`][navis.conversion.voxels2mesh] | {{ autosummary("navis.conversion.voxels2mesh") }} |
| [`navis.conversion.tree2meshneuron()`][navis.conversion.tree2meshneuron] | {{ autosummary("navis.conversion.tree2meshneuron") }} |
+See also [Utility](#utility) for functions to convert to/from basic data types.
+
### NeuronList methods
[`NeuronLists`][navis.NeuronList] let you access all the properties and methods of the neurons
@@ -233,14 +257,14 @@ Properties:
|----------|-------------|
| [`NeuronList.bbox`][navis.NeuronList.bbox] | {{ autosummary("navis.NeuronList.bbox") }} |
| [`NeuronList.empty`][navis.NeuronList.empty] | {{ autosummary("navis.NeuronList.empty") }} |
-| [`NeuronList.id`] | An array with the IDs of the neurons contained in the list. |
-| [`NeuronList.idx`] | An indexer similar to pandas' `iloc` that accepts neuron IDs. |
+| [`NeuronList.id`][navis.NeuronList.id] | An array with the IDs of the neurons contained in the list. |
+| [`NeuronList.idx`][navis.NeuronList.idx] | An indexer similar to pandas' `iloc` that accepts neuron IDs. |
| [`NeuronList.is_degenerated`][navis.NeuronList.is_degenerated] | {{ autosummary("navis.NeuronList.is_degenerated") }} |
| [`NeuronList.is_mixed`][navis.NeuronList.is_mixed] | {{ autosummary("navis.NeuronList.is_mixed") }} |
| [`NeuronList.shape`][navis.NeuronList.shape] | {{ autosummary("navis.NeuronList.shape") }} |
| [`NeuronList.types`][navis.NeuronList.types]| {{ autosummary("navis.NeuronList.types") }} |
-Please see the [tutorial on ``NeuronList``](../generated/gallery/plot_02_neuronlists_intro/) for more
+Please see the [tutorial on ``NeuronList``](../generated/gallery/tutorial_basic_02_neuronlists_intro/) for more
information, including how to index them.
## Visualization
@@ -314,6 +338,7 @@ Functions to analyze morphology.
| [`navis.persistence_vectors()`][navis.persistence_vectors] | {{ autosummary("navis.persistence_vectors") }} |
| [`navis.strahler_index()`][navis.strahler_index] | {{ autosummary("navis.strahler_index") }} |
| [`navis.segment_analysis()`][navis.segment_analysis] | {{ autosummary("navis.segment_analysis") }} |
+| [`navis.ivscc_features()`][navis.ivscc_features] | {{ autosummary("navis.ivscc_features") }} |
| [`navis.sholl_analysis()`][navis.sholl_analysis] | {{ autosummary("navis.sholl_analysis") }} |
| [`navis.tortuosity()`][navis.tortuosity] | {{ autosummary("navis.tortuosity") }} |
| [`navis.betweeness_centrality()`][navis.betweeness_centrality] | {{ autosummary("navis.betweeness_centrality") }} |
@@ -345,6 +370,7 @@ Functions to edit morphology:
| [`navis.smooth_skeleton()`][navis.smooth_skeleton] | {{ autosummary("navis.smooth_skeleton") }} |
| [`navis.smooth_mesh()`][navis.smooth_mesh] | {{ autosummary("navis.smooth_mesh") }} |
| [`navis.smooth_voxels()`][navis.smooth_voxels] | {{ autosummary("navis.smooth_voxels") }} |
+| [`navis.thin_voxels()`][navis.thin_voxels] | {{ autosummary("navis.thin_voxels") }} |
### Resampling
@@ -369,7 +395,6 @@ NBLAST and related functions:
| [`navis.nblast_smart`][navis.nblast_smart] | {{ autosummary("navis.nblast_smart") }} |
| [`navis.nblast_allbyall`][navis.nblast_allbyall] | {{ autosummary("navis.nblast_allbyall") }} |
| [`navis.nblast_align`][navis.nblast_align] | {{ autosummary("navis.nblast_align") }} |
-| [`navis.vxnblast`][navis.vxnblast] | {{ autosummary("navis.vxnblast") }} |
| [`navis.synblast`][navis.synblast] | {{ autosummary("navis.synblast") }} |
| [`navis.persistence_distances`][navis.persistence_distances] | {{ autosummary("navis.persistence_distances") }} |
@@ -428,7 +453,7 @@ like to know which part of a neuron is inside a certain brain region.
## Transforming and Mirroring
Functions to transform spatial data, e.g. move neurons from one brain space to
-another. Check out the [tutorials](../generated/gallery/6_misc/plot_01_transforms/) for examples on how to
+another. Check out the [tutorials](../generated/gallery/6_misc/tutorial_misc_01_transforms/) for examples on how to
use them.
High-level functions:
@@ -475,6 +500,12 @@ So to register and use a new transform you would look something like this:
>>> xf = navis.xform_brain(data, 'brainA', 'brainB')
```
+You can check which transforms are registered like so:
+
+``` python
+>>> navis.transforms.registry.summary() # this outputs a dataframe
+```
+
These are the methods and properties of ``registry``:
| Method | Description |
@@ -502,22 +533,6 @@ Collection of functions to work with graphs and adjacency matrices.
|----------|-------------|
| [`navis.NeuronConnector`][] | {{ autosummary("navis.NeuronConnector") }} |
-### Graphs
-
-Functions to convert between neurons graph representation (networkx or iGraph).
-
-| Function | Description |
-|----------|-------------|
-| [`navis.neuron2nx()`][navis.neuron2nx] | {{ autosummary("navis.neuron2nx") }} |
-| [`navis.neuron2igraph()`][navis.neuron2igraph] | {{ autosummary("navis.neuron2igraph") }} |
-| [`navis.neuron2KDTree()`][navis.neuron2KDTree] | {{ autosummary("navis.neuron2KDTree") }} |
-| [`navis.network2nx()`][navis.network2nx] | {{ autosummary("navis.network2nx") }} |
-| [`navis.network2igraph()`][navis.network2igraph] | {{ autosummary("navis.network2igraph") }} |
-| [`navis.rewire_skeleton()`][navis.rewire_skeleton] | {{ autosummary("navis.rewire_skeleton") }} |
-| [`navis.insert_nodes()`][navis.insert_nodes] | {{ autosummary("navis.insert_nodes") }} |
-| [`navis.remove_nodes()`][navis.remove_nodes] | {{ autosummary("navis.remove_nodes") }} |
-| [`navis.graph.simplify_graph()`][navis.graph.simplify_graph] | {{ autosummary("navis.graph.simplify_graph") }} |
-
### Connectivity metrics
Functions to analyse/cluster neurons based on connectivity.
@@ -575,6 +590,21 @@ Various utility functions.
| [`navis.example_neurons()`][navis.example_neurons] | {{ autosummary("navis.example_neurons") }} |
| [`navis.example_volume()`][navis.example_volume] | {{ autosummary("navis.example_volume") }} |
+### Conversion
+
+Functions to convert between data types.
+
+| Function | Description |
+|----------|-------------|
+| [`navis.neuron2nx()`][navis.neuron2nx] | {{ autosummary("navis.neuron2nx") }} |
+| [`navis.neuron2igraph()`][navis.neuron2igraph] | {{ autosummary("navis.neuron2igraph") }} |
+| [`navis.neuron2KDTree()`][navis.neuron2KDTree] | {{ autosummary("navis.neuron2KDTree") }} |
+| [`navis.neuron2tangents()`][navis.neuron2tangents] | {{ autosummary("navis.neuron2tangents") }} |
+| [`navis.network2nx()`][navis.network2nx] | {{ autosummary("navis.network2nx") }} |
+| [`navis.network2igraph()`][navis.network2igraph] | {{ autosummary("navis.network2igraph") }} |
+| [`navis.nx2neuron()`][navis.nx2neuron] | {{ autosummary("navis.nx2neuron") }} |
+| [`navis.edges2neuron()`][navis.edges2neuron] | {{ autosummary("navis.edges2neuron") }} |
+
## Network Models
{{ navis }} comes with a simple network traversal model (used in [Schlegel, Bates et al., 2021](https://elifesciences.org/articles/66018)).
@@ -600,7 +630,7 @@ imported explicitly as they are not imported at top level.
### NEURON simulator
Functions to facilitate creating models of neurons/networks. Please see
-the [tutorials](../generated/gallery/3_interfaces/plot_00_interfaces_neuron/) for examples.
+the [tutorials](../generated/gallery/3_interfaces/tutorial_interfaces_00_neuron/) for examples.
_Not imported at top level! Must be imported explicitly:_
@@ -728,12 +758,12 @@ These are the additional functions added by {{ navis }}:
| [`neuprint.fetch_skeletons()`][navis.interfaces.neuprint.fetch_skeletons] | {{ autosummary("navis.interfaces.neuprint.fetch_skeletons") }} |
| [`neuprint.fetch_mesh_neuron()`][navis.interfaces.neuprint.fetch_mesh_neuron] | {{ autosummary("navis.interfaces.neuprint.fetch_mesh_neuron") }} |
-Please also check out the [tutorials](../generated/gallery/4_remote/plot_00_remote_neuprint/) for examples of how to fetch and work with data from neuPrint.
+Please also check out the [tutorials](../generated/gallery/4_remote/tutorial_remote_00_neuprint/) for examples of how to fetch and work with data from neuPrint.
### InsectBrain DB API
Set of functions to grab data from [InsectBrain](https://www.insectbraindb.org)
-which hosts some neurons and standard brains (see [tutorials](../generated/gallery/4_remote/plot_03_remote_insect_db/)).
+which hosts some neurons and standard brains (see [tutorials](../generated/gallery/4_remote/tutorial_remote_03_insect_db/)).
_Not imported at top level! Must be imported explicitly:_
@@ -757,7 +787,7 @@ from navis.interfaces import insectbrain_db
Functions to be run inside [Blender 3D](https://www.blender.org/) and import
CATMAID data (see Examples). Please note that this requires Blender >2.8 as
earlier versions are shipped with older Python versions not supported by {{ navis }}.
-See the [tutorials](../generated/gallery/3_interfaces/plot_01_interfaces_blender/) for an introduction of how to use {{ navis }} in
+See the [tutorials](../generated/gallery/3_interfaces/tutorial_interfaces_02_blender/) for an introduction of how to use {{ navis }} in
Blender.
_Not imported at top level! Must be imported explicitly:_
@@ -858,7 +888,7 @@ from navis.interfaces import microns
| [`microns.fetch_neurons()`][navis.interfaces.microns.fetch_neurons] | {{ autosummary("navis.interfaces.microns.fetch_neurons") }} |
| [`microns.get_somas()`][navis.interfaces.microns.get_somas] | {{ autosummary("navis.interfaces.microns.get_somas") }} |
-Please also see the [MICrONS tutorial](../generated/gallery/4_remote/plot_02_remote_microns/).
+Please also see the [MICrONS tutorial](../generated/gallery/4_remote/tutorial_remote_02_microns/).
### R interface
diff --git a/docs/changelog.md b/docs/changelog.md
index 5b33ed12..42f99171 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -20,48 +20,61 @@ pip uninstall navis -y
pip install git+https://github.com/navis-org/navis@master
```
+## Version `1.8.0` { data-toc-label="1.8.0" }
+_Date: 22/09/24_
+
This version contains a major internal rework of both [`navis.plot2d`][] and [`navis.plot3d`][] to make them
more consistent and easier to use.
##### Breaking
- Plotting: the `synapse_layout` parameter was renamed to `cn_layout` (matching e.g. other parameters such as `cn_colors`)
-- Negative views in [`navis.plot2d`][] (e.g. `view=("x", "-z")`) are now implemented by inverting axis rather than changing the underlying data
+- Negative views in [`navis.plot2d`][] (e.g. `view=("x", "-z")`) will now invert the axes rather than change the underlying data
- Minimum version of `matplotlib` is now `3.9` (was `3.6`)
- The `plotly` backend is not part of a minimal install anymore (still installed using `navis[all]`)
- The Vispy backend is now deprecated and will be removed in a future release
- Removed `navis.screenshot` - please use the Octarine/Vispy viewer's `.screenshot()` method instead
+- [`navis.tortuosity`][] now calculates tortuosity as-is (i.e. without resampling) by default
##### Additions
- Added [Octarine](https://github.com/schlegelp/octarine) as the default backend for plotting from terminal
+- New Function: [`navis.ivscc_features`][] computes some basic IVSCC features
- New function: [`navis.graph.skeleton_adjacency_matrix`][] computes the node adjacency for skeletons
- New function: [`navis.graph.simplify_graph`][] simplifies skeleton graphs to only root, branch and leaf nodes while preserving branch length (i.e. weights)
- New [`NeuronList`][navis.NeuronList] method: [`get_neuron_attributes`][navis.NeuronList.get_neuron_attributes] is analogous to `dict.get`
-- [`NeuronLists`][navis.NeuronList] now implemented the `|` (`__or__`) operator which can be used to get the union of two [`NeuronLists`][navis.NeuronList]
+- [`NeuronLists`][navis.NeuronList] now implement the `|` (`__or__`) operator which can be used to get the union of two [`NeuronLists`][navis.NeuronList]
- [`navis.Volume`][] now has an (optional) `.units` property similar to neurons
+- `Tree/MeshNeurons` and `Dotprops` now support addition/subtraction (similar to the already existing multiplication and division) to allow offsetting neurons
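+
+A minimal sketch of the new `NeuronList` union and neuron offset arithmetic (the values are arbitrary):
+
+```python
+import navis
+
+nl = navis.example_neurons(2)
+combined = nl[:1] | nl[1:]   # union of two NeuronLists
+moved = nl[0] + [10, 0, 0]   # offset a neuron by an x/y/z vector
+```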
##### Improvements
- Plotting:
- [`navis.plot3d`][]:
- `legendgroup` parameter (plotly backend) now also sets the legend group's title
- new parameters for the plotly backend:
- - `legend` (default `True`): determines whether legends is shown
- - `legend_orientation` (default `v`): determines whether legend is aranged vertically (`v`) or horizontally (`h`)
- - `linestyle` (default `-`): determines line style for skeletons
+    - `legend` (default `True`): determines whether the legend is shown
+    - `legend_orientation` (default `v`): determines whether the legend is arranged vertically (`v`) or horizontally (`h`)
+    - `linestyle` (default `-`): determines the line style for skeletons
- default for `radius` is now `"auto"`
- [`navis.plot2d`][]:
- the `view` parameter now also works with `methods` `3d` and `3d_complex`
- the `color_by` and `shade_by` parameters now also work when plotting skeletons with `radius=True`
- new defaults: `radius="auto"`, `alpha=1`, `figsize=None` (use matplotlib defaults)
- new parameters for methods `3d` and `3d_complex`: `mesh_shade=False` and `non_view_axes3d`
- - the `scalebar` parameter can now be a dictionary used to style (color, width, etc) the scalebar
+ - the `scalebar` and `soma` parameters can now also be dictionaries to style (color, width, etc) the scalebar/soma
- the `connectors` parameter can now be used to show specific connector types (e.g. `connectors="pre"`)
+- I/O:
+ - `read_*` functions are now able to read from FTP servers (`ftp://...`)
+ - the `limit` parameter used in many `read_*` functions can now also be a regex pattern or a `slice`
+- New parameter in [`navis.resample_skeleton`][]: use `map_columns` to include arbitrary columns in the resampling
+- [`navis.prune_twigs`][] and [`navis.cable_length`][] now accept a `mask` parameter
- General improvements to docs and tutorials
##### Fixes
- Memory usage of `Neuron/Lists` is now correctly re-calculated when the neuron is modified
- Various fixes and improvements for the MICrONS interface (`navis.interfaces.microns`)
- [`navis.graph.node_label_sorting`][] now correctly prioritizes total branch length
-- [`navis.TreeNeuron.simple][] now correctly drops soma nodes if they aren't root, branch or leaf points themselves
+- [`navis.TreeNeuron.simple`][] now correctly drops soma nodes if they aren't root, branch or leaf points themselves
+
+**Full Changelog**: [v1.7.0...v1.8.0](https://github.com/navis-org/navis/compare/v1.7.0...v1.8.0)
## Version `1.7.0` { data-toc-label="1.7.0" }
_Date: 25/07/24_
diff --git a/docs/examples/0_io/README.md b/docs/examples/0_io/README.md
index 3c28edce..64546360 100644
--- a/docs/examples/0_io/README.md
+++ b/docs/examples/0_io/README.md
@@ -1,3 +1,3 @@
-### Import / Export
+## Import / Export
These tutorials will illustrate how to load and save your data:
diff --git a/docs/examples/0_io/plot_02_io_dotprops.py b/docs/examples/0_io/plot_02_io_dotprops.py
deleted file mode 100644
index e2029941..00000000
--- a/docs/examples/0_io/plot_02_io_dotprops.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""
-Dotprops
-========
-
-This tutorial will show you have to work with Dotprops.
-
-[`navis.Dotprops`][] are point clouds with associated principal vectors which are mostly used for
-NBLASTing. They are typically derivatives of skeletons or meshes but you can load them straight from
-confocal data using [`navis.read_nrrd`][]:
-"""
-
-# %%
-import navis
-
-# %%
-# ## From image data
-#
-# For this example I downloaded one of Janelia's Fly Light confocal stacks ([link](https://splitgal4.janelia.org/))
-# and converted it to NRRD format using [ImageJ](https://imagej.net/ij/).
-#
-# Load NRRD file into Dotprops instead of VoxelNeuron:
-# ```python
-# dp = navis.read_nrrd(
-# "~/Downloads/JRC_SS86025_JRC_SS86025-20211112_49_B6.nrrd",
-# output="dotprops",
-# threshold=3000,
-# )
-# ```
-
-# %%
-# !!! note
-# Note the threshold parameter? It determines which voxels (by brightness) are used and which are ignored!
-#
-# ## From other neurons
-#
-# Let's say you have a bunch of skeletons and you need to convert them to dotprops for NBLAST. For that you
-# [`navis.make_dotprops`][]:
-
-sk = navis.example_neurons(3, kind="skeleton")
-dp = navis.make_dotprops(sk, k=5)
-
-# Plot one of the dotprops
-fig, ax = navis.plot2d(dp[0], view=("x", "-z"), method="2d", color="red")
-
-# Add a zoom-in
-axins = ax.inset_axes([0.03, 0.03, 0.47, 0.47], xticklabels=[], yticklabels=[])
-_ = navis.plot2d(dp[0], view=("x", "-z"), method="2d", color="red", ax=axins)
-axins.set_xlim(17e3, 19e3)
-axins.set_ylim(15e3, 13e3)
-ax.indicate_inset_zoom(axins, edgecolor="black")
-
-# %%
-# !!! note
-# The `k` parameter in [`make_dotprops`][navis.make_dotprops] determines how many neighbours are considered to
-# generated the tangent vector for a given point.
-# Higher `k` = smoother. Lower `k` = more detailed but also more noisy. If you have clean data such as these
-# connectome-derived skeletons, you can go with a low `k`. For confocal data, you might want to go with a higher `k`
-# (e.g. 20) to smooth out the noise. You can pass `k` to [`navis.read_nrrd`][] as well.
-#
-# ## Manual construction
-#
-# If not loaded from file, you would typically create [`Dotprops`][navis.Dotprops] via [`navis.make_dotprops`][] but just
-# like all other neuron types, [`Dotprops`][navis.Dotprops] can be constructed manually:
-
-# %%
-import numpy as np
-
-# Create some x/y/z coordinates
-points = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]])
-
-# Create vectors for each point
-# You can skip this point and just provide the `k` parameter
-vect = np.array([[1, 0, 0], [0, 1, 0], [0, 1, 0]])
-
-dp = navis.Dotprops(points, k=None, vect=vect)
-dp
-
-# %%
-# There is no established format to store dotprops. But like all other neuron types in navis, you can pickle data for later (re)use
-# - see the [pickling tutorial](../plot_04_io_pickle). See also the [I/O API reference](../../../api.md#importexport).
diff --git a/docs/examples/0_io/plot_00_io_skeletons.py b/docs/examples/0_io/tutorial_io_00_skeletons.py
similarity index 85%
rename from docs/examples/0_io/plot_00_io_skeletons.py
rename to docs/examples/0_io/tutorial_io_00_skeletons.py
index aea7dab5..364571fd 100644
--- a/docs/examples/0_io/plot_00_io_skeletons.py
+++ b/docs/examples/0_io/tutorial_io_00_skeletons.py
@@ -16,6 +16,9 @@
(e.g. the MICrONS, neuromorpho, Virtual Fly Brain or Janelia hemibrain datasets).
These are covered in separate [tutorials](../../gallery).
+ If you have light-level microscopy data, you might also be interested in the
+ tutorial on [skeletons from light-level data](../zzz_tutorial_io_05_skeletonize).
+
## From SWC files
SWC is a common format for storing neuron skeletons. Thus {{ navis }} provides functions to both
@@ -62,11 +65,26 @@
s
# %%
-# You can even use URLs directly:
+# You can even use URLs or FTP servers directly:
# %%
+
+# From URL:
s = navis.read_swc('https://v2.virtualflybrain.org/data/VFB/i/jrch/jup2/VFB_00101567/volume.swc')
+# %%
+
+# From an FTP folder:
+nl = navis.read_swc('ftp://download.brainlib.org:8811/biccn/zeng/pseq/morph/200526/', limit=3)
+
+
+# !!! tip
+# [`read_swc`][navis.read_swc] is super flexible and can handle a variety of inputs (file names, folders, archives, URLs, etc.).
+# Importantly, it also lets you customize which neurons are loaded and how. For example:
+# - the `limit` parameter can also be used to load only files matching a given pattern
+# - the `fmt` parameter lets you specify how to parse filenames into neuron names and ids
+# Many of the other `navis.read_*` functions share these features!
+
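+# %%
+# Here is what that customization might look like - a sketch only, the archive name
+# and patterns below are made up:
+#
+# ```python
+# # Load only files whose name matches a regex pattern
+# nl = navis.read_swc("skeletons.zip", limit="^7000.*")
+#
+# # Load only the first 10 files
+# nl = navis.read_swc("skeletons.zip", limit=slice(0, 10))
+#
+# # Parse name and id from filenames such as "someneuron_123.swc"
+# nl = navis.read_swc("skeletons.zip", fmt="{name}_{id:int}.swc")
+# ```
+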
# %%
# ## To SWC files
#
@@ -125,7 +143,7 @@
#
# Among other formats, neuroglancer supports a "precomputed" format for skeletons
# (see specs [here](https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/skeletons.md).
-# This binary format is more compact than uncompressed SWC files but probably is not used outside of neuroglancer afaik.
+# This binary format is more compact than uncompressed SWC files but is not used outside of neuroglancer as far as I know.
# That said: {{ navis }} lets you read and write skeletons from/to precomputed format using [`navis.read_precomputed`][] and
# [`navis.write_precomputed`][]. Note that these functions work on both precomputed skeletons and meshes.
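+#
+# A minimal sketch (the output path is hypothetical):
+#
+# ```python
+# n = navis.example_neurons(1)
+# navis.write_precomputed(n, "./precomputed/")
+# n2 = navis.read_precomputed("./precomputed/")
+# ```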
#
@@ -161,11 +179,10 @@
# %%
# There are a few other ways to construct a [`navis.TreeNeuron`][] (e.g. using a graph) - see the docstring for details.
#
-# Also note that all {{ navis }} neurons can be stored to disk using `pickle` - see the [pickling tutorial](../plot_04_io_pickle).
+# Also note that all {{ navis }} neurons can be stored to disk using `pickle` - see the [pickling tutorial](../tutorial_io_04_pickle).
#
# Hopefully the above has given you some entry points on how to load your data. See also the [I/O API reference](../../../api.md#importexport).
#
# Please also keep in mind that you can also convert one neuron type into another - for example by skeletonizing [`MeshNeurons`][navis.MeshNeuron]
# (see also the API reference on [neuron conversion](../../../api.md#converting-between-types)).
-
diff --git a/docs/examples/0_io/plot_01_io_meshes.py b/docs/examples/0_io/tutorial_io_01_meshes.py
similarity index 98%
rename from docs/examples/0_io/plot_01_io_meshes.py
rename to docs/examples/0_io/tutorial_io_01_meshes.py
index a9ab8151..ab87ed9c 100644
--- a/docs/examples/0_io/plot_01_io_meshes.py
+++ b/docs/examples/0_io/tutorial_io_01_meshes.py
@@ -119,7 +119,7 @@
# [`navis.Volume`][] for details.
#
# This tutorial has hopefully given you some entry points on how to load your data. See also the [I/O API reference](../../../api.md#importexport).
-# Also note that all {{ navis }} neurons can be stored to disk using ``pickle`` - see the [pickling tutorial](../plot_04_io_pickle).
+# Also note that all {{ navis }} neurons can be stored to disk using ``pickle`` - see the [pickling tutorial](../tutorial_io_04_pickle).
#
# Please also keep in mind that you can also convert one neuron type into another - for example by skeletonizing [`MeshNeurons`][navis.MeshNeuron]
# (see also the API reference on [neuron conversion](../../../api.md#converting-between-types)).
\ No newline at end of file
diff --git a/docs/examples/0_io/tutorial_io_02_dotprops.py b/docs/examples/0_io/tutorial_io_02_dotprops.py
new file mode 100644
index 00000000..3726e99e
--- /dev/null
+++ b/docs/examples/0_io/tutorial_io_02_dotprops.py
@@ -0,0 +1,144 @@
+"""
+Dotprops
+========
+
+This tutorial will show you how to load/create Dotprops.
+
+[`navis.Dotprops`][] are point clouds with associated principal vectors which are mostly used for
+NBLASTing. They are typically derivatives of skeletons or meshes but you can load them straight from
+e.g. confocal image stacks using [`navis.read_nrrd`][] or [`navis.read_tiff`][].
+
+![dotprops](../../../../_static/dotprops.png)
+"""
+
+# %%
+import navis
+import matplotlib.pyplot as plt
+
+# %%
+# ## From image data
+#
+# For this example we will use a stack from [Janelia's split Gal4 collection](https://splitgal4.janelia.org/).
+# This `LH2094` line is also available from [Virtual Fly Brain](https://v2.virtualflybrain.org/org.geppetto.frontend/geppetto?id=VFB_00102926&i=VFB_00101567,VFB_00102926)
+# where, conveniently, they can be downloaded in NRRD format which we can directly read into {{ navis }}.
+#
+# Let's do this step-by-step first:
+
+# Load raw NRRD image
+im, header = navis.read_nrrd(
+ "https://v2.virtualflybrain.org/data/VFB/i/0010/2926/VFB_00101567/volume.nrrd",
+ output="raw"
+)
+
+# Plot a maximum projection
+max_proj = im.max(axis=2)
+plt.imshow(
+ max_proj.T,
+    extent=(0, int(0.5189 * 1210), (0.5189 * 566), 0),  # extent is calculated from the spacing (see `header`) times the number of x/y pixels
+ cmap='Greys_r',
+ vmax=10 # make it really bright so we can see neurons + outline of the brain
+ )
+
+# %%
+# At this point we could threshold the image, extract above-threshold voxels and convert them to a Dotprops object.
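+# That manual route would look roughly like this (a sketch - the threshold and `k` are
+# arbitrary, and we assume the voxel spacing sits on the diagonal of the NRRD header's
+# "space directions" matrix):
+#
+# ```python
+# import numpy as np
+#
+# voxels = np.argwhere(im >= 5)  # indices of above-threshold voxels
+# points = voxels * np.diag(header["space directions"])  # scale to physical units
+# dp = navis.make_dotprops(points, k=10)
+# ```
+#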
+# However, the easier option is to use [`navis.read_nrrd`][] with the `output="dotprops"` parameter:
+
+dp = navis.read_nrrd(
+ "https://v2.virtualflybrain.org/data/VFB/i/0010/2926/VFB_00101567/volume.nrrd",
+ output="dotprops",
+ threshold=5, # threshold to determine which voxels are used for the dotprops
+ thin=True, # see note below on this parameter!
+ k=10 # number of neighbours to consider when calculating the tangent vector
+)
+
+# %%
+# !!! note "Thinning"
+# In the above [`read_nrrd`][navis.read_nrrd] call we used `thin=True`. This is a post-processing step that
+# thins the image to a single pixel width. This will produce "cleaner" dotprops but can also remove denser
+# neurites thus emphasizing the backbone of the neuron. This option requires the `scikit-image` package:
+#
+# ```bash
+# pip install scikit-image
+# ```
+#
+# Let's overlay the dotprops on the maximum projection:
+
+fig, ax = plt.subplots()
+ax.imshow(
+ max_proj.T,
+ extent=(0, int(0.5189 * 1210), (0.5189 * 566), 0),
+ cmap='Greys_r',
+ vmax=10
+ )
+navis.plot2d(dp, ax=ax, view=("x", "-y"), method="2d", color="r", linewidth=1.5)
+
+# %%
+# This looks pretty good but there is a bit of fluff around the brain which we may want to get rid of:
+
+# Drop everything but the two largest connected components
+dp = navis.drop_fluff(dp, n_largest=2)
+
+# Plot again
+fig, ax = plt.subplots()
+ax.imshow(
+ max_proj.T,
+ extent=(0, int(0.5189 * 1210), (0.5189 * 566), 0),
+ cmap='Greys_r',
+ vmax=10
+ )
+navis.plot2d(dp, ax=ax, view=("x", "-y"), method="2d", color="r", linewidth=1.5)
+
+# %%
+# !!! note
+# To extract the connected components, [`navis.drop_fluff`][] treats all pairs of points within a certain distance
+# as connected. The distance is determined by the `dp_dist` parameter which defaults to 5 x the average distance
+# between points. This is a good value here but you may need to adjust it for your data.
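+#
+# You can set `dp_dist` explicitly like so - a sketch, with an arbitrary value:
+#
+# ```python
+# dp = navis.drop_fluff(dp, n_largest=2, dp_dist=2)
+# ```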
+#
+#
+# ## From other neurons
+#
+# Let's say you have a bunch of skeletons and you need to convert them to dotprops for NBLAST. For that,
+# use [`navis.make_dotprops`][]:
+
+sk = navis.example_neurons(3, kind="skeleton")
+dp = navis.make_dotprops(sk, k=5)
+
+# Plot one of the dotprops
+fig, ax = navis.plot2d(dp[0], view=("x", "-z"), method="2d", color="red")
+
+# Add a zoom-in
+axins = ax.inset_axes([0.03, 0.03, 0.47, 0.47], xticklabels=[], yticklabels=[])
+_ = navis.plot2d(dp[0], view=("x", "-z"), method="2d", color="red", ax=axins)
+axins.set_xlim(17e3, 19e3)
+axins.set_ylim(15e3, 13e3)
+ax.indicate_inset_zoom(axins, edgecolor="black")
+
+# %%
+# !!! note
+# The `k` parameter in [`make_dotprops`][navis.make_dotprops] determines how many neighbours are considered to
+# generate the tangent vector for a given point.
+# Higher `k` = smoother. Lower `k` = more detailed but also more noisy. If you have clean data such as these
+# connectome-derived skeletons, you can go with a low `k`. For confocal data, you might want to go with a higher `k`
+# (e.g. 20) to smooth out the noise. You can pass `k` to [`navis.read_nrrd`][] as well.
+#
+# ## Manual construction
+#
+# If not loaded from file, you would typically create [`Dotprops`][navis.Dotprops] via [`navis.make_dotprops`][] but just
+# like all other neuron types, [`Dotprops`][navis.Dotprops] can be constructed manually:
+
+# %%
+import numpy as np
+
+# Create some x/y/z coordinates
+points = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]])
+
+# Create vectors for each point
+# You can skip this step and instead provide the `k` parameter
+vect = np.array([[1, 0, 0], [0, 1, 0], [0, 1, 0]])
+
+dp = navis.Dotprops(points, k=None, vect=vect)
+dp
+
+# %%
+# There is no established format to store dotprops. But like all other neuron types in navis, you can pickle data for later (re)use
+# - see the [pickling tutorial](../tutorial_io_04_pickle). See also the [I/O API reference](../../../api.md#importexport).
diff --git a/docs/examples/0_io/plot_04_io_pickle.py b/docs/examples/0_io/tutorial_io_04_pickle.py
similarity index 100%
rename from docs/examples/0_io/plot_04_io_pickle.py
rename to docs/examples/0_io/tutorial_io_04_pickle.py
diff --git a/docs/examples/0_io/zzz_tutorial_io_05_skeletonize.py b/docs/examples/0_io/zzz_tutorial_io_05_skeletonize.py
new file mode 100644
index 00000000..845d4947
--- /dev/null
+++ b/docs/examples/0_io/zzz_tutorial_io_05_skeletonize.py
@@ -0,0 +1,242 @@
+"""
+Skeletons from light-level data
+===============================
+
+This tutorial will show you how to extract skeletons from confocal microscopy stacks.
+
+!!! important "This example is not executed"
+ In contrast to almost all other tutorials, this one is not executed when the documentation is built.
+ Consequently, it also does not display any actual code output or plots - images shown are statically
+ embedded. The main reason for this is that the example requires downloading a large-ish file which
+ is a pain in the neck to get to work in the CI enviroment.
+
+Extracting neuron skeletons from microscopy data is a common but non-trivial task. There are about
+as many ways to do this as there are people doing it - from fully manual to fully automated tracing.
+
+In this tutorial, we will show you a fully automated way using a number of easy-to-install Python
+packages. If this isn't for you, check out the [Alternatives](#alternatives) section at the end of this tutorial.
+
+## Requirements:
+
+Please make sure you have the following packages installed:
+
+- [`pynrrd`](https://github.com/mhe/pynrrd) to load image stacks
+ ```shell
+ pip install pynrrd -U
+ ```
+- [`connected-components-3d`](https://github.com/seung-lab/connected-components-3d) (cc3d) to label connected components
+ ``` shell
+ pip install connected-components-3d -U
+ ```
+- [`kimimaro`](https://github.com/seung-lab/kimimaro) to extract the skeletons
+ ```shell
+ pip install kimimaro -U
+ ```
+
+## The Data
+
+The pipeline we're using here was written for pre-segmented data, i.e. there is little in the way
+of dealing with noisy data. There is of course nothing stopping you from doing some additional
+pre-processing to clean up your data _before_ running this pipeline.
+
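+For example (a sketch - `scipy` is an extra assumption, not otherwise used in this
+tutorial), a small median filter can remove salt-and-pepper noise from the image
+stack (`im`, as loaded further down) before thresholding:
+
+```python
+from scipy import ndimage
+
+im = ndimage.median_filter(im, size=3)
+```
+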
+### Download Image Stack
+
+As example data, we will use a confocal stack from the [Janelia Split-Gal4 collection](https://splitgal4.janelia.org/cgi-bin/splitgal4.cgi).
+We picked the [SS00731](https://flweb.janelia.org/cgi-bin/view_splitgal4_imagery.cgi?line=SS00731)
+line because it's already fairly clean as is. We're also lucky in that there are high-resolution stacks
+with stochastic multi-color labeling of individual neurons available.
+
+Scroll all the way to the bottom of the page and in the dropdown for the left-most image,
+select "Download H5J stack: Unaligned".
+
+![download](../../../_static/lm_tut/download.png)
+
+### Convert to NRRD
+
+Next, we need to open this file in [Fiji/ImageJ](https://imagej.net/software/fiji/) to convert it to
+a format we can work with in Python:
+
+1. Fire up Fiji/ImageJ
+2. Drag & drop the `SS00731-20140620_20_C5-f-63x-ventral-Split_GAL4-unaligned_stack.h5j` file into Fiji
+3. Go to "Image" -> "Colors" -> "Split Channels" to split the image into the channels
+4. Discard all but the red "C1" channel with our neurons
+5. Go to "Image" -> "Type" -> "8-bit" to convert the image to 8-bit (optional but recommended)
+6. Save via "File" -> "Save As" -> "NRRD" and save the file as `neuron.nrrd`
+
+![Z stack](../../../_static/lm_tut/C1.gif)
+
+## Extracting the Skeleton
+
+Now that the file is in a format we can load into Python, we can get started:
+"""
+
+# %%
+import kimimaro
+import nrrd
+import navis
+import cc3d
+import numpy as np
+
+# %%
+# First load the image stack:
+
+# `im` is numpy array, `header` is a dictionary
+im, header = nrrd.read(
+ "neuron.nrrd"
+)
+
+# %%
+# Next, we need to find some sensible threshold to binarize the image. This is not strictly
+# necessary (see the note further down) but at least for starters it is more intuitive.
+
+# Threshold the image
+mask = (im >= 20).astype(np.uint8)
+
+# %%
+# You can inspect the mask to see if the thresholding worked as expected:
+# ```python
+# import matplotlib.pyplot as plt
+# plt.imshow(mask.max(axis=2))
+# ```
+#
+# With the `octarine` backend, you can also visualize the volume in 3D:
+# ```python
+# # spacing can be found in the `header` dictionary
+# import octarine as oc
+# v = oc.Viewer()
+# v.add_volume(mask, spacing=(.19, .19, .38))
+# ```
+#
+# ![mask](../../../_static/lm_tut/mask.png)
+#
+# A couple notes on the thresholding:
+#
+# - feel free to test the thresholding in e.g. ImageJ/Fiji
+# - remove as much background as possible without disconnecting neurites
+# - perfection is the enemy of progress: we can denoise/reconnect during postprocessing
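+#
+# For example (a sketch - `scipy` is an assumption here; `cc3d` we use anyway), you could
+# reconnect and denoise the mask before labeling:
+#
+# ```python
+# from scipy import ndimage
+#
+# # Bridge small gaps introduced by the threshold (the gap size is a guess)
+# mask = ndimage.binary_closing(mask, iterations=2).astype(np.uint8)
+#
+# # Drop specks with fewer than 100 voxels
+# mask = cc3d.dust(mask, threshold=100)
+# ```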
+#
+# Next, we need to label the connected components in the image:
+
+# %%
+# Extract the labels
+labels, N = cc3d.connected_components(mask, return_N=True)
+
+# %%
+# Visualize the labels:
+# ```python
+# import cmap
+# import octarine as oc
+# v = oc.Viewer()
+# v.add_volume(labels, spacing=(.19, .19, .38), color=cmap.Colormap('prism'))
+# ```
+#
+# ![labels](../../../_static/lm_tut/labels.png)
+#
+# !!! experiment
+#     `cc3d.connected_components` also works with non-thresholded images - see the `delta` parameter.
+
+# Collect some statistics
+stats = cc3d.statistics(labels)
+
+print("Total no. of labeled componenents:", N)
+print("Per-label voxel counts:", np.sort(stats["voxel_counts"])[::-1])
+print("Label IDs:", np.argsort(stats["voxel_counts"])[::-1])
+
+# %%
+# ```
+# Total no. of labeled components: 37836
+# Per-label voxel counts: [491996140 527374 207632 ... 1 1 1]
+# Label IDs: [ 0 6423 6091 ... 22350 22351 18918]
+# ```
+#
+# Note how label `0` has suspiciously many voxels? That's because this is the background label.
+# We need to make sure to exclude it from the skeletonization process:
+to_skeletonize = np.arange(1, N + 1)
+
+
+# %%
+# Now we can run the actual skeletonization!
+#
+# !!! note "Skeletonization paramters"
+# There are a number of parameters that are worth explaining
+# first because you might want to tweak them for your data:
+#
+#     - `scale` & `const`: control how detailed your skeleton will be: lower = more detailed but noisier
+#     - `anisotropy`: controls the voxel size - see the `header` dictionary for the voxel size of our image
+#     - `dust_threshold`: connected components with fewer voxels than this are skipped
+#     - `object_ids`: a list of labels to process (remember that we skipped the background label)
+#     - `max_paths`: if this is set, the algorithm will only process up to N paths in each skeleton - you can use
+#       this to finish early (e.g. for testing)
+#
+# See the [`kimimaro` repository](https://github.com/seung-lab/kimimaro) for a detailed explanation
+# of the parameters!
+
+skels = kimimaro.skeletonize(
+ labels,
+ teasar_params={
+ "scale": 1.5,
+ "const": 1, # physical units (1 micron in our case)
+ "pdrf_scale": 100000,
+ "pdrf_exponent": 4,
+ "soma_acceptance_threshold": 3.5, # physical units
+ "soma_detection_threshold": 1, # physical units
+ "soma_invalidation_const": 0.5, # physical units
+ "soma_invalidation_scale": 2,
+ "max_paths": None, # default None
+ },
+ object_ids=list(to_skeletonize), # process only the specified labels
+ dust_threshold=500, # skip connected components with fewer than this many voxels
+ anisotropy=(0.19, .19, 0.38), # voxel size in physical units
+ progress=True, # show progress bar
+ parallel=6, # <= 0 all cpu, 1 single process, 2+ multiprocess
+ parallel_chunk_size=1, # how many skeletons to process before updating progress bar
+)
+
+# %%
+# `skels` is a dictionary of `{label: cloudvolume.Skeleton}`. Let's convert these to {{ navis }} neurons:
+
+# Convert skeletons to NAVis neurons
+nl = navis.NeuronList([navis.read_swc(s.to_swc(), id=i) for i, s in skels.items()])
+
+# %%
+# Based on the voxel sizes in `stats`, we can make an educated guess that label `6423` is one of our neurons.
+# Let's visualize it in 3D:
+#
+# ```python
+# import octarine as oc
+# v = oc.Viewer()
+# v.add_neurons(nl.idx[6423], color='r', linewidth=2, radius=False)
+# v.add_volume(im, spacing=(.19, .19, .38), opacity=.5)
+# ```
+#
+# ![stack animation](../../../_static/lm_tut/stack.gif)
+#
+# This looks pretty good off the bat! Now obviously we will have the other large neuron (label `6091`)
+# plus a bunch of smaller skeletons in our NeuronList. Let's have a look at those as well:
+#
+# ![all skeletons](../../../_static/lm_tut/all_skeletons.png)
+#
+# Zooming in on `6091` you will see that it wasn't fully skeletonized: some of the branches are missing
+# and others are disconnected. That's because our threshold for the mask was too high (this neuron
+# had a weaker signal than the other) and/or because we dropped too many fragments during the
+# skeletonization process (see the `dust_threshold` parameter).
+#
+# ![zoom in](../../../_static/lm_tut/zoom_in.png)
+#
+# ## Acknowledgements
+#
+# The packages we used here were written by the excellent Will Silversmith from the Seung lab in Princeton.
+# The image stack we processed is from the Janelia Split-Gal4 collection and was published as part of the
+# [Cheong, Eichler, Stuerner, _et al._ (2024)](https://elifesciences.org/reviewed-preprints/96084v1) paper.
+#
+# ## Alternatives
+#
+# If the pipeline described in this tutorial does not work for you, there are a number of alternatives:
+#
+# 1. [Simple Neurite Tracer](https://imagej.net/plugins/snt/index) is a popular ImageJ plugin for semi-automated tracing
+# 2. Folks at the Allen Institute for Brain Science have published a [protocol for reconstructing neurons](https://portal.brain-map.org/explore/toolkit/morpho-reconstruction/vaa3d-mozak)
+# 3. [NeuTube](https://neutracing.com/tutorial/) is open-source software for reconstructing neurons from fluorescence microscopy images
+
+# %%
+
+# mkdocs_gallery_thumbnail_path = '_static/lm_tut/z_stack.png'
\ No newline at end of file
diff --git a/docs/examples/1_plotting/README.md b/docs/examples/1_plotting/README.md
index 12d96689..cfdfe9c9 100644
--- a/docs/examples/1_plotting/README.md
+++ b/docs/examples/1_plotting/README.md
@@ -1,3 +1,3 @@
-### Plotting
+## Plotting
These tutorials will show you how to visualize your neurons:
diff --git a/docs/examples/1_plotting/plot_00_plotting_intro.py b/docs/examples/1_plotting/tutorial_plotting_00_intro.py
similarity index 99%
rename from docs/examples/1_plotting/plot_00_plotting_intro.py
rename to docs/examples/1_plotting/tutorial_plotting_00_intro.py
index 865c8d50..a1e9f397 100644
--- a/docs/examples/1_plotting/plot_00_plotting_intro.py
+++ b/docs/examples/1_plotting/tutorial_plotting_00_intro.py
@@ -290,7 +290,7 @@
#
# Above we demo'ed making a little GIF using matplotlib. While that's certainly fun, it's not
# very high production value. For high quality videos and renderings I recommend you check out
-# the tutorial on navis' [Blender interface](../3_interfaces/plot_01_interfaces.blender). Here's a little taster:
+# the tutorial on navis' [Blender interface](../3_interfaces/tutorial_interfaces_02_blender). Here's a little taster:
#
#
#
diff --git a/docs/examples/1_plotting/plot_01_plotting_colors.py b/docs/examples/1_plotting/tutorial_plotting_01_colors.py
similarity index 99%
rename from docs/examples/1_plotting/plot_01_plotting_colors.py
rename to docs/examples/1_plotting/tutorial_plotting_01_colors.py
index fec9c8ea..5f42be3d 100644
--- a/docs/examples/1_plotting/plot_01_plotting_colors.py
+++ b/docs/examples/1_plotting/tutorial_plotting_01_colors.py
@@ -5,7 +5,7 @@
This tutorial demonstrates how to adjust colors in NAVis plots.
By now, you should already have a basic understanding on how to plot neurons in {{ navis }} (2d vs 3d plots, the various
-backends and plotting methods, etc.) - if not, check out the [plotting tutorial](../plot_00_plotting_intro).
+backends and plotting methods, etc.) - if not, check out the [plotting tutorial](../tutorial_plotting_00_intro).
In this tutorial we will focus on how to finetune these plots by changing colors :rainbow:. We will demonstrate
this using `matplotlib` ([`plot2d`][navis.plot2d]) and `plotly` ([`plot3d`][navis.plot3d]) but everything shown here
diff --git a/docs/examples/1_plotting/plot_02_plotting_1d.py b/docs/examples/1_plotting/tutorial_plotting_02_1d.py
similarity index 100%
rename from docs/examples/1_plotting/plot_02_plotting_1d.py
rename to docs/examples/1_plotting/tutorial_plotting_02_1d.py
diff --git a/docs/examples/1_plotting/plot_03_plotting_dend.py b/docs/examples/1_plotting/tutorial_plotting_03_dend.py
similarity index 100%
rename from docs/examples/1_plotting/plot_03_plotting_dend.py
rename to docs/examples/1_plotting/tutorial_plotting_03_dend.py
diff --git a/docs/examples/1_plotting/plot_04_plotting_skeletons.py b/docs/examples/1_plotting/tutorial_plotting_04_skeletons.py
similarity index 97%
rename from docs/examples/1_plotting/plot_04_plotting_skeletons.py
rename to docs/examples/1_plotting/tutorial_plotting_04_skeletons.py
index e74d75ff..250ed31a 100644
--- a/docs/examples/1_plotting/plot_04_plotting_skeletons.py
+++ b/docs/examples/1_plotting/tutorial_plotting_04_skeletons.py
@@ -5,7 +5,7 @@
In this example we will demonstrate various ways to fine-tune plots with skeletons.
By now, you should already have a basic understanding on how to plot neurons in {{ navis }} (2d vs 3d plots, the various
-backends and plotting methods, etc.) - if not, check out the [plotting tutorial](../plot_00_plotting_intro).
+backends and plotting methods, etc.) - if not, check out the [plotting tutorial](../tutorial_plotting_00_intro).
We will focus on how to finetune [`plot2d`][navis.plot2d] plots because `matplotlib` is much more flexible than the
[`plot3d`][navis.plot3d] backends when it comes to rendering lines. That said: some of the things we show here will also
diff --git a/docs/examples/1_plotting/plot_05_plotting_depth.py b/docs/examples/1_plotting/tutorial_plotting_05_depth.py
similarity index 100%
rename from docs/examples/1_plotting/plot_05_plotting_depth.py
rename to docs/examples/1_plotting/tutorial_plotting_05_depth.py
diff --git a/docs/examples/1_plotting/tutorial_plotting_06_cortex.py b/docs/examples/1_plotting/tutorial_plotting_06_cortex.py
new file mode 100644
index 00000000..f94d1fed
--- /dev/null
+++ b/docs/examples/1_plotting/tutorial_plotting_06_cortex.py
@@ -0,0 +1,256 @@
+"""
+Cortical Neurons
+================
+
+This tutorial demonstrates how to plot cortical neurons.
+
+
+In this exercise we will visualize morphological data from ["Integrated Morphoelectric and Transcriptomic Classification of Cortical GABAergic Cells"](https://www.cell.com/cell/pdf/S0092-8674(20)31254-X.pdf)
+by Gouwens, Sorensen _et al._, Cell (2020). Specifically, we will re-create a plot similar to their
+[Figure 4A](https://www.cell.com/cms/10.1016/j.cell.2020.09.057/asset/c684cc3f-ee17-4a36-98c9-e464a7ce8063/main.assets/gr4_lrg.jpg).
+
+For brevity, we will use some fixed cell IDs and properties from the dataset. These were taken from the
+[`20200711_patchseq_metadata_mouse.csv`](https://brainmapportal-live-4cc80a57cd6e400d854-f7fdcae.divio-media.net/filer_public/5e/2a/5e2a5936-61da-4e09-b6da-74ab97ce1b02/20200711_patchseq_metadata_mouse.csv)
+file provided alongside the supplementary material of the paper:
+
+"""
+
+# %%
+
+# The cell IDs we will use (these are the first 5 in the metadata file)
+ids = [601506507, 601790961, 601803754, 601808698, 601810307]
+
+# The normalized soma depths for these cells (also from the metadata file)
+soma_depths = [0.36101451, 0.62182935, 0.16423996, 0.48303029, 0.2956563]
+
+# %%
+# ## Part I: Loading and Aligning Neurons
+#
+# First we need to load the neurons. Here, we will take them straight from their FTP server
+# but you can of course download them first and then load from disk!
+
+import navis
+
+nl = navis.read_swc(
+ "ftp://download.brainlib.org:8811/biccn/zeng/pseq/morph/200526/",
+ limit=[f"{i}_transformed.swc" for i in ids], # Load only the files we need
+ fmt="{name,id:int}_transformed.swc", # Parse the name and id from the file name
+)
+
+# %%
+# To make our lives a bit easier, we will attach the soma depth to the neurons as metadata:
+
+nl.set_neuron_attributes(
+ soma_depths,
+ name="cell_soma_normalized_depth",
+ register=True
+ )
+
+nl
+
+# %%
+# Next, we need to align the neurons according to their soma depth! The normalized `cell_soma_normalized_depth` should
+# map to a physical range of `0` to `922.5861720311` microns.
+#
+# Let's demo with one neuron before we run this for all neurons:
+
+# Grab one of the neurons
+n = nl[0]
+
+# This is the normalized soma depth:
+print(f"Normalized soma depth: {n.cell_soma_normalized_depth}")
+
+# %%
+# The physical soma depth is simply the normalized depth multiplied by the total depth of the cortex.
+# Note that we're positioning from the bottom - i.e. 922.586 will be at the surface and 0 at the bottom!
+# This is to make our lives easier when it comes to plotting since the origin in `matplotlib`
+# figures is in the bottom left corner.
+
+phys_y = (1 - n.cell_soma_normalized_depth) * 922.5861720311
+print(f"Physical soma depth: {phys_y}")
+
+# Current soma
+print(f"Current soma coordinates: {n.soma_pos[0]}")
+
+# %%
+# We will now offset the neuron such that the soma is at `(0, 589.519, 0)`:
+
+# %%
+offset = [0, phys_y, 0] - n.soma_pos[0]
+offset
+
+# %%
+# Moving or scaling neurons in {{ navis }} is super straightforward: adding, subtracting, dividing or multiplying neurons by a number or an
+# `[x, y, z]` vector will change their coordinates:
+
+# Move the neuron to the new centered position
+n += offset
+
+# Check that the soma is now in the correct position
+n.soma_pos[0]
+
+# %%
+# That looks good! Let's do it for all neurons:
+
+# %%
+for n in nl:
+ phys_y = (1 - n.cell_soma_normalized_depth) * 922.5861720311
+ offset = [0, phys_y, 0] - n.soma_pos[0]
+ n += offset
+
+# %%
+# Check that all soma positions are correct:
+nl.soma_pos.reshape(-1, 3)
+
+# %% [markdown]
+# ## Part II: Plotting
+#
+# Now that we have loaded and aligned the neurons, let's recreate a plot similar to those in Figure 4A:
+
+def plot_neurons(to_plot, color="purple", axon_color="magenta", offset=500):
+ """Plot all neurons of a given transcriptomic type.
+
+ Parameters
+ ----------
+    to_plot : NeuronList
+ The aligned neurons to plot.
+ color : str
+ The color of the dendrites.
+ axon_color : str
+ The color of the axon.
+ offset : int
+ The offset between neurons along the x-axis.
+
+ Returns
+ -------
+ fig, ax
+ The matplotlib figure and axis.
+
+ """
+ # Offset the neurons along the x-axis so that they don't overlap
+ to_plot = [n + [offset * i, 0, 0] for i, n in enumerate(to_plot)]
+
+ # The SWC files for this dataset include a `label` column which
+ # indicates the compartment type:
+ # 1 = soma
+ # 2 = axon
+ # 3 = dendrites
+ # We will use this `label` to color the neurons' compartments.
+
+ # Here we define a color palette for the compartments:
+ compartment_palette = {1: color, 2: axon_color, 3: color}
+
+ # Plot the neuron
+ fig, ax = navis.plot2d(
+ to_plot,
+ radius=False,
+ lw=1.5,
+ soma=dict(
+ fc="black", # soma fill color
+ ec="white", # highlight the soma with a white outline
+ radius=10, # override the default soma radius
+ ),
+ color_by="label", # color by `label` column in node table
+ palette=compartment_palette,
+ figsize=(
+ len(to_plot) * 2,
+ 10,
+ ), # scale the figure size with the number of neurons
+ method="2d",
+ )
+
+ # Add the layer boundaries (top bound for each layer in microns)
+ layer_bounds = {
+ "L1": 0,
+ "L2/3": 115.1112491335,
+ "L4": 333.4658190171,
+ "L5": 453.6227158132,
+ "L6": 687.6482650269,
+ "L6b": 883.1308910545,
+ }
+
+ for layer, y in layer_bounds.items():
+ y = 922.5861720311 - y # flip the y-axis
+ # Add a dashed line
+ ax.axhline(y, color="gray", ls="--", lw=1)
+ # Add the layer name
+ ax.text(-300, y - 25, layer, color="gray", va="center", size=10)
+ # Add the bottom bound
+ ax.axhline(0, color="gray", ls="--", lw=1)
+
+ # Set the axis y limits according to the layers
+    # Set the y-axis limits according to the layers
+
+ # Hide axes
+ ax.axis("off")
+
+ return fig, ax
+
+
+fig, ax = plot_neurons(nl)
+
+# %% [markdown]
+# That looks close enough. The last bit is to add the little KDE plots for the depth-distribution of
+# cable length!
+#
+# We're going to be cheap here and simply generate a histogram over the node positions.
+# To make this representative, we should make sure that the number of nodes per unit of cable
+# is homogeneous across neurons. For that we will resample the neurons:
+print(
+    f"Sampling resolution (microns of cable per node) before resampling: {nl.sampling_resolution.mean():.2f}"
+)
+
+# Resample to 2 nodes per micron of cable (i.e. one node every 0.5 microns)
+resampled = navis.resample_skeleton(
+ nl,
+ resample_to=0.5,
+ map_columns="label", # make sure label column is carried over
+)
+
+print(
+    f"Sampling resolution (microns of cable per node) after resampling: {resampled.sampling_resolution.mean():.2f}"
+)
+
+# %%
+# Get the combined nodes table:
+nodes = resampled.nodes
+nodes.head()
+
+# %%
+# Now we can plot the depth distribution of cable for our neurons:
+
+import seaborn as sns
+from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+# Plot the neurons again, re-using the function we defined above
+fig, ax = plot_neurons(nl)
+
+# Add a new axis to the right of the main plot
+divider = make_axes_locatable(ax)
+ax_hist = divider.append_axes("right", size=0.75, pad=0.05)
+
+# Add the KDE plots
+# For axon:
+sns.kdeplot(
+ data=nodes[nodes.label == 2], y="y", ax=ax_hist, color="magenta", linewidth=1.5
+)
+# For the rest:
+sns.kdeplot(
+ data=nodes[nodes.label != 2], y="y", ax=ax_hist, color="purple", linewidth=1.5
+)
+
+# Add soma positions
+soma_pos = nl.soma_pos.reshape(-1, 3)
+ax_hist.scatter([0] * len(soma_pos), soma_pos[:, 1], color="black", s=10, clip_on=False)
+
+# Set same axis limits as the main plot
+ax_hist.set_ylim(-10, 930)
+
+# Hide axes
+ax_hist.set_axis_off()
+
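+# %%
+# If you want to keep the final figure, you can save it with matplotlib (the filename is
+# just an example):
+#
+# ```python
+# fig.savefig("figure_4a.png", dpi=300, bbox_inches="tight")
+# ```
+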
+# %%
+# ## Acknowledgements
+#
+# We thank Staci Sorensen and Casey Schneider-Mizell from the Allen Institute for Brain Science
+# for providing additional information and data for this tutorial!
\ No newline at end of file
diff --git a/docs/examples/1_plotting/plot_07_plotting_xkcd.py b/docs/examples/1_plotting/tutorial_plotting_07_xkcd.py
similarity index 100%
rename from docs/examples/1_plotting/plot_07_plotting_xkcd.py
rename to docs/examples/1_plotting/tutorial_plotting_07_xkcd.py
diff --git a/docs/examples/2_morpho/README.md b/docs/examples/2_morpho/README.md
index a52743a5..c3e271cf 100644
--- a/docs/examples/2_morpho/README.md
+++ b/docs/examples/2_morpho/README.md
@@ -1,3 +1,3 @@
-### Morphology
+## Morphology
These tutorials will show you how to analyse and manipulate your neurons' morphology:
diff --git a/docs/examples/2_morpho/plot_00_morpho_manipulate.py b/docs/examples/2_morpho/tutorial_morpho_00_manipulate.py
similarity index 99%
rename from docs/examples/2_morpho/plot_00_morpho_manipulate.py
rename to docs/examples/2_morpho/tutorial_morpho_00_manipulate.py
index bd8c73e5..c83420ae 100644
--- a/docs/examples/2_morpho/plot_00_morpho_manipulate.py
+++ b/docs/examples/2_morpho/tutorial_morpho_00_manipulate.py
@@ -351,5 +351,5 @@
# About half the presynapses are in the LH (most of the rest will be in the MB calyx). The large majority of postsynapses are
# outside the LH in the antennal lobe where this neuron has its dendrites.
#
-# That's it for now! Please see the [NBLAST tutorial](../../05_nblast/plot_00_nblast_intro.py) for morphological comparisons using NBLAST and the
+# That's it for now! Please see the [NBLAST tutorial](../../05_nblast/tutorial_nblast_00_intro.py) for morphological comparisons using NBLAST and the
# [API reference](../../../api.md#neuron-morphology) for a full list of morphology-related functions.
diff --git a/docs/examples/2_morpho/plot_01_morpho_analyze.py b/docs/examples/2_morpho/tutorial_morpho_01_analyze.py
similarity index 97%
rename from docs/examples/2_morpho/plot_01_morpho_analyze.py
rename to docs/examples/2_morpho/tutorial_morpho_01_analyze.py
index 1c616d74..f61c904b 100644
--- a/docs/examples/2_morpho/plot_01_morpho_analyze.py
+++ b/docs/examples/2_morpho/tutorial_morpho_01_analyze.py
@@ -121,7 +121,7 @@
# %%
# ## Sholl Analysis
#
-# For an example of a Sholl analyses, check out the [MICrONS tutorial](../4_remote/plot_02_remote_microns).
+# For an example of a Sholl analysis, check out the [MICrONS tutorial](../4_remote/tutorial_remote_02_microns).
#
# ## Geodesic Distances
#
@@ -245,5 +245,5 @@
# %%
# As you can see in the heatmap, the dendrites and the axon nicely separate.
#
-# That's it for now! Please see the [NBLAST tutorial](../../5_nblast/plot_00_nblast_intro) for morphological comparisons using NBLAST and
-# the :[API reference](../../../api.md) for a full list of morphology-related functions.
+# That's it for now! Please see the [NBLAST tutorial](../../5_nblast/tutorial_nblast_00_intro) for morphological comparisons using NBLAST and
+# the [API reference](../../../api.md) for a full list of morphology-related functions.
diff --git a/docs/examples/3_interfaces/README.md b/docs/examples/3_interfaces/README.md
index 309beac0..b4cf75d8 100644
--- a/docs/examples/3_interfaces/README.md
+++ b/docs/examples/3_interfaces/README.md
@@ -1,3 +1,3 @@
-### Interfaces
+## Interfaces
These tutorials cover interfaces between {{ navis }} and external tools:
diff --git a/docs/examples/3_interfaces/plot_00_interfaces_neuron.py b/docs/examples/3_interfaces/tutorial_interfaces_00_neuron.py
similarity index 99%
rename from docs/examples/3_interfaces/plot_00_interfaces_neuron.py
rename to docs/examples/3_interfaces/tutorial_interfaces_00_neuron.py
index d10e4aed..6439df9d 100644
--- a/docs/examples/3_interfaces/plot_00_interfaces_neuron.py
+++ b/docs/examples/3_interfaces/tutorial_interfaces_00_neuron.py
@@ -245,7 +245,7 @@
#
# ## Point Networks
#
-# While you can link together multiple compartment models to simulate networks this quickly becomes prohibitively slow so
+# While you can link together multiple compartment models to simulate networks, this quickly becomes prohibitively slow to
# run. For larger networks it can be sufficient to model each neuron as a single "point process".
# [`PointNetwork`][navis.interfaces.neuron.PointNetwork] lets you quickly create such a network from an edge list.
#
diff --git a/docs/examples/3_interfaces/tutorial_interfaces_01_neuron2.py b/docs/examples/3_interfaces/tutorial_interfaces_01_neuron2.py
new file mode 100644
index 00000000..ba24d546
--- /dev/null
+++ b/docs/examples/3_interfaces/tutorial_interfaces_01_neuron2.py
@@ -0,0 +1,167 @@
+"""
+Visualize NEURON model
+======================
+
+In this tutorial you will learn to visualize a compartment neuron model.
+
+We will jump right in, so please make sure to have a look at the [introductory NEURON tutorial](../tutorial_interfaces_00_neuron)
+first.
+
+## Set up the model
+
+The setup is similar to the previous tutorial: we use one of the example neurons to create a compartment model:
+
+"""
+# %%
+import navis
+import neuron
+
+import navis.interfaces.neuron as nrn
+
+# Load one of the example neurons (a Drosophila projection neuron from the hemibrain connectome)
+# Note the conversion to microns!
+n = navis.example_neurons(1).convert_units("um")
+
+# Here we manually corrected the soma
+n.soma = 20
+
+# Reroot to the soma
+n.reroot(n.soma, inplace=True)
+
+# Create the compartment model
+cmp = nrn.CompartmentModel(n, res=10)
+
+# Set the specific axial resistivity for the entire neuron in Ohm cm
+cmp.Ra = 266.1
+
+# Set the specific membrane capacitance in µF / cm**2
+cmp.cm = 0.8
+
+# Add passive membrane properties for the entire neuron
+cmp.insert(
+ "pas",
+ g=1
+    / 20800, # specific leakage conductance = 1/Rm; Rm = specific membrane resistance in Ohm cm**2
+ e=-60, # leakage reverse potential
+)
+
+# Label axon/dendrite
+navis.split_axon_dendrite(n, label_only=True, cellbodyfiber="soma")
+
+# Collect axon nodes
+axon_nodes = n.nodes.loc[n.nodes.compartment.isin(["axon", "linker"]), "node_id"].values
+
+# Get the sections for the given nodes
+axon_secs = list(set(cmp.get_node_section(axon_nodes)))
+
+# Insert HH mechanism at the given sections
+cmp.insert("hh", subset=axon_secs)
+
+# %%
+# Next, we will add a voltage recording _at every single node_ of the neuron.
+
+cmp.add_voltage_record(n.nodes.node_id.values)
+
+
+# %%
+# Last but not least, we will add a synaptic input at some dendritic postsynapses of the neuron.
+
+# Get dendritic postsynapses
+post = n.postsynapses[n.postsynapses.compartment == "dendrite"]
+
+# Add synaptic input to the first 10 postsynapses after 2 ms
+cmp.add_synaptic_current(where=post.node_id.unique()[0:10], start=2, max_syn_cond=0.1, rev_pot=-10)
+
+# %%
+# Now we can run our simulation for 100 ms:
+
+# This is equivalent to neuron.h.finitialize + neuron.h.continuerun
+cmp.run_simulation(100, v_init=-60)
+
+# %%
+# ## Collect the data
+#
+# To visualize and animate, we will collect the results into a pandas DataFrame:
+
+import numpy as np
+import pandas as pd
+
+# Collect the voltage recordings at each node
+records = pd.DataFrame(
+    np.vstack([r.as_numpy() for r in cmp.records["v"].values()]),
+    index=list(cmp.records["v"].keys()),
+)
+
+# Reindex to make sure it matches the node table
+records = records.reindex(n.nodes.node_id)
+
+records.head()
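+
+# %%
+# With the traces in a DataFrame we can, for example, check how strongly each node
+# depolarizes over the course of the simulation (just an illustration, not required
+# for the visualization below):
+peak_v = records.max(axis=1)
+print(f"Highest voltage reached at any node: {peak_v.max():.1f} mV")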
+
+# %%
+#
+# ## Visualize
+#
+# Let's first visualize a single snapshot of the neuron at time `t=5ms`:
+
+# The interval for each step is 0.025ms by default
+print(neuron.h.dt)
+
+# %%
+# Add a new column to the node table for time `t=5ms`
+n.nodes['v'] = records.loc[:, int(5 / 0.025)].values
+
+# Plot
+fig, ax = navis.plot2d(
+ n,
+ method="2d",
+ color_by="v", # color by the voltage column
+ palette="viridis",
+    vmin=-70,
+    vmax=10,
+    view=("x", "-y"),
+)
+
+# Manually add a colorbar
+import matplotlib.pyplot as plt
+from matplotlib.cm import ScalarMappable
+sm = ScalarMappable(norm=plt.Normalize(vmin=-70, vmax=10), cmap='viridis')
+_ = fig.colorbar(sm, ax=ax, fraction=0.075, shrink=0.5, label="V")
+
+# %%
+# ## Animate
+#
+# One option to animate the voltage recordings over time is to use matplotlib's animation functionality.
+# For that we have to do a bit of setup:
+
+# Convert our skeleton to a mesh for nicer visualization
+mesh = navis.conversion.tree2meshneuron(n, warn_missing_radii=False)
+
+# Plot the neuron
+fig, ax = navis.plot2d(mesh, method="2d", color="k", view=("x", "-y"))
+
+sm = ScalarMappable(norm=plt.Normalize(vmin=-70, vmax=10), cmap='viridis')
+_ = fig.colorbar(sm, ax=ax, fraction=0.075, shrink=0.5, label="V")
+
+# Add a text in the top left for the timestamp
+t = ax.text(0.02, 0.95, 'ms', ha='left', va='top', transform=ax.transAxes, color='r')
+
+# Get the collection representing our neuron
+c = ax.collections[0]
+c.set_cmap('viridis')
+c.set_norm(plt.Normalize(vmin=-70, vmax=10))
+
+# This function updates the voltages according to the frame
+def animate(i):
+ # We need to map the voltages at individual nodes to faces in the mesh
+ # First nodes to vertices
+ vert_voltage = records[i].values[mesh.vertex_map]
+ # Then vertices to faces
+ face_voltage = vert_voltage[mesh.faces].mean(axis=1)
+ # Set the values
+ c.set_array(face_voltage)
+ # Also update the timestamp
+ t.set_text(f'{i * 0.025:.2f} ms')
+ return (c, t)
+
+import matplotlib.animation as animation
+ani = animation.FuncAnimation(fig, animate, interval=40, blit=True, repeat=True, frames=400)
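+
+# %%
+# To keep the animation around you can write it to disk. A minimal sketch (the filename
+# is just an example; the Pillow-based GIF writer avoids an extra ffmpeg dependency):
+#
+# ```python
+# ani.save("voltage.gif", writer="pillow", fps=25)
+# ```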
+
+
+# %%
diff --git a/docs/examples/3_interfaces/plot_01_interfaces_blender.py b/docs/examples/3_interfaces/tutorial_interfaces_02_blender.py
similarity index 100%
rename from docs/examples/3_interfaces/plot_01_interfaces_blender.py
rename to docs/examples/3_interfaces/tutorial_interfaces_02_blender.py
diff --git a/docs/examples/4_remote/README.md b/docs/examples/4_remote/README.md
index 25b55798..106e05fb 100644
--- a/docs/examples/4_remote/README.md
+++ b/docs/examples/4_remote/README.md
@@ -1,3 +1,3 @@
-### Remote Data Sources
+## Remote Data Sources
These tutorials will show you how to load data from remote data sources:
diff --git a/docs/examples/4_remote/plot_00_remote_neuprint.py b/docs/examples/4_remote/tutorial_remote_00_neuprint.py
similarity index 100%
rename from docs/examples/4_remote/plot_00_remote_neuprint.py
rename to docs/examples/4_remote/tutorial_remote_00_neuprint.py
diff --git a/docs/examples/4_remote/plot_01_remote_cloudvolume.py b/docs/examples/4_remote/tutorial_remote_01_cloudvolume.py
similarity index 97%
rename from docs/examples/4_remote/plot_01_remote_cloudvolume.py
rename to docs/examples/4_remote/tutorial_remote_01_cloudvolume.py
index 84297ab2..d2af0ec1 100644
--- a/docs/examples/4_remote/plot_01_remote_cloudvolume.py
+++ b/docs/examples/4_remote/tutorial_remote_01_cloudvolume.py
@@ -15,7 +15,7 @@
will to register and apply for access. Check out [FAFBseg](https://fafbseg-py.readthedocs.io) for a fairly mature interface built on
top of {{ navis }}.
2. [Google's flood-filling segmentation](http://fafb-ffn1.storage.googleapis.com/landing.html) of an entire *Drosophila* brain.
-3. The Allen Institute's [MICrONs datasets](https://www.microns-explorer.org/). We have a separate [tutorial](../plot_02_remote_microns) on this!
+3. The Allen Institute's [MICrONs datasets](https://www.microns-explorer.org/). We have a separate [tutorial](../tutorial_remote_02_microns) on this!
4. The Janelia [hemibrain connectome](https://neuprint.janelia.org).
`CloudVolume` supports the backends/data formats of these and many up-and-coming datasets. You can use it to query the segmentation directly,
diff --git a/docs/examples/4_remote/plot_02_remote_microns.py b/docs/examples/4_remote/tutorial_remote_02_microns.py
similarity index 99%
rename from docs/examples/4_remote/plot_02_remote_microns.py
rename to docs/examples/4_remote/tutorial_remote_02_microns.py
index 474135a9..ddc457f2 100644
--- a/docs/examples/4_remote/plot_02_remote_microns.py
+++ b/docs/examples/4_remote/tutorial_remote_02_microns.py
@@ -182,7 +182,7 @@
# ## Render Videos
#
# Beautiful data like the MICrONS datasets lend themselves to visualizations. For making high quality videos (and renderings)
-# I recommend you check out the tutorial on navis' [Blender interface](../../gallery/3_interfaces/plot_01_interfaces_blender).
+# I recommend you check out the tutorial on navis' [Blender interface](../../../gallery/3_interfaces/tutorial_interfaces_02_blender).
# Here's a little taster:
#
#
diff --git a/docs/examples/4_remote/plot_03_remote_insect_db.py b/docs/examples/4_remote/tutorial_remote_03_insect_db.py
similarity index 100%
rename from docs/examples/4_remote/plot_03_remote_insect_db.py
rename to docs/examples/4_remote/tutorial_remote_03_insect_db.py
diff --git a/docs/examples/5_nblast/README.md b/docs/examples/5_nblast/README.md
index be6d26cc..fdc7790f 100644
--- a/docs/examples/5_nblast/README.md
+++ b/docs/examples/5_nblast/README.md
@@ -1,3 +1,3 @@
-### NBLAST
+## NBLAST
These tutorials will teach you how to run NBLASTs to compare neuron morphology.
diff --git a/docs/examples/5_nblast/plot_00_nblast_intro.py b/docs/examples/5_nblast/tutorial_nblast_00_intro.py
similarity index 99%
rename from docs/examples/5_nblast/plot_00_nblast_intro.py
rename to docs/examples/5_nblast/tutorial_nblast_00_intro.py
index 55eef2ab..984fefd3 100644
--- a/docs/examples/5_nblast/plot_00_nblast_intro.py
+++ b/docs/examples/5_nblast/tutorial_nblast_00_intro.py
@@ -33,7 +33,7 @@
Importantly, these matrices were created using _Drosophila_ neurons from the [FlyCircuit](http://flycircuit.tw/) light-level dataset which
are in microns. Consequently, you should make sure your neurons are also in micrometer units for NBLAST! If you are working on non-insect
neurons you might have to play around with the scaling to improve results. Alternatively, you can also produce your own scoring function
- (see [this tutorial](../plot_03_nblast_smat)).
+ (see [this tutorial](../tutorial_nblast_03_smat)).
3. Produce a per-pair score:
@@ -205,7 +205,7 @@
# Let's try something more elaborate and pull some hemibrain neurons from [neuPrint](https://neuprint.janelia.org/). For this you need to install the
# `neuprint-python` package (`pip3 install neuprint-python`), make a neuPrint account and generate/set an authentication token. Sounds complicated
# but is all pretty painless - see the [neuPrint documentation](https://connectome-neuprint.github.io/neuprint-python/docs/quickstart.html) for details.
-# There is also a separate {{ navis }} tutorial on neuprint [here](../4_remote/plot_00_remote_neuprint).
+# There is also a separate {{ navis }} tutorial on neuprint [here](../4_remote/tutorial_remote_00_neuprint).
#
# Once that's done we can get started by importing the neuPrint interface from {{ navis }}:
diff --git a/docs/examples/5_nblast/plot_03_nblast_smat.py b/docs/examples/5_nblast/tutorial_nblast_03_smat.py
similarity index 100%
rename from docs/examples/5_nblast/plot_03_nblast_smat.py
rename to docs/examples/5_nblast/tutorial_nblast_03_smat.py
diff --git a/docs/examples/5_nblast/zzz_no_plot_01_nblast_flycircuit.py b/docs/examples/5_nblast/zzz_tutorial_nblast_01_flycircuit.py
similarity index 100%
rename from docs/examples/5_nblast/zzz_no_plot_01_nblast_flycircuit.py
rename to docs/examples/5_nblast/zzz_tutorial_nblast_01_flycircuit.py
diff --git a/docs/examples/5_nblast/zzz_no_plot_02_nblast_hemibrain.py b/docs/examples/5_nblast/zzz_tutorial_nblast_02_hemibrain.py
similarity index 100%
rename from docs/examples/5_nblast/zzz_no_plot_02_nblast_hemibrain.py
rename to docs/examples/5_nblast/zzz_tutorial_nblast_02_hemibrain.py
diff --git a/docs/examples/6_misc/README.md b/docs/examples/6_misc/README.md
index 27e6f6aa..31c46e0c 100644
--- a/docs/examples/6_misc/README.md
+++ b/docs/examples/6_misc/README.md
@@ -1 +1 @@
-### Misc
+## Misc
diff --git a/docs/examples/6_misc/plot_00_misc_multiprocess.py b/docs/examples/6_misc/tutorial_misc_00_multiprocess.py
similarity index 62%
rename from docs/examples/6_misc/plot_00_misc_multiprocess.py
rename to docs/examples/6_misc/tutorial_misc_00_multiprocess.py
index 4476a87b..1b552b93 100644
--- a/docs/examples/6_misc/plot_00_misc_multiprocess.py
+++ b/docs/examples/6_misc/tutorial_misc_00_multiprocess.py
@@ -4,12 +4,12 @@
This notebook will show you how to use parallel processing with `navis`.
-By default, most {{ navis }} functions use only a single core (although some third-party functions used under
-the hood might). Distributing expensive computations across multiple cores can speed things up considerable.
+By default, most {{ navis }} functions use only a single thread/process (although some third-party functions
+used under the hood might). Distributing expensive computations across multiple cores can speed things up considerably.
Many {{ navis }} functions natively support parallel processing. This notebook will illustrate various ways
to use parallelism. Before we get started: {{ navis }} uses `pathos` for multiprocessing - if you installed
-{{ navis }} with `pip install navis[all]` you should be all set. If not, you can install it separately:
+{{ navis }} with `pip install navis[all]` you should be all set. If not, you can install `pathos` separately:
```shell
pip install pathos -U
@@ -25,6 +25,7 @@
import navis
def time_func(func, *args, **kwargs):
+ """A function to time the execution of a function."""
start = time.time()
func(*args, **kwargs)
print(f"Execution time: {round(time.time() - start, 2)}s")
@@ -32,6 +33,12 @@ def time_func(func, *args, **kwargs):
# Load example neurons
nl = navis.example_neurons()
+# %%
+# !!! important
+# This documentation is built on Github Actions where the number of cores can be as low as 2. The speedup on
+# your machine should be more pronounced than what you see below. That said: parallel processing has some
+# overhead and for small tasks the overhead can be larger than the speed-up.
+
# %%
# Without parallel processing:
time_func (
@@ -41,7 +48,7 @@ def time_func(func, *args, **kwargs):
)
# %%
-# With parallel processing (by default this will use half the available CPU cores):
+# With parallel processing:
time_func (
navis.resample_skeleton,
nl,
@@ -66,10 +73,19 @@ def time_func(func, *args, **kwargs):
)
# %%
-# !!! important
-# This documentation is built on Github Actions where the number of cores can be as low as 2. The speedup on
-# your machine should be more pronounced than what you see here. That said: parallel processing has some
-# overhead and for small tasks, the overhead can be larger than the speedup.
+# By default `parallel=True` will use half the available CPU cores.
+# You can adjust that behaviour using the `n_cores` parameter:
+
+time_func(
+ nl.resample, 125, parallel=True, n_cores=2
+)
+
+# %%
+# !!! note
+# The name `n_cores` is actually a bit misleading as it determines the number of parallel processes
+# that {{ navis }} will spawn. There is nothing stopping you from setting `n_cores` to a number higher than
+# the number of available CPU cores. However, doing so will likely over-subscribe your CPU and end up
+# slowing things down.
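+
+# %%
+# A quick way to check how many cores your machine actually has (plain standard-library
+# Python, not a {{ navis }} function) so you can pick a sensible `n_cores`:
+import os
+
+print(f"Available CPU cores: {os.cpu_count()}")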
# %%
# ## Parallelizing generic functions
diff --git a/docs/examples/6_misc/plot_01_transforms.py b/docs/examples/6_misc/tutorial_misc_01_transforms.py
similarity index 100%
rename from docs/examples/6_misc/plot_01_transforms.py
rename to docs/examples/6_misc/tutorial_misc_01_transforms.py
diff --git a/docs/examples/plot_00_basic_neurons.py b/docs/examples/tutorial_basic_00_basics.py
similarity index 95%
rename from docs/examples/plot_00_basic_neurons.py
rename to docs/examples/tutorial_basic_00_basics.py
index 3e751ec2..231b01ec 100644
--- a/docs/examples/plot_00_basic_neurons.py
+++ b/docs/examples/tutorial_basic_00_basics.py
@@ -121,14 +121,14 @@
#
# Find out more about the different neuron types in {{ navis }}.
#
-# [:octicons-arrow-right-24: Neuron types tutorial](../plot_01_neurons_intro)
+# [:octicons-arrow-right-24: Neuron types tutorial](../tutorial_basic_01_neurons)
#
# - :fontawesome-solid-list-ul:{ .lg .middle } __Lists of Neurons__
# ---
#
# Check out the guide on lists of neurons.
#
-# [:octicons-arrow-right-24: NeuronLists tutorial](../plot_02_neuronlists_intro)
+# [:octicons-arrow-right-24: NeuronLists tutorial](../tutorial_basic_02_neuronlists)
#
# - :octicons-file-directory-symlink-16:{ .lg .middle } __Neuron I/O__
# ---
diff --git a/docs/examples/plot_01_neurons_intro.py b/docs/examples/tutorial_basic_01_neurons.py
similarity index 82%
rename from docs/examples/plot_01_neurons_intro.py
rename to docs/examples/tutorial_basic_01_neurons.py
index 466f6174..aaca0a01 100644
--- a/docs/examples/plot_01_neurons_intro.py
+++ b/docs/examples/tutorial_basic_01_neurons.py
@@ -115,10 +115,12 @@
# [`VoxelNeurons`][navis.VoxelNeuron] represent neurons as either 3d image or x/y/z voxel coordinates
# typically obtained from e.g. light-level microscopy.
#
-# [`navis.VoxelNeuron`][] consist of either a 3d `(N, M, K)` array (a "grid") or an 2d `(N, 3)`
-# array of voxel coordinates. You will probably find yourself loading these data from image files
-# (e.g. `.nrrd` via [`navis.read_nrrd()`][navis.read_nrrd]). That said we can also "voxelize"
-# other neuron types to produce [`VoxelNeurons`][navis.VoxelNeuron]:
+# ![voxels](../../../_static/voxel.png)
+#
+# [`navis.VoxelNeuron`][] consists of either a dense 3d `(N, M, K)` array (a "grid") or a sparse 2d `(N, 3)`
+# array of voxel coordinates (COO format). You will probably find yourself loading these
+# data from image files (e.g. `.nrrd` via [`navis.read_nrrd()`][navis.read_nrrd]). That said, we can
+# also "voxelize" other neuron types to produce [`VoxelNeurons`][navis.VoxelNeuron]:
# Load an example mesh
m = navis.example_neurons(n=1, kind="mesh")
@@ -150,7 +152,7 @@
# explicitly, it will default to some rather cryptic random UUID - you have been warned!
# :wink:
#
-# ## Neuron Meta Data
+# ## Neuron meta data
#
# ### Connectors
#
@@ -223,34 +225,70 @@
# %%
# ### Units
#
-# {{ navis }} supports assigning units to neurons. The neurons shipping with navis, for example, are in 8x8x8nm voxel space:
+# {{ navis }} supports assigning units to neurons. The neurons shipping with {{ navis }}, for example, are in 8x8x8nm voxel space[^1]:
+#
+# [^1]: The example neurons are from the [Janelia hemibrain connectome](https://www.janelia.org/project-team/flyem/hemibrain) project which was imaged at 8x8x8nm resolution.
# %%
m = navis.example_neurons(1, kind="mesh")
-m.units
+print(m.units)
# %%
-# To assign or change the units simply use a descriptive string:
+# To set the neuron's units simply use a descriptive string:
# %%
m.units = "10 micrometers"
-m.units
+print(m.units)
# %%
-# Tracking units is good practive but can also be very useful: some {{ navis }} functions let you pass quantities as unit strings:
+# !!! note
+# Setting the units as we did above does not actually change the neuron's coordinates. It
+# merely sets a property that can be used by other functions to interpret the neuron's
+# coordinate space. See below on how to convert the units of a neuron.
+#
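+
+# %%
+# A quick illustration (a sketch - we only inspect the bounding box): setting `.units`
+# leaves the coordinates untouched, while `convert_units` actually rescales them.
+print(m.bbox)  # unchanged by setting `.units` above
+print(m.convert_units("micrometers").bbox)  # rescaled by a factor of 10
+
+# %%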
+# Tracking units is good practice in general but is also very useful in a variety of scenarios:
+#
+# First, certain {{ navis }} functions let you pass quantities as unit strings:
-# Load example neuron in 8x8x8nm
+# Load example neuron which is in 8x8x8nm space
n = navis.example_neurons(1, kind="skeleton")
# Resample to 1 micrometer
rs = navis.resample_skeleton(n, resample_to="1 um")
# %%
-# To change the units on a neuron, you have two options:
+# Second, {{ navis }} optionally uses the neuron's units to make certain properties more
+# interpretable. By default, properties like cable length or volume are returned in the
+# neuron's units, i.e. in 8x8x8nm voxel space in our case:
+
+print(n.cable_length)
+
+# %%
+# You can tell {{ navis }} to use the neuron's `.units` to make these properties more readable:
+
+navis.config.add_units = True
+print(n.cable_length)
+navis.config.add_units = False # reset to default
+
+# %%
+# !!! note
+#     Note that `n.cable_length` is now a `pint.Quantity` object. This may make certain operations
+#     a bit more cumbersome, which is why this feature is optional. You can convert to a float by calling
+# `.magnitude`:
+#
+# ```python
+# n.cable_length.magnitude
+# ```
+
+# %%
+# Check out Pint's [documentation](https://pint.readthedocs.io/en/stable/) to learn more.
+#
+# To actually convert the neuron's coordinate space, you have two options:
#
# === "Multiply/Divide"
#
-# You can multiply or divide any neuron (or ``NeuronList``) by a number to change the units:
+# You can multiply or divide any neuron or [`NeuronList`][navis.NeuronList] by a number
+# to change the units:
#
# ```python
# # Example neuron are in 8x8x8nm voxel space
@@ -261,6 +299,14 @@
# n_um = n_nm / 1000
# ```
#
+# For non-isometric conversions you can pass a vector of scaling factors:
+# ```python
+# neuron * [4, 4, 40]
+# ```
+# Note that for `TreeNeurons`, this is expected to be scaling factors for
+# `(x, y, z, radius)`.
+#
+#
# === "Convert units"
#
# If your neuron has known units, you can let {{ navis }} do the conversion for you:
@@ -270,12 +316,26 @@
# # Convert to micrometers
# n_um = n.convert_units("micrometers")
# ```
-
-
-# %%
-# ## Operating on Neurons
#
-# Above we've already seen examples of passing neurons to functions - for example [`navis.plot2d(n)`](navis.plot2d).
+# !!! example "Addition & Subtraction"
+#     Multiplication and division will scale the neuron as you've seen above.
+# Similarly, adding or subtracting to/from neurons will offset the neuron's coordinates:
+# ```python
+# n = navis.example_neurons(1)
+#
+# # Convert to microns
+# n_um = n.convert_units("micrometers")
+#
+# # Add 100 micrometers along all axes to the neuron
+# n_offset = n + 100
+#
+# # Subtract 100 micrometers along just one axis
+#     n_offset = n - [0, 0, 100]
+# ```
+#
+# ## Operating on neurons
+#
+# Above we've already seen examples of passing neurons to functions - for example [`navis.plot2d(n)`][navis.plot2d].
#
+# For some {{ navis }} functions, neurons offer shortcut "methods":
@@ -293,7 +353,7 @@
# sk.plot3d(color='red') # plot the neuron in 3d
# ```
#
-# === "Using navis functions"
+# === "Using NAVis functions"
# ```python
# import navis
# sk = navis.example_neurons(1, kind='skeleton')
@@ -310,7 +370,7 @@
#
# In some cases the shorthand methods might offer only a subset of the full function's functionality.
#
-# #### The `inplace` parameter
+# ### The `inplace` parameter
#
# The `inplace` parameter is part of many {{ navis }} functions and works like e.g. in the `pandas` library:
#
@@ -332,9 +392,9 @@
print(f"{n.n_nodes} nodes before and {n_lh.n_nodes} nodes after pruning")
# %%
-# ## All Neurons are Equal...
+# ## All neurons are equal...
#
-# ... but some are more equal than others. :wink:
+# ... but some are more equal than others.
#
# In Python the `==` operator compares two objects:
@@ -405,9 +465,9 @@
# %%
# Here, the changes to the node table automatically triggered a regeneration of the graph. This works
-# because {{ navis }} generates and checks hash values for neurons to detect changes and because here
-# the node table is the master. It would not work the other way around (i.e. changing the graph to
-# change the node table).
+# because {{ navis }} checks hash values of neurons and in this instance it detected that the
+# node table - which represents the core data for [`TreeNeurons`][navis.TreeNeuron] - had changed.
+# It would not work the other way around: changing the graph does not trigger changes in the node table.
#
# Again: as long as you are using built-in functions, you don't have to worry about this. If you do
# run some custom manipulation of neurons be aware that you might want to make sure that the data
@@ -533,7 +593,7 @@
#
# Check out the guide on lists of neurons.
#
-# [:octicons-arrow-right-24: NeuronLists tutorial](../plot_02_neuronlists_intro)
+# [:octicons-arrow-right-24: NeuronLists tutorial](../tutorial_basic_02_neuronlists)
#
# - :octicons-file-directory-symlink-16:{ .lg .middle } __Neuron I/O__
#
diff --git a/docs/examples/plot_02_neuronlists_intro.py b/docs/examples/tutorial_basic_02_neuronlists.py
similarity index 99%
rename from docs/examples/plot_02_neuronlists_intro.py
rename to docs/examples/tutorial_basic_02_neuronlists.py
index 440a16ba..43497e7b 100644
--- a/docs/examples/plot_02_neuronlists_intro.py
+++ b/docs/examples/tutorial_basic_02_neuronlists.py
@@ -5,7 +5,7 @@
This tutorial will show you how to use NeuronLists to efficiently work with many neurons at a time.
!!! note
- If you haven't please check out the [neuron types tutorial](../plot_01_neurons_intro) first.
+ If you haven't please check out the [neuron types tutorial](../tutorial_basic_01_neurons) first.
{{ navis }} will typically collect multiple neurons into a [`navis.NeuronList`][] as container.
This container behaves like a mix of lists, numpy arrays and pandas dataframes, and allows you
diff --git a/docs/index.md b/docs/index.md
index 84730788..3f103040 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -28,7 +28,7 @@ morphology. It stands on the shoulders of the excellent
---
- Support for all kinds of [neuron types](generated/gallery/plot_01_neurons_intro): skeletons, meshes, dotprops and images.
+ Support for all kinds of [neuron types](generated/gallery/tutorial_basic_01_neurons): skeletons, meshes, dotprops and images.
- :material-eye:{ .lg .middle } __Exploration__
@@ -42,13 +42,13 @@ morphology. It stands on the shoulders of the excellent
---
Calculate Strahler indices, cable length, volume, tortuosity, NBLAST
- and many other [morphometrics](generated/gallery/2_morpho/plot_01_morpho_analyze).
+ and many other [morphometrics](generated/gallery/2_morpho/tutorial_morpho_01_analyze).
- :fontawesome-solid-brush:{ .lg .middle } __Visualization__
---
- Generate beautiful publication-ready 2D (matplotlib) and 3D (octarine,
+ Generate beautiful, publication-ready 2D (matplotlib) and 3D (octarine,
vispy or plotly) [figures](generated/gallery/#plotting).
- :material-progress-wrench:{ .lg .middle } __Processing__
@@ -61,21 +61,21 @@ morphology. It stands on the shoulders of the excellent
---
- Uses compiled Rust code under-the-hood. Also scale thanks to
- out-of-the-box support for [multiprocessing](generated/gallery/6_misc/plot_00_misc_multiprocess).
+    Uses compiled Rust code under the hood and offers
+ out-of-the-box support for [multiprocessing](generated/gallery/6_misc/tutorial_misc_00_multiprocess).
- :material-lightbulb-group:{ .lg .middle } __Clustering__
---
- Cluster your neurons by e.g. morphology using [NBLAST](generated/gallery/5_nblast/plot_00_nblast_intro).
+ Cluster your neurons by e.g. morphology using [NBLAST](generated/gallery/5_nblast/tutorial_nblast_00_intro).
- :material-move-resize:{ .lg .middle } __Transforms__
---
- Fully featured [transform system](generated/gallery/5_transforms/plot_00_transforms) to move neurons between brain spaces.
- We support e.g. CMTK or Elastix.
+ Fully featured [transform system](generated/gallery/6_misc/tutorial_misc_01_transforms) to move neurons between brain spaces.
+ We support CMTK, Elastix, landmark-based transforms and more!
- :octicons-file-directory-symlink-24:{ .lg .middle } __Import/Export__
@@ -89,16 +89,16 @@ morphology. It stands on the shoulders of the excellent
---
Load neurons straight from Allen's
- [MICrONS](generated/gallery/4_remote/plot_02_remote_microns) datasets,
- [neuromorpho](http://neuromorpho.org), [neuPrint](generated/gallery/4_remote/plot_00_remote_neuprint)
+ [MICrONS](generated/gallery/4_remote/tutorial_remote_02_microns) datasets,
+ [neuromorpho](http://neuromorpho.org), [neuPrint](generated/gallery/4_remote/tutorial_remote_00_neuprint)
or any NeuroGlancer source.
- :material-connection:{ .lg .middle } __Interfaces__
---
- Load neurons into [Blender 3D](generated/gallery/3_interfaces/plot_01_interfaces_blender), simulate neurons and networks using
- [NEURON](generated/gallery/3_interfaces/plot_00_interfaces_neuron), or use the R natverse library via `rpy2`.
+ Load neurons into [Blender 3D](generated/gallery/3_interfaces/tutorial_interfaces_02_blender), simulate neurons and networks using
+ [NEURON](generated/gallery/3_interfaces/tutorial_interfaces_00_neuron), or use the R natverse library via `rpy2`.
- :material-google-circles-extended:{ .lg .middle } __Extensible__
diff --git a/docs/installation.md b/docs/installation.md
index 7ebdd35a..96b42ec4 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -54,6 +54,13 @@ Open up a terminal and install {{ navis }} with:
pip install git+https://github.com/navis-org/navis@master
```
+    To install the latest dev version with extras:
+
+ ``` sh
+ pip install "navis[all] @ git+https://github.com/navis-org/navis@master"
+ ```
+
+
!!! note
MacOS (both Intel and the new ARM chips) and Linux should work off the bat without any problems.
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index a559ffdf..00000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,281 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set BUILDDIR=_build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
-set I18NSPHINXOPTS=%SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
- set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
- set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
- :help
- echo.Please use `make ^` where ^ is one of
- echo. html to make standalone HTML files
- echo. dirhtml to make HTML files named index.html in directories
- echo. singlehtml to make a single large HTML file
- echo. pickle to make pickle files
- echo. json to make JSON files
- echo. htmlhelp to make HTML files and a HTML help project
- echo. qthelp to make HTML files and a qthelp project
- echo. devhelp to make HTML files and a Devhelp project
- echo. epub to make an epub
- echo. epub3 to make an epub3
- echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
- echo. text to make text files
- echo. man to make manual pages
- echo. texinfo to make Texinfo files
- echo. gettext to make PO message catalogs
- echo. changes to make an overview over all changed/added/deprecated items
- echo. xml to make Docutils-native XML files
- echo. pseudoxml to make pseudoxml-XML files for display purposes
- echo. linkcheck to check all external links for integrity
- echo. doctest to run all doctests embedded in the documentation if enabled
- echo. coverage to run coverage check of the documentation if enabled
- echo. dummy to check syntax errors of document sources
- goto end
-)
-
-if "%1" == "clean" (
- for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
- del /q /s %BUILDDIR%\*
- goto end
-)
-
-
-REM Check if sphinx-build is available and fallback to Python version if any
-%SPHINXBUILD% 1>NUL 2>NUL
-if errorlevel 9009 goto sphinx_python
-goto sphinx_ok
-
-:sphinx_python
-
-set SPHINXBUILD=python -m sphinx.__init__
-%SPHINXBUILD% 2> nul
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.http://sphinx-doc.org/
- exit /b 1
-)
-
-:sphinx_ok
-
-
-if "%1" == "html" (
- %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/html.
- goto end
-)
-
-if "%1" == "dirhtml" (
- %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
- goto end
-)
-
-if "%1" == "singlehtml" (
- %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
- goto end
-)
-
-if "%1" == "pickle" (
- %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the pickle files.
- goto end
-)
-
-if "%1" == "json" (
- %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the JSON files.
- goto end
-)
-
-if "%1" == "htmlhelp" (
- %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run HTML Help Workshop with the ^
-.hhp project file in %BUILDDIR%/htmlhelp.
- goto end
-)
-
-if "%1" == "qthelp" (
- %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run "qcollectiongenerator" with the ^
-.qhcp project file in %BUILDDIR%/qthelp, like this:
- echo.^> qcollectiongenerator %BUILDDIR%\qthelp\navis.qhcp
- echo.To view the help file:
- echo.^> assistant -collectionFile %BUILDDIR%\qthelp\navis.ghc
- goto end
-)
-
-if "%1" == "devhelp" (
- %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished.
- goto end
-)
-
-if "%1" == "epub" (
- %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The epub file is in %BUILDDIR%/epub.
- goto end
-)
-
-if "%1" == "epub3" (
- %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
- goto end
-)
-
-if "%1" == "latex" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "latexpdf" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- cd %BUILDDIR%/latex
- make all-pdf
- cd %~dp0
- echo.
- echo.Build finished; the PDF files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "latexpdfja" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- cd %BUILDDIR%/latex
- make all-pdf-ja
- cd %~dp0
- echo.
- echo.Build finished; the PDF files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "text" (
- %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The text files are in %BUILDDIR%/text.
- goto end
-)
-
-if "%1" == "man" (
- %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The manual pages are in %BUILDDIR%/man.
- goto end
-)
-
-if "%1" == "texinfo" (
- %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
- goto end
-)
-
-if "%1" == "gettext" (
- %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
- goto end
-)
-
-if "%1" == "changes" (
- %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
- if errorlevel 1 exit /b 1
- echo.
- echo.The overview file is in %BUILDDIR%/changes.
- goto end
-)
-
-if "%1" == "linkcheck" (
- %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
- if errorlevel 1 exit /b 1
- echo.
- echo.Link check complete; look for any errors in the above output ^
-or in %BUILDDIR%/linkcheck/output.txt.
- goto end
-)
-
-if "%1" == "doctest" (
- %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
- if errorlevel 1 exit /b 1
- echo.
- echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
- goto end
-)
-
-if "%1" == "coverage" (
- %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
- if errorlevel 1 exit /b 1
- echo.
- echo.Testing of coverage in the sources finished, look at the ^
-results in %BUILDDIR%/coverage/python.txt.
- goto end
-)
-
-if "%1" == "xml" (
- %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The XML files are in %BUILDDIR%/xml.
- goto end
-)
-
-if "%1" == "pseudoxml" (
- %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
- goto end
-)
-
-if "%1" == "dummy" (
- %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. Dummy builder generates no files.
- goto end
-)
-
-:end
diff --git a/docs/quickstart.md b/docs/quickstart.md
index 6e504b0d..93950dea 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -34,7 +34,7 @@ print(n) # markdown-exec: hide
[I/O Tutorials](../generated/gallery#import-export) to learn more!
{{ navis }} represents neurons as [`navis.TreeNeuron`][], [`navis.MeshNeuron`][], [`navis.VoxelNeuron`][] or
-[`navis.Dotprops`][] - see the tutorial on [Neuron Types](../generated/gallery/plot_01_neurons_intro/)
+[`navis.Dotprops`][] - see the tutorial on [Neuron Types](../generated/gallery/tutorial_basic_01_neurons/)
for details.
In above code we asked for a skeleton, so the neuron returned is a [`TreeNeuron`][navis.TreeNeuron].
@@ -113,7 +113,7 @@ print(buffer.getvalue()) # markdown-exec: hide
plt.close() # markdown-exec: hide
```
-See the [Lists of Neurons](../generated/gallery/plot_02_neuronlists_intro/)
+See the [Lists of Neurons](../generated/gallery/tutorial_basic_02_neuronlists/)
tutorial for more information.
## Methods vs Functions
@@ -189,14 +189,14 @@ Note that most functions have helpful `Examples`!
Find out more about the different neuron types in {{ navis }}.
- [:octicons-arrow-right-24: Neuron types tutorial](../generated/gallery/plot_01_neurons_intro)
+ [:octicons-arrow-right-24: Neuron types tutorial](../generated/gallery/tutorial_basic_01_neurons)
- :material-cube:{ .lg .middle } __Lists of Neurons__
---
Check out the guide on lists of neurons.
- [:octicons-arrow-right-24: NeuronLists tutorial](../generated/gallery/plot_02_neuronlists_intro)
+ [:octicons-arrow-right-24: NeuronLists tutorial](../generated/gallery/tutorial_basic_02_neuronlists)
- :octicons-file-directory-symlink-16:{ .lg .middle } __Neuron I/O__
diff --git a/mkdocs.yml b/mkdocs.yml
index f2916622..d2422fde 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -81,6 +81,7 @@ markdown_extensions:
- pymdownx.details
- pymdownx.arithmatex:
generic: true
+ - footnotes
plugins:
# the privacy plugin will bundle external assets (like Google Fonts)
@@ -134,12 +135,14 @@ plugins:
- "^__init__$"
- glightbox
- gallery:
+ filename_pattern: "/tutorial_"
examples_dirs: docs/examples # path to your example scripts
gallery_dirs: docs/generated/gallery # where to save generated gallery
conf_script: docs/gallery_conf.py # configuration script
within_subsection_order: FileNameSortKey # sort examples by filename
download_all_examples: false
remove_config_comments: true
+ matplotlib_animations: true
# only_warn_on_example_error: true
- markdown-exec
diff --git a/navis/__version__.py b/navis/__version__.py
index 7ac4b99f..1f9dfb7f 100644
--- a/navis/__version__.py
+++ b/navis/__version__.py
@@ -11,5 +11,5 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-__version__ = "1.7.0"
-__version_vector__ = (1, 7, 0)
+__version__ = "1.8.0"
+__version_vector__ = (1, 8, 0)
diff --git a/navis/config.py b/navis/config.py
index 643fc296..9aec3b17 100644
--- a/navis/config.py
+++ b/navis/config.py
@@ -79,6 +79,9 @@ def get_logger(name: str):
# Unit registry
ureg = pint.UnitRegistry()
+# Whether to add units to certain spatial neuron properties
+add_units = False
+
# Set to true to prevent Viewer from ever showing
headless = os.environ.get('NAVIS_HEADLESS', 'False').lower() == 'true'
if headless:
@@ -108,7 +111,7 @@ def get_logger(name: str):
def _type_of_script():
"""Returns context in which navis is run. """
try:
- ipy_str = str(type(get_ipython()))
+ ipy_str = str(type(get_ipython())) #noqa
if 'zmqshell' in ipy_str:
return 'jupyter'
if 'terminal' in ipy_str:
diff --git a/navis/conftest.py b/navis/conftest.py
index 62326a8f..aca9994b 100644
--- a/navis/conftest.py
+++ b/navis/conftest.py
@@ -10,10 +10,8 @@
try:
import pytest
-
decorator = pytest.fixture(autouse=True)
-
-except ImportError:
+except ModuleNotFoundError:
def decorator(fn):
return fn
diff --git a/navis/conversion/converters.py b/navis/conversion/converters.py
index e0cb8f61..8f95ec7f 100644
--- a/navis/conversion/converters.py
+++ b/navis/conversion/converters.py
@@ -520,7 +520,8 @@ def neuron2voxels(x: 'core.BaseNeuron',
def tree2meshneuron(x: 'core.TreeNeuron',
tube_points: int = 8,
radius_scale_factor: float = 1,
- use_normals: bool = True
+ use_normals: bool = True,
+ warn_missing_radii: bool = True
) -> 'core.MeshNeuron':
"""Convert TreeNeuron to MeshNeuron.
@@ -538,6 +539,8 @@ def tree2meshneuron(x: 'core.TreeNeuron',
Factor to scale radii by.
use_normals : bool
If True will rotate tube along its curvature.
+ warn_missing_radii : bool
+ Whether to warn if radii are missing or <= 0.
Returns
-------
@@ -566,9 +569,8 @@ def tree2meshneuron(x: 'core.TreeNeuron',
# Note that we are treating missing radii as "0"
radii_map = x.nodes.radius.fillna(0).values
- if (radii_map <= 0).any():
- logger.warning('At least some radii are missing or <= 0. Mesh will '
- 'look funny.')
+ if warn_missing_radii and (radii_map <= 0).any():
+ logger.warning('At least some radii are missing or <= 0. Mesh may look funny.')
# Map radii onto segments
radii = [radii_map[seg] * radius_scale_factor for seg in segments]
diff --git a/navis/conversion/meshing.py b/navis/conversion/meshing.py
index 83acfbdf..0b1a75a5 100644
--- a/navis/conversion/meshing.py
+++ b/navis/conversion/meshing.py
@@ -22,13 +22,13 @@
try:
from fastremap import unique
-except ImportError:
+except ModuleNotFoundError:
from numpy import unique
try:
import skimage
from skimage import measure
-except ImportError:
+except ModuleNotFoundError:
skimage = None
logger = config.get_logger(__name__)
@@ -86,8 +86,10 @@ def voxels2mesh(vox: Union['core.VoxelNeuron', np.ndarray],
"""
if not skimage:
- raise ImportError('Meshing requires `skimage`:\n '
- 'pip3 install scikit-image')
+ raise ModuleNotFoundError(
+ 'Meshing requires `skimage`:\n '
+ 'pip3 install scikit-image'
+ )
utils.eval_param(vox, 'vox', allowed_types=(core.VoxelNeuron, np.ndarray))
diff --git a/navis/core/base.py b/navis/core/base.py
index 3ee3b415..73bf2bc4 100644
--- a/navis/core/base.py
+++ b/navis/core/base.py
@@ -31,10 +31,10 @@
try:
import xxhash
-except ImportError:
+except ModuleNotFoundError:
xxhash = None
-__all__ = ['Neuron']
+__all__ = ["Neuron"]
# Set up logging
logger = config.get_logger(__name__)
@@ -45,8 +45,9 @@
pint.Quantity([])
-def Neuron(x: Union[nx.DiGraph, str, pd.DataFrame, 'TreeNeuron', 'MeshNeuron'],
- **metadata):
+def Neuron(
+ x: Union[nx.DiGraph, str, pd.DataFrame, "TreeNeuron", "MeshNeuron"], **metadata
+):
"""Constructor for Neuron objects. Depending on the input, either a
`TreeNeuron` or a `MeshNeuron` is returned.
@@ -183,10 +184,10 @@ class BaseNeuron(UnitObject):
connectors: Optional[pd.DataFrame]
#: Attributes used for neuron summary
- SUMMARY_PROPS = ['type', 'name', 'units']
+ SUMMARY_PROPS = ["type", "name", "units"]
#: Attributes to be used when comparing two neurons.
- EQ_ATTRIBUTES = ['name']
+ EQ_ATTRIBUTES = ["name"]
#: Temporary attributes that need clearing when neuron data changes
TEMP_ATTR = ["_memory_usage"]
@@ -212,8 +213,8 @@ def __init__(self, **kwargs):
def __getattr__(self, key):
"""Get attribute."""
- if key.startswith('has_'):
- key = key[key.index('_') + 1:]
+ if key.startswith("has_"):
+ key = key[key.index("_") + 1 :]
if hasattr(self, key):
data = getattr(self, key)
if isinstance(data, pd.DataFrame):
@@ -223,7 +224,7 @@ def __getattr__(self, key):
return False
# This is necessary because np.any does not like strings
elif isinstance(data, str):
- if data == 'NA' or not data:
+ if data == "NA" or not data:
return False
return True
elif utils.is_iterable(data) and len(data) > 0:
@@ -231,16 +232,16 @@ def __getattr__(self, key):
elif data:
return True
return False
- elif key.startswith('n_'):
- key = key[key.index('_') + 1:]
+ elif key.startswith("n_"):
+ key = key[key.index("_") + 1 :]
if hasattr(self, key):
data = getattr(self, key, None)
if isinstance(data, pd.DataFrame):
return data.shape[0]
elif utils.is_iterable(data):
return len(data)
- elif isinstance(data, str) and data == 'NA':
- return 'NA'
+ elif isinstance(data, str) and data == "NA":
+ return "NA"
return None
raise AttributeError(f'Attribute "{key}" not found')
@@ -284,8 +285,7 @@ def __add__(self, other):
"""Implement addition."""
if isinstance(other, BaseNeuron):
return core.NeuronList([self, other])
- else:
- return NotImplemented
+ return NotImplemented
def __imul__(self, other):
"""Multiplication with assignment (*=)."""
@@ -295,28 +295,37 @@ def __itruediv__(self, other):
"""Division with assignment (/=)."""
return self.__truediv__(other, copy=False)
+ def __iadd__(self, other):
+ """Addition with assignment (+=)."""
+ return self.__add__(other, copy=False)
+
+ def __isub__(self, other):
+ """Subtraction with assignment (-=)."""
+ return self.__sub__(other, copy=False)
+
def _repr_html_(self):
frame = self.summary().to_frame()
- frame.columns = ['']
+ frame.columns = [""]
# return self._gen_svg_thumbnail() + frame._repr_html_()
return frame._repr_html_()
def _gen_svg_thumbnail(self):
"""Generate 2D plot for thumbnail."""
import matplotlib.pyplot as plt
+
# Store some previous states
prev_level = logger.getEffectiveLevel()
prev_pbar = config.pbar_hide
prev_int = plt.isinteractive()
plt.ioff() # turn off interactive mode
- logger.setLevel('WARNING')
+ logger.setLevel("WARNING")
config.pbar_hide = True
fig = plt.figure(figsize=(2, 2))
ax = fig.add_subplot(111)
fig, ax = self.plot2d(connectors=False, ax=ax)
output = StringIO()
- fig.savefig(output, format='svg')
+ fig.savefig(output, format="svg")
if prev_int:
plt.ion() # turn on interactive mode
@@ -339,9 +348,11 @@ def _clear_temp_attr(self, exclude: list = []) -> None:
for a in [at for at in self.TEMP_ATTR if at not in exclude]:
try:
delattr(self, a)
- logger.debug(f'Neuron {self.id} {hex(id(self))}: attribute {a} cleared')
+ logger.debug(f"Neuron {self.id} {hex(id(self))}: attribute {a} cleared")
except AttributeError:
- logger.debug(f'Neuron {self.id} at {hex(id(self))}: Unable to clear temporary attribute "{a}"')
+ logger.debug(
+ f'Neuron {self.id} at {hex(id(self))}: Unable to clear temporary attribute "{a}"'
+ )
except BaseException:
raise
@@ -358,8 +369,10 @@ def _register_attr(self, name, value, summary=True, temporary=False):
if isinstance(value, (numbers.Number, str, bool, np.bool_, type(None))):
self.SUMMARY_PROPS.append(name)
else:
- logger.error(f'Attribute "{name}" of type "{type(value)}" '
- 'can not be added to summary')
+ logger.error(
+ f'Attribute "{name}" of type "{type(value)}" '
+ "can not be added to summary"
+ )
if temporary:
self.TEMP_ATTR.append(name)
@@ -386,14 +399,14 @@ def core_md5(self) -> str:
MD5 checksum of core data. `None` if no core data.
"""
- hash = ''
+ hash = ""
for prop in self.CORE_DATA:
cols = None
# See if we need to parse props into property and columns
# e.g. "nodes:node_id,parent_id,x,y,z"
- if ':' in prop:
- prop, cols = prop.split(':')
- cols = cols.split(',')
+ if ":" in prop:
+ prop, cols = prop.split(":")
+ cols = cols.split(",")
if hasattr(self, prop):
data = getattr(self, prop)
@@ -419,9 +432,11 @@ def datatables(self) -> List[str]:
@property
def extents(self) -> np.ndarray:
"""Extents of neuron in x/y/z direction (includes connectors)."""
- if not hasattr(self, 'bbox'):
- raise ValueError('Neuron must implement `.bbox` (bounding box) '
- 'property to calculate extents.')
+ if not hasattr(self, "bbox"):
+ raise ValueError(
+ "Neuron must implement `.bbox` (bounding box) "
+ "property to calculate extents."
+ )
bbox = self.bbox
return bbox[:, 1] - bbox[:, 0]
@@ -432,26 +447,26 @@ def id(self) -> Any:
Must be hashable. If not set, will assign a random unique identifier.
Can be indexed by using the `NeuronList.idx[]` locator.
"""
- return getattr(self, '_id', None)
+ return getattr(self, "_id", None)
@id.setter
def id(self, value):
try:
hash(value)
except BaseException:
- raise ValueError('id must be hashable')
+ raise ValueError("id must be hashable")
self._id = value
@property
def label(self) -> str:
"""Label (e.g. for legends)."""
# If explicitly set return that label
- if getattr(self, '_label', None):
+ if getattr(self, "_label", None):
return self._label
# If no label set, produce one from name + id (optional)
- name = getattr(self, 'name', None)
- id = getattr(self, 'id', None)
+ name = getattr(self, "name", None)
+ id = getattr(self, "id", None)
# If no name, use type
if not name:
@@ -465,11 +480,11 @@ def label(self) -> str:
try:
id = str(id)
except BaseException:
- id = ''
+ id = ""
# Only use ID if it is not the same as name
if id and name != id:
- label += f' ({id})'
+ label += f" ({id})"
return label
@@ -482,7 +497,7 @@ def label(self, value: str):
@property
def name(self) -> str:
"""Neuron name."""
- return getattr(self, '_name', None)
+ return getattr(self, "_name", None)
@name.setter
def name(self, value: str):
@@ -498,10 +513,9 @@ def connectors(self, v):
if isinstance(v, type(None)):
self._connectors = None
else:
- self._connectors = utils.validate_table(v,
- required=['x', 'y', 'z'],
- rename=True,
- restrict=False)
+ self._connectors = utils.validate_table(
+ v, required=["x", "y", "z"], rename=True, restrict=False
+ )
@property
def presynapses(self):
@@ -510,19 +524,19 @@ def presynapses(self):
Requires a "type" column in connector table. Will look for type labels
that include "pre" or that equal 0 or "0".
"""
- if not isinstance(getattr(self, 'connectors', None), pd.DataFrame):
- raise ValueError('No connector table found.')
+ if not isinstance(getattr(self, "connectors", None), pd.DataFrame):
+ raise ValueError("No connector table found.")
# Make an educated guess what presynapses are
types = self.connectors["type"].unique()
pre = [t for t in types if "pre" in str(t).lower() or t in [0, "0"]]
if len(pre) == 0:
- logger.debug(f'Unable to find presynapses in types: {types}')
+ logger.debug(f"Unable to find presynapses in types: {types}")
return self.connectors.iloc[0:0] # return empty DataFrame
elif len(pre) > 1:
- raise ValueError(f'Found ambigous presynapse labels: {pre}')
+            raise ValueError(f"Found ambiguous presynapse labels: {pre}")
- return self.connectors[self.connectors['type'] == pre[0]]
+ return self.connectors[self.connectors["type"] == pre[0]]
@property
def postsynapses(self):
@@ -531,27 +545,25 @@ def postsynapses(self):
Requires a "type" column in connector table. Will look for type labels
that include "post" or that equal 1 or "1".
"""
- if not isinstance(getattr(self, 'connectors', None), pd.DataFrame):
- raise ValueError('No connector table found.')
+ if not isinstance(getattr(self, "connectors", None), pd.DataFrame):
+ raise ValueError("No connector table found.")
# Make an educated guess what postsynapses are
types = self.connectors["type"].unique()
post = [t for t in types if "post" in str(t).lower() or t in [1, "1"]]
if len(post) == 0:
- logger.debug(f'Unable to find postsynapses in types: {types}')
+ logger.debug(f"Unable to find postsynapses in types: {types}")
return self.connectors.iloc[0:0] # return empty DataFrame
elif len(post) > 1:
- raise ValueError(f'Found ambigous postsynapse labels: {post}')
-
- return self.connectors[self.connectors['type'] == post[0]]
-
+ raise ValueError(f"Found ambigous postsynapse labels: {post}")
+ return self.connectors[self.connectors["type"] == post[0]]
@property
def is_stale(self) -> bool:
"""Test if temporary attributes might be outdated."""
# If we know we are stale, just return True
- if getattr(self, '_stale', False):
+ if getattr(self, "_stale", False):
return True
else:
# Only check if we believe we are not stale
@@ -561,7 +573,7 @@ def is_stale(self) -> bool:
@property
def is_locked(self):
"""Test if neuron is locked."""
- return getattr(self, '_lock', 0) > 0
+ return getattr(self, "_lock", 0) > 0
@property
def type(self) -> str:
@@ -578,9 +590,9 @@ def bbox(self) -> np.ndarray:
"""Bounding box of neuron."""
raise NotImplementedError(f"Bounding box not implemented for {type(self)}.")
- def convert_units(self,
- to: Union[pint.Unit, str],
- inplace: bool = False) -> Optional['BaseNeuron']:
+ def convert_units(
+ self, to: Union[pint.Unit, str], inplace: bool = False
+ ) -> Optional["BaseNeuron"]:
"""Convert coordinates to different unit.
Only works if neuron's `.units` is not dimensionless.
@@ -622,19 +634,21 @@ def convert_units(self,
# Multiply by conversion factor
n *= conv
- n._clear_temp_attr(exclude=['classify_nodes'])
+ n._clear_temp_attr(exclude=["classify_nodes"])
return n
- def copy(self, deepcopy=False) -> 'BaseNeuron':
+ def copy(self, deepcopy=False) -> "BaseNeuron":
"""Return a copy of the neuron."""
copy_fn = copy.deepcopy if deepcopy else copy.copy
# Attributes not to copy
- no_copy = ['_lock']
+ no_copy = ["_lock"]
# Generate new empty neuron
x = self.__class__()
# Override with this neuron's data
- x.__dict__.update({k: copy_fn(v) for k, v in self.__dict__.items() if k not in no_copy})
+ x.__dict__.update(
+ {k: copy_fn(v) for k, v in self.__dict__.items() if k not in no_copy}
+ )
return x
@@ -643,20 +657,22 @@ def summary(self, add_props=None) -> pd.Series:
# Do not remove the list -> otherwise we might change the original!
props = list(self.SUMMARY_PROPS)
+ # Make sure the ID (if present) always comes third, i.e. at index 2
+ if "id" in props and props.index("id") != 2:
+ props.remove("id")
+ props.insert(2, "id")
# Add .id to summary if not a generic UUID
- if not isinstance(self.id, uuid.UUID):
- props.insert(2, 'id')
+ elif not isinstance(self.id, uuid.UUID) and "id" not in props:
+ props.insert(2, "id")
if add_props:
- props, ix = np.unique(np.append(props, add_props),
- return_inverse=True)
+ props, ix = np.unique(np.append(props, add_props), return_inverse=True)
props = props[ix]
# This is to catch an annoying "UnitStrippedWarning" with pint
with warnings.catch_warnings():
warnings.simplefilter("ignore")
- s = pd.Series([getattr(self, at, 'NA') for at in props],
- index=props)
+ s = pd.Series([getattr(self, at, "NA") for at in props], index=props)
return s
@@ -705,10 +721,11 @@ def plot3d(self, **kwargs):
return plot3d(core.NeuronList(self, make_copy=False), **kwargs)
- def map_units(self,
- units: Union[pint.Unit, str],
- on_error: Union[Literal['raise'],
- Literal['ignore']] = 'raise') -> Union[int, float]:
+ def map_units(
+ self,
+ units: Union[pint.Unit, str],
+ on_error: Union[Literal["raise"], Literal["ignore"]] = "raise",
+ ) -> Union[int, float]:
"""Convert units to match neuron space.
Only works if neuron's `.units` is isometric and not dimensionless.
@@ -725,7 +742,7 @@ def map_units(self,
See Also
--------
- [`navis.to_neuron_space`][]
+ [`navis.core.to_neuron_space`][]
The base function for this method.
Examples
@@ -744,8 +761,7 @@ def map_units(self,
[0.125, 0.125, 0.125]
"""
- return core.core_utils.to_neuron_space(units, neuron=self,
- on_error=on_error)
+ return core.core_utils.to_neuron_space(units, neuron=self, on_error=on_error)
def memory_usage(self, deep=False, estimate=False):
"""Return estimated memory usage of this neuron.
@@ -775,8 +791,8 @@ def memory_usage(self, deep=False, estimate=False):
# as possible
if hasattr(self, "_memory_usage"):
mu = self._memory_usage
- if mu['deep'] == deep and mu['estimate'] == estimate:
- return mu['size']
+ if mu["deep"] == deep and mu["estimate"] == estimate:
+ return mu["size"]
size = 0
if not estimate:
@@ -803,8 +819,6 @@ def memory_usage(self, deep=False, estimate=False):
else:
size += v.dtype.itemsize * v.shape[0]
- self._memory_usage = {'deep': deep,
- 'estimate': estimate,
- 'size': size}
+ self._memory_usage = {"deep": deep, "estimate": estimate, "size": size}
return size
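For orientation: the caching above means repeated `memory_usage` calls with the same arguments are served from `._memory_usage` instead of being recounted. A minimal sketch, assuming `navis` is installed with its bundled example data:

```python
import navis

# One of the bundled example skeletons
n = navis.example_neurons(1)

# First call computes the size and caches it in `._memory_usage`
size = n.memory_usage(deep=False, estimate=False)

# Same arguments -> served from the cache instead of recounting
assert n.memory_usage(deep=False, estimate=False) == size
print(f'~{size / 1024:.1f} KiB')
```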
diff --git a/navis/core/core_utils.py b/navis/core/core_utils.py
index 8b0dd2c3..66a29034 100644
--- a/navis/core/core_utils.py
+++ b/navis/core/core_utils.py
@@ -33,7 +33,7 @@
# (see https://stackoverflow.com/questions/55611806/how-to-set-chunk-size-when-using-pathos-processingpools-map)
import pathos
ProcessingPool = pathos.pools._ProcessPool
-except ImportError:
+except ModuleNotFoundError:
ProcessingPool = None
__all__ = ['make_dotprops', 'to_neuron_space']
@@ -55,14 +55,33 @@ def wrapper(*args, **kwargs):
return wrapper
+def add_units(compact=True, power=1):
+ """Add neuron units (if present) to output of function."""
+ def outer(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ self = args[0]
+ res = func(*args, **kwargs)
+
+ if config.add_units and self.has_units and not self.units.dimensionless:
+ res = res * np.power(self.units, power)
+ if compact:
+ res = res.to_compact()
+
+ return res
+ return wrapper
+ return outer
+
+
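The `add_units` decorator above is easiest to understand in isolation. Below is a toy re-creation of the same pattern (illustrative only: the `Toy` class and its hard-coded value are made up, and `.units` is assumed to be a `pint.Quantity` as in navis):

```python
import functools

import numpy as np
import pint

ureg = pint.UnitRegistry()


def add_units(compact=True, power=1):
    """Toy re-creation of the decorator pattern above (illustrative only)."""
    def outer(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            res = func(self, *args, **kwargs)
            # Attach units**power to the raw number, e.g. nm**3 for a volume
            res = res * np.power(self.units, power)
            return res.to_compact() if compact else res
        return wrapper
    return outer


class Toy:
    units = ureg.Quantity(1, 'nm')

    @property
    @add_units(compact=True, power=3)
    def volume(self):
        return 2e9  # raw value, implicitly in nm^3


print(Toy().volume)  # e.g. "2.0 micrometer ** 3"
```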
@utils.map_neuronlist(desc='Dotprops', allow_parallel=True)
def make_dotprops(x: Union[pd.DataFrame, np.ndarray,
'core.TreeNeuron', 'core.MeshNeuron',
'core.VoxelNeuron', 'core.NeuronList'],
k: int = 20,
resample: Union[float, int, bool, str] = False,
- threshold: float = None) -> Union['core.Dotprops', 'core.NeuronList']:
- """Produce dotprops from neurons or x/y/z points.
+ threshold: float = None,
+ make_using: Optional[type] = None) -> Union['core.Dotprops', 'core.NeuronList']:
+ """Produce dotprops from neurons or point clouds.
This follows the implementation in R's `nat` library.
@@ -388,9 +407,11 @@ def __call__(self, *args, **kwargs):
# Apply function
if parallel:
if not ProcessingPool:
- raise ImportError('navis relies on pathos for multiprocessing!'
- 'Please install pathos and try again:\n'
- ' pip3 install pathos -U')
+ raise ModuleNotFoundError(
+ 'navis relies on pathos for multiprocessing! '
+ 'Please install pathos and try again:\n'
+ ' pip3 install pathos -U'
+ )
if self.warn_inplace and kwargs.get('inplace', False):
logger.warning('`inplace=True` does not work with '
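Typical usage of `make_dotprops`, sketched under the assumption that `navis` and its bundled example neurons are available (the new `make_using` argument is left at its default):

```python
import navis
import numpy as np

# From existing skeletons: the k nearest neighbours define each tangent vector
nl = navis.example_neurons(3, kind='skeleton')
dps = navis.make_dotprops(nl, k=20)

# From a raw (N, 3) point cloud
pts = np.random.rand(1000, 3) * 100
dp = navis.make_dotprops(pts, k=5)
```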
diff --git a/navis/core/dotprop.py b/navis/core/dotprop.py
index aa4893b7..0f51dd1b 100644
--- a/navis/core/dotprop.py
+++ b/navis/core/dotprop.py
@@ -23,18 +23,18 @@
from typing import Union, Callable, List, Optional, Tuple
from typing_extensions import Literal
-from .. import utils, config, core, sampling, graph
+from .. import utils, config, core, sampling, graph, morpho
from .base import BaseNeuron
try:
import xxhash
-except ImportError:
+except ModuleNotFoundError:
xxhash = None
try:
from pykdtree.kdtree import KDTree
-except ImportError:
+except ModuleNotFoundError:
from scipy.spatial import cKDTree as KDTree
__all__ = ['Dotprops']
@@ -182,6 +182,41 @@ def __mul__(self, other, copy=True):
return n
return NotImplemented
+ def __add__(self, other, copy=True):
+ """Implement addition for coordinates."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ # If a number, consider this an offset for coordinates
+ n = self.copy() if copy else self
+ _ = np.add(n.points, other, out=n.points, casting='unsafe')
+ if n.has_connectors:
+ n.connectors.loc[:, ['x', 'y', 'z']] += other
+
+ # Force recomputing of KDTree
+ if hasattr(n, '_tree'):
+ delattr(n, '_tree')
+
+ return n
+ # If another neuron, return a list of neurons
+ elif isinstance(other, BaseNeuron):
+ return core.NeuronList([self, other])
+ return NotImplemented
+
+ def __sub__(self, other, copy=True):
+ """Implement subtraction for coordinates."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ # If a number, consider this an offset for coordinates
+ n = self.copy() if copy else self
+ _ = np.subtract(n.points, other, out=n.points, casting='unsafe')
+ if n.has_connectors:
+ n.connectors.loc[:, ['x', 'y', 'z']] -= other
+
+ # Force recomputing of KDTree
+ if hasattr(n, '_tree'):
+ delattr(n, '_tree')
+
+ return n
+ return NotImplemented
+
def __getstate__(self):
"""Get state (used e.g. for pickling)."""
state = {k: v for k, v in self.__dict__.items() if not callable(v)}
@@ -195,6 +230,9 @@ def __getstate__(self):
return state
+ def __len__(self):
+ return len(self.points)
+
@property
def alpha(self):
"""Alpha value for tangent vectors (optional)."""
@@ -461,7 +499,47 @@ def copy(self) -> 'Dotprops':
return x
- def recalculate_tangents(self, k: int, inplace=False) -> None:
+ def drop_fluff(self, epsilon: float = None, keep_size: float = None, n_largest: int = None, inplace=False):
+ """Remove fluff from neuron.
+
+ By default, this function will remove all but the largest connected
+ component from the neuron. You can change that behavior using the
+ `keep_size` and `n_largest` parameters.
+
+ Parameters
+ ----------
+ epsilon : float, optional
+ Distance at which to consider two points to be connected.
+ If `None`, will use the default value of 5 times the average
+ node distance (`self.sampling_resolution`).
+ keep_size : float, optional
+ Use this to set a size (in number of points) for small
+ bits to keep. If `keep_size` < 1 it will be interpreted as
+ fraction of total nodes/vertices/points.
+ n_largest : int, optional
+ If set, will keep the `n_largest` connected components. Note:
+ if provided, `keep_size` will be applied first!
+ inplace : bool, optional
+ If False, will return a copy and leave the original data
+ unmodified.
+
+ Returns
+ -------
+ Dotprops
+ Only if `inplace=False`.
+
+ See Also
+ --------
+ [`navis.drop_fluff`][]
+ Base function. See for details and examples.
+
+ """
+ x = morpho.drop_fluff(self, epsilon=epsilon, keep_size=keep_size, n_largest=n_largest, inplace=inplace)
+
+ if not inplace:
+ return x
+
+ def recalculate_tangents(self, k: int, inplace=False):
"""Recalculate tangent vectors and alpha with a new `k`.
Parameters
@@ -569,10 +647,14 @@ def snap(self, locs, to='points'):
def to_skeleton(self,
scale_vec: Union[float, Literal['auto']] = 'auto'
) -> core.TreeNeuron:
- """Turn dotprops into a skeleton.
+ """Turn Dotprop into a TreeNeuron.
- This is mainly for things like plotting as it does not produce
- meaningful edges. Also note that only minimal meta data is carried over.
+ This does *not* skeletonize the neuron but rather generates a line
+ segment for each point based on the tangent vector. This is mainly
+ used under the hood for plotting. Also note that only minimal meta
+ data is carried over.
+
+ For proper skeletonization see [`navis.skeletonize`][].
Parameters
----------
@@ -625,5 +707,3 @@ def to_skeleton(self,
return tn
- def __len__(self):
- return len(self.points)
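A short sketch of what the new `__add__`/`__sub__` semantics imply for Dotprops, assuming the change above is applied:

```python
import navis

dp = navis.make_dotprops(navis.example_neurons(1, kind='skeleton'), k=5)

# Scalar offsets shift all coordinates; per-axis offsets need 3 values
shifted = dp + 1000            # move +1000 along x, y and z
back = shifted - [1000, 0, 0]  # move only x back again

# Adding two neurons returns a NeuronList instead
pair = dp + back
print(len(pair))  # 2
```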
diff --git a/navis/core/mesh.py b/navis/core/mesh.py
index fc75a560..233076e0 100644
--- a/navis/core/mesh.py
+++ b/navis/core/mesh.py
@@ -24,18 +24,18 @@
import skeletor as sk
import trimesh as tm
-from io import BufferedIOBase
from typing import Union, Optional
from .. import utils, config, meshes, conversion, graph
from .base import BaseNeuron
+from .neuronlist import NeuronList
from .skeleton import TreeNeuron
-from .core_utils import temp_property
+from .core_utils import temp_property, add_units
try:
import xxhash
-except ImportError:
+except ModuleNotFoundError:
xxhash = None
@@ -225,6 +225,35 @@ def __mul__(self, other, copy=True):
return n
return NotImplemented
+ def __add__(self, other, copy=True):
+ """Implement addition for coordinates (vertices, connectors)."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ n = self.copy() if copy else self
+ _ = np.add(n.vertices, other, out=n.vertices, casting='unsafe')
+ if n.has_connectors:
+ n.connectors.loc[:, ['x', 'y', 'z']] += other
+
+ n._clear_temp_attr()
+
+ return n
+ # If another neuron, return a list of neurons
+ elif isinstance(other, BaseNeuron):
+ return NeuronList([self, other])
+ return NotImplemented
+
+ def __sub__(self, other, copy=True):
+ """Implement subtraction for coordinates (vertices, connectors)."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ n = self.copy() if copy else self
+ _ = np.subtract(n.vertices, other, out=n.vertices, casting='unsafe')
+ if n.has_connectors:
+ n.connectors.loc[:, ['x', 'y', 'z']] -= other
+
+ n._clear_temp_attr()
+
+ return n
+ return NotImplemented
+
@property
def bbox(self) -> np.ndarray:
"""Bounding box (includes connectors)."""
@@ -294,6 +323,7 @@ def sampling_resolution(self) -> float:
return float(self.trimesh.edges_unique_length.mean())
@property
+ @add_units(compact=True, power=3)
def volume(self) -> float:
"""Volume of the neuron.
@@ -308,7 +338,7 @@ def volume(self) -> float:
def skeleton(self) -> 'TreeNeuron':
"""Skeleton representation of this neuron.
- Uses [`navis.mesh2skeleton`][].
+ Uses [`navis.conversion.mesh2skeleton`][].
"""
if not hasattr(self, '_skeleton'):
@@ -329,6 +359,29 @@ def soma(self):
"""Not implemented for MeshNeurons - use `.soma_pos`."""
raise AttributeError("MeshNeurons have a soma position (`.soma_pos`), not a soma.")
+ @property
+ def soma_pos(self):
+ """X/Y/Z position of the soma.
+
+ Returns `None` if no soma.
+ """
+ return getattr(self, '_soma_pos', None)
+
+ @soma_pos.setter
+ def soma_pos(self, value):
+ """Set soma by position."""
+ if value is None:
+ self._soma_pos = None
+ return
+
+ try:
+ value = np.asarray(value).astype(np.float64).reshape(3)
+ except BaseException:
+ raise ValueError(f'Unable to convert soma position "{value}" '
+ 'to numeric (3,) numpy array.')
+
+ self._soma_pos = value
+
@property
def type(self) -> str:
"""Neuron type."""
diff --git a/navis/core/neuronlist.py b/navis/core/neuronlist.py
index b20f1136..ab5e6a61 100644
--- a/navis/core/neuronlist.py
+++ b/navis/core/neuronlist.py
@@ -566,9 +566,11 @@ def append(self, v):
>>> nl = navis.example_neurons()
>>> len(nl)
5
+ >>> # Add a single neuron to the list
>>> nl.append(nl[0])
>>> len(nl)
6
+ >>> # Add a list of neurons to the list
>>> nl.append(nl)
>>> len(nl)
12
diff --git a/navis/core/skeleton.py b/navis/core/skeleton.py
index dc64bbea..5bcab701 100644
--- a/navis/core/skeleton.py
+++ b/navis/core/skeleton.py
@@ -32,11 +32,11 @@
from .. import io # type: ignore # double import
from .base import BaseNeuron
-from .core_utils import temp_property
+from .core_utils import temp_property, add_units
try:
import xxhash
-except ImportError:
+except ModuleNotFoundError:
xxhash = None
__all__ = ['TreeNeuron']
@@ -75,13 +75,15 @@ class TreeNeuron(BaseNeuron):
- `pandas.Series` is expected to have a DataFrame as
`.nodes` - additional properties will be attached
as meta data
- - `str` filepath is passed to [`navis.read_swc`][]
+ - `tuple` of `(vertices, edges)` arrays is passed to
+ [`navis.edges2neuron`][]
+ - `str` is passed to [`navis.read_swc`][]
- `BufferedIOBase` e.g. from `open(filename)`
- - `networkx.DiGraph` parsed by `navis.nx2neuron`
- - `None` will initialize an empty neuron
+ - `networkx.DiGraph` parsed by [`navis.nx2neuron`][]
- `skeletor.Skeleton`
- `TreeNeuron` - in this case we will try to copy every
attribute
+ - `None` will initialize an empty neuron
units : str | pint.Units | pint.Quantity
Units for coordinates. Defaults to `None` (dimensionless).
Strings must be parsable by pint: e.g. "nm", "um",
@@ -177,6 +179,11 @@ def __init__(self,
setattr(self, at, copy.copy(getattr(self, at)))
except BaseException:
logger.warning(f'Unable to deep-copy attribute "{at}"')
+ elif isinstance(x, tuple):
+ # Tuple of vertices and edges
+ if len(x) != 2:
+ raise ValueError('Tuple must have 2 elements: vertices and edges.')
+ self.nodes = graph.edges2neuron(edges=x[1], vertices=x[0]).nodes
elif isinstance(x, type(None)):
# This is a essentially an empty neuron
pass
@@ -277,6 +284,57 @@ def __mul__(self, other, copy=True):
return n
return NotImplemented
+ def __add__(self, other, copy=True):
+ """Implement addition for coordinates (nodes, connectors)."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ if utils.is_iterable(other):
+ # If offset isotropic use only single value
+ if len(set(other)) == 1:
+ other == other[0]
+ elif len(other) != 3:
+ raise ValueError('Addition by list/array requires 3'
+ 'multipliers for x/y/z coordinates '
+ f'got {len(other)}')
+
+ # If a number, consider this an offset for coordinates
+ n = self.copy() if copy else self
+ n.nodes[['x', 'y', 'z']] += other
+
+ # Do the connectors
+ if n.has_connectors:
+ n.connectors[['x', 'y', 'z']] += other
+
+ n._clear_temp_attr(exclude=['classify_nodes'])
+ return n
+ # If another neuron, return a list of neurons
+ elif isinstance(other, BaseNeuron):
+ return core.NeuronList([self, other])
+ return NotImplemented
+
+ def __sub__(self, other, copy=True):
+ """Implement subtraction for coordinates (nodes, connectors)."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ if utils.is_iterable(other):
+ # If offset is isotropic use only single value
+ if len(set(other)) == 1:
+ other = other[0]
+ elif len(other) != 3:
+ raise ValueError('Subtraction by list/array requires 3 '
+ 'offset values for x/y/z coordinates, '
+ f'got {len(other)}')
+
+ # If a number, consider this an offset for coordinates
+ n = self.copy() if copy else self
+ n.nodes[['x', 'y', 'z']] -= other
+
+ # Do the connectors
+ if n.has_connectors:
+ n.connectors[['x', 'y', 'z']] -= other
+
+ n._clear_temp_attr(exclude=['classify_nodes'])
+ return n
+ return NotImplemented
+
def __getstate__(self):
"""Get state (used e.g. for pickling)."""
state = {k: v for k, v in self.__dict__.items() if not callable(v)}
@@ -298,6 +356,12 @@ def adjacency_matrix(self):
self._adjacency_matrix = graph.skeleton_adjacency_matrix(self)
return self._adjacency_matrix
+ @property
+ @requires_nodes
+ def vertices(self) -> np.ndarray:
+ """Vertices of the skeleton."""
+ return self.nodes[['x', 'y', 'z']].values
+
@property
@requires_nodes
def edges(self) -> np.ndarray:
@@ -313,6 +377,7 @@ def edges(self) -> np.ndarray:
return not_root[['node_id', 'parent_id']].values
@property
+ @requires_nodes
def edge_coords(self) -> np.ndarray:
"""Coordinates of edges between nodes.
@@ -624,6 +689,7 @@ def n_leafs(self) -> Optional[int]:
@property
@temp_property
+ @add_units(compact=True)
def cable_length(self) -> Union[int, float]:
"""Cable length."""
if not hasattr(self, '_cable_length'):
@@ -631,6 +697,7 @@ def cable_length(self) -> Union[int, float]:
return self._cable_length
@property
+ @add_units(compact=True, power=2)
def surface_area(self) -> float:
"""Radius-based lateral surface area."""
if 'radius' not in self.nodes.columns:
@@ -657,6 +724,7 @@ def surface_area(self) -> float:
return (np.pi * (r1 + r2) * np.sqrt( (r1-r2)**2 + h**2)).sum()
@property
+ @add_units(compact=True, power=3)
def volume(self) -> float:
"""Radius-based volume."""
if 'radius' not in self.nodes.columns:
@@ -700,7 +768,12 @@ def bbox(self) -> np.ndarray:
@property
def sampling_resolution(self) -> float:
"""Average cable length between child -> parent nodes."""
- return self.cable_length / self.n_nodes
+ res = self.cable_length / self.n_nodes
+
+ if isinstance(res, pint.Quantity):
+ res = res.to_compact()
+
+ return res
@property
@temp_property
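The new tuple branch in the constructor above can be exercised like this (a sketch assuming this change is applied):

```python
import navis
import numpy as np

# A simple chain of four nodes
verts = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0], [2, 1, 0]], dtype=float)
edges = np.array([(0, 1), (1, 2), (2, 3)])

# The (vertices, edges) tuple is routed through navis.edges2neuron
sk = navis.TreeNeuron((verts, edges), units='nm')
print(sk.n_nodes)  # 4
```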
diff --git a/navis/core/volumes.py b/navis/core/volumes.py
index dcd33f05..14a0de60 100644
--- a/navis/core/volumes.py
+++ b/navis/core/volumes.py
@@ -86,7 +86,7 @@ def __init__(
# Trimesh return a navis.Volume instead of a trimesh.Trimesh
for f in dir(trimesh.Trimesh):
# Don't mess with magic/private methods
- if f.startswith('_'):
+ if f.startswith("_"):
continue
# Skip properties
if not callable(getattr(trimesh.Trimesh, f)):
@@ -96,38 +96,40 @@ def __init__(
@property
def name(self):
"""Name of this volume."""
- return self.metadata.get('name')
+ return self.metadata.get("name")
@name.setter
def name(self, value):
- self.metadata['name'] = value
+ self.metadata["name"] = value
@property
def color(self):
"""Color used for plotting."""
- return self.metadata.get('color')
+ return self.metadata.get("color")
@color.setter
def color(self, value):
- self.metadata['color'] = value
+ self.metadata["color"] = value
@property
def id(self):
"""ID of this volume."""
- return self.metadata.get('id')
+ return self.metadata.get("id")
@id.setter
def id(self, value):
- self.metadata['id'] = value
+ self.metadata["id"] = value
@classmethod
- def from_csv(cls,
- vertices: str,
- faces: str,
- name: Optional[str] = None,
- color: Union[str,
- Sequence[Union[int, float]]] = (.85, .85, .85, .2),
- volume_id: Optional[int] = None, **kwargs) -> 'Volume':
+ def from_csv(
+ cls,
+ vertices: str,
+ faces: str,
+ name: Optional[str] = None,
+ color: Union[str, Sequence[Union[int, float]]] = (0.85, 0.85, 0.85, 0.2),
+ volume_id: Optional[int] = None,
+ **kwargs,
+ ) -> "Volume":
"""Load volume from csv files containing vertices and faces.
Parameters
@@ -145,18 +147,19 @@ def from_csv(cls,
"""
if not os.path.isfile(vertices) or not os.path.isfile(faces):
- raise ValueError('File(s) not found.')
+ raise ValueError("File(s) not found.")
- with open(vertices, 'r') as f:
+ with open(vertices, "r") as f:
reader = csv.reader(f, **kwargs)
vertices = np.array([r for r in reader]).astype(float)
- with open(faces, 'r') as f:
+ with open(faces, "r") as f:
reader = csv.reader(f, **kwargs)
faces = np.array([r for r in reader]).astype(int)
- return cls(faces=faces, vertices=vertices, name=name, color=color,
- volume_id=volume_id)
+ return cls(
+ faces=faces, vertices=vertices, name=name, color=color, volume_id=volume_id
+ )
def to_csv(self, filename: str, **kwargs) -> None:
"""Save volume as two separated csv files containing vertices and faces.
@@ -170,17 +173,17 @@ def to_csv(self, filename: str, **kwargs) -> None:
Keyword arguments passed to `csv.reader`.
"""
- for data, suffix in zip([self.faces, self.vertices],
- ['_faces.csv', '_vertices.csv']):
- with open(filename + suffix, 'w') as csvfile:
+ for data, suffix in zip(
+ [self.faces, self.vertices], ["_faces.csv", "_vertices.csv"]
+ ):
+ with open(filename + suffix, "w") as csvfile:
writer = csv.writer(csvfile)
writer.writerows(data)
@classmethod
- def from_json(cls,
- filename: str,
- import_kwargs: Dict = {},
- **init_kwargs) -> 'Volume':
+ def from_json(
+ cls, filename: str, import_kwargs: Dict = {}, **init_kwargs
+ ) -> "Volume":
"""Load volume from json file containing vertices and faces.
Parameters
@@ -198,13 +201,12 @@ def from_json(cls,
"""
if not os.path.isfile(filename):
- raise ValueError('File not found.')
+ raise ValueError("File not found.")
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
data = json.load(f, **import_kwargs)
- return cls(faces=data['faces'],
- vertices=data['vertices'], **init_kwargs)
+ return cls(faces=data["faces"], vertices=data["vertices"], **init_kwargs)
@classmethod
def from_object(cls, obj: Any, **init_kwargs) -> "Volume":
@@ -223,16 +225,15 @@ def from_object(cls, obj: Any, **init_kwargs) -> "Volume":
navis.Volume
"""
- if not hasattr(obj, 'vertices') or not hasattr(obj, 'faces'):
- raise ValueError('Object must have faces and vertices attributes.')
+ if not hasattr(obj, "vertices") or not hasattr(obj, "faces"):
+ raise ValueError("Object must have faces and vertices attributes.")
return cls(faces=obj.faces, vertices=obj.vertices, **init_kwargs)
@classmethod
- def from_file(cls,
- filename: str,
- import_kwargs: Dict = {},
- **init_kwargs) -> 'Volume':
+ def from_file(
+ cls, filename: str, import_kwargs: Dict = {}, **init_kwargs
+ ) -> "Volume":
"""Load volume from file.
Parameters
@@ -253,20 +254,22 @@ def from_file(cls,
"""
if not os.path.isfile(filename):
- raise ValueError('File not found.')
+ raise ValueError("File not found.")
f, ext = os.path.splitext(filename)
- if ext == '.json':
- return cls.from_json(filename=filename,
- import_kwargs=import_kwargs,
- **init_kwargs)
+ if ext == ".json":
+ return cls.from_json(
+ filename=filename, import_kwargs=import_kwargs, **init_kwargs
+ )
try:
import trimesh
- except ImportError:
- raise ImportError('Unable to import: trimesh missing - please '
- 'install: "pip install trimesh"')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ "Unable to import: trimesh missing - please "
+ 'install: "pip install trimesh"'
+ )
except BaseException:
raise
@@ -283,18 +286,18 @@ def to_json(self, filename: str) -> None:
Filename to use.
"""
- with open(filename, 'w') as f:
- json.dump({'vertices': self.vertices.tolist(),
- 'faces': self.faces.tolist()},
- f)
+ with open(filename, "w") as f:
+ json.dump(
+ {"vertices": self.vertices.tolist(), "faces": self.faces.tolist()}, f
+ )
@classmethod
- def combine(cls,
- x: Sequence['Volume'],
- name: str = 'comb_vol',
- color: Union[str,
- Sequence[Union[int, float]]] = (.85, .85, .85, .2)
- ) -> 'Volume':
+ def combine(
+ cls,
+ x: Sequence["Volume"],
+ name: str = "comb_vol",
+ color: Union[str, Sequence[Union[int, float]]] = (0.85, 0.85, 0.85, 0.2),
+ ) -> "Volume":
"""Merge multiple volumes into a single object.
Parameters
@@ -320,7 +323,7 @@ def combine(cls,
x = [x] # type: ignore
if False in [isinstance(v, Volume) for v in x]:
- raise TypeError('Input must be list of volumes')
+ raise TypeError("Input must be list of volumes")
vertices: np.ndarray = np.empty((0, 3))
faces: List[List[int]] = []
@@ -329,8 +332,7 @@ def combine(cls,
for vol in x:
offs = len(vertices)
vertices = np.append(vertices, vol.vertices, axis=0)
- faces += [[f[0] + offs, f[1] + offs, f[2] + offs]
- for f in vol.faces]
+ faces += [[f[0] + offs, f[1] + offs, f[2] + offs] for f in vol.faces]
return cls(vertices=vertices, faces=faces, name=name, color=color)
@@ -371,28 +373,29 @@ def __repr__(self):
"""
elements = []
- if hasattr(self, 'name'):
+ if hasattr(self, "name"):
# for Trimesh
- elements.append(f'name={self.name}')
- if hasattr(self, 'id') and not isinstance(self.id, uuid.UUID):
+ elements.append(f"name={self.name}")
+ if hasattr(self, "id") and not isinstance(self.id, uuid.UUID):
# for Trimesh
- elements.append(f'id={self.id}')
- if hasattr(self, 'color'):
+ elements.append(f"id={self.id}")
+ elements.append(f"units={self.units}")
+ if hasattr(self, "color"):
# for Trimesh
- elements.append(f'color={self.color}')
- if hasattr(self, 'vertices'):
+ elements.append(f"color={self.color}")
+ if hasattr(self, "vertices"):
# for Trimesh and PointCloud
- elements.append(f'vertices.shape={self.vertices.shape}')
- if hasattr(self, 'faces'):
+ elements.append(f"vertices.shape={self.vertices.shape}")
+ if hasattr(self, "faces"):
# for Trimesh
- elements.append(f'faces.shape={self.faces.shape}')
+ elements.append(f"faces.shape={self.faces.shape}")
return f'<navis.Volume({", ".join(elements)})>'
def __truediv__(self, other):
"""Implement division for vertices."""
if isinstance(other, numbers.Number) or utils.is_iterable(other):
n = self.copy()
- _ = np.divide(n.vertices, other, out=n.vertices, casting='unsafe')
+ _ = np.divide(n.vertices, other, out=n.vertices, casting="unsafe")
return n
return NotImplemented
@@ -400,17 +403,37 @@ def __mul__(self, other):
"""Implement multiplication for vertices."""
if isinstance(other, numbers.Number) or utils.is_iterable(other):
n = self.copy()
- _ = np.multiply(n.vertices, other, out=n.vertices, casting='unsafe')
+ _ = np.multiply(n.vertices, other, out=n.vertices, casting="unsafe")
return n
return NotImplemented
- def resize(self,
- x: Union[float, int],
- method: Union[Literal['center'],
- Literal['centroid'],
- Literal['normals'],
- Literal['origin']] = 'center',
- inplace: bool = False) -> Optional['Volume']:
+ def __add__(self, other):
+ """Implement addition for vertices."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ n = self.copy()
+ _ = np.add(n.vertices, other, out=n.vertices, casting="unsafe")
+ return n
+ return NotImplemented
+
+ def __sub__(self, other):
+ """Implement subtraction for vertices."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ n = self.copy()
+ _ = np.subtract(n.vertices, other, out=n.vertices, casting="unsafe")
+ return n
+ return NotImplemented
+
+ def resize(
+ self,
+ x: Union[float, int],
+ method: Union[
+ Literal["center"],
+ Literal["centroid"],
+ Literal["normals"],
+ Literal["origin"],
+ ] = "center",
+ inplace: bool = False,
+ ) -> Optional["Volume"]:
"""Resize volume.
Parameters
@@ -457,25 +480,27 @@ def resize(self,
method = method.lower()
- perm_methods = ['center', 'origin', 'normals', 'centroid']
+ perm_methods = ["center", "origin", "normals", "centroid"]
if method not in perm_methods:
- raise ValueError(f'Unknown method "{method}". Allowed '
- f'methods: {", ".join(perm_methods)}')
+ raise ValueError(
+ f'Unknown method "{method}". Allowed '
+ f'methods: {", ".join(perm_methods)}'
+ )
if not inplace:
v = self.copy()
else:
v = self
- if method == 'normals':
+ if method == "normals":
v.vertices = v.vertices + (v.vertex_normals * x)
else:
# Get the center
- if method == 'center':
+ if method == "center":
cn = np.mean(v.vertices, axis=0)
- elif method == 'centroid':
+ elif method == "centroid":
cn = v.centroid
- elif method == 'origin':
+ elif method == "origin":
cn = np.array([0, 0, 0])
# Get vector from center to each vertex
@@ -488,8 +513,8 @@ def resize(self,
v.vertices = vec + cn
# Make sure to reset any pyoctree data on this volume
- if hasattr(v, 'pyoctree'):
- delattr(v, 'pyoctree')
+ if hasattr(v, "pyoctree"):
+ delattr(v, "pyoctree")
if not inplace:
return v
@@ -517,8 +542,8 @@ def plot3d(self, **kwargs):
"""
from .. import plotting
- if 'color' in kwargs:
- self.color = kwargs['color']
+ if "color" in kwargs:
+ self.color = kwargs["color"]
return plotting.plot3d(self, **kwargs)
@@ -545,19 +570,18 @@ def _outlines_3d(self, view="xy", **kwargs):
"""
co2d = np.array(self.to_2d(view=view, **kwargs))
- if view in ['xy', 'yx']:
+ if view in ["xy", "yx"]:
third = np.repeat(self.center[2], co2d.shape[0])
- elif view in ['xz', 'zx']:
+ elif view in ["xz", "zx"]:
third = np.repeat(self.center[1], co2d.shape[0])
- elif view in ['yz', 'zy']:
+ elif view in ["yz", "zy"]:
third = np.repeat(self.center[0], co2d.shape[0])
return np.append(co2d, third.reshape(co2d.shape[0], 1), axis=1)
- def to_2d(self,
- alpha: float = 0.00017,
- view: tuple = ('x', 'y'),
- invert_y: bool = False) -> Sequence[Union[float, int]]:
+ def to_2d(
+ self, alpha: float = 0.00017, view: tuple = ("x", "y"), invert_y: bool = False
+ ) -> Sequence[Union[float, int]]:
"""Compute the 2d alpha shape (concave hull) this volume.
Uses Scipy Delaunay and shapely.
@@ -587,7 +611,7 @@ def add_edge(edges, edge_points, coords, i, j):
edges.add((i, j))
edge_points.append(coords[[i, j]])
- accepted_views = ['x', 'z', 'y', '-x', '-z', '-y']
+ accepted_views = ["x", "z", "y", "-x", "-z", "-y"]
for ax in view:
if ax not in accepted_views:
@@ -596,15 +620,15 @@ def add_edge(edges, edge_points, coords, i, j):
try:
from shapely.ops import unary_union, polygonize # type: ignore
import shapely.geometry as geometry # type: ignore
- except ImportError:
- raise ImportError('This function needs the shapely>=1.8.0')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError("This function needs the shapely>=1.8.0")
coords: np.ndarray
- map = {'x': 0, 'y': 1, 'z': 2}
+ map = {"x": 0, "y": 1, "z": 2}
- x_ix = map[view[0].replace('-', '').replace('+', '')]
- y_ix = map[view[1].replace('-', '').replace('+', '')]
+ x_ix = map[view[0].replace("-", "").replace("+", "")]
+ y_ix = map[view[1].replace("-", "").replace("+", "")]
coords = self.vertices[:, [x_ix, y_ix]]
@@ -614,14 +638,14 @@ def add_edge(edges, edge_points, coords, i, j):
# loop over triangles:
# ia, ib, ic = indices of corner points of the triangle
# Note that "vertices" property was renamed to "simplices"
- for ia, ib, ic in getattr(tri, 'simplices', getattr(tri, 'vertices', [])):
+ for ia, ib, ic in getattr(tri, "simplices", getattr(tri, "vertices", [])):
pa: np.ndarray = coords[ia] # type: ignore
pb: np.ndarray = coords[ib] # type: ignore
pc: np.ndarray = coords[ic] # type: ignore
# Lengths of sides of triangle
- a = math.sqrt((pa[0] - pb[0])**2 + (pa[1] - pb[1])**2) # type: ignore
- b = math.sqrt((pb[0] - pc[0])**2 + (pb[1] - pc[1])**2) # type: ignore
- c = math.sqrt((pc[0] - pa[0])**2 + (pc[1] - pa[1])**2) # type: ignore
+ a = math.sqrt((pa[0] - pb[0]) ** 2 + (pa[1] - pb[1]) ** 2) # type: ignore
+ b = math.sqrt((pb[0] - pc[0]) ** 2 + (pb[1] - pc[1]) ** 2) # type: ignore
+ c = math.sqrt((pc[0] - pa[0]) ** 2 + (pc[1] - pa[1]) ** 2) # type: ignore
# Semiperimeter of triangle
s = (a + b + c) / 2.0
# Area of triangle by Heron's formula
@@ -653,9 +677,11 @@ def validate(self):
self.fill_holes()
self.fix_normals()
if not self.is_volume:
- raise utils.VolumeError("Mesh is not a volume "
- "(e.g. not watertight, incorrect "
- "winding) and could not be fixed.")
+ raise utils.VolumeError(
+ "Mesh is not a volume "
+ "(e.g. not watertight, incorrect "
+ "winding) and could not be fixed."
+ )
def _force_volume(f):
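Quick sketch of the new vertex offset operators on `navis.Volume` (assumes this change is applied; the tetrahedron is made up):

```python
import navis
import numpy as np

# A tetrahedron as a navis.Volume
verts = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=float)
faces = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])
vol = navis.Volume(vertices=verts, faces=faces, name='tetrahedron')

# The new __add__/__sub__ offset the vertices; * and / scale them
moved = vol + [10, 0, 0]
scaled = vol * 2
print(moved.vertices[:, 0].min())  # 10.0
```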
diff --git a/navis/core/voxel.py b/navis/core/voxel.py
index ba28e829..4a3e5cc3 100644
--- a/navis/core/voxel.py
+++ b/navis/core/voxel.py
@@ -23,11 +23,11 @@
from .. import utils, config
from .base import BaseNeuron
-from .core_utils import temp_property
+from .core_utils import temp_property, add_units
try:
import xxhash
-except ImportError:
+except ModuleNotFoundError:
xxhash = None
@@ -143,7 +143,7 @@ def __setstate__(self, d):
self.__dict__.update(d)
def __truediv__(self, other, copy=True):
- """Implement division for coordinates (units, connectors)."""
+ """Implement division for coordinates (units, connectors, offset)."""
if isinstance(other, numbers.Number) or utils.is_iterable(other):
# If a number, consider this an offset for coordinates
n = self.copy() if copy else self
@@ -165,7 +165,7 @@ def __truediv__(self, other, copy=True):
return NotImplemented
def __mul__(self, other, copy=True):
- """Implement multiplication for coordinates (units, connectors)."""
+ """Implement multiplication for coordinates (units, connectors, offset)."""
if isinstance(other, numbers.Number) or utils.is_iterable(other):
# If a number, consider this an offset for coordinates
n = self.copy() if copy else self
@@ -186,6 +186,36 @@ def __mul__(self, other, copy=True):
return n
return NotImplemented
+ def __add__(self, other, copy=True):
+ """Implement addition for coordinates (offset, connectors)."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ # If a number, consider this an offset for coordinates
+ n = self.copy() if copy else self
+
+ n.offset = n.offset + other
+ if n.has_connectors:
+ n.connectors.loc[:, ['x', 'y', 'z']] += other
+
+ n._clear_temp_attr()
+
+ return n
+ return NotImplemented
+
+ def __sub__(self, other, copy=True):
+ """Implement subtraction for coordinates (offset, connectors)."""
+ if isinstance(other, numbers.Number) or utils.is_iterable(other):
+ # If a number, consider this an offset for coordinates
+ n = self.copy() if copy else self
+
+ n.offset = n.offset - other
+ if n.has_connectors:
+ n.connectors.loc[:, ['x', 'y', 'z']] -= other
+
+ n._clear_temp_attr()
+
+ return n
+ return NotImplemented
+
@property
def _base_data_type(self) -> str:
"""Type of data (grid or voxels) underlying this neuron."""
@@ -218,12 +248,12 @@ def bbox(self) -> np.ndarray:
return np.vstack((mn, mx)).T
@property
+ @add_units(compact=True, power=3)
def volume(self) -> float:
"""Volume of neuron."""
# Get volume of a single voxel
- voxel_volume = self.units_xyz[0] * self.units_xyz[2] * self.units_xyz[2]
+ voxel_volume = self.units_xyz[0] * self.units_xyz[1] * self.units_xyz[2]
- voxel_volume = voxel_volume.to_compact()
- return self.voxels.shape[0] * voxel_volume
+ return (self.nnz * voxel_volume).to_compact()
@property
@temp_property
@@ -344,6 +374,25 @@ def type(self) -> str:
"""Neuron type."""
return 'navis.VoxelNeuron'
+ @property
+ def density(self) -> float:
+ """Fraction of filled voxels."""
+ return self.nnz / np.prod(self.shape)
+
+ @property
+ def nnz(self) -> int:
+ """Number of non-zero voxels."""
+ return self.count_nonzero()
+
+ def count_nonzero(self) -> int:
+ """Count non-zero voxels."""
+ if self._base_data_type == "grid":
+ return np.count_nonzero(self.grid)
+ elif self._base_data_type == "voxels":
+ return np.count_nonzero(self.values)
+
+ raise TypeError(f"Unexpected data type: {self._base_data_type}")
+
def copy(self) -> 'VoxelNeuron':
"""Return a copy of the neuron."""
no_copy = ['_lock']
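Sketch of the new `nnz`/`density` accessors, assuming a `navis` install with this change:

```python
import navis
import numpy as np

# Build a small VoxelNeuron from a dense grid (non-zero values count as filled)
grid = np.zeros((10, 10, 10), dtype=np.float32)
grid[4:6, 4:6, 4:6] = 1
vx = navis.VoxelNeuron(grid, units='um')

print(vx.nnz)      # 8 non-zero voxels
print(vx.density)  # 8 / 1000 = 0.008
```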
diff --git a/navis/graph/__init__.py b/navis/graph/__init__.py
index 685ba8b6..05348c0e 100644
--- a/navis/graph/__init__.py
+++ b/navis/graph/__init__.py
@@ -16,6 +16,7 @@
network2igraph,
neuron2igraph,
nx2neuron,
+ edges2neuron,
neuron2nx,
neuron2KDTree,
neuron2tangents,
@@ -66,6 +67,7 @@
"network2igraph",
"neuron2igraph",
"nx2neuron",
+ "edges2neuron",
"neuron2nx",
"neuron2KDTree",
"neuron2tangents",
diff --git a/navis/graph/converters.py b/navis/graph/converters.py
index 1b626942..aa0f7edd 100644
--- a/navis/graph/converters.py
+++ b/navis/graph/converters.py
@@ -21,7 +21,7 @@
try:
import igraph
-except ImportError:
+except ModuleNotFoundError:
igraph = None
from .. import config, core
@@ -29,12 +29,23 @@
# Set up logging
logger = config.get_logger(__name__)
-__all__ = sorted(['network2nx', 'network2igraph', 'neuron2igraph', 'nx2neuron',
- 'neuron2nx', 'neuron2KDTree', 'neuron2tangents', "simplify_graph"])
+__all__ = sorted(
+ [
+ "network2nx",
+ "network2igraph",
+ "neuron2igraph",
+ "nx2neuron",
+ "edges2neuron",
+ "neuron2nx",
+ "neuron2KDTree",
+ "neuron2tangents",
+ "simplify_graph",
+ ]
+)
-def neuron2tangents(x: 'core.NeuronObject') -> 'core.Dotprops':
- """Turn TreeNeuron into an tangent vectors.
+def neuron2tangents(x: "core.NeuronObject") -> "core.Dotprops":
+ """Turn skeleton(s) into points + tangent vectors.
This will drop zero-length vectors (i.e when node and parent occupy the
exact same position).
@@ -68,16 +79,17 @@ def neuron2tangents(x: 'core.NeuronObject') -> 'core.Dotprops':
nodes = x.nodes[x.nodes.parent_id >= 0]
# Get child->parent vectors
- parent_locs = x.nodes.set_index('node_id').loc[nodes.parent_id,
- ['x', 'y', 'z']].values
- child_locs = nodes[['x', 'y', 'z']].values
+ parent_locs = (
+ x.nodes.set_index("node_id").loc[nodes.parent_id, ["x", "y", "z"]].values
+ )
+ child_locs = nodes[["x", "y", "z"]].values
vect = child_locs - parent_locs
# Get mid point
points = child_locs + (parent_locs - child_locs) / 2
# Get length
- length = np.sqrt(np.sum(vect ** 2, axis=1))
+ length = np.sqrt(np.sum(vect**2, axis=1))
# Drop zero length points
points = points[length != 0]
@@ -90,9 +102,11 @@ def neuron2tangents(x: 'core.NeuronObject') -> 'core.Dotprops':
return points, vect, length
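Example usage of `neuron2tangents`, assuming the navis example data is available:

```python
import navis

n = navis.example_neurons(1, kind='skeleton')

# Points are the segment midpoints, vect the child->parent vectors
points, vect, length = navis.neuron2tangents(n)
print(points.shape, vect.shape, length.shape)
```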
-def network2nx(x: Union[pd.DataFrame, Iterable],
- threshold: Optional[float] = None,
- group_by: Union[dict, None] = None) -> nx.DiGraph:
+def network2nx(
+ x: Union[pd.DataFrame, Iterable],
+ threshold: Optional[float] = None,
+ group_by: Union[dict, None] = None,
+) -> nx.DiGraph:
"""Generate NetworkX graph from edge list or adjacency.
Parameters
@@ -117,22 +131,24 @@ def network2nx(x: Union[pd.DataFrame, Iterable],
"""
if isinstance(x, pd.DataFrame):
- present = [c in x.columns for c in ['source', 'target', 'weight']]
+ present = [c in x.columns for c in ["source", "target", "weight"]]
if all(present):
- edges = x[['source', 'target', 'weight']].values
+ edges = x[["source", "target", "weight"]].values
else:
# Assume it's an adjacency matrix
- ix_name = x.index.name if x.index.name else 'index'
- edges = x.reset_index(inplace=False,
- drop=False).melt(id_vars=ix_name).values
+ ix_name = x.index.name if x.index.name else "index"
+ edges = (
+ x.reset_index(inplace=False, drop=False).melt(id_vars=ix_name).values
+ )
elif isinstance(x, (list, np.ndarray)):
edges = np.array(x)
else:
raise TypeError(f'Expected numpy array or pandas DataFrame, got "{type(x)}"')
if edges.ndim != 2 or edges.shape[1] != 3:
- raise ValueError('Edges must be (N, 3) array containing source, '
- 'target, weight')
+ raise ValueError(
+ "Edges must be (N, 3) array containing source, " "target, weight"
+ )
if not isinstance(threshold, (type(None), bool)):
edges = edges[edges[:, 2] >= threshold]
@@ -149,13 +165,14 @@ def network2nx(x: Union[pd.DataFrame, Iterable],
g = nx.contracted_nodes(g, str(skids[0]), str(s))
# Now relabel the first node
g = nx.relabel_nodes(g, {str(skids[0]): str(n)})
- g.nodes[str(n)]['neuron_name'] = str(n)
+ g.nodes[str(n)]["neuron_name"] = str(n)
return g
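A minimal `network2nx` sketch from an edge list, assuming `navis` and `pandas` are installed:

```python
import navis
import pandas as pd

# Edge list with the expected source/target/weight columns
edges = pd.DataFrame([('A', 'B', 5), ('B', 'C', 2), ('A', 'C', 1)],
                     columns=['source', 'target', 'weight'])
g = navis.network2nx(edges, threshold=2)
print(g.number_of_edges())  # 2 - the A->C edge falls below the threshold
```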
-def network2igraph(x: Union[pd.DataFrame, Iterable],
- threshold: Optional[float] = None) -> 'igraph.Graph':
+def network2igraph(
+ x: Union[pd.DataFrame, Iterable], threshold: Optional[float] = None
+) -> "igraph.Graph":
"""Generate iGraph graph from edge list or adjacency.
Requires iGraph to be installed.
@@ -179,24 +196,27 @@ def network2igraph(x: Union[pd.DataFrame, Iterable],
"""
if igraph is None:
- raise ImportError('igraph must be installed to use this function.')
+ raise ModuleNotFoundError("igraph must be installed to use this function.")
if isinstance(x, pd.DataFrame):
- present = [c in x.columns for c in ['source', 'target', 'weight']]
+ present = [c in x.columns for c in ["source", "target", "weight"]]
if all(present):
- edges = x[['source', 'target', 'weight']].values
+ edges = x[["source", "target", "weight"]].values
else:
- edges = x.reset_index(inplace=False,
- drop=False).melt(id_vars='index',
- inplace=False).values
+ edges = (
+ x.reset_index(inplace=False, drop=False)
+ .melt(id_vars="index", inplace=False)
+ .values
+ )
elif isinstance(x, (list, np.ndarray)):
edges = np.array(x)
else:
raise TypeError(f'Expected numpy array or pandas DataFrame, got "{type(x)}"')
if edges.ndim != 2 or edges.shape[1] != 3:
- raise ValueError('Edges must be (N, 3) array containing source, '
- 'target, weight')
+ raise ValueError(
+ "Edges must be (N, 3) array containing source, " "target, weight"
+ )
if not isinstance(threshold, (type(None), bool)):
edges = edges[edges[:, 2] >= threshold]
@@ -210,14 +230,14 @@ def network2igraph(x: Union[pd.DataFrame, Iterable],
g.add_vertices(len(names))
g.add_edges(edges_by_index)
- g.vs['node_id'] = names
+ g.vs["node_id"] = names
# g.vs['neuron_name'] = g.vs['label'] = neuron_names
- g.es['weight'] = edges[:, 2]
+ g.es["weight"] = edges[:, 2]
return g
-def neuron2nx(x: 'core.NeuronObject', simplify=False) -> nx.DiGraph:
+def neuron2nx(x: "core.NeuronObject", simplify=False, epsilon=None) -> nx.DiGraph:
"""Turn Tree-, Mesh- or VoxelNeuron into an NetworkX graph.
Parameters
@@ -228,6 +248,10 @@ def neuron2nx(x: 'core.NeuronObject', simplify=False) -> nx.DiGraph:
For TreeNeurons only: simplify the graph by keeping only roots,
leaves and branching points. Preserves the original
branch lengths (i.e. weights).
+ epsilon : float, optional
+ For Dotprops only: maximum distance between two points to
+ connect them. If `None`, will use 5x the average distance
+ between points (i.e. `5 * x.sampling_resolution`).
Returns
-------
@@ -243,12 +267,20 @@ def neuron2nx(x: 'core.NeuronObject', simplify=False) -> nx.DiGraph:
if isinstance(x, core.TreeNeuron):
# Collect nodes
- nodes = x.nodes.set_index('node_id', inplace=False)
+ nodes = x.nodes.set_index("node_id", inplace=False)
# Collect edges
- edges = x.nodes[x.nodes.parent_id >= 0][['node_id', 'parent_id']].values
+ edges = x.nodes[x.nodes.parent_id >= 0][["node_id", "parent_id"]].values
# Collect weight
- weights = np.sqrt(np.sum((nodes.loc[edges[:, 0], ['x', 'y', 'z']].values.astype(float)
- - nodes.loc[edges[:, 1], ['x', 'y', 'z']].values.astype(float)) ** 2, axis=1))
+ weights = np.sqrt(
+ np.sum(
+ (
+ nodes.loc[edges[:, 0], ["x", "y", "z"]].values.astype(float)
+ - nodes.loc[edges[:, 1], ["x", "y", "z"]].values.astype(float)
+ )
+ ** 2,
+ axis=1,
+ )
+ )
# It's fastest to generate a list of (source, target, weight) tuples to pass to networkX
elist = [(e[0], e[1], l) for e, l in zip(edges, weights)]
# Create empty directed Graph
@@ -263,9 +295,22 @@ def neuron2nx(x: 'core.NeuronObject', simplify=False) -> nx.DiGraph:
elif isinstance(x, core.MeshNeuron):
G = nx.Graph()
G.add_nodes_from(np.arange(x.n_vertices))
- edges = [(e[0], e[1], l) for e, l in zip(x.trimesh.edges_unique,
- x.trimesh.edges_unique_length)]
+ edges = [
+ (e[0], e[1], l)
+ for e, l in zip(x.trimesh.edges_unique, x.trimesh.edges_unique_length)
+ ]
G.add_weighted_edges_from(edges)
+ elif isinstance(x, core.Dotprops):
+ if epsilon is None:
+ epsilon = 5 * x.sampling_resolution
+
+ # Generate KDTree
+ tree = neuron2KDTree(x)
+
+ # Generate graph and assign custom properties
+ G = nx.Graph()
+ G.add_nodes_from(np.arange(x.n_points))
+ G.add_edges_from(tree.query_pairs(epsilon))
elif isinstance(x, core.VoxelNeuron):
# First we need to determine the 6-connectivity between voxels
edges = []
@@ -277,8 +322,9 @@ def neuron2nx(x: 'core.NeuronObject', simplify=False) -> nx.DiGraph:
# Combine real and offset voxels
vox_off = x.voxels + offset
# Find out which voxels overlap (i.e. count == 2 after offset)
- unique, cnt = np.unique(np.append(x.voxels, vox_off, axis=0),
- axis=0, return_counts=True)
+ unique, cnt = np.unique(
+ np.append(x.voxels, vox_off, axis=0), axis=0, return_counts=True
+ )
connected = unique[cnt > 1]
for vox in connected:
@@ -287,7 +333,9 @@ def neuron2nx(x: 'core.NeuronObject', simplify=False) -> nx.DiGraph:
G.add_nodes_from([tuple(v) for v in x.voxels])
G.add_edges_from(edges)
else:
- raise ValueError(f'Unable to convert data of type "{type(x)}" to networkx graph.')
+ raise ValueError(
+ f'Unable to convert data of type "{type(x)}" to networkx graph.'
+ )
return G
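Sketch of the new Dotprops branch of `neuron2nx`, assuming this change plus a SciPy-backed KDTree (which provides `query_pairs`):

```python
import navis

dp = navis.make_dotprops(navis.example_neurons(1, kind='skeleton'), k=5)

# For Dotprops, nodes are point indices and edges connect points closer
# than `epsilon` (default: 5 x the average point spacing)
G = navis.neuron2nx(dp, epsilon=None)
print(G.number_of_nodes() == len(dp))  # True
```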
@@ -296,7 +344,7 @@ def simplify_graph(G, inplace=False):
"""Simplify skeleton graph (networkX or igraph).
This function will simplify the graph by keeping only roots, leafs and
- branch points. Preserves branch lengths (i.e. weights).
+ branch points. Preserves branch lengths (i.e. weights)!
Parameters
----------
@@ -342,7 +390,7 @@ def simplify_graph(G, inplace=False):
node = start_node
while True:
parent = next(G.successors(node))
- dist += G.edges[node, parent]['weight']
+ dist += G.edges[node, parent]["weight"]
if parent in stop_nodes:
G.add_weighted_edges_from([(start_node, parent, dist)])
@@ -367,7 +415,7 @@ def simplify_graph(G, inplace=False):
node = start_node
while True:
parent = G.successors(node)[0]
- dist += G.es[G.get_eid(node, parent)]['weight']
+ dist += G.es[G.get_eid(node, parent)]["weight"]
if parent in stop_nodes:
G.add_edge(start_node, parent, weight=dist)
@@ -401,19 +449,19 @@ def _voxels2edges(x, connectivity=18):
"""
# The distances and metric we will use depend on the connectedness
- METRICS = {6: 'manhattan',
- 18: 'euclidean',
- 26: 'chebyshev'}
- DISTANCES = {6: 1,
- 18: 1.5,
- 26: 1}
+ METRICS = {6: "manhattan", 18: "euclidean", 26: "chebyshev"}
+ DISTANCES = {6: 1, 18: 1.5, 26: 1}
try:
from sklearn.neighbors import KDTree
- except ImportError:
- raise ImportError('This function requires scikit-learn to be installed.')
-
- assert connectivity in (6, 18, 26), f'`connectivity` must be 6, 18 or 26, not "{connectivity}"'
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError("This function requires scikit-learn to be installed.")
+
+ assert connectivity in (
+ 6,
+ 18,
+ 26,
+ ), f'`connectivity` must be 6, 18 or 26, not "{connectivity}"'
assert isinstance(x, core.VoxelNeuron)
voxels = x.voxels
@@ -439,10 +487,12 @@ def _voxels2edges(x, connectivity=18):
return edges
-def neuron2igraph(x: 'core.NeuronObject',
- simplify: bool = False,
- connectivity: int = 18,
- raise_not_installed: bool = True) -> 'igraph.Graph':
+def neuron2igraph(
+ x: "core.NeuronObject",
+ simplify: bool = False,
+ connectivity: int = 18,
+ raise_not_installed: bool = True,
+) -> "igraph.Graph":
"""Turn Tree-, Mesh- or VoxelNeuron(s) into an iGraph graph.
Requires iGraph to be installed.
@@ -479,12 +529,16 @@ def neuron2igraph(x: 'core.NeuronObject',
if not raise_not_installed:
return None
else:
- raise ImportError('iGraph appears to not be installed (properly). '
- 'Make sure "import igraph" works.')
+ raise ModuleNotFoundError(
+ "iGraph appears to not be installed (properly). "
+ 'Make sure "import igraph" works.'
+ )
if isinstance(x, core.NeuronList):
- return [neuron2igraph(x.loc[i],
- connectivity=connectivity) for i in range(x.shape[0])]
+ return [
+ neuron2igraph(x.loc[i], connectivity=connectivity)
+ for i in range(x.shape[0])
+ ]
if isinstance(x, core.TreeNeuron):
# Make sure we have correctly numbered indices
@@ -496,14 +550,19 @@ def neuron2igraph(x: 'core.NeuronObject',
# Get list of edges as indices (needs to exclude root node)
tn_index_with_parent = nodes.index.values[nodes.parent_id >= 0]
parent_ids = nodes.parent_id.values[nodes.parent_id >= 0]
- nodes['temp_index'] = nodes.index # add temporary index column
+ nodes["temp_index"] = nodes.index # add temporary index column
try:
- parent_index = nodes.set_index('node_id', inplace=False).loc[parent_ids,
- 'temp_index'].values
+ parent_index = (
+ nodes.set_index("node_id", inplace=False)
+ .loc[parent_ids, "temp_index"]
+ .values
+ )
except KeyError:
miss = nodes[~nodes.parent_id.isin(nodes.node_id)].node_id.unique()
- raise KeyError(f"{len(miss)} nodes (e.g. {miss[0]}) in TreeNeuron "
- f"{x.id} connect to non-existent parent nodes.")
+ raise KeyError(
+ f"{len(miss)} nodes (e.g. {miss[0]}) in TreeNeuron "
+ f"{x.id} connect to non-existent parent nodes."
+ )
except BaseException:
raise
@@ -511,28 +570,28 @@ def neuron2igraph(x: 'core.NeuronObject',
elist = np.vstack((tn_index_with_parent, parent_index)).T
# iGraph < 0.8.0 does not like arrays as edge list
- if getattr(igraph, '__version_info__', (0, 0, 0))[1] < 8:
+ if getattr(igraph, "__version_info__", (0, 0, 0))[1] < 8:
elist = elist.tolist()
# Generate graph and assign custom properties
G = igraph.Graph(elist, n=len(vlist), directed=True)
- G.vs['node_id'] = G.vs['name'] = nodes.node_id.values
- G.vs['parent_id'] = nodes.parent_id.values
+ G.vs["node_id"] = G.vs["name"] = nodes.node_id.values
+ G.vs["parent_id"] = nodes.parent_id.values
# Generate weights by calculating edge lengths = distance between nodes
- tn_coords = nodes[['x', 'y', 'z']].values[tn_index_with_parent, :]
- parent_coords = nodes[['x', 'y', 'z']].values[parent_index.astype(int), :]
+ tn_coords = nodes[["x", "y", "z"]].values[tn_index_with_parent, :]
+ parent_coords = nodes[["x", "y", "z"]].values[parent_index.astype(int), :]
w = np.sqrt(np.sum((tn_coords - parent_coords) ** 2, axis=1))
- G.es['weight'] = w
+ G.es["weight"] = w
if simplify:
simplify_graph(G, inplace=True)
elif isinstance(x, core.MeshNeuron):
elist = x.trimesh.edges_unique
G = igraph.Graph(elist, n=x.n_vertices, directed=False)
- G.es['weight'] = x.trimesh.edges_unique_length
+ G.es["weight"] = x.trimesh.edges_unique_length
elif isinstance(x, core.VoxelNeuron):
edges = _voxels2edges(x, connectivity=connectivity)
G = igraph.Graph(edges, n=len(x.voxels), directed=False)
@@ -542,23 +601,24 @@ def neuron2igraph(x: 'core.NeuronObject',
return G
-def nx2neuron(g: nx.Graph,
- root: Optional[Union[int, str]] = None,
- break_cycles: bool = False,
- **kwargs
- ) -> pd.DataFrame:
- """Generate node table from NetworkX Graph.
+def nx2neuron(
+ G: nx.Graph,
+ root: Optional[Union[int, str]] = None,
+ break_cycles: bool = False,
+ **kwargs,
+) -> pd.DataFrame:
+ """Create TreeNeuron from NetworkX Graph.
This function will try to generate a neuron-like tree structure from
the Graph. Therefore the graph must not contain cycles!
- Node attributes (e.g. `x`, `y`, `z`, `radius`) need
- to be properties of the graph's nodes. All node property will be added to
+ All node attributes (e.g. `x`, `y`, `z`, `radius`) will be added to
the neuron's `.nodes` table.
Parameters
----------
- g : networkx.Graph
+ G : networkx.Graph
+ Graph to convert to neuron.
root : str | int | list, optional
Node in graph to use as root for neuron. If not provided,
will use first node in `G.nodes`. Ignored if graph
@@ -574,44 +634,64 @@ def nx2neuron(g: nx.Graph,
-------
TreeNeuron
+ Examples
+ --------
+ >>> import navis
+ >>> import networkx as nx
+ >>> G = nx.balanced_tree(2, 3)
+ >>> tn = navis.nx2neuron(G)
+ >>> tn
+ type navis.TreeNeuron
+ name None
+ n_nodes 15
+ n_connectors None
+ n_branches 6
+ n_leafs 8
+ cable_length 0.0
+ soma None
+ units 1 dimensionless
+ dtype: object
+
"""
# First some sanity checks
- if not isinstance(g, nx.Graph):
- raise TypeError(f'`g` must be NetworkX Graph, got "{type(g)}"')
+ if not isinstance(G, nx.Graph):
+ raise TypeError(f'`G` must be NetworkX Graph, got "{type(G)}"')
# We need an undirected Graph
- if isinstance(g, nx.DiGraph):
- g = g.to_undirected(as_view=True)
+ if isinstance(G, nx.DiGraph):
+ G = G.to_undirected(as_view=True)
- if not nx.is_forest(g):
+ if not nx.is_forest(G):
if not break_cycles:
- raise TypeError("Graph must be tree-like. You can try setting "
- "the `cut_cycles` parameter to True.")
+ raise TypeError(
+ "Graph must be tree-like. You can try setting "
+ "the `cut_cycles` parameter to True."
+ )
else:
if break_cycles:
while True:
try:
# Find cycle
- cycle = nx.find_cycle(g)
+ cycle = nx.find_cycle(G)
except nx.exception.NetworkXNoCycle:
break
except BaseException:
raise
# Sort by degree
- cycle = sorted(cycle, key=lambda x: g.degree[x[0]])
+ cycle = sorted(cycle, key=lambda x: G.degree[x[0]])
# Remove the edge with the lowest degree
- g.remove_edge(cycle[0][0], cycle[0][1])
+ G.remove_edge(cycle[0][0], cycle[0][1])
# Ignore root if this is a forest
- if not nx.is_tree(g):
+ if not nx.is_tree(G):
root = None
# This effectively makes sure that all edges point in the same direction
lop = {}
- for c in nx.connected_components(g):
- sg = nx.subgraph(g, c)
+ for c in nx.connected_components(G):
+ sg = nx.subgraph(G, c)
# Pick a random root if not explicitly provided
if not root:
r = list(sg.nodes)[0]
@@ -624,80 +704,178 @@ def nx2neuron(g: nx.Graph,
this_lop = nx.predecessor(sg, r)
# Make sure no node has more than one parent
- if max([len(v) for v in this_lop.values()]) > 1:
- raise ValueError('Nodes with multiple parents found. Make sure graph '
- 'is tree-like.')
+ if any((len(v) > 1 for v in this_lop.values())):
+ raise ValueError(
+ "Nodes with multiple parents found. Make sure graph is tree-like."
+ )
# Note that we assign -1 as root's parent
lop.update({k: v[0] if v else -1 for k, v in this_lop.items()})
# Generate node table
- tn_table = pd.DataFrame(index=list(g.nodes))
- tn_table.index = tn_table.index.set_names('node_id', inplace=False)
+ tn_table = pd.DataFrame(index=list(G.nodes))
+ tn_table.index = tn_table.index.set_names("node_id", inplace=False)
# Add parents - use -1 for root's parent
- tn_table['parent_id'] = tn_table.index.map(lop)
+ tn_table["parent_id"] = tn_table.index.map(lop)
try:
tn_table.index = tn_table.index.astype(int)
- tn_table['parent_id'] = tn_table.parent_id.astype(int)
+ tn_table["parent_id"] = tn_table.parent_id.astype(int)
except (ValueError, TypeError):
- raise ValueError('Node IDs must be convertible to integers.')
+ raise ValueError("Node IDs must be convertible to integers.")
except BaseException:
raise
# Add additional generic attribute -> will skip node_id and parent_id
# if they exist
- all_attr = set([k for n in g.nodes for k in g.nodes[n].keys()])
+ all_attr = set([k for n in G.nodes for k in G.nodes[n].keys()])
# Remove some that we don't need
- all_attr -= set(['parent_id', 'node_id'])
+ all_attr -= set(["parent_id", "node_id"])
# Add some that we want as columns even if they don't exist
- all_attr |= set(['x', 'y', 'z', 'radius'])
+ all_attr |= set(["x", "y", "z", "radius"])
# For some we want to have set default values
- defaults = {'x': 0, 'y': 0, 'z': 0, 'radius': -1}
+ defaults = {"x": 0, "y": 0, "z": 0, "radius": -1}
# Now map the attributes onto node table
for at in all_attr:
- vals = nx.get_node_attributes(g, at)
- tn_table[at] = tn_table.index.map(lambda a: vals.get(a, defaults.get(at)))
+ vals = nx.get_node_attributes(G, at)
+ tn_table[at] = tn_table.index.map(vals).fillna(defaults.get(at, None))
+
+ return core.TreeNeuron(tn_table.reset_index(drop=False, inplace=False), **kwargs)
- return core.TreeNeuron(tn_table.reset_index(drop=False, inplace=False),
- **kwargs)
+def edges2neuron(edges, vertices=None, validate=True, **kwargs):
+ """Create TreeNeuron from edges and (optional) vertex coordinates.
-def _find_all_paths(g: nx.DiGraph,
- start,
- end,
- mode: str = 'OUT',
- maxlen: Optional[int] = None) -> list:
+ Parameters
+ ----------
+ edges : (N, 2) array
+ Edges between vertices.
+ vertices : (N, 3) array, optional
+ Vertex positions. If not provided, will position
+ all vertices at (0, 0, 0).
+ validate : bool
+ If True (default), will fix issues with cycles
+ and edge orientation. Only skip this if you are
+ absolutely sure your data are good.
+ **kwargs
+ Additional keyword arguments are passed to
+ initialization of the TreeNeuron.
+
+ Returns
+ -------
+ TreeNeuron
+
+ Examples
+ --------
+
+ >>> import navis
+ >>> import numpy as np
+ >>> verts = np.random.rand(5, 3)
+ >>> edges = np.array([(0, 1), (1, 2), (2, 3), (2, 4)])
+ >>> sk = navis.edges2neuron(edges, vertices=verts)
+
+ """
+ # Make sure we're dealing with arrays
+ edges = np.asarray(edges)
+
+ if vertices is not None:
+ vertices = np.asarray(vertices)
+ else:
+ vertices = np.zeros((edges.max() + 1, 3))
+
+ if vertices.ndim != 2 or vertices.shape[1] != 3:
+ raise ValueError(
+ f"Expected `vertices` to be of shape (N, 3), got {vertices.shape}"
+ )
+ if edges.ndim != 2 or edges.shape[1] != 2:
+ raise ValueError(
+ f"Expected `edges` to be of shape (N, 2), got {edges.shape}"
+ )
+
+ if edges.max() > (len(vertices) - 1):
+ raise IndexError("vertex index out of range")
+
+ G = nx.Graph()
+ G.add_nodes_from(np.arange(len(vertices)))
+ G.add_edges_from(edges)
+
+ # Note: at this point we could just pass the graph to nx2neuron
+ # But because we know it came from vertices and edges, we
+ # can skip certain checks and make the process a bit faster
+
+ if validate:
+ if not nx.is_forest(G):
+ while True:
+ try:
+ # Find cycle
+ cycle = nx.find_cycle(G)
+ except nx.exception.NetworkXNoCycle:
+ break
+ except BaseException:
+ raise
+
+ # Sort by degree
+ cycle = sorted(cycle, key=lambda x: G.degree[x[0]])
+
+ # Remove the edge with the lowest degree
+ G.remove_edge(cycle[0][0], cycle[0][1])
+
+ parents = {}
+ for cc in nx.connected_components(G):
+ # If this is a disconnected node
+ if len(cc) == 1:
+ parents[cc.pop()] = -1
+ continue
+
+ sg = nx.subgraph(G, cc)
+ # Pick a random root
+ r = cc.pop()
+ # Generate child -> parent dictionary
+ this = nx.predecessor(sg, r)
+
+ # Update overall parent dictionary
+ # (note that we assign -1 as root's parent)
+ parents.update({k: v[0] if v else -1 for k, v in this.items()})
+
+ nodes = pd.DataFrame(vertices, columns=['x', 'y', 'z'])
+ nodes.insert(0, 'node_id', nodes.index)
+ nodes.insert(1, 'parent_id', nodes.index.map(parents))
+
+ return core.TreeNeuron(nodes, **kwargs)
+
+
+def _find_all_paths(
+ g: nx.DiGraph, start, end, mode: str = "OUT", maxlen: Optional[int] = None
+) -> list:
"""Find all paths between two vertices in an iGraph object.
For some reason this function exists in R iGraph but not Python iGraph. This
is rather slow and should not be used for large graphs.
"""
- def find_all_paths_aux(adjlist: List[set],
- start: int,
- end: int,
- path: list,
- maxlen: Optional[int] = None) -> list:
+
+ def find_all_paths_aux(
+ adjlist: List[set],
+ start: int,
+ end: int,
+ path: list,
+ maxlen: Optional[int] = None,
+ ) -> list:
path = path + [start]
if start == end:
return [path]
paths: list = []
if maxlen is None or len(path) <= maxlen:
for node in adjlist[start] - set(path):
- paths.extend(find_all_paths_aux(adjlist,
- node,
- end,
- path,
- maxlen))
+ paths.extend(find_all_paths_aux(adjlist, node, end, path, maxlen))
return paths
- adjlist = [set(g.neighbors(node, mode=mode))
- for node in range(g.vcount())]
+ adjlist = [set(g.neighbors(node, mode=mode)) for node in range(g.vcount())]
all_paths: list = []
start = start if isinstance(start, list) else [start]
end = end if isinstance(end, list) else [end]
@@ -707,11 +885,9 @@ def find_all_paths_aux(adjlist: List[set],
return all_paths
-def neuron2KDTree(x: 'core.NeuronObject',
- tree_type: str = 'c',
- data: str = 'auto',
- **kwargs) -> Union[scipy.spatial.cKDTree,
- scipy.spatial.KDTree]:
+def neuron2KDTree(
+ x: "core.NeuronObject", tree_type: str = "c", data: str = "auto", **kwargs
+) -> Union[scipy.spatial.cKDTree, scipy.spatial.KDTree]:
"""Turn neuron into scipy KDTree.
Parameters
@@ -738,42 +914,45 @@ def neuron2KDTree(x: 'core.NeuronObject',
`scipy.spatial.cKDTree` or `scipy.spatial.KDTree`
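+
+ Examples
+ --------
+ A quick sketch using one of the bundled example neurons:
+
+ >>> import navis
+ >>> n = navis.example_neurons(1)
+ >>> tree = navis.neuron2KDTree(n, data='nodes') # doctest: +SKIP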
"""
- if tree_type not in ['c', 'normal']:
+ if tree_type not in ["c", "normal"]:
raise ValueError('"tree_type" needs to be either "c" or "normal"')
if isinstance(x, core.NeuronList):
if len(x) == 1:
x = x[0]
else:
- raise ValueError('Need a single TreeNeuron')
+ raise ValueError("Need a single TreeNeuron")
elif not isinstance(x, core.BaseNeuron):
raise TypeError(f'Need Neuron, got "{type(x)}"')
- if data == 'auto':
+ if data == "auto":
if isinstance(x, core.TreeNeuron):
- data = 'nodes'
+ data = "nodes"
if isinstance(x, core.MeshNeuron):
- data = 'vertices'
+ data = "vertices"
if isinstance(x, core.VoxelNeuron):
- data = 'voxels'
+ data = "voxels"
if isinstance(x, core.Dotprops):
- data = 'points'
+ data = "points"
if not hasattr(x, data):
- raise ValueError(f'Neuron does not have a {data} property')
+ raise ValueError(f"Neuron does not have a {data} property")
data = getattr(x, data)
if isinstance(data, pd.DataFrame):
- if not all(np.isin(['x', 'y', 'z'], data.columns)):
- raise ValueError(f'"{data}" DataFrame must contain "x", "y" and '
- '"z" columns.')
- data = data[['x', 'y', 'z']].values
+ if not all(np.isin(["x", "y", "z"], data.columns)):
+ raise ValueError(
+ f'"{data}" DataFrame must contain "x", "y" and ' '"z" columns.'
+ )
+ data = data[["x", "y", "z"]].values
if not isinstance(data, np.ndarray) or data.ndim != 2 or data.shape[1] != 3:
- raise ValueError(f'"{data}" must be DataFrame or (N, 3) array, got {type(data)}')
+ raise ValueError(
+ f'"{data}" must be DataFrame or (N, 3) array, got {type(data)}'
+ )
- if tree_type == 'c':
+ if tree_type == "c":
return scipy.spatial.cKDTree(data=data, **kwargs)
else:
return scipy.spatial.KDTree(data=data, **kwargs)
diff --git a/navis/interfaces/allen_celltypes.py b/navis/interfaces/allen_celltypes.py
index 86e4e5f4..4c9bd693 100644
--- a/navis/interfaces/allen_celltypes.py
+++ b/navis/interfaces/allen_celltypes.py
@@ -18,14 +18,14 @@
try:
import allensdk
from allensdk.core.cell_types_cache import CellTypesCache
-except ImportError:
+except ModuleNotFoundError as e:
msg = dedent("""
allensdk library not found. Please install using pip:
pip install allensdk --no-deps
""")
- raise ImportError(msg)
+ raise ModuleNotFoundError(msg) from e
except BaseException:
raise
diff --git a/navis/interfaces/blender.py b/navis/interfaces/blender.py
index 70d840a4..b9c154dd 100644
--- a/navis/interfaces/blender.py
+++ b/navis/interfaces/blender.py
@@ -40,7 +40,7 @@
import bpy
import bmesh
import mathutils
-except ImportError:
+except ModuleNotFoundError:
logger.error('Unable to load Blender API - this module only works from '
'within Blender!')
except BaseException:
diff --git a/navis/interfaces/microns.py b/navis/interfaces/microns.py
index 4488e54f..5646030e 100644
--- a/navis/interfaces/microns.py
+++ b/navis/interfaces/microns.py
@@ -28,7 +28,7 @@
try:
from caveclient import CAVEclient
import cloudvolume as cv
-except ImportError:
+except ModuleNotFoundError:
config.logger.error(err_msg)
CAVEclient = None
cv = None
@@ -76,7 +76,7 @@ def get_datastacks(microns_only=True):
"""
if not CAVEclient:
- raise ImportError(err_msg)
+ raise ModuleNotFoundError(err_msg)
stacks = CAVEclient().info.get_datastacks()
@@ -98,7 +98,7 @@ def get_cave_client(datastack="cortex65"):
"""
if not CAVEclient:
- raise ImportError(err_msg)
+ raise ModuleNotFoundError(err_msg)
# Try mapping, else pass-through
datastack = _translate_datastack(datastack)
diff --git a/navis/interfaces/neuprint.py b/navis/interfaces/neuprint.py
index 9d8d0b35..e82237d5 100644
--- a/navis/interfaces/neuprint.py
+++ b/navis/interfaces/neuprint.py
@@ -28,14 +28,14 @@
# remove neuprint's own fetch_skeleton function to avoid confusion
del fetch_skeleton # noqa
from neuprint.client import inject_client
-except ImportError:
+except ModuleNotFoundError:
msg = dedent("""
neuprint library not found. Please install using pip:
pip install neuprint-python
""")
- raise ImportError(msg)
+ raise ModuleNotFoundError(msg)
except BaseException:
raise
@@ -289,7 +289,7 @@ def fetch_mesh_neuron(x, *, lod=1, with_synapses=False, missing_mesh='raise',
n.soma_radius = radii[n.id] / n.units.to('nm').magnitude
else:
n.soma_radius = None
- n.soma = n.somaLocation
+ n.soma_pos = n.somaLocation
if with_synapses:
# Fetch synapses
diff --git a/navis/interfaces/neuron/comp.py b/navis/interfaces/neuron/comp.py
index 11130c7a..842e36d8 100644
--- a/navis/interfaces/neuron/comp.py
+++ b/navis/interfaces/neuron/comp.py
@@ -97,10 +97,11 @@
# We will belay any import error
try:
import neuron
-except ImportError:
- raise ImportError('This interface requires the `neuron` libary to be '
- 'installed:\n pip3 install neuron\n'
- 'See also https://neuron.yale.edu/neuron/')
+except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ 'This interface requires the `neuron` libary to be '
+ 'installed:\n pip3 install neuron\n'
+ 'See also https://neuron.yale.edu/neuron/')
from neuron.units import ms, mV
neuron.h.load_file('stdrun.hoc')
diff --git a/navis/interfaces/neuron/network.py b/navis/interfaces/neuron/network.py
index 67e4f33d..5c2f4d17 100644
--- a/navis/interfaces/neuron/network.py
+++ b/navis/interfaces/neuron/network.py
@@ -29,10 +29,11 @@
# We will belay any import error
try:
import neuron
-except ImportError:
- raise ImportError('This interface requires the `neuron` libary to be '
- 'installed:\n pip3 install neuron\n'
- 'See also https://neuron.yale.edu/neuron/')
+except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ 'This interface requires the `neuron` libary to be '
+ 'installed:\n pip3 install neuron\n'
+ 'See also https://neuron.yale.edu/neuron/')
from neuron.units import ms, mV
neuron.h.load_file('stdrun.hoc')
diff --git a/navis/interfaces/vfb.py b/navis/interfaces/vfb.py
index 014aaf71..f36f5892 100644
--- a/navis/interfaces/vfb.py
+++ b/navis/interfaces/vfb.py
@@ -18,14 +18,14 @@
try:
from vfb_connect.cross_server_tools import VfbConnect
vc = VfbConnect(neo_endpoint='http://pdb.v4.virtualflybrain.org', neo_credentials=('neo4j', 'vfb'))
-except ImportError:
+except ModuleNotFoundError:
msg = dedent("""
vfb_connect library not found. Please install using pip:
pip install vfb_connect
""")
- raise ImportError(msg)
+ raise ModuleNotFoundError(msg)
except BaseException:
raise
diff --git a/navis/intersection/intersect.py b/navis/intersection/intersect.py
index d689fbb9..7462de8c 100644
--- a/navis/intersection/intersect.py
+++ b/navis/intersection/intersect.py
@@ -29,13 +29,13 @@
try:
from pyoctree import pyoctree
-except ImportError:
+except ModuleNotFoundError:
pyoctree = None
logger.debug("Package pyoctree not found.")
try:
import ncollpyde
-except ImportError:
+except ModuleNotFoundError:
ncollpyde = None
logger.debug("Package ncollpyde not found.")
diff --git a/navis/intersection/ray.py b/navis/intersection/ray.py
index a4b325b7..a9330438 100644
--- a/navis/intersection/ray.py
+++ b/navis/intersection/ray.py
@@ -21,12 +21,12 @@
try:
from pyoctree import pyoctree
-except ImportError:
+except ModuleNotFoundError:
pyoctree = None
try:
import ncollpyde
-except ImportError:
+except ModuleNotFoundError:
ncollpyde = None
diff --git a/navis/io/base.py b/navis/io/base.py
index 4dfd4edb..627890bd 100644
--- a/navis/io/base.py
+++ b/navis/io/base.py
@@ -24,19 +24,21 @@
import pandas as pd
from abc import ABC
-from functools import partial
+from functools import partial, wraps
from pathlib import Path
from typing import List, Union, Iterable, Dict, Optional, Any, IO
from typing_extensions import Literal
from zipfile import ZipFile, ZipInfo
+from ftplib import FTP
from .. import config, utils, core
try:
import zlib
import zipfile
+
compression = zipfile.ZIP_DEFLATED
-except ImportError:
+except ModuleNotFoundError:
compression = zipfile.ZIP_STORED
__all__ = ["BaseReader"]
@@ -46,6 +48,9 @@
DEFAULT_INCLUDE_SUBDIRS = False
+# Regular expression to figure out if a string is a regex pattern
+rgx = re.compile(r"[\\\.\?\[\]\+\^\$\*]")
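+# (e.g. ".*_raw\.swc" is treated as a regex, "neuron_123" as a plain substring)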
+
def merge_dicts(*dicts: Optional[Dict], **kwargs) -> Dict:
"""Merge dicts and kwargs left to right.
@@ -61,6 +66,49 @@ def merge_dicts(*dicts: Optional[Dict], **kwargs) -> Dict:
return out
+def handle_errors(func):
+ """Decorator for read_buffer and read_dataframe methods to handle errors.
+
+ Catches exceptions, logs/raises and potentially return `None`.
+
+ Note: various other BaseReader methods have their own error handling.
+
+ Parameters
+ ----------
+ func : callable
+ Function to wrap.
+
+ Returns
+ -------
+ callable
+ Wrapped function.
+
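+ Examples
+ --------
+ A sketch of the intended use on a reader subclass (mirrors how
+ `BaseReader.read_buffer` is decorated below):
+
+ >>> class MyReader(BaseReader):
+ ... @handle_errors
+ ... def read_buffer(self, f, attrs=None):
+ ... ...
+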
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ self = args[0]
+ attrs = kwargs.get("attrs", {}) # we rely on this being a keyword argument!
+ try:
+ return func(*args, **kwargs)
+ except BaseException as e:
+ # Check if we can provide any hint as to which file failed
+ id = self.name_fallback
+ for a in ("file", "origin", "name"):
+ if a in attrs:
+ id = attrs[a]
+ break
+
+ if self.errors == "raise":
+ raise ReadError(f"Error reading {id}. See above traceback for details.") from e
+ elif self.errors == "log":
+ logger.exception(f"Failed to read {id}", exc_info=True)
+
+ return None
+
+ return wrapper
+
+
class Writer:
"""Writer class that takes care of things like filenames, archives, etc.
@@ -77,7 +125,7 @@ class Writer:
def __init__(self, write_func, ext):
assert callable(write_func)
if ext:
- assert isinstance(ext, str) and ext.startswith('.')
+ assert isinstance(ext, str) and ext.startswith(".")
self.write_func = write_func
self.ext = ext
@@ -87,7 +135,9 @@ def write_single(self, x, filepath, **kwargs):
try:
as_str = os.fspath(filepath)
except TypeError:
- raise ValueError(f'`filepath` must be str or pathlib.Path, got "{type(filepath)}"')
+ raise ValueError(
+ f'`filepath` must be str or pathlib.Path, got "{type(filepath)}"'
+ )
# Format filename (e.g. "{neuron.name}.swc")
formatted_str = as_str.format(neuron=x)
@@ -103,11 +153,11 @@ def write_single(self, x, filepath, **kwargs):
# If not specified, generate filename
if self.ext and not str(filepath).endswith(self.ext):
- filepath = filepath / f'{x.id}{self.ext}'
+ filepath = filepath / f"{x.id}{self.ext}"
# Make sure the parent directory exists
if not filepath.parent.exists():
- raise ValueError(f'Parent folder {filepath.parent} must exist.')
+ raise ValueError(f"Parent folder {filepath.parent} must exist.")
# Track the path we put this (and presumably all other files in)
self.path = Path(filepath)
@@ -126,39 +176,50 @@ def write_many(self, x, filepath, **kwargs):
if not is_filename or is_single or is_formattable:
filepath = [filepath] * len(x)
else:
- raise ValueError('`filepath` must either be a folder, a '
- 'formattable filepath or a list of filepaths'
- 'when saving multiple neurons.')
+ raise ValueError(
+ "`filepath` must either be a folder, a "
+ "formattable filepath or a list of filepaths"
+ "when saving multiple neurons."
+ )
if len(filepath) != len(x):
- raise ValueError(f'Got {len(filepath)} file names for '
- f'{len(x)} neurons.')
+ raise ValueError(
+ f"Got {len(filepath)} file names for " f"{len(x)} neurons."
+ )
# At this point filepath is iterable
filepath: Iterable[str]
- for n, f in config.tqdm(zip(x, filepath), disable=config.pbar_hide,
- leave=config.pbar_leave, total=len(x),
- desc='Writing'):
+ for n, f in config.tqdm(
+ zip(x, filepath),
+ disable=config.pbar_hide,
+ leave=config.pbar_leave,
+ total=len(x),
+ desc="Writing",
+ ):
self.write_single(n, filepath=f, **kwargs)
def write_zip(self, x, filepath, **kwargs):
"""Write files to zip."""
filepath = Path(filepath).expanduser()
# Parse pattern, if given
- pattern = '{neuron.id}' + (self.ext if self.ext else '')
- if '@' in str(filepath):
- pattern, filename = filepath.name.split('@')
+ pattern = "{neuron.id}" + (self.ext if self.ext else "")
+ if "@" in str(filepath):
+ pattern, filename = filepath.name.split("@")
filepath = filepath.parent / filename
# Make sure we have an iterable
x = core.NeuronList(x)
- with ZipFile(filepath, mode='w') as zf:
+ with ZipFile(filepath, mode="w") as zf:
# Context-manager will remove temporary directory and its contents
with tempfile.TemporaryDirectory() as tempdir:
- for n in config.tqdm(x, disable=config.pbar_hide,
- leave=config.pbar_leave, total=len(x),
- desc='Writing'):
+ for n in config.tqdm(
+ x,
+ disable=config.pbar_hide,
+ leave=config.pbar_leave,
+ total=len(x),
+ desc="Writing",
+ ):
# Save to temporary file
f = None
try:
@@ -167,8 +228,11 @@ def write_zip(self, x, filepath, **kwargs):
# Write to temporary file
self.write_single(n, filepath=f, **kwargs)
# Add file to zip
- zf.write(f, arcname=pattern.format(neuron=n),
- compress_type=compression)
+ zf.write(
+ f,
+ arcname=pattern.format(neuron=n),
+ compress_type=compression,
+ )
except BaseException:
raise
finally:
@@ -184,7 +248,7 @@ def write_zip(self, x, filepath, **kwargs):
def write_any(self, x, filepath, **kwargs):
"""Write any to file. Default entry point."""
# If target is a zipfile
- if isinstance(filepath, (str, Path)) and str(filepath).endswith('.zip'):
+ if isinstance(filepath, (str, Path)) and str(filepath).endswith(".zip"):
return self.write_zip(x, filepath=filepath, **kwargs)
elif isinstance(x, core.NeuronList):
return self.write_many(x, filepath=filepath, **kwargs)
@@ -196,15 +260,16 @@ class BaseReader(ABC):
"""Abstract reader to parse various inputs into neurons.
Any subclass should implement at least one of `read_buffer` or
- `read_dataframe`.
+ `read_dataframe`. Entry methods such as `read_any` will parse
+ an input and dispatch it to the appropriate method.
Parameters
----------
fmt : str
A string describing how to parse filenames into neuron
properties. For example '{id}.swc'.
- file_ext : str
- The file extension to look for when searching folders.
+ file_ext : str | tuple
+ The file extension(s) to look for when searching folders.
For example '.swc'. Alternatively, you can re-implement
the `is_valid_file` method for more complex filters. That
method needs to be able to deal with: Path objects, ZipInfo
@@ -216,6 +281,14 @@ class BaseReader(ABC):
Will be overwritten by later additions (e.g. from `fmt`).
ignore_hidden : bool
Whether to ignore files that start with "._".
+ errors : "raise" | "log" | "ignore"
+ What to do when an error is encountered:
+ - "raise" (default) will raise an error
+ - "log" will log a warning and return `None`
+ - "ignore" will return `None`
+ Applies only to errors in parsing file contents into neurons,
+ not to errors in reading files, archives, URLs, etc.
+
"""
def __init__(
@@ -226,6 +299,7 @@ def __init__(
read_binary: bool = False,
attrs: Optional[Dict[str, Any]] = None,
ignore_hidden=True,
+ errors="raise"
):
self.attrs = attrs
self.fmt = fmt
@@ -233,19 +307,50 @@ def __init__(
self.name_fallback = name_fallback
self.read_binary = read_binary
self.ignore_hidden = ignore_hidden
+ self.errors = errors
- if self.file_ext.startswith("*"):
+ assert errors in ("raise", "log", "ignore")
+
+ @property
+ def file_ext(self):
+ return self._file_ext
+
+ @file_ext.setter
+ def file_ext(self, value):
+ """Makes sure file_ext is always a tuple."""
+ if isinstance(value, str):
+ value = (value,)
+
+ if any((ext.startswith("*") for ext in value)):
raise ValueError('File extension must be ".ext", not "*.ext"')
- def files_in_dir(self,
- dpath: Path,
- include_subdirs: bool = DEFAULT_INCLUDE_SUBDIRS
- ) -> Iterable[Path]:
+ self._file_ext = value
+
+ def format_output(self, x):
+ """Format output into NeuronList.
+
+ Replace this method if output is not (always) a NeuronList.
+ See for example NrrdReader in nrrd_io.py.
+ """
+ if not x:
+ return core.NeuronList([])
+ else:
+ return core.NeuronList([n for n in x if n])
+
+ def files_in_dir(
+ self, dpath: Path, include_subdirs: bool = DEFAULT_INCLUDE_SUBDIRS
+ ) -> Iterable[Path]:
"""List files to read in directory."""
if not isinstance(dpath, Path):
dpath = Path(dpath)
+
+ if "*" in str(dpath):
+ pattern = str(dpath.name)
+ dpath = dpath.parent
+ else:
+ pattern = "*"
+
dpath = dpath.expanduser()
- pattern = '*'
if include_subdirs:
pattern = os.path.join("**", pattern)
@@ -263,8 +368,9 @@ def is_valid_file(self, file):
if self.ignore_hidden and str(file).startswith("._"):
return False
- if str(file).endswith(self.file_ext):
- return True
+ for ext in self.file_ext:
+ if str(file).endswith(ext):
+ return True
return False
def _make_attributes(
@@ -285,36 +391,15 @@ def _make_attributes(
Arbitrary string-keyed attributes.
"""
return merge_dicts(
- dict(
- created_at=str(datetime.datetime.now())
- ),
+ dict(created_at=str(datetime.datetime.now())),
self.attrs,
*dicts,
**kwargs,
)
- def read_buffer(
- self, f: IO, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.BaseNeuron':
- """Read buffer into a single neuron.
-
- Parameters
- ----------
- f : IO
- Readable buffer.
- attrs : dict | None
- Arbitrary attributes to include in the neuron.
-
- Returns
- -------
- core.NeuronObject
- """
- raise NotImplementedError('Reading from buffer not implemented for '
- f'{type(self)}')
-
def read_file_path(
self, fpath: os.PathLike, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.BaseNeuron':
+ ) -> "core.BaseNeuron":
"""Read single file from path into a neuron.
Parameters
@@ -330,19 +415,17 @@ def read_file_path(
"""
p = Path(fpath)
with open(p, "rb" if self.read_binary else "r") as f:
- try:
- props = self.parse_filename(f.name)
- props["origin"] = str(p)
- return self.read_buffer(f, merge_dicts(props, attrs))
- except BaseException as e:
- raise ValueError(f"Error reading file {p}") from e
+ props = self.parse_filename(f.name)
+ props["origin"] = str(p)
+ return self.read_buffer(f, attrs=merge_dicts(props, attrs))
def read_from_zip(
- self, files: Union[str, List[str]],
+ self,
+ files: Union[str, List[str]],
zippath: os.PathLike,
attrs: Optional[Dict[str, Any]] = None,
- on_error: Union[Literal['ignore', Literal['raise']]] = 'ignore'
- ) -> 'core.NeuronList':
+ on_error: Literal["ignore", "raise"] = "ignore",
+ ) -> "core.NeuronList":
"""Read given files from a zip into a NeuronList.
Typically not used directly but via `read_zip()` dispatcher.
@@ -367,30 +450,30 @@ def read_from_zip(
files = utils.make_iterable(files)
neurons = []
- with ZipFile(p, 'r') as zip:
+ with ZipFile(p, "r") as zip:
for file in files:
# Note the `file` is of type zipfile.ZipInfo here
props = self.parse_filename(file.orig_filename)
- props['origin'] = str(p)
+ props["origin"] = str(p)
try:
- n = self.read_bytes(zip.read(file),
- merge_dicts(props, attrs))
+ n = self.read_bytes(zip.read(file), attrs=merge_dicts(props, attrs))
neurons.append(n)
except BaseException:
- if on_error == 'ignore':
+ if on_error == "ignore":
logger.warning(f'Failed to read "{file.filename}" from zip.')
else:
raise
- return core.NeuronList(neurons)
+ return self.format_output(neurons)
def read_zip(
- self, fpath: os.PathLike,
+ self,
+ fpath: os.PathLike,
parallel="auto",
limit: Optional[int] = None,
attrs: Optional[Dict[str, Any]] = None,
- on_error: Union[Literal['ignore', Literal['raise']]] = 'ignore'
- ) -> 'core.NeuronList':
+ on_error: Literal["ignore", "raise"] = "ignore",
+ ) -> "core.NeuronList":
"""Read files from a zip into a NeuronList.
This is a dispatcher for `.read_from_zip`.
@@ -412,22 +495,25 @@ def read_zip(
"""
fpath = Path(fpath).expanduser()
- read_fn = partial(self.read_from_zip,
- zippath=fpath, attrs=attrs,
- on_error=on_error)
- neurons = parallel_read_archive(read_fn=read_fn,
- fpath=fpath,
- file_ext=self.is_valid_file,
- limit=limit,
- parallel=parallel)
- return core.NeuronList(neurons)
+ read_fn = partial(
+ self.read_from_zip, zippath=fpath, attrs=attrs, on_error=on_error
+ )
+ neurons = parallel_read_archive(
+ read_fn=read_fn,
+ fpath=fpath,
+ file_ext=self.is_valid_file,
+ limit=limit,
+ parallel=parallel,
+ )
+ return self.format_output(neurons)
def read_from_tar(
- self, files: Union[str, List[str]],
+ self,
+ files: Union[str, List[str]],
tarpath: os.PathLike,
attrs: Optional[Dict[str, Any]] = None,
- on_error: Union[Literal['ignore', Literal['raise']]] = 'ignore'
- ) -> 'core.NeuronList':
+ on_error: Literal["ignore", "raise"] = "ignore",
+ ) -> "core.NeuronList":
"""Read given files from a tar into a NeuronList.
Typically not used directly but via `read_tar()` dispatcher.
@@ -452,30 +538,32 @@ def read_from_tar(
files = utils.make_iterable(files)
neurons = []
- with tarfile.open(p, 'r') as tf:
+ with tarfile.open(p, "r") as tf:
for file in files:
# Note the `file` is of type tarfile.TarInfo here
- props = self.parse_filename(file.name.split('/')[-1])
- props['origin'] = str(p)
+ props = self.parse_filename(file.name.split("/")[-1])
+ props["origin"] = str(p)
try:
- n = self.read_bytes(tf.extractfile(file).read(),
- merge_dicts(props, attrs))
+ n = self.read_bytes(
+ tf.extractfile(file).read(), attrs=merge_dicts(props, attrs)
+ )
neurons.append(n)
except BaseException:
- if on_error == 'ignore':
+ if on_error == "ignore":
logger.warning(f'Failed to read "{file.filename}" from tar.')
else:
raise
- return core.NeuronList(neurons)
+ return self.format_output(neurons)
def read_tar(
- self, fpath: os.PathLike,
+ self,
+ fpath: os.PathLike,
parallel="auto",
limit: Optional[int] = None,
attrs: Optional[Dict[str, Any]] = None,
- on_error: Union[Literal['ignore', Literal['raise']]] = 'ignore'
- ) -> 'core.NeuronList':
+ on_error: Literal["ignore", "raise"] = "ignore",
+ ) -> "core.NeuronList":
"""Read files from a tar archive into a NeuronList.
This is a dispatcher for `.read_from_tar`.
@@ -497,23 +585,136 @@ def read_tar(
"""
fpath = Path(fpath).expanduser()
- read_fn = partial(self.read_from_tar,
- tarpath=fpath, attrs=attrs,
- on_error=on_error)
- neurons = parallel_read_archive(read_fn=read_fn,
- fpath=fpath,
- file_ext=self.is_valid_file,
- limit=limit,
- parallel=parallel)
- return core.NeuronList(neurons)
+ read_fn = partial(
+ self.read_from_tar, tarpath=fpath, attrs=attrs, on_error=on_error
+ )
+ neurons = parallel_read_archive(
+ read_fn=read_fn,
+ fpath=fpath,
+ file_ext=self.is_valid_file,
+ limit=limit,
+ parallel=parallel,
+ )
+ return self.format_output(neurons)
+
+ def read_ftp(
+ self,
+ url,
+ parallel="auto",
+ limit: Optional[int] = None,
+ attrs: Optional[Dict[str, Any]] = None,
+ on_error: Literal["ignore", "raise"] = "ignore",
+ ) -> "core.NeuronList":
+ """Read files from an FTP server.
+
+ This is a dispatcher for `.read_from_ftp`.
+
+ Parameters
+ ----------
+ url : str
+ Can be the path to a single file or a directory.
+ limit : int, optional
+ Limit the number of files read from this directory.
+ attrs : dict or None
+ Arbitrary attributes to include in the TreeNeuron.
+ on_error : 'ignore' | 'raise'
+ What to do when an error is encountered.
+
+ Returns
+ -------
+ core.NeuronList
+
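+ Examples
+ --------
+ A minimal sketch using the SWC reader (hypothetical server and path):
+
+ >>> import navis
+ >>> nl = navis.read_swc("ftp://ftp.example.org/skeletons/*.swc") # doctest: +SKIP
+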
+ """
+ # Remove the ftp:// prefix
+ url = url.replace("ftp://", "")
+
+ # Split into server and path
+ server, path = url.split("/", 1)
+
+ # Check if server contains a port
+ if ":" in server:
+ server, port = server.split(":")
+ port = int(port)
+ else:
+ port = 21 # default port
+
+ read_fn = partial(self.read_from_ftp, attrs=attrs, on_error=on_error)
+ neurons = parallel_read_ftp(
+ read_fn=read_fn,
+ server=server,
+ port=port,
+ path=path,
+ file_ext=self.is_valid_file,
+ limit=limit,
+ parallel=parallel,
+ )
+ return self.format_output(neurons)
+
+ def read_from_ftp(
+ self,
+ files: Union[str, List[str]],
+ ftp: FTP,
+ attrs: Optional[Dict[str, Any]] = None,
+ on_error: Literal["ignore", "raise"] = "ignore",
+ ) -> "core.NeuronList":
+ """Read given files from an FTP server into a NeuronList.
+
+ Typically not used directly but via `read_ftp()` dispatcher.
+
+ Parameters
+ ----------
+ files : str | list thereof
+ Filenames on the FTP server to read.
+ ftp : ftplib.FTP | "GLOBAL"
+ The FTP client. This should already be connected, logged in
+ and in the correct directory. If "GLOBAL", we will look for a
+ `_FTP` global variable.
+ attrs : dict or None
+ Arbitrary attributes to include in the TreeNeuron.
+ on_error : 'ignore' | 'raise'
+ What to do when an error is encountered.
+
+ Returns
+ -------
+ core.NeuronList
+
+ """
+ # When reading in parallel, we expect there to be a global FTP connection
+ # that was initialized once for each worker process.
+ if ftp == "GLOBAL":
+ if "_FTP" not in globals():
+ raise ValueError("No global FTP connection found.")
+ ftp = _FTP
+
+ files = utils.make_iterable(files)
+
+ neurons = []
+ for file in files:
+ # Read the file into a bytes buffer
+ with io.BytesIO() as f:
+ ftp.retrbinary("RETR " + file, f.write)
+ f.seek(0)
+ props = self.parse_filename(file)
+ props["origin"] = f"{ftp.host}:{ftp.port}{ftp.pwd()}/{file}"
+ try:
+ n = self.read_buffer(f, attrs=merge_dicts(props, attrs))
+ neurons.append(n)
+ except BaseException:
+ if on_error == "ignore":
+ logger.warning(f'Failed to read "{file}" from FTP.')
+ else:
+ raise
+
+ return self.format_output(neurons)
def read_directory(
- self, path: os.PathLike,
+ self,
+ path: os.PathLike,
include_subdirs=DEFAULT_INCLUDE_SUBDIRS,
parallel="auto",
limit: Optional[int] = None,
- attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.NeuronList':
+ attrs: Optional[Dict[str, Any]] = None,
+ ) -> "core.NeuronList":
"""Read directory of files into a NeuronList.
Parameters
@@ -532,19 +733,30 @@ def read_directory(
Returns
-------
core.NeuronList
+
"""
files = list(self.files_in_dir(Path(path), include_subdirs))
- if limit:
+ if isinstance(limit, int):
files = files[:limit]
+ elif isinstance(limit, list):
+ files = [f for f in files if f in limit]
+ elif isinstance(limit, slice):
+ files = files[limit]
+ elif isinstance(limit, str):
+ # Check if limit is a regex
+ if rgx.search(limit):
+ files = [f for f in files if re.search(limit, str(f.name))]
+ else:
+ files = [f for f in files if limit in str(f)]
read_fn = partial(self.read_file_path, attrs=attrs)
neurons = parallel_read(read_fn, files, parallel)
- return core.NeuronList(neurons)
+ return self.format_output(neurons)
def read_url(
self, url: str, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.BaseNeuron':
+ ) -> "core.BaseNeuron":
"""Read file from URL into a neuron.
Parameters
@@ -569,16 +781,13 @@ def read_url(
# the wrong format.
with requests.get(url, stream=False) as r:
r.raise_for_status()
- props = self.parse_filename(url.split('/')[-1])
- props['origin'] = url
- return self.read_buffer(
- io.BytesIO(r.content),
- merge_dicts(props, attrs)
- )
+ props = self.parse_filename(url.split("/")[-1])
+ props["origin"] = url
+ return self.read_buffer(io.BytesIO(r.content), attrs=merge_dicts(props, attrs))
def read_string(
self, s: str, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.BaseNeuron':
+ ) -> "core.BaseNeuron":
"""Read single string into a Neuron.
Parameters
@@ -594,13 +803,12 @@ def read_string(
"""
sio = io.StringIO(s)
return self.read_buffer(
- sio,
- merge_dicts({'name': self.name_fallback, 'origin': 'string'}, attrs)
+ sio, attrs=merge_dicts({"name": self.name_fallback, "origin": "string"}, attrs)
)
def read_bytes(
self, s: str, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.BaseNeuron':
+ ) -> "core.BaseNeuron":
"""Read bytes into a Neuron.
Parameters
@@ -616,13 +824,13 @@ def read_bytes(
"""
sio = io.BytesIO(s)
return self.read_buffer(
- sio,
- merge_dicts({'name': self.name_fallback, 'origin': 'string'}, attrs)
+ sio, attrs=merge_dicts({"name": self.name_fallback, "origin": "string"}, attrs)
)
+ @handle_errors
def read_dataframe(
self, nodes: pd.DataFrame, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.BaseNeuron':
+ ) -> "core.BaseNeuron":
"""Convert a DataFrame into a neuron.
Parameters
@@ -635,12 +843,36 @@ def read_dataframe(
-------
core.BaseNeuron
"""
- raise NotImplementedError('Reading DataFrames not implemented for '
- f'{type(self)}')
+ raise NotImplementedError(
+ "Reading DataFrames not implemented for " f"{type(self)}"
+ )
+
+ @handle_errors
+ def read_buffer(
+ self, f: IO, attrs: Optional[Dict[str, Any]] = None
+ ) -> "core.BaseNeuron":
+ """Read buffer into a single neuron.
+
+ Parameters
+ ----------
+ f : IO
+ Readable buffer.
+ attrs : dict | None
+ Arbitrary attributes to include in the neuron.
+
+ Returns
+ -------
+ core.NeuronObject
+ """
+ raise NotImplementedError(
+ "Reading from buffer not implemented for " f"{type(self)}"
+ )
def read_any_single(
self, obj, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.BaseNeuron':
+ ) -> "core.BaseNeuron":
"""Attempt to convert an arbitrary object into a neuron.
Parameters
@@ -656,30 +888,32 @@ def read_any_single(
core.BaseNeuron
"""
if hasattr(obj, "read"):
- return self.read_buffer(obj, attrs)
+ return self.read_buffer(obj, attrs=attrs)
if isinstance(obj, pd.DataFrame):
- return self.read_dataframe(obj, attrs)
+ return self.read_dataframe(obj, attrs=attrs)
if isinstance(obj, os.PathLike):
- if str(obj).endswith('.zip'):
+ if str(obj).endswith(".zip"):
return self.read_zip(obj, attrs=attrs)
elif ".tar" in str(obj):
return self.read_tar(obj, attrs=attrs)
- return self.read_file_path(obj, attrs)
+ return self.read_file_path(obj, attrs=attrs)
if isinstance(obj, str):
# See if this might be a file (make sure to expand user)
if os.path.isfile(os.path.expanduser(obj)):
p = Path(obj).expanduser()
- if p.suffix == '.zip':
+ if p.suffix == ".zip":
return self.read_zip(p, attrs=attrs)
- return self.read_file_path(p, attrs)
+ elif ".tar" in p.name:  # can be ".tar", ".tar.gz" or ".tar.bz"
+ return self.read_tar(p, attrs=attrs)
+ return self.read_file_path(p, attrs=attrs)
if obj.startswith("http://") or obj.startswith("https://"):
- return self.read_url(obj, attrs)
- return self.read_string(obj, attrs)
+ return self.read_url(obj, attrs=attrs)
+ if obj.startswith("ftp://"):
+ return self.read_ftp(obj, attrs=attrs)
+ return self.read_string(obj, attrs=attrs)
if isinstance(obj, bytes):
- return self.read_bytes(obj, attrs)
- raise ValueError(
- f"Could not read neuron from object of type '{type(obj)}'"
- )
+ return self.read_bytes(obj, attrs=attrs)
+ raise ValueError(f"Could not read neuron from object of type '{type(obj)}'")
def read_any_multi(
self,
@@ -687,7 +921,7 @@ def read_any_multi(
include_subdirs=DEFAULT_INCLUDE_SUBDIRS,
parallel="auto",
attrs: Optional[Dict[str, Any]] = None,
- ) -> 'core.NeuronList':
+ ) -> "core.NeuronList":
"""Attempt to convert an arbitrary object into a NeuronList,
potentially in parallel.
@@ -716,28 +950,35 @@ def read_any_multi(
if not objs:
logger.warning("No files found, returning empty NeuronList")
- return core.NeuronList([])
+ return self.format_output(objs)
new_objs = []
for obj in objs:
try:
- if os.path.isdir(os.path.expanduser(obj)):
+ if is_dir(obj):
new_objs.extend(self.files_in_dir(obj, include_subdirs))
continue
except TypeError:
pass
new_objs.append(obj)
+ # `parallel` can be ("auto", threshold) in which case `threshold`
+ # determines at what length we use parallel processing
+ if isinstance(parallel, tuple):
+ parallel, threshold = parallel
+ else:
+ threshold = 200
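+ # e.g. `parallel=("auto", 1000)` will only switch to parallel
+ # processing when reading 1000 or more neurons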
+
if (
isinstance(parallel, str)
- and parallel.lower() == 'auto'
- and len(new_objs) < 200
+ and parallel.lower() == "auto"
+ and len(new_objs) < threshold
):
parallel = False
read_fn = partial(self.read_any_single, attrs=attrs)
neurons = parallel_read(read_fn, new_objs, parallel)
- return core.NeuronList(neurons)
+ return self.format_output(neurons)
def read_any(
self,
@@ -746,7 +987,7 @@ def read_any(
parallel="auto",
limit=None,
attrs: Optional[Dict[str, Any]] = None,
- ) -> 'core.NeuronObject':
+ ) -> "core.NeuronObject":
"""Attempt to read an arbitrary object into a neuron.
Parameters
@@ -760,27 +1001,29 @@ def read_any(
core.NeuronObject
"""
if utils.is_iterable(obj) and not hasattr(obj, "read"):
- return self.read_any_multi(obj, parallel, include_subdirs, attrs)
+ return self.read_any_multi(obj, parallel, include_subdirs, attrs=attrs)
else:
try:
- if os.path.isdir(os.path.expanduser(obj)):
+ if is_dir(obj):
return self.read_directory(
- obj, include_subdirs, parallel, limit, attrs
+ obj, include_subdirs, parallel, limit, attrs=attrs
)
except TypeError:
pass
try:
- if os.path.isfile(os.path.expanduser(obj)) and str(obj).endswith('.zip'):
- return self.read_zip(obj, parallel, limit, attrs)
+ if os.path.isfile(os.path.expanduser(obj)) and str(obj).endswith(
+ ".zip"
+ ):
+ return self.read_zip(obj, parallel, limit, attrs=attrs)
if os.path.isfile(os.path.expanduser(obj)) and ".tar" in str(obj):
- return self.read_tar(obj, parallel, limit, attrs)
+ return self.read_tar(obj, parallel, limit, attrs=attrs)
+ if isinstance(obj, str) and obj.startswith("ftp://"):
+ return self.read_ftp(obj, parallel, limit, attrs=attrs)
except TypeError:
pass
- return self.read_any_single(obj, attrs)
+ return self.read_any_single(obj, attrs=attrs)
- def parse_filename(
- self, filename: str
- ) -> dict:
+ def parse_filename(self, filename: str) -> dict:
"""Extract properties from filename according to specified formatter.
Parameters
@@ -800,7 +1043,7 @@ def parse_filename(
fmt = re.escape(self.fmt)
# Unescape { and }
- fmt = fmt.replace('\\{', '{').replace('\\}', '}')
+ fmt = fmt.replace("\\{", "{").replace("\\}", "}")
# Replace all e.g. {name} with {.*}
prop_names = []
@@ -814,37 +1057,36 @@ def parse_filename(
if not match:
raise ValueError(f'Unable to match "{self.fmt}" to filename "{filename}"')
- props = {'file': filename}
+ props = {"file": filename}
for i, prop in enumerate(prop_names):
- for p in prop.split(','):
+ for p in prop.split(","):
# Ignore empty ("{}")
if not p:
continue
# If datatype was specified
if ":" in p:
- p, dt = p.split(':')
+ p, dt = p.split(":")
props[p] = match.group(i + 1)
- if dt == 'int':
+ if dt == "int":
props[p] = int(props[p])
- elif dt == 'float':
+ elif dt == "float":
props[p] = float(props[p])
- elif dt == 'bool':
+ elif dt == "bool":
props[p] = bool(props[p])
- elif dt == 'str':
+ elif dt == "str":
props[p] = str(props[p])
else:
- raise ValueError(f'Unable to interpret datatype "{dt}" '
- f'for property {p}')
+ raise ValueError(
+ f'Unable to interpret datatype "{dt}" ' f"for property {p}"
+ )
else:
props[p] = match.group(i + 1)
return props
- def _extract_connectors(
- self, nodes: pd.DataFrame
- ) -> Optional[pd.DataFrame]:
+ def _extract_connectors(self, nodes: pd.DataFrame) -> Optional[pd.DataFrame]:
"""Infer outgoing/incoming connectors from data.
Parameters
@@ -859,7 +1101,128 @@ def _extract_connectors(
return
-def parallel_read(read_fn, objs, parallel="auto") -> List['core.NeuronList']:
+class ImageReader(BaseReader):
+ """Reader for image data.
+
+
+ """
+
+ def __init__(self, output, thin, threshold, dotprop_kwargs, **kwargs):
+ super().__init__(**kwargs)
+ self.output = output
+ self.thin = thin
+ self.threshold = threshold
+ self.dotprop_kwargs = dotprop_kwargs
+
+ def convert_image(self, data, attrs, header, voxdim, units, space_units):
+ """Convert image data to desired output.
+
+ Parameters
+ ----------
+ data : np.ndarray
+ Image/Voxel data.
+ attrs : dict
+ Additional attributes to associate with the neuron.
+ header : dict
+ Header information.
+ voxdim : list of numbers
+ Voxel dimensions.
+ units : str | list, optional
+ Units for the neuron (e.g. "1um" or ["4um", "4um", "40um"]).
+ space_units : str, optional
+ Space units (e.g. "um").
+
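+ Returns
+ -------
+ core.Dotprops | core.VoxelNeuron
+ Dotprops if `output="dotprops"`, otherwise a VoxelNeuron.
+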
+ """
+ if self.output == "dotprops":
+ # If we're trying to get voxels from an image
+ if data.ndim == 3:
+ if self.threshold:
+ if self.threshold >= 1:
+ data = data >= self.threshold
+ elif self.threshold < 1 and self.threshold > 0:
+ data = data >= (self.threshold * data.max())
+ else:
+ raise ValueError(
+ "Threshold must be either >=1 or 0-1, got "
+ f"{self.threshold}"
+ )
+
+ if self.thin:
+ from skimage.morphology import skeletonize
+
+ data = skeletonize(data)
+
+ # Convert data to x/y/z coordinates
+ # Note we need to multiply units before creating the Dotprops
+ # - otherwise the KNN will be wrong
+ x, y, z = np.where(data)
+ points = np.vstack((x, y, z)).T
+ points = points * voxdim
+
+ if not len(points):
+ raise ValueError(
+ f"No points extracted from {self.name_fallback} file. Try lowering the threshold?"
+ )
+
+ x = core.make_dotprops(points, **self.dotprop_kwargs)
+ elif data.ndim == 2:
+ if data.shape[1] == 3:
+ points, vect, alpha = data, None, None
+ elif data.shape[1] == 6:
+ points, vect, alpha = data[:, :3], data[:, 3:6], None
+ elif data.shape[1] == 7:
+ points, vect, alpha = data[:, :3], data[:, 3:6], data[:, 6]
+ else:
+ raise ValueError(
+ "Expected data to be either (N, 3), (N, 6) "
+ f"or (N, 7) but {self.name_fallback} file contains {data.shape}"
+ )
+ # Get `k` either from provided kwargs or the file's header
+ k = self.dotprop_kwargs.pop("k", header.get("k", 20))
+
+ x = core.Dotprops(
+ points, k=k, vect=vect, alpha=alpha, **self.dotprop_kwargs
+ )
+ else:
+ raise ValueError(
+ "Data must be 2- or 3-dimensional to extract "
+ f"Dotprops, got {data.ndim}"
+ )
+
+ # Set units from space_units (points are already in physical space)
+ if space_units:
+ if isinstance(space_units, str):
+ x.units = f"1 {space_units}"
+ elif len(space_units) == 3:
+ x.units = [f"1 {s}" for s in space_units]
+ else:
+ if data.ndim == 2:
+ logger.warning(
+ f"Data in {self.name_fallback} file is of shape {data.shape} - "
+ "i.e. 2D. Could this be a point cloud or dotprops "
+ "instead of voxels?"
+ )
+ x = core.VoxelNeuron(data, units=units)
+
+ # Header is special - we do not want to register it
+ setattr(x, f"{self.name_fallback.lower()}_header", header)
+
+ # Try adding properties one-by-one. If one fails, we'll keep track of it
+ # in the `.meta` attribute
+ meta = {}
+ for k, v in attrs.items():
+ try:
+ x._register_attr(k, v)
+ except (AttributeError, ValueError, TypeError):
+ meta[k] = v
+
+ if meta:
+ x.meta = meta
+
+ return x
+
+
+def parallel_read(read_fn, objs, parallel="auto") -> List["core.NeuronList"]:
"""Read neurons from some objects with the given reader function,
potentially in parallel.
@@ -885,17 +1248,24 @@ def parallel_read(read_fn, objs, parallel="auto") -> List['core.NeuronList']:
prog = partial(
config.tqdm,
- desc='Importing',
+ desc="Importing",
total=length,
disable=config.pbar_hide,
- leave=config.pbar_leave
+ leave=config.pbar_leave,
)
+ # `parallel` can be ("auto", threshold) in which case `threshold`
+ # determines at what length we use parallel processing
+ if isinstance(parallel, tuple):
+ parallel, threshold = parallel
+ else:
+ threshold = 200
+
if (
isinstance(parallel, str)
- and parallel.lower() == 'auto'
+ and parallel.lower() == "auto"
and not isinstance(length, type(None))
- and length < 200
+ and length < threshold
):
parallel = False
@@ -915,10 +1285,9 @@ def parallel_read(read_fn, objs, parallel="auto") -> List['core.NeuronList']:
return neurons
-def parallel_read_archive(read_fn, fpath, file_ext,
- limit=None,
- parallel="auto",
- ignore_hidden=True) -> List['core.NeuronList']:
+def parallel_read_archive(
+ read_fn, fpath, file_ext, limit=None, parallel="auto", ignore_hidden=True
+) -> List["core.NeuronList"]:
"""Read neurons from a archive (zip or tar), potentially in parallel.
Reader function must be picklable.
@@ -955,38 +1324,38 @@ def parallel_read_archive(read_fn, fpath, file_ext,
p = Path(fpath)
to_read = []
- if p.name.endswith('.zip'):
- with ZipFile(p, 'r') as zip:
+ if p.name.endswith(".zip"):
+ with ZipFile(p, "r") as zip:
for i, file in enumerate(zip.filelist):
- fname = file.filename.split('/')[-1]
- if ignore_hidden and fname.startswith('._'):
+ fname = file.filename.split("/")[-1]
+ if ignore_hidden and fname.startswith("._"):
continue
if callable(file_ext):
if file_ext(file):
to_read.append(file)
- elif file_ext == '*':
+ elif file_ext == "*":
to_read.append(file)
elif file_ext and fname.endswith(file_ext):
to_read.append(file)
- elif '.' not in file.filename:
+ elif "." not in file.filename:
to_read.append(file)
if isinstance(limit, int) and i >= limit:
break
- elif '.tar' in p.name: # can be ".tar", "tar.gz" or "tar.bz"
- with tarfile.open(p, 'r') as tf:
+ elif ".tar" in p.name: # can be ".tar", "tar.gz" or "tar.bz"
+ with tarfile.open(p, "r") as tf:
for i, file in enumerate(tf):
- fname = file.name.split('/')[-1]
- if ignore_hidden and fname.startswith('._'):
+ fname = file.name.split("/")[-1]
+ if ignore_hidden and fname.startswith("._"):
continue
if callable(file_ext):
if file_ext(file):
to_read.append(file)
- elif file_ext == '*':
+ elif file_ext == "*":
to_read.append(file)
elif file_ext and fname.endswith(file_ext):
to_read.append(file)
- elif '.' not in file.filename:
+ elif "." not in file.filename:
to_read.append(file)
if isinstance(limit, int) and i >= limit:
@@ -994,19 +1363,34 @@ def parallel_read_archive(read_fn, fpath, file_ext,
if isinstance(limit, list):
to_read = [f for f in to_read if f in limit]
+ elif isinstance(limit, slice):
+ to_read = to_read[limit]
+ elif isinstance(limit, str):
+ # Check if limit is a regex
+ if rgx.search(limit):
+ to_read = [f for f in to_read if re.search(limit, f.filename)]
+ else:
+ to_read = [f for f in to_read if limit in f.filename]
prog = partial(
config.tqdm,
- desc='Importing',
+ desc="Importing",
total=len(to_read),
disable=config.pbar_hide,
- leave=config.pbar_leave
+ leave=config.pbar_leave,
)
+ # `parallel` can be ("auto", threshold) in which case `threshold`
+ # determines at what length we use parallel processing
+ if isinstance(parallel, tuple):
+ parallel, threshold = parallel
+ else:
+ threshold = 200
+
if (
isinstance(parallel, str)
- and parallel.lower() == 'auto'
- and len(to_read) < 200
+ and parallel.lower() == "auto"
+ and len(to_read) < threshold
):
parallel = False
@@ -1026,6 +1410,163 @@ def parallel_read_archive(read_fn, fpath, file_ext,
return neurons
+def parallel_read_ftp(
+ read_fn,
+ server,
+ port,
+ path,
+ file_ext,
+ limit=None,
+ parallel="auto",
+) -> List["core.NeuronList"]:
+ """Read neurons from an FTP server, potentially in parallel.
+
+ Reader function must be picklable.
+
+ Parameters
+ ----------
+ read_fn : Callable
+ server : str
+ FTP server address.
+ port : int
+ FTP server port.
+ path : str
+ Path to directory containing files or single file.
+ file_ext : str | callable
+ File extension to search for - e.g. ".swc". `None` or `''`
+ are interpreted as looking for filenames without extension.
+ To include all files use `'*'`. Can also be a callable that
+ accepts a filename and returns True or False depending on
+ whether it should be included.
+ limit : int, optional
+ Limit the number of files read from this directory.
+ parallel : str | bool | int
+ "auto" or True for n_cores // 2, otherwise int for number of
+ jobs, or False for serial.
+
+ Returns
+ -------
+ core.NeuronList
+
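+ Examples
+ --------
+ A sketch, assuming `reader` is a `BaseReader` instance and the
+ server address is hypothetical:
+
+ >>> from functools import partial
+ >>> fn = partial(reader.read_from_ftp, attrs=None, on_error="ignore") # doctest: +SKIP
+ >>> nl = parallel_read_ftp(fn, "ftp.example.org", 21,
+ ... "skeletons/*.swc", file_ext=".swc") # doctest: +SKIP
+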
+ """
+ # Check if this is a single file
+ is_single_file = False
+ if "*" not in path:
+ if isinstance(file_ext, str) and path.endswith(file_ext):
+ is_single_file = True
+ elif callable(file_ext) and file_ext(path.rsplit("/", 1)[1]):
+ is_single_file = True
+
+ if is_single_file:
+ path, fname = path.rsplit("/", 1)
+ to_read = [fname]
+ else:
+ pattern = ""
+ # Check if path contains a "*." pattern - e.g. something like "*_raw.swc"
+ if "*" in path:
+ path, fname = path.rsplit("/", 1)
+ pattern = fname
+
+ # Remove leading /
+ if path.startswith("/"):
+ path = path[1:]
+
+ # First check content
+ with FTP() as ftp:
+ ftp.connect(server, port) # connect to server
+ ftp.login() # anonymous login
+ ftp.cwd(path) # change to path
+
+ # Read content
+ content = []
+ ftp.retrlines(f"LIST {pattern}", content.append)
+
+ # Parse content into filenames
+ to_read = []
+ for line in content:
+ if not line:
+ continue
+ file = line.split()[-1].strip()
+
+ if callable(file_ext):
+ if file_ext(file):
+ to_read.append(file)
+ elif file_ext == "*":
+ to_read.append(file)
+ elif file_ext and file.endswith(file_ext):
+ to_read.append(file)
+
+ if isinstance(limit, int):
+ to_read = to_read[:limit]
+ elif isinstance(limit, list):
+ to_read = [f for f in to_read if f in limit]
+ elif isinstance(limit, slice):
+ to_read = to_read[limit]
+ elif isinstance(limit, str):
+ # Check if limit is a regex
+ if rgx.search(limit):
+ to_read = [f for f in to_read if re.search(limit, f)]
+ else:
+ to_read = [f for f in to_read if limit in f]
+
+ if not to_read:
+ return []
+
+ prog = partial(
+ config.tqdm,
+ desc="Loading",
+ total=len(to_read),
+ disable=config.pbar_hide,
+ leave=config.pbar_leave,
+ )
+
+ # `parallel` can be ("auto", threshold) in which case `threshold`
+ # determines at what length we use parallel processing
+ if isinstance(parallel, tuple):
+ parallel, threshold = parallel
+ else:
+ threshold = 200
+
+ if (
+ isinstance(parallel, str)
+ and parallel.lower() == "auto"
+ and len(to_read) < threshold
+ ):
+ parallel = False
+
+ if parallel:
+ # Do not swap this as `isinstance(True, int)` returns `True`
+ if isinstance(parallel, (bool, str)):
+ n_cores = max(1, os.cpu_count() // 2)
+ else:
+ n_cores = int(parallel)
+
+ # We can't send the FTP object to the process (because its socket is not pickleable)
+ # Instead, we need to initialize a new FTP connection in each process via a global variable
+ with mp.Pool(
+ processes=n_cores, initializer=_ftp_pool_init, initargs=(server, port, path)
+ ) as pool:
+ results = pool.imap(partial(read_fn, ftp="GLOBAL"), to_read)
+ neurons = list(prog(results))
+ else:
+ with FTP() as ftp:
+ ftp.connect(server, port)
+ ftp.login()
+ ftp.cwd(path)
+
+ neurons = [read_fn(file, ftp=ftp) for file in prog(to_read)]
+
+ return neurons
+
+
+def _ftp_pool_init(server, port, path):
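+ """Initialize a per-process FTP connection (stored in the global `_FTP`)."""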
+ global _FTP
+ _FTP = FTP()
+ _FTP.connect(server, port)
+ _FTP.login()
+ _FTP.cwd(path)
+
+
def parse_precision(precision: Optional[int]):
"""Convert bit width into int and float dtypes.
@@ -1051,5 +1592,35 @@ def parse_precision(precision: Optional[int]):
return (INT_DTYPES[precision], FLOAT_DTYPES[precision])
except KeyError:
raise ValueError(
- f'Unknown precision {precision}. Expected on of the following: 16, 32 (default), 64 or None'
+ f"Unknown precision {precision}. Expected on of the following: 16, 32 (default), 64 or None"
)
+
+
+class ReadError(Exception):
+ """Error raised when reading a file fails."""
+
+
+def is_dir(path: os.PathLike) -> bool:
+ """Check if path is a directory.
+
+ The main purpose of this function is to catch
+ *.file_ext at the end of the path.
+
+ Parameters
+ ----------
+ path : os.PathLike
+ Path to check.
+
+ Returns
+ -------
+ bool
+ True if path is a directory.
+
+ """
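+ Examples
+ --------
+ A sketch (assuming `~/swc_files` exists):
+
+ >>> is_dir("~/swc_files/*.swc") # doctest: +SKIP
+ True
+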
+ # Expand user
+ path = os.path.expanduser(path)
+
+ # Remove any trailing *.file_ext
+ path = path.split("*")[0]
+
+ return os.path.isdir(path)
diff --git a/navis/io/hdf_io.py b/navis/io/hdf_io.py
index 3f56792a..1c90fd9f 100644
--- a/navis/io/hdf_io.py
+++ b/navis/io/hdf_io.py
@@ -733,7 +733,7 @@ def read_h5(filepath: str,
note that due to HDF5 restrictions numeric IDs will be
converted to strings.
prefer_raw : bool
- If True and a neuron has is saved as both serialized and
+ If True and a neuron is saved as both serialized and
raw data, will load the neuron from the raw data.
parallel : "auto" | bool | int
Defaults to `auto` which means only use parallel
@@ -770,13 +770,13 @@ def read_h5(filepath: str,
are causing troubles. If False (default), will read
every attribute and dataframe column and attach it to
the neuron.
- reader : "auto" | subclass of BaseH5Reader
+ reader : "auto" | str | subclass of BaseH5Reader
Which reader to use to parse the given format. By
default ("auto") will try to pick the correct parser
for you depending on the `format_spec` attribute in
- the HDF5 file. You can also directly provide a subclass
- of BaseH5Reader that is capable of reading neurons from
- the file.
+ the HDF5 file. Alternatively, you can provide either
+ a format version (e.g. "v1") or a subclass of BaseH5Reader
+ that is capable of reading neurons from the file.
Returns
-------
@@ -820,9 +820,20 @@ def read_h5(filepath: str,
# Get a reader for these specs
if reader == 'auto':
- if info['format_spec'] not in READERS:
+ if info['format_spec'] is None:
+ config.logger.warning(
+ 'No format specifier found in file, suggesting this file may not have '
+ 'been created using NAVis. We will try to read using the latest '
+ 'version of the schema. If this fails you may have to specify a reader '
+ 'or version manually (see the `reader` parameter).')
+ reader = READERS['latest']
+ elif info['format_spec'] not in READERS:
raise TypeError(f'No reader for HDF5 format {info["format_spec"]}')
reader = READERS[info['format_spec']]
+ elif isinstance(reader, str):
+ if reader not in READERS:
+ raise TypeError(f'No reader for HDF5 format "{reader}"')
+ reader = READERS[reader]
elif not isinstance(reader, BaseH5Reader):
raise TypeError('If provided, the reader must be a subclass of '
f'BaseH5Reader - got "{type(reader)}"')
@@ -1108,4 +1119,5 @@ def neuron_nm_units(neuron):
WRITERS = {'v1': H5WriterV1,
'latest': H5WriterV1}
-READERS = {'hnf_v1': H5ReaderV1}
+READERS = {'hnf_v1': H5ReaderV1,
+ 'latest': H5ReaderV1}
diff --git a/navis/io/mesh_io.py b/navis/io/mesh_io.py
index 5e5677f4..19ee47ff 100644
--- a/navis/io/mesh_io.py
+++ b/navis/io/mesh_io.py
@@ -12,13 +12,13 @@
# GNU General Public License for more details.
import os
+import io
-import multiprocessing as mp
import trimesh as tm
-from pathlib import Path
from typing import Union, Iterable, Optional, Dict, Any
from typing_extensions import Literal
+from urllib3 import HTTPResponse
from .. import config, utils, core
from . import base
@@ -26,28 +26,118 @@
# Set up logging
logger = config.get_logger(__name__)
+# Mesh files can have all sorts of extensions
+DEFAULT_FMT = "{name}.{file_ext}"
+
+# Mesh extensions supported by trimesh
+MESH_LOAD_EXT = tuple(tm.exchange.load.mesh_loaders.keys())
+MESH_WRITE_EXT = tuple(tm.exchange.export._mesh_exporters.keys())
+
+
+class MeshReader(base.BaseReader):
+ def __init__(
+ self,
+ output: str,
+ fmt: str = DEFAULT_FMT,
+ attrs: Optional[Dict[str, Any]] = None,
+ ):
+ super().__init__(
+ fmt=fmt,
+ attrs=attrs,
+ file_ext=MESH_LOAD_EXT,
+ name_fallback="MESH",
+ read_binary=True,
+ )
+ self.output = output
+
+ def format_output(self, x):
+ # This overrides BaseReader.format_output() to avoid trying
+ # to convert raw Trimesh objects into a NeuronList
+ if self.output == "trimesh":
+ return x
+ elif x:
+ return core.NeuronList(x)
+ else:
+ return core.NeuronList([])
+
+ @base.handle_errors
+ def read_buffer(
+ self, f, attrs: Optional[Dict[str, Any]] = None
+ ) -> Union[tm.Trimesh, "core.Volume", "core.MeshNeuron"]:
+ """Read buffer into mesh.
+
+ Parameters
+ ----------
+ f : IO
+ Readable buffer (must be bytes).
+ attrs : dict | None
+ Arbitrary attributes to include in the neurons.
+
+ Returns
+ -------
+ Trimesh | MeshNeuron | Volume
+
+ """
+ if isinstance(f, HTTPResponse):
+ # urllib3 responses expose their binary payload via `.data`
+ f = io.BytesIO(f.data)
+
+ if isinstance(f, bytes):
+ f = io.BytesIO(f)
+
+ # We need to tell trimesh what file type we are reading
+ if "file" not in attrs:
+ raise KeyError(
+ f'Unable to parse file type. "file" not in attributes: {attrs}'
+ )
+
+ file_type = attrs["file"].split(".")[-1]
+
+ mesh = tm.load_mesh(f, file_type=file_type)
+
+ if self.output == "trimesh":
+ return mesh
+ elif self.output == "volume":
+ return core.Volume(mesh.vertices, mesh.faces, **attrs)
+
+ # Turn into a MeshNeuron
+ n = core.MeshNeuron(mesh)
+
+ # Try adding properties one-by-one. If one fails, we'll keep track of it
+ # in the `.meta` attribute
+ meta = {}
+ for k, v in attrs.items():
+ try:
+ n._register_attr(k, v)
+ except (AttributeError, ValueError, TypeError):
+ meta[k] = v
+
+ if meta:
+ n.meta = meta
+
+ return n
-def read_mesh(f: Union[str, Iterable],
- include_subdirs: bool = False,
- parallel: Union[bool, int] = 'auto',
- output: Union[Literal['neuron'],
- Literal['volume'],
- Literal['trimesh']] = 'neuron',
- errors: Union[Literal['raise'],
- Literal['log'],
- Literal['ignore']] = 'log',
- limit: Optional[int] = None,
- **kwargs) -> 'core.NeuronObject':
- """Create Neuron/List from mesh.
+
+def read_mesh(
+ f: Union[str, Iterable],
+ include_subdirs: bool = False,
+ parallel: Union[bool, int] = "auto",
+ output: Union[Literal["neuron"], Literal["volume"], Literal["trimesh"]] = "neuron",
+ errors: Literal["raise", "log", "ignore"] = "raise",
+ limit: Optional[int] = None,
+ fmt: str = "{name}.",
+ **kwargs,
+) -> "core.NeuronObject":
+ """Load mesh file into Neuron/List.
This is a thin wrapper around `trimesh.load_mesh` which supports most
- common formats (obj, ply, stl, etc.).
+ commonly used formats (obj, ply, stl, etc.).
Parameters
----------
f : str | iterable
- Filename(s) or folder. If folder must include file
- extension (e.g. `my/dir/*.ply`).
+ Filename(s) or folder. A folder may include a file
+ extension filter (e.g. `my/dir/*.ply`); otherwise all
+ mesh files in the folder will be read.
include_subdirs : bool, optional
If True and `f` is a folder, will also search
subdirectories for meshes.
@@ -59,14 +149,22 @@ def read_mesh(f: Union[str, Iterable],
neurons. Integer will be interpreted as the number of
cores (otherwise defaults to `os.cpu_count() - 2`).
output : "neuron" | "volume" | "trimesh"
- Determines function's output. See Returns.
+ Determines function's output - see `Returns`.
errors : "raise" | "log" | "ignore"
- If "log" or "ignore", errors will not be raised.
- limit : int, optional
- If reading from a folder you can use this parameter to
- read only the first `limit` files. Useful when
- wanting to get a sample from a large library of
- meshes.
+ If "log" or "ignore", errors will not be raised and the
+ mesh will be skipped. Can result in empty output.
+ limit : int | str | slice | list, optional
+ When reading from a folder or archive you can use this parameter to
+ restrict which files are read:
+ - if an integer, will read only the first `limit` mesh files
+ (useful to get a sample from a large library of meshes)
+ - if a string, will interpret it as filename (regex) pattern
+ and only read files that match the pattern; e.g. `limit='.*_R.*'`
+ will only read files that contain `_R` in their filename
+ - if a slice (e.g. `slice(10, 20)`) will read only the files in
+ that range
+ - a list is expected to be a list of filenames to read from
+ the folder/archive
**kwargs
Keyword arguments passed to [`navis.MeshNeuron`][]
or [`navis.Volume`][]. You can use this to e.g.
@@ -74,19 +172,24 @@ def read_mesh(f: Union[str, Iterable],
Returns
-------
- navis.MeshNeuron
+ MeshNeuron
If `output="neuron"` (default).
- navis.Volume
+ Volume
If `output="volume"`.
- trimesh.Trimesh
- If `output='trimesh'`.
- navis.NeuronList
+ Trimesh
+ If `output="trimesh"`.
+ NeuronList
If `output="neuron"` and import has multiple meshes
will return NeuronList of MeshNeurons.
list
If `output!="neuron"` and import has multiple meshes
will return list of Volumes or Trimesh.
+ See Also
+ --------
+ [`navis.read_precomputed`][]
+ Read meshes and skeletons from Neuroglancer's precomputed format.
+
Examples
--------
@@ -107,101 +210,19 @@ def read_mesh(f: Union[str, Iterable],
>>> nl = navis.read_mesh('mesh.obj', output='volume') # doctest: +SKIP
"""
- utils.eval_param(output, name='output',
- allowed_values=('neuron', 'volume', 'trimesh'))
-
- # If is directory, compile list of filenames
- if isinstance(f, str) and '*' in f:
- f, ext = f.split('*')
- f = Path(f).expanduser()
-
- if not f.is_dir():
- raise ValueError(f'{f} does not appear to exist')
-
- if not include_subdirs:
- f = list(f.glob(f'*{ext}'))
- else:
- f = list(f.rglob(f'*{ext}'))
-
- if limit:
- f = f[:limit]
-
- if utils.is_iterable(f):
- # Do not use if there is only a small batch to import
- if isinstance(parallel, str) and parallel.lower() == 'auto':
- if len(f) < 100:
- parallel = False
-
- if parallel:
- # Do not swap this as `isinstance(True, int)` returns `True`
- if isinstance(parallel, (bool, str)):
- n_cores = os.cpu_count() - 2
- else:
- n_cores = int(parallel)
-
- with mp.Pool(processes=n_cores) as pool:
- results = pool.imap(_worker_wrapper, [dict(f=x,
- output=output,
- errors=errors,
- include_subdirs=include_subdirs,
- parallel=False) for x in f],
- chunksize=1)
-
- res = list(config.tqdm(results,
- desc='Importing',
- total=len(f),
- disable=config.pbar_hide,
- leave=config.pbar_leave))
-
- else:
- # If not parallel just import the good 'ole way: sequentially
- res = [read_mesh(x,
- include_subdirs=include_subdirs,
- output=output,
- errors=errors,
- parallel=parallel,
- **kwargs)
- for x in config.tqdm(f, desc='Importing',
- disable=config.pbar_hide,
- leave=config.pbar_leave)]
-
- if output == 'neuron':
- return core.NeuronList([r for r in res if r])
-
- return res
-
- try:
- # Open the file
- fname = '.'.join(os.path.basename(f).split('.')[:-1])
- mesh = tm.load_mesh(f)
-
- if output == 'trimesh':
- return mesh
-
- attrs = {'name': fname, 'origin': f}
- attrs.update(kwargs)
- if output == 'volume':
- return core.Volume(mesh.vertices, mesh.faces, **attrs)
- else:
- return core.MeshNeuron(mesh, **attrs)
- except BaseException as e:
- msg = f'Error reading file {fname}.'
- if errors == 'raise':
- raise ImportError(msg) from e
- elif errors == 'log':
- logger.error(f'{msg}: {e}')
- return
-
+ utils.eval_param(
+ output, name="output", allowed_values=("neuron", "volume", "trimesh")
+ )
-def _worker_wrapper(kwargs):
- """Helper for importing meshes using multiple processes."""
- return read_mesh(**kwargs)
+ reader = MeshReader(fmt=fmt, output=output, errors=errors, attrs=kwargs)
+ return reader.read_any(f, include_subdirs, parallel, limit=limit)
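
Illustrative usage of the reworked, reader-based `read_mesh` (paths and archive names are hypothetical):

>>> import navis
>>> # Read all .ply files in a folder into a NeuronList
>>> nl = navis.read_mesh('meshes/*.ply')  # doctest: +SKIP
>>> # Sample an archive: read only files with "_R" in the name, log failures
>>> nl = navis.read_mesh('meshes.zip', limit='.*_R.*', errors='log')  # doctest: +SKIP
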
-def write_mesh(x: Union['core.NeuronList', 'core.MeshNeuron', 'core.Volume', 'tm.Trimesh'],
- filepath: Optional[str] = None,
- filetype: str = None,
- ) -> None:
+def write_mesh(
+ x: Union["core.NeuronList", "core.MeshNeuron", "core.Volume", "tm.Trimesh"],
+ filepath: Optional[str] = None,
+ filetype: Optional[str] = None,
+) -> None:
"""Export meshes (MeshNeurons, Volumes, Trimeshes) to disk.
Under the hood this is using trimesh to export meshes.
@@ -257,41 +278,44 @@ def write_mesh(x: Union['core.NeuronList', 'core.MeshNeuron', 'core.Volume', 'tm
>>> navis.write_mesh(nl, tmp_dir / 'meshes.zip', filetype='obj')
"""
- ALLOWED_FILETYPES = ('stl', 'ply', 'obj')
if filetype is not None:
- utils.eval_param(filetype, name='filetype', allowed_values=ALLOWED_FILETYPES)
+ utils.eval_param(filetype, name="filetype", allowed_values=MESH_WRITE_EXT)
else:
# See if we can get filetype from filepath
if filepath is not None:
- for f in ALLOWED_FILETYPES:
- if str(filepath).endswith(f'.{f}'):
+ for f in MESH_WRITE_EXT:
+ if str(filepath).endswith(f".{f}"):
filetype = f
break
if not filetype:
- raise ValueError('Must provide mesh type either explicitly via '
- '`filetype` variable or implicitly via the '
- 'file extension in `filepath`')
+ raise ValueError(
+ "Must provide mesh type either explicitly via "
+ "`filetype` variable or implicitly via the "
+ "file extension in `filepath`"
+ )
- writer = base.Writer(_write_mesh, ext=f'.{filetype}')
+ writer = base.Writer(_write_mesh, ext=f".{filetype}")
- return writer.write_any(x,
- filepath=filepath)
+ return writer.write_any(x, filepath=filepath)
-def _write_mesh(x: Union['core.MeshNeuron', 'core.Volume', 'tm.Trimesh'],
- filepath: Optional[str] = None) -> None:
+def _write_mesh(
+ x: Union["core.MeshNeuron", "core.Volume", "tm.Trimesh"],
+ filepath: Optional[str] = None,
+) -> None:
"""Write single mesh to disk."""
if filepath and os.path.isdir(filepath):
if isinstance(x, core.MeshNeuron):
if not x.id:
- raise ValueError('Neuron(s) must have an ID when destination '
- 'is a folder')
- filepath = os.path.join(filepath, f'{x.id}')
+ raise ValueError(
+ "Neuron(s) must have an ID when destination " "is a folder"
+ )
+ filepath = os.path.join(filepath, f"{x.id}")
elif isinstance(x, core.Volume):
- filepath = os.path.join(filepath, f'{x.name}')
+ filepath = os.path.join(filepath, f"{x.name}")
else:
- raise ValueError(f'Unable to generate filename for {type(x)}')
+ raise ValueError(f"Unable to generate filename for {type(x)}")
if isinstance(x, core.MeshNeuron):
mesh = x.trimesh
diff --git a/navis/io/nmx_io.py b/navis/io/nmx_io.py
index f57b85fc..824d3215 100644
--- a/navis/io/nmx_io.py
+++ b/navis/io/nmx_io.py
@@ -29,7 +29,7 @@
# Set up logging
logger = config.get_logger(__name__)
-NODE_COLUMNS = ('node_id', 'label', 'x', 'y', 'z', 'radius', 'parent_id')
+NODE_COLUMNS = ("node_id", "label", "x", "y", "z", "radius", "parent_id")
DEFAULT_PRECISION = 32
DEFAULT_FMT = "{name}.nmx"
@@ -38,28 +38,33 @@ class NMLReader(base.BaseReader):
def __init__(
self,
precision: int = DEFAULT_PRECISION,
- attrs: Optional[Dict[str, Any]] = None
+ attrs: Optional[Dict[str, Any]] = None,
+ errors: str = "raise",
):
- super().__init__(fmt='',
- attrs=attrs,
- file_ext='.nml',
- read_binary=False,
- name_fallback='NML')
+ super().__init__(
+ fmt="",
+ attrs=attrs,
+ file_ext=".nml",
+ read_binary=False,
+ errors=errors,
+ name_fallback="NML",
+ )
int_, float_ = base.parse_precision(precision)
self._dtypes = {
- 'node_id': int_,
- 'parent_id': int_,
- 'label': 'category',
- 'x': float_,
- 'y': float_,
- 'z': float_,
- 'radius': float_,
+ "node_id": int_,
+ "parent_id": int_,
+ "label": "category",
+ "x": float_,
+ "y": float_,
+ "z": float_,
+ "radius": float_,
}
+ @base.handle_errors
def read_buffer(
self, f: IO, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.TreeNeuron':
+ ) -> "core.TreeNeuron":
"""Read .nml buffer into a TreeNeuron.
NML files are XML-encoded files containing data for a single neuron.
@@ -79,7 +84,7 @@ def read_buffer(
def read_nml(
self, f: IO, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.TreeNeuron':
+ ) -> "core.TreeNeuron":
"""Read .nml buffer into a TreeNeuron.
NML files are XML files containing a single neuron.
@@ -103,46 +108,51 @@ def read_nml(
# Copy the attributes dict
for element in root:
- if element.tag == 'thing':
+ if element.tag == "thing":
nodes = pd.DataFrame.from_records([n.attrib for n in element[0]])
edges = pd.DataFrame.from_records([n.attrib for n in element[1]])
- edges = edges.astype(self._dtypes['node_id'])
+ edges = edges.astype(self._dtypes["node_id"])
- nodes.rename({'id': 'node_id'}, axis=1, inplace=True)
- nodes = nodes.astype({k: v for k, v in self._dtypes.items() if k in nodes.columns})
+ nodes.rename({"id": "node_id"}, axis=1, inplace=True)
+ nodes = nodes.astype(
+ {k: v for k, v in self._dtypes.items() if k in nodes.columns}
+ )
G = nx.Graph()
G.add_edges_from(edges.values)
tree = nx.bfs_tree(G, list(G.nodes)[0])
- edges = pd.DataFrame(list(tree.edges), columns=['source', 'target'])
- nodes['parent_id'] = edges.set_index('target').reindex(nodes.node_id.values).source.values
- nodes['parent_id'] = nodes.parent_id.fillna(-1).astype(self._dtypes['node_id'])
- nodes.sort_values('node_id', inplace=True)
+ edges = pd.DataFrame(list(tree.edges), columns=["source", "target"])
+ nodes["parent_id"] = (
+ edges.set_index("target").reindex(nodes.node_id.values).source.values
+ )
+ nodes["parent_id"] = nodes.parent_id.fillna(-1).astype(self._dtypes["node_id"])
+ nodes.sort_values("node_id", inplace=True)
return core.TreeNeuron(
- nodes,
- **(self._make_attributes({'name': 'NML', 'origin': 'nml'}, attrs))
+ nodes, **(self._make_attributes({"name": "NML", "origin": "nml"}, attrs))
)
class NMXReader(NMLReader):
"""This is a version of the NML file reader that reads from zipped archives."""
+
def __init__(
self,
precision: int = DEFAULT_PRECISION,
- attrs: Optional[Dict[str, Any]] = None
+ attrs: Optional[Dict[str, Any]] = None,
+ errors: str = "raise",
):
- super().__init__(precision=precision,
- attrs=attrs)
+ super().__init__(precision=precision, errors=errors, attrs=attrs)
# Overwrite some of the settings
self.read_binary = True
- self.file_ext = '.nmx'
- self.name_fallback = 'NMX'
+ self.file_ext = ".nmx"
+ self.name_fallback = "NMX"
+ @base.handle_errors
def read_buffer(
self, f: IO, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.TreeNeuron':
+ ) -> "core.TreeNeuron":
"""Read .nmx buffer into a TreeNeuron.
NMX files are zip files containing XML-encoded .nml files containing
@@ -164,20 +174,24 @@ def read_buffer(
zip = ZipFile(f)
for f in zip.filelist:
- if f.filename.endswith('.nml') and 'skeleton' in f.filename:
- attrs['file'] = f.filename
- attrs['id'] = f.filename.split('/')[0]
+ if f.filename.endswith(".nml") and "skeleton" in f.filename:
+ attrs["file"] = f.filename
+ attrs["id"] = f.filename.split("/")[0]
return self.read_nml(zip.read(f), attrs=attrs)
- logger.warning(f'Skipped "{f.filename.split("/")[0]}.nmx": failed to '
- 'import skeleton.')
+ logger.warning(
+ f'Skipped "{f.filename.split("/")[0]}.nmx": failed to ' 'import skeleton.'
+ )
-def read_nmx(f: Union[str, pd.DataFrame, Iterable],
- include_subdirs: bool = False,
- parallel: Union[bool, int] = 'auto',
- precision: int = 32,
- limit: Optional[int] = None,
- **kwargs) -> 'core.NeuronObject':
+def read_nmx(
+ f: Union[str, pd.DataFrame, Iterable],
+ include_subdirs: bool = False,
+ parallel: Union[bool, int] = "auto",
+ precision: int = 32,
+ limit: Optional[int] = None,
+ errors: str = "raise",
+ **kwargs,
+) -> "core.NeuronObject":
"""Read NMX files into Neuron/Lists.
NMX is an xml-based format used by pyKNOSSOS.
@@ -204,11 +218,21 @@ def read_nmx(f: Union[str, pd.DataFrame, Iterable],
Precision for data. Defaults to 32 bit integers/floats.
If `None` will let pandas infer data types - this
typically leads to higher than necessary precision.
- limit : int, optional
- If reading from a folder you can use this parameter to
- read only the first `limit` NMX files. Useful if
- wanting to get a sample from a large library of
- skeletons.
+ limit : int | str | slice | list, optional
+ When reading from a folder or archive you can use this parameter to
+ restrict which files are read:
+ - if an integer, will read only the first `limit` NMX files
+ (useful to get a sample from a large library of skeletons)
+ - if a string, will interpret it as filename (regex) pattern
+ and only read files that match the pattern; e.g. `limit='.*_R.*'`
+ will only read files that contain `_R` in their filename
+ - if a slice (e.g. `slice(10, 20)`) will read only the files in
+ that range
+ - a list is expected to be a list of filenames to read from
+ the folder/archive
+ errors : "raise" | "log" | "ignore"
+ If "log" or "ignore", errors will not be raised and the
+ file will be skipped. Can result in empty output.
**kwargs
Keyword arguments passed to the construction of
`navis.TreeNeuron`. You can use this to e.g. set
@@ -224,13 +248,11 @@ def read_nmx(f: Union[str, pd.DataFrame, Iterable],
Read NML file(s).
"""
- reader = NMXReader(precision=precision,
- attrs=kwargs)
+ reader = NMXReader(precision=precision, errors=errors, attrs=kwargs)
# Read neurons
- neurons = reader.read_any(f,
- parallel=parallel,
- limit=limit,
- include_subdirs=include_subdirs)
+ neurons = reader.read_any(
+ f, parallel=parallel, limit=limit, include_subdirs=include_subdirs
+ )
# Failed reads will produce empty neurons which we need to remove
if isinstance(neurons, core.NeuronList):
@@ -239,12 +261,14 @@ def read_nmx(f: Union[str, pd.DataFrame, Iterable],
return neurons
-def read_nml(f: Union[str, pd.DataFrame, Iterable],
- include_subdirs: bool = False,
- parallel: Union[bool, int] = 'auto',
- precision: int = 32,
- limit: Optional[int] = None,
- **kwargs) -> 'core.NeuronObject':
+def read_nml(
+ f: Union[str, pd.DataFrame, Iterable],
+ include_subdirs: bool = False,
+ parallel: Union[bool, int] = "auto",
+ precision: int = 32,
+ limit: Optional[int] = None,
+ **kwargs,
+) -> "core.NeuronObject":
"""Read xml-based NML files into Neuron/Lists.
Parameters
@@ -267,11 +291,18 @@ def read_nml(f: Union[str, pd.DataFrame, Iterable],
Precision for data. Defaults to 32 bit integers/floats.
If `None` will let pandas infer data types - this
typically leads to higher than necessary precision.
- limit : int, optional
- If reading from a folder you can use this parameter to
- read only the first `limit` NML files. Useful if
- wanting to get a sample from a large library of
- skeletons.
+ limit : int | str | slice | list, optional
+ When reading from a folder or archive you can use this
+ parameter to restrict which files are read:
+ - if an integer, will read only the first `limit` NML files
+ (useful to get a sample from a large library of skeletons)
+ - if a string, will interpret it as filename (regex) pattern
+ and only read files that match the pattern; e.g. `limit='.*_R.*'`
+ will only read files that contain `_R` in their filename
+ - if a slice (e.g. `slice(10, 20)`) will read only the files in
+ that range
+ - a list is expected to be a list of filenames to read from
+ the folder/archive
**kwargs
Keyword arguments passed to the construction of
`navis.TreeNeuron`. You can use this to e.g. set
@@ -287,12 +318,10 @@ def read_nml(f: Union[str, pd.DataFrame, Iterable],
Read NMX files (collections of NML files).
"""
- reader = NMLReader(precision=precision,
- attrs=kwargs)
+ reader = NMLReader(precision=precision, attrs=kwargs)
# Read neurons
- neurons = reader.read_any(f,
- parallel=parallel,
- limit=limit,
- include_subdirs=include_subdirs)
+ neurons = reader.read_any(
+ f, parallel=parallel, limit=limit, include_subdirs=include_subdirs
+ )
return neurons
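
A short usage sketch for the updated NML/NMX readers (file names hypothetical):

>>> import navis
>>> # Read the first 10 skeletons from a folder of NMX files, skipping failures
>>> nl = navis.read_nmx('nmx_files/', limit=10, errors='log')  # doctest: +SKIP
>>> # Read a single NML file
>>> n = navis.read_nml('skeleton.nml')  # doctest: +SKIP
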
diff --git a/navis/io/nrrd_io.py b/navis/io/nrrd_io.py
index 45701750..8d768002 100644
--- a/navis/io/nrrd_io.py
+++ b/navis/io/nrrd_io.py
@@ -12,15 +12,14 @@
# GNU General Public License for more details.
import nrrd
-import os
+import io
-import multiprocessing as mp
import numpy as np
-from glob import glob
from pathlib import Path
from typing import Union, Iterable, Optional, Dict, Any
from typing_extensions import Literal
+from urllib3 import HTTPResponse
from .. import config, utils, core
from . import base
@@ -28,11 +27,101 @@
# Set up logging
logger = config.get_logger(__name__)
+DEFAULT_FMT = "{name}.nrrd"
+
+
+class NrrdReader(base.ImageReader):
+ def __init__(
+ self,
+ output: Literal["voxels", "dotprops", "raw"] = "voxels",
+ threshold: Optional[Union[int, float]] = None,
+ thin: bool = False,
+ dotprop_kwargs: Dict[str, Any] = {},
+ fmt: str = DEFAULT_FMT,
+ attrs: Optional[Dict[str, Any]] = None,
+ errors: str = "raise",
+ ):
+ if not fmt.endswith(".nrrd"):
+ raise ValueError('`fmt` must end with ".nrrd"')
+
+ super().__init__(
+ fmt=fmt,
+ attrs=attrs,
+ file_ext=".nrrd",
+ name_fallback="NRRD",
+ read_binary=True,
+ output=output,
+ threshold=threshold,
+ thin=thin,
+ dotprop_kwargs=dotprop_kwargs,
+ errors=errors,
+ )
+
+ def format_output(self, x):
+ # This function replaces the BaseReader.format_output()
+ # This is to avoid trying to convert multiple (image, header) to NeuronList
+ if self.output == "raw":
+ return [n for n in x if n]
+ elif x:
+ return core.NeuronList([n for n in x if n])
+ else:
+ return core.NeuronList([])
+
+ @base.handle_errors
+ def read_buffer(
+ self, f, attrs: Optional[Dict[str, Any]] = None
+ ) -> Union[np.ndarray, "core.Dotprops", "core.VoxelNeuron"]:
+ """Read buffer into (image, header) or a neuron.
+
+ Parameters
+ ----------
+ f : IO
+ Readable buffer (must be bytes).
+ attrs : dict | None
+ Arbitrary attributes to include in the neuron.
+
+ Returns
+ -------
+ core.Dotprops | core.VoxelNeuron | np.ndarray
+
+ """
+ if isinstance(f, HTTPResponse):
+ # urllib3 responses carry raw bytes in `.data`
+ f = io.BytesIO(f.data)
+
+ if isinstance(f, bytes):
+ f = io.BytesIO(f)
+
+ header = nrrd.read_header(f)
+ data = nrrd.read_data(header, f)
+
+ if self.output == "raw":
+ return data, header
+
+ # Try parsing units - this is modelled after the nrrd files you get from
+ # Virtual Fly Brain (VFB)
+ units = None
+ space_units = None
+ voxdim = np.array([1, 1, 1])
+ if "space directions" in header:
+ sd = np.asarray(header["space directions"])
+ if sd.ndim == 2:
+ voxdim = np.diag(sd)[:3]
+ if "space units" in header:
+ space_units = header["space units"]
+ if len(space_units) == 3:
+ units = [f"{m} {u}" for m, u in zip(voxdim, space_units)]
+ else:
+ units = voxdim
+
+ return self.convert_image(data, attrs, header, voxdim, units, space_units)
+
-def write_nrrd(x: 'core.NeuronObject',
- filepath: Union[str, Path],
- compression_level: int = 3,
- attrs: Optional[Dict[str, Any]] = None) -> None:
+def write_nrrd(
+ x: "core.NeuronObject",
+ filepath: Union[str, Path],
+ compression_level: int = 3,
+ attrs: Optional[Dict[str, Any]] = None,
+) -> None:
"""Write VoxelNeurons or Dotprops to NRRD file(s).
Parameters
@@ -104,57 +193,57 @@ def write_nrrd(x: 'core.NeuronObject',
compression_level = int(compression_level)
if (compression_level < 1) or (compression_level > 9):
- raise ValueError('`compression_level` must be 1-9, got '
- f'{compression_level}')
+ raise ValueError("`compression_level` must be 1-9, got " f"{compression_level}")
- writer = base.Writer(_write_nrrd, ext='.nrrd')
+ writer = base.Writer(_write_nrrd, ext=".nrrd")
- return writer.write_any(x,
- filepath=filepath,
- compression_level=compression_level,
- **(attrs or {}))
+ return writer.write_any(
+ x, filepath=filepath, compression_level=compression_level, **(attrs or {})
+ )
-def _write_nrrd(x: Union['core.VoxelNeuron', 'core.Dotprops'],
- filepath: Optional[str] = None,
- compression_level: int = 1,
- **attrs) -> None:
+def _write_nrrd(
+ x: Union["core.VoxelNeuron", "core.Dotprops"],
+ filepath: Optional[str] = None,
+ compression_level: int = 1,
+ **attrs,
+) -> None:
"""Write single neuron to NRRD file."""
if not isinstance(x, (core.VoxelNeuron, core.Dotprops)):
raise TypeError(f'Expected VoxelNeuron or Dotprops, got "{type(x)}"')
header = getattr(x, "nrrd_header", {})
- header['space dimension'] = 3
- header['space directions'] = np.diag(x.units_xyz.magnitude)
- header['space units'] = [str(x.units_xyz.units)] * 3
+ header["space dimension"] = 3
+ header["space directions"] = np.diag(x.units_xyz.magnitude)
+ header["space units"] = [str(x.units_xyz.units)] * 3
header.update(attrs or {})
if isinstance(x, core.VoxelNeuron):
data = x.grid
if data.dtype == bool:
- data = data.astype('uint8')
+ data = data.astype("uint8")
else:
# For dotprops make a horizontal stack from points + vectors
data = np.hstack((x.points, x.vect))
- header['k'] = x.k
-
- nrrd.write(str(filepath),
- data=data,
- header=header,
- compression_level=compression_level)
-
-
-def read_nrrd(f: Union[str, Iterable],
- threshold: Optional[Union[int, float]] = None,
- include_subdirs: bool = False,
- parallel: Union[bool, int] = 'auto',
- output: Union[Literal['voxels'],
- Literal['dotprops'],
- Literal['raw']] = 'voxels',
- errors: Union[Literal['raise'],
- Literal['log'],
- Literal['ignore']] = 'log',
- **kwargs) -> 'core.NeuronObject':
+ header["k"] = x.k
+
+ nrrd.write(
+ str(filepath), data=data, header=header, compression_level=compression_level
+ )
+
+
+def read_nrrd(
+ f: Union[str, Iterable],
+ output: Union[Literal["voxels"], Literal["dotprops"], Literal["raw"]] = "voxels",
+ threshold: Optional[Union[int, float]] = None,
+ thin: bool = False,
+ include_subdirs: bool = False,
+ parallel: Union[bool, int] = "auto",
+ fmt: str = "{name}.nrrd",
+ limit: Optional[int] = None,
+ errors: str = "raise",
+ **dotprops_kwargs,
+) -> "core.NeuronObject":
"""Create Neuron/List from NRRD file.
See [here](http://teem.sourceforge.net/nrrd/format.html) for specs of
@@ -162,13 +251,28 @@ def read_nrrd(f: Union[str, Iterable],
Parameters
----------
- f : str | iterable
- Filename(s) or folder. If folder, will import all
- `.nrrd` files.
+ f : str | list thereof
+ Filename, folder or URL:
+ - if folder, will import all `.nrrd` files
+ - if a `.zip`, `.tar` or `.tar.gz` archive, will read all
+ NRRD files from the archive
+ - if a URL (http:// or https://), will download the
+ file and import it
+ - FTP address (ftp://) can point to a folder or a single
+ file
+ See also `limit` parameter to read only a subset of files.
+ output : "voxels" | "dotprops" | "raw"
+ Determines function's output. See Returns for details.
threshold : int | float | None
For `output='dotprops'` only: a threshold to filter
- low intensity voxels. If `None`, no threshold is
- applied and all values > 0 are converted to points.
+ low intensity voxels.
+ - if `None`, all values > 0 are converted to points
+ - if >=1, all values >= threshold are converted to points
+ - if <1, all values >= threshold * max(data) are converted
+ thin : bool
+ For `output='dotprops'` only: if True, will thin the
+ point cloud using `skimage.morphology.skeletonize`
+ after thresholding. Requires `scikit-image`.
include_subdirs : bool, optional
If True and `f` is a folder, will also search
subdirectories for `.nrrd` files.
@@ -179,13 +283,44 @@ def read_nrrd(f: Union[str, Iterable],
considerably slower for imports of small numbers of
neurons. Integer will be interpreted as the number of
cores (otherwise defaults to `os.cpu_count() - 2`).
- output : "voxels" | "dotprops" | "raw"
- Determines function's output. See Returns for details.
+ fmt : str
+ Formatter to specify how filenames are parsed into neuron
+ attributes. Some illustrative examples:
+ - `{name}` (default) uses the filename
+ (minus the suffix) as the neuron's name property
+ - `{id}` uses the filename as the neuron's ID
+ property
+ - `{name,id}` uses the filename as the neuron's
+ name and ID properties
+ - `{name}.{id}` splits the filename at a "."
+ and uses the first part as name and the second as ID
+ - `{name,id:int}` same as above but converts
+ into integer for the ID
+ - `{name}_{myproperty}` splits the filename at
+ "_" and uses the first part as name and as a
+ generic "myproperty" property
+ - `{name}_{}_{id}` splits the filename at
+ "_" and uses the first part as name and the last as
+ ID. The middle part is ignored.
+
+ Throws a ValueError if pattern can't be found in
+ filename.
+ limit : int | str | slice | list, optional
+ When reading from a folder or archive you can use this parameter to
+ restrict which files are read:
+ - if an integer, will read only the first `limit` NRRD files
+ (useful to get a sample from a large library of images)
+ - if a string, will interpret it as filename (regex) pattern
+ and only read files that match the pattern; e.g. `limit='.*_R.*'`
+ will only read files that contain `_R` in their filename
+ - if a slice (e.g. `slice(10, 20)`) will read only the files in
+ that range
+ - a list is expected to be a list of filenames to read from
+ the folder/archive
errors : "raise" | "log" | "ignore"
- If "log" or "ignore", errors will not be raised but
- instead empty neuron will be returned.
-
- **kwargs
+ If "log" or "ignore", errors will not be raised and the
+ file will be skipped. Can result in empty output.
+ **dotprops_kwargs
Keyword arguments passed to [`navis.make_dotprops`][]
if `output='dotprops'`. Use this to adjust e.g. the
number of nearest neighbors used for calculating the
@@ -217,145 +352,24 @@ def read_nrrd(f: Union[str, Iterable],
file.
"""
- utils.eval_param(output, name='output',
- allowed_values=('raw', 'dotprops', 'voxels'))
-
- # If is directory, compile list of filenames
- if isinstance(f, (str, Path)) and Path(f).expanduser().is_dir():
- f = Path(f).expanduser()
- if not include_subdirs:
- f = [os.path.join(f, x) for x in os.listdir(f) if
- os.path.isfile(os.path.join(f, x)) and x.endswith('.nrrd')]
- else:
- f = [y for x in os.walk(f) for y in glob(os.path.join(x[0], '*.nrrd'))]
-
- if utils.is_iterable(f):
- # Do not use if there is only a small batch to import
- if isinstance(parallel, str) and parallel.lower() == 'auto':
- if len(f) < 10:
- parallel = False
-
- if parallel:
- # Do not swap this as `isinstance(True, int)` returns `True`
- if isinstance(parallel, (bool, str)):
- n_cores = os.cpu_count() - 2
- else:
- n_cores = int(parallel)
-
- with mp.Pool(processes=n_cores) as pool:
- results = pool.imap(_worker_wrapper, [dict(f=x,
- threshold=threshold,
- output=output,
- errors=errors,
- include_subdirs=include_subdirs,
- parallel=False) for x in f],
- chunksize=1)
-
- res = list(config.tqdm(results,
- desc='Importing',
- total=len(f),
- disable=config.pbar_hide,
- leave=config.pbar_leave))
-
- else:
- # If not parallel just import the good 'ole way: sequentially
- res = [read_nrrd(x,
- threshold=threshold,
- include_subdirs=include_subdirs,
- output=output,
- errors=errors,
- parallel=parallel,
- **kwargs)
- for x in config.tqdm(f, desc='Importing',
- disable=config.pbar_hide,
- leave=config.pbar_leave)]
-
- if output == 'raw':
- return [r[0] for r in res], [r[1] for r in res]
-
- return core.NeuronList([r for r in res if r])
-
- # Open the file
- f = str(Path(f).expanduser())
- fname = os.path.basename(f).split('.')[0]
- data, header = nrrd.read(f)
-
- if output == 'raw':
- return data, header
-
- # Try parsing units - this is modelled after the nrrd files you get from
- # Virtual Fly Brain (VFB)
- units = None
- su = None
- voxdim = np.array([1, 1, 1])
- if 'space directions' in header:
- sd = np.asarray(header['space directions'])
- if sd.ndim == 2:
- voxdim = np.diag(sd)[:3]
- if 'space units' in header:
- su = header['space units']
- if len(su) == 3:
- units = [f'{m} {u}' for m, u in zip(voxdim, su)]
- else:
- units = voxdim
-
- try:
- if output == 'dotprops':
- # If we're trying to get voxels from an image
- if data.ndim == 3:
- if threshold:
- data = data >= threshold
-
- # Convert data to x/y/z coordinates
- # Note we need to multiply units before creating the Dotprops
- # - otherwise the KNN will be wrong
- x, y, z = np.where(data)
- points = np.vstack((x, y, z)).T
- points = points * voxdim
-
- x = core.make_dotprops(points, **kwargs)
- elif data.ndim == 2:
- if data.shape[1] == 3:
- points, vect, alpha = data, None, None
- elif data.shape[1] == 6:
- points, vect, alpha = data[:, :3], data[:, 3:6], None
- elif data.shape[1] == 7:
- points, vect, alpha = data[:, :3], data[:, 3:6], data[:, 6]
- else:
- raise ValueError('Expected data to be either (N, 3), (N, 6) '
- f'or (N, 7) but NRRD file contains {data.shape}')
- # Get `k` either from provided kwargs or the file's header
- k = kwargs.pop('k', header.get('k', 20))
-
- x = core.Dotprops(points, k=k, vect=vect, alpha=alpha, **kwargs)
- else:
- raise ValueError('Data must be 2- or 3-dimensional to extract '
- f'Dotprops, got {data.ndim}')
-
- if su and len(su) == 3:
- x.units = [f'1 {s}' for s in su]
- else:
- if data.ndim == 2:
- logger.warning(f'Data in NRRD file is of shape {data.shape} - '
- 'i.e. 2D. Could this be a point cloud/dotprops '
- 'instead of voxels?')
- x = core.VoxelNeuron(data, units=units)
- except BaseException as e:
- msg = f'Error converting file {fname} to neuron.'
- if errors == 'raise':
- raise ImportError(msg) from e
- elif errors == 'log':
- logger.error(f'{msg}: {e}')
- return
-
- # Add some additional properties
- x.name = fname
- x.origin = f
- x.nrrd_header = header
-
- return x
-
-
-def _worker_wrapper(kwargs):
- """Helper for importing NRRDs using multiple processes."""
- return read_nrrd(**kwargs)
+ if thin:
+ try:
+ from skimage.morphology import skeletonize
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ "The 'thin' option requires 'scikit-image' to be installed:\n"
+ " pip install scikit-image -U"
+ )
+
+ utils.eval_param(
+ output, name="output", allowed_values=("raw", "dotprops", "voxels")
+ )
+
+ if parallel == "auto":
+ # Set a lower threshold of 10 on parallel processing for NRRDs (default is 200)
+ parallel = ("auto", 10)
+
+ reader = NrrdReader(
+ output=output,
+ threshold=threshold,
+ thin=thin,
+ fmt=fmt,
+ errors=errors,
+ dotprop_kwargs=dotprops_kwargs,
+ )
+ return reader.read_any(f, include_subdirs, parallel, limit=limit)
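
For illustration, how the new options combine (paths hypothetical; `thin=True` additionally requires scikit-image; `k` is forwarded to `navis.make_dotprops`):

>>> import navis
>>> # Single file -> VoxelNeuron, with "{name}.nrrd" parsed into the name property
>>> vx = navis.read_nrrd('neuron.nrrd', output='voxels')  # doctest: +SKIP
>>> # Folder -> Dotprops: threshold at half the max intensity, then thin
>>> dp = navis.read_nrrd('images/', output='dotprops',
...                      threshold=0.5, thin=True, k=20)  # doctest: +SKIP
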
diff --git a/navis/io/pq_io.py b/navis/io/pq_io.py
index a23b16a2..16fa6434 100644
--- a/navis/io/pq_io.py
+++ b/navis/io/pq_io.py
@@ -57,9 +57,10 @@ def scan_parquet(file: Union[str, Path]):
"""
try:
import pyarrow.parquet as pq
- except ImportError:
- raise ImportError('Reading parquet files requires the pyarrow library:\n'
- ' pip3 install pyarrow')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ 'Reading parquet files requires the pyarrow library:\n'
+ ' pip3 install pyarrow')
f = Path(file).expanduser()
if not f.is_file():
@@ -154,9 +155,10 @@ def read_parquet(f: Union[str, Path],
try:
import pyarrow.parquet as pq
- except ImportError:
- raise ImportError('Reading parquet files requires the pyarrow library:\n'
- ' pip3 install pyarrow')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ 'Reading parquet files requires the pyarrow library:\n'
+ ' pip3 install pyarrow')
if limit is not None:
if subset not in (None, False):
@@ -381,12 +383,11 @@ def _write_parquet_skeletons(x: 'core.TreeNeuron',
try:
import pyarrow as pa
import pyarrow.parquet as pq
- except ImportError:
- raise ImportError('Writing parquet files requires the pyarrow library:\n'
- ' pip3 install pyarrow')
-
- # Make sure we're working with a list, not a single neuron
- x = core.NeuronList(x)
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ 'Writing parquet files requires the pyarrow library:\n'
+ ' pip3 install pyarrow'
+ )
# Generate node table
nodes = x.nodes[x.nodes.columns[np.isin(x.nodes.columns, SKELETON_COLUMNS)]]
@@ -426,9 +427,11 @@ def _write_parquet_dotprops(x: 'core.Dotprops',
try:
import pyarrow as pa
import pyarrow.parquet as pq
- except ImportError:
- raise ImportError('Writing parquet files requires the pyarrow library:\n'
- ' pip3 install pyarrow')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ 'Writing parquet files requires the pyarrow library:\n'
+ ' pip3 install pyarrow'
+ )
# Make sure we're working with a list, not a single neuron
x = core.NeuronList(x)
diff --git a/navis/io/precomputed_io.py b/navis/io/precomputed_io.py
index 76f3b0c2..3449c350 100644
--- a/navis/io/precomputed_io.py
+++ b/navis/io/precomputed_io.py
@@ -23,18 +23,19 @@
from pathlib import Path
from functools import lru_cache
-from typing import Union, Dict, Optional, Any, IO, List
+from typing import Union, Dict, Optional, Any, IO
from typing_extensions import Literal
-from zipfile import ZipFile, ZipInfo
+from zipfile import ZipFile
-from .. import config, utils, core
+from .. import utils, core
from . import base
try:
import zlib
import zipfile
+
compression = zipfile.ZIP_DEFLATED
-except ImportError:
+except ModuleNotFoundError:
compression = zipfile.ZIP_STORED
@@ -54,13 +55,13 @@ def is_valid_file(self, file):
file = str(file)
# Drop anything with a file extension or hidden files (e.g. ".DS_store")
- if '.' in file:
+ if "." in file:
return False
# Ignore the info file
- if file == 'info':
+ if file == "info":
return False
# Ignore manifests
- if file.endswith(':0'):
+ if file.endswith(":0"):
return False
return True
@@ -69,17 +70,22 @@ class PrecomputedMeshReader(PrecomputedReader):
def __init__(
self,
fmt: str = DEFAULT_FMT,
- attrs: Optional[Dict[str, Any]] = None
+ attrs: Optional[Dict[str, Any]] = None,
+ errors: str = "raise",
):
- super().__init__(fmt=fmt,
- attrs=attrs,
- file_ext='',
- name_fallback='mesh',
- read_binary=True)
-
+ super().__init__(
+ fmt=fmt,
+ attrs=attrs,
+ file_ext="",
+ name_fallback="mesh",
+ read_binary=True,
+ errors=errors,
+ )
+
+ @base.handle_errors
def read_buffer(
self, f: IO, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.MeshNeuron':
+ ) -> "core.MeshNeuron":
"""Read buffer into a MeshNeuron.
Parameters
@@ -94,17 +100,22 @@ def read_buffer(
core.MeshNeuron
"""
if not isinstance(f.read(0), bytes):
- raise ValueError(f'Expected bytes, got {type(f.read(0))}')
+ raise ValueError(f"Expected bytes, got {type(f.read(0))}")
num_vertices = np.frombuffer(f.read(4), np.uint32)[0]
- vertices = np.frombuffer(f.read(int(3 * 4 * num_vertices)),
- np.float32).reshape(-1, 3)
- faces = np.frombuffer(f.read(),
- np.uint32).reshape(-1, 3)
+ vertices = np.frombuffer(f.read(int(3 * 4 * num_vertices)), np.float32).reshape(
+ -1, 3
+ )
+ faces = np.frombuffer(f.read(), np.uint32).reshape(-1, 3)
- return core.MeshNeuron({'vertices': vertices, 'faces': faces},
- **(self._make_attributes({'name': self.name_fallback,
- 'origin': 'DataFrame'}, attrs)))
+ return core.MeshNeuron(
+ {"vertices": vertices, "faces": faces},
+ **(
+ self._make_attributes(
+ {"name": self.name_fallback, "origin": "DataFrame"}, attrs
+ )
+ ),
+ )
class PrecomputedSkeletonReader(PrecomputedReader):
@@ -112,19 +123,23 @@ def __init__(
self,
fmt: str = DEFAULT_FMT,
attrs: Optional[Dict[str, Any]] = None,
- info: Dict[str, Any] = {}
+ info: Dict[str, Any] = {},
+ errors: str = "raise",
):
- super().__init__(fmt=fmt,
- attrs=attrs,
- file_ext='',
- name_fallback='skeleton',
- read_binary=True)
+ super().__init__(
+ fmt=fmt,
+ attrs=attrs,
+ file_ext="",
+ name_fallback="skeleton",
+ read_binary=True,
+ errors=errors,
+ )
self.info = info
+ @base.handle_errors
def read_buffer(
- self,
- f: IO, attrs: Optional[Dict[str, Any]] = None
- ) -> 'core.TreeNeuron':
+ self, f: IO, attrs: Optional[Dict[str, Any]] = None
+ ) -> "core.TreeNeuron":
"""Read buffer into a TreeNeuron.
Parameters
@@ -140,40 +155,42 @@ def read_buffer(
"""
if not isinstance(f.read(0), bytes):
- raise ValueError(f'Expected bytes, got {type(f.read(0))}')
+ raise ValueError(f"Expected bytes, got {type(f.read(0))}")
num_nodes = np.frombuffer(f.read(4), np.uint32)[0]
num_edges = np.frombuffer(f.read(4), np.uint32)[0]
- nodes = np.frombuffer(f.read(int(3 * 4 * num_nodes)),
- np.float32).reshape(-1, 3)
- edges = np.frombuffer(f.read(int(2 * 4 * num_edges)),
- np.uint32).reshape(-1, 2)
+ nodes = np.frombuffer(f.read(int(3 * 4 * num_nodes)), np.float32).reshape(-1, 3)
+ edges = np.frombuffer(f.read(int(2 * 4 * num_edges)), np.uint32).reshape(-1, 2)
swc = self.make_swc(nodes, edges)
# Check for malformed vertex attributes (should be list of dicts)
- if isinstance(self.info.get('vertex_attributes', None), dict):
- self.info['vertex_attributes'] = [self.info['vertex_attributes']]
+ if isinstance(self.info.get("vertex_attributes", None), dict):
+ self.info["vertex_attributes"] = [self.info["vertex_attributes"]]
# Parse additional vertex attributes if specified as per the info file
- for attr in self.info.get('vertex_attributes', []):
- dtype = np.dtype(attr['data_type'])
- n_comp = attr['num_components']
- values = np.frombuffer(f.read(int(n_comp * dtype.itemsize * num_nodes)),
- dtype).reshape(-1, n_comp)
+ for attr in self.info.get("vertex_attributes", []):
+ dtype = np.dtype(attr["data_type"])
+ n_comp = attr["num_components"]
+ values = np.frombuffer(
+ f.read(int(n_comp * dtype.itemsize * num_nodes)), dtype
+ ).reshape(-1, n_comp)
if n_comp == 1:
- swc[attr['id']] = values.flatten()
+ swc[attr["id"]] = values.flatten()
else:
for i in range(n_comp):
swc[f"{attr['id']}_{i}"] = values[:, i]
- return core.TreeNeuron(swc,
- **(self._make_attributes({'name': self.name_fallback,
- 'origin': 'DataFrame'}, attrs)))
+ return core.TreeNeuron(
+ swc,
+ **(
+ self._make_attributes(
+ {"name": self.name_fallback, "origin": "DataFrame"}, attrs
+ )
+ ),
+ )
- def make_swc(
- self, nodes: np.ndarray, edges: np.ndarray
- ) -> pd.DataFrame:
+ def make_swc(self, nodes: np.ndarray, edges: np.ndarray) -> pd.DataFrame:
"""Make SWC table from nodes and edges.
Parameters
@@ -186,25 +203,28 @@ def make_swc(
pandas.DataFrame
"""
swc = pd.DataFrame()
- swc['node_id'] = np.arange(len(nodes))
- swc['x'], swc['y'], swc['z'] = nodes[:, 0], nodes[:, 1], nodes[:, 2]
+ swc["node_id"] = np.arange(len(nodes))
+ swc["x"], swc["y"], swc["z"] = nodes[:, 0], nodes[:, 1], nodes[:, 2]
edge_dict = dict(zip(edges[:, 1], edges[:, 0]))
- swc['parent_id'] = swc.node_id.map(lambda x: edge_dict.get(x, -1)).astype(np.int32)
+ swc["parent_id"] = swc.node_id.map(lambda x: edge_dict.get(x, -1)).astype(
+ np.int32
+ )
return swc
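
A small worked example of the parent-mapping logic in `make_swc`: each `(parent, child)` edge becomes a lookup in `edge_dict`, and nodes without an incoming edge (roots) default to -1 (toy arrays, for illustration only):

>>> import numpy as np
>>> edges = np.array([[0, 1], [1, 2]], dtype='uint32')  # (parent, child) pairs
>>> edge_dict = dict(zip(edges[:, 1], edges[:, 0]))     # child -> parent
>>> [int(edge_dict.get(i, -1)) for i in range(3)]       # parent_id per node
[-1, 0, 1]
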
-def read_precomputed(f: Union[str, io.BytesIO],
- datatype: Union[Literal['auto'],
- Literal['mesh'],
- Literal['skeleton']] = 'auto',
- include_subdirs: bool = False,
- fmt: str = '{id}',
- info: Union[bool, str, dict] = True,
- limit: Optional[int] = None,
- parallel: Union[bool, int] = 'auto',
- **kwargs) -> 'core.NeuronObject':
+def read_precomputed(
+ f: Union[str, io.BytesIO],
+ datatype: Union[Literal["auto"], Literal["mesh"], Literal["skeleton"]] = "auto",
+ include_subdirs: bool = False,
+ fmt: str = "{id}",
+ info: Union[bool, str, dict] = True,
+ limit: Optional[int] = None,
+ parallel: Union[bool, int] = "auto",
+ errors: Literal["raise", "log", "ignore"] = "raise",
+ **kwargs,
+) -> "core.NeuronObject":
"""Read skeletons and meshes from neuroglancer's precomputed format.
Follows the formats specified
@@ -224,8 +244,7 @@ def read_precomputed(f: Union[str, io.BytesIO],
If True and `f` is a folder, will also search
subdirectories for binary files.
fmt : str
- Formatter to specify what files to look for (when `f` is
- directory) and how they are parsed into neuron
+ Formatter to specify how filenames are parsed into neuron
attributes. Some illustrative examples:
- `{name}` (default) uses the filename
(minus the suffix) as the neuron's name property
@@ -245,18 +264,25 @@ def read_precomputed(f: Union[str, io.BytesIO],
ID. The middle part is ignored.
Throws a ValueError if pattern can't be found in
- filename. Ignored for DataFrames.
+ filename.
info : bool | str | dict
An info file describing the data:
- `True` = will look for `info` file in base folder
- `False` = do not use/look for `info` file
- `str` = filepath to `info` file
- `dict` = already parsed info file
- limit : int, optional
- If reading from a folder you can use this parameter to
- read only the first `limit` files. Useful if
- wanting to get a sample from a large library of
- skeletons/meshes.
+ limit : int | str | slice | list, optional
+ When reading from a folder or archive you can use this parameter to
+ restrict which files are read:
+ - if an integer, will read only the first `limit` files
+ (useful to get a sample from a large library of neurons)
+ - if a string, will interpret it as filename (regex) pattern
+ and only read files that match the pattern; e.g. `limit='.*_R.*'`
+ will only read files that contain `_R` in their filename
+ - if a slice (e.g. `slice(10, 20)`) will read only the files in
+ that range
+ - a list is expected to be a list of filenames to read from
+ the folder/archive
parallel : "auto" | bool | int
Defaults to `auto` which means only use parallel
processing if more than 200 files are imported. Spawning
@@ -265,6 +291,9 @@ def read_precomputed(f: Union[str, io.BytesIO],
neurons. Integer will be interpreted as the
number of cores (otherwise defaults to
`os.cpu_count() // 2`).
+ errors : "raise" | "log" | "ignore"
+ If "log" or "ignore", errors will not be raised and the
+ file will be skipped. Can result in empty output.
**kwargs
Keyword arguments passed to the construction of the
neurons. You can use this to e.g. set meta data such
@@ -279,26 +308,30 @@ def read_precomputed(f: Union[str, io.BytesIO],
--------
[`navis.write_precomputed`][]
Export neurons/volumes to precomputed format.
+ [`navis.read_mesh`][]
+ Read common mesh formats (obj, stl, etc).
"""
- utils.eval_param(datatype, name='datatype', allowed_values=('skeleton',
- 'mesh',
- 'auto'))
+ utils.eval_param(
+ datatype, name="datatype", allowed_values=("skeleton", "mesh", "auto")
+ )
# See if we can get the info file from somewhere
if info is True and not isinstance(f, bytes):
# Find info in zip archive
- if str(f).endswith('.zip'):
- with ZipFile(Path(f).expanduser(), 'r') as zip:
- if 'info' in [f.filename for f in zip.filelist]:
- info = json.loads(zip.read('info').decode())
- elif datatype == 'auto':
- raise ValueError('No `info` file found in zip file. Please '
- 'specify data type using the `datatype` '
- 'parameter.')
+ if str(f).endswith(".zip"):
+ with ZipFile(Path(f).expanduser(), "r") as zip:
+ if "info" in [f.filename for f in zip.filelist]:
+ info = json.loads(zip.read("info").decode())
+ elif datatype == "auto":
+ raise ValueError(
+ "No `info` file found in zip file. Please "
+ "specify data type using the `datatype` "
+ "parameter."
+ )
# Try loading info from URL
elif utils.is_url(str(f)):
- base_url = '/'.join(str(f).split('/')[:-1])
+ base_url = "/".join(str(f).split("/")[:-1])
info = _fetch_info_file(base_url, raise_missing=False)
# Try loading info from parent path
else:
@@ -306,9 +339,9 @@ def read_precomputed(f: Union[str, io.BytesIO],
# Find first existing root
while not fp.is_dir():
fp = fp.parent
- fp = fp / 'info'
+ fp = fp / "info"
if fp.is_file():
- with open(fp, 'r') as info_file:
+ with open(fp, "r") as info_file:
info = json.load(info_file)
# At this point we should have a dictionary - even if it's empty
@@ -316,30 +349,36 @@ def read_precomputed(f: Union[str, io.BytesIO],
info = {}
# Parse data type from info file (if required)
- if datatype == 'auto':
- if '@type' not in info:
- raise ValueError('Either no `info` file found or it does not specify '
- 'a data type. Please provide data type using the '
- '`datatype` parameter.')
-
- if info.get('@type', None) == 'neuroglancer_legacy_mesh':
- datatype = 'mesh'
- elif info.get('@type', None) == 'neuroglancer_skeletons':
- datatype = 'skeleton'
+ if datatype == "auto":
+ if "@type" not in info:
+ raise ValueError(
+ "Either no `info` file found or it does not specify "
+ "a data type. Please provide data type using the "
+ "`datatype` parameter."
+ )
+
+ if info.get("@type", None) == "neuroglancer_legacy_mesh":
+ datatype = "mesh"
+ elif info.get("@type", None) == "neuroglancer_skeletons":
+ datatype = "skeleton"
else:
- raise ValueError('Data type specified in `info` file unknown: '
- f'{info.get("@type", None)}. Please provide data '
- 'type using the `datatype` parameter.')
+ raise ValueError(
+ 'Data type specified in `info` file unknown: '
+ f'{info.get("@type", None)}. Please provide data '
+ 'type using the `datatype` parameter.'
+ )
if isinstance(f, bytes):
f = io.BytesIO(f)
- if datatype == 'skeleton':
+ if datatype == "skeleton":
if not isinstance(info, dict):
info = {}
- reader = PrecomputedSkeletonReader(fmt=fmt, attrs=kwargs, info=info)
+ reader = PrecomputedSkeletonReader(
+ fmt=fmt, errors=errors, attrs=kwargs, info=info
+ )
else:
- reader = PrecomputedMeshReader(fmt=fmt, attrs=kwargs)
+ reader = PrecomputedMeshReader(fmt=fmt, errors=errors, attrs=kwargs)
return reader.read_any(f, include_subdirs, parallel, limit=limit)
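
Example calls under the new error handling (paths hypothetical):

>>> import navis
>>> # Skeletons from a zip archive with an `info` file at its root
>>> nl = navis.read_precomputed('skeletons.zip')  # doctest: +SKIP
>>> # No info file available: declare the data type and skip unreadable files
>>> nl = navis.read_precomputed('meshes/', datatype='mesh', errors='log')  # doctest: +SKIP
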
@@ -350,28 +389,28 @@ class PrecomputedWriter(base.Writer):
def write_any(self, x, filepath, write_info=True, **kwargs):
"""Write any to file. Default entry point."""
# First write the actual neurons
- kwargs['write_info'] = False
+ kwargs["write_info"] = False
super().write_any(x, filepath=filepath, **kwargs)
# Write info file to the correct directory/zipfile
if write_info:
add_props = {}
- if kwargs.get('radius', False):
- add_props['vertex_attributes'] = [{'id': 'radius',
- 'data_type': 'float32',
- 'num_components': 1}]
+ if kwargs.get("radius", False):
+ add_props["vertex_attributes"] = [
+ {"id": "radius", "data_type": "float32", "num_components": 1}
+ ]
- if str(self.path).endswith('.zip'):
- with ZipFile(self.path, mode='a') as zf:
+ if str(self.path).endswith(".zip"):
+ with ZipFile(self.path, mode="a") as zf:
# Context-manager will remove temporary directory and its contents
with tempfile.TemporaryDirectory() as tempdir:
# Write info to zip
if write_info:
# Generate temporary filename
- f = os.path.join(tempdir, 'info')
+ f = os.path.join(tempdir, "info")
write_info_file(x, f, add_props=add_props)
# Add file to zip
- zf.write(f, arcname='info', compress_type=compression)
+ zf.write(f, arcname="info", compress_type=compression)
else:
fp = self.path
# Find the first existing root directory
@@ -381,11 +420,13 @@ def write_any(self, x, filepath, write_info=True, **kwargs):
write_info_file(x, fp, add_props=add_props)
-def write_precomputed(x: Union['core.NeuronList', 'core.TreeNeuron', 'core.MeshNeuron', 'core.Volume'],
- filepath: Optional[str] = None,
- write_info: bool = True,
- write_manifest: bool = False,
- radius: bool = False) -> None:
+def write_precomputed(
+ x: Union["core.NeuronList", "core.TreeNeuron", "core.MeshNeuron", "core.Volume"],
+ filepath: Optional[str] = None,
+ write_info: bool = True,
+ write_manifest: bool = False,
+ radius: bool = False,
+) -> None:
"""Export skeletons or meshes to neuroglancer's (legacy) precomputed format.
Note that you should not mix meshes and skeletons in the same folder!
@@ -458,35 +499,39 @@ def write_precomputed(x: Union['core.NeuronList', 'core.TreeNeuron', 'core.MeshN
"""
writer = PrecomputedWriter(_write_precomputed, ext=None)
- return writer.write_any(x,
- filepath=filepath,
- write_info=write_info,
- write_manifest=write_manifest,
- radius=radius)
-
-
-def _write_precomputed(x: Union['core.TreeNeuron', 'core.MeshNeuron', 'core.Volume'],
- filepath: Optional[str] = None,
- write_info: bool = True,
- write_manifest: bool = False,
- radius: bool = False) -> None:
+ return writer.write_any(
+ x,
+ filepath=filepath,
+ write_info=write_info,
+ write_manifest=write_manifest,
+ radius=radius,
+ )
+
+
+def _write_precomputed(
+ x: Union["core.TreeNeuron", "core.MeshNeuron", "core.Volume"],
+ filepath: Optional[str] = None,
+ write_info: bool = True,
+ write_manifest: bool = False,
+ radius: bool = False,
+) -> None:
"""Write single neuron to neuroglancer's precomputed format."""
if filepath and os.path.isdir(filepath):
if isinstance(x, core.BaseNeuron):
if not x.id:
- raise ValueError('Neuron(s) must have an ID when destination '
- 'is a folder')
- filepath = os.path.join(filepath, f'{x.id}')
+ raise ValueError(
+ "Neuron(s) must have an ID when destination " "is a folder"
+ )
+ filepath = os.path.join(filepath, f"{x.id}")
elif isinstance(x, core.Volume):
- filepath = os.path.join(filepath, f'{x.name}')
+ filepath = os.path.join(filepath, f"{x.name}")
else:
- raise ValueError(f'Unable to generate filename for {type(x)}')
+ raise ValueError(f"Unable to generate filename for {type(x)}")
if isinstance(x, core.TreeNeuron):
return _write_skeleton(x, filepath, radius=radius)
elif utils.is_mesh(x):
- return _write_mesh(x.vertices, x.faces, filepath,
- write_manifest=write_manifest)
+ return _write_mesh(x.vertices, x.faces, filepath, write_manifest=write_manifest)
else:
raise TypeError(f'Unable to write data of type "{type(x)}"')
@@ -507,53 +552,54 @@ def write_info_file(data, filepath, add_props={}):
if utils.is_iterable(data):
types = list(set([type(d) for d in data]))
if len(types) > 1:
- raise ValueError('Unable to write info file for mixed data: '
- f'{data.types}')
+ raise ValueError(
+ "Unable to write info file for mixed data: " f"{data.types}"
+ )
data = data[0]
if utils.is_mesh(data):
- info['@type'] = 'neuroglancer_legacy_mesh'
+ info["@type"] = "neuroglancer_legacy_mesh"
elif isinstance(data, core.TreeNeuron):
- info['@type'] = 'neuroglancer_skeletons'
+ info["@type"] = "neuroglancer_skeletons"
# If we know the units add transform from "stored model"
# to "model space" which is supposed to be nm
if not data.units.dimensionless:
- u = data.units.to('1 nm').magnitude
+ u = data.units.to("1 nm").magnitude
else:
u = 1
tr = np.zeros((4, 3), dtype=int)
tr[:3, :3] = np.diag([u, u, u])
- info['transform'] = tr.T.flatten().tolist()
+ info["transform"] = tr.T.flatten().tolist()
else:
raise TypeError(f'Unable to write info file for data of type "{type(data)}"')
info.update(add_props)
- if not str(filepath).endswith('/info'):
- filepath = os.path.join(filepath, 'info')
- with open(filepath, 'w') as f:
+ if not str(filepath).endswith("/info"):
+ filepath = os.path.join(filepath, "info")
+ with open(filepath, "w") as f:
json.dump(info, f)
def _write_mesh(vertices, faces, filename, write_manifest=False):
"""Write mesh to precomputed binary format."""
# Make sure we are working with the correct data types
- vertices = np.asarray(vertices, dtype='float32')
- faces = np.asarray(faces, dtype='uint32')
+ vertices = np.asarray(vertices, dtype="float32")
+ faces = np.asarray(faces, dtype="uint32")
n_vertices = np.uint32(vertices.shape[0])
vertex_index_format = [n_vertices, vertices, faces]
- results = b''.join([array.tobytes('C') for array in vertex_index_format])
+ results = b"".join([array.tobytes("C") for array in vertex_index_format])
if filename:
filename = Path(filename)
- with open(filename, 'wb') as f:
+ with open(filename, "wb") as f:
f.write(results)
if write_manifest:
- with open(filename.parent / f'{filename.name}:0', 'w') as f:
- json.dump({'fragments': [filename.name]}, f)
+ with open(filename.parent / f"{filename.name}:0", "w") as f:
+ json.dump({"fragments": [filename.name]}, f)
else:
return results
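
The legacy mesh layout written here (uint32 vertex count, float32 vertices, uint32 faces) mirrors what `PrecomputedMeshReader.read_buffer` parses; a minimal decode sketch, with `results` standing in for the bytes produced above:

>>> import io
>>> import numpy as np
>>> buf = io.BytesIO(results)  # doctest: +SKIP
>>> n_verts = np.frombuffer(buf.read(4), np.uint32)[0]  # doctest: +SKIP
>>> verts = np.frombuffer(buf.read(int(3 * 4 * n_verts)), np.float32).reshape(-1, 3)  # doctest: +SKIP
>>> faces = np.frombuffer(buf.read(), np.uint32).reshape(-1, 3)  # doctest: +SKIP
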
@@ -563,25 +609,25 @@ def _write_skeleton(x, filename, radius=False):
# Below code modified from:
# https://github.com/google/neuroglancer/blob/master/python/neuroglancer/skeleton.py#L34
result = io.BytesIO()
- vertex_positions = x.nodes[['x', 'y', 'z']].values.astype('float32', order='C')
+ vertex_positions = x.nodes[["x", "y", "z"]].values.astype("float32", order="C")
# Map edges node IDs to node indices
node_ix = pd.Series(x.nodes.reset_index(drop=True).index, index=x.nodes.node_id)
- edges = x.edges.copy().astype('uint32', order='C')
+ edges = x.edges.copy().astype("uint32", order="C")
edges[:, 0] = node_ix.loc[edges[:, 0]].values
edges[:, 1] = node_ix.loc[edges[:, 1]].values
edges = edges[:, [1, 0]] # For some reason we have to switch direction
- result.write(struct.pack('<II', vertex_positions.shape[0], edges.shape[0]))
+ result.write(struct.pack("<II", vertex_positions.shape[0], edges.shape[0]))
diff --git a/navis/io/swc_io.py b/navis/io/swc_io.py
--- a/navis/io/swc_io.py
+++ b/navis/io/swc_io.py
@@ -130,6 +126,7 @@ def read_buffer(
return self.read_dataframe(nodes, base.merge_dicts({'swc_header': '\n'.join(header_rows)}, attrs))
+ @base.handle_errors
def read_dataframe(
self, nodes: pd.DataFrame, attrs: Optional[Dict[str, Any]] = None
) -> 'core.TreeNeuron':
@@ -265,6 +262,7 @@ def read_swc(f: Union[str, pd.DataFrame, Iterable],
fmt: str = "{name}.swc",
read_meta: bool = True,
limit: Optional[int] = None,
+ errors: str = 'raise',
**kwargs) -> 'core.NeuronObject':
"""Create Neuron/List from SWC file.
@@ -273,14 +271,20 @@ def read_swc(f: Union[str, pd.DataFrame, Iterable],
Parameters
----------
- f : str | pandas.DataFrame | iterable
- Filename, folder, SWC string, URL or DataFrame.
- If folder, will import all `.swc` files. If a
- `.zip`, `.tar` or `.tar.gz` file will read all
- SWC files in the file. See also `limit` parameter.
+ f : str | pandas.DataFrame | list thereof
+ Filename, folder, SWC string, URL or DataFrame:
+ - if folder, will import all `.swc` files
+ - if a `.zip`, `.tar` or `.tar.gz` archive, will read all
+ SWC files from the archive
+ - if a URL (http:// or https://), will download the
+ file and import it
+ - FTP address (ftp://) can point to a folder or a single
+ file
+ - DataFrames are interpreted as SWC tables
+ See also `limit` parameter to read only a subset of files.
connector_labels : dict, optional
If provided will extract connectors from SWC.
- Dictionary must map type to label:
+ Dictionary must map types to labels:
`{'presynapse': 7, 'postsynapse': 8}`
include_subdirs : bool, optional
If True and `f` is a folder, will also search
@@ -293,7 +297,7 @@ def read_swc(f: Union[str, pd.DataFrame, Iterable],
and joining processes causes overhead and is
considerably slower for imports of small numbers of
neurons. Integer will be interpreted as the
- number of cores (otherwise defaults to
+ number of processes to use (defaults to
`os.cpu_count() // 2`).
precision : int [8, 16, 32, 64] | None
Precision for data. Defaults to 32 bit integers/floats.
@@ -325,16 +329,26 @@ def read_swc(f: Union[str, pd.DataFrame, Iterable],
read_meta : bool
If True and SWC header contains a line with JSON-encoded
meta data e.g. (`# Meta: {'id': 123}`), these data
- will be read as neuron properties. `fmt` takes
+ will be read as neuron properties. `fmt` still takes
precedence. Will try to assign meta data directly as
neuron attribute (e.g. `neuron.id`). Failing that
(can happen for properties intrinsic to `TreeNeurons`),
will add a `.meta` dictionary to the neuron.
- limit : int, optional
- If reading from a folder you can use this parameter to
- read only the first `limit` SWC files. Useful if
- wanting to get a sample from a large library of
- skeletons.
+ limit : int | str | slice | list, optional
+ When reading from a folder or archive you can use this parameter to
+ restrict which files are read:
+ - if an integer, will read only the first `limit` SWC files
+ (useful to get a sample from a large library of skeletons)
+ - if a string, will interpret it as filename (regex) pattern
+ and only read files that match the pattern; e.g. `limit='.*_R.*'`
+ will only read files that contain `_R` in their filename
+ - if a slice (e.g. `slice(10, 20)`) will read only the files in
+ that range
+ - a list is expected to be a list of filenames to read from
+ the folder/archive
+ errors : "raise" | "log" | "ignore"
+ If "log" or "ignore", errors will not be raised and the
+ file will be skipped. Can result in empty output.
**kwargs
Keyword arguments passed to the construction of
`navis.TreeNeuron`. You can use this to e.g. set
@@ -368,12 +382,16 @@ def read_swc(f: Union[str, pd.DataFrame, Iterable],
>>> s = navis.read_swc('skeletons.zip') # doctest: +SKIP
- Sample first 100 SWC files a zip archive:
+ Sample the first 100 SWC files in a zip archive:
>>> s = navis.read_swc('skeletons.zip', limit=100) # doctest: +SKIP
+ Read all SWC files from an FTP folder:
+
+ >>> s = navis.read_swc('ftp://server:port/path/to/swc/') # doctest: +SKIP
+
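+ Read only files whose name matches a regex pattern (here: anything
+ containing `_R` - an illustrative pattern):
+
+ >>> s = navis.read_swc('skeletons.zip', limit='.*_R.*') # doctest: +SKIP
+
+ Read only the 10th to 20th file in the archive:
+
+ >>> s = navis.read_swc('skeletons.zip', limit=slice(10, 20)) # doctest: +SKIP
+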
"""
- # SwcReader will try its best to read whatever you throw at it - with limit
+ # SwcReader will try its best to read whatever you throw at it - with limited
# sanity checks. For example: if you misspell a filepath, it will assume
# that it's a SWC string (because anything that's a string but doesn't
# point to an existing file or a folder MUST be a SWC) which will lead to
@@ -392,6 +410,7 @@ def read_swc(f: Union[str, pd.DataFrame, Iterable],
precision=precision,
read_meta=read_meta,
fmt=fmt,
+ errors=errors,
attrs=kwargs)
res = reader.read_any(f, include_subdirs, parallel, limit=limit)
diff --git a/navis/io/tiff_io.py b/navis/io/tiff_io.py
index 5bb6b1e0..203b76d4 100644
--- a/navis/io/tiff_io.py
+++ b/navis/io/tiff_io.py
@@ -11,15 +11,13 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-import os
+import io
-import multiprocessing as mp
import numpy as np
-from glob import glob
-from pathlib import Path
from typing import Union, Iterable, Optional, Dict, Any
from typing_extensions import Literal
+from urllib3 import HTTPResponse
from .. import config, utils, core
from . import base
@@ -27,20 +25,153 @@
# Set up logging
logger = config.logger
+DEFAULT_FMT = "{name}.tif"
+
+
+class TiffReader(base.ImageReader):
+ def __init__(
+ self,
+ output: Literal["voxels", "dotprops", "raw"] = "voxels",
+ channel: int = 0,
+ threshold: Optional[Union[int, float]] = None,
+ thin: bool = False,
+ dotprop_kwargs: Dict[str, Any] = {},
+ fmt: str = DEFAULT_FMT,
+ errors: str = "raise",
+ attrs: Optional[Dict[str, Any]] = None,
+ ):
+ if not fmt.endswith(".tif") and not fmt.endswith(".tiff"):
+ raise ValueError('`fmt` must end with ".tif" or ".tiff"')
+
+ super().__init__(
+ fmt=fmt,
+ attrs=attrs,
+ file_ext=(".tif", ".tiff"),
+ name_fallback="TIFF",
+ read_binary=True,
+ output=output,
+ threshold=threshold,
+ thin=thin,
+ dotprop_kwargs=dotprop_kwargs,
+ errors=errors,
+ )
+ self.channel = channel
+
+ def format_output(self, x):
+ # This function replaces the BaseReader.format_output()
+ # This is to avoid trying to convert multiple (image, header) to NeuronList
+ if self.output == "raw":
+ return x
+ elif x:
+ return core.NeuronList([n for n in x if n])
+ else:
+ return core.NeuronList([])
+
+ @base.handle_errors
+ def read_buffer(
+ self, f, attrs: Optional[Dict[str, Any]] = None
+ ) -> Union[np.ndarray, "core.Dotprops", "core.VoxelNeuron"]:
+ """Read buffer into (image, header) or a neuron.
+
+ Parameters
+ ----------
+ f : IO
+ Readable buffer (must be bytes).
+ attrs : dict | None
+ Arbitrary attributes to include in the neuron.
+
+ Returns
+ -------
+ core.Dotprops | core.VoxelNeuron | np.ndarray
+
+ """
+ import tifffile
+
+ if isinstance(f, HTTPResponse):
+ f = io.BytesIO(f.data)  # urllib3 responses hold bytes in `.data`
+ if isinstance(f, bytes):
+ f = io.BytesIO(f)
-def read_tiff(f: Union[str, Iterable],
- channel: int = 0,
- threshold: Optional[Union[int, float]] = None,
- include_subdirs: bool = False,
- parallel: Union[bool, int] = 'auto',
- output: Union[Literal['voxels'],
- Literal['dotprops'],
- Literal['raw']] = 'voxels',
- errors: Union[Literal['raise'],
- Literal['log'],
- Literal['ignore']] = 'log',
- **kwargs) -> 'core.NeuronObject':
+ with tifffile.TiffFile(f) as tif:
+ # The header contains some but not all the info
+ if hasattr(tif, "imagej_metadata") and tif.imagej_metadata is not None:
+ header = tif.imagej_metadata
+ else:
+ header = {}
+
+ # Read the x/y resolution from the first "page" (i.e. the first slice)
+ res = tif.pages[0].resolution
+ # Resolution to spacing
+ header["xy_spacing"] = (1 / res[0], 1 / res[1])
+
+ # Get the axes; this will be something like "ZCYX" where:
+ # Z = slices, C = channels, Y = rows, X = columns, S = color(?), Q = empty(?)
+ axes = tif.series[0].axes
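+ # For example, a two-channel ImageJ stack with 100 slices of
+ # 512 x 512 pixels would typically give axes == "ZCYX" and an
+ # array of shape (100, 2, 512, 512) (illustrative values)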
+
+ # Generate volume
+ data = tif.asarray()
+
+ if self.output == "raw":
+ return data, header
+
+ # Drop "Q" axes if they have dimenions of 1 (we're assuming these are empty)
+ while "Q" in axes and data.shape[axes.index("Q")] == 1:
+ data = np.squeeze(data, axis=axes.index("Q"))
+ axes = axes.replace("Q", "", 1) # Only remove the first occurrence
+ if "C" in axes:
+ # Extract the requested channel from the volume
+ data = data.take(self.channel, axis=axes.index("C"))
+ axes = axes.replace("C", "")
+
+ # At this point we expect 3D data
+ if data.ndim != 3:
+ raise ValueError(f'Expected 3D greyscale data, got {data.ndim} ("{axes}").')
+
+ # Swap axes to XYZ order
+ order = []
+ for a in ("X", "Y", "Z"):
+ if a not in axes:
+ logger.warning(
+ f'Expected axes to contain "Z", "Y", and "X", got "{axes}". '
+ "Axes will not be automatically reordered."
+ )
+ order = None
+ break
+ order.append(axes.index(a))
+ if order:
+ data = np.transpose(data, order)
+
+ # Try parsing units - this is modelled after the tif files you get from ImageJ
+ units = None
+ space_units = None
+ voxdim = np.array([1, 1, 1], dtype=np.float64)
+ if "spacing" in header:
+ voxdim[2] = header["spacing"]
+ if "xy_spacing" in header:
+ voxdim[:2] = header["xy_spacing"]
+ if "unit" in header:
+ space_units = header["unit"]
+ units = [f"{m} {space_units}" for m in voxdim]
+ else:
+ units = voxdim
+
+ return self.convert_image(data, attrs, header, voxdim, units, space_units)
+
+
+def read_tiff(
+ f: Union[str, Iterable],
+ output: Union[Literal["voxels"], Literal["dotprops"], Literal["raw"]] = "voxels",
+ channel: int = 0,
+ threshold: Optional[Union[int, float]] = None,
+ thin: bool = False,
+ include_subdirs: bool = False,
+ parallel: Union[bool, int] = "auto",
+ fmt: str = "{name}.tif",
+ limit: Optional[int] = None,
+ errors: str = "raise",
+ **dotprops_kwargs,
+) -> "core.NeuronObject":
"""Create Neuron/List from TIFF file.
Requires `tifffile` library which is not automatically installed!
@@ -50,13 +181,20 @@ def read_tiff(f: Union[str, Iterable],
f : str | iterable
Filename(s) or folder. If folder, will import all
`.tif` files.
+ output : "voxels" | "dotprops" | "raw"
+ Determines function's output. See Returns for details.
channel : int
Which channel to import. Ignored if file has only one
- channel. Can use e.g. -1 to get the last channel.
+ channel or when `output="raw". Can use e.g. -1 to
+ get the last channel.
threshold : int | float | None
For `output='dotprops'` only: a threshold to filter
low intensity voxels. If `None`, no threshold is
applied and all values > 0 are converted to points.
+ thin : bool
+ For `output='dotprops'` only: if True, will thin the
+ point cloud using `skimage.morphology.skeletonize`
+ after thresholding. Requires `scikit-image`.
include_subdirs : bool, optional
If True and `f` is a folder, will also search
subdirectories for `.tif` files.
@@ -67,13 +205,45 @@ def read_tiff(f: Union[str, Iterable],
considerably slower for imports of small numbers of
neurons. Integer will be interpreted as the number of
cores (otherwise defaults to `os.cpu_count() - 2`).
- output : "voxels" | "dotprops" | "raw"
- Determines function's output. See Returns for details.
+ fmt : str
+ Formatter to specify how filenames are parsed into neuron
+ attributes. Some illustrative examples:
+ - `{name}` (default) uses the filename
+ (minus the suffix) as the neuron's name property
+ - `{id}` uses the filename as the neuron's ID
+ property
+ - `{name,id}` uses the filename as the neuron's
+ name and ID properties
+ - `{name}.{id}` splits the filename at a "."
+ and uses the first part as name and the second as ID
+ - `{name,id:int}` same as above but converts
+ into integer for the ID
+ - `{name}_{myproperty}` splits the filename at
+ "_" and uses the first part as name and as a
+ generic "myproperty" property
+ - `{name}_{}_{id}` splits the filename at
+ "_" and uses the first part as name and the last as
+ ID. The middle part is ignored.
+
+ Throws a ValueError if pattern can't be found in
+ filename.
+ limit : int | str | slice | list, optional
+ When reading from a folder or archive you can use this parameter to
+ restrict which files are read:
+ - if an integer, will read only the first `limit` TIFF files
+ (useful to get a sample from a large library of skeletons)
+ - if a string, will interpret it as filename (regex) pattern
+ and only read files that match the pattern; e.g. `limit='.*_R.*'`
+ will only read files that contain `_R` in their filename
+ - if a slice (e.g. `slice(10, 20)`) will read only the files in
+ that range
+ - a list is expected to be a list of filenames to read from
+ the folder/archive
errors : "raise" | "log" | "ignore"
- If "log" or "ignore", errors will not be raised but
- instead empty neuron will be returned.
+ If "log" or "ignore", errors will not be raised and the
+ offending file will be skipped. Can result in empty output.
- **kwargs
+ **dotprops_kwargs
Keyword arguments passed to [`navis.make_dotprops`][]
if `output='dotprops'`. Use this to adjust e.g. the
number of nearest neighbors used for calculating the
@@ -98,155 +268,27 @@ def read_tiff(f: Union[str, Iterable],
"""
try:
import tifffile
- except ImportError:
- raise ImportError('`navis.read_tiff` requires the `tifffile` library:\n'
- ' pip3 install tifffile -U')
-
- utils.eval_param(output, name='output',
- allowed_values=('raw', 'dotprops', 'voxels'))
-
- # If is directory, compile list of filenames
- if isinstance(f, (str, Path)) and Path(f).expanduser().is_dir():
- f = Path(f).expanduser()
- if not include_subdirs:
- f = [os.path.join(f, x) for x in os.listdir(f) if
- os.path.isfile(os.path.join(f, x)) and x.endswith('.tif')]
- else:
- f = [y for x in os.walk(f) for y in glob(os.path.join(x[0], '*.tif'))]
-
- if utils.is_iterable(f):
- # Do not use if there is only a small batch to import
- if isinstance(parallel, str) and parallel.lower() == 'auto':
- if len(f) < 10:
- parallel = False
-
- if parallel:
- # Do not swap this as `isinstance(True, int)` returns `True`
- if isinstance(parallel, (bool, str)):
- n_cores = os.cpu_count() - 2
- else:
- n_cores = int(parallel)
-
- with mp.Pool(processes=n_cores) as pool:
- results = pool.imap(_worker_wrapper, [dict(f=x,
- channel=channel,
- threshold=threshold,
- output=output,
- errors=errors,
- include_subdirs=include_subdirs,
- parallel=False) for x in f],
- chunksize=1)
-
- res = list(config.tqdm(results,
- desc='Importing',
- total=len(f),
- disable=config.pbar_hide,
- leave=config.pbar_leave))
-
- else:
- # If not parallel just import the good 'ole way: sequentially
- res = [read_tiff(x,
- channel=channel,
- threshold=threshold,
- include_subdirs=include_subdirs,
- output=output,
- errors=errors,
- parallel=parallel,
- **kwargs)
- for x in config.tqdm(f, desc='Importing',
- disable=config.pbar_hide,
- leave=config.pbar_leave)]
-
- if output == 'raw':
- return [r[0] for r in res], [r[1] for r in res]
-
- return core.NeuronList([r for r in res if r])
-
- # Open the file
- f = str(Path(f).expanduser())
- fname = os.path.basename(f).split('.')[0]
-
- with tifffile.TiffFile(f) as tif:
- # The header contains some but not all the info
- if hasattr(tif, 'imagej_metadata'):
- header = tif.imagej_metadata
- else:
- header = {}
-
- # Read the x/y resolution from the first "page" (i.e. the first slice)
- res = tif.pages[0].resolution
- # Resolution to spacing
- header['xy_spacing'] = (1 / res[0], 1 / res[1])
-
- # Get the axes (this will be something like "ZCYX")
- axes = tif.series[0].axes
-
- # Generate volume
- data = tif.asarray()
-
- # Extract channel from volume - from what I've seen ImageJ always has the
- # "ZCYX" order
- data = data[:, channel, :, :]
-
- # And sort into x, y, z order
- data = np.transpose(data, axes=[2, 1, 0])
-
- if output == 'raw':
- return data, header
-
- # Try parsing units - this is modelled after the tif files you get from
- # ImageJ
- units = None
- su = None
- voxdim = np.array([1, 1, 1], dtype=np.float64)
- if 'spacing' in header:
- voxdim[2] = header['spacing']
- if 'xy_spacing' in header:
- voxdim[:2] = header['xy_spacing']
- if 'unit' in header:
- su = header['unit']
- units = [f'{m} {su}' for m in voxdim]
- else:
- units = voxdim
-
- try:
- if output == 'dotprops':
- # This really should be a 3D image but who knows
- if data.ndim == 3:
- if threshold:
- data = data >= threshold
-
- # Convert data to x/y/z coordinates
- # Note we need to multiply units before creating the Dotprops
- # - otherwise the KNN will be wrong
- x, y, z = np.where(data)
- points = np.vstack((x, y, z)).T
- points = points * voxdim
-
- x = core.make_dotprops(points, **kwargs)
- else:
- raise ValueError('Data must be 2- or 3-dimensional to extract '
- f'Dotprops, got {data.ndim}')
- if su:
- x.units = f'1 {su}'
- else:
- x = core.VoxelNeuron(data, units=units)
- except BaseException as e:
- msg = f'Error converting file {fname} to neuron.'
- if errors == 'raise':
- raise ImportError(msg) from e
- elif errors == 'log':
- logger.error(f'{msg}: {e}')
- return
-
- # Add some additional properties
- x.name = fname
- x.origin = f
- x.tiff_header = header
-
- return x
-
-
-def _worker_wrapper(kwargs):
- """Helper for importing TIFFs using multiple processes."""
- return read_tiff(**kwargs)
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ "`navis.read_tiff` requires the `tifffile` library:\n"
+ " pip3 install tifffile -U"
+ )
+
+ utils.eval_param(
+ output, name="output", allowed_values=("raw", "dotprops", "voxels")
+ )
+
+ if parallel == "auto":
+ # Set a lower threshold of 10 on parallel processing for TIFFs (default is 200)
+ parallel = ("auto", 10)
+
+ reader = TiffReader(
+ channel=channel,
+ output=output,
+ threshold=threshold,
+ thin=thin,
+ fmt=fmt,
+ dotprop_kwargs=dotprops_kwargs,
+ errors=errors,
+ )
+ return reader.read_any(f, include_subdirs, parallel, limit=limit)
diff --git a/navis/meshes/b3d.py b/navis/meshes/b3d.py
index 556205b4..79bc7879 100644
--- a/navis/meshes/b3d.py
+++ b/navis/meshes/b3d.py
@@ -39,7 +39,7 @@ def simplify_mesh_blender(x, F, inplace=False):
"""
if not tm.interfaces.blender.exists:
- raise ImportError('No Blender 3D unavailable (executable not found).')
+ raise ModuleNotFoundError('Blender 3D not available (executable not found).')
_blender_executable = tm.interfaces.blender._blender_executable
if F > 1 or F < 0:
@@ -111,7 +111,7 @@ def smooth_mesh_blender(x, iterations=5, L=0.5, inplace=False):
"""
if not tm.interfaces.blender.exists:
- raise ImportError('No Blender 3D unavailable (executable not found).')
+ raise ModuleNotFoundError('Blender 3D not available (executable not found).')
_blender_executable = tm.interfaces.blender._blender_executable
if L > 1 or L < 0:
diff --git a/navis/meshes/fqmr.py b/navis/meshes/fqmr.py
index c4e8329e..eefd54ba 100644
--- a/navis/meshes/fqmr.py
+++ b/navis/meshes/fqmr.py
@@ -44,10 +44,8 @@ def simplify_mesh_fqmr(x, F, inplace=False, **kwargs):
try:
import pyfqmr
- except ImportError:
- raise ImportError('Please install pyfqmr: pip3 install pyfqmr')
- except BaseException:
- raise
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError('Please install pyfqmr: pip3 install pyfqmr')
defaults = dict(aggressiveness=7, preserve_border=True, verbose=False)
defaults.update(kwargs)
diff --git a/navis/meshes/mesh_utils.py b/navis/meshes/mesh_utils.py
index 333c9062..9782c99d 100644
--- a/navis/meshes/mesh_utils.py
+++ b/navis/meshes/mesh_utils.py
@@ -26,7 +26,7 @@
try:
import skimage
from skimage import measure
-except ImportError:
+except ModuleNotFoundError:
skimage = None
from .. import core, config, intersection, graph, morpho
@@ -187,8 +187,10 @@ def points_to_mesh(points, res, threshold=None, denoise=True):
"""
if not skimage:
- raise ImportError('Meshing requires `skimage`:\n '
- 'pip3 install scikit-image')
+ raise ModuleNotFoundError(
+ 'Meshing requires `skimage`:\n'
+ ' pip3 install scikit-image'
+ )
points = np.asarray(points)
@@ -306,8 +308,10 @@ def pointlabels_to_meshes(points, labels, res, method='kde',
"""
if not skimage:
- raise ImportError('Meshing requires `skimage`:\n '
- 'pip3 install scikit-image')
+ raise ModuleNotFoundError(
+ 'Meshing requires `skimage`:\n'
+ ' pip3 install scikit-image'
+ )
if len(points) != len(labels):
raise ValueError(f'Number of labels ({len(labels)}) must match number '
diff --git a/navis/meshes/o3d.py b/navis/meshes/o3d.py
index 625cb432..0f304d7a 100644
--- a/navis/meshes/o3d.py
+++ b/navis/meshes/o3d.py
@@ -113,8 +113,8 @@ def make_o3d_mesh(x):
"""Turn mesh-like object into an open3d mesh."""
try:
import open3d
- except ImportError:
- raise ImportError('Please install open3d: pip3 install open3d')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError('Please install open3d: pip3 install open3d')
except BaseException:
raise
diff --git a/navis/meshes/operations.py b/navis/meshes/operations.py
index cb3267d0..688f9772 100644
--- a/navis/meshes/operations.py
+++ b/navis/meshes/operations.py
@@ -18,7 +18,7 @@
try:
from pykdtree.kdtree import KDTree
-except ImportError:
+except ModuleNotFoundError:
from scipy.spatial import cKDTree as KDTree
from .. import core, config, utils
@@ -37,7 +37,7 @@ def available_backends(only_first=False):
try:
import pyfqmr
backends.append('pyfqmr')
- except ImportError:
+ except ModuleNotFoundError:
pass
except BaseException:
raise
@@ -48,7 +48,7 @@ def available_backends(only_first=False):
try:
import open3d
backends.append('open3d')
- except ImportError:
+ except ModuleNotFoundError:
pass
except BaseException:
raise
@@ -61,7 +61,7 @@ def available_backends(only_first=False):
warnings.simplefilter("ignore")
import pymeshlab
backends.append('pymeshlab')
- except ImportError:
+ except ModuleNotFoundError:
pass
except BaseException:
raise
diff --git a/navis/meshes/pyml.py b/navis/meshes/pyml.py
index 780f08fd..1e6f73cd 100644
--- a/navis/meshes/pyml.py
+++ b/navis/meshes/pyml.py
@@ -57,8 +57,8 @@ def simplify_mesh_pyml(x, F, method='quadric', inplace=False, **kwargs):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
import pymeshlab
- except ImportError:
- raise ImportError('Please install pymeshlab: pip3 install pymeshlab')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError('Please install pymeshlab: pip3 install pymeshlab')
except BaseException:
raise
diff --git a/navis/morpho/__init__.py b/navis/morpho/__init__.py
index f7d4f919..7317a574 100644
--- a/navis/morpho/__init__.py
+++ b/navis/morpho/__init__.py
@@ -20,12 +20,14 @@
despike_skeleton, guess_radius, smooth_skeleton,
heal_skeleton, break_fragments,
prune_twigs, prune_at_depth, cell_body_fiber,
- drop_fluff, smooth_voxels, combine_neurons)
+ drop_fluff, combine_neurons)
from .analyze import find_soma
from .subset import subset_neuron
from .persistence import (persistence_points, persistence_vectors,
persistence_distances)
from .fq import form_factor
+from .ivscc import ivscc_features
+from .images import smooth_voxels, thin_voxels
__all__ = ['strahler_index', 'bending_flow', 'flow_centrality', 'synapse_flow_centrality',
@@ -37,4 +39,4 @@
'subset_neuron', 'smooth_voxels', 'sholl_analysis',
'persistence_points', 'betweeness_centrality',
'persistence_vectors', 'persistence_distances', 'combine_neurons',
- 'segment_analysis', 'form_factor']
+ 'segment_analysis', 'form_factor', 'ivscc_features', 'thin_voxels']
diff --git a/navis/morpho/images.py b/navis/morpho/images.py
new file mode 100644
index 00000000..38dd212b
--- /dev/null
+++ b/navis/morpho/images.py
@@ -0,0 +1,123 @@
+# This script is part of navis (http://www.github.com/navis-org/navis).
+# Copyright (C) 2018 Philipp Schlegel
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+import numpy as np
+
+from scipy.ndimage import gaussian_filter
+from typing import Union
+
+from .. import core, utils
+
+NeuronObject = Union["core.NeuronList", "core.TreeNeuron"]
+
+__all__ = ["smooth_voxels", "thin_voxels"]
+
+
+@utils.map_neuronlist(desc="Smoothing", allow_parallel=True)
+def smooth_voxels(
+ x: NeuronObject, sigma: int = 1, inplace: bool = False
+) -> NeuronObject:
+ """Smooth voxel(s) using a Gaussian filter.
+
+ Parameters
+ ----------
+ x : VoxelNeuron | NeuronList
+ Neuron(s) to be processed.
+ sigma : int | (3, ) ints, optional
+ Standard deviation for Gaussian kernel. The standard
+ deviations of the Gaussian filter are given for each axis
+ as a sequence, or as a single number, in which case it is
+ equal for all axes.
+ inplace : bool, optional
+ If False, will use and return copy of original neuron(s).
+
+ Returns
+ -------
+ VoxelNeuron/List
+ Smoothed neuron(s).
+
+ Examples
+ --------
+ >>> import navis
+ >>> n = navis.example_neurons(1, kind='mesh')
+ >>> vx = navis.voxelize(n, pitch='1 micron')
+ >>> smoothed = navis.smooth_voxels(vx, sigma=2)
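+
+ Smooth anisotropically with one sigma per axis (here: stronger
+ smoothing along z - values are illustrative):
+
+ >>> smoothed = navis.smooth_voxels(vx, sigma=(1, 1, 2))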
+
+ See Also
+ --------
+ [`navis.smooth_mesh`][]
+ For smoothing MeshNeurons and other mesh-likes.
+ [`navis.smooth_skeleton`][]
+ For smoothing TreeNeurons.
+
+ """
+ # The decorator makes sure that at this point we have single neurons
+ if not isinstance(x, core.VoxelNeuron):
+ raise TypeError(f"Can only process VoxelNeurons, not {type(x)}")
+
+ if not inplace:
+ x = x.copy()
+
+ # Apply gaussian
+ x._data = gaussian_filter(x.grid.astype(np.float32), sigma=sigma)
+ x._clear_temp_attr()
+
+ return x
+
+
+@utils.map_neuronlist(desc="Thinning", allow_parallel=True)
+def thin_voxels(x, inplace=False):
+ """Skeletonize image data to single voxel width.
+
+ This is a thin wrapper around scikit-image's `skeletonize`.
+
+ Parameters
+ ----------
+ x : VoxelNeuron | numpy array
+ The image to thin.
+ inplace : bool
+ For VoxelNeurons only: Whether to manipulate the neuron
+ in place.
+
+ Returns
+ -------
+ thin
+ Thinned VoxelNeuron or numpy array.
+
+ Examples
+ --------
+ >>> import navis
+ >>> n = navis.example_neurons(1, kind='mesh')
+ >>> vx = navis.voxelize(n, pitch='1 micron')
+ >>> thinned = navis.thin_voxels(vx)
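+
+ Plain numpy arrays work too - e.g. a binarized copy of the above
+ grid (a sketch; any 2d/3d image array is accepted):
+
+ >>> thinned_grid = navis.thin_voxels(vx.grid > 0)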
+
+ """
+ try:
+ from skimage.morphology import skeletonize
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ "`thin_image` requires the scikit-image packge:\n"
+ " pip install scikit-image"
+ )
+
+ if isinstance(x, core.VoxelNeuron):
+ if not inplace:
+ x = x.copy()
+
+ x.grid = skeletonize(x.grid)
+ elif isinstance(x, np.ndarray):
+ x = skeletonize(x)
+ else:
+ raise TypeError(f"Unable to thin data of type {type(x)}")
+
+ return x
diff --git a/navis/morpho/ivscc.py b/navis/morpho/ivscc.py
new file mode 100644
index 00000000..605da567
--- /dev/null
+++ b/navis/morpho/ivscc.py
@@ -0,0 +1,415 @@
+# This script is part of navis (http://www.github.com/navis-org/navis).
+# Copyright (C) 2018 Philipp Schlegel
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+import pandas as pd
+import numpy as np
+
+
+from abc import ABC, abstractmethod
+from scipy.stats import wasserstein_distance
+from typing import Union, Sequence
+
+from .. import config, graph, core
+from . import subset_neuron, tortuosity
+
+# Set up logging
+logger = config.get_logger(__name__)
+
+__all__ = sorted(
+ [
+ "ivscc",
+ ]
+)
+
+# A mapping of label IDs to compartment names
+# Note: anything above 5 is considered "undefined" or "custom"
+label_to_comp = {
+ -1: "root",
+ 0: "undefined",
+ 1: "soma",
+ 2: "axon",
+ 3: "basal_dendrite",
+ 4: "apical_dendrite",
+}
+comp_to_label = {v: k for k, v in label_to_comp.items()}
+
+
+class CompartmentNotFoundError(Exception):
+ """An exception raised when a compartment is not found."""
+
+ pass
+
+
+class Features(ABC):
+ def __init__(self, neuron: "core.TreeNeuron", label=None, verbose=False):
+ self.neuron = neuron
+ self.verbose = verbose
+
+ if label is None:
+ self.label = ""
+ elif not label.endswith("_"):
+ self.label = f"{label}_"
+ else:
+ self.label = label
+
+ # Make sure the neuron is rooted to the soma (if present)
+ self.soma = self.neuron.soma
+ if self.soma is not None:
+ self.soma_pos = self.neuron.soma_pos[0]
+ self.soma_radius = self.neuron.nodes.set_index("node_id").loc[
+ self.soma, "radius"
+ ]
+
+ if self.neuron.soma not in self.neuron.root:
+ self.neuron = self.neuron.reroot(self.neuron.soma)
+
+ # Calculate geodesic distances from leafs to all other nodes (directed)
+ self.leaf_dists = graph.geodesic_matrix(
+ self.neuron, self.neuron.leafs.node_id.values, directed=True
+ )
+ # Replace infinities with -1
+ self.leaf_dists[self.leaf_dists == float("inf")] = -1
+
+ self.features = {}
+
+ def record_feature(self, name, value):
+ """Record a feature."""
+ self.features[f"{self.label}{name}"] = value
+
+ @abstractmethod
+ def extract_features(self):
+ """Extract features."""
+ pass
+
+
+class BasicFeatures(Features):
+ """Base class for features."""
+
+ def extract_features(self):
+ """Extract basic features."""
+ self.record_feature(
+ "extent_y", self.neuron.nodes.y.max() - self.neuron.nodes.y.min()
+ )
+ self.record_feature(
+ "extent_x", self.neuron.nodes.x.max() - self.neuron.nodes.x.min()
+ )
+ self.record_feature(
+ "max_branch_order", (self.neuron.nodes.type == "branch").sum() + 1
+ )
+ self.record_feature("num_nodes", len(self.neuron.nodes))
+ self.record_feature("total_length", self.neuron.cable_length)
+
+ if self.soma is None:
+ if self.verbose:
+ logger.warning(
+ f"{self.neuron.id} has no `.soma` attribute, skipping soma-related features."
+ )
+ return
+
+ # x/y bias from soma
+ # Note: this is absolute for x and relative for y
+ self.record_feature(
+ "bias_x",
+ abs(
+ (self.neuron.nodes.x.max() - self.soma_pos[0])
+ - (self.soma_pos[0] - self.neuron.nodes.x.min())
+ ),
+ )
+ self.record_feature(
+ "bias_y",
+ (self.neuron.nodes.y.max() - self.soma_pos[1])
+ - (self.soma_pos[1] - self.neuron.nodes.y.min()),
+ )
+
+ # Distances from soma
+ self.record_feature(
+ "max_euclidean_distance",
+ (
+ (self.neuron.nodes[["x", "y", "z"]] - self.soma_pos)
+ .pow(2)
+ .sum(axis=1)
+ .pow(0.5)
+ .max()
+ ),
+ )
+ self.record_feature(
+ "max_path_length",
+ self.leaf_dists.loc[
+ self.leaf_dists.index.isin(self.neuron.nodes.node_id)
+ ].values.max(),
+ )
+
+ # Tortuosity
+ self.record_feature("mean_contraction", tortuosity(self.neuron))
+
+ # Branching (number of linear segments between branch)
+ self.record_feature("num_branches", len(self.neuron.small_segments))
+
+ return self.features
+
+
+class CompartmentFeatures(BasicFeatures):
+ """Base class for compartment-specific features."""
+
+ def __init__(self, neuron: "core.TreeNeuron", compartment, verbose=False):
+ if "label" not in neuron.nodes.columns:
+ raise ValueError(
+ f"No 'label' column found in node table for neuron {neuron.id}"
+ )
+
+ if (
+ compartment not in neuron.nodes.label.values
+ and comp_to_label.get(compartment, compartment)
+ not in neuron.nodes.label.values
+ ):
+ raise CompartmentNotFoundError(
+ f"No {compartment} ({comp_to_label.get(compartment, compartment)}) compartments found in neuron {neuron.id}"
+ )
+
+ # Initialize the parent class
+ super().__init__(neuron, label=compartment, verbose=verbose)
+
+ # Now subset the neuron to this compartment
+ self.neuron = subset_neuron(
+ self.neuron,
+ (
+ self.neuron.nodes.label.isin(
+ (compartment, comp_to_label[compartment])
+ ).values
+ ),
+ )
+
+
+class AxonFeatures(CompartmentFeatures):
+ """Extract features from an axon."""
+
+ def __init__(self, neuron: "core.TreeNeuron", verbose=False):
+ super().__init__(neuron, "axon", verbose=verbose)
+
+ def extract_features(self):
+ # Extract basic features via the parent class
+ super().extract_features()
+
+ # Now deal with axon-specific features:
+
+ if self.soma is not None:
+ # Distance between axon root and soma surface
+ # Note: we're catering for potentially multiple roots here
+ axon_root_pos = self.neuron.nodes.loc[
+ self.neuron.nodes.type == "root", ["x", "y", "z"]
+ ].values
+
+ # Closest dist between an axon root and the soma
+ dist = np.linalg.norm(axon_root_pos - self.soma_pos, axis=1).min()
+
+ # Subtract soma radius from the distance
+ dist -= self.soma_radius
+
+ self.record_feature("exit_distance", dist)
+
+ # Axon theta: The relative radial position of the point where the neurite from which
+ # the axon derives exits the soma.
+
+ # Get the node where the axon exits the soma
+ exit_node = self.neuron.nodes.loc[self.neuron.nodes.type == "root"]
+
+ # Get theta
+ theta = np.arctan2(
+ exit_node.y.values - self.soma_pos[1],
+ exit_node.x.values - self.soma_pos[0],
+ )[0]
+ self.record_feature("exit_theta", theta)
+
+ return self.features
+
+
+class BasalDendriteFeatures(CompartmentFeatures):
+ """Extract features from a basal dendrite."""
+
+ def __init__(self, neuron: "core.TreeNeuron", verbose=False):
+ super().__init__(neuron, "basal_dendrite", verbose=verbose)
+
+ def extract_features(self):
+ # Extract basic features via the parent class
+ super().extract_features()
+
+ # Now deal with basal dendrite-specific features
+ if self.soma is not None:
+ # Number of stems sprouting from the soma
+ # (i.e. number of nodes with a parent that is the soma)
+ self.record_feature(
+ "calculate_number_of_stems",
+ (self.neuron.nodes.parent_id == self.soma).sum(),
+ )
+
+ return self.features
+
+
+class ApicalDendriteFeatures(CompartmentFeatures):
+ """Extract features from a apical dendrite."""
+
+ def __init__(self, neuron: "core.TreeNeuron", verbose=False):
+ super().__init__(neuron, "apical_dendrite", verbose=verbose)
+
+ def extract_features(self):
+ # Extract basic features via the parent class
+ super().extract_features()
+
+ return self.features
+
+
+class OverlapFeatures(Features):
+ """Features that compare two compartments (e.g. overlap)."""
+
+ # Compartments to compare
+ compartments = ("axon", "basal_dendrite", "apical_dendrite")
+
+ def extract_features(self):
+ # Iterate over compartments
+ for c1 in self.compartments:
+ if c1 in self.neuron.nodes.label.values:
+ c1_nodes = self.neuron.nodes[self.neuron.nodes.label == c1]
+ elif comp_to_label.get(c1, c1) in self.neuron.nodes.label.values:
+ c1_nodes = self.neuron.nodes[
+ self.neuron.nodes.label == comp_to_label[c1]
+ ]
+ else:
+ continue
+ for c2 in self.compartments:
+ if c1 == c2:
+ continue
+ if c2 in self.neuron.nodes.label.values:
+ c2_nodes = self.neuron.nodes[self.neuron.nodes.label == c2]
+ elif comp_to_label.get(c2, c2) in self.neuron.nodes.label.values:
+ c2_nodes = self.neuron.nodes[
+ self.neuron.nodes.label == comp_to_label[c2]
+ ]
+ else:
+ continue
+
+ # Calculate % of nodes of a given compartment type above/overlapping/below the
+ # full y-extent of another compartment type
+ self.features[f"{c1}_frac_above_{c2}"] = (
+ c1_nodes.y > c2_nodes.y.max()
+ ).sum() / len(c1_nodes)
+ self.features[f"{c1}_frac_intersect_{c2}"] = (
+ (c1_nodes.y >= c2_nodes.y.min()) & (c1_nodes.y <= c2_nodes.y.max())
+ ).sum() / len(c1_nodes)
+ self.features[f"{c1}_frac_below_{c2}"] = (
+ c1_nodes.y < c2_nodes.y.min()
+ ).sum() / len(c1_nodes)
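+ # e.g. `axon_frac_above_basal_dendrite = 0.25` (hypothetical value)
+ # would mean that a quarter of the axon's nodes lie above the full
+ # y-extent of the basal dendrite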
+
+ # Calculate earth mover's distance (EMD) between the two compartments
+ if f"{c2}_emd_with_{c1}" not in self.features:
+ self.features[f"{c1}_emd_with_{c2}"] = wasserstein_distance(
+ c1_nodes.y, c2_nodes.y
+ )
+
+ return self.features
+
+
+def ivscc_features(
+ x: "core.TreeNeuron",
+ features=None,
+ missing_compartments="ignore",
+ verbose=False,
+ progress=True,
+) -> pd.DataFrame:
+ """Calculate IVSCC features for neuron(s).
+
+ Please see the `IVSCC` tutorial for details.
+
+ Parameters
+ ----------
+ x : TreeNeuron | NeuronList
+ Neuron(s) to calculate IVSCC for.
+ features : Sequence[Features], optional
+ Provide specific features to calculate.
+ Must be subclasses of `BasicFeatures`.
+ If `None`, will use default features.
+ missing_compartments : "ignore" | "skip" | "raise"
+ What to do if a neuron is missing a compartment
+ (e.g. no axon or basal dendrite):
+ - "ignore" (default): ignore that compartment
+ - "skip": skip the entire neuron
+ - "raise": raise an exception
+
+ Returns
+ -------
+ ivscc : pd.DataFrame
+ IVSCC features for the neuron(s).
+
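+ Examples
+ --------
+ A minimal sketch - note that this assumes the neuron's node table
+ has a `label` column with compartment annotations (axon, basal /
+ apical dendrite, etc.):
+
+ >>> import navis
+ >>> n = navis.example_neurons(1)
+ >>> features = navis.ivscc_features(n) # doctest: +SKIP
+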
+ """
+
+ if isinstance(x, core.TreeNeuron):
+ x = core.NeuronList([x])
+
+ if features is None:
+ features = DEFAULT_FEATURES
+
+ data = {}
+ for n in config.tqdm(
+ x, desc="Calculating IVSCC features", disable=not progress or config.pbar_hide
+ ):
+ data[n.id] = {}
+ for feat in features:
+ try:
+ f = feat(n, verbose=verbose)
+ except CompartmentNotFoundError as e:
+ if missing_compartments == "ignore":
+ continue
+ elif missing_compartments == "skip":
+ if verbose:
+ print(f"Skipping neuron {n.id}: {e}")
+ data.pop(n.id)
+ break
+ else:
+ raise e
+
+ data[n.id].update(f.extract_features())
+
+ return pd.DataFrame(data)
+
+
+def _check_compartments(n, compartments):
+ """Check if `compartments` are valid."""
+ if compartments == "auto":
+ if "label" not in n.nodes.columns:
+ return None
+ return n.nodes.label.unique()
+ elif compartments is True:
+ return n.nodes.label.unique()
+ elif isinstance(compartments, str):
+ if "label" not in n.nodes.columns or compartments not in n.nodes.label.unique():
+ raise ValueError(f"Compartment not present: {compartments}")
+ return [compartments]
+ elif isinstance(compartments, Sequence):
+ if "label" not in n.nodes.columns:
+ raise ValueError("No 'label' column found in node table.")
+ for c in compartments:
+ if c not in n.nodes.label.unique():
+ raise ValueError(f"Compartment not present: {c}")
+ return compartments
+ elif compartments in (None, False):
+ return None
+
+ raise ValueError(f"Invalid `compartments`: {compartments}")
+
+
+DEFAULT_FEATURES = [
+ AxonFeatures,
+ BasalDendriteFeatures,
+ ApicalDendriteFeatures,
+ OverlapFeatures,
+]
diff --git a/navis/morpho/manipulation.py b/navis/morpho/manipulation.py
index 02586b66..b90f70f6 100644
--- a/navis/morpho/manipulation.py
+++ b/navis/morpho/manipulation.py
@@ -12,8 +12,8 @@
# GNU General Public License for more details.
-""" This module contains functions to analyse and manipulate neuron morphology.
-"""
+"""This module contains functions to analyse and manipulate neuron morphology."""
+
import warnings
import pandas as pd
@@ -23,13 +23,12 @@
from collections import namedtuple
from itertools import combinations
-from scipy.ndimage import gaussian_filter
-from typing import Union, Optional, Sequence, List, Set
+from typing import Union, Optional, Sequence, List, Set, Callable
from typing_extensions import Literal
try:
from pykdtree.kdtree import KDTree
-except ImportError:
+except ModuleNotFoundError:
from scipy.spatial import cKDTree as KDTree
from .. import graph, utils, config, core
@@ -38,26 +37,39 @@
# Set up logging
logger = config.get_logger(__name__)
-__all__ = sorted(['prune_by_strahler', 'stitch_skeletons', 'split_axon_dendrite',
- 'average_skeletons', 'despike_skeleton', 'guess_radius',
- 'smooth_skeleton', 'smooth_voxels',
- 'heal_skeleton', 'cell_body_fiber',
- 'break_fragments', 'prune_twigs', 'prune_at_depth',
- 'drop_fluff', 'combine_neurons'])
-
-NeuronObject = Union['core.NeuronList', 'core.TreeNeuron']
-
-
-@utils.map_neuronlist(desc='Pruning', allow_parallel=True)
-@utils.meshneuron_skeleton(method='subset')
-def cell_body_fiber(x: NeuronObject,
- method: Union[Literal['longest_neurite'],
- Literal['betweenness']] = 'betweenness',
- reroot_soma: bool = True,
- heal: bool = True,
- threshold: float = 0.95,
- inverse: bool = False,
- inplace: bool = False):
+__all__ = sorted(
+ [
+ "prune_by_strahler",
+ "stitch_skeletons",
+ "split_axon_dendrite",
+ "average_skeletons",
+ "despike_skeleton",
+ "guess_radius",
+ "smooth_skeleton",
+ "heal_skeleton",
+ "cell_body_fiber",
+ "break_fragments",
+ "prune_twigs",
+ "prune_at_depth",
+ "drop_fluff",
+ "combine_neurons",
+ ]
+)
+
+NeuronObject = Union["core.NeuronList", "core.TreeNeuron"]
+
+
+@utils.map_neuronlist(desc="Pruning", allow_parallel=True)
+@utils.meshneuron_skeleton(method="subset")
+def cell_body_fiber(
+ x: NeuronObject,
+ method: Union[Literal["longest_neurite"], Literal["betweenness"]] = "betweenness",
+ reroot_soma: bool = True,
+ heal: bool = True,
+ threshold: float = 0.95,
+ inverse: bool = False,
+ inplace: bool = False,
+):
"""Prune neuron to its cell body fiber.
Here, "cell body fiber" (CBF) refers to the tract connecting the soma to the
@@ -113,29 +125,31 @@ def cell_body_fiber(x: NeuronObject,
under the hood for `method='betweenness'`.
"""
- utils.eval_param(method, 'method',
- allowed_values=('longest_neurite', 'betweenness'))
+ utils.eval_param(
+ method, "method", allowed_values=("longest_neurite", "betweenness")
+ )
# The decorator makes sure that at this point we have single neurons
if not isinstance(x, core.TreeNeuron):
- raise TypeError(f'Expected TreeNeuron(s), got {type(x)}')
+ raise TypeError(f"Expected TreeNeuron(s), got {type(x)}")
if not inplace:
x = x.copy()
if x.n_trees > 1 and heal:
- _ = heal_skeleton(x, method='LEAFS', inplace=True)
+ _ = heal_skeleton(x, method="LEAFS", inplace=True)
# If no branches, just return the neuron
- if 'branch' not in x.nodes.type.values:
+ if "branch" not in x.nodes.type.values:
return x
if reroot_soma and not isinstance(x.soma, type(None)):
x.reroot(x.soma, inplace=True)
# Find main branch point
- cut = graph.find_main_branchpoint(x, method=method, threshold=threshold,
- reroot_soma=False)
+ cut = graph.find_main_branchpoint(
+ x, method=method, threshold=threshold, reroot_soma=False
+ )
# Find the path to root (and account for multiple roots)
for r in x.root:
@@ -157,14 +171,16 @@ def cell_body_fiber(x: NeuronObject,
return x
-@utils.map_neuronlist(desc='Pruning', allow_parallel=True)
-@utils.meshneuron_skeleton(method='subset')
-def prune_by_strahler(x: NeuronObject,
- to_prune: Union[int, List[int], range, slice],
- inplace: bool = False,
- reroot_soma: bool = True,
- force_strahler_update: bool = False,
- relocate_connectors: bool = False) -> NeuronObject:
+@utils.map_neuronlist(desc="Pruning", allow_parallel=True)
+@utils.meshneuron_skeleton(method="subset")
+def prune_by_strahler(
+ x: NeuronObject,
+ to_prune: Union[int, List[int], range, slice],
+ inplace: bool = False,
+ reroot_soma: bool = True,
+ force_strahler_update: bool = False,
+ relocate_connectors: bool = False,
+) -> NeuronObject:
"""Prune neuron based on [Strahler order](https://en.wikipedia.org/wiki/Strahler_number).
Parameters
@@ -209,7 +225,7 @@ def prune_by_strahler(x: NeuronObject,
"""
# The decorator makes sure that at this point we have single neurons
if not isinstance(x, core.TreeNeuron):
- raise TypeError(f'Expected TreeNeuron(s), got {type(x)}')
+ raise TypeError(f"Expected TreeNeuron(s), got {type(x)}")
# Make a copy if necessary before making any changes
neuron = x
@@ -219,7 +235,7 @@ def prune_by_strahler(x: NeuronObject,
if reroot_soma and not isinstance(neuron.soma, type(None)):
neuron.reroot(neuron.soma, inplace=True)
- if 'strahler_index' not in neuron.nodes or force_strahler_update:
+ if "strahler_index" not in neuron.nodes or force_strahler_update:
mmetrics.strahler_index(neuron)
# Prepare indices
@@ -228,8 +244,10 @@ def prune_by_strahler(x: NeuronObject,
if isinstance(to_prune, int):
if to_prune < 1:
- raise ValueError('SI to prune must be positive. Please see docs'
- 'for additional options.')
+ raise ValueError(
+ "SI to prune must be positive. Please see docs"
+ "for additional options."
+ )
to_prune = [to_prune]
elif isinstance(to_prune, range):
to_prune = list(to_prune)
@@ -239,26 +257,31 @@ def prune_by_strahler(x: NeuronObject,
# Prepare parent dict if needed later
if relocate_connectors:
- parent_dict = {
- tn.node_id: tn.parent_id for tn in neuron.nodes.itertuples()}
+ parent_dict = {tn.node_id: tn.parent_id for tn in neuron.nodes.itertuples()}
# Avoid setting the nodes as this potentially triggers a regeneration
# of the graph which in turn will raise an error because some nodes might
# still have parents that don't exist anymore
- neuron._nodes = neuron._nodes[~neuron._nodes.strahler_index.isin(to_prune)].reset_index(drop=True, inplace=False)
+ neuron._nodes = neuron._nodes[
+ ~neuron._nodes.strahler_index.isin(to_prune)
+ ].reset_index(drop=True, inplace=False)
if neuron.has_connectors:
if not relocate_connectors:
- neuron._connectors = neuron._connectors[neuron._connectors.node_id.isin(neuron._nodes.node_id.values)].reset_index(drop=True, inplace=False)
+ neuron._connectors = neuron._connectors[
+ neuron._connectors.node_id.isin(neuron._nodes.node_id.values)
+ ].reset_index(drop=True, inplace=False)
else:
remaining_tns = set(neuron._nodes.node_id.values)
- for cn in neuron._connectors[~neuron.connectors.node_id.isin(neuron._nodes.node_id.values)].itertuples():
+ for cn in neuron._connectors[
+ ~neuron.connectors.node_id.isin(neuron._nodes.node_id.values)
+ ].itertuples():
this_tn = parent_dict[cn.node_id]
while True:
if this_tn in remaining_tns:
break
this_tn = parent_dict[this_tn]
- neuron._connectors.loc[cn.Index, 'node_id'] = this_tn
+ neuron._connectors.loc[cn.Index, "node_id"] = this_tn
# Reset indices of node and connector tables (important for igraph!)
neuron._nodes.reset_index(inplace=True, drop=True)
@@ -268,7 +291,9 @@ def prune_by_strahler(x: NeuronObject,
# Theoretically we can end up with disconnected pieces, i.e. with more
# than 1 root node -> we have to fix the nodes that lost their parents
- neuron._nodes.loc[~neuron._nodes.parent_id.isin(neuron._nodes.node_id.values), 'parent_id'] = -1
+ neuron._nodes.loc[
+ ~neuron._nodes.parent_id.isin(neuron._nodes.node_id.values), "parent_id"
+ ] = -1
# Remove temporary attributes
neuron._clear_temp_attr()
@@ -276,14 +301,16 @@ def prune_by_strahler(x: NeuronObject,
return neuron
-@utils.map_neuronlist(desc='Pruning', allow_parallel=True)
-@utils.meshneuron_skeleton(method='subset')
-def prune_twigs(x: NeuronObject,
- size: Union[float, str],
- exact: bool = False,
- inplace: bool = False,
- recursive: Union[int, bool, float] = False
- ) -> NeuronObject:
+@utils.map_neuronlist(desc="Pruning", allow_parallel=True)
+@utils.meshneuron_skeleton(method="subset")
+def prune_twigs(
+ x: NeuronObject,
+ size: Union[float, str],
+ exact: bool = False,
+ mask: Optional[Union[Sequence[int], Callable]] = None,
+ inplace: bool = False,
+ recursive: Union[int, bool, float] = False,
+) -> NeuronObject:
"""Prune terminal twigs under a given size.
By default this function will simply drop all terminal twigs shorter than
@@ -301,6 +328,10 @@ def prune_twigs(x: NeuronObject,
units, e.g. '5 microns'.
exact: bool
See notes above.
+ mask : iterable | callable, optional
+ Either a boolean mask, a list of node IDs or a callable taking
+ a neuron as input and returning one of the former. If provided,
+ only nodes that are in the mask will be considered for
+ pruning (see example below).
inplace : bool, optional
If False, pruning is performed on copy of original neuron
which is then returned.
@@ -359,39 +390,57 @@ def prune_twigs(x: NeuronObject,
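+ Prune twigs only within a masked part of the neuron - here (purely
+ for illustration) restricted to nodes with Strahler index 1:
+
+ >>> import navis
+ >>> n = navis.example_neurons(1)
+ >>> _ = navis.strahler_index(n)
+ >>> mask = (n.nodes.strahler_index == 1).values
+ >>> n_pruned = navis.prune_twigs(n, size='5 microns', mask=mask)
+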
"""
# The decorator makes sure that at this point we have single neurons
if not isinstance(x, core.TreeNeuron):
- raise TypeError(f'Expected TreeNeuron(s), got {type(x)}')
+ raise TypeError(f"Expected TreeNeuron(s), got {type(x)}")
# Convert to neuron units - numbers will be passed through
- size = x.map_units(size, on_error='raise')
+ size = x.map_units(size, on_error="raise")
if not exact:
- return _prune_twigs_simple(x,
- size=size,
- inplace=inplace,
- recursive=recursive)
+ return _prune_twigs_simple(
+ x, size=size, inplace=inplace, recursive=recursive, mask=mask
+ )
else:
- return _prune_twigs_precise(x,
- size=size,
- inplace=inplace)
+ return _prune_twigs_precise(x, size=size, mask=mask, inplace=inplace)
-def _prune_twigs_simple(neuron: 'core.TreeNeuron',
- size: float,
- inplace: bool = False,
- recursive: Union[int, bool, float] = False
- ) -> Optional[NeuronObject]:
+def _prune_twigs_simple(
+ neuron: "core.TreeNeuron",
+ size: float,
+ inplace: bool = False,
+ mask: Optional[Union[Sequence[int], Callable]] = None,
+ recursive: Union[int, bool, float] = False,
+) -> Optional[NeuronObject]:
"""Prune twigs using simple method."""
if not isinstance(neuron, core.TreeNeuron):
- raise TypeError(f'Expected Neuron/List, got {type(neuron)}')
+ raise TypeError(f"Expected Neuron/List, got {type(neuron)}")
# If people set recursive=True, assume that they mean float("inf")
if isinstance(recursive, bool) and recursive:
- recursive = float('inf')
+ recursive = float("inf")
# Make a copy if necessary before making any changes
if not inplace:
neuron = neuron.copy()
+ if callable(mask):
+ mask = mask(neuron)
+
+ if mask is not None:
+ mask = np.asarray(mask)
+
+ if mask.dtype == bool:
+ if len(mask) != neuron.n_nodes:
+ raise ValueError("Mask length must match number of nodes")
+ mask_nodes = neuron.nodes.node_id.values[mask]
+ elif mask.dtype in (int, np.int32, np.int64):
+ mask_nodes = mask
+ else:
+ raise TypeError(
+ f"Mask must be boolean or list of node IDs, got {mask.dtype}"
+ )
+ else:
+ mask_nodes = None
+
if utils.fastcore:
nodes_to_keep = utils.fastcore.prune_twigs(
neuron.nodes.node_id.values,
@@ -400,21 +449,30 @@ def _prune_twigs_simple(neuron: 'core.TreeNeuron',
weights=utils.fastcore.dag.parent_dist(
neuron.nodes.node_id.values,
neuron.nodes.parent_id.values,
- neuron.nodes[['x', 'y', 'z']].values,
- )
+ neuron.nodes[["x", "y", "z"]].values,
+ ),
)
+ # If mask is given, check if we have to re-add any nodes
+ # This is a bit cumbersome at the moment - we should add a
+ # mask feature to the fastcore function
+ if mask_nodes is not None:
+ for seg in graph._break_segments(neuron):
+ # If this segment would be dropped and the first node is not in the mask
+ # we have to keep the whole segment
+ if seg[0] not in nodes_to_keep and seg[0] not in mask_nodes:
+ nodes_to_keep = np.append(nodes_to_keep, seg[1:])
+
if len(nodes_to_keep) < neuron.n_nodes:
- subset.subset_neuron(neuron,
- nodes_to_keep,
- inplace=True)
+ subset.subset_neuron(neuron, nodes_to_keep, inplace=True)
if recursive:
- recursive -= 1
- prune_twigs(neuron, size=size, inplace=True, recursive=recursive)
+ prune_twigs(
+ neuron, size=size, inplace=True, recursive=recursive - 1, mask=mask_nodes
+ )
else:
# Find terminal nodes
- leafs = neuron.nodes[neuron.nodes.type == 'end'].node_id.values
+ leafs = neuron.nodes[neuron.nodes.type == "end"].node_id.values
# Find terminal segments
segs = graph._break_segments(neuron)
@@ -425,35 +483,43 @@ def _prune_twigs_simple(neuron: 'core.TreeNeuron',
# Find out which to delete
segs_to_delete = segs[seg_lengths <= size]
+
+ # If mask is given, only consider nodes in mask
+ if mask_nodes is not None:
+ segs_to_delete = [s for s in segs_to_delete if s[0] in mask_nodes]
+
if len(segs_to_delete):
# Unravel into a list of node IDs -> skip the last parent
nodes_to_delete = [n for s in segs_to_delete for n in s[:-1]]
# Subset neuron
- nodes_to_keep = neuron.nodes[~neuron.nodes.node_id.isin(nodes_to_delete)].node_id.values
- subset.subset_neuron(neuron,
- nodes_to_keep,
- inplace=True)
+ nodes_to_keep = neuron.nodes[
+ ~neuron.nodes.node_id.isin(nodes_to_delete)
+ ].node_id.values
+ subset.subset_neuron(neuron, nodes_to_keep, inplace=True)
# Go recursive
if recursive:
- recursive -= 1
- prune_twigs(neuron, size=size, inplace=True, recursive=recursive)
+ prune_twigs(
+ neuron, size=size, inplace=True, recursive=recursive - 1, mask=mask_nodes
+ )
return neuron
-def _prune_twigs_precise(neuron: 'core.TreeNeuron',
- size: float,
- inplace: bool = False,
- recursive: Union[int, bool, float] = False
- ) -> Optional[NeuronObject]:
+def _prune_twigs_precise(
+ neuron: "core.TreeNeuron",
+ size: float,
+ inplace: bool = False,
+ mask: Optional[Union[Sequence[int], Callable]] = None,
+ recursive: Union[int, bool, float] = False,
+) -> Optional[NeuronObject]:
"""Prune twigs using precise method."""
if not isinstance(neuron, core.TreeNeuron):
- raise TypeError(f'Expected Neuron/List, got {type(neuron)}')
+ raise TypeError(f"Expected Neuron/List, got {type(neuron)}")
if size <= 0:
- raise ValueError('`length` must be > 0')
+ raise ValueError("`length` must be > 0")
# Make a copy if necessary before making any changes
if not inplace:
@@ -464,26 +530,49 @@ def _prune_twigs_precise(neuron: 'core.TreeNeuron',
# Find all nodes that could possibly be within distance to a leaf
tree = graph.neuron2KDTree(neuron)
- res = tree.query_ball_point(neuron.leafs[['x', 'y', 'z']].values,
- r=size)
+ res = tree.query_ball_point(neuron.leafs[["x", "y", "z"]].values, r=size)
candidates = neuron.nodes.node_id.values[np.unique(np.concatenate(res))]
+ if callable(mask):
+ mask = mask(neuron)
+
+ if mask is not None:
+ mask = np.asarray(mask)  # accept lists/Series, as in the simple method
+
+ if mask.dtype == bool:
+ if len(mask) != neuron.n_nodes:
+ raise ValueError("Mask length must match number of nodes")
+ mask_nodes = neuron.nodes.node_id.values[mask]
+ elif mask.dtype in (int, np.int32, np.int64):
+ mask_nodes = mask
+ else:
+ raise TypeError(
+ f"Mask must be boolean or list of node IDs, got {mask.dtype}"
+ )
+
+ candidates = np.intersect1d(candidates, mask_nodes)
+
+ if not len(candidates):
+ return neuron
+
# For each node in neuron find out which leafs are directly distal to it
# `distal` is a matrix with all nodes in columns and leafs in rows
distal = graph.distal_to(neuron, a=leafs, b=candidates)
+
# Turn matrix into dictionary {'node': [leafs, distal, to, it]}
- melted = distal.reset_index(drop=False).melt(id_vars='index')
+ melted = distal.reset_index(drop=False).melt(id_vars="index")
melted = melted[melted.value]
- melted.groupby('variable')['index'].apply(list)
+
# `distal` is now a dictionary for {'node_id': [leaf1, leaf2, ..], ..}
- distal = melted.groupby('variable')['index'].apply(list).to_dict()
+ distal = melted.groupby("variable")["index"].apply(list).to_dict()
- # For each node find the distance to any leaf - note we are using `length`
+ # For each node find the distance to any leaf - note we are using `size`
# as cutoff here
# `path_len` is a dict mapping {nodeA: {nodeB: length, ...}, ...}
# if nodeB is not in dictionary, it's not within reach
- path_len = dict(nx.all_pairs_dijkstra_path_length(neuron.graph.reverse(),
- cutoff=size, weight='weight'))
+ path_len = dict(
+ nx.all_pairs_dijkstra_path_length(
+ neuron.graph.reverse(), cutoff=size, weight="weight"
+ )
+ )
# For each leaf in `distal` check if it's within length
not_in_length = {k: set(v) - set(path_len[k]) for k, v in distal.items()}
@@ -491,18 +580,23 @@ def _prune_twigs_precise(neuron: 'core.TreeNeuron',
# For a node to be deleted its PARENT has to be within
# `size` of ALL edges that are distal to it
in_range = {k for k, v in not_in_length.items() if not any(v)}
- nodes_to_keep = neuron.nodes.loc[~neuron.nodes.parent_id.isin(in_range),
- 'node_id'].values
+ nodes_to_keep = neuron.nodes.loc[
+ ~neuron.nodes.parent_id.isin(in_range), "node_id"
+ ].values
if len(nodes_to_keep) < neuron.n_nodes:
# Subset neuron
- subset.subset_neuron(neuron,
- nodes_to_keep,
- inplace=True)
+ subset.subset_neuron(neuron, nodes_to_keep, inplace=True)
# For each of the new leafs check their shortest distance to the
# original leafs to get the remainder
- is_new_leaf = (neuron.nodes.type == 'end').values
+ is_new_leaf = (neuron.nodes.type == "end").values
+
+ # If there is a mask, we have to exclude old leafs which would not have
+ # been in the mask
+ if mask is not None:
+ is_new_leaf = is_new_leaf & np.isin(neuron.nodes.node_id, mask_nodes)
+
new_leafs = neuron.nodes[is_new_leaf].node_id.values
max_len = [max([path_len[l1][l2] for l2 in distal[l1]]) for l1 in new_leafs]
@@ -511,10 +605,10 @@ def _prune_twigs_precise(neuron: 'core.TreeNeuron',
len_to_prune = size - np.array(max_len)
# Get vectors from leafs to their parents
- nodes = neuron.nodes.set_index('node_id')
- parents = nodes.loc[new_leafs, 'parent_id'].values
- loc1 = neuron.leafs[['x', 'y', 'z']].values
- loc2 = nodes.loc[parents, ['x', 'y', 'z']].values
+ nodes = neuron.nodes.set_index("node_id")
+ parents = nodes.loc[new_leafs, "parent_id"].values
+ loc1 = nodes.loc[new_leafs, ["x", "y", "z"]].values
+ loc2 = nodes.loc[parents, ["x", "y", "z"]].values
vec = loc1 - loc2
vec_len = np.linalg.norm(vec, axis=1)
vec_norm = vec / vec_len.reshape(-1, 1)
@@ -527,42 +621,43 @@ def _prune_twigs_precise(neuron: 'core.TreeNeuron',
# will be deleted anyway
if not all(to_remove):
new_loc = loc1 - vec_norm * len_to_prune.reshape(-1, 1)
- neuron.nodes.loc[is_new_leaf, ['x', 'y', 'z']] = new_loc.astype(
+ neuron.nodes.loc[is_new_leaf, ["x", "y", "z"]] = new_loc.astype(
neuron.nodes.x.dtype, copy=False
)
if any(to_remove):
leafs_to_remove = new_leafs[to_remove]
- nodes_to_keep = neuron.nodes.loc[~neuron.nodes.node_id.isin(leafs_to_remove),
- 'node_id'].values
+ nodes_to_keep = neuron.nodes.loc[
+ ~neuron.nodes.node_id.isin(leafs_to_remove), "node_id"
+ ].values
# Subset neuron
- subset.subset_neuron(neuron,
- nodes_to_keep,
- inplace=True)
+ subset.subset_neuron(neuron, nodes_to_keep, inplace=True)
return neuron
-@utils.map_neuronlist(desc='Splitting', allow_parallel=True)
-@utils.meshneuron_skeleton(method='split',
- include_connectors=True,
- copy_properties=['color', 'compartment'],
- disallowed_kwargs={'label_only': True},
- heal=True)
-def split_axon_dendrite(x: NeuronObject,
- metric: Union[Literal['synapse_flow_centrality'],
- Literal['flow_centrality'],
- Literal['bending_flow'],
- Literal['segregation_index']] = 'synapse_flow_centrality',
- flow_thresh: float = .9,
- split: Union[Literal['prepost'],
- Literal['distance']] = 'prepost',
- cellbodyfiber: Union[Literal['soma'],
- Literal['root'],
- bool] = False,
- reroot_soma: bool = True,
- label_only: bool = False
- ) -> 'core.NeuronList':
+@utils.map_neuronlist(desc="Splitting", allow_parallel=True)
+@utils.meshneuron_skeleton(
+ method="split",
+ include_connectors=True,
+ copy_properties=["color", "compartment"],
+ disallowed_kwargs={"label_only": True},
+ heal=True,
+)
+def split_axon_dendrite(
+ x: NeuronObject,
+ metric: Union[
+ Literal["synapse_flow_centrality"],
+ Literal["flow_centrality"],
+ Literal["bending_flow"],
+ Literal["segregation_index"],
+ ] = "synapse_flow_centrality",
+ flow_thresh: float = 0.9,
+ split: Union[Literal["prepost"], Literal["distance"]] = "prepost",
+ cellbodyfiber: Union[Literal["soma"], Literal["root"], bool] = False,
+ reroot_soma: bool = True,
+ label_only: bool = False,
+) -> "core.NeuronList":
"""Split a neuron into axon and dendrite.
The result is highly dependent on the method and on your neuron's
@@ -665,42 +760,55 @@ def split_axon_dendrite(x: NeuronObject,
the axon/dendrite split.
"""
- COLORS = {'axon': (178, 34, 34),
- 'dendrite': (0, 0, 255),
- 'cellbodyfiber': (50, 50, 50),
- 'linker': (150, 150, 150)}
+ COLORS = {
+ "axon": (178, 34, 34),
+ "dendrite": (0, 0, 255),
+ "cellbodyfiber": (50, 50, 50),
+ "linker": (150, 150, 150),
+ }
# The decorator makes sure that at this point we have single neurons
if not isinstance(x, core.TreeNeuron):
raise TypeError(f'Can only process TreeNeurons, got "{type(x)}"')
if not x.has_connectors:
- if metric != 'flow_centrality':
- raise ValueError('Neuron must have connectors.')
- elif split == 'prepost':
- raise ValueError('Set `split="distance"` when trying to split neurons '
- 'without connectors.')
-
- _METRIC = ('synapse_flow_centrality', 'bending_flow', 'segregation_index',
- 'flow_centrality')
- utils.eval_param(metric, 'metric', allowed_values=_METRIC)
- utils.eval_param(split, 'split', allowed_values=('prepost', 'distance'))
- utils.eval_param(cellbodyfiber, 'cellbodyfiber',
- allowed_values=('soma', 'root', False))
-
- if metric == 'flow_centrality':
- msg = ("As of navis version 1.4.0, `method='flow_centrality'` "
- "uses synapse-independent, morphology-only flow to generate splits."
- "Please use `method='synapse_flow_centrality' for "
- "synapse-based axon-dendrite splits. "
- "This warning will be removed in a future version of navis.")
+ if metric != "flow_centrality":
+ raise ValueError("Neuron must have connectors.")
+ elif split == "prepost":
+ raise ValueError(
+ 'Set `split="distance"` when trying to split neurons '
+ "without connectors."
+ )
+
+ _METRIC = (
+ "synapse_flow_centrality",
+ "bending_flow",
+ "segregation_index",
+ "flow_centrality",
+ )
+ utils.eval_param(metric, "metric", allowed_values=_METRIC)
+ utils.eval_param(split, "split", allowed_values=("prepost", "distance"))
+ utils.eval_param(
+ cellbodyfiber, "cellbodyfiber", allowed_values=("soma", "root", False)
+ )
+
+ if metric == "flow_centrality":
+ msg = (
+ "As of navis version 1.4.0, `method='flow_centrality'` "
+ "uses synapse-independent, morphology-only flow to generate splits."
+ "Please use `method='synapse_flow_centrality' for "
+ "synapse-based axon-dendrite splits. "
+ "This warning will be removed in a future version of navis."
+ )
warnings.warn(msg, DeprecationWarning)
logger.warning(msg)
if len(x.root) > 1:
- raise ValueError(f'Unable to split neuron {x.id}: multiple roots. '
- 'Try `navis.heal_skeleton(x)` to merged '
- 'disconnected fragments.')
+ raise ValueError(
+ f"Unable to split neuron {x.id}: multiple roots. "
+ "Try `navis.heal_skeleton(x)` to merged "
+ "disconnected fragments."
+ )
# Make copy, so that we don't screw things up
original = x
@@ -710,11 +818,11 @@ def split_axon_dendrite(x: NeuronObject,
x.reroot(x.soma, inplace=True)
FUNCS = {
- 'bending_flow': mmetrics.bending_flow,
- 'synapse_flow_centrality': mmetrics.synapse_flow_centrality,
- 'flow_centrality': mmetrics.flow_centrality,
- 'segregation_index': mmetrics.arbor_segregation_index
- }
+ "bending_flow": mmetrics.bending_flow,
+ "synapse_flow_centrality": mmetrics.synapse_flow_centrality,
+ "flow_centrality": mmetrics.flow_centrality,
+ "segregation_index": mmetrics.arbor_segregation_index,
+ }
if metric not in FUNCS:
raise ValueError(f'Unknown `metric`: "{metric}"')
@@ -733,7 +841,7 @@ def split_axon_dendrite(x: NeuronObject,
# The first step is to remove the linker -> that's the bit that connects
# the axon and dendrite
is_linker = x.nodes[metric] >= x.nodes[metric].max() * flow_thresh
- linker = set(x.nodes.loc[is_linker, 'node_id'].values)
+ linker = set(x.nodes.loc[is_linker, "node_id"].values)
# We try to perform processing on the graph to avoid overhead from
# (re-)generating neurons
@@ -747,17 +855,17 @@ def split_axon_dendrite(x: NeuronObject,
# Figure out which one is which
axon = set()
- if split == 'prepost':
+ if split == "prepost":
# Collect # of pre- and postsynapses on each of the connected components
sm = pd.DataFrame()
- sm['n_nodes'] = [len(c) for c in cc]
+ sm["n_nodes"] = [len(c) for c in cc]
pre = x.presynapses
post = x.postsynapses
- sm['n_pre'] = [pre[pre.node_id.isin(c)].shape[0] for c in cc]
- sm['n_post'] = [post[post.node_id.isin(c)].shape[0] for c in cc]
- sm['prepost_ratio'] = (sm.n_pre / sm.n_post)
- sm['frac_post'] = sm.n_post / sm.n_post.sum()
- sm['frac_pre'] = sm.n_pre / sm.n_pre.sum()
+ sm["n_pre"] = [pre[pre.node_id.isin(c)].shape[0] for c in cc]
+ sm["n_post"] = [post[post.node_id.isin(c)].shape[0] for c in cc]
+ sm["prepost_ratio"] = sm.n_pre / sm.n_post
+ sm["frac_post"] = sm.n_post / sm.n_post.sum()
+ sm["frac_pre"] = sm.n_pre / sm.n_pre.sum()
# In theory, we can encounter neurons with either no pre- or no
# postsynapses (e.g. sensory neurons).
@@ -765,19 +873,21 @@ def split_axon_dendrite(x: NeuronObject,
# causes frac_pre/post to be NaN. By filling, we make sure that the
# split doesn't fail further down but they might end up missing either
# an axon or a dendrite (which may actually be OK?).
- sm['frac_post'] = sm['frac_post'].fillna(0)
- sm['frac_pre'] = sm['frac_pre'].fillna(0)
+ sm["frac_post"] = sm["frac_post"].fillna(0)
+ sm["frac_pre"] = sm["frac_pre"].fillna(0)
# Produce the ratio of pre- to postsynapses
- sm['frac_prepost'] = (sm.frac_pre / sm.frac_post)
+ sm["frac_prepost"] = sm.frac_pre / sm.frac_post
# Some small side branches might have either no pre- or no postsynapses.
# Even if they have synapses: if the total count is low they might be
# incorrectly assigned to a compartment. Here, we will make sure that
# they are disregarded for now to avoid introducing noise. Instead we
# will connect them onto their parent compartment later.
- sm.loc[sm[['frac_pre', 'frac_post']].max(axis=1) < 0.01,
- ['prepost_ratio', 'frac_prepost']] = np.nan
+ sm.loc[
+ sm[["frac_pre", "frac_post"]].max(axis=1) < 0.01,
+ ["prepost_ratio", "frac_prepost"],
+ ] = np.nan
logger.debug(sm)
# Each fragment is considered separately as either giver or recipient
@@ -818,7 +928,7 @@ def split_axon_dendrite(x: NeuronObject,
# The CBF is defined as the part of the neuron between the soma (or root)
# and the first branch point with sizeable synapse flow
cbf = set()
- if cellbodyfiber and (np.any(x.soma) or cellbodyfiber == 'root'):
+ if cellbodyfiber and (np.any(x.soma) or cellbodyfiber == "root"):
# To excise the CBF, we subset the neuron to those parts with
# no/hardly any flow and find the part that contains the soma
no_flow = x.nodes[x.nodes[metric] <= x.nodes[metric].max() * 0.05]
@@ -846,60 +956,63 @@ def split_axon_dendrite(x: NeuronObject,
# If we have, assign these nodes to the closest node with a compartment
if any(miss):
# Find the closest nodes with a compartment
- m = graph.geodesic_matrix(original,
- directed=False,
- weight=None,
- from_=miss)
+ m = graph.geodesic_matrix(original, directed=False, weight=None, from_=miss)
# Subset geodesic matrix to nodes that have a compartment
- nodes_w_comp = original.nodes.node_id.values[~np.isin(original.nodes.node_id.values, miss)]
+ nodes_w_comp = original.nodes.node_id.values[
+ ~np.isin(original.nodes.node_id.values, miss)
+ ]
closest = np.argmin(m.loc[:, nodes_w_comp].values, axis=1)
closest_id = nodes_w_comp[closest]
linker += m.index.values[np.isin(closest_id, linker)].tolist()
- axon += m.index.values[np.isin(closest_id, axon)].tolist()
- dendrite += m.index.values[np.isin(closest_id, dendrite)].tolist()
- cbf += m.index.values[np.isin(closest_id, cbf)].tolist()
+ axon += m.index.values[np.isin(closest_id, axon)].tolist()
+ dendrite += m.index.values[np.isin(closest_id, dendrite)].tolist()
+ cbf += m.index.values[np.isin(closest_id, cbf)].tolist()
# Add labels
if label_only:
nodes = original.nodes
- nodes['compartment'] = None
+ nodes["compartment"] = None
is_linker = nodes.node_id.isin(linker)
is_axon = nodes.node_id.isin(axon)
is_dend = nodes.node_id.isin(dendrite)
is_cbf = nodes.node_id.isin(cbf)
- nodes.loc[is_linker, 'compartment'] = 'linker'
- nodes.loc[is_dend, 'compartment'] = 'dendrite'
- nodes.loc[is_axon, 'compartment'] = 'axon'
- nodes.loc[is_cbf, 'compartment'] = 'cellbodyfiber'
+ nodes.loc[is_linker, "compartment"] = "linker"
+ nodes.loc[is_dend, "compartment"] = "dendrite"
+ nodes.loc[is_axon, "compartment"] = "axon"
+ nodes.loc[is_cbf, "compartment"] = "cellbodyfiber"
# Set connector compartments
- cmp_map = original.nodes.set_index('node_id').compartment.to_dict()
- original.connectors['compartment'] = original.connectors.node_id.map(cmp_map)
+ cmp_map = original.nodes.set_index("node_id").compartment.to_dict()
+ original.connectors["compartment"] = original.connectors.node_id.map(cmp_map)
# Turn into categorical data
- original.nodes['compartment'] = original.nodes.compartment.astype('category')
- original.connectors['compartment'] = original.connectors.compartment.astype('category')
+ original.nodes["compartment"] = original.nodes.compartment.astype("category")
+ original.connectors["compartment"] = original.connectors.compartment.astype(
+ "category"
+ )
return original
# Generate the actual splits
nl = []
- for label, nodes in zip(['cellbodyfiber', 'dendrite', 'linker', 'axon'],
- [cbf, dendrite, linker, axon]):
+ for label, nodes in zip(
+ ["cellbodyfiber", "dendrite", "linker", "axon"], [cbf, dendrite, linker, axon]
+ ):
if not len(nodes):
continue
n = subset.subset_neuron(original, nodes)
n.color = COLORS.get(label, (100, 100, 100))
- n._register_attr('compartment', label)
+ n._register_attr("compartment", label)
nl.append(n)
return core.NeuronList(nl)
-def combine_neurons(*x: Union[Sequence[NeuronObject], 'core.NeuronList']
- ) -> 'core.NeuronObject':
+def combine_neurons(
+ *x: Union[Sequence[NeuronObject], "core.NeuronList"],
+) -> "core.NeuronObject":
"""Combine multiple neurons into one.
Parameters
@@ -949,10 +1062,10 @@ def combine_neurons(*x: Union[Sequence[NeuronObject], 'core.NeuronList']
# Check that neurons are all of the same type
if len(nl.types) > 1:
- raise TypeError('Unable to combine neurons of different types')
+ raise TypeError("Unable to combine neurons of different types")
if isinstance(nl[0], core.TreeNeuron):
- x = stitch_skeletons(*nl, method='NONE', master='FIRST')
+ x = stitch_skeletons(*nl, method="NONE", master="FIRST")
elif isinstance(nl[0], core.MeshNeuron):
x = nl[0].copy()
comb = tm.util.concatenate([n.trimesh for n in nl])
@@ -960,8 +1073,10 @@ def combine_neurons(*x: Union[Sequence[NeuronObject], 'core.NeuronList']
x._faces = comb.faces
if any(nl.has_connectors):
- x._connectors = pd.concat([n.connectors for n in nl], # type: ignore # no stubs for concat
- ignore_index=True)
+ x._connectors = pd.concat(
+ [n.connectors for n in nl], # type: ignore # no stubs for concat
+ ignore_index=True,
+ )
elif isinstance(nl[0], core.Dotprops):
x = nl[0].copy()
x._points = np.vstack(nl._points)
@@ -972,26 +1087,26 @@ def combine_neurons(*x: Union[Sequence[NeuronObject], 'core.NeuronList']
x._alpha = np.hstack(nl.alpha)
if any(nl.has_connectors):
- x._connectors = pd.concat([n.connectors for n in nl], # type: ignore # no stubs for concat
- ignore_index=True)
+ x._connectors = pd.concat(
+ [n.connectors for n in nl], # type: ignore # no stubs for concat
+ ignore_index=True,
+ )
elif isinstance(nl[0], core.VoxelNeuron):
- raise TypeError('Combining VoxelNeuron not (yet) supported')
+ raise TypeError("Combining VoxelNeuron not (yet) supported")
else:
- raise TypeError(f'Unable to combine {type(nl[0])}')
+ raise TypeError(f"Unable to combine {type(nl[0])}")
return x
-def stitch_skeletons(*x: Union[Sequence[NeuronObject], 'core.NeuronList'],
- method: Union[Literal['LEAFS'],
- Literal['ALL'],
- Literal['NONE'],
- Sequence[int]] = 'ALL',
- master: Union[Literal['SOMA'],
- Literal['LARGEST'],
- Literal['FIRST']] = 'SOMA',
- max_dist: Optional[float] = None,
- ) -> 'core.TreeNeuron':
+def stitch_skeletons(
+ *x: Union[Sequence[NeuronObject], "core.NeuronList"],
+ method: Union[
+ Literal["LEAFS"], Literal["ALL"], Literal["NONE"], Sequence[int]
+ ] = "ALL",
+ master: Union[Literal["SOMA"], Literal["LARGEST"], Literal["FIRST"]] = "SOMA",
+ max_dist: Optional[float] = None,
+) -> "core.TreeNeuron":
"""Stitch multiple skeletons together.
Uses minimum spanning tree to determine a way to connect all fragments
@@ -1061,8 +1176,8 @@ def stitch_skeletons(*x: Union[Sequence[NeuronObject], 'core.NeuronList'],
"""
master = str(master).upper()
- ALLOWED_MASTER = ('SOMA', 'LARGEST', 'FIRST')
- utils.eval_param(master, 'master', allowed_values=ALLOWED_MASTER)
+ ALLOWED_MASTER = ("SOMA", "LARGEST", "FIRST")
+ utils.eval_param(master, "master", allowed_values=ALLOWED_MASTER)
# Compile list of individual neurons
neurons = utils.unpack_neurons(x)
@@ -1071,29 +1186,29 @@ def stitch_skeletons(*x: Union[Sequence[NeuronObject], 'core.NeuronList'],
nl = core.NeuronList(neurons).copy()
if len(nl) < 2:
- logger.warning(f'Need at least 2 neurons to stitch, found {len(nl)}')
+ logger.warning(f"Need at least 2 neurons to stitch, found {len(nl)}")
return nl[0]
# If no soma, switch to largest
- if master == 'SOMA' and not any(nl.has_soma):
- master = 'LARGEST'
+ if master == "SOMA" and not any(nl.has_soma):
+ master = "LARGEST"
# First find master
- if master == 'SOMA':
+ if master == "SOMA":
# Pick the first neuron with a soma
m_ix = [i for i, n in enumerate(nl) if n.has_soma][0]
- elif master == 'LARGEST':
+ elif master == "LARGEST":
# Pick the largest neuron
- m_ix = sorted(list(range(len(nl))),
- key=lambda x: nl[x].n_nodes,
- reverse=True)[0]
+ m_ix = sorted(list(range(len(nl))), key=lambda x: nl[x].n_nodes, reverse=True)[
+ 0
+ ]
else:
# Pick the first neuron
m_ix = 0
m = nl[m_ix]
# Check if we need to make any node IDs unique
- if nl.nodes.duplicated(subset='node_id').sum() > 0:
+ if nl.nodes.duplicated(subset="node_id").sum() > 0:
# Master neuron will not be changed
seen_tn: Set[int] = set(m.nodes.node_id)
for i, n in enumerate(nl):
@@ -1120,17 +1235,21 @@ def stitch_skeletons(*x: Union[Sequence[NeuronObject], 'core.NeuronList'],
new_map = dict(zip(non_unique, new_tn))
# Remap node IDs - if no new value, keep the old
- n.nodes['node_id'] = n.nodes.node_id.map(lambda x: new_map.get(x, x))
+ n.nodes["node_id"] = n.nodes.node_id.map(lambda x: new_map.get(x, x))
if n.has_connectors:
- n.connectors['node_id'] = n.connectors.node_id.map(lambda x: new_map.get(x, x))
+ n.connectors["node_id"] = n.connectors.node_id.map(
+ lambda x: new_map.get(x, x)
+ )
- if getattr(n, 'tags', None) is not None:
+ if getattr(n, "tags", None) is not None:
n.tags = {new_map.get(k, k): v for k, v in n.tags.items()} # type: ignore
# Remap parent IDs
new_map[None] = -1 # type: ignore
- n.nodes['parent_id'] = n.nodes.parent_id.map(lambda x: new_map.get(x, x)).astype(int)
+ n.nodes["parent_id"] = n.nodes.parent_id.map(
+ lambda x: new_map.get(x, x)
+ ).astype(int)
# Add new nodes to seen
seen_tn = seen_tn | set(new_tn)
@@ -1139,96 +1258,100 @@ def stitch_skeletons(*x: Union[Sequence[NeuronObject], 'core.NeuronList'],
n._clear_temp_attr()
# We will start by simply merging all neurons into one
- m._nodes = pd.concat([n.nodes for n in nl], # type: ignore # no stubs for concat
- ignore_index=True)
+ m._nodes = pd.concat(
+ [n.nodes for n in nl], # type: ignore # no stubs for concat
+ ignore_index=True,
+ )
if any(nl.has_connectors):
- m._connectors = pd.concat([n.connectors for n in nl], # type: ignore # no stubs for concat
- ignore_index=True)
+ m._connectors = pd.concat(
+ [n.connectors for n in nl], # type: ignore # no stubs for concat
+ ignore_index=True,
+ )
if not m.has_tags or not isinstance(m.tags, dict):
m.tags = {} # type: ignore # TreeNeuron has no tags
for n in nl:
- for k, v in (getattr(n, 'tags', None) or {}).items():
+ for k, v in (getattr(n, "tags", None) or {}).items():
m.tags[k] = m.tags.get(k, []) + list(utils.make_iterable(v))
# Reset temporary attributes of our final neuron
m._clear_temp_attr()
# If this is all we meant to do, return this neuron
- if not utils.is_iterable(method) and (method == 'NONE' or method is None):
+ if not utils.is_iterable(method) and (method == "NONE" or method is None):
return m
return _stitch_mst(m, nodes=method, inplace=False, max_dist=max_dist)
-def _mst_igraph(nl: 'core.NeuronList',
- new_edges: pd.DataFrame) -> List[List[int]]:
+def _mst_igraph(nl: "core.NeuronList", new_edges: pd.DataFrame) -> List[List[int]]:
"""Compute edges necessary to connect a fragmented neuron using igraph."""
# Generate a union of all graphs
g = nl[0].igraph.disjoint_union(nl[1:].igraph)
# We have to manually set the node IDs again
- nids = np.concatenate([n.igraph.vs['node_id'] for n in nl])
- g.vs['node_id'] = nids
+ nids = np.concatenate([n.igraph.vs["node_id"] for n in nl])
+ g.vs["node_id"] = nids
# Set existing edges to zero weight to make sure they have priority when
# calculating the minimum spanning tree
- g.es['weight'] = 0
+ g.es["weight"] = 0
# If two nodes occupy the same position (e.g. if fragments are the
# result of cutting), they will have a distance of 0. Hence, we won't be
# able to simply filter by distance
- g.es['new'] = False
+ g.es["new"] = False
# Convert node IDs in new_edges to vertex IDs and add to graph
- name2ix = dict(zip(g.vs['node_id'], range(len(g.vs))))
- new_edges['source_ix'] = new_edges.source.map(name2ix)
- new_edges['target_ix'] = new_edges.target.map(name2ix)
+ name2ix = dict(zip(g.vs["node_id"], range(len(g.vs))))
+ new_edges["source_ix"] = new_edges.source.map(name2ix)
+ new_edges["target_ix"] = new_edges.target.map(name2ix)
# Add new edges
- g.add_edges(new_edges[['source_ix', 'target_ix']].values.tolist())
+ g.add_edges(new_edges[["source_ix", "target_ix"]].values.tolist())
# Add edge weight to new edges
- g.es[-new_edges.shape[0]:]['weight'] = new_edges.weight.values
+ g.es[-new_edges.shape[0] :]["weight"] = new_edges.weight.values
# Keep track of new edges
- g.es[-new_edges.shape[0]:]['new'] = True
+ g.es[-new_edges.shape[0] :]["new"] = True
# Compute the minimum spanning tree
- mst = g.spanning_tree(weights='weight')
+ mst = g.spanning_tree(weights="weight")
# Extract the new edges
to_add = mst.es.select(new=True)
# Convert to node IDs
- to_add = [(g.vs[e.source]['node_id'],
- g.vs[e.target]['node_id'],
- {'weight': e['weight']})
- for e in to_add]
+ to_add = [
+ (g.vs[e.source]["node_id"], g.vs[e.target]["node_id"], {"weight": e["weight"]})
+ for e in to_add
+ ]
return to_add
-def _mst_nx(nl: 'core.NeuronList',
- new_edges: pd.DataFrame) -> List[List[int]]:
+def _mst_nx(nl: "core.NeuronList", new_edges: pd.DataFrame) -> List[List[int]]:
"""Compute edges necessary to connect a fragmented neuron using networkX."""
# Generate a union of all graphs
g = nx.union_all([n.graph for n in nl]).to_undirected()
# Set existing edges to zero weight to make sure they have priority when
# calculating the minimum spanning tree
- nx.set_edge_attributes(g, 0, 'weight')
+ nx.set_edge_attributes(g, 0, "weight")
# If two nodes occupy the same position (e.g. if fragments are the
# result of cutting), they will have a distance of 0. Hence, we won't be
# able to simply filter by distance
- nx.set_edge_attributes(g, False, 'new')
+ nx.set_edge_attributes(g, False, "new")
# Convert the new edges into the right format
- edges_nx = [(r.source, r.target, {'weight': r.weight, 'new': True})
- for r in new_edges.itertuples()]
+ edges_nx = [
+ (r.source, r.target, {"weight": r.weight, "new": True})
+ for r in new_edges.itertuples()
+ ]
# Add edges to union graph
g.add_edges_from(edges_nx)
@@ -1237,15 +1360,16 @@ def _mst_nx(nl: 'core.NeuronList',
edges = nx.minimum_spanning_edges(g)
# Edges that need adding are those that were newly added
- to_add = [e for e in edges if e[2]['new']]
+ to_add = [e for e in edges if e[2]["new"]]
return to_add
-def average_skeletons(x: 'core.NeuronList',
- limit: Union[int, str] = 10,
- base_neuron: Optional[Union[int, 'core.TreeNeuron']] = None
- ) -> 'core.TreeNeuron':
+def average_skeletons(
+ x: "core.NeuronList",
+ limit: Union[int, str] = 10,
+ base_neuron: Optional[Union[int, "core.TreeNeuron"]] = None,
+) -> "core.TreeNeuron":
"""Compute an average from a list of skeletons.
This is a very simple implementation which may give odd results if used
@@ -1288,14 +1412,14 @@ def average_skeletons(x: 'core.NeuronList',
raise TypeError(f'Need NeuronList, got "{type(x)}"')
if len(x) < 2:
- raise ValueError('Need at least 2 neurons to average!')
+ raise ValueError("Need at least 2 neurons to average!")
# Map limit into unit space, if applicable
- limit = x[0].map_units(limit, on_error='raise')
+ limit = x[0].map_units(limit, on_error="raise")
# Generate KDTrees for each neuron
for n in x:
- n.tree = graph.neuron2KDTree(n, tree_type='c', data='nodes') # type: ignore # TreeNeuron has no tree
+ n.tree = graph.neuron2KDTree(n, tree_type="c", data="nodes") # type: ignore # TreeNeuron has no tree
# Set base for average: we will use this neuron's nodes to query
# the KDTrees
@@ -1306,9 +1430,11 @@ def average_skeletons(x: 'core.NeuronList',
elif isinstance(base_neuron, type(None)):
bn = x[0].copy()
else:
- raise ValueError(f'Unable to interpret base_neuron of type "{type(base_neuron)}"')
+ raise ValueError(
+ f'Unable to interpret base_neuron of type "{type(base_neuron)}"'
+ )
- base_nodes = bn.nodes[['x', 'y', 'z']].values
+ base_nodes = bn.nodes[["x", "y", "z"]].values
other_neurons = x[[n != bn for n in x]]
# Make sure these stay 2-dimensional arrays -> will add a column for each
@@ -1319,18 +1445,17 @@ def average_skeletons(x: 'core.NeuronList',
# For each "other" neuron, collect nearest neighbour coordinates
for n in other_neurons:
- nn_dist, nn_ix = n.tree.query(base_nodes,
- k=1,
- distance_upper_bound=limit)
+ nn_dist, nn_ix = n.tree.query(base_nodes, k=1, distance_upper_bound=limit)
# Translate indices into coordinates
# First, make empty array
this_coords = np.zeros((len(nn_dist), 3))
# Set coords without a nearest neighbour within the distance limit to "None"
- this_coords[nn_dist == float('inf')] = None
+ this_coords[nn_dist == float("inf")] = None
# Fill in coords of nearest neighbours
- this_coords[nn_dist != float(
- 'inf')] = n.tree.data[nn_ix[nn_dist != float('inf')]]
+ this_coords[nn_dist != float("inf")] = n.tree.data[
+ nn_ix[nn_dist != float("inf")]
+ ]
# Add coords to base coords
base_x = np.append(base_x, this_coords[:, 0:1], axis=1)
base_y = np.append(base_y, this_coords[:, 1:2], axis=1)
@@ -1349,19 +1474,21 @@ def average_skeletons(x: 'core.NeuronList',
mean_z[np.isnan(mean_z)] = base_nodes[np.isnan(mean_z), 2]
# Change coordinates accordingly
- bn.nodes['x'] = mean_x
- bn.nodes['y'] = mean_y
- bn.nodes['z'] = mean_z
+ bn.nodes["x"] = mean_x
+ bn.nodes["y"] = mean_y
+ bn.nodes["z"] = mean_z
return bn
-@utils.map_neuronlist(desc='Despiking', allow_parallel=True)
-def despike_skeleton(x: NeuronObject,
- sigma: int = 5,
- max_spike_length: int = 1,
- inplace: bool = False,
- reverse: bool = False) -> Optional[NeuronObject]:
+@utils.map_neuronlist(desc="Despiking", allow_parallel=True)
+def despike_skeleton(
+ x: NeuronObject,
+ sigma: int = 5,
+ max_spike_length: int = 1,
+ inplace: bool = False,
+ reverse: bool = False,
+) -> Optional[NeuronObject]:
r"""Remove spikes in skeleton (e.g. from jumps in image data).
For each node A, the Euclidean distance to its next successor (parent)
@@ -1404,13 +1531,13 @@ def despike_skeleton(x: NeuronObject,
# The decorator makes sure that we have single neurons at this point
if not isinstance(x, core.TreeNeuron):
- raise TypeError(f'Can only process TreeNeurons, not {type(x)}')
+ raise TypeError(f"Can only process TreeNeurons, not {type(x)}")
if not inplace:
x = x.copy()
# Index nodes table by node ID
- this_nodes = x.nodes.set_index('node_id', inplace=False)
+ this_nodes = x.nodes.set_index("node_id", inplace=False)
segs_to_walk = x.segments
@@ -1423,45 +1550,48 @@ def despike_skeleton(x: NeuronObject,
# Go over all segments
for seg in segs_to_walk:
# Get nodes A, B and C of this segment
- this_A = this_nodes.loc[seg[:-l - 1]]
+ this_A = this_nodes.loc[seg[: -l - 1]]
this_B = this_nodes.loc[seg[l:-1]]
- this_C = this_nodes.loc[seg[l + 1:]]
+ this_C = this_nodes.loc[seg[l + 1 :]]
# Get coordinates
- A = this_A[['x', 'y', 'z']].values
- B = this_B[['x', 'y', 'z']].values
- C = this_C[['x', 'y', 'z']].values
+ A = this_A[["x", "y", "z"]].values
+ B = this_B[["x", "y", "z"]].values
+ C = this_C[["x", "y", "z"]].values
# Calculate euclidean distances A->B and A->C
dist_AB = np.linalg.norm(A - B, axis=1)
dist_AC = np.linalg.norm(A - C, axis=1)
# Get the spikes
- spikes_ix = np.where(np.divide(dist_AB, dist_AC, where=dist_AC != 0) > sigma)[0]
+ spikes_ix = np.where(
+ np.divide(dist_AB, dist_AC, where=dist_AC != 0) > sigma
+ )[0]
spikes = this_B.iloc[spikes_ix]
if not spikes.empty:
# Interpolate new position(s) between A and C
new_positions = A[spikes_ix] + (C[spikes_ix] - A[spikes_ix]) / 2
- this_nodes.loc[spikes.index, ['x', 'y', 'z']] = new_positions
+ this_nodes.loc[spikes.index, ["x", "y", "z"]] = new_positions
# Reassign node table
x.nodes = this_nodes.reset_index(drop=False, inplace=False)
# The weights in the graph have changed, we need to update that
- x._clear_temp_attr(exclude=['segments', 'small_segments',
- 'classify_nodes'])
+ x._clear_temp_attr(exclude=["segments", "small_segments", "classify_nodes"])
return x
-@utils.map_neuronlist(desc='Guessing', allow_parallel=True)
-def guess_radius(x: NeuronObject,
- method: str = 'linear',
- limit: Optional[int] = None,
- smooth: bool = True,
- inplace: bool = False) -> Optional[NeuronObject]:
+@utils.map_neuronlist(desc="Guessing", allow_parallel=True)
+def guess_radius(
+ x: NeuronObject,
+ method: str = "linear",
+ limit: Optional[int] = None,
+ smooth: bool = True,
+ inplace: bool = False,
+) -> Optional[NeuronObject]:
"""Guess radii for skeleton nodes.
Uses distance between connectors and nodes to guess radii. Interpolates for
@@ -1497,10 +1627,10 @@ def guess_radius(x: NeuronObject,
"""
# The decorator makes sure that at this point we have single neurons
if not isinstance(x, core.TreeNeuron):
- raise TypeError(f'Can only process TreeNeurons, not {type(x)}')
+ raise TypeError(f"Can only process TreeNeurons, not {type(x)}")
- if not hasattr(x, 'connectors') or x.connectors.empty:
- raise ValueError('Neuron must have connectors!')
+ if not hasattr(x, "connectors") or x.connectors.empty:
+ raise ValueError("Neuron must have connectors!")
if not inplace:
x = x.copy()
@@ -1511,59 +1641,58 @@ def guess_radius(x: NeuronObject,
# We will be using the index as distance to interpolate. For this we have
# to change method 'linear' to 'index'
- method = 'index' if method == 'linear' else method
+ method = "index" if method == "linear" else method
# Collect connectors and calc distances
cn = x.connectors.copy()
# Prepare nodes (add parent_dist for later, set index)
- x.nodes['parent_dist'] = mmetrics.parent_dist(x, root_dist=0)
- nodes = x.nodes.set_index('node_id', inplace=False)
+ x.nodes["parent_dist"] = mmetrics.parent_dist(x, root_dist=0)
+ nodes = x.nodes.set_index("node_id", inplace=False)
# For each connector (pre and post), get the X/Y distance to its node
- cn_locs = cn[['x', 'y']].values
- tn_locs = nodes.loc[cn.node_id.values,
- ['x', 'y']].values
+ cn_locs = cn[["x", "y"]].values
+ tn_locs = nodes.loc[cn.node_id.values, ["x", "y"]].values
dist = np.sqrt(np.sum((tn_locs - cn_locs) ** 2, axis=1).astype(int))
- cn['dist'] = dist
+ cn["dist"] = dist
# Get max distance per node (in case of multiple connectors per
# node)
- cn_grouped = cn.groupby('node_id').dist.max()
+ cn_grouped = cn.groupby("node_id").dist.max()
# Set undefined radii to None so that they are ignored for interpolation
- nodes.loc[nodes.radius <= 0, 'radius'] = None
+ nodes.loc[nodes.radius <= 0, "radius"] = None
# Assign radii to nodes
- nodes.loc[cn_grouped.index, 'radius'] = cn_grouped.values.astype(
+ nodes.loc[cn_grouped.index, "radius"] = cn_grouped.values.astype(
nodes.radius.dtype, copy=False
)
# Go over each segment and interpolate radii
- for s in config.tqdm(x.segments, desc='Interp.', disable=config.pbar_hide,
- leave=config.pbar_leave):
-
+ for s in config.tqdm(
+ x.segments, desc="Interp.", disable=config.pbar_hide, leave=config.pbar_leave
+ ):
# Get this segments radii and parent dist
- this_radii = nodes.loc[s, ['radius', 'parent_dist']]
- this_radii['parent_dist_cum'] = this_radii.parent_dist.cumsum()
+ this_radii = nodes.loc[s, ["radius", "parent_dist"]]
+ this_radii["parent_dist_cum"] = this_radii.parent_dist.cumsum()
# Set cumulative distance as index and drop parent_dist
- this_radii = this_radii.set_index('parent_dist_cum',
- drop=True).drop('parent_dist',
- axis=1)
+ this_radii = this_radii.set_index("parent_dist_cum", drop=True).drop(
+ "parent_dist", axis=1
+ )
# Interpolate missing radii
- interp = this_radii.interpolate(method=method, limit_direction='both',
- limit=limit)
+ interp = this_radii.interpolate(
+ method=method, limit_direction="both", limit=limit
+ )
if smooth:
- interp = interp.rolling(smooth,
- min_periods=1).max()
+ interp = interp.rolling(smooth, min_periods=1).max()
- nodes.loc[s, 'radius'] = interp.values
+ nodes.loc[s, "radius"] = interp.values
# Set non-interpolated radii back to -1
- nodes.loc[nodes.radius.isnull(), 'radius'] = -1
+ nodes.loc[nodes.radius.isnull(), "radius"] = -1
# Reassign nodes
x.nodes = nodes.reset_index(drop=False, inplace=False)
@@ -1571,11 +1700,13 @@ def guess_radius(x: NeuronObject,
return x
-@utils.map_neuronlist(desc='Smoothing', allow_parallel=True)
-def smooth_skeleton(x: NeuronObject,
- window: int = 5,
- to_smooth: list = ['x', 'y', 'z'],
- inplace: bool = False) -> NeuronObject:
+@utils.map_neuronlist(desc="Smoothing", allow_parallel=True)
+def smooth_skeleton(
+ x: NeuronObject,
+ window: int = 5,
+ to_smooth: list = ["x", "y", "z"],
+ inplace: bool = False,
+) -> NeuronObject:
"""Smooth skeleton(s) using rolling windows.
Parameters
@@ -1618,26 +1749,28 @@ def smooth_skeleton(x: NeuronObject,
"""
# The decorator makes sure that at this point we have single neurons
if not isinstance(x, core.TreeNeuron):
- raise TypeError(f'Can only process TreeNeurons, not {type(x)}')
+ raise TypeError(f"Can only process TreeNeurons, not {type(x)}")
if not inplace:
x = x.copy()
# Prepare nodes (add parent_dist for later, set index)
# mmetrics.parent_dist(x, root_dist=0)
- nodes = x.nodes.set_index('node_id', inplace=False).copy()
+ nodes = x.nodes.set_index("node_id", inplace=False).copy()
to_smooth = utils.make_iterable(to_smooth)
miss = to_smooth[~np.isin(to_smooth, nodes.columns)]
if len(miss):
- raise ValueError(f'Column(s) not found in node table: {miss}')
+ raise ValueError(f"Column(s) not found in node table: {miss}")
# Go over each segment and smooth
- for s in config.tqdm(x.segments[::-1], desc='Smoothing',
- disable=config.pbar_hide,
- leave=config.pbar_leave):
-
+ for s in config.tqdm(
+ x.segments[::-1],
+ desc="Smoothing",
+ disable=config.pbar_hide,
+ leave=config.pbar_leave,
+ ):
# Get this segment's parent distances and get cumsum
this_co = nodes.loc[s, to_smooth]
@@ -1656,61 +1789,11 @@ def smooth_skeleton(x: NeuronObject,
return x
-@utils.map_neuronlist(desc='Smoothing', allow_parallel=True)
-def smooth_voxels(x: NeuronObject,
- sigma: int = 1,
- inplace: bool = False) -> NeuronObject:
- """Smooth voxel(s) using a Gaussian filter.
-
- Parameters
- ----------
- x : TreeNeuron | NeuronList
- Neuron(s) to be processed.
- sigma : int | (3, ) ints, optional
- Standard deviation for Gaussian kernel. The standard
- deviations of the Gaussian filter are given for each axis
- as a sequence, or as a single number, in which case it is
- equal for all axes.
- inplace : bool, optional
- If False, will use and return copy of original neuron(s).
-
- Returns
- -------
- VoxelNeuron/List
- Smoothed neuron(s).
-
- Examples
- --------
- >>> import navis
- >>> n = navis.example_neurons(1, kind='mesh')
- >>> vx = navis.voxelize(n, pitch='1 micron')
- >>> smoothed = navis.smooth_voxels(vx, sigma=2)
-
- See Also
- --------
- [`navis.smooth_mesh`][]
- For smoothing MeshNeurons and other mesh-likes.
- [`navis.smooth_skeleton`][]
- For smoothing TreeNeurons.
-
- """
- # The decorator makes sure that at this point we have single neurons
- if not isinstance(x, core.VoxelNeuron):
- raise TypeError(f'Can only process VoxelNeurons, not {type(x)}')
-
- if not inplace:
- x = x.copy()
-
- # Apply gaussian
- x._data = gaussian_filter(x.grid.astype(np.float32), sigma=sigma)
- x._clear_temp_attr()
-
- return x
-
-
-def break_fragments(x: Union['core.TreeNeuron', 'core.MeshNeuron'],
- labels_only: bool = False,
- min_size: Optional[int] = None) -> 'core.NeuronList':
+def break_fragments(
+ x: Union["core.TreeNeuron", "core.MeshNeuron"],
+ labels_only: bool = False,
+ min_size: Optional[int] = None,
+) -> "core.NeuronList":
"""Break neuron into its connected components.
Neurons can consist of several disconnected fragments. This function
@@ -1765,7 +1848,7 @@ def break_fragments(x: Union['core.TreeNeuron', 'core.MeshNeuron'],
if labels_only:
cc_id = {n: i for i, cc in enumerate(comp) for n in cc}
if isinstance(x, core.TreeNeuron):
- x.nodes['fragment'] = x.nodes.node_id.map(cc_id).astype(str)
+ x.nodes["fragment"] = x.nodes.node_id.map(cc_id).astype(str)
elif isinstance(x, core.MeshNeuron):
x.fragments = np.array([cc_id[i] for i in range(x.n_vertices)]).astype(str)
return x
@@ -1773,23 +1856,26 @@ def break_fragments(x: Union['core.TreeNeuron', 'core.MeshNeuron'],
if min_size:
comp = [cc for cc in comp if len(cc) >= min_size]
- return core.NeuronList([subset.subset_neuron(x,
- list(ss),
- inplace=False) for ss in config.tqdm(comp,
- desc='Breaking',
- disable=config.pbar_hide,
- leave=config.pbar_leave)])
-
-
-@utils.map_neuronlist(desc='Healing', allow_parallel=True)
-def heal_skeleton(x: 'core.NeuronList',
- method: Union[Literal['LEAFS'],
- Literal['ALL']] = 'ALL',
- max_dist: Optional[float] = None,
- min_size: Optional[float] = None,
- drop_disc: float = False,
- mask: Optional[Sequence] = None,
- inplace: bool = False) -> Optional[NeuronObject]:
+ return core.NeuronList(
+ [
+ subset.subset_neuron(x, list(ss), inplace=False)
+ for ss in config.tqdm(
+ comp, desc="Breaking", disable=config.pbar_hide, leave=config.pbar_leave
+ )
+ ]
+ )
+
+
+@utils.map_neuronlist(desc="Healing", allow_parallel=True)
+def heal_skeleton(
+ x: "core.NeuronList",
+ method: Union[Literal["LEAFS"], Literal["ALL"]] = "ALL",
+ max_dist: Optional[float] = None,
+ min_size: Optional[float] = None,
+ drop_disc: float = False,
+ mask: Optional[Sequence] = None,
+ inplace: bool = False,
+) -> Optional[NeuronObject]:
"""Heal fragmented skeleton(s).
Tries to heal a fragmented skeleton (i.e. a neuron with multiple roots)
@@ -1857,7 +1943,7 @@ def heal_skeleton(x: 'core.NeuronList',
"""
method = str(method).upper()
- if method not in ('LEAFS', 'ALL'):
+ if method not in ("LEAFS", "ALL"):
raise ValueError(f'Unknown method "{method}"')
# The decorator makes sure that at this point we have single neurons
@@ -1865,17 +1951,14 @@ def heal_skeleton(x: 'core.NeuronList',
raise TypeError(f'Expected TreeNeuron(s), got "{type(x)}"')
if not isinstance(max_dist, type(None)):
- max_dist = x.map_units(max_dist, on_error='raise')
+ max_dist = x.map_units(max_dist, on_error="raise")
if not inplace:
x = x.copy()
- _ = _stitch_mst(x,
- nodes=method,
- max_dist=max_dist,
- min_size=min_size,
- mask=mask,
- inplace=True)
+ _ = _stitch_mst(
+ x, nodes=method, max_dist=max_dist, min_size=min_size, mask=mask, inplace=True
+ )
# See if we need to drop remaining disconnected fragments
if drop_disc:
@@ -1888,14 +1971,14 @@ def heal_skeleton(x: 'core.NeuronList',
return x
-def _stitch_mst(x: 'core.TreeNeuron',
- nodes: Union[Literal['LEAFS'],
- Literal['ALL'],
- list] = 'ALL',
- max_dist: Optional[float] = np.inf,
- min_size: Optional[float] = None,
- mask: Optional[Sequence] = None,
- inplace: bool = False) -> Optional['core.TreeNeuron']:
+def _stitch_mst(
+ x: "core.TreeNeuron",
+ nodes: Union[Literal["LEAFS"], Literal["ALL"], list] = "ALL",
+ max_dist: Optional[float] = np.inf,
+ min_size: Optional[float] = None,
+ mask: Optional[Sequence] = None,
+ inplace: bool = False,
+) -> Optional["core.TreeNeuron"]:
"""Stitch disconnected neuron using a minimum spanning tree.
Parameters
@@ -1935,8 +2018,9 @@ def _stitch_mst(x: 'core.TreeNeuron',
mask = np.asarray(mask)
if mask.dtype == bool:
if len(mask) != len(x.nodes):
- raise ValueError("Length of boolean mask must match number of "
- "nodes in the neuron")
+ raise ValueError(
+ "Length of boolean mask must match number of " "nodes in the neuron"
+ )
mask = x.nodes.node_id.values[mask]
# Get connected components
@@ -1960,8 +2044,8 @@ def _stitch_mst(x: 'core.TreeNeuron',
cc = cc[cc.isin(above)]
# Filter to leaf nodes if applicable
- if nodes == 'LEAFS':
- keep = to_use['type'].isin(['end', 'root'])
+ if nodes == "LEAFS":
+ keep = to_use["type"].isin(["end", "root"])
to_use = to_use[keep]
cc = cc[keep]
@@ -1972,10 +2056,10 @@ def _stitch_mst(x: 'core.TreeNeuron',
cc = cc[keep]
# Collect fragments
- Fragment = namedtuple('Fragment', ['frag_id', 'node_ids', 'kd'])
+ Fragment = namedtuple("Fragment", ["frag_id", "node_ids", "kd"])
fragments = []
for frag_id, df in to_use.groupby(cc):
- kd = KDTree(df[[*'xyz']].values)
+ kd = KDTree(df[[*"xyz"]].values)
fragments.append(Fragment(frag_id, df.node_id.values, kd))
# Sort from big-to-small, so the calculations below use a
@@ -2014,30 +2098,36 @@ def _stitch_mst(x: 'core.TreeNeuron',
# Add edge from one fragment to another,
# but keep track of which fine-grained skeleton
# nodes were used to calculate distance.
- frag_graph.add_edge(frag_a.frag_id, frag_b.frag_id,
- node_a=node_a, node_b=node_b,
- distance=dist_ab)
+ frag_graph.add_edge(
+ frag_a.frag_id,
+ frag_b.frag_id,
+ node_a=node_a,
+ node_b=node_b,
+ distance=dist_ab,
+ )
# Compute inter-fragment MST edges
- frag_edges = nx.minimum_spanning_edges(frag_graph, weight='distance', data=True)
+ frag_edges = nx.minimum_spanning_edges(frag_graph, weight="distance", data=True)
# For each inter-fragment edge, add the corresponding
# fine-grained edge between skeleton nodes in the original graph.
g = x.graph.to_undirected()
- to_add = [[e[2]['node_a'], e[2]['node_b']] for e in frag_edges]
+ to_add = [[e[2]["node_a"], e[2]["node_b"]] for e in frag_edges]
g.add_edges_from(to_add)
# Rewire based on graph
return graph.rewire_skeleton(x, g, inplace=inplace)
-@utils.map_neuronlist(desc='Pruning', must_zip=['source'], allow_parallel=True)
-@utils.meshneuron_skeleton(method='subset')
-def prune_at_depth(x: NeuronObject,
- depth: Union[float, int], *,
- source: Optional[int] = None,
- inplace: bool = False
- ) -> Optional[NeuronObject]:
+@utils.map_neuronlist(desc="Pruning", must_zip=["source"], allow_parallel=True)
+@utils.meshneuron_skeleton(method="subset")
+def prune_at_depth(
+ x: NeuronObject,
+ depth: Union[float, int],
+ *,
+ source: Optional[int] = None,
+ inplace: bool = False,
+) -> Optional[NeuronObject]:
"""Prune all neurites past a given distance from a source.
Parameters
@@ -2077,9 +2167,9 @@ def prune_at_depth(x: NeuronObject,
"""
# The decorator makes sure that at this point we only have single neurons
if not isinstance(x, core.TreeNeuron):
- raise TypeError(f'Expected TreeNeuron, got {type(x)}')
+ raise TypeError(f"Expected TreeNeuron, got {type(x)}")
- depth = x.map_units(depth, on_error='raise')
+ depth = x.map_units(depth, on_error="raise")
if depth < 0:
raise ValueError(f'`depth` must be > 0, got "{depth}"')
@@ -2100,26 +2190,36 @@ def prune_at_depth(x: NeuronObject,
return x
-@utils.map_neuronlist(desc='Pruning', allow_parallel=True)
-def drop_fluff(x: Union['core.TreeNeuron',
- 'core.MeshNeuron',
- 'core.NeuronList'],
- keep_size: Optional[float] = None,
- inplace: bool = False):
+@utils.map_neuronlist(desc="Removing fluff", allow_parallel=True)
+def drop_fluff(
+ x: Union["core.TreeNeuron", "core.MeshNeuron", "core.NeuronList"],
+ keep_size: Optional[float] = None,
+ n_largest: Optional[int] = None,
+ epsilon: Optional[float] = None,
+ inplace: bool = False,
+):
"""Remove small disconnected pieces of "fluff".
By default, this function will remove all but the largest connected
- component from the neuron (see also `keep_size`) parameter. Connectors will
+ component from the neuron. You can change that behavior using the
+ `keep_size` and `n_largest` parameters. Connectors (if present) will
be remapped to the closest surviving vertex/node.
Parameters
----------
- x : TreeNeuron | MeshNeuron | NeuronList
- The neuron to remove fluff from.
+ x : TreeNeuron | MeshNeuron | Dotprops | NeuronList
+ The neuron(s) to remove fluff from.
keep_size : float, optional
Use this to set a size (in number of nodes/vertices) for small
bits to keep. If `keep_size` < 1 it will be interpreted as
- fraction of total nodes/vertices.
+ fraction of total nodes/vertices/points.
+ n_largest : int, optional
+ If set, will keep the `n_largest` connected components. Note:
+ if provided, `keep_size` will be applied first!
+ epsilon : float, optional
+ For Dotprops: distance at which to consider two points to be
+ connected. If `None`, will use the default value of 5 times
+ the average node distance (`x.sampling_resolution`).
inplace : bool, optional
If False, pruning is performed on copy of original neuron
which is then returned.
@@ -2133,37 +2233,65 @@ def drop_fluff(x: Union['core.TreeNeuron',
--------
>>> import navis
>>> m = navis.example_neurons(1, kind='mesh')
- >>> clean = navis.drop_fluff(m, keep_size=30)
- >>> m.n_vertices, clean.n_vertices
- (6309, 6037)
+ >>> m.n_vertices
+ 6309
+ >>> # Remove all but the largest connected component
+ >>> top = navis.drop_fluff(m)
+ >>> top.n_vertices
+ 5951
+ >>> # Keep the ten largest connected components
+ >>> two = navis.drop_fluff(m, n_largest=10)
+ >>> two.n_vertices
+ 6069
+ >>> # Keep all fragments with at least 100 vertices
+ >>> clean = navis.drop_fluff(m, keep_size=100)
+ >>> clean.n_vertices
+ 5951
+ >>> # Keep the two largest fragments with at least 50 vertices each
+ >>> # (for this neuron the result is just the largest fragment)
+ >>> clean2 = navis.drop_fluff(m, keep_size=50, n_largest=2)
+ >>> clean2.n_vertices
+ 6037
"""
- utils.eval_param(x, name='x', allowed_types=(core.TreeNeuron, core.MeshNeuron))
+ utils.eval_param(
+ x, name="x", allowed_types=(core.TreeNeuron, core.MeshNeuron, core.Dotprops)
+ )
- G = x.graph
- # Skeleton graphs are directed
- if G.is_directed():
- G = G.to_undirected()
+ if isinstance(x, (core.MeshNeuron, core.TreeNeuron)):
+ G = x.graph
+ # Skeleton graphs are directed
+ if G.is_directed():
+ G = G.to_undirected()
+ elif isinstance(x, core.Dotprops):
+ G = graph.neuron2nx(x, epsilon=epsilon)
cc = sorted(nx.connected_components(G), key=lambda x: len(x), reverse=True)
- if keep_size:
- if keep_size < 1:
- keep_size = len(G.nodes) * keep_size
+ # Translate keep_size to number of nodes
+ if keep_size and keep_size < 1:
+ keep_size = len(G.nodes) * keep_size
- keep = [n for c in cc for n in c if len(c) >= keep_size]
+ if keep_size:
+ cc = [c for c in cc if len(c) >= keep_size]
+ if not n_largest:
+ keep = [i for c in cc for i in c]
+ else:
+ keep = [i for c in cc[:n_largest] for i in c]
+ elif n_largest:
+ keep = [i for c in cc[:n_largest] for i in c]
else:
keep = cc[0]
# Subset neuron
x = subset.subset_neuron(x, subset=keep, inplace=inplace, keep_disc_cn=True)
- # See if we need to re-attach any connectors
- id_col = 'node_id' if isinstance(x, core.TreeNeuron) else 'vertex_id'
- if x.has_connectors and id_col in x.connectors:
- disc = ~x.connectors[id_col].isin(x.graph.nodes).values
- if any(disc):
- xyz = x.connectors.loc[disc, ['x', 'y', 'z']].values
- x.connectors.loc[disc, id_col] = x.snap(xyz)[0]
+ # See if we need to/can re-attach any connectors
+ if x.has_connectors:
+ id_col = [
+ c for c in ("node_id", "vertex_id", "point_id") if c in x.connectors.columns
+ ]
+ if id_col:
+ id_col = id_col[0]
+ disc = ~x.connectors[id_col].isin(x.graph.nodes).values
+ if any(disc):
+ xyz = x.connectors.loc[disc, ["x", "y", "z"]].values
+ x.connectors.loc[disc, id_col] = x.snap(xyz)[0]
return x
diff --git a/navis/morpho/mmetrics.py b/navis/morpho/mmetrics.py
index d84b822c..dcd8c415 100644
--- a/navis/morpho/mmetrics.py
+++ b/navis/morpho/mmetrics.py
@@ -165,14 +165,14 @@ def strahler_index(
raise ValueError(f'`method` must be "standard" or "greedy", got "{method}"')
if utils.fastcore:
- x.nodes['strahler_index'] = utils.fastcore.strahler_index(
+ x.nodes["strahler_index"] = utils.fastcore.strahler_index(
x.nodes.node_id.values,
x.nodes.parent_id.values,
method=method,
to_ignore=to_ignore,
min_twig_size=min_twig_size,
).astype(np.int16)
- x.nodes['strahler_index'] = x.nodes.strahler_index.fillna(1)
+ x.nodes["strahler_index"] = x.nodes.strahler_index.fillna(1)
return x
# Find branch, root and end nodes
@@ -1271,7 +1271,7 @@ def flow_centrality(x: "core.NeuronObject") -> "core.NeuronObject":
def tortuosity(
x: "core.NeuronObject",
- seg_length: Union[int, float, str, Sequence[Union[int, float, str]]] = 10,
+ seg_length: Optional[Union[int, float, str, Sequence[Union[int, float, str]]]] = None,
) -> Union[float, Sequence[float], pd.DataFrame]:
"""Calculate tortuosity of a neuron.
@@ -1280,19 +1280,10 @@ def tortuosity(
`L` (`seg_length`) to the Euclidean distance `R` between its ends.
The way this is implemented in `navis`:
+ For each linear stretch (i.e. segments between branch points, leaves or roots)
+ we calculate its geodesic length `L` and the Euclidean distance `R` between
+ its ends. The final tortuosity is the mean of `L / R` across all segments.
- 1. Each linear stretch (i.e. between branch points or branch points to a
- leaf node) is divided into segments of exactly `seg_length`
- geodesic length. Any remainder is skipped.
- 2. For each of these segments we divide its geodesic length `L`
- (i.e. `seg_length`) by the Euclidean distance `R` between its start and
- its end.
- 3. The final tortuosity is the mean of `L / R` across all segments.
-
- Note
- ----
- If you want to make sure that segments are as close to length `L` as
- possible, consider resampling the neuron using [`navis.resample_skeleton`][].
Parameters
----------
@@ -1300,18 +1291,27 @@ def tortuosity(
Neuron to analyze. If MeshNeuron, will generate and
use a skeleton representation.
seg_length : int | float | str | list thereof, optional
- Target segment length(s) `L`. If neuron(s) have their
- `.units` set, you can also pass a string such as
- "1 micron". `seg_length` must be larger than the
- current sampling resolution of the neuron.
+ Target segment length(s) `L`. If `seg_length` is
+ provided, each linear segment is further divided into
+ segments of exactly `seg_length` (geodesic) length
+ and the tortuosity is calculated for each of these
+ sub-segments. If `seg_length` is not provided, the
+ tortuosity is calculated for each linear segment as is.
+
+ If neuron(s) have their `.units` set, you can also
+ pass a string such as "1 micron". `seg_length` must
+ be larger than the current sampling resolution of the
+ neuron. If you want to make sure that segments are as
+ close to length `L` as possible, consider resampling the
+ neuron using [`navis.resample_skeleton`][].
Returns
-------
tortuosity : float | np.array | pandas.DataFrame
If x is NeuronList, will return DataFrame.
If x is single TreeNeuron, will return either a
- single float (if single seg_length is queried) or a
- DataFrame (if multiple seg_lengths are queried).
+ single float (if no or a single seg_length is queried)
+ or a DataFrame (if multiple seg_lengths are queried).
See Also
--------
@@ -1323,7 +1323,11 @@ def tortuosity(
--------
>>> import navis
>>> n = navis.example_neurons(1)
- >>> # Calculate tortuosity with 1 micron seg lengths
+ >>> # Calculate tortuosity as-is
+ >>> T = navis.tortuosity(n)
+ >>> round(T, 3)
+ 1.074
+ >>> # Calculate tortuosity with 1 micron segment lengths
>>> T = navis.tortuosity(n, seg_length='1 micron')
>>> round(T, 3)
1.054
@@ -1356,6 +1360,34 @@ def tortuosity(
if isinstance(seg_length, (list, np.ndarray)):
return [tortuosity(x, l) for l in seg_length]
+ if seg_length is None:
+ return _tortuosity_simple(x)
+ else:
+ return _tortuosity_segmented(x, seg_length)
+
+
+def _tortuosity_simple(x: "core.TreeNeuron") -> float:
+ """Calculate tortuosity for neuron as-is."""
+ # Iterate over segments
+ locs = x.nodes.set_index("node_id")[["x", "y", "z"]].astype(float)
+ T_all = []
+ for seg in x.small_segments:
+ # Get coordinates
+ coords = locs.loc[seg].values
+
+ # Calculate geodesic distance for this segment
+ L = np.linalg.norm(np.diff(coords.T), axis=0).sum()
+
+ # Calculate Euclidean distance for this segment
+ R = np.linalg.norm(coords[0] - coords[-1])
+ T = L / R
+ T_all = np.append(T_all, T)
+
+ return T_all.mean()
+
+
+def _tortuosity_segmented(
+ x: "core.TreeNeuron", seg_length: Union[int, float, str]
+) -> float:
+ """Calculate tortuosity for segmented neuron."""
# From here on out seg length is single value
seg_length: float = x.map_units(seg_length, on_error="raise")
@@ -1656,13 +1688,17 @@ def betweeness_centrality(
@utils.map_neuronlist(desc="Cable length", allow_parallel=True)
@utils.meshneuron_skeleton(method="pass_through")
-def cable_length(x) -> Union[int, float]:
+def cable_length(x, mask=None) -> Union[int, float]:
"""Calculate cable length.
Parameters
----------
x : TreeNeuron | MeshNeuron | NeuronList
Neuron(s) for which to calculate cable length.
+ mask : None | boolean array | callable
+ If provided, will only consider nodes where
+ `mask` is True. Callable must accept a DataFrame of nodes
+ and return a boolean array of the same length.
Returns
-------
@@ -1672,25 +1708,49 @@ def cable_length(x) -> Union[int, float]:
"""
utils.eval_param(x, name="x", allowed_types=(core.TreeNeuron,))
+ nodes = x.nodes
+ if mask is not None:
+ if callable(mask):
+ mask = mask(x.nodes)
+
+ if isinstance(mask, np.ndarray):
+ if len(mask) != len(x.nodes):
+ raise ValueError(
+ f"Length of mask ({len(mask)}) must match number of nodes "
+ f"({len(x.nodes)})."
+ )
+ else:
+ raise ValueError(
+ f"Mask must be callable or boolean array, got {type(mask)}"
+ )
+
+ nodes = x.nodes.loc[mask, ["node_id", "parent_id", "x", "y", "z"]].copy()
+
+ # Set the parent IDs to -1 for nodes that are not in the mask
+ nodes.loc[~nodes.parent_id.isin(nodes.node_id), "parent_id"] = -1
+
+ if not len(nodes):
+ return 0
+
# See if we can use fastcore
if not utils.fastcore:
# The by far fastest way to get the cable length is to work on the node table
# Using the igraph representation is about the same speed... if it is already calculated!
# However, one problem with the graph representation is that with large neuronlists
# it adds a lot to the memory footprint.
- not_root = (x.nodes.parent_id >= 0).values
- xyz = x.nodes[["x", "y", "z"]].values[not_root]
+ not_root = (nodes.parent_id >= 0).values
+ xyz = nodes[["x", "y", "z"]].values[not_root]
xyz_parent = (
x.nodes.set_index("node_id")
- .loc[x.nodes.parent_id.values[not_root], ["x", "y", "z"]]
+ .loc[nodes.parent_id.values[not_root], ["x", "y", "z"]]
.values
)
cable_length = np.sum(np.linalg.norm(xyz - xyz_parent, axis=1))
else:
cable_length = utils.fastcore.dag.parent_dist(
- x.nodes.node_id.values,
- x.nodes.parent_id.values,
- x.nodes[["x", "y", "z"]].values,
+ nodes.node_id.values,
+ nodes.parent_id.values,
+ nodes[["x", "y", "z"]].values,
root_dist=0,
).sum()
diff --git a/navis/nbl/nblast_funcs.py b/navis/nbl/nblast_funcs.py
index c1ff7ebb..14add44d 100644
--- a/navis/nbl/nblast_funcs.py
+++ b/navis/nbl/nblast_funcs.py
@@ -1490,7 +1490,7 @@ def check_pykdtree_flag():
# See if pykdtree is present
try:
import pykdtree
- except ImportError:
+ except ModuleNotFoundError:
# If not present, just return
return
diff --git a/navis/nbl/synblast_funcs.py b/navis/nbl/synblast_funcs.py
index 36cc1b69..04bbc853 100644
--- a/navis/nbl/synblast_funcs.py
+++ b/navis/nbl/synblast_funcs.py
@@ -37,7 +37,7 @@
try:
from pykdtree.kdtree import KDTree
-except ImportError:
+except ModuleNotFoundError:
from scipy.spatial import cKDTree as KDTree
__all__ = ['synblast']
diff --git a/navis/nbl/utils.py b/navis/nbl/utils.py
index 288f5960..73cba417 100644
--- a/navis/nbl/utils.py
+++ b/navis/nbl/utils.py
@@ -400,9 +400,11 @@ def nblast_prime(scores, n_dim=.2, metric='euclidean'):
"""
try:
from sklearn.decomposition import PCA
- except ImportError:
- raise ImportError('Please install scikit-learn to use `nblast_prime`:\n'
- ' pip3 install scikit-learn -U')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ 'Please install scikit-learn to use `nblast_prime`:\n'
+ ' pip3 install scikit-learn -U'
+ )
if not isinstance(scores, pd.DataFrame):
raise TypeError(f'`scores` must be pandas DataFrame, got "{type(scores)}"')
diff --git a/navis/plotting/colors.py b/navis/plotting/colors.py
index 13a80ae0..5e38bd98 100644
--- a/navis/plotting/colors.py
+++ b/navis/plotting/colors.py
@@ -315,8 +315,8 @@ def vertex_colors(neurons, by, palette, alpha=1, use_alpha=False, vmin=None, vma
# First check if data is numerical or categorical
is_num = [utils.is_numeric(a, bool_numeric=False, try_convert=False) for a in values]
- # If numerical
- if all(is_num):
+ # If numerical and we weren't given a categorical palette
+ if all(is_num) and not isinstance(palette, dict):
# Get min/max values
if not vmin:
vmin = [np.nanmin(v) for v in values]
@@ -365,8 +365,8 @@ def vertex_colors(neurons, by, palette, alpha=1, use_alpha=False, vmin=None, vma
colors.append(c)
# We don't want to deal with mixed data
- elif any(is_num):
- raise ValueError('Data appears to be mixed numeric and non-numeric.')
+ # elif any(is_num):
+ # raise ValueError('Data appears to be mixed numeric and non-numeric.')
else:
# Find unique values
unique_v = np.unique([v for l in values for v in np.unique(l)])
diff --git a/navis/plotting/d.py b/navis/plotting/d.py
index 6638bf79..2e872733 100644
--- a/navis/plotting/d.py
+++ b/navis/plotting/d.py
@@ -87,7 +87,7 @@ def plot1d(x: 'core.NeuronObject',
>>> plt.close('all')
- See the [`flat plotting tutorial`](generated/gallery/plotting/plot_02_plotting_1d/)
+ See the [`flat plotting tutorial`](generated/gallery/1_plotting/tutorial_plotting_02_1d/)
for more examples.
"""
diff --git a/navis/plotting/dd.py b/navis/plotting/dd.py
index 8620f8f7..7d6e3b9f 100644
--- a/navis/plotting/dd.py
+++ b/navis/plotting/dd.py
@@ -84,11 +84,13 @@ def plot2d(
Object parameters
-----------------
- soma : bool, default=True
+ soma : bool | dict, default=True
Plot soma if one exists. Size of the soma is determined
by the neuron's `.soma_radius` property which defaults
- to the "radius" column for `TreeNeurons`.
+ to the "radius" column for `TreeNeurons`. You can also
+ pass `soma` as a dictionary to customize the appearance
+ of the soma - for example `soma={"color": "red", "lw": 2, "ec": 1}`.
radius : "auto" (default) | bool
@@ -342,7 +344,7 @@ def plot2d(
>>> fig, ax = navis.plot2d(nl, method='3d', depth_coloring=True, view=('x', '-z'))
>>> plt.show() # doctest: +SKIP
- See the [plotting intro](../../../generated/gallery/1_plotting/plot_00_plotting_intro)
+ See the [plotting intro](../../generated/gallery/1_plotting/tutorial_plotting_00_intro)
for more examples.
See Also
@@ -370,8 +372,9 @@ def plot2d(
# Parse objects
(neurons, volumes, points, _) = utils.parse_objects(x)
- # Color_by can be a per-node/vertex color, or a per-neuron color
- # such as property of the neuron
+ # Here we check whether `color_by` is a neuron property which we
+ # want to translate into a single color per neuron, or a
+ # per-node/vertex property which we will parse later
color_neurons_by = None
if settings.color_by is not None and neurons:
if not settings.palette:
@@ -380,9 +383,18 @@ def plot2d(
"when using `color_by` argument."
)
- # Check if this is a neuron property
+ # Check if this may be a neuron property
if isinstance(settings.color_by, str):
- if hasattr(neurons[0], settings.color_by):
+ # Check if this could be a neuron property
+ has_prop = hasattr(neurons[0], settings.color_by)
+
+ # For TreeNeurons, we also check if it is a node property
+ # If so, prioritize this.
+ if isinstance(neurons[0], core.TreeNeuron):
+ if settings.color_by in neurons[0].nodes.columns:
+ has_prop = False
+
+ if has_prop:
# If it is, use it to color neurons
color_neurons_by = [
getattr(neuron, settings.color_by) for neuron in neurons
@@ -393,7 +405,7 @@ def plot2d(
color_neurons_by = settings.color_by
settings.color_by = None
- # Generate the colormaps
+ # Generate the per-neuron colors
(neuron_cmap, volumes_cmap) = prepare_colormap(
settings.color,
neurons=neurons,
@@ -550,7 +562,24 @@ def plot2d(
settings.radius = False
if isinstance(neuron, core.TreeNeuron) and settings.radius:
- _neuron = conversion.tree2meshneuron(neuron)
+ # Warn once if more than 5% of nodes have missing radii
+ if not getattr(fig, "_radius_warned", False):
+ if (
+ (neuron.nodes.radius.fillna(0).values <= 0).sum()
+ / neuron.n_nodes
+ ) > 0.05:
+ logger.warning(
+ "Some skeleton nodes have radius <= 0. This may lead to "
+ "rendering artifacts. Set `radius=False` to plot skeletons "
+ "as single-width lines instead."
+ )
+ fig._radius_warned = True
+
+ _neuron = conversion.tree2meshneuron(
+ neuron,
+ warn_missing_radii=False,
+ radius_scale_factor=settings.get("linewidth", 1),
+ )
_neuron.connectors = neuron.connectors
neuron = _neuron
@@ -883,7 +912,7 @@ def _plot_connectors(neuron, color, ax, settings):
inner_dict["color"] = settings.cn_colors
if settings.method == "2d":
- for c, this_cn in connectors.groupby('type'):
+ for c, this_cn in connectors.groupby("type"):
x, y = _parse_view2d(this_cn[["x", "y", "z"]].values, settings.view)
ax.scatter(
@@ -892,7 +921,7 @@ def _plot_connectors(neuron, color, ax, settings):
color=cn_layout[c]["color"],
edgecolor="none",
s=settings.cn_size if settings.cn_size else cn_layout["size"],
- zorder=1000
+ zorder=1000,
)
ax.get_children()[-1].set_gid(f"CN_{neuron.id}")
elif settings.method in ["3d", "3d_complex"]:
@@ -1110,7 +1139,7 @@ def _plot_skeleton(neuron, color, ax, settings):
)
ax.add_line(this_line)
else:
- if settings.palette:
+ if isinstance(settings.palette, str):
cmap = plt.get_cmap(settings.palette)
else:
cmap = DEPTH_CMAP
@@ -1168,9 +1197,7 @@ def _plot_skeleton(neuron, color, ax, settings):
d = [n.x, n.y, n.z][_get_depth_axis(settings.view)]
soma_color = DEPTH_CMAP(settings.norm(d))
- sx, sy = _parse_view2d(np.array([[n.x, n.y, n.z]]), settings.view)
- c = mpatches.Circle(
- (sx[0], sy[0]),
+ soma_defaults = dict(
radius=r,
fill=True,
fc=soma_color,
@@ -1178,6 +1205,11 @@ def _plot_skeleton(neuron, color, ax, settings):
zorder=4,
edgecolor="none",
)
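+                    # These defaults can be overridden by passing a dict,
+                    # e.g. `soma=dict(fc="magenta", radius=2)`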
+ if isinstance(settings.soma, dict):
+ soma_defaults.update(settings.soma)
+
+ sx, sy = _parse_view2d(np.array([[n.x, n.y, n.z]]), settings.view)
+ c = mpatches.Circle((sx[0], sy[0]), **soma_defaults)
ax.add_patch(c)
return None, None
@@ -1268,14 +1300,17 @@ def _plot_skeleton(neuron, color, ax, settings):
x = r * np.outer(np.cos(u), np.sin(v)) + n.x
y = r * np.outer(np.sin(u), np.sin(v)) + n.y
z = r * np.outer(np.ones(np.size(u)), np.cos(v)) + n.z
- surf = ax.plot_surface(
- x,
- y,
- z,
+
+ soma_defaults = dict(
color=soma_color,
shade=settings.mesh_shade,
rasterized=settings.rasterize,
)
+ if isinstance(settings.soma, dict):
+ soma_defaults.update(settings.soma)
+
+ surf = ax.plot_surface(x, y, z, **soma_defaults)
+
if settings.group_neurons:
surf.set_gid(neuron.id)
diff --git a/navis/plotting/ddd.py b/navis/plotting/ddd.py
index 8fa788c3..23f2f192 100644
--- a/navis/plotting/ddd.py
+++ b/navis/plotting/ddd.py
@@ -345,7 +345,7 @@ def plot3d(
>>> # Clear viewer (works only with octarine and vispy)
>>> v = navis.plot3d(nl, clear=True)
- See the [plotting intro](../../../generated/gallery/1_plotting/plot_00_plotting_intro)
+ See the [plotting intro](../../generated/gallery/1_plotting/tutorial_plotting_00_intro)
for even more examples.
"""
@@ -461,7 +461,7 @@ def plot3d_vispy(x, **kwargs):
if volumes:
viewer.add(volumes, **settings.to_dict())
if points:
- viewer.add(points, scatter_kws=settings.catter_kws)
+ viewer.add(points, scatter_kws=settings.scatter_kws)
return viewer
@@ -496,12 +496,12 @@ def plot3d_octarine(x, **kwargs):
# Check if any existing viewer has already been closed
if isinstance(getattr(config, "primary_viewer", None), oc.Viewer):
try:
- _ = getattr(config, "primary_viewer").canvas
+            # Accessing the canvas raises a RuntimeError if the viewer
+            # has already been closed
+            repr(getattr(config, "primary_viewer").canvas)
except RuntimeError:
config.primary_viewer = None
if settings.viewer in (None, "new"):
- # If it does not exists yet, initialise a canvas object and make global
+        # If it does not exist yet, initialize a canvas object and make it global
if (
not isinstance(getattr(config, "primary_viewer", None), oc.Viewer)
or settings.viewer == "new"
@@ -533,7 +533,7 @@ def plot3d_octarine(x, **kwargs):
neuron_settings = settings.to_dict()
for key in settings._viewer_settings:
neuron_settings.pop(key, None)
- viewer.add_neurons(neurons, **neuron_settings)
+        viewer.add_neurons(
+            neurons, center=settings.get("center", True), **neuron_settings
+        )
if volumes:
for v in volumes:
viewer.add_mesh(
@@ -544,7 +544,8 @@ def plot3d_octarine(x, **kwargs):
center=settings.center,
)
if points:
- viewer.add_points(points, center=settings.center, **settings.scatter_kws)
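+        # `viewer.add_points` expects a single (N, 3) array, so add each
+        # point cloud separately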
+ for p in points:
+ viewer.add_points(p, center=settings.center, **settings.scatter_kws)
return viewer
diff --git a/navis/plotting/flat.py b/navis/plotting/flat.py
index 15a10222..7ce0d39f 100644
--- a/navis/plotting/flat.py
+++ b/navis/plotting/flat.py
@@ -124,8 +124,8 @@ def plot_flat(
>>> plt.close('all')
- See the [plotting intro](../../../generated/gallery/1_plotting/plot_00_plotting_intro)
- and the [neuron topology tutorial](../../../generated/gallery/1_plotting/plot_03_plotting_dend)
+ See the [plotting intro](../../generated/gallery/1_plotting/tutorial_plotting_00_intro)
+ and the [neuron topology tutorial](../../generated/gallery/1_plotting/tutorial_plotting_03_dend)
for more examples.
"""
diff --git a/navis/plotting/k3d/k3d_objects.py b/navis/plotting/k3d/k3d_objects.py
index 129e6460..76a7a63c 100644
--- a/navis/plotting/k3d/k3d_objects.py
+++ b/navis/plotting/k3d/k3d_objects.py
@@ -69,6 +69,7 @@ def neuron2k3d(x, colormap, settings):
cn_lay.update(settings.cn_layout)
trace_data = []
+ _radius_warned = False
for i, neuron in enumerate(x):
name = str(getattr(neuron, "name", neuron.id))
color = colormap[i]
@@ -106,7 +107,23 @@ def neuron2k3d(x, colormap, settings):
settings.radius = False
if isinstance(neuron, core.TreeNeuron) and settings.radius:
- _neuron = conversion.tree2meshneuron(neuron)
+ # Warn once if more than 5% of nodes have missing radii
+ if not _radius_warned:
+ if (
+ (neuron.nodes.radius.fillna(0).values <= 0).sum() / neuron.n_nodes
+ ) > 0.05:
+ logger.warning(
+ "Some skeleton nodes have radius <= 0. This may lead to "
+ "rendering artifacts. Set `radius=False` to plot skeletons "
+ "as single-width lines instead."
+ )
+ _radius_warned = True
+
+ _neuron = conversion.tree2meshneuron(
+ neuron,
+ warn_missing_radii=False,
+ radius_scale_factor=settings.get("linewidth", 1),
+ )
_neuron.connectors = neuron.connectors
neuron = _neuron
@@ -157,17 +174,21 @@ def neuron2k3d(x, colormap, settings):
# Add connectors
if (settings.connectors or settings.connectors_only) and neuron.has_connectors:
if isinstance(settings.connectors, (list, np.ndarray, tuple)):
- connectors = neuron.connectors[neuron.connectors.type.isin(settings.connectors)]
- elif settings.connectors == 'pre':
+ connectors = neuron.connectors[
+ neuron.connectors.type.isin(settings.connectors)
+ ]
+ elif settings.connectors == "pre":
connectors = neuron.presynapses
- elif settings.connectors == 'post':
+ elif settings.connectors == "post":
connectors = neuron.postsynapses
elif isinstance(settings.connectors, str):
- connectors = neuron.connectors[neuron.connectors.type == settings.connectors]
+ connectors = neuron.connectors[
+ neuron.connectors.type == settings.connectors
+ ]
else:
connectors = neuron.connectors
- for j, this_cn in connectors.groupby('type'):
+ for j, this_cn in connectors.groupby("type"):
if isinstance(settings.cn_colors, dict):
c = settings.cn_colors.get(
j, cn_lay.get(j, {"color": (10, 10, 10)})["color"]
@@ -193,7 +214,9 @@ def neuron2k3d(x, colormap, settings):
positions=this_cn[["x", "y", "z"]].values,
name=cn_label,
shader="flat",
- point_size=settings.cn_size if settings.cn_size else cn_lay['size'] * 50,
+ point_size=settings.cn_size
+ if settings.cn_size
+ else cn_lay["size"] * 50,
color=c,
)
)
diff --git a/navis/plotting/plotly/graph_objs.py b/navis/plotting/plotly/graph_objs.py
index d5bb6533..ac9f9cb9 100644
--- a/navis/plotting/plotly/graph_objs.py
+++ b/navis/plotting/plotly/graph_objs.py
@@ -98,6 +98,7 @@ def neuron2plotly(x, colormap, settings):
cn_lay.update(settings.cn_layout)
trace_data = []
+ _radius_warned = False
for i, neuron in enumerate(x):
name = str(getattr(neuron, "name", neuron.id))
color = colormap[i]
@@ -135,7 +136,23 @@ def neuron2plotly(x, colormap, settings):
settings.radius = False
if isinstance(neuron, core.TreeNeuron) and settings.radius:
- _neuron = conversion.tree2meshneuron(neuron)
+ # Warn once if more than 5% of nodes have missing radii
+ if not _radius_warned:
+ if (
+ (neuron.nodes.radius.fillna(0).values <= 0).sum() / neuron.n_nodes
+ ) > 0.05:
+ logger.warning(
+ "Some skeleton nodes have radius <= 0. This may lead to "
+ "rendering artifacts. Set `radius=False` to plot skeletons "
+ "as single-width lines instead."
+ )
+ _radius_warned = True
+
+ _neuron = conversion.tree2meshneuron(
+ neuron,
+ warn_missing_radii=False,
+ radius_scale_factor=settings.get("linewidth", 1),
+ )
_neuron.connectors = neuron.connectors
neuron = _neuron
@@ -490,7 +507,7 @@ def skeleton2plotly(neuron, legendgroup, showlegend, label, color, settings):
y=coords[:, 1],
z=coords[:, 2],
mode="lines",
- line=dict(color=c, width=settings.linewidth, dash=dash),
+            line=dict(color=c, width=settings.get("linewidth", 3), dash=dash),
name=label,
legendgroup=legendgroup,
legendgrouptitle_text=legendgroup,
diff --git a/navis/plotting/settings.py b/navis/plotting/settings.py
index 4a476d77..0cf8d0ef 100644
--- a/navis/plotting/settings.py
+++ b/navis/plotting/settings.py
@@ -62,7 +62,10 @@ def to_dict(self):
return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
def get(self, key, default=None):
- return self.__dict__.get(key, default)
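+        # Unlike `dict.get`, also fall back to `default` when the stored
+        # value is None (e.g. `PlotlySettings.linewidth` now defaults to None)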
+ value = self.__dict__.get(key, default)
+ if value is None:
+ value = default
+ return value
def pop(self, key, default=None):
return self.__dict__.pop(key, default)
@@ -127,7 +130,7 @@ class Matplotlib2dSettings(BasePlottingSettings):
_name = "matplotlib backend"
- method: Literal["2d", "3d", "3d_complex"] = "3d"
+ method: Literal["2d", "3d", "3d_complex"] = "2d"
group_neurons: bool = False
autoscale: bool = True
orthogonal: bool = True
@@ -162,7 +165,7 @@ class PlotlySettings(BasePlottingSettings):
volume_legend: bool = False
width: Optional[int] = None
height: Optional[int] = 600
- linewidth: float = 3 # for plotly, linewidth 1 is too thin
+    # For plotly, linewidth 1 is too thin; we default to 3 in graph_objs.py
+    linewidth: Optional[float] = None
linestyle: str = "-"
@@ -199,6 +202,7 @@ class OctarineSettings(BasePlottingSettings):
show: bool = True
size: Optional[Tuple[int, int]] = None
offscreen: bool = False
+ spacing: Optional[Tuple[float, float, float]] = None
# These are viewer-specific settings that we must not pass to the plotting
# function
@@ -212,6 +216,7 @@ class OctarineSettings(BasePlottingSettings):
"size",
"offscreen",
"scatter_kws",
+        "spacing",
)
diff --git a/navis/plotting/vispy/conftest.py b/navis/plotting/vispy/conftest.py
index fd07c609..b393904d 100644
--- a/navis/plotting/vispy/conftest.py
+++ b/navis/plotting/vispy/conftest.py
@@ -5,6 +5,5 @@
"""
try:
import vispy
-
-except ImportError:
+except ModuleNotFoundError:
collect_ignore_glob = ["*.py"]
diff --git a/navis/plotting/vispy/viewer.py b/navis/plotting/vispy/viewer.py
index 1cd3b7a9..31a2b889 100644
--- a/navis/plotting/vispy/viewer.py
+++ b/navis/plotting/vispy/viewer.py
@@ -33,7 +33,7 @@
try:
from vispy import scene
from vispy.util.quaternion import Quaternion
-except ImportError:
+except ModuleNotFoundError:
scene = None
@@ -130,8 +130,10 @@ class Viewer:
def __init__(self, picking=False, **kwargs):
if not scene:
- raise ImportError('`navis.Viewer` requires the `vispy` package to '
- 'be installed:\n pip3 install vispy')
+ raise ModuleNotFoundError(
+ '`navis.Viewer` requires the `vispy` package to '
+ 'be installed:\n pip3 install vispy'
+ )
# Update some defaults as necessary
defaults = dict(keys=None,
show=True,
diff --git a/navis/plotting/vispy/visuals.py b/navis/plotting/vispy/visuals.py
index fbd4a5c6..9b756091 100644
--- a/navis/plotting/vispy/visuals.py
+++ b/navis/plotting/vispy/visuals.py
@@ -251,6 +251,7 @@ def neuron2vispy(x, settings):
# List to fill with vispy visuals
visuals = []
+ _radius_warned = False
for i, neuron in enumerate(x):
# Generate random ID -> we need this in case we have duplicate IDs
object_id = uuid.uuid4()
@@ -263,7 +264,23 @@ def neuron2vispy(x, settings):
settings.radius = False
if isinstance(neuron, core.TreeNeuron) and settings.radius:
- _neuron = conversion.tree2meshneuron(neuron)
+ # Warn once if more than 5% of nodes have missing radii
+ if not _radius_warned:
+ if (
+ (neuron.nodes.radius.fillna(0).values <= 0).sum() / neuron.n_nodes
+ ) > 0.05:
+ logger.warning(
+ "Some skeleton nodes have radius <= 0. This may lead to "
+ "rendering artifacts. Set `radius=False` to plot skeletons "
+ "as single-width lines instead."
+ )
+ _radius_warned = True
+
+ _neuron = conversion.tree2meshneuron(
+ neuron,
+ warn_missing_radii=False,
+ radius_scale_factor=settings.get("linewidth", 1),
+ )
_neuron.connectors = neuron.connectors
neuron = _neuron
diff --git a/navis/sampling/downsampling.py b/navis/sampling/downsampling.py
index 0028a0d6..897bdb1a 100644
--- a/navis/sampling/downsampling.py
+++ b/navis/sampling/downsampling.py
@@ -18,19 +18,21 @@
from typing import Optional, Union, List
from .. import config, graph, core, utils, meshes
+from .utils import sample_points_uniform
# Set up logging
logger = config.get_logger(__name__)
-__all__ = ['downsample_neuron']
+__all__ = ["downsample_neuron"]
-@utils.map_neuronlist(desc='Downsampling', allow_parallel=True)
-def downsample_neuron(x: 'core.NeuronObject',
- downsampling_factor: Union[int, float],
- inplace: bool = False,
- preserve_nodes: Optional[List[int]] = None
- ) -> Optional['core.NeuronObject']:
+@utils.map_neuronlist(desc="Downsampling", allow_parallel=True)
+def downsample_neuron(
+ x: "core.NeuronObject",
+ downsampling_factor: Union[int, float],
+ inplace: bool = False,
+ preserve_nodes: Optional[List[int]] = None,
+) -> Optional["core.NeuronObject"]:
"""Downsample neuron(s) by a given factor.
For skeletons: preserves root, leafs, branchpoints by default. Preservation
@@ -122,7 +124,7 @@ def _downsample_voxels(x, downsampling_factor, order=1):
x.units *= downsampling_factor
-def _downsample_dotprops(x, downsampling_factor):
+def _downsample_dotprops(x, downsampling_factor, method="simple"):
"""Downsample Dotprops."""
assert isinstance(x, core.Dotprops)
@@ -135,7 +137,14 @@ def _downsample_dotprops(x, downsampling_factor):
return
# Generate a mask
- mask = np.arange(0, x._points.shape[0], int(downsampling_factor))
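+    # "simple" keeps every Nth point; "uniform" iteratively drops points from
+    # the densest regions to achieve a more even spatial distribution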
+ if method == "simple":
+ mask = np.arange(0, x._points.shape[0], int(downsampling_factor))
+ elif method == "uniform":
+        mask = sample_points_uniform(
+            x._points, int(x._points.shape[0] // downsampling_factor), output="mask"
+        )
+ else:
+ raise ValueError(f"Unknown (down-)sampling method: {method}")
# Mask vectors
# This will also trigger re-calculation which is necessary for two reasons:
diff --git a/navis/sampling/resampling.py b/navis/sampling/resampling.py
index b14bfeaa..d0d07b30 100644
--- a/navis/sampling/resampling.py
+++ b/navis/sampling/resampling.py
@@ -19,7 +19,7 @@
import scipy.spatial
import scipy.interpolate
-from typing import Union, Optional, List, overload
+from typing import Union, Optional, List
from typing_extensions import Literal
from .. import config, core, utils, graph
@@ -30,29 +30,12 @@
__all__ = ['resample_skeleton', 'resample_along_axis']
-@overload
-def resample_skeleton(x: 'core.TreeNeuron',
- resample_to: int,
- inplace: bool = False,
- method: str = 'linear',
- skip_errors: bool = True
- ) -> 'core.TreeNeuron': ...
-
-
-@overload
-def resample_skeleton(x: 'core.NeuronList',
- resample_to: int,
- inplace: bool = False,
- method: str = 'linear',
- skip_errors: bool = True
- ) -> 'core.NeuronList': ...
-
-
@utils.map_neuronlist(desc='Resampling', allow_parallel=True)
def resample_skeleton(x: 'core.NeuronObject',
resample_to: Union[int, str],
inplace: bool = False,
method: str = 'linear',
+ map_columns: Optional[list] = None,
skip_errors: bool = True
) -> Optional['core.NeuronObject']:
"""Resample skeleton(s) to given resolution.
@@ -85,6 +68,11 @@ def resample_skeleton(x: 'core.NeuronObject',
method : str, optional
See `scipy.interpolate.interp1d` for possible
options. By default, we're using linear interpolation.
+ map_columns : list of str, optional
+ Names of additional columns to carry over to the resampled
+ neuron. Numerical columns will be interpolated according to
+                        `method`. Non-numerical columns will be mapped using
+                        nearest-neighbour interpolation.
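+                        For example, `map_columns="compartment"` will carry
+                        over axon/dendrite labels such as those produced by
+                        `navis.split_axon_dendrite`.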
inplace : bool, optional
If True, will modify original neuron. If False, a
resampled copy is returned.
@@ -127,14 +115,43 @@ def resample_skeleton(x: 'core.NeuronObject',
raise TypeError(f'Unable to resample data of type "{type(x)}"')
# Map units (non-str are just passed through)
- resample_to = x.map_units(resample_to, on_error='raise')
+ resample_to = x.map_units(resample_to, on_error="raise")
if not inplace:
x = x.copy()
- # Collect some information for later
- locs = dict(zip(x.nodes.node_id.values, x.nodes[['x', 'y', 'z']].values))
- radii = dict(zip(x.nodes.node_id.values, x.nodes.radius.values))
+ num_cols = ["x", "y", "z", "radius"]
+ non_num_cols = []
+
+ if map_columns:
+ if isinstance(map_columns, str):
+ map_columns = [map_columns]
+
+ for col in map_columns:
+ if col in num_cols or col in non_num_cols:
+ continue
+ if col not in x.nodes.columns:
+ raise ValueError(f'Column "{col}" not found in node table')
+ if pd.api.types.is_numeric_dtype(x.nodes[col].dtype):
+ num_cols.append(col)
+ else:
+ non_num_cols.append(col)
+
+ # Collect coordinates
+ locs = dict(zip(x.nodes.node_id.values, x.nodes[["x", "y", "z"]].values))
+
+ # Collect values for all columns
+ values = {
+ col: dict(zip(x.nodes.node_id.values, x.nodes[col].values))
+ for col in num_cols + non_num_cols
+ }
+
+ # For categorical columns, we need to translate them to numerical values
+ cat2num = {}
+ num2cat = {}
+ for col in non_num_cols:
+ cat2num[col] = {c: i for i, c in enumerate(x.nodes[col].unique())}
+ num2cat[col] = {i: c for c, i in cat2num[col].items()}
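+    # e.g. {"axon": 0, "dendrite": 1} and back again, so that categorical
+    # columns can be run through nearest-neighbour interpolation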
new_nodes: List = []
max_tn_id = x.nodes.node_id.max() + 1
@@ -146,7 +163,7 @@ def resample_skeleton(x: 'core.NeuronObject',
# Get coordinates
coords = np.vstack([locs[n] for n in seg])
-        # Get radii
-        rad = [radii[tn] for tn in seg]
# Vecs between subsequently measured points
vecs = np.diff(coords.T)
@@ -156,83 +173,99 @@ def resample_skeleton(x: 'core.NeuronObject',
dist = np.insert(dist, 0, 0)
# If path is too short, just keep the first and last node
- if dist[-1] < resample_to or (method == 'cubic' and len(seg) <= 3):
- new_nodes += [[seg[0], seg[-1],
- coords[0][0], coords[0][1], coords[0][2],
- radii[seg[0]]]]
+ if dist[-1] < resample_to or (method == "cubic" and len(seg) <= 3):
+ new_nodes += [
+ [seg[0], seg[-1]] + [values[c][seg[0]] for c in num_cols + non_num_cols]
+ ]
continue
# Distances (i.e. resolution) of interpolation
n_nodes = np.round(dist[-1] / resample_to)
new_dist = np.linspace(dist[0], dist[-1], int(n_nodes))
- try:
- sampleX = scipy.interpolate.interp1d(dist, coords[:, 0],
- kind=method)
- sampleY = scipy.interpolate.interp1d(dist, coords[:, 1],
- kind=method)
- sampleZ = scipy.interpolate.interp1d(dist, coords[:, 2],
- kind=method)
- sampleR = scipy.interpolate.interp1d(dist, rad,
- kind=method)
- except ValueError as e:
- if skip_errors:
- errors += 1
- new_nodes += x.nodes.loc[x.nodes.node_id.isin(seg[:-1]),
- ['node_id', 'parent_id',
- 'x', 'y', 'z',
- 'radius']].values.tolist()
- continue
- else:
- raise e
-
- # Sample each dim
- xnew = sampleX(new_dist)
- ynew = sampleY(new_dist)
- znew = sampleZ(new_dist)
- rnew = sampleR(new_dist)
-
- # Generate new coordinates
- new_coords = np.array([xnew, ynew, znew]).T
+        samples = {}
+        skip_segment = False
+        # Interpolate each column; categorical columns are interpolated on
+        # their numerical codes using nearest-neighbour interpolation
+        for col in num_cols + non_num_cols:
+            if col in num_cols:
+                vals = [values[col][n] for n in seg]
+                kind = method
+            else:
+                vals = [cat2num[col][values[col][n]] for n in seg]
+                kind = "nearest"
+            try:
+                samples[col] = scipy.interpolate.interp1d(dist, vals, kind=kind)
+            except ValueError:
+                if not skip_errors:
+                    raise
+                # Keep this segment's original nodes and skip the entire
+                # segment (not just this column)
+                errors += 1
+                new_nodes += x.nodes.loc[
+                    x.nodes.node_id.isin(seg[:-1]),
+                    ["node_id", "parent_id"] + num_cols + non_num_cols,
+                ].values.tolist()
+                skip_segment = True
+                break
+        if skip_segment:
+            continue
+
+ # Sample each column
+ new_values = {}
+ for col in num_cols:
+ new_values[col] = samples[col](new_dist)
+ for col in non_num_cols:
+ new_values[col] = [num2cat[col][int(samples[col](d))] for d in new_dist]
# Generate new ids (start and end node IDs of this segment are kept)
- new_ids = np.concatenate((seg[:1], [max_tn_id + i for i in range(len(new_coords) - 2)], seg[-1:]))
+ new_ids = np.concatenate(
+ (seg[:1], [max_tn_id + i for i in range(len(new_dist) - 2)], seg[-1:])
+ )
# Increase max index
max_tn_id += len(new_ids)
# Keep track of new nodes
- new_nodes += [[tn, pn, co[0], co[1], co[2], r]
- for tn, pn, co, r in zip(new_ids[:-1],
- new_ids[1:],
- new_coords,
- rnew)]
+ new_nodes += [
+ [tn, pn] + [new_values[c][i] for c in num_cols + non_num_cols]
+ for i, (tn, pn) in enumerate(zip(new_ids[:-1], new_ids[1:]))
+ ]
if errors:
- logger.warning(f'{errors} ({errors/i:.0%}) segments skipped due to '
- 'errors')
+        logger.warning(f"{errors} ({errors/i:.0%}) segments skipped due to errors")
# Add root node(s)
- root = x.nodes.loc[x.nodes.node_id.isin(utils.make_iterable(x.root)),
- ['node_id', 'parent_id', 'x', 'y', 'z', 'radius']]
+ root = x.nodes.loc[
+ x.nodes.node_id.isin(utils.make_iterable(x.root)),
+ ["node_id", "parent_id"] + num_cols + non_num_cols,
+ ]
new_nodes += [list(r) for r in root.values]
# Generate new nodes dataframe
- new_nodes = pd.DataFrame(data=new_nodes,
- columns=['node_id', 'parent_id',
- 'x', 'y', 'z', 'radius'])
+ new_nodes = pd.DataFrame(
+ data=new_nodes, columns=["node_id", "parent_id"] + num_cols + non_num_cols
+ )
# Convert columns to appropriate dtypes
- dtypes = {k: x.nodes[k].dtype for k in ['node_id', 'parent_id', 'x', 'y', 'z', 'radius']}
+ dtypes = {
+ k: x.nodes[k].dtype for k in ["node_id", "parent_id"] + num_cols + non_num_cols
+ }
-    for cols in new_nodes.columns:
-        new_nodes = new_nodes.astype(dtypes, errors='ignore')
+    new_nodes = new_nodes.astype(dtypes, errors="ignore")
# Remove duplicate nodes (branch points)
new_nodes = new_nodes[~new_nodes.node_id.duplicated()]
# Generate KDTree
- tree = scipy.spatial.cKDTree(new_nodes[['x', 'y', 'z']].values)
+ tree = scipy.spatial.cKDTree(new_nodes[["x", "y", "z"]].values)
# Map soma onto new nodes if required
# Note that if `._soma` is a soma detection function we can't tell
# how to deal with it. Ideally the new soma node will
@@ -241,10 +274,10 @@ def resample_skeleton(x: 'core.NeuronObject',
# than one soma is detected now. Also a "label" column in the node
# table would be lost at this point.
# We will go for the easy option which is to pin the soma at this point.
- nodes = x.nodes.set_index('node_id', inplace=False)
- if np.any(getattr(x, 'soma')):
+ nodes = x.nodes.set_index("node_id", inplace=False)
+ if np.any(getattr(x, "soma")):
soma_nodes = utils.make_iterable(x.soma)
- old_pos = nodes.loc[soma_nodes, ['x', 'y', 'z']].values
+ old_pos = nodes.loc[soma_nodes, ["x", "y", "z"]].values
# Get nearest neighbours
dist, ix = tree.query(old_pos)
@@ -266,13 +299,13 @@ def resample_skeleton(x: 'core.NeuronObject',
# Map connectors back if necessary
if x.has_connectors:
# Get position of old synapse-bearing nodes
- old_tn_position = nodes.loc[x.connectors.node_id, ['x', 'y', 'z']].values
+ old_tn_position = nodes.loc[x.connectors.node_id, ["x", "y", "z"]].values
# Get nearest neighbours
dist, ix = tree.query(old_tn_position)
# Map back onto neuron
- x.connectors['node_id'] = new_nodes.node_id.values[ix]
+ x.connectors["node_id"] = new_nodes.node_id.values[ix]
# Map tags back if necessary
# Expects `tags` to be a dictionary {'tag': [node_id1, node_id2, ...]}
@@ -281,7 +314,7 @@ def resample_skeleton(x: 'core.NeuronObject',
nodes_to_remap = list({n for l in x.tags.values() for n in l})
# Get position of old tag-bearing nodes
- old_tn_position = nodes.loc[nodes_to_remap, ['x', 'y', 'z']].values
+ old_tn_position = nodes.loc[nodes_to_remap, ["x", "y", "z"]].values
# Get nearest neighbours
dist, ix = tree.query(old_tn_position)
diff --git a/navis/sampling/utils.py b/navis/sampling/utils.py
new file mode 100644
index 00000000..3e9ae9d7
--- /dev/null
+++ b/navis/sampling/utils.py
@@ -0,0 +1,108 @@
+# This script is part of navis (http://www.github.com/navis-org/navis).
+# Copyright (C) 2018 Philipp Schlegel
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+import numpy as np
+import networkx as nx
+
+try:
+ from pykdtree.kdtree import KDTree
+except ModuleNotFoundError:
+ from scipy.spatial import cKDTree as KDTree
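+
+# NOTE: the masked nearest-neighbour queries used below rely on pykdtree's
+# `mask` argument, which scipy's cKDTree does not support; in practice this
+# module therefore requires pykdtree.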
+
+
+def sample_points_uniform(points, size, output="points"):
+ """Draw uniform sample from point cloud.
+
+    This function works by iteratively removing the point with the smallest
+ distance to its nearest neighbor until the desired number of points is
+ reached.
+
+ Parameters
+ ----------
+    points : (N, 3) array
+ Point cloud to sample from.
+ size : int
+ Number of samples to draw.
+ output : "points" | "indices" | "mask", optional
+ If "points", returns the sampled points. If "indices", returns
+ the indices of the sampled points. If "mask", returns a boolean
+ mask of the sampled points.
+
+ Returns
+ -------
+ See `output` parameter.
+
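+    Examples
+    --------
+    Draw a spatially uniform subsample from a random point cloud (note that
+    the masked queries used internally require pykdtree):
+
+    >>> import numpy as np
+    >>> from navis.sampling.utils import sample_points_uniform
+    >>> pts = np.random.rand(1000, 3)
+    >>> sample_points_uniform(pts, 100).shape
+    (100, 3)
+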
+ """
+ points = np.asarray(points)
+
+ assert isinstance(points, np.ndarray) and points.ndim == 2 and points.shape[1] == 3
+ assert output in ("points", "indices", "mask")
+ assert (size > 0) and (size <= len(points))
+
+ # Start with all points in the mask
+ mask = np.ones(len(points), dtype=bool)
+
+ # Generate a tree
+ tree = KDTree(points)
+
+ p_ind = np.arange(len(points))
+
+ while mask.sum() > size:
+        # For each remaining point, get the distance to its nearest neighbor
+ d, ind = tree.query(points[mask], k=2, mask=~mask)
+ d, ind = d[:, 1], ind[:, 1]
+
+ # Find pairs of nodes that are close to each other
+ is_close = d == d.min()
+ pairs = np.stack((p_ind[mask][is_close], p_ind[ind][is_close]), axis=1)
+
+        # At this point pairs may show up twice: as (a, b) and as (b, a)
+ pairs = np.unique(np.sort(pairs, axis=1), axis=0)
+
+        # Imagine we have two candidate pairs for removal: (a, b) and (b, c).
+        # In that case we can remove (a and c) or just (b), but never both
+        # nodes of a pair since that might leave a hole in the point cloud.
+ G = nx.Graph()
+ G.add_edges_from(pairs)
+
+ to_remove = []
+ for cc in nx.connected_components(G):
+ # If these are two nodes, it doesn't matter which one we drop
+ if len(cc) <= 2:
+ to_remove.append(cc.pop())
+ continue
+ # If we have three or more nodes, we will simply remove the one
+ # with the highest degree
+            to_remove.append(max(cc, key=G.degree))
+
+ # Number of nodes we still need to remove
+ n_remove = mask.sum() - size
+
+ if n_remove >= len(to_remove):
+ mask[to_remove] = False
+ else:
+ mask[to_remove[:n_remove]] = False
+
+ if output == "mask":
+ return mask
+ elif output == "indices":
+ return p_ind[mask]
+ elif output == "points":
+ return points[mask].copy()
diff --git a/navis/transforms/align.py b/navis/transforms/align.py
index f66776de..4d376d2b 100644
--- a/navis/transforms/align.py
+++ b/navis/transforms/align.py
@@ -158,9 +158,11 @@ def align_rigid(x, target=None, scale=False, w=0, verbose=False, sample=None, pr
"""
try:
from pycpd import RigidRegistration as Registration
- except ImportError:
- raise ImportError('`align_rigid()` requires the `pycpd` library:\n'
- ' pip3 install git+https://github.com/siavashk/pycpd@master -U')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ '`align_rigid()` requires the `pycpd` library:\n'
+ ' pip3 install git+https://github.com/siavashk/pycpd@master -U'
+ )
if isinstance(x, core.BaseNeuron):
x = core.NeuronList(x)
@@ -262,9 +264,11 @@ def align_deform(x, target=None, sample=None, progress=True, **kwargs):
"""
try:
from pycpd import DeformableRegistration as Registration
- except ImportError:
- raise ImportError('`align_deform()` requires the `pycpd` library:\n'
- ' pip3 install git+https://github.com/siavashk/pycpd@master -U')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ '`align_deform()` requires the `pycpd` library:\n'
+ ' pip3 install git+https://github.com/siavashk/pycpd@master -U'
+ )
if isinstance(x, core.BaseNeuron):
x = core.NeuronList(x)
@@ -332,9 +336,11 @@ def align_pca(x, individually=True):
"""
try:
from sklearn.decomposition import PCA
- except ImportError:
- raise ImportError('`align_pca()` requires the `scikit-learn` library:\n'
- ' pip3 install scikit-learn -U')
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ '`align_pca()` requires the `scikit-learn` library:\n'
+ ' pip3 install scikit-learn -U'
+ )
if isinstance(x, core.BaseNeuron):
x = core.NeuronList(x)
diff --git a/navis/transforms/h5reg_java.py b/navis/transforms/h5reg_java.py
index f861817f..5d73fbd4 100644
--- a/navis/transforms/h5reg_java.py
+++ b/navis/transforms/h5reg_java.py
@@ -25,11 +25,10 @@
# jpype is a soft dependency - defer import errors until we first try to use it
try:
import jpype
-except ImportError as e:
+except ModuleNotFoundError as e:
jpype = None
jpype_import_error = e
-except BaseException:
- raise
+
# Path for the compiled transform-helpers jar which contains the required classes
fp = os.path.dirname(__file__)
diff --git a/navis/utils/__init__.py b/navis/utils/__init__.py
index cd5fbcf5..3ab85f02 100644
--- a/navis/utils/__init__.py
+++ b/navis/utils/__init__.py
@@ -28,8 +28,6 @@
import navis_fastcore as fastcore
except ModuleNotFoundError:
fastcore = None
-except ImportError:
- raise
__all__ = ['set_loggers', 'set_pbars', 'set_default_connector_colors',
'patch_cloudvolume']
diff --git a/navis/utils/cv.py b/navis/utils/cv.py
index c825fbda..4ddf2e05 100644
--- a/navis/utils/cv.py
+++ b/navis/utils/cv.py
@@ -51,7 +51,7 @@ def patch_cloudvolume():
global cv
try:
import cloudvolume as cv
- except ImportError:
+ except ModuleNotFoundError:
cv = None
# If CV not installed do nothing
diff --git a/navis/utils/misc.py b/navis/utils/misc.py
index 2ce67365..3cfa3bab 100644
--- a/navis/utils/misc.py
+++ b/navis/utils/misc.py
@@ -96,6 +96,8 @@ def is_url(x: str) -> bool:
False
>>> is_url('http://www.google.com')
True
+    >>> is_url('ftp://download.ft-server.org:8000')
+ True
"""
parsed = urllib.parse.urlparse(x)
diff --git a/requirements.txt b/requirements.txt
index 2e6ee3b1..3bca2123 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -28,7 +28,9 @@ Shapely>=1.6.0 #extra: shapely
rpy2 #extra: r
-pykdtree #extra: kdtree
+# extra: kdtree
+pykdtree<=1.3.12; platform_system == "Darwin"
+pykdtree; platform_system != "Darwin"
xxhash #extra: hash
@@ -36,7 +38,7 @@ flybrains #extra: flybrains
cloud-volume>=5.2.0 #extra: cloudvolume
-navis-fastcore>=0.0.4 #extra: fastcore
+navis-fastcore>=0.0.7 #extra: fastcore
plotly>=4.9 #extra: plotly
@@ -50,7 +52,7 @@ jupyter_rfb>=0.4.1
#extra: octarine-default
octarine3d[all]>=0.2.3
-octarine-navis-plugin>=0.1.1
+octarine-navis-plugin>=0.1.2
#extra: meshes
@@ -64,6 +66,7 @@ neuprint-python
caveclient
cloud-volume
flybrains
+scikit-image
Shapely>=1.6.0
#extra: dev
@@ -76,6 +79,7 @@ pytest-env
pytest-xvfb
pytest-timeout
gitpython
+scikit-image
#extra: docs
diff --git a/setup.py b/setup.py
index c2149c65..7158581e 100644
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,7 @@
HERE / "requirements.txt",
)
-dev_only = ["test-notebook", "dev"]
+dev_only = ["test-notebook", "dev", "docs"]
specialized = ["r", "flybrains", "cloud-volume"]
all_dev_deps = []
all_deps = []
diff --git a/tests/common.py b/tests/common.py
index 488069b0..45bed3b0 100644
--- a/tests/common.py
+++ b/tests/common.py
@@ -2,7 +2,7 @@
try:
import igraph
-except ImportError:
+except ModuleNotFoundError:
igraph = None
warnings.warn('iGraph library not found. Will test only with NetworkX.')
diff --git a/tests/test_tutorials.py b/tests/test_tutorials.py
index 80b232e9..3b70984b 100644
--- a/tests/test_tutorials.py
+++ b/tests/test_tutorials.py
@@ -21,8 +21,6 @@
from pathlib import Path
from contextlib import contextmanager
-SKIP = ["zzz_no_plot_01_nblast_flycircuit.py", "zzz_no_plot_02_nblast_hemibrain.py"]
-
@contextmanager
def suppress_stdout():
@@ -52,7 +50,7 @@ def suppress_stdout():
for i, file in enumerate(files):
if not file.is_file():
continue
- if file.name in SKIP:
+ if file.name.startswith('zzz'):
continue
        # Note: we're using `exec` here instead of e.g. `subprocess.run` because we need to avoid