Commit
Merge pull request #232 from clbarnes/flexible_anngraph
Deprecate get_annotation_graph for get_entity_graph
clbarnes authored Apr 12, 2023
2 parents f161c3c + ced59a7 commit 93d28b9
Showing 6 changed files with 597 additions and 193 deletions.
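For callers, a rough migration sketch (get_entity_graph's exact signature is not shown in the diffs below, so the argument-free call and drop-in equivalence are assumptions):

    import pymaid
    # old, now deprecated:
    g = pymaid.get_annotation_graph(annotations_by_id=False, skeletons_by_id=True)
    # new:
    g = pymaid.get_entity_graph()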
14 changes: 9 additions & 5 deletions .github/workflows/run-tests.yml
@@ -5,7 +5,11 @@ on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.python-version }}-${{ matrix.igraph }}
cancel-in-progress: true
strategy:
fail-fast: false
matrix:
python-version:
- '3.7'
@@ -14,11 +18,11 @@ jobs:
- '3.10'
igraph: ["igraph", "no-igraph"]
steps:
# This cancels any such job that is still running
- name: Cancel Previous Runs
uses: styfle/[email protected]
with:
access_token: ${{ github.token }}
# # This cancels any such job that is still running
# - name: Cancel Previous Runs
# uses: styfle/[email protected]
# with:
# access_token: ${{ github.token }}
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
2 changes: 2 additions & 0 deletions docs/source/api.rst
@@ -59,6 +59,8 @@ Functions to fetch annotations:
pymaid.get_annotations
pymaid.get_annotation_details
pymaid.get_user_annotations
pymaid.get_annotation_id
pymaid.get_entity_graph

Nodes
-----
4 changes: 4 additions & 0 deletions docs/source/whats_new.rst
@@ -10,6 +10,10 @@ What's new?
* - Version
- Date
-
* - Next
- In progress
- - :func:`pymaid.get_annotation_graph` deprecated in favour of the new
:func:`pymaid.get_entity_graph`.
* - 2.1.0
- 04/04/22
- With this release we mainly follow some renamed functions in ``navis`` but
191 changes: 4 additions & 187 deletions pymaid/fetch/__init__.py
@@ -56,14 +56,16 @@
from navis import in_volume
from .landmarks import get_landmarks, get_landmark_groups
from .skeletons import get_skeleton_ids
from .annotations import get_annotation_graph, get_entity_graph, get_annotation_id


__all__ = ['get_annotation_details', 'get_annotation_id',
'get_annotation_list', 'get_annotations', 'get_annotation_graph',
'get_arbor',
'get_connector_details', 'get_connectors',
'get_connector_tags',
'get_contributor_statistics', 'get_edges', 'get_history',
'get_contributor_statistics', 'get_edges', 'get_entity_graph',
'get_history',
'get_logs', 'get_names', 'get_neuron',
'get_neurons', 'get_neurons_in_bbox',
'get_neurons_in_volume', 'get_node_tags', 'get_node_details',
@@ -1901,191 +1903,6 @@ def get_annotations(x, remote_instance=None):
'No annotations retrieved. Make sure that the skeleton IDs exist.')


def _entities_to_ann_graph(data, annotations_by_id=False, skeletons_by_id=True):
ann_ref = "id" if annotations_by_id else "name"
skel_ref = "id" if skeletons_by_id else "name"

g = nx.DiGraph()

for e in data["entities"]:
is_meta_ann = False

if e.get("type") == "neuron":
skids = e.get("skeleton_ids") or []
if len(skids) != 1:
logger.warning("Neuron with id %s is modelled by %s skeletons, ignoring", e["id"], len(skids))
continue
node_data = {
"name": e["name"],
"neuron_id": e["id"],
"is_skeleton": True,
"id": skids[0],
}
node_id = node_data[skel_ref]
else: # is an annotation
node_data = {
"is_skeleton": False,
"id": e["id"],
"name": e["name"],
}
node_id = node_data[ann_ref]
is_meta_ann = True

anns = e.get("annotations", [])
if not anns:
g.add_node(node_id, **node_data)
continue

for ann in e.get("annotations", []):
g.add_edge(
ann[ann_ref],
node_id,
is_meta_annotation=is_meta_ann,
)

g.nodes[node_id].update(**node_data)

return g
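# A sketch (not from the diff) of how the helper above turns CATMAID's
# "query-targets" payload into a graph; the entity values are invented.
example_data = {
    "entities": [
        {"type": "neuron", "id": 101, "name": "example neuron",
         "skeleton_ids": [42],
         "annotations": [{"id": 7, "name": "sensory"}]},
        {"type": "annotation", "id": 7, "name": "sensory", "annotations": []},
    ]
}
example_graph = _entities_to_ann_graph(example_data)
# With the defaults (annotations_by_id=False, skeletons_by_id=True) this gives
# an edge "sensory" -> 42 (annotation name -> skeleton ID, is_meta_annotation=False),
# and node 42 carries name="example neuron", neuron_id=101, is_skeleton=True.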


@cache.undo_on_error
def get_annotation_graph(annotations_by_id=False, skeletons_by_id=True, remote_instance=None) -> nx.DiGraph:
"""Get a networkx DiGraph of (meta)annotations and skeletons.
Can be slow for large projects.
Nodes in the graph have data:
Skeletons have
- id
- is_skeleton = True
- neuron_id (different to the skeleton ID)
- name
Annotations have
- id
- name
- is_skeleton = False
Edges in the graph have
- is_meta_annotation (whether it is between two annotations)
Parameters
----------
annotations_by_id : bool, default False
Whether to index nodes representing annotations by their integer ID
(uses name by default)
skeletons_by_id : bool, default True
whether to index nodes representing skeletons by their integer ID
(True by default, otherwise uses the neuron name)
remote_instance : optional CatmaidInstance
Returns
-------
networkx.DiGraph
"""
remote_instance = utils._eval_remote_instance(remote_instance)

query_url = remote_instance.make_url(remote_instance.project_id, "annotations", "query-targets")
post = {
"with_annotations": True,
}
data = remote_instance.fetch(query_url, post)

return _entities_to_ann_graph(data, annotations_by_id, skeletons_by_id)
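# Usage sketch for the (deprecated) function above, following its docstring;
# it needs a reachable CatmaidInstance (global or passed via remote_instance).
g = get_annotation_graph()  # deprecated; prefer pymaid.get_entity_graph
skeleton_nodes = [n for n, d in g.nodes(data=True) if d.get("is_skeleton")]
meta_edges = [(u, v) for u, v, d in g.edges(data=True) if d.get("is_meta_annotation")]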


def filter_by_query(names: pd.Series, query: str, allow_partial: bool = False) -> pd.Series:
"""Get a logical index series into a series of strings based on a query.
Parameters
----------
names : pd.Series of str
Dataframe column of strings to filter
query : str
Query string. leading "~" and "annotation:" will be ignored.
Leading "/" will mean the remainder is used as a regex.
allow_partial : bool, default False
For non-regex queries, whether to check that the query is an exact match or just contained in the name.
Returns
-------
pd.Series of bool
Which names match the given query
"""
if not isinstance(names, pd.Series):
names = pd.Series(names, dtype=str)

for prefix in ["annotation:", "~"]:
if query.startswith(prefix):
logger.warning("Removing '%s' prefix from '%s'", prefix, query)
query = query[len(prefix):]

q = query.strip()
# use a regex
if q.startswith("/"):
re_str = q[1:]
filt = names.str.match(re_str)
else:
filt = names.str.contains(q, regex=False)
if not allow_partial:
filt = np.logical_and(filt, names.str.len() == len(q))

return filt
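# A quick sketch of the matching rules above; the names are invented.
names = pd.Series(["glomerulus DA1", "glomerulus DA2", "DA1"])
filter_by_query(names, "DA1")                      # exact match only    -> [False, False, True]
filter_by_query(names, "DA1", allow_partial=True)  # substring match     -> [True, False, True]
filter_by_query(names, "/glomerulus .*")           # leading "/" = regex -> [True, True, False]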


@cache.wipe_and_retry
def get_annotation_id(annotations, allow_partial=False, raise_not_found=True,
remote_instance=None):
"""Retrieve the annotation ID for single or list of annotation(s).
Parameters
----------
annotations : str | list of str
Single annotations or list of multiple annotations.
allow_partial : bool, optional
If True, will allow partial matches.
raise_not_found : bool, optional
If True raise Exception if no match for any of the
query annotations is found. Else log warning.
remote_instance : CatmaidInstance, optional
If not passed directly, will try using global.
Returns
-------
dict
``{'annotation_name': 'annotation_id', ...}``
"""
remote_instance = utils._eval_remote_instance(remote_instance)

logger.debug('Retrieving list of annotations...')

remote_annotation_list_url = remote_instance._get_annotation_list()
an_list = remote_instance.fetch(remote_annotation_list_url)

# Turn into pandas array
an_list = pd.DataFrame.from_records(an_list['annotations'])

annotations = utils._make_iterable(annotations)
annotation_ids = {}
for an in annotations:
filt = filter_by_query(an_list.name, an, allow_partial)

# Search for matches
res = an_list[filt].set_index('name').id.to_dict()
if not res:
logger.warning('No annotation found for "{}"'.format(an))
annotation_ids.update(res)

if not annotation_ids:
if raise_not_found:
raise Exception('No matching annotation(s) found')
else:
logger.warning('No matching annotation(s) found')

return annotation_ids
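# Usage sketch for the function above; the annotation names and IDs are
# invented, and a reachable CatmaidInstance is required.
ann_ids = get_annotation_id("glomerulus", allow_partial=True)
# e.g. {"glomerulus DA1": 123, "glomerulus DA2": 124}, i.e. a name -> ID
# mapping covering every annotation that matched the query.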


@cache.undo_on_error
def find_nodes(tags=None, node_ids=None, skeleton_ids=None,
@@ -3932,7 +3749,7 @@ def get_paths(sources, targets, n_hops=2, min_synapses=1, return_graph=False,
targets = utils._make_iterable(targets).astype(int)
sources = utils._make_iterable(sources).astype(int)

if isinstance(n_hops, (int, np.int)):
if isinstance(n_hops, (int, np.integer)):
n_hops = [n_hops]

if not utils._is_iterable(n_hops):
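The np.int -> np.integer change in get_paths tracks NumPy's removal of the deprecated np.int alias (deprecated in NumPy 1.20, removed in 1.24). np.integer is the abstract base class for NumPy integer scalars, so the check still accepts both plain Python ints and NumPy ints:

    import numpy as np
    isinstance(5, (int, np.integer))            # True
    isinstance(np.int64(5), (int, np.integer))  # True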
(Diffs for the remaining changed files were not loaded in this view.)
