branch: models_igr: graph display + record selection #6
base: master
@@ -1,11 +1,9 @@
from nrn import Segment, Section
from collections import defaultdict

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from neuron import h
import matplotlib.pyplot as plt

from neuronpp.core.hocwrappers.seg import Seg

Review comment: I think this concept of recording with labels should be moved to a separate class which inherits from …
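A minimal sketch of the split suggested above, assuming the base class is this file's Record and that it exposes a plot() entry point; the subclass name, the constructor signature, the import path and the _draw_class_marks helper are all hypothetical:

```python
from neuronpp.utils.record import Record  # assumed import path for the base Record class


class LabeledRecord(Record):
    """Hypothetical subclass that owns the true/predicted label bookkeeping,
    keeping the base Record free of classification concerns."""

    def __init__(self, elements, variables='v', true_labels=None):
        super().__init__(elements, variables=variables)
        self.true_labels = true_labels or []

    def plot_with_classes(self, true_class, pred_class, stepsize, dt, **kwargs):
        # Draw the traces with the base class, then overlay the class marks on
        # the axes it created (self.axs is filled by the base plotting code).
        super().plot(**kwargs)
        for var_name, axes in self.axs.items():
            for i, (ax, _line) in enumerate(axes):
                # _draw_class_marks is a hypothetical helper along the lines
                # proposed further down in this review.
                self._draw_class_marks(ax, label=self.true_labels[i],
                                       true_class=true_class, pred_class=pred_class,
                                       stepsize=stepsize, dt=dt)
```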
@@ -39,7 +37,8 @@ def __init__(self, elements, variables='v'):
                try:
                    s = getattr(elem.hoc, "_ref_%s" % var)
                except AttributeError:
-                   raise AttributeError("there is no attribute of %s. Maybe you forgot to append loc param for sections?" % var)
+                   raise AttributeError(
+                       "there is no attribute of %s. Maybe you forgot to append loc param for sections?" % var)

                rec = h.Vector().record(s)
                self.recs[var].append((name, rec))
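For readers unfamiliar with the recording pattern in this hunk: h.Vector().record() takes a NEURON pointer such as a segment's _ref_v and fills the vector while the simulation runs. A standalone sketch, with a made-up section just for illustration:

```python
from neuron import h

h.load_file("stdrun.hoc")            # provides h.finitialize / h.continuerun workflow

soma = h.Section(name="soma")
soma.insert("hh")                    # Hodgkin-Huxley channels, just to get some dynamics

v_vec = h.Vector().record(soma(0.5)._ref_v)   # membrane potential at the middle of soma
t_vec = h.Vector().record(h._ref_t)           # simulation time

h.finitialize(-65)
h.continuerun(25)                    # run 25 ms

print(len(t_vec), v_vec.max())
```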
@@ -84,16 +83,19 @@ def _plot_static(self, position=None):
            for i, (name, rec) in enumerate(section_recs):
                rec_np = rec.as_numpy()
                if np.max(np.isnan(rec_np)):
-                   raise ValueError("Vector recorded for variable: '%s' and segment: '%s' contains nan values." % (var_name, name))
+                   raise ValueError(
+                       "Vector recorded for variable: '%s' and segment: '%s' contains nan values." % (var_name, name))

                if position is not "merge":
-                   ax = self._get_subplot(fig=fig, var_name=var_name, position=position, row_len=len(section_recs), index=i + 1)
+                   ax = self._get_subplot(fig=fig, var_name=var_name, position=position, row_len=len(section_recs),
+                                          index=i + 1)
                ax.set_title("Variable: %s" % var_name)
                ax.plot(self.t, rec, label=name)
                ax.set(xlabel='t (ms)', ylabel=var_name)
                ax.legend()

-   def _plot_animate(self, steps=10000, y_lim=None, position=None):
+   def _plot_animate(self, steps=10000, y_lim=None, position=None, true_class=None, pred_class=None, stepsize=None,
+                     dt=None, show_true_predicted=True, true_labels=None):
        """
        Call each time you want to redraw plot.
@@ -106,6 +108,12 @@ def _plot_animate(self, steps=10000, y_lim=None, position=None):
          * position=(3,3) -> if you have 9 neurons and want to display 'v' on 3x3 matrix
          * position='merge' -> it will display all figures on the same graph.
          * position=None -> Default, each neuron has separated axis (row) on the figure.
+        :param true_class: list of true class labels in this window
+        :param pred_class: list of predicted class labels in this window
+        :param stepsize: agent readout time step
+        :param dt: agent integration time step
+        :param show_true_predicted: whether to print true/predicted class marks on the plot
+        :param true_labels: list of true labels for the consecutive plots
        :return:
        """
        create_fig = False

Review comment (on the show_true_predicted parameter): I think there is no point of using …
Reply: This is a matter of taste; lots of code switches some behaviour on/off with a single flag (see the toy example below).
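To make that trade-off concrete, a toy illustration of the two styles; this is not the repository code, and the function names are invented:

```python
def plot_explicit(values, show_marks=True, labels=None):
    """Explicit boolean switch, as kept in this PR."""
    if show_marks:
        if labels is None:
            raise ValueError("labels must be given when show_marks is True")
        return list(zip(values, labels))
    return values


def plot_implicit(values, labels=None):
    """Alternative raised in the review: passing labels at all is the switch."""
    if labels is not None:
        return list(zip(values, labels))
    return values


print(plot_explicit([0.1, 0.2], labels=["a", "b"]))  # [(0.1, 'a'), (0.2, 'b')]
print(plot_implicit([0.1, 0.2]))                     # marks skipped, no extra flag needed
```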
@@ -116,7 +124,7 @@ def _plot_animate(self, steps=10000, y_lim=None, position=None):
            fig = self.figs[var_name]
            if fig is None:
                create_fig = True
-               fig = plt.figure()
+               fig = plt.figure(figsize=(16.5, 5.5))
                fig.canvas.draw()
                self.figs[var_name] = fig
@@ -125,15 +133,15 @@ def _plot_animate(self, steps=10000, y_lim=None, position=None):
                if position == 'merge':
                    ax = fig.add_subplot(1, 1, 1)
                else:
-                   ax = self._get_subplot(fig=fig, var_name=var_name, position=position, row_len=len(section_recs), index=i + 1)
+                   ax = self._get_subplot(fig=fig, var_name=var_name, position=position, row_len=len(section_recs),
+                                          index=i + 1)

                if y_lim:
                    ax.set_ylim(y_lim[0], y_lim[1])
                line, = ax.plot([], lw=1)
-               ax.set_title("Variable: %s" % var_name)
-               ax.set_ylabel(var_name)
+               # ax.set_title("Variable: %s" % var_name)
+               ax.set_ylabel("{}_{}".format(var_name, i))
                ax.set_xlabel("t (ms)")
                ax.legend()

                self.axs[var_name].append((ax, line))
@@ -143,17 +151,66 @@ def _plot_animate(self, steps=10000, y_lim=None, position=None):

                ax.set_xlim(t.min(), t.max())
                if y_lim is None:
-                   ax.set_ylim(r.min()-(np.abs(r.min()*0.05)), r.max()+(np.abs(r.max()*0.05)))
+                   y_limits = (r.min() - (np.abs(r.min() * 0.05)), r.max() + (np.abs(r.max() * 0.05)))
+                   ax.set_ylim(y_limits)

                # update data
                line.set_data(t, r)

+               if show_true_predicted:
+                   # info draw triangles for true and predicted classes

Review comment: This whole part should be moved to a separate method. The method … (see the _draw_class_marks sketch below).
Reply: The owner is my god.

+                   if true_labels is not None:
+                       true_x, pred_x = self._true_predicted_class_marks(label=true_labels[i], true_class=true_class,
+                                                                         pred_class=pred_class, t=t, r=r,
+                                                                         stepsize=stepsize, dt=dt)

Review comment: There is no r param in the method … (the call passes r=r, but _true_predicted_class_marks does not take it).

+                   else:
+                       raise ValueError("True_labels parameter need to be given if show_true_prediction is True")
+                   if y_lim is None:
+                       true_y = [y_limits[0] + np.abs(y_limits[0]) * 0.09] * len(true_x)
+                       pred_y = [y_limits[1] - np.abs(y_limits[1] * 0.11)] * len(pred_x)

Review comment: variables …
Reply: Actually no, because the parameter y_lim is global for all plots (a tuple passed in the parameters), while y_limits is computed per individual plot. Over the course of a simulation the activations tend to take different values.
Reply: Actually this never quite works, neither as it was before nor as it is now: if the max and min values come too close together, the plotted y-limits differ a lot from what we can read off the data. There should be a method that first computes the wanted limits, sets them, and then reads back what matplotlib really used. Is there one? (see the ax.relim() sketch below)

+                   else:
+                       true_y = [y_lim[0]] * len(true_x)
+                       pred_y = [y_lim[1]] * len(pred_x)
+                   ax.scatter(true_x, true_y, c="orange", marker="^", alpha=0.95, label="true")
+                   ax.scatter(pred_x, pred_y, c="magenta", marker="v", alpha=0.95, label="predicted")
+                   if create_fig and i == 0:
+                       # draw legend only the first time and only on the uppermost graph
+                       ax.legend()
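A sketch of the extraction suggested above, assuming it stays a method of the same class and that the module-level numpy import is available as np; the name _draw_class_marks is made up. Note the call no longer passes r, which also addresses the comment that _true_predicted_class_marks has no such parameter:

```python
def _draw_class_marks(self, ax, label, true_class, pred_class, t, stepsize, dt,
                      y_limits, draw_legend=False):
    """Hypothetical helper: put the true/predicted triangles on a single axis."""
    true_x, pred_x = self._true_predicted_class_marks(label=label, true_class=true_class,
                                                      pred_class=pred_class, t=t,
                                                      stepsize=stepsize, dt=dt)
    true_y = [y_limits[0] + np.abs(y_limits[0]) * 0.09] * len(true_x)
    pred_y = [y_limits[1] - np.abs(y_limits[1] * 0.11)] * len(pred_x)
    ax.scatter(true_x, true_y, c="orange", marker="^", alpha=0.95, label="true")
    ax.scatter(pred_x, pred_y, c="magenta", marker="v", alpha=0.95, label="predicted")
    if draw_legend:
        ax.legend()
```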
+           # info join plots by removing labels and ticks from subplots that are not on the edge
+           if create_fig:
+               for key in self.axs:
+                   for ax in self.axs[key]:
+                       ax[0].label_outer()
+               fig.subplots_adjust(left=0.09, bottom=0.075, right=0.99, top=0.98, wspace=None, hspace=0.00)
            fig.canvas.draw()
            fig.canvas.flush_events()

        if create_fig:
            plt.show(block=False)
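On the question above about letting matplotlib choose the limits and then reporting what it used: the usual recipe is relim() plus autoscale_view() followed by get_ylim(). A self-contained sketch with made-up data:

```python
import matplotlib.pyplot as plt
import numpy as np

fig, ax = plt.subplots()
line, = ax.plot([], lw=1)

t = np.linspace(0, 100, 1000)
r = np.sin(t / 5.0)                            # stand-in for a recorded trace

line.set_data(t, r)
ax.set_xlim(t.min(), t.max())
ax.relim()                                     # recompute data limits from the current artists
ax.autoscale_view(scalex=False, scaley=True)   # let matplotlib choose y-limits, margins included
y_limits = ax.get_ylim()                       # read back the limits matplotlib really applied
print(y_limits)
```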
+   def _true_predicted_class_marks(self, label, true_class, pred_class, t, stepsize, dt):
+       """
+       find and return lists of time steps for true and predicted labels
+       :param label: the label id (an int)
+       :param true_class: list of true classes for the whole time region
+       :param pred_class: list of predicted labels (class ids) for the whole time region
+       :param t: the region time steps
+       :param stepsize: original agent stepsize; class selections are 2 * stepsize / dt
+       :param dt: integration step
+       :return: lists of marks for true_x: true classes, pred_x: predicted classes
+       """
+       n = len(true_class)
+       x = t[::int(2 * stepsize / dt)][-n:]
+
+       true_x = []
+       pred_x = []
+       for k in range(n):
+           # get the true classes for the current label
+           if true_class[k] == label:
+               true_x.append(x[k])
+           if pred_class[k] == label:
+               pred_x.append(x[k])
+       return true_x, pred_x
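A small worked example of the mark selection implemented above; all numbers are invented for illustration:

```python
import numpy as np

t = np.arange(0, 200, 0.5)            # 400 time points, dt = 0.5 ms
true_class = [0, 1, 1, 0]             # class decisions for the last four readouts
pred_class = [0, 1, 0, 0]
stepsize, dt = 10, 0.5

n = len(true_class)
x = t[::int(2 * stepsize / dt)][-n:]  # every 40th sample -> [120., 140., 160., 180.]

label = 0
true_x = [x[k] for k in range(n) if true_class[k] == label]   # [120., 180.]
pred_x = [x[k] for k in range(n) if pred_class[k] == label]   # [120., 160., 180.]
print(true_x, pred_x)
```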
    def to_csv(self, filename):
        cols = ['time']
        data = [self.t.as_numpy().tolist()]
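The to_csv hunk is cut off here. For orientation only, a typical completion of such a method using the module-level pandas import might look like the sketch below; this is an assumption, not the repository's actual code:

```python
def to_csv(self, filename):
    cols = ['time']
    data = [self.t.as_numpy().tolist()]
    # Append one column per recorded variable/segment pair (pd is the module-level pandas import).
    for var_name, section_recs in self.recs.items():
        for name, rec in section_recs:
            cols.append("%s_%s" % (var_name, name))
            data.append(rec.as_numpy().tolist())
    df = pd.DataFrame(list(zip(*data)), columns=cols)   # transpose: rows = time points
    df.to_csv(filename, index=False)
```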
Review comment: This was the original idea of how to present the plot, but now I think it was a bad idea to manually shift it like this. It may stay as it is, though.
Reply: The problem in line 95 with the actual shift lies in finding the name of the preceding layer. If it is 'hid_1', then it could be done via …