Commit
Fixing np.logical_and/or problems in Connection ProcessModels (lava-nc#412)

* fixing np.logical_and/or problems

* fixing linting issues

Co-authored-by: gkarray <[email protected]>
Co-authored-by: PhilippPlank <[email protected]>
3 people committed Oct 14, 2022
1 parent 4617211 commit c7618ba
Showing 1 changed file with 16 additions and 26 deletions.
42 changes: 16 additions & 26 deletions src/lava/magma/core/model/py/connection.py
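For context: np.logical_and and np.logical_or take exactly two input arrays, and a third positional argument is interpreted as the out buffer. Wherever three conditions were passed positionally, the third one was overwritten in place and silently dropped from the result. Below is a minimal, self-contained sketch of that pitfall and of the element-wise operator form the diff switches to; the arrays are made up for illustration and are not taken from the repository.

import numpy as np

a = np.array([True, False, False])
b = np.array([False, False, False])
c = np.array([False, False, True])

# Correct: chained element-wise operators combine all three conditions.
right = a | b | c
print(right)                # [ True False  True]

# Pitfall: the third positional argument of np.logical_or is `out`,
# so only a and b are OR-ed and the result is written into c.
wrong = np.logical_or(a, b, c)
print(wrong)                # [ True False False]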
@@ -186,18 +186,16 @@ def _build_active_traces(self) -> None:
         """Build and store boolean numpy arrays specifying which x and y
         traces are active."""
         # Shape : (2, )
-        self._active_x_traces = np.logical_or(
-            self._active_x_traces_per_dependency[0],
-            self._active_x_traces_per_dependency[1],
-            self._active_x_traces_per_dependency[2],
-        )
+        self._active_x_traces = \
+            self._active_x_traces_per_dependency[0] \
+            | self._active_x_traces_per_dependency[1] \
+            | self._active_x_traces_per_dependency[2]

         # Shape : (3, )
-        self._active_y_traces = np.logical_or(
-            self._active_y_traces_per_dependency[0],
-            self._active_y_traces_per_dependency[1],
-            self._active_y_traces_per_dependency[2],
-        )
+        self._active_y_traces = \
+            self._active_y_traces_per_dependency[0] \
+            | self._active_y_traces_per_dependency[1] \
+            | self._active_y_traces_per_dependency[2]

     def _build_learning_rule_appliers(self) -> None:
         """Build and store LearningRuleApplier for each active learning
@@ -493,7 +491,7 @@ def _record_pre_spike_times(self, s_in: np.ndarray) -> None:
             Pre-synaptic spikes.
         """
         self.x0[s_in] = True
-        multi_spike_x = np.logical_and(self.tx > 0, s_in)
+        multi_spike_x = (self.tx > 0) & s_in

         x_traces = self._x_traces
         x_traces[:, multi_spike_x] = self._add_impulse(
@@ -519,7 +517,7 @@ def _record_post_spike_times(self, s_in_bap: np.ndarray) -> None:
             Post-synaptic spikes.
         """
         self.y0[s_in_bap] = True
-        multi_spike_y = np.logical_and(self.ty > 0, s_in_bap)
+        multi_spike_y = (self.ty > 0) & s_in_bap

         y_traces = self._y_traces
         y_traces[:, multi_spike_y] = self._add_impulse(
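Note that the two-argument np.logical_and calls replaced in the two hunks above were already computing the intended masks; switching them to & appears to be a consistency cleanup rather than a bug fix. One caveat with the operator form is precedence: & binds tighter than comparisons, so the comparison must be parenthesized, exactly as the new lines do. A small sketch with made-up arrays, not repository state:

import numpy as np

tx = np.array([0, 2, 5])               # hypothetical spike-time trace
s_in = np.array([True, True, False])   # hypothetical spike vector

ok = (tx > 0) & s_in     # intended element-wise AND -> [False  True False]
# tx > 0 & s_in          # without parentheses this parses as tx > (0 & s_in)
print(ok)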
@@ -956,12 +954,8 @@ def _evaluate_trace(

         t_diff = t_eval - t_spikes

-        decay_only = np.logical_and(
-            np.logical_or(t_spikes == 0, t_diff < 0), broad_taus > 0
-        )
-        decay_spike_decay = np.logical_and(
-            t_spikes != 0, t_diff >= 0, broad_taus > 0
-        )
+        decay_only = ((t_spikes == 0) | (t_diff < 0)) & (broad_taus > 0)
+        decay_spike_decay = (t_spikes != 0) & (t_diff >= 0) & (broad_taus > 0)

         result = trace_values.copy()
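In the hunk above, the old decay_only expression was a correct two-argument nesting, but decay_spike_decay passed three conditions positionally, so broad_taus > 0 was consumed as the out buffer and never entered the AND. A sketch with made-up values, not repository data, showing how the old and new masks can differ:

import numpy as np

t_spikes = np.array([0, 3, 4])
t_diff = np.array([2, 1, 1])
broad_taus = np.array([0, 0, 5])

# Old form: third positional argument is `out`, so (broad_taus > 0) is dropped.
old = np.logical_and(t_spikes != 0, t_diff >= 0, broad_taus > 0)
print(old)   # [False  True  True]

# Fixed form: all three conditions are AND-ed element-wise.
new = (t_spikes != 0) & (t_diff >= 0) & (broad_taus > 0)
print(new)   # [False False  True]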

@@ -1160,7 +1154,7 @@ def _record_pre_spike_times(self, s_in: np.ndarray) -> None:
         """

         self.x0[s_in] = True
-        multi_spike_x = np.logical_and(self.tx > 0, s_in)
+        multi_spike_x = (self.tx > 0) & s_in

         x_traces = self._x_traces
         x_traces[:, multi_spike_x] += self._x_impulses[:, np.newaxis]
@@ -1182,7 +1176,7 @@ def _record_post_spike_times(self, s_in_bap: np.ndarray) -> None:
         """

         self.y0[s_in_bap] = True
-        multi_spike_y = np.logical_and(self.ty > 0, s_in_bap)
+        multi_spike_y = (self.ty > 0) & s_in_bap

         y_traces = self._y_traces
         y_traces[:, multi_spike_y] += self._y_impulses[:, np.newaxis]
@@ -1377,12 +1371,8 @@ def _evaluate_trace(

         t_diff = t_eval - t_spikes

-        decay_only = np.logical_and(
-            np.logical_or(t_spikes == 0, t_diff < 0), broad_taus > 0
-        )
-        decay_spike_decay = np.logical_and(
-            t_spikes != 0, t_diff >= 0, broad_taus > 0
-        )
+        decay_only = ((t_spikes == 0) | (t_diff < 0)) & (broad_taus > 0)
+        decay_spike_decay = (t_spikes != 0) & (t_diff >= 0) & (broad_taus > 0)

         result = trace_values.copy()
