Partial implementation of virtual ports for PyProcModels (lava-nc#187)
* Initial implementation of permute ports

Signed-off-by: bamsumit <[email protected]>

* Tests for permute ports

* Process property of virtual ports no longer returns None

Signed-off-by: Mathis Richter <[email protected]>

* Added initial run-unittest for flatten() from issue lava-nc#163

Signed-off-by: Mathis Richter <[email protected]>

* User-level API for TransposePort with unit tests

Signed-off-by: Mathis Richter <[email protected]>

* Fixed typo

Signed-off-by: Mathis Richter <[email protected]>

* Unit tests for flatten() and concat_with()

Signed-off-by: Mathis Richter <[email protected]>

* Unit tests for virtual ports in Processes that are executed (wip)

Signed-off-by: Mathis Richter <[email protected]>

* Preliminary implementation of virtual ports between OutPort and InPort (wip)

Signed-off-by: Mathis Richter <[email protected]>

* Fixing unit tests after merge

Signed-off-by: Mathis Richter <[email protected]>

* Added support for virtual ports between an OutPort and an InPort of two hierarchical Processes

Signed-off-by: Mathis Richter <[email protected]>

* Clean up, exceptions, and generic unit tests for virtual port topologies

Signed-off-by: Mathis Richter <[email protected]>

* Fixed linter issues

Signed-off-by: Mathis Richter <[email protected]>

* Raising an exception when executing ConcatPort

Signed-off-by: Mathis Richter <[email protected]>

Co-authored-by: bamsumit <[email protected]>
Co-authored-by: Marcus G K Williams <[email protected]>
3 people authored Feb 25, 2022
1 parent 2da093c commit 5512bac
Showing 11 changed files with 648 additions and 203 deletions.
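
The commit message above mentions transpose/permute and flatten() virtual ports. For orientation, the transformations those ports apply can be illustrated with plain NumPy; the snippet below is an illustrative sketch only, not code from this commit, and all variable names are made up.

import numpy as np

# Data as it leaves a hypothetical OutPort of shape (2, 3).
sent = np.arange(6).reshape((2, 3))

# A transpose/permute virtual port delivers the data with its axes
# permuted, so the receiving InPort sees shape (3, 2).
received_transposed = np.transpose(sent, axes=(1, 0))
assert received_transposed.shape == (3, 2)

# A flatten virtual port delivers the same data as a 1-D vector.
received_flat = sent.reshape((6,))
assert received_flat.shape == (6,)
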
12 changes: 11 additions & 1 deletion src/lava/magma/compiler/builders/builder.py
@@ -370,7 +370,17 @@ def build(self):
csp_ports = self.csp_ports[name]
if not isinstance(csp_ports, list):
csp_ports = [csp_ports]
port = port_cls(csp_ports, pm, p.shape, lt.d_type)

# TODO (MR): This is probably just a temporary hack until the
# interface of PyOutPorts has been adjusted.
if issubclass(port_cls, PyInPort):
port = port_cls(csp_ports, pm, p.shape, lt.d_type,
p.transform_funcs)
elif issubclass(port_cls, PyOutPort):
port = port_cls(csp_ports, pm, p.shape, lt.d_type)
else:
raise AssertionError("port_cls must be of type PyInPort or "
"PyOutPort")

# Create dynamic PyPort attribute on ProcModel
setattr(pm, name, port)
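
The dispatch above only passes transform functions to PyInPort; PyOutPort keeps its old constructor. The standalone sketch below mirrors that issubclass() dispatch with mock port classes (MockInPort and MockOutPort are stand-ins invented for this example, not Lava classes).

import functools as ft
import numpy as np

class MockInPort:      # stand-in for PyInPort in this sketch
    def __init__(self, shape, d_type, transform_funcs=None):
        self.shape, self.d_type = shape, d_type
        self.transform_funcs = transform_funcs or []

class MockOutPort:     # stand-in for PyOutPort (no transform support yet)
    def __init__(self, shape, d_type):
        self.shape, self.d_type = shape, d_type

def build_port(port_cls, shape, d_type, transform_funcs):
    # Same dispatch idea as in builder.py above.
    if issubclass(port_cls, MockInPort):
        return port_cls(shape, d_type, transform_funcs)
    elif issubclass(port_cls, MockOutPort):
        return port_cls(shape, d_type)
    raise AssertionError("port_cls must be MockInPort or MockOutPort")

in_port = build_port(MockInPort, (3, 2), np.int32,
                     [ft.partial(np.transpose, axes=(1, 0))])
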
22 changes: 18 additions & 4 deletions src/lava/magma/compiler/compiler.py
@@ -1,6 +1,7 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
# See: https://spdx.org/licenses/

import logging
import importlib
import importlib.util as import_utils
@@ -38,7 +39,7 @@
from lava.magma.core.model.py.ports import RefVarTypeMapping
from lava.magma.core.model.sub.model import AbstractSubProcessModel
from lava.magma.core.process.ports.ports import AbstractPort, VarPort, \
ImplicitVarPort, RefPort
ImplicitVarPort, InPort, RefPort
from lava.magma.core.process.process import AbstractProcess
from lava.magma.core.resources import (
CPU,
@@ -346,12 +347,25 @@ def _compile_proc_models(
# and Ports
v = [VarInitializer(v.name, v.shape, v.init, v.id)
for v in p.vars]
ports = (list(p.in_ports) + list(p.out_ports))
ports = [PortInitializer(pt.name,

ports = []
for pt in (list(p.in_ports) + list(p.out_ports)):
# For all InPorts that receive input from
# virtual ports...
transform_funcs = None
if isinstance(pt, InPort):
# ... extract a function pointer to the
# transformation function of each virtual port.
transform_funcs = \
[vp.get_transform_func()
for vp in pt.get_incoming_virtual_ports()]
pi = PortInitializer(pt.name,
pt.shape,
self._get_port_dtype(pt, pm),
pt.__class__.__name__,
pp_ch_size) for pt in ports]
pp_ch_size,
transform_funcs)
ports.append(pi)
# Create RefPort (also use PortInitializers)
ref_ports = list(p.ref_ports)
ref_ports = [
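
Each incoming virtual port contributes one functools.partial via get_transform_func(), and the resulting list is handed to the PortInitializer. The self-contained sketch below shows what such a transform chain might look like for an InPort reached through a transpose followed by a flatten; the concrete partials are illustrative, since the real ones are produced by the virtual ports themselves.

import functools as ft
import numpy as np

# Hypothetical transform chain: permute the axes, then flatten.
transform_funcs = [
    ft.partial(np.transpose, axes=(1, 0)),
    ft.partial(np.ravel),
]

data = np.arange(6).reshape((2, 3))
for f in transform_funcs:
    data = f(data)       # applied in order, as PyInPort._transform does below
assert data.shape == (6,)
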
2 changes: 2 additions & 0 deletions src/lava/magma/compiler/utils.py
@@ -1,4 +1,5 @@
import typing as ty
import functools as ft
from dataclasses import dataclass


@@ -17,6 +18,7 @@ class PortInitializer:
d_type: type
port_type: str
size: int
transform_funcs: ty.List[ft.partial] = None


# check if can be a subclass of PortInitializer
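
With the new transform_funcs field, a PortInitializer for an InPort carries its transform chain alongside the existing metadata. A hedged construction example follows; the argument values are made up, and the positional order follows the call site in compiler.py above.

import functools as ft
import numpy as np
from lava.magma.compiler.utils import PortInitializer

pi = PortInitializer("in_port",                       # port name
                     (3, 2),                          # shape at the InPort
                     np.int32,                        # d_type
                     "InPort",                        # port_type (class name)
                     64,                              # channel size
                     [ft.partial(np.transpose, axes=(1, 0))])
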
32 changes: 31 additions & 1 deletion src/lava/magma/core/model/py/ports.py
@@ -1,6 +1,7 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
# See: https://spdx.org/licenses/

import typing as ty
from abc import abstractmethod
import functools as ft
@@ -137,6 +138,16 @@ class PyInPort(AbstractPyIOPort):
SCALAR_DENSE: ty.Type["PyInPortScalarDense"] = None
SCALAR_SPARSE: ty.Type["PyInPortScalarSparse"] = None

def __init__(self,
csp_ports: ty.List[AbstractCspPort],
process_model: AbstractProcessModel,
shape: ty.Tuple[int, ...],
d_type: type,
transform_funcs: ty.Optional[ty.List[ft.partial]] = None):

self._transform_funcs = transform_funcs
super().__init__(csp_ports, process_model, shape, d_type)

@abstractmethod
def recv(self):
"""Abstract method to receive data (vectors/scalars) sent from connected
@@ -182,6 +193,25 @@ def probe(self) -> bool:
True,
)

def _transform(self, recv_data: np.array) -> np.array:
"""Applies all transformation function pointers to the input data.
Parameters
----------
recv_data : numpy.ndarray
data received on the port that shall be transformed
Returns
-------
recv_data : numpy.ndarray
received data, transformed by the incoming virtual ports
"""
if self._transform_funcs:
# apply all transformation functions to the received data
for f in self._transform_funcs:
recv_data = f(recv_data)
return recv_data


class PyInPortVectorDense(PyInPort):
"""Python implementation of PyInPort for dense vector data."""
@@ -199,7 +229,7 @@ def recv(self) -> np.ndarray:
fashion.
"""
return ft.reduce(
lambda acc, csp_port: acc + csp_port.recv(),
lambda acc, csp_port: acc + self._transform(csp_port.recv()),
self.csp_ports,
np.zeros(self._shape, self._d_type),
)
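
recv() now passes every chunk received from a CspPort through _transform() before accumulating it. The standalone sketch below reproduces that reduce-with-transform pattern; MockCspPort is a stand-in invented for this example.

import functools as ft
import numpy as np

class MockCspPort:                    # stand-in for a CSP channel end
    def __init__(self, data):
        self._data = data

    def recv(self):
        return self._data

transform_funcs = [ft.partial(np.transpose, axes=(1, 0))]

def transform(data):
    # mirrors PyInPort._transform: apply each partial in order
    for f in transform_funcs:
        data = f(data)
    return data

csp_ports = [MockCspPort(np.ones((2, 3))), MockCspPort(np.ones((2, 3)))]
result = ft.reduce(lambda acc, port: acc + transform(port.recv()),
                   csp_ports,
                   np.zeros((3, 2)))
assert result.shape == (3, 2) and result[0, 0] == 2.0
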
42 changes: 42 additions & 0 deletions src/lava/magma/core/process/ports/exceptions.py
@@ -34,6 +34,48 @@ def __init__(self, shapes, axis):
super().__init__(self, msg)


class ConcatIndexError(Exception):
"""Raised when the axis over which ports should be concatenated is out of
bounds."""

def __init__(self, shape: ty.Tuple[int], axis: int):
msg = (
"Axis {} is out of bounds for given shape {}.".format(axis, shape)
)
super().__init__(self, msg)


class TransposeShapeError(Exception):
"""Raised when transpose axes is incompatible with old shape dimension."""

def __init__(
self, old_shape: ty.Tuple, axes: ty.Union[ty.Tuple, ty.List]
) -> None:
msg = (
"Cannot transpose 'old_shape'={} with permutation 'axes={}. "
"Total number of dimensions must not change during "
"reshaping.".format(old_shape, axes)
)
super().__init__(msg)


class TransposeIndexError(Exception):
"""Raised when indices in transpose axes are out of bounds for the old
shape dimension."""

def __init__(
self,
old_shape: ty.Tuple,
axes: ty.Union[ty.Tuple, ty.List],
wrong_index
) -> None:
msg = (
f"Cannot transpose 'old_shape'={old_shape} with permutation"
f"'axes'={axes}. The index {wrong_index} is out of bounds."
)
super().__init__(msg)


class VarNotSharableError(Exception):
"""Raised when an attempt is made to connect a RefPort or VarPort to a
non-sharable Var."""
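
The two transpose exceptions are meant to be raised when user-supplied axes do not fit the port's shape. The sketch below shows one plausible validation; validate_transpose_axes is a hypothetical helper written for this example, while the exception classes and import path are the ones shown in the diff above.

import typing as ty
from lava.magma.core.process.ports.exceptions import \
    TransposeShapeError, TransposeIndexError

def validate_transpose_axes(old_shape: ty.Tuple[int, ...],
                            axes: ty.Tuple[int, ...]) -> None:
    # The permutation must name exactly one index per dimension ...
    if len(axes) != len(old_shape):
        raise TransposeShapeError(old_shape, axes)
    # ... and every index must be within bounds.
    for idx in axes:
        if idx < 0 or idx >= len(old_shape):
            raise TransposeIndexError(old_shape, axes, idx)

validate_transpose_axes((2, 3, 4), (2, 0, 1))    # passes
# validate_transpose_axes((2, 3), (0, 2))        # would raise TransposeIndexError
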
