Clean up of explicit namespace declaration #89

Closed · wants to merge 9 commits
Changes from 8 commits
4 changes: 2 additions & 2 deletions build.py
@@ -22,7 +22,7 @@
 @init
 def set_properties(project):
     project.set_property("dir_source_main_python", "src")
-    project.set_property("dir_source_unittest_python", "tests")
+    project.set_property("dir_source_unittest_python", "tests/lava")
     project.set_property("dir_source_main_scripts", "scripts")
     project.set_property("dir_docs", "docs")

@@ -63,7 +63,7 @@ def set_properties(project):
 @init(environments="unit")
 def set_properties_unit(project):
     project.set_property("dir_source_main_python", "src")
-    project.set_property("dir_source_unittest_python", "tests")
+    project.set_property("dir_source_unittest_python", "tests/lava")
     project.set_property("dir_source_main_scripts", "scripts")
     project.set_property("dir_docs", "docs")

1 change: 1 addition & 0 deletions requirements.txt
@@ -3,3 +3,4 @@ unittest2

 numpy
 matplotlib
+scipy
5 changes: 0 additions & 5 deletions src/lava/__init__.py

This file was deleted.

5 changes: 0 additions & 5 deletions src/lava/magma/__init__.py

This file was deleted.

5 changes: 0 additions & 5 deletions src/lava/magma/compiler/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/compiler/c/__init__.py

This file was deleted.

5 changes: 0 additions & 5 deletions src/lava/magma/compiler/channels/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/compiler/nc/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/compiler/py/__init__.py

This file was deleted.

5 changes: 0 additions & 5 deletions src/lava/magma/core/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/model/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/model/c/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/model/nc/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/model/py/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/model/sub/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/process/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/process/ports/__init__.py

This file was deleted.

15 changes: 0 additions & 15 deletions src/lava/magma/core/sync/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/core/sync/protocols/__init__.py

This file was deleted.

5 changes: 0 additions & 5 deletions src/lava/magma/runtime/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/runtime/channels/__init__.py

This file was deleted.

5 changes: 0 additions & 5 deletions src/lava/magma/runtime/message_infrastructure/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion src/lava/magma/runtime/node/__init__.py

This file was deleted.

5 changes: 0 additions & 5 deletions src/lava/proc/__init__.py

This file was deleted.

63 changes: 63 additions & 0 deletions src/lava/proc/conv/models.py
@@ -0,0 +1,63 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
# See: https://spdx.org/licenses/

import numpy as np

from lava.magma.core.sync.protocols.loihi_protocol import LoihiProtocol
from lava.magma.core.model.py.ports import PyInPort, PyOutPort
from lava.magma.core.model.py.type import LavaPyType
from lava.magma.core.resources import CPU
from lava.magma.core.decorator import implements, requires, tag
from lava.magma.core.model.py.model import PyLoihiProcessModel
from lava.proc.conv.process import Conv

from lava.proc.conv import utils


class AbstractPyConvModel(PyLoihiProcessModel):
    """Abstract template implementation of PyConvModel."""
    s_in = None
    a_out = None
    weight = None

    kernel_size: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=8)
    stride: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=8)
    padding: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=8)
    dilation: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=8)
    groups: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=8)

    def run_spk(self):
        s_in = self.s_in.recv()
        a_out = utils.conv(
            s_in, self.weight,
            self.kernel_size, self.stride, self.padding, self.dilation,
            self.groups[0]
        )
        self.a_out.send(self.clamp_precision(a_out))

    def clamp_precision(self, x):
        return x


@implements(proc=Conv, protocol=LoihiProtocol)
@requires(CPU)
@tag('floating_pt')
class PyConvModelBinaryFloat(AbstractPyConvModel):
    """Binary spike float synapse implementation."""
    s_in: PyInPort = LavaPyType(PyInPort.VEC_DENSE, bool, precision=1)
    a_out: PyOutPort = LavaPyType(PyOutPort.VEC_DENSE, float)
    weight: np.ndarray = LavaPyType(np.ndarray, float)


@implements(proc=Conv, protocol=LoihiProtocol)
@requires(CPU)
@tag('fixed_pt')
class PyConvModelBinaryFixed(AbstractPyConvModel):
    """Binary spike fixed point synapse implementation."""
    s_in: PyInPort = LavaPyType(PyInPort.VEC_DENSE, bool, precision=1)
    a_out: PyOutPort = LavaPyType(PyOutPort.VEC_DENSE, np.int32, precision=24)
    weight: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=8)

    def clamp_precision(self, x):
        return utils.signed_clamp(x, bits=24)
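
For reference, the fixed-point model's clamp_precision saturates activations to a signed 24-bit range before they are sent out. The repository's utils.signed_clamp is not shown in this diff, so the following is only a minimal NumPy sketch of the saturation it presumably performs; the helper name signed_clamp_sketch is made up for illustration:

import numpy as np

def signed_clamp_sketch(x: np.ndarray, bits: int = 24) -> np.ndarray:
    # Saturate to the signed range [-2**(bits - 1), 2**(bits - 1) - 1].
    lo, hi = -(1 << (bits - 1)), (1 << (bits - 1)) - 1
    return np.clip(x, lo, hi)

# signed_clamp_sketch(np.array([2**25, -2**25, 42]))
# -> array([ 8388607, -8388608,       42])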
104 changes: 104 additions & 0 deletions src/lava/proc/conv/process.py
@@ -0,0 +1,104 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
# See: https://spdx.org/licenses/

import numpy as np

from lava.magma.core.process.process import AbstractProcess
from lava.magma.core.process.variable import Var
from lava.magma.core.process.ports.ports import InPort, OutPort

from lava.proc.conv import utils


class Conv(AbstractProcess):
    """Convolution connection process between neurons.

    Parameters
    ----------
    input_shape : tuple of three ints
        Shape of the input to the process in (X, Y, Z) or (W, H, C) format.
    weight : tensor/array
        Convolution kernel weight. The dimensions should be in
        (C_out, W, H, C_in) format.
    padding : int or tuple of two ints
        Convolution padding size. Default is 0.
    stride : int or tuple of two ints
        Convolution stride. Default is 1.
    dilation : int or tuple of two ints
        Convolution dilation. Default is 1.
    groups : int
        Number of groups in the convolution. Default is 1.

    Note
    ----
    padding, stride and dilation are expected in (X, Y) or (W, H) format
    when given as tuples.
    """
    def __init__(self, **kwargs):
        # The process In/OutPort shapes are considered to be in XYZ (WHC)
        # format. The kernel weight shape is expected to be in
        # (C_out, W, H, C_in) format.
        # Why? This is the format that the Loihi conv feature uses.
        super().__init__(**kwargs)

        def broadcast_arg(name, default):
            shape = kwargs.get(name, default)
            if np.isscalar(shape):
                return (shape, shape)
            elif len(shape) == 1:
                return (shape[0], shape[0])
            elif len(shape) == 2:
                return (shape[0], shape[1])
            else:
                raise Exception(
                    f'Expected {name} to be two dimensional. '
                    f'Found {name} = {shape}.'
                )

        input_shape = kwargs.get('input_shape', (1, 1, 1))
        kernel_size = kwargs['weight'].shape[1:3]
        in_channels = input_shape[-1]
        out_channels = kwargs['weight'].shape[0]
        padding = broadcast_arg('padding', 0)
        stride = broadcast_arg('stride', 1)
        dilation = broadcast_arg('dilation', 1)
        groups = kwargs.get('groups', 1)

        if len(input_shape) != 3:
            raise Exception(
                f'Expected input shape to be 3 dimensional. '
                f'Found {input_shape}.'
            )
        if not np.isscalar(groups):
            raise Exception(
                f'Expected groups to be a scalar. '
                f'Found {groups = }.'
            )
        if in_channels % groups != 0:
            raise Exception(
                f'Expected number of in_channels to be divisible by groups. '
                f'Found {in_channels = } and {groups = }.'
            )
        if out_channels % groups != 0:
            raise Exception(
                f'Expected number of out_channels to be divisible by groups. '
                f'Found {out_channels = } and {groups = }.'
            )

        output_shape = utils.output_shape(
            input_shape, out_channels, kernel_size, stride, padding, dilation
        )

        self.output_shape = output_shape
        self.input_shape = input_shape
        self.s_in = InPort(shape=input_shape)
        self.a_out = OutPort(shape=output_shape)
        self.weight = Var(
            shape=kwargs['weight'].shape,
            init=kwargs.pop('weight')
        )
        self.kernel_size = Var(shape=(2,), init=kernel_size)
        self.padding = Var(shape=(2,), init=padding)
        self.stride = Var(shape=(2,), init=stride)
        self.dilation = Var(shape=(2,), init=dilation)
        self.groups = Var(shape=(1,), init=groups)
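
For readers of this diff, a minimal construction sketch of the new Conv process follows. The keyword arguments mirror the docstring above; the concrete shapes and random weights are illustrative only, and the resulting a_out shape depends on utils.output_shape, which is not shown in this diff:

import numpy as np
from lava.proc.conv.process import Conv

# (C_out, W, H, C_in): 8 filters of size 5x5 over a 3-channel input.
weight = np.random.randint(-128, 128, size=(8, 5, 5, 3))

conv = Conv(
    input_shape=(32, 32, 3),   # (X, Y, Z) / (W, H, C)
    weight=weight,
    stride=2,
    padding=2,
    dilation=1,
    groups=1,
)

# conv.s_in receives binary spikes of shape (32, 32, 3);
# conv.a_out sends accumulated activations of shape conv.output_shape.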