From 141e732c74bd4575c115fdaecd5621aac90f3c33 Mon Sep 17 00:00:00 2001 From: bamsumit Date: Tue, 8 Mar 2022 10:23:14 -0800 Subject: [PATCH 1/9] update refport unittest to always wait when it writes to port for consistent behavior Signed-off-by: bamsumit --- poetry.lock | 42 +++++++++---------- pyproject.toml | 4 +- .../lava/magma/runtime/test_ref_var_ports.py | 3 ++ 3 files changed, 26 insertions(+), 23 deletions(-) diff --git a/poetry.lock b/poetry.lock index b1c390b6f..130f4162a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -666,7 +666,7 @@ python-versions = "*" [[package]] name = "numpy" -version = "1.22.2" +version = "1.22.3" description = "NumPy is the fundamental package for array computing with Python." category = "main" optional = false @@ -1311,7 +1311,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = ">=3.8, <3.11" -content-hash = "17e10f1725cfdae4b4a2e8da85965da95263c227a876fb1afc363ed447d49583" +content-hash = "e5bd66852734a32a6fdc9358cf6f54452bffb6d31f4249c1618abdbc47625e39" [metadata.files] alabaster = [ @@ -1791,25 +1791,25 @@ msgpack = [ {file = "msgpack-1.0.3.tar.gz", hash = "sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e"}, ] numpy = [ - {file = "numpy-1.22.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:515a8b6edbb904594685da6e176ac9fbea8f73a5ebae947281de6613e27f1956"}, - {file = "numpy-1.22.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76a4f9bce0278becc2da7da3b8ef854bed41a991f4226911a24a9711baad672c"}, - {file = "numpy-1.22.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:168259b1b184aa83a514f307352c25c56af111c269ffc109d9704e81f72e764b"}, - {file = "numpy-1.22.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3556c5550de40027d3121ebbb170f61bbe19eb639c7ad0c7b482cd9b560cd23b"}, - {file = "numpy-1.22.2-cp310-cp310-win_amd64.whl", hash = "sha256:aafa46b5a39a27aca566198d3312fb3bde95ce9677085efd02c86f7ef6be4ec7"}, - {file = "numpy-1.22.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:55535c7c2f61e2b2fc817c5cbe1af7cb907c7f011e46ae0a52caa4be1f19afe2"}, - {file = "numpy-1.22.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:60cb8e5933193a3cc2912ee29ca331e9c15b2da034f76159b7abc520b3d1233a"}, - {file = "numpy-1.22.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b536b6840e84c1c6a410f3a5aa727821e6108f3454d81a5cd5900999ef04f89"}, - {file = "numpy-1.22.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2638389562bda1635b564490d76713695ff497242a83d9b684d27bb4a6cc9d7a"}, - {file = "numpy-1.22.2-cp38-cp38-win32.whl", hash = "sha256:6767ad399e9327bfdbaa40871be4254d1995f4a3ca3806127f10cec778bd9896"}, - {file = "numpy-1.22.2-cp38-cp38-win_amd64.whl", hash = "sha256:03ae5850619abb34a879d5f2d4bb4dcd025d6d8fb72f5e461dae84edccfe129f"}, - {file = "numpy-1.22.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:d76a26c5118c4d96e264acc9e3242d72e1a2b92e739807b3b69d8d47684b6677"}, - {file = "numpy-1.22.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:15efb7b93806d438e3bc590ca8ef2f953b0ce4f86f337ef4559d31ec6cf9d7dd"}, - {file = "numpy-1.22.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:badca914580eb46385e7f7e4e426fea6de0a37b9e06bec252e481ae7ec287082"}, - {file = "numpy-1.22.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94dd11d9f13ea1be17bac39c1942f527cbf7065f94953cf62dfe805653da2f8f"}, - {file = 
"numpy-1.22.2-cp39-cp39-win32.whl", hash = "sha256:8cf33634b60c9cef346663a222d9841d3bbbc0a2f00221d6bcfd0d993d5543f6"}, - {file = "numpy-1.22.2-cp39-cp39-win_amd64.whl", hash = "sha256:59153979d60f5bfe9e4c00e401e24dfe0469ef8da6d68247439d3278f30a180f"}, - {file = "numpy-1.22.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a176959b6e7e00b5a0d6f549a479f869829bfd8150282c590deee6d099bbb6e"}, - {file = "numpy-1.22.2.zip", hash = "sha256:076aee5a3763d41da6bef9565fdf3cb987606f567cd8b104aded2b38b7b47abf"}, + {file = "numpy-1.22.3-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:92bfa69cfbdf7dfc3040978ad09a48091143cffb778ec3b03fa170c494118d75"}, + {file = "numpy-1.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8251ed96f38b47b4295b1ae51631de7ffa8260b5b087808ef09a39a9d66c97ab"}, + {file = "numpy-1.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48a3aecd3b997bf452a2dedb11f4e79bc5bfd21a1d4cc760e703c31d57c84b3e"}, + {file = "numpy-1.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3bae1a2ed00e90b3ba5f7bd0a7c7999b55d609e0c54ceb2b076a25e345fa9f4"}, + {file = "numpy-1.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:08d9b008d0156c70dc392bb3ab3abb6e7a711383c3247b410b39962263576cd4"}, + {file = "numpy-1.22.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:201b4d0552831f7250a08d3b38de0d989d6f6e4658b709a02a73c524ccc6ffce"}, + {file = "numpy-1.22.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8c1f39caad2c896bc0018f699882b345b2a63708008be29b1f355ebf6f933fe"}, + {file = "numpy-1.22.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:568dfd16224abddafb1cbcce2ff14f522abe037268514dd7e42c6776a1c3f8e5"}, + {file = "numpy-1.22.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca688e1b9b95d80250bca34b11a05e389b1420d00e87a0d12dc45f131f704a1"}, + {file = "numpy-1.22.3-cp38-cp38-win32.whl", hash = "sha256:e7927a589df200c5e23c57970bafbd0cd322459aa7b1ff73b7c2e84d6e3eae62"}, + {file = "numpy-1.22.3-cp38-cp38-win_amd64.whl", hash = "sha256:07a8c89a04997625236c5ecb7afe35a02af3896c8aa01890a849913a2309c676"}, + {file = "numpy-1.22.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:2c10a93606e0b4b95c9b04b77dc349b398fdfbda382d2a39ba5a822f669a0123"}, + {file = "numpy-1.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fade0d4f4d292b6f39951b6836d7a3c7ef5b2347f3c420cd9820a1d90d794802"}, + {file = "numpy-1.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bfb1bb598e8229c2d5d48db1860bcf4311337864ea3efdbe1171fb0c5da515d"}, + {file = "numpy-1.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97098b95aa4e418529099c26558eeb8486e66bd1e53a6b606d684d0c3616b168"}, + {file = "numpy-1.22.3-cp39-cp39-win32.whl", hash = "sha256:fdf3c08bce27132395d3c3ba1503cac12e17282358cb4bddc25cc46b0aca07aa"}, + {file = "numpy-1.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:639b54cdf6aa4f82fe37ebf70401bbb74b8508fddcf4797f9fe59615b8c5813a"}, + {file = "numpy-1.22.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c34ea7e9d13a70bf2ab64a2532fe149a9aced424cd05a2c4ba662fd989e3e45f"}, + {file = "numpy-1.22.3.zip", hash = "sha256:dbc7601a3b7472d559dc7b933b18b4b66f9aa7452c120e87dfb33d02008c8a18"}, ] packaging = [ {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, diff --git a/pyproject.toml b/pyproject.toml index 
0695e8144..02cb3996c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -53,7 +53,7 @@ unittest2 = "^1.1.0"
 
 [tool.poetry.dev-dependencies]
 bandit = "1.7.2"
-coverage = { extras = ["toml"], version = "^6.3.2" }
+coverage = "^6.3.2"
 darglint = "^1.8.1"
 flake8 = "^4.0.1"
 flake8-bandit = "^2.1.2"
@@ -70,7 +70,7 @@ flakeheaven = "^0.11.1"
 pep8-naming = "^0.11.1"
 poetry = "^1.1.13"
 pytest = "^7.0.1"
-pytest-cov = { extras = ["toml"], version = "^3.0.0" }
+pytest-cov = "^3.0.0"
 sphinx = { extras = ["toml"], version = "^4.4.0" }
 sphinx-tabs = { extras = ["toml"], version = "^3.2.0" }
 sphinx_rtd_theme = { extras = ["toml"], version = "^1.0.0" }
diff --git a/tests/lava/magma/runtime/test_ref_var_ports.py b/tests/lava/magma/runtime/test_ref_var_ports.py
index 68bb8fce5..338bb29ec 100644
--- a/tests/lava/magma/runtime/test_ref_var_ports.py
+++ b/tests/lava/magma/runtime/test_ref_var_ports.py
@@ -96,6 +96,9 @@ def run_post_mgmt(self):
         ref_data = np.array([5, 5, 5]) + self.time_step
         self.ref1.write(ref_data)
         self.ref3.write(ref_data[:2])
+        # ensure write() has finished before moving on
+        self.ref1.wait()
+        self.ref3.wait()
 
 
 # A minimal PyProcModel implementing P2

From 7097dd8aeb5942ce570c187119cdf95be5d1ec6a Mon Sep 17 00:00:00 2001
From: bamsumit
Date: Tue, 8 Mar 2022 11:45:18 -0800
Subject: [PATCH 2/9] Removed pyproject changes

Signed-off-by: bamsumit
---
 poetry.lock    | 2 +-
 pyproject.toml | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 130f4162a..5be154e19 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1311,7 +1311,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.8, <3.11"
-content-hash = "e5bd66852734a32a6fdc9358cf6f54452bffb6d31f4249c1618abdbc47625e39"
+content-hash = "17e10f1725cfdae4b4a2e8da85965da95263c227a876fb1afc363ed447d49583"
 
 [metadata.files]
 alabaster = [
diff --git a/pyproject.toml b/pyproject.toml
index 02cb3996c..0695e8144 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -53,7 +53,7 @@ unittest2 = "^1.1.0"
 
 [tool.poetry.dev-dependencies]
 bandit = "1.7.2"
-coverage = "^6.3.2"
+coverage = { extras = ["toml"], version = "^6.3.2" }
 darglint = "^1.8.1"
 flake8 = "^4.0.1"
 flake8-bandit = "^2.1.2"
@@ -70,7 +70,7 @@ flakeheaven = "^0.11.1"
 pep8-naming = "^0.11.1"
 poetry = "^1.1.13"
 pytest = "^7.0.1"
-pytest-cov = "^3.0.0"
+pytest-cov = { extras = ["toml"], version = "^3.0.0" }
 sphinx = { extras = ["toml"], version = "^4.4.0" }
 sphinx-tabs = { extras = ["toml"], version = "^3.2.0" }
 sphinx_rtd_theme = { extras = ["toml"], version = "^1.0.0" }

From 1fb7353d788b1b14e96ea4cf4212d5c22d485973 Mon Sep 17 00:00:00 2001
From: bamsumit
Date: Fri, 20 May 2022 12:16:35 -0700
Subject: [PATCH 3/9] Fix to convolution tests. Fixed incompatible mnist_pretrained for old python versions.
Signed-off-by: bamsumit
---
 tests/lava/proc/conv/test_models.py       | 10 ++++------
 tutorials/end_to_end/mnist_pretrained.npy |  4 ++--
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/tests/lava/proc/conv/test_models.py b/tests/lava/proc/conv/test_models.py
index 0d6fe7291..a55008a55 100644
--- a/tests/lava/proc/conv/test_models.py
+++ b/tests/lava/proc/conv/test_models.py
@@ -92,7 +92,6 @@ def setup_conv() -> Tuple[
 class TestConvProcessModels(unittest.TestCase):
     """Tests for all ProcessModels of Conv"""
 
-    @unittest.skip
     def test_conv_float(self) -> None:
         """Test for float conv process."""
         num_steps = 10
@@ -117,8 +116,8 @@ def test_conv_float(self) -> None:
         utils.TORCH_IS_AVAILABLE = TORCH_IS_AVAILABLE
 
         output_gt = np.zeros_like(output)
-        for t in range(output.shape[-1]):
-            output_gt[..., t] = utils.conv(input[..., t], **params)
+        for t in range(output.shape[-1] - 1):
+            output_gt[..., t + 1] = utils.conv(input[..., t], **params)
 
         error = np.abs(output - output_gt).mean()
 
@@ -139,7 +138,6 @@ def test_conv_float(self) -> None:
             f'{output_gt[output!=output_gt]=}\n'
         )
 
-    @unittest.skip
     def test_conv_fixed(self) -> None:
         """Test for fixed point conv process."""
         num_steps = 10
@@ -164,8 +162,8 @@ def test_conv_fixed(self) -> None:
         utils.TORCH_IS_AVAILABLE = TORCH_IS_AVAILABLE
 
         output_gt = np.zeros_like(output)
-        for t in range(output.shape[-1]):
-            output_gt[..., t] = utils.conv(input[..., t], **params)
+        for t in range(output.shape[-1] - 1):
+            output_gt[..., t + 1] = utils.conv(input[..., t], **params)
         output_gt = utils.signed_clamp(output_gt, bits=24)
 
         error = np.abs(output - output_gt).mean()
diff --git a/tutorials/end_to_end/mnist_pretrained.npy b/tutorials/end_to_end/mnist_pretrained.npy
index 97d4d8e42..99ec90bec 100644
--- a/tutorials/end_to_end/mnist_pretrained.npy
+++ b/tutorials/end_to_end/mnist_pretrained.npy
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a55246798c24fb8122ef7be5bd3b8fbb1222a121d9db502d382c3d5590203555
-size 220771
+oid sha256:94f32a3ae7f8dd278cc8933b214642f246ffd859a129d19130ac88208f35c9d6
+size 220767

From 68c817cbe38b2f829de4592db9f11be3a0d556e5 Mon Sep 17 00:00:00 2001
From: bamsumit
Date: Thu, 28 Jul 2022 09:44:48 -0700
Subject: [PATCH 4/9] Missing module parent fix

Signed-off-by: bamsumit
---
 src/lava/magma/compiler/compiler_graphs.py | 31 +++++++++-----------
 1 file changed, 16 insertions(+), 15 deletions(-)

diff --git a/src/lava/magma/compiler/compiler_graphs.py b/src/lava/magma/compiler/compiler_graphs.py
index b25e4736d..9197fdb8a 100644
--- a/src/lava/magma/compiler/compiler_graphs.py
+++ b/src/lava/magma/compiler/compiler_graphs.py
@@ -811,23 +811,24 @@ def _find_proc_models(proc: AbstractProcess) \
         if not proc_module.__name__ == "__main__":
             # Get the parent module.
             module_spec = importlib.util.find_spec(proc_module.__name__)
-            parent_module = importlib.import_module(module_spec.parent)
-
-            # Get all the modules inside the parent (namespace) module. This
-            # is required here, because the namespace module can span multiple
-            # repositories.
-            namespace_module_infos = list(
-                pkgutil.iter_modules(
-                    parent_module.__path__,
-                    parent_module.__name__ + "."
+            if module_spec.parent:
+                parent_module = importlib.import_module(module_spec.parent)
+
+                # Get all the modules inside the parent (namespace) module.
+                # This is required here, because the namespace module can span
+                # multiple repositories.
+                namespace_module_infos = list(
+                    pkgutil.iter_modules(
+                        parent_module.__path__,
+                        parent_module.__name__ + "."
+ ) ) - ) - # Extract the directory name of each module. - for _, name, _ in namespace_module_infos: - module = importlib.import_module(name) - module_dir_name = os.path.dirname(inspect.getfile(module)) - dir_names.append(module_dir_name) + # Extract the directory name of each module. + for _, name, _ in namespace_module_infos: + module = importlib.import_module(name) + module_dir_name = os.path.dirname(inspect.getfile(module)) + dir_names.append(module_dir_name) # Go through all directories and extract all the ProcModels. for dir_name in dir_names: From fd2163cb443794b54e01ac5eab92d1e2d620cc53 Mon Sep 17 00:00:00 2001 From: Joyesh Mishra Date: Fri, 26 Aug 2022 15:09:39 -0700 Subject: [PATCH 5/9] Added ConvVarModel --- src/lava/magma/compiler/utils.py | 11 ++++- src/lava/magma/compiler/var_model.py | 63 ++++++++++++++++++++++++++-- 2 files changed, 69 insertions(+), 5 deletions(-) diff --git a/src/lava/magma/compiler/utils.py b/src/lava/magma/compiler/utils.py index 10014906a..b3c813cac 100644 --- a/src/lava/magma/compiler/utils.py +++ b/src/lava/magma/compiler/utils.py @@ -6,7 +6,7 @@ from lava.magma.compiler.mappable_interface import Mappable from lava.magma.compiler.subcompilers.address import NcLogicalAddress, \ NcVirtualAddress -from lava.magma.compiler.var_model import LoihiVarModel +from lava.magma.compiler.var_model import LoihiVarModel, ConvInVarModel from lava.magma.core.model.spike_type import SpikeType @@ -58,6 +58,10 @@ def get_logical(self) -> ty.List[NcLogicalAddress]: ------- Returns logical address of the port initializer. """ + # TODO: Need to clean this + if isinstance(self.var_model, ConvInVarModel): + return self.var_model.get_logical() + return [NcLogicalAddress(chip_id=addr.logical_chip_id, core_id=addr.logical_core_id) for addr in self.var_model.address] @@ -73,6 +77,11 @@ def set_virtual(self, addrs: ty.List[NcVirtualAddress]): ------- """ + # TODO: Need to clean this + if isinstance(self.var_model, ConvInVarModel): + self.var_model.set_virtual(addrs) + return + if len(addrs) != len(self.var_model.address): raise ValueError("Length of list of address provided doesn't " "match size of the address list of the port " diff --git a/src/lava/magma/compiler/var_model.py b/src/lava/magma/compiler/var_model.py index 9393fe045..a0a6d4b44 100644 --- a/src/lava/magma/compiler/var_model.py +++ b/src/lava/magma/compiler/var_model.py @@ -15,17 +15,20 @@ pass from lava.magma.core.process.variable import Var +ChipIdx: int +CoreIdx: int + @dataclass class LoihiAddress: # physical chip id of the var - physical_chip_id: int + physical_chip_id: ChipIdx # physical core id of the nc var or lmt_id of the spike counter - physical_core_id: int + physical_core_id: CoreIdx # logical chip id used in compilation, before mapping to hardware addresses - logical_chip_id: int + logical_chip_id: ChipIdx # logical core id used in compilation, before mapping to hardware addresses - logical_core_id: int + logical_core_id: CoreIdx # logical address/index of the var; used with nodesets for get/set logical_idx_addr: int # length of the contiguous addresses of var on core_id on chip_id @@ -122,3 +125,55 @@ class CVarModel(LoihiVarModel): @dataclass class NcVarModel(LoihiVarModel): pass + + +@dataclass +class Region: + x_min: int + x_max: int + y_min: int + y_max: int + logical_chip_idx: ChipIdx + logical_core_idx: CoreIdx + physical_chip_idx: ChipIdx = None + physical_core_idx: CoreIdx = None + + +@dataclass +class ConvInVarModel(AbstractVarModel, Mappable): + x_dim: int = 0 + y_dim: int = 0 + f_dim: int = 
0 + x_split: int = 0 + f_split: int = 0 + regions: ty.List[Region] = None + + def get_logical(self) -> ty.List[NcLogicalAddress]: + """ + + Returns + ------- + Returns logical address of the port initializer. + """ + return [NcLogicalAddress(chip_id=region.logical_chip_idx, + core_id=region.logical_core_idx) for region in + self.regions] + + def set_virtual(self, addrs: ty.List[NcVirtualAddress]): + """ + Sets physical address of the port initializer + Parameters + ---------- + addrs: List of address + + Returns + ------- + + """ + if len(addrs) != len(self.regions): + raise ValueError("Length of list of address provided doesn't " + "match size of the regions list of the port " + "initializer.") + for idx, addr in enumerate(addrs): + self.regions[idx].physical_chip_idx = addr.chip_id + self.regions[idx].physical_core_idx = addr.core_id From 27c81c2c6f11535dd7d64b1dced0ca97ac722ce9 Mon Sep 17 00:00:00 2001 From: bamsumit Date: Tue, 27 Jun 2023 15:07:21 -0700 Subject: [PATCH 6/9] Added iterable callback function Signed-off-by: bamsumit --- src/lava/magma/core/callback_fx.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/lava/magma/core/callback_fx.py b/src/lava/magma/core/callback_fx.py index 65b7c4fee..7e0f13652 100644 --- a/src/lava/magma/core/callback_fx.py +++ b/src/lava/magma/core/callback_fx.py @@ -4,6 +4,7 @@ import numpy as np from abc import ABC, abstractmethod +from typing import Iterable try: from nxcore.arch.base.nxboard import NxBoard except ImportError: @@ -53,3 +54,23 @@ def post_run_callback(self, board: NxBoard = None, var_id_to_var_model_map: dict = None): pass + + +class IterableCallBack(NxSdkCallbackFx): + """NxSDK callback function to execute iterable of function pointers + as pre and post run.""" + + def __init__(self, + pre_run_fxs: Iterable = [], + post_run_fxs: Iterable = []) -> None: + super().__init__() + self.pre_run_fxs = pre_run_fxs + self.post_run_fxs = post_run_fxs + + def pre_run_callback(self, board: NxBoard, **_) -> None: + for fx in self.pre_run_fxs: + fx(board) + + def post_run_callback(self, board: NxBoard, **_) -> None: + for fx in self.post_run_fxs: + fx(board) From e189d8a0ad2f08ede15e60ef918ed91d40949d87 Mon Sep 17 00:00:00 2001 From: Marcus G K Williams <168222+mgkwill@users.noreply.github.com> Date: Wed, 28 Jun 2023 12:05:15 -0700 Subject: [PATCH 7/9] Fix codacy issues in callback_fx.py --- src/lava/magma/core/callback_fx.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/lava/magma/core/callback_fx.py b/src/lava/magma/core/callback_fx.py index 7e0f13652..b8099e450 100644 --- a/src/lava/magma/core/callback_fx.py +++ b/src/lava/magma/core/callback_fx.py @@ -61,16 +61,26 @@ class IterableCallBack(NxSdkCallbackFx): as pre and post run.""" def __init__(self, - pre_run_fxs: Iterable = [], - post_run_fxs: Iterable = []) -> None: + pre_run_fxs: Iterable = None, + post_run_fxs: Iterable = None) -> None: super().__init__() + if pre_run_fxs is None: + pre_run_fxs = [] + if post_run_fxs is None: + post_run_fxs = [] self.pre_run_fxs = pre_run_fxs self.post_run_fxs = post_run_fxs - def pre_run_callback(self, board: NxBoard, **_) -> None: + def pre_run_callback(self, + board: NxBoard, + _var_id_to_var_model_map: dict = None + ) -> None: for fx in self.pre_run_fxs: fx(board) - def post_run_callback(self, board: NxBoard, **_) -> None: + def post_run_callback(self, + board: NxBoard, + _var_id_to_var_model_map: dict = None + ) -> None: for fx in self.post_run_fxs: fx(board) From 
4f50a363aa933ae8752325a7272270d75514b687 Mon Sep 17 00:00:00 2001 From: Marcus G K Williams <168222+mgkwill@users.noreply.github.com> Date: Wed, 28 Jun 2023 12:08:04 -0700 Subject: [PATCH 8/9] Fix linting in callback_fx.py --- src/lava/magma/core/callback_fx.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/lava/magma/core/callback_fx.py b/src/lava/magma/core/callback_fx.py index b8099e450..c2d5d6eac 100644 --- a/src/lava/magma/core/callback_fx.py +++ b/src/lava/magma/core/callback_fx.py @@ -74,13 +74,13 @@ def __init__(self, def pre_run_callback(self, board: NxBoard, _var_id_to_var_model_map: dict = None - ) -> None: + ) -> None: for fx in self.pre_run_fxs: fx(board) def post_run_callback(self, - board: NxBoard, - _var_id_to_var_model_map: dict = None - ) -> None: + board: NxBoard, + _var_id_to_var_model_map: dict = None + ) -> None: for fx in self.post_run_fxs: fx(board) From d06bf791bf851a9f465e94160ea6702f3f302a2c Mon Sep 17 00:00:00 2001 From: Marcus G K Williams <168222+mgkwill@users.noreply.github.com> Date: Wed, 28 Jun 2023 12:22:16 -0700 Subject: [PATCH 9/9] Fix codacy sig issue in callback_fx.py --- src/lava/magma/core/callback_fx.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lava/magma/core/callback_fx.py b/src/lava/magma/core/callback_fx.py index c2d5d6eac..ed635b896 100644 --- a/src/lava/magma/core/callback_fx.py +++ b/src/lava/magma/core/callback_fx.py @@ -72,14 +72,14 @@ def __init__(self, self.post_run_fxs = post_run_fxs def pre_run_callback(self, - board: NxBoard, + board: NxBoard = None, _var_id_to_var_model_map: dict = None ) -> None: for fx in self.pre_run_fxs: fx(board) def post_run_callback(self, - board: NxBoard, + board: NxBoard = None, _var_id_to_var_model_map: dict = None ) -> None: for fx in self.post_run_fxs: