diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0ad399734..c76f12bef 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,7 +18,7 @@ repos:
 #    hooks:
 #      - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.7
+    rev: v0.6.1
    hooks:
      - id: ruff
 #  - repo: https://github.com/econchick/interrogate
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0cc6ba4d1..bf5a37ff2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,9 @@
 
 ### Enhancements
 - Added support to append to a dataset of references for HDMF-Zarr. @mavaylon1 [#1157](https://github.com/hdmf-dev/hdmf/pull/1157)
+- Adjusted stacklevel of warnings to point to user code when possible. @rly [#1166](https://github.com/hdmf-dev/hdmf/pull/1166)
+- Improved "already exists" error message when adding a container to a `MultiContainerInterface`. @rly [#1165](https://github.com/hdmf-dev/hdmf/pull/1165)
+- Added support to write multidimensional string arrays. @stephprince [#1173](https://github.com/hdmf-dev/hdmf/pull/1173)
 
 ## HDMF 3.14.3 (July 29, 2024)
 
@@ -11,6 +14,8 @@
 - Added new attribute "dimension_labels" on `DatasetBuilder` which specifies the names of the dimensions used
   in the dataset based on the shape of the dataset data and the dimension names in the spec for the data type.
   This attribute is available on build (during the write process), but not on read of a dataset from a file.
   @rly [#1081](https://github.com/hdmf-dev/hdmf/pull/1081)
+- Speed up loading namespaces by skipping register_type when already registered. @magland [#1102](https://github.com/hdmf-dev/hdmf/pull/1102)
+- Speed up namespace loading: return a shallow copy rather than a deep copy in build_const_args. @magland [#1103](https://github.com/hdmf-dev/hdmf/pull/1103)
 
 ## HDMF 3.14.2 (July 7, 2024)
diff --git a/src/hdmf/backends/hdf5/h5_utils.py b/src/hdmf/backends/hdf5/h5_utils.py
index 8a643dab2..f5d74dd5e 100644
--- a/src/hdmf/backends/hdf5/h5_utils.py
+++ b/src/hdmf/backends/hdf5/h5_utils.py
@@ -509,7 +509,7 @@ def __init__(self, **kwargs):
         # Check for possible collision with other parameters
         if not isinstance(getargs('data', kwargs), Dataset) and self.__link_data:
             self.__link_data = False
-            warnings.warn('link_data parameter in H5DataIO will be ignored', stacklevel=2)
+            warnings.warn('link_data parameter in H5DataIO will be ignored', stacklevel=3)
         # Call the super constructor and consume the data parameter
         super().__init__(**kwargs)
         # Construct the dict with the io args, ignoring all options that were set to None
@@ -533,7 +533,7 @@ def __init__(self, **kwargs):
                 self.__iosettings.pop('compression', None)
                 if 'compression_opts' in self.__iosettings:
                     warnings.warn('Compression disabled by compression=False setting. ' +
-                                  'compression_opts parameter will, therefore, be ignored.', stacklevel=2)
+                                  'compression_opts parameter will, therefore, be ignored.', stacklevel=3)
                     self.__iosettings.pop('compression_opts', None)
         # Validate the compression options used
         self._check_compression_options()
@@ -548,7 +548,7 @@ def __init__(self, **kwargs):
         if isinstance(self.data, Dataset):
             for k in self.__iosettings.keys():
                 warnings.warn("%s in H5DataIO will be ignored with H5DataIO.data being an HDF5 dataset" % k,
-                              stacklevel=2)
+                              stacklevel=3)
 
         self.__dataset = None
 
@@ -626,7 +626,7 @@ def _check_compression_options(self):
             if self.__iosettings['compression'] not in ['gzip', h5py_filters.h5z.FILTER_DEFLATE]:
                 warnings.warn(str(self.__iosettings['compression']) + " compression may not be available "
                               "on all installations of HDF5. Use of gzip is recommended to ensure portability of "
-                              "the generated HDF5 files.", stacklevel=3)
+                              "the generated HDF5 files.", stacklevel=4)
 
     @staticmethod
     def filter_available(filter, allow_plugin_filters):
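The `stacklevel` bumps in `h5_utils.py` (and the similar ones in the files below) all follow the same rule: each wrapper between the user's call and the `warnings.warn()` call adds one stack frame, and `stacklevel` must count past those frames for the warning to be attributed to user code. A minimal sketch of the mechanics, using a hypothetical stand-in decorator rather than hdmf's real `docval`:

```python
import warnings

def wrapped(func):
    """Stand-in for hdmf's docval decorator: adds one call frame around func."""
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)  # frame 2, counting up from the warn call
    return wrapper

@wrapped
def make_dataio():
    # stacklevel=1 blames this line, 2 blames wrapper(), 3 blames the caller
    warnings.warn("link_data parameter in H5DataIO will be ignored", stacklevel=3)

make_dataio()  # with stacklevel=3, the warning is attributed to this line
```

The last hunk above uses `stacklevel=4` because that warning is raised one call deeper, inside `_check_compression_options()`, which is itself called from the wrapped `__init__`.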
diff --git a/src/hdmf/backends/hdf5/h5tools.py b/src/hdmf/backends/hdf5/h5tools.py
index 9c6cc164e..34f4fe284 100644
--- a/src/hdmf/backends/hdf5/h5tools.py
+++ b/src/hdmf/backends/hdf5/h5tools.py
@@ -344,7 +344,7 @@ def copy_file(self, **kwargs):
         warnings.warn("The copy_file class method is no longer supported and may be removed in a future version of "
                       "HDMF. Please use the export method or h5py.File.copy method instead.",
                       category=DeprecationWarning,
-                      stacklevel=2)
+                      stacklevel=3)
 
         source_filename, dest_filename, expand_external, expand_refs, expand_soft = getargs('source_filename',
                                                                                             'dest_filename',
@@ -1469,7 +1469,7 @@ def __list_fill__(cls, parent, name, data, options=None):
             data_shape = io_settings.pop('shape')
         elif hasattr(data, 'shape'):
             data_shape = data.shape
-        elif isinstance(dtype, np.dtype):
+        elif isinstance(dtype, np.dtype) and len(dtype) > 1:  # check if compound dtype
             data_shape = (len(data),)
         else:
             data_shape = get_data_shape(data)
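The `__list_fill__` change narrows the `np.dtype` branch to compound dtypes: for a list of row tuples written with a compound dtype, the dataset shape is just the row count, while other data (including the multidimensional string arrays added in this PR) falls through to `get_data_shape()`. `len()` on a NumPy dtype returns its field count, which is what makes it usable as a compound-dtype test; a small illustration:

```python
import numpy as np

compound = np.dtype([('x', 'int32'), ('y', 'int32'), ('z', 'float64')])
scalar = np.dtype('float64')
print(len(compound))  # 3 -- number of named fields
print(len(scalar))    # 0 -- plain dtypes have no fields

# Rows destined for a compound dataset: the shape is the row count (2,),
# not the nested shape (2, 3) that get_data_shape() would infer from tuples.
rows = [(1, 2, 0.5), (3, 4, 0.5)]
print(np.array(rows, dtype=compound).shape)  # (2,)
```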
diff --git a/src/hdmf/build/objectmapper.py b/src/hdmf/build/objectmapper.py
index a46260fbd..370279707 100644
--- a/src/hdmf/build/objectmapper.py
+++ b/src/hdmf/build/objectmapper.py
@@ -601,11 +601,17 @@ def __get_data_type(cls, spec):
 
     def __convert_string(self, value, spec):
         """Convert string types to the specified dtype."""
+        def __apply_string_type(value, string_type):
+            if isinstance(value, (list, tuple, np.ndarray, DataIO)):
+                return [__apply_string_type(item, string_type) for item in value]
+            else:
+                return string_type(value)
+
         ret = value
         if isinstance(spec, AttributeSpec):
             if 'text' in spec.dtype:
                 if spec.shape is not None or spec.dims is not None:
-                    ret = list(map(str, value))
+                    ret = __apply_string_type(value, str)
                 else:
                     ret = str(value)
         elif isinstance(spec, DatasetSpec):
@@ -621,7 +627,7 @@ def string_type(x):
                     return x.isoformat()  # method works for both date and datetime
             if string_type is not None:
                 if spec.shape is not None or spec.dims is not None:
-                    ret = list(map(string_type, value))
+                    ret = __apply_string_type(value, string_type)
                 else:
                     ret = string_type(value)
         # copy over any I/O parameters if they were specified
diff --git a/src/hdmf/common/resources.py b/src/hdmf/common/resources.py
index fdca4bb81..1fc731ef5 100644
--- a/src/hdmf/common/resources.py
+++ b/src/hdmf/common/resources.py
@@ -628,7 +628,7 @@ def add_ref(self, **kwargs):
                 if entity_uri is not None:
                     entity_uri = entity.entity_uri
                     msg = 'This entity already exists. Ignoring new entity uri'
-                    warn(msg, stacklevel=2)
+                    warn(msg, stacklevel=3)
 
         #################
         # Validate Object
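The `objectmapper.py` hunk is the core of the multidimensional-string support: `list(map(...))` only converted the outermost dimension, so a 2-D or 3-D string array would keep unconverted inner elements. The new helper recurses into every level of nesting. A standalone sketch of the same idea (dropping the `DataIO` case, which needs hdmf's wrapper type):

```python
import numpy as np

def apply_string_type(value, string_type):
    """Recursively apply string_type (e.g. str) to every element of a nested array."""
    if isinstance(value, (list, tuple, np.ndarray)):
        return [apply_string_type(item, string_type) for item in value]
    return string_type(value)

print(apply_string_type([['aa', 'bb'], ['cc', 'dd']], str))
# [['aa', 'bb'], ['cc', 'dd']] -- same values, but now converted at every depth
print(apply_string_type(np.array([[1, 2], [3, 4]]), str))
# [['1', '2'], ['3', '4']]
```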
diff --git a/src/hdmf/common/table.py b/src/hdmf/common/table.py
index 2e90b0cdf..b4530c7b7 100644
--- a/src/hdmf/common/table.py
+++ b/src/hdmf/common/table.py
@@ -717,7 +717,7 @@ def add_row(self, **kwargs):
                     warn(("Data has elements with different lengths and therefore cannot be coerced into an "
                           "N-dimensional array. Use the 'index' argument when creating a column to add rows "
                           "with different lengths."),
-                         stacklevel=2)
+                         stacklevel=3)
 
     def __eq__(self, other):
         """Compare if the two DynamicTables contain the same data.
@@ -776,7 +776,7 @@ def add_column(self, **kwargs):  # noqa: C901
 
         if isinstance(index, VectorIndex):
             warn("Passing a VectorIndex in for index may lead to unexpected behavior. This functionality will be "
-                 "deprecated in a future version of HDMF.", category=FutureWarning, stacklevel=2)
+                 "deprecated in a future version of HDMF.", category=FutureWarning, stacklevel=3)
         if name in self.__colids:  # column has already been added
             msg = "column '%s' already exists in %s '%s'" % (name, self.__class__.__name__, self.name)
@@ -793,7 +793,7 @@ def add_column(self, **kwargs):  # noqa: C901
                            "Please ensure the new column complies with the spec. "
                            "This will raise an error in a future version of HDMF."
                            % (name, self.__class__.__name__, spec_table))
-                    warn(msg, stacklevel=2)
+                    warn(msg, stacklevel=3)
 
                 index_bool = index or not isinstance(index, bool)
                 spec_index = self.__uninit_cols[name].get('index', False)
@@ -803,7 +803,7 @@ def add_column(self, **kwargs):  # noqa: C901
                            "Please ensure the new column complies with the spec. "
                            "This will raise an error in a future version of HDMF."
                            % (name, self.__class__.__name__, spec_index))
-                    warn(msg, stacklevel=2)
+                    warn(msg, stacklevel=3)
 
                 spec_col_cls = self.__uninit_cols[name].get('class', VectorData)
                 if col_cls != spec_col_cls:
@@ -841,7 +841,7 @@ def add_column(self, **kwargs):  # noqa: C901
                 warn(("Data has elements with different lengths and therefore cannot be coerced into an "
                       "N-dimensional array. Use the 'index' argument when adding a column of data with "
                       "different lengths."),
-                     stacklevel=2)
+                     stacklevel=3)
 
         # Check that we are asked to create an index
         if (isinstance(index, bool) or isinstance(index, int)) and index > 0 and len(data) > 0:
diff --git a/src/hdmf/container.py b/src/hdmf/container.py
index 287809406..3772cd634 100644
--- a/src/hdmf/container.py
+++ b/src/hdmf/container.py
@@ -894,7 +894,7 @@ def set_dataio(self, **kwargs):
         warn(
             "Data.set_dataio() is deprecated. Please use Data.set_data_io() instead.",
             DeprecationWarning,
-            stacklevel=2,
+            stacklevel=3,
         )
         dataio = getargs('dataio', kwargs)
         dataio.data = self.__data
@@ -1142,7 +1142,9 @@ def _func(self, **kwargs):
                 # still need to mark self as modified
                 self.set_modified()
                 if tmp.name in d:
-                    msg = "'%s' already exists in %s '%s'" % (tmp.name, cls.__name__, self.name)
+                    msg = (f"Cannot add {tmp.__class__} '{tmp.name}' at 0x{id(tmp)} to dict attribute '{attr_name}' in "
+                           f"{cls} '{self.name}'. {d[tmp.name].__class__} '{tmp.name}' at 0x{id(d[tmp.name])} "
+                           f"already exists in '{attr_name}' and has the same name.")
                     raise ValueError(msg)
                 d[tmp.name] = tmp
             return container
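The richer message pays off in `MultiContainerInterface` subclasses, where a name collision previously reported only the duplicate name. A minimal sketch, mirroring the updated test at the end of this diff but with a hypothetical single-entry `__clsconf__` and hdmf's auto-generated constructor:

```python
from hdmf.container import Container, MultiContainerInterface

class Foo(MultiContainerInterface):
    """A container holding a dict of sub-containers under the attribute 'containers'."""
    __clsconf__ = {
        'attr': 'containers',
        'type': Container,
        'add': 'add_container',
        'get': 'get_container',
    }

obj1 = Container('obj1')
foo = Foo(obj1)              # the generated constructor adds obj1 to foo.containers
try:
    foo.add_container(obj1)  # duplicate name 'obj1' -> ValueError
except ValueError as err:
    print(err)               # now names both classes, both ids, and the attribute
```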
diff --git a/src/hdmf/spec/namespace.py b/src/hdmf/spec/namespace.py
index a2ae0bd37..57232bd25 100644
--- a/src/hdmf/spec/namespace.py
+++ b/src/hdmf/spec/namespace.py
@@ -50,13 +50,13 @@ def __init__(self, **kwargs):
             self['full_name'] = full_name
         if version == str(SpecNamespace.UNVERSIONED):
             # the unversioned version may be written to file as a string and read from file as a string
-            warn("Loaded namespace '%s' is unversioned. Please notify the extension author." % name, stacklevel=2)
+            warn(f"Loaded namespace '{name}' is unversioned. Please notify the extension author.")
             version = SpecNamespace.UNVERSIONED
         if version is None:
             # version is required on write -- see YAMLSpecWriter.write_namespace -- but can be None on read in order to
             # be able to read older files with extensions that are missing the version key.
-            warn(("Loaded namespace '%s' is missing the required key 'version'. Version will be set to '%s'. "
-                  "Please notify the extension author.") % (name, SpecNamespace.UNVERSIONED), stacklevel=2)
+            warn(f"Loaded namespace '{name}' is missing the required key 'version'. Version will be set to "
+                 f"'{SpecNamespace.UNVERSIONED}'. Please notify the extension author.")
             version = SpecNamespace.UNVERSIONED
         self['version'] = version
         if date is not None:
@@ -466,15 +466,19 @@ def __load_namespace(self, namespace, reader, resolve=True):
         return included_types
 
     def __register_type(self, ndt, inc_ns, catalog, registered_types):
-        spec = inc_ns.get_spec(ndt)
-        spec_file = inc_ns.catalog.get_spec_source_file(ndt)
-        self.__register_dependent_types(spec, inc_ns, catalog, registered_types)
-        if isinstance(spec, DatasetSpec):
-            built_spec = self.dataset_spec_cls.build_spec(spec)
+        if ndt in registered_types:
+            # already registered
+            pass
         else:
-            built_spec = self.group_spec_cls.build_spec(spec)
-        registered_types.add(ndt)
-        catalog.register_spec(built_spec, spec_file)
+            spec = inc_ns.get_spec(ndt)
+            spec_file = inc_ns.catalog.get_spec_source_file(ndt)
+            self.__register_dependent_types(spec, inc_ns, catalog, registered_types)
+            if isinstance(spec, DatasetSpec):
+                built_spec = self.dataset_spec_cls.build_spec(spec)
+            else:
+                built_spec = self.group_spec_cls.build_spec(spec)
+            registered_types.add(ndt)
+            catalog.register_spec(built_spec, spec_file)
 
     def __register_dependent_types(self, spec, inc_ns, catalog, registered_types):
         """Ensure that classes for all types used by this type are registered
@@ -529,7 +533,7 @@ def load_namespaces(self, **kwargs):
                 if ns['version'] != self.__namespaces.get(ns['name'])['version']:
                     # warn if the cached namespace differs from the already loaded namespace
                     warn("Ignoring cached namespace '%s' version %s because version %s is already loaded."
-                         % (ns['name'], ns['version'], self.__namespaces.get(ns['name'])['version']), stacklevel=2)
+                         % (ns['name'], ns['version'], self.__namespaces.get(ns['name'])['version']))
             else:
                 to_load.append(ns)
         # now load specs into namespace
diff --git a/src/hdmf/spec/spec.py b/src/hdmf/spec/spec.py
index 358cc3256..e10d5e43e 100644
--- a/src/hdmf/spec/spec.py
+++ b/src/hdmf/spec/spec.py
@@ -1,7 +1,6 @@
 import re
 from abc import ABCMeta
 from collections import OrderedDict
-from copy import deepcopy
 from warnings import warn
 
 from ..utils import docval, getargs, popargs, get_docval
@@ -84,7 +83,7 @@ class ConstructableDict(dict, metaclass=ABCMeta):
     def build_const_args(cls, spec_dict):
         ''' Build constructor arguments for this ConstructableDict class from a dictionary '''
         # main use cases are when spec_dict is a ConstructableDict or a spec dict read from a file
-        return deepcopy(spec_dict)
+        return spec_dict.copy()
 
     @classmethod
     def build_spec(cls, spec_dict):
@@ -322,7 +321,7 @@ def __init__(self, **kwargs):
         default_name = getargs('default_name', kwargs)
         if default_name:
             if name is not None:
-                warn("found 'default_name' with 'name' - ignoring 'default_name'", stacklevel=2)
+                warn("found 'default_name' with 'name' - ignoring 'default_name'")
             else:
                 self['default_name'] = default_name
         self.__attributes = dict()
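The `deepcopy` to `dict.copy()` swap is the second namespace-loading speedup: a shallow copy duplicates only the top-level dict, so nested spec structures are shared rather than recursively cloned. The tradeoff to be aware of, in a small illustration (not hdmf code):

```python
from copy import deepcopy

spec_dict = {'name': 'data', 'dtype': 'text', 'attributes': [{'name': 'unit'}]}
shallow = spec_dict.copy()
deep = deepcopy(spec_dict)

# Replacing a top-level key in the copy never touches the source dict...
shallow['dtype'] = 'int'
print(spec_dict['dtype'])                                # 'text'

# ...but the shallow copy shares nested objects, so mutating them in place
# would be visible through both references; deepcopy pays to avoid that.
print(shallow['attributes'] is spec_dict['attributes'])  # True
print(deep['attributes'] is spec_dict['attributes'])     # False
```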
diff --git a/tests/unit/build_tests/test_classgenerator.py b/tests/unit/build_tests/test_classgenerator.py
index 52fdc4839..3c9fda283 100644
--- a/tests/unit/build_tests/test_classgenerator.py
+++ b/tests/unit/build_tests/test_classgenerator.py
@@ -180,10 +180,11 @@ def test_dynamic_container_creation(self):
         baz_spec = GroupSpec('A test extension with no Container class',
                              data_type_def='Baz', data_type_inc=self.bar_spec,
                              attributes=[AttributeSpec('attr3', 'a float attribute', 'float'),
-                                         AttributeSpec('attr4', 'another float attribute', 'float')])
+                                         AttributeSpec('attr4', 'another float attribute', 'float'),
+                                         AttributeSpec('attr_array', 'an array attribute', 'text', shape=(None,)),])
         self.spec_catalog.register_spec(baz_spec, 'extension.yaml')
         cls = self.type_map.get_dt_container_cls('Baz', CORE_NAMESPACE)
-        expected_args = {'name', 'data', 'attr1', 'attr2', 'attr3', 'attr4', 'skip_post_init'}
+        expected_args = {'name', 'data', 'attr1', 'attr2', 'attr3', 'attr4', 'attr_array', 'skip_post_init'}
         received_args = set()
 
         for x in get_docval(cls.__init__):
@@ -211,7 +212,7 @@ def test_dynamic_container_creation_defaults(self):
                              AttributeSpec('attr4', 'another float attribute', 'float')])
         self.spec_catalog.register_spec(baz_spec, 'extension.yaml')
         cls = self.type_map.get_dt_container_cls('Baz', CORE_NAMESPACE)
-        expected_args = {'name', 'data', 'attr1', 'attr2', 'attr3', 'attr4', 'foo', 'skip_post_init'}
+        expected_args = {'name', 'data', 'attr1', 'attr2', 'attr3', 'attr4', 'attr_array', 'foo', 'skip_post_init'}
         received_args = set(map(lambda x: x['name'], get_docval(cls.__init__)))
         self.assertSetEqual(expected_args, received_args)
         self.assertEqual(cls.__name__, 'Baz')
diff --git a/tests/unit/build_tests/test_io_map.py b/tests/unit/build_tests/test_io_map.py
index 63f397682..e095ef318 100644
--- a/tests/unit/build_tests/test_io_map.py
+++ b/tests/unit/build_tests/test_io_map.py
@@ -9,6 +9,7 @@
 from hdmf.testing import TestCase
 from abc import ABCMeta, abstractmethod
 import unittest
+import numpy as np
 
 from tests.unit.helpers.utils import CORE_NAMESPACE, create_test_type_map
 
@@ -20,24 +21,27 @@ class Bar(Container):
             {'name': 'attr1', 'type': str, 'doc': 'an attribute'},
             {'name': 'attr2', 'type': int, 'doc': 'another attribute'},
             {'name': 'attr3', 'type': float, 'doc': 'a third attribute', 'default': 3.14},
+            {'name': 'attr_array', 'type': 'array_data', 'doc': 'another attribute', 'default': (1, 2, 3)},
             {'name': 'foo', 'type': 'Foo', 'doc': 'a group', 'default': None})
     def __init__(self, **kwargs):
-        name, data, attr1, attr2, attr3, foo = getargs('name', 'data', 'attr1', 'attr2', 'attr3', 'foo', kwargs)
+        name, data, attr1, attr2, attr3, attr_array, foo = getargs('name', 'data', 'attr1', 'attr2', 'attr3',
+                                                                   'attr_array', 'foo', kwargs)
         super().__init__(name=name)
         self.__data = data
         self.__attr1 = attr1
         self.__attr2 = attr2
         self.__attr3 = attr3
+        self.__attr_array = attr_array
         self.__foo = foo
         if self.__foo is not None and self.__foo.parent is None:
             self.__foo.parent = self
 
     def __eq__(self, other):
-        attrs = ('name', 'data', 'attr1', 'attr2', 'attr3', 'foo')
+        attrs = ('name', 'data', 'attr1', 'attr2', 'attr3', 'attr_array', 'foo')
         return all(getattr(self, a) == getattr(other, a) for a in attrs)
 
     def __str__(self):
-        attrs = ('name', 'data', 'attr1', 'attr2', 'attr3', 'foo')
+        attrs = ('name', 'data', 'attr1', 'attr2', 'attr3', 'attr_array', 'foo')
         return ','.join('%s=%s' % (a, getattr(self, a)) for a in attrs)
 
     @property
@@ -60,6 +64,10 @@ def attr2(self):
     def attr3(self):
         return self.__attr3
 
+    @property
+    def attr_array(self):
+        return self.__attr_array
+
     @property
     def foo(self):
         return self.__foo
@@ -333,12 +341,15 @@ def test_build_1d(self):
                              datasets=[DatasetSpec('an example dataset', 'text', name='data', shape=(None,),
                                                    attributes=[AttributeSpec(
                                                        'attr2', 'an example integer attribute', 'int')])],
-                             attributes=[AttributeSpec('attr1', 'an example string attribute', 'text')])
+                             attributes=[AttributeSpec('attr1', 'an example string attribute', 'text'),
+                                         AttributeSpec('attr_array', 'an example array attribute', 'text',
+                                                       shape=(None,))])
         type_map = self.customSetUp(bar_spec)
         type_map.register_map(Bar, BarMapper)
-        bar_inst = Bar('my_bar', ['a', 'b', 'c', 'd'], 'value1', 10)
+        bar_inst = Bar('my_bar', ['a', 'b', 'c', 'd'], 'value1', 10, attr_array=['a', 'b', 'c', 'd'])
         builder = type_map.build(bar_inst)
-        self.assertEqual(builder.get('data').data, ['a', 'b', 'c', 'd'])
+        np.testing.assert_array_equal(builder.get('data').data, np.array(['a', 'b', 'c', 'd']))
+        np.testing.assert_array_equal(builder.get('attr_array'), np.array(['a', 'b', 'c', 'd']))
 
     def test_build_scalar(self):
         bar_spec = GroupSpec('A test group specification with a data type',
@@ -353,6 +364,102 @@ def test_build_scalar(self):
         builder = type_map.build(bar_inst)
         self.assertEqual(builder.get('data').data, "['a', 'b', 'c', 'd']")
 
+    def test_build_2d_lol(self):
+        bar_spec = GroupSpec(
+            doc='A test group specification with a data type',
+            data_type_def='Bar',
+            datasets=[
+                DatasetSpec(
+                    doc='an example dataset',
+                    dtype='text',
+                    name='data',
+                    shape=(None, None),
+                    attributes=[AttributeSpec(name='attr2', doc='an example integer attribute', dtype='int')],
+                )
+            ],
+            attributes=[AttributeSpec(name='attr_array', doc='an example array attribute', dtype='text',
+                                      shape=(None, None))],
+        )
+        type_map = self.customSetUp(bar_spec)
+        type_map.register_map(Bar, BarMapper)
+        str_lol_2d = [['aa', 'bb'], ['cc', 'dd']]
+        bar_inst = Bar('my_bar', str_lol_2d, 'value1', 10, attr_array=str_lol_2d)
+        builder = type_map.build(bar_inst)
+        self.assertEqual(builder.get('data').data, str_lol_2d)
+        self.assertEqual(builder.get('attr_array'), str_lol_2d)
+
+    def test_build_2d_ndarray(self):
+        bar_spec = GroupSpec(
+            doc='A test group specification with a data type',
+            data_type_def='Bar',
+            datasets=[
+                DatasetSpec(
+                    doc='an example dataset',
+                    dtype='text',
+                    name='data',
+                    shape=(None, None),
+                    attributes=[AttributeSpec(name='attr2', doc='an example integer attribute', dtype='int')],
+                )
+            ],
+            attributes=[AttributeSpec(name='attr_array', doc='an example array attribute', dtype='text',
+                                      shape=(None, None))],
+        )
+        type_map = self.customSetUp(bar_spec)
+        type_map.register_map(Bar, BarMapper)
+        str_array_2d = np.array([['aa', 'bb'], ['cc', 'dd']])
+        bar_inst = Bar('my_bar', str_array_2d, 'value1', 10, attr_array=str_array_2d)
+        builder = type_map.build(bar_inst)
+        np.testing.assert_array_equal(builder.get('data').data, str_array_2d)
+        np.testing.assert_array_equal(builder.get('attr_array'), str_array_2d)
+
+    def test_build_3d_lol(self):
+        bar_spec = GroupSpec(
+            doc='A test group specification with a data type',
+            data_type_def='Bar',
+            datasets=[
+                DatasetSpec(
+                    doc='an example dataset',
+                    dtype='text',
+                    name='data',
+                    shape=(None, None, None),
+                    attributes=[AttributeSpec(name='attr2', doc='an example integer attribute', dtype='int')],
+                )
+            ],
+            attributes=[AttributeSpec(name='attr_array', doc='an example array attribute', dtype='text',
+                                      shape=(None, None, None))],
+        )
+        type_map = self.customSetUp(bar_spec)
+        type_map.register_map(Bar, BarMapper)
+        str_lol_3d = [[['aa', 'bb'], ['cc', 'dd']], [['ee', 'ff'], ['gg', 'hh']]]
+        bar_inst = Bar('my_bar', str_lol_3d, 'value1', 10, attr_array=str_lol_3d)
+        builder = type_map.build(bar_inst)
+        self.assertEqual(builder.get('data').data, str_lol_3d)
+        self.assertEqual(builder.get('attr_array'), str_lol_3d)
+
+    def test_build_3d_ndarray(self):
+        bar_spec = GroupSpec(
+            doc='A test group specification with a data type',
+            data_type_def='Bar',
+            datasets=[
+                DatasetSpec(
+                    doc='an example dataset',
+                    dtype='text',
+                    name='data',
+                    shape=(None, None, None),
+                    attributes=[AttributeSpec(name='attr2', doc='an example integer attribute', dtype='int')],
+                )
+            ],
+            attributes=[AttributeSpec(name='attr_array', doc='an example array attribute', dtype='text',
+                                      shape=(None, None, None))],
+        )
+        type_map = self.customSetUp(bar_spec)
+        type_map.register_map(Bar, BarMapper)
+        str_array_3d = np.array([[['aa', 'bb'], ['cc', 'dd']], [['ee', 'ff'], ['gg', 'hh']]])
+        bar_inst = Bar('my_bar', str_array_3d, 'value1', 10, attr_array=str_array_3d)
+        builder = type_map.build(bar_inst)
+        np.testing.assert_array_equal(builder.get('data').data, str_array_3d)
+        np.testing.assert_array_equal(builder.get('attr_array'), str_array_3d)
+
     def test_build_dataio(self):
         bar_spec = GroupSpec('A test group specification with a data type',
                              data_type_def='Bar',
diff --git a/tests/unit/test_io_hdf5_h5tools.py b/tests/unit/test_io_hdf5_h5tools.py
index 62ddc80e9..fa5f9081c 100644
--- a/tests/unit/test_io_hdf5_h5tools.py
+++ b/tests/unit/test_io_hdf5_h5tools.py
@@ -24,7 +24,7 @@
 from hdmf.data_utils import DataChunkIterator, GenericDataChunkIterator, InvalidDataIOError
 from hdmf.spec.catalog import SpecCatalog
 from hdmf.spec.namespace import NamespaceCatalog, SpecNamespace
-from hdmf.spec.spec import GroupSpec
+from hdmf.spec.spec import GroupSpec, DtypeSpec
 from hdmf.testing import TestCase, remove_test_file
 from hdmf.common.resources import HERD
 from hdmf.term_set import TermSet, TermSetWrapper
@@ -164,6 +164,31 @@ def test_write_dataset_list(self):
         dset = self.f['test_dataset']
         self.assertTrue(np.all(dset[:] == a))
 
+    def test_write_dataset_lol_strings(self):
+        a = [['aa', 'bb'], ['cc', 'dd']]
+        self.io.write_dataset(self.f, DatasetBuilder('test_dataset', a, attributes={}))
+        dset = self.f['test_dataset']
+        decoded_dset = [[item.decode('utf-8') if isinstance(item, bytes) else item for item in sublist]
+                        for sublist in dset[:]]
+        self.assertTrue(decoded_dset == a)
+
+    def test_write_dataset_list_compound_datatype(self):
+        a = np.array([(1, 2, 0.5), (3, 4, 0.5)], dtype=[('x', 'int'), ('y', 'int'), ('z', 'float')])
+        dset_builder = DatasetBuilder(
+            name='test_dataset',
+            data=a.tolist(),
+            attributes={},
+            dtype=[
+                DtypeSpec('x', doc='x', dtype='int'),
+                DtypeSpec('y', doc='y', dtype='int'),
+                DtypeSpec('z', doc='z', dtype='float'),
+            ],
+        )
+        self.io.write_dataset(self.f, dset_builder)
+        dset = self.f['test_dataset']
+        for field in a.dtype.names:
+            self.assertTrue(np.all(dset[field][:] == a[field]))
+
     def test_write_dataset_list_compress_gzip(self):
         a = H5DataIO(np.arange(30).reshape(5, 2, 3),
                      compression='gzip',
diff --git a/tests/unit/test_multicontainerinterface.py b/tests/unit/test_multicontainerinterface.py
index c705d0a6e..6da81c2cc 100644
--- a/tests/unit/test_multicontainerinterface.py
+++ b/tests/unit/test_multicontainerinterface.py
@@ -198,7 +198,10 @@ def test_add_single_dup(self):
         """Test that adding a container to the attribute dict correctly adds the container."""
         obj1 = Container('obj1')
         foo = Foo(obj1)
-        msg = "'obj1' already exists in Foo 'Foo'"
+        msg = (f"Cannot add <class 'hdmf.container.Container'> 'obj1' at 0x{id(obj1)} to dict attribute "
+               "'containers' in <class 'tests.unit.test_multicontainerinterface.Foo'> 'Foo'. "
+               f"<class 'hdmf.container.Container'> 'obj1' at 0x{id(obj1)} already exists in 'containers' "
+               "and has the same name.")
         with self.assertRaisesWith(ValueError, msg):
             foo.add_container(obj1)
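For reference, the end result of the multidimensional string support is an ordinary n-dimensional variable-length string dataset in the HDF5 file. A minimal h5py-only sketch of the equivalent on-disk layout (assuming h5py >= 3, where string data reads back as bytes, matching the decode step in `test_write_dataset_lol_strings`):

```python
import h5py

data = [['aa', 'bb'], ['cc', 'dd']]
with h5py.File('strings.h5', 'w') as f:
    # Variable-length UTF-8 string dtype; a plain numpy 'U' array would be rejected.
    dset = f.create_dataset('test_dataset', shape=(2, 2), dtype=h5py.string_dtype())
    dset[...] = data
    print(dset.shape)           # (2, 2) -- a true 2-D dataset, not flattened
    print(dset[0, 0].decode())  # 'aa'
```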