Update requirements, prepare 3.0.0 #640

Merged · 5 commits · Jul 7, 2021

3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -29,6 +29,9 @@
 passed to ``DynamicTable.get``, then nested DataFrames will be returned, one DataFrame per row of the original
 resulting DataFrame. @rly (#579)
 
+### Minor improvements
+- Updated requirements and tests. @rly (#640)
+
 ### Bug fixes
 - Update the validator to allow extensions to data types which only define data_type_inc. @dsleiter (#609)
 - Fix error when validating lazy-loaded datasets containing references. @dsleiter (#609)

6 changes: 3 additions & 3 deletions requirements-dev.txt
@@ -2,9 +2,9 @@
 # compute coverage, and create test environments
 codecov==2.1.11
 coverage==5.5
-flake8==3.9.1
+flake8==3.9.2
 flake8-debugger==4.0.0
 flake8-print==4.0.0
-importlib-metadata==4.0.1
+importlib-metadata==4.6.1
 python-dateutil==2.8.1
-tox==3.23.0
+tox==3.23.1

12 changes: 6 additions & 6 deletions requirements.txt
@@ -1,8 +1,8 @@
 # pinned dependencies to reproduce an entire development environment to use HDMF
-h5py==3.2.1
-numpy==1.19.3
-scipy==1.5.4
-pandas==1.1.5
-ruamel.yaml==0.17.4
+h5py==3.3.0
+numpy==1.21.0
+scipy==1.7.0
+pandas==1.3.0
+ruamel.yaml==0.17.10
 jsonschema==3.2.0
-setuptools==56.0.0
+setuptools==57.1.0

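For anyone reproducing the pinned environment above, a small standard-library check can confirm that the installed versions match the pins. This is a sketch, not part of the PR; the expected dict copies a subset of the pins and would need to track requirements.txt.

    # Sketch: compare a few of the pins above against the active environment using only
    # the standard library (importlib.metadata, Python 3.8+). Package names are as
    # listed in requirements.txt.
    from importlib.metadata import version

    expected = {"h5py": "3.3.0", "numpy": "1.21.0", "scipy": "1.7.0", "pandas": "1.3.0"}
    for pkg, pin in expected.items():
        installed = version(pkg)
        status = "OK" if installed == pin else "MISMATCH"
        print(f"{pkg}: pinned {pin}, installed {installed} -> {status}")
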
2 changes: 1 addition & 1 deletion setup.py
@@ -13,7 +13,7 @@
 
 reqs = [
 'h5py>=2.10,<4',
-'numpy>=1.16,<1.21',
+'numpy>=1.16,<1.22',
 'scipy>=1.1,<2',
 'pandas>=1.0.5,<2',
 'ruamel.yaml>=0.16,<1',

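Why the upper bound moves: the new numpy==1.21.0 pin in requirements.txt falls outside the old <1.21 cap, so the setup.py specifier has to be widened. A quick check of that reasoning (a sketch; the third-party `packaging` library used here is an assumption and not part of this diff):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    pinned = Version("1.21.0")                       # numpy pin from requirements.txt
    print(pinned in SpecifierSet(">=1.16,<1.21"))    # False: old bound rejects the pin
    print(pinned in SpecifierSet(">=1.16,<1.22"))    # True:  new bound accepts it
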
2 changes: 1 addition & 1 deletion src/hdmf/backends/hdf5/h5_utils.py
@@ -452,7 +452,7 @@ def __init__(self, **kwargs):
 self.__allow_plugin_filters):
 msg = "%s compression may not be supported by this version of h5py." % str(self.__iosettings['compression'])
 if not self.__allow_plugin_filters:
-msg += "Set `allow_plugin_filters=True` to enable the use of dynamically-loaded plugin filters."
+msg += " Set `allow_plugin_filters=True` to enable the use of dynamically-loaded plugin filters."
 raise ValueError(msg)
 # Check possible parameter collisions
 if isinstance(self.data, Dataset):

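The fix above only adds the missing space between the two sentences of the error message. A hedged sketch of how that message surfaces to a caller; the filter id 32001 (Blosc) is an assumed example of a dynamically-loaded plugin filter that stock h5py does not list as available:

    import numpy as np
    from hdmf.backends.hdf5.h5_utils import H5DataIO

    try:
        H5DataIO(np.arange(30), compression=32001)  # plugin filter id, plugins not enabled
    except ValueError as err:
        # With the fix, the message reads as two sentences separated by a space:
        # "32001 compression may not be supported by this version of h5py. Set
        # `allow_plugin_filters=True` to enable the use of dynamically-loaded plugin filters."
        print(err)

Passing allow_plugin_filters=True, as the message suggests, lets such filter ids through, provided the corresponding HDF5 filter plugin is actually installed.
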
23 changes: 15 additions & 8 deletions tests/unit/test_io_hdf5_h5tools.py
@@ -556,22 +556,28 @@ def test_warning_on_non_gzip_compression(self):
 self.assertEqual(len(w), 0)
 self.assertEqual(dset.io_settings['compression'], 'gzip')
 # Make sure a warning is issued when using szip (even if installed)
+warn_msg = ("szip compression may not be available on all installations of HDF5. Use of gzip is "
+"recommended to ensure portability of the generated HDF5 files.")
 if "szip" in h5py_filters.encode:
-with warnings.catch_warnings(record=True) as w:
+with self.assertWarnsWith(UserWarning, warn_msg):
 dset = H5DataIO(np.arange(30),
 compression='szip',
 compression_opts=('ec', 16))
-self.assertEqual(len(w), 1)
-self.assertEqual(dset.io_settings['compression'], 'szip')
+self.assertEqual(dset.io_settings['compression'], 'szip')
 else:
 with self.assertRaises(ValueError):
-H5DataIO(np.arange(30), compression='szip', compression_opts=('ec', 16))
+with self.assertWarnsWith(UserWarning, warn_msg):
+dset = H5DataIO(np.arange(30),
+compression='szip',
+compression_opts=('ec', 16))
+self.assertEqual(dset.io_settings['compression'], 'szip')
 # Make sure a warning is issued when using lzf compression
-with warnings.catch_warnings(record=True) as w:
+warn_msg = ("lzf compression may not be available on all installations of HDF5. Use of gzip is "
+"recommended to ensure portability of the generated HDF5 files.")
+with self.assertWarnsWith(UserWarning, warn_msg):
 dset = H5DataIO(np.arange(30),
 compression='lzf')
-self.assertEqual(len(w), 1)
-self.assertEqual(dset.io_settings['compression'], 'lzf')
+self.assertEqual(dset.io_settings['compression'], 'lzf')
 
 def test_error_on_unsupported_compression_filter(self):
 # Make sure gzip does not raise an error
@@ -584,7 +590,8 @@ def test_error_on_unsupported_compression_filter(self):
 "recommended to ensure portability of the generated HDF5 files.")
 if "szip" not in h5py_filters.encode:
 with self.assertRaises(ValueError):
-H5DataIO(np.arange(30), compression='szip', compression_opts=('ec', 16))
+with self.assertWarnsWith(UserWarning, warn_msg):
+H5DataIO(np.arange(30), compression='szip', compression_opts=('ec', 16))
 else:
 try:
 with self.assertWarnsWith(UserWarning, warn_msg):

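The rewritten tests drop the manual warnings.catch_warnings(record=True) bookkeeping in favor of assertWarnsWith, which checks both the warning class and the exact message. A minimal, self-contained sketch of that pattern; the test class and method names here are illustrative, not taken from the PR:

    import numpy as np
    from hdmf.backends.hdf5.h5_utils import H5DataIO
    from hdmf.testing import TestCase


    class TestLzfCompressionWarning(TestCase):

        def test_lzf_warns(self):
            # Message taken verbatim from the diff above; assertWarnsWith fails if the
            # warning is missing or its text differs.
            warn_msg = ("lzf compression may not be available on all installations of HDF5. Use of gzip is "
                        "recommended to ensure portability of the generated HDF5 files.")
            with self.assertWarnsWith(UserWarning, warn_msg):
                dset = H5DataIO(np.arange(30), compression='lzf')
            self.assertEqual(dset.io_settings['compression'], 'lzf')


    if __name__ == "__main__":
        import unittest
        unittest.main()
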
10 changes: 5 additions & 5 deletions tests/unit/utils_test/test_utils.py
@@ -170,29 +170,29 @@ def test_strict_no_data_load(self):
 class TestToUintArray(TestCase):
 
 def test_ndarray_uint(self):
-arr = np.array([0, 1, 2], dtype=np.uint)
+arr = np.array([0, 1, 2], dtype=np.uint32)
 res = to_uint_array(arr)
 np.testing.assert_array_equal(res, arr)
 
 def test_ndarray_int(self):
-arr = np.array([0, 1, 2], dtype=np.int)
+arr = np.array([0, 1, 2], dtype=np.int32)
 res = to_uint_array(arr)
 np.testing.assert_array_equal(res, arr)
 
 def test_ndarray_int_neg(self):
-arr = np.array([0, -1, 2], dtype=np.int)
+arr = np.array([0, -1, 2], dtype=np.int32)
 with self.assertRaisesWith(ValueError, 'Cannot convert negative integer values to uint.'):
 to_uint_array(arr)
 
 def test_ndarray_float(self):
-arr = np.array([0, 1, 2], dtype=np.float)
+arr = np.array([0, 1, 2], dtype=np.float64)
 with self.assertRaisesWith(ValueError, 'Cannot convert array of dtype float64 to uint.'):
 to_uint_array(arr)
 
 def test_list_int(self):
 arr = [0, 1, 2]
 res = to_uint_array(arr)
-expected = np.array([0, 1, 2], dtype=np.uint)
+expected = np.array([0, 1, 2], dtype=np.uint32)
 np.testing.assert_array_equal(res, expected)
 
 def test_list_int_neg(self):

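Background for the dtype swaps above (an assumption about the motivation, not stated in the diff): NumPy 1.20 deprecated the np.int, np.float, and np.bool aliases for the Python builtins, and np.uint maps to a platform-dependent width, so the tests now spell out exact dtypes. A small sketch of the behavior with explicit dtypes:

    import numpy as np
    from hdmf.utils import to_uint_array

    res = to_uint_array(np.array([0, 1, 2], dtype=np.int32))
    # Expected to be an unsigned integer dtype (assumption based on the function's
    # name and the tests above); the values themselves are unchanged.
    assert res.dtype.kind == 'u'
    np.testing.assert_array_equal(res, [0, 1, 2])

    try:
        to_uint_array(np.array([0, -1, 2], dtype=np.int32))
    except ValueError as err:
        print(err)  # "Cannot convert negative integer values to uint."
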
2 changes: 1 addition & 1 deletion tests/unit/validator_tests/test_validate.py
@@ -424,7 +424,7 @@ def test_int64_for_numeric(self):
 def test_bool_for_numeric(self):
 """Test that validator does not allow bool data where numeric is specified."""
 self.set_up_spec('numeric')
-value = np.bool(1)
+value = True
 bar_builder = GroupBuilder('my_bar',
 attributes={'data_type': 'Bar', 'attr1': value},
 datasets=[DatasetBuilder('data', value)])

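Same deprecation story as the dtype changes above (assumed motivation): np.bool was only an alias for the builtin bool, so passing a plain True keeps the test's meaning while avoiding the deprecated name. A quick check:

    import numpy as np

    value = True
    assert value == np.bool_(1)                 # builtin bool and NumPy's bool_ scalar compare equal
    assert np.asarray(value).dtype == np.bool_  # arrays built from it still carry the boolean dtype
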