From d27e6da56418932a8263ad480b5b41191243ac22 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Fri, 26 Oct 2018 13:29:03 -0400 Subject: [PATCH 01/19] First steps towards better mode handling --- asdf/asdf.py | 35 +++++++++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/asdf/asdf.py b/asdf/asdf.py index f476dc11c..bfb321da4 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -589,8 +589,33 @@ def _find_asdf_version_in_comments(cls, comments): return None + def _check_and_set_mode(self, fileobj, asdf_mode): + + memmap = self._blocks.memmap + + if asdf_mode is not None and asdf_mode not in ['r', 'rw']: + msg = "Unrecognized asdf mode '{}'. Must be either 'r' or 'rw'" + raise ValueError(msg.format(asdf_mode)) + + if asdf_mode is None: + if isinstance(fileobj, str): + parsed = generic_io.urlparse(fileobj) + if parsed.scheme == 'http': + return 'r' + return 'rw' if memmap else 'r' + if isinstance(fileobj, io.IOBase): + return 'rw' if fileobj.writable() and memmap else 'r' + + raise ValueError("Unknown file object type, can't guess mode") + + # It is not safe to open files with memory maps in readonly mode + elif asdf_mode == 'r' and memmap: + raise ValueError("Can't open file as readonly without copy_arrays=True") + + return asdf_mode + @classmethod - def _open_asdf(cls, self, fd, uri=None, mode='r', + def _open_asdf(cls, self, fd, uri=None, mode=None, validate_checksums=False, do_not_fill_defaults=False, _get_yaml_content=False, @@ -604,7 +629,9 @@ def _open_asdf(cls, self, fd, uri=None, mode='r', "'strict_extension_check' and 'ignore_missing_extensions' are " "incompatible options") - fd = generic_io.get_file(fd, mode=mode, uri=uri) + self._mode = self._check_and_set_mode(fd, mode) + + fd = generic_io.get_file(fd, mode=self._mode, uri=uri) self._fd = fd # The filename is currently only used for tracing warning information self._fname = self._fd._uri if self._fd._uri else '' @@ -671,7 +698,7 @@ def _open_asdf(cls, self, fd, uri=None, mode='r', return self @classmethod - def _open_impl(cls, self, fd, uri=None, mode='r', + def _open_impl(cls, self, fd, uri=None, mode=None, validate_checksums=False, do_not_fill_defaults=False, _get_yaml_content=False, @@ -710,7 +737,7 @@ def _open_impl(cls, self, fd, uri=None, mode='r', ignore_missing_extensions=ignore_missing_extensions) @classmethod - def open(cls, fd, uri=None, mode='r', + def open(cls, fd, uri=None, mode=None, validate_checksums=False, extensions=None, do_not_fill_defaults=False, From 23a3982ee6468c7d59c18fdbe43406d9637d99f3 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Fri, 26 Oct 2018 14:52:29 -0400 Subject: [PATCH 02/19] Refactor asdf.open into standalone factory method... This allows us to improve the way that file modes are handled. 
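The caller-facing behaviour of the mode handling being introduced here can be summarized with a short usage sketch. This is illustrative only and not part of the patch; the file name is hypothetical, and the inference rules are paraphrased from the _check_and_set_mode logic in this series.

    import asdf

    # Explicit modes behave as before: only 'r' and 'rw' are accepted.
    af = asdf.open("example.asdf", mode="r")    # read-only
    af.close()

    # When no mode is given, it is inferred from the input:
    #   * http URIs are opened read-only,
    #   * local paths and writable file objects may be opened read-write,
    #     which keeps memory mapping of block data possible.
    with asdf.open("example.asdf") as af:
        print(list(af.tree))
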
--- asdf/__init__.py | 4 +- asdf/asdf.py | 246 ++++++++++++++++++++++++++--------------------- 2 files changed, 138 insertions(+), 112 deletions(-) diff --git a/asdf/__init__.py b/asdf/__init__.py index 4586236be..990758acb 100644 --- a/asdf/__init__.py +++ b/asdf/__init__.py @@ -33,7 +33,7 @@ except ImportError: raise ImportError("asdf requires numpy") -from .asdf import AsdfFile +from .asdf import AsdfFile, open_asdf from .asdftypes import CustomType from .extension import AsdfExtension from .stream import Stream @@ -43,4 +43,4 @@ from jsonschema import ValidationError -open = AsdfFile.open +open = open_asdf diff --git a/asdf/asdf.py b/asdf/asdf.py index bfb321da4..a4170a6cc 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -589,33 +589,8 @@ def _find_asdf_version_in_comments(cls, comments): return None - def _check_and_set_mode(self, fileobj, asdf_mode): - - memmap = self._blocks.memmap - - if asdf_mode is not None and asdf_mode not in ['r', 'rw']: - msg = "Unrecognized asdf mode '{}'. Must be either 'r' or 'rw'" - raise ValueError(msg.format(asdf_mode)) - - if asdf_mode is None: - if isinstance(fileobj, str): - parsed = generic_io.urlparse(fileobj) - if parsed.scheme == 'http': - return 'r' - return 'rw' if memmap else 'r' - if isinstance(fileobj, io.IOBase): - return 'rw' if fileobj.writable() and memmap else 'r' - - raise ValueError("Unknown file object type, can't guess mode") - - # It is not safe to open files with memory maps in readonly mode - elif asdf_mode == 'r' and memmap: - raise ValueError("Can't open file as readonly without copy_arrays=True") - - return asdf_mode - @classmethod - def _open_asdf(cls, self, fd, uri=None, mode=None, + def _open_asdf(cls, self, fd, uri=None, mode='r', validate_checksums=False, do_not_fill_defaults=False, _get_yaml_content=False, @@ -629,7 +604,7 @@ def _open_asdf(cls, self, fd, uri=None, mode=None, "'strict_extension_check' and 'ignore_missing_extensions' are " "incompatible options") - self._mode = self._check_and_set_mode(fd, mode) + self._mode = mode fd = generic_io.get_file(fd, mode=self._mode, uri=uri) self._fd = fd @@ -698,7 +673,7 @@ def _open_asdf(cls, self, fd, uri=None, mode=None, return self @classmethod - def _open_impl(cls, self, fd, uri=None, mode=None, + def _open_impl(cls, self, fd, uri=None, mode='r', validate_checksums=False, do_not_fill_defaults=False, _get_yaml_content=False, @@ -737,7 +712,7 @@ def _open_impl(cls, self, fd, uri=None, mode=None, ignore_missing_extensions=ignore_missing_extensions) @classmethod - def open(cls, fd, uri=None, mode=None, + def open(cls, fd, uri=None, mode='r', validate_checksums=False, extensions=None, do_not_fill_defaults=False, @@ -749,92 +724,20 @@ def open(cls, fd, uri=None, mode=None, custom_schema=None, strict_extension_check=False, ignore_missing_extensions=False): - """ - Open an existing ASDF file. - Parameters - ---------- - fd : string or file-like object - May be a string ``file`` or ``http`` URI, or a Python - file-like object. - - uri : string, optional - The URI of the file. Only required if the URI can not be - automatically determined from `fd`. - - mode : string, optional - The mode to open the file in. Must be ``r`` (default) or - ``rw``. - - validate_checksums : bool, optional - If `True`, validate the blocks against their checksums. - Requires reading the entire file, so disabled by default. - - extensions : list of AsdfExtension - A list of extensions to use when reading and writing ASDF files. - See `~asdf.asdftypes.AsdfExtension` for more information. 
- - do_not_fill_defaults : bool, optional - When `True`, do not fill in missing default values. - - ignore_version_mismatch : bool, optional - When `True`, do not raise warnings for mismatched schema versions. - Set to `True` by default. - - ignore_unrecognized_tag : bool, optional - When `True`, do not raise warnings for unrecognized tags. Set to - `False` by default. - - copy_arrays : bool, optional - When `False`, when reading files, attempt to memmap underlying data - arrays when possible. - - lazy_load : bool, optional - When `True` and the underlying file handle is seekable, data - arrays will only be loaded lazily: i.e. when they are accessed - for the first time. In this case the underlying file must stay - open during the lifetime of the tree. Setting to False causes - all data arrays to be loaded up front, which means that they - can be accessed even after the underlying file is closed. - Note: even if `lazy_load` is `False`, `copy_arrays` is still taken - into account. - - custom_schema : str, optional - Path to a custom schema file that will be used for a secondary - validation pass. This can be used to ensure that particular ASDF - files follow custom conventions beyond those enforced by the - standard. - - strict_extension_check : bool, optional - When `True`, if the given ASDF file contains metadata about the - extensions used to create it, and if those extensions are not - installed, opening the file will fail. When `False`, opening a file - under such conditions will cause only a warning. Defaults to - `False`. - - ignore_missing_extensions : bool, optional - When `True`, do not raise warnings when a file is read that - contains metadata about extensions that are not available. Defaults - to `False`. - - Returns - ------- - asdffile : AsdfFile - The new AsdfFile object. - """ - self = cls(extensions=extensions, - ignore_version_mismatch=ignore_version_mismatch, - ignore_unrecognized_tag=ignore_unrecognized_tag, - copy_arrays=copy_arrays, lazy_load=lazy_load, - custom_schema=custom_schema) - - return cls._open_impl( - self, fd, uri=uri, mode=mode, + return open_asdf( + fd, uri=uri, mode=mode, validate_checksums=validate_checksums, + extensions=extensions, do_not_fill_defaults=do_not_fill_defaults, + ignore_version_mismatch=ignore_version_mismatch, + ignore_unrecognized_tag=ignore_unrecognized_tag, _force_raw_types=_force_raw_types, + copy_arrays=copy_arrays, lazy_load=lazy_load, + custom_schema=custom_schema, strict_extension_check=strict_extension_check, - ignore_missing_extensions=ignore_missing_extensions) + ignore_missing_extensions=ignore_missing_extensions, + _compat=True) def _write_tree(self, tree, fd, pad_blocks): fd.write(constants.ASDF_MAGIC) @@ -1314,6 +1217,129 @@ def get_history_entries(self): AsdfFile.keys.__doc__ = dict.keys.__doc__ +def _check_and_set_mode(fileobj, asdf_mode): + + if asdf_mode is not None and asdf_mode not in ['r', 'rw']: + msg = "Unrecognized asdf mode '{}'. 
Must be either 'r' or 'rw'" + raise ValueError(msg.format(asdf_mode)) + + if asdf_mode is None: + if isinstance(fileobj, str): + parsed = generic_io.urlparse.urlparse(fileobj) + if parsed.scheme == 'http': + return 'r' + return 'rw' + if isinstance(fileobj, io.IOBase): + return 'rw' if fileobj.writable() else 'r' + + # This is the safest default since it allows for memory mapping + return 'rw' + + return asdf_mode + + +def open_asdf(fd, uri=None, mode=None, validate_checksums=False, + extensions=None, do_not_fill_defaults=False, + ignore_version_mismatch=True, ignore_unrecognized_tag=False, + _force_raw_types=False, copy_arrays=False, lazy_load=True, + custom_schema=None, strict_extension_check=False, + ignore_missing_extensions=False, _compat=False): + """ + Open an existing ASDF file. + + Parameters + ---------- + fd : string or file-like object + May be a string ``file`` or ``http`` URI, or a Python + file-like object. + + uri : string, optional + The URI of the file. Only required if the URI can not be + automatically determined from `fd`. + + mode : string, optional + The mode to open the file in. Must be ``r`` (default) or + ``rw``. + + validate_checksums : bool, optional + If `True`, validate the blocks against their checksums. + Requires reading the entire file, so disabled by default. + + extensions : list of AsdfExtension + A list of extensions to use when reading and writing ASDF files. + See `~asdf.asdftypes.AsdfExtension` for more information. + + do_not_fill_defaults : bool, optional + When `True`, do not fill in missing default values. + + ignore_version_mismatch : bool, optional + When `True`, do not raise warnings for mismatched schema versions. + Set to `True` by default. + + ignore_unrecognized_tag : bool, optional + When `True`, do not raise warnings for unrecognized tags. Set to + `False` by default. + + copy_arrays : bool, optional + When `False`, when reading files, attempt to memmap underlying data + arrays when possible. + + lazy_load : bool, optional + When `True` and the underlying file handle is seekable, data + arrays will only be loaded lazily: i.e. when they are accessed + for the first time. In this case the underlying file must stay + open during the lifetime of the tree. Setting to False causes + all data arrays to be loaded up front, which means that they + can be accessed even after the underlying file is closed. + Note: even if `lazy_load` is `False`, `copy_arrays` is still taken + into account. + + custom_schema : str, optional + Path to a custom schema file that will be used for a secondary + validation pass. This can be used to ensure that particular ASDF + files follow custom conventions beyond those enforced by the + standard. + + strict_extension_check : bool, optional + When `True`, if the given ASDF file contains metadata about the + extensions used to create it, and if those extensions are not + installed, opening the file will fail. When `False`, opening a file + under such conditions will cause only a warning. Defaults to + `False`. + + ignore_missing_extensions : bool, optional + When `True`, do not raise warnings when a file is read that + contains metadata about extensions that are not available. Defaults + to `False`. + + Returns + ------- + asdffile : AsdfFile + The new AsdfFile object. 
+ """ + + # For now retain backwards compatibility with the old API behavior, + # specifically when being called from AsdfFile.open + if not _compat: + mode = _check_and_set_mode(fd, mode) + if mode == 'r' and not copy_arrays: + copy_arrays = True + + instance = AsdfFile(extensions=extensions, + ignore_version_mismatch=ignore_version_mismatch, + ignore_unrecognized_tag=ignore_unrecognized_tag, + copy_arrays=copy_arrays, lazy_load=lazy_load, + custom_schema=custom_schema) + + return AsdfFile._open_impl(instance, + fd, uri=uri, mode=mode, + validate_checksums=validate_checksums, + do_not_fill_defaults=do_not_fill_defaults, + _force_raw_types=_force_raw_types, + strict_extension_check=strict_extension_check, + ignore_missing_extensions=ignore_missing_extensions) + + def is_asdf_file(fd): """ Determine if fd is an ASDF file. From b4723856568ca9c323eda4d0f5224fa52f9be617 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Fri, 26 Oct 2018 15:31:52 -0400 Subject: [PATCH 03/19] Use asdf_open for opening external array blocks --- asdf/asdf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asdf/asdf.py b/asdf/asdf.py index a4170a6cc..bfdd3ce74 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -374,7 +374,7 @@ def open_external(self, uri, do_not_fill_defaults=False): asdffile = self._external_asdf_by_uri.get(resolved_uri) if asdffile is None: - asdffile = self.open( + asdffile = open_asdf( resolved_uri, do_not_fill_defaults=do_not_fill_defaults) self._external_asdf_by_uri[resolved_uri] = asdffile From 465fae61dd5f11538f481a8db01ebe28198d79ef Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Fri, 26 Oct 2018 15:32:07 -0400 Subject: [PATCH 04/19] Accommodate generic_io.GenericFile in mode detection --- asdf/asdf.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/asdf/asdf.py b/asdf/asdf.py index bfdd3ce74..7dd2b703c 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -1232,6 +1232,9 @@ def _check_and_set_mode(fileobj, asdf_mode): if isinstance(fileobj, io.IOBase): return 'rw' if fileobj.writable() else 'r' + if isinstance(fileobj, generic_io.GenericFile): + return fileobj.mode + # This is the safest default since it allows for memory mapping return 'rw' From 9f871e8afaa0cf7c30a19739cf024d362fdb6e65 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Fri, 26 Oct 2018 15:36:09 -0400 Subject: [PATCH 05/19] Deprecate AsdfFile.open, remove use internally and in tests... 
Now the top-level function asdf.open should be used instead --- asdf/asdf.py | 5 + asdf/commands/defragment.py | 3 +- asdf/commands/diff.py | 5 +- asdf/commands/exploded.py | 5 +- asdf/commands/tests/test_defragment.py | 3 +- asdf/commands/tests/test_to_yaml.py | 3 +- asdf/commands/to_yaml.py | 3 +- asdf/tags/core/tests/test_complex.py | 6 +- asdf/tags/core/tests/test_ndarray.py | 70 ++++++------ asdf/tags/wcs/tests/test_wcs.py | 8 +- asdf/tests/helpers.py | 15 +-- asdf/tests/schema_tester.py | 2 +- asdf/tests/test_asdftypes.py | 32 +++--- asdf/tests/test_compression.py | 22 ++-- asdf/tests/test_fits_embed.py | 24 ++-- asdf/tests/test_generic_io.py | 14 ++- asdf/tests/test_low_level.py | 148 ++++++++++++------------- asdf/tests/test_reference.py | 8 +- asdf/tests/test_schema.py | 18 +-- asdf/tests/test_stream.py | 16 +-- asdf/tests/test_yaml.py | 4 +- 21 files changed, 213 insertions(+), 201 deletions(-) diff --git a/asdf/asdf.py b/asdf/asdf.py index 7dd2b703c..c82762f58 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -725,6 +725,11 @@ def open(cls, fd, uri=None, mode='r', strict_extension_check=False, ignore_missing_extensions=False): + warnings.warn( + "The method AsdfFile.open has been deprecated and will be removed " + "in asdf-3.0. Use the top-level asdf.open function instead.", + AsdfDeprecationWarning) + return open_asdf( fd, uri=uri, mode=mode, validate_checksums=validate_checksums, diff --git a/asdf/commands/defragment.py b/asdf/commands/defragment.py index c41f59390..9ca2bb1d1 100644 --- a/asdf/commands/defragment.py +++ b/asdf/commands/defragment.py @@ -8,6 +8,7 @@ import os +import asdf from .main import Command from .. import AsdfFile @@ -65,7 +66,7 @@ def defragment(input, output=None, resolve_references=False, compress=None): compress : str, optional Compression to use. """ - with AsdfFile.open(input) as ff: + with asdf.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() diff --git a/asdf/commands/diff.py b/asdf/commands/diff.py index 6815b1b94..6828456a7 100644 --- a/asdf/commands/diff.py +++ b/asdf/commands/diff.py @@ -28,6 +28,7 @@ GREEN = '' RESET = '' +import asdf from .main import Command from .. import AsdfFile from .. import treeutil @@ -245,8 +246,8 @@ def compare_trees(diff_ctx, tree0, tree1, keys=[]): def diff(filenames, minimal, iostream=sys.stdout): """Top-level implementation of diff algorithm""" try: - with AsdfFile.open(filenames[0], _force_raw_types=True) as asdf0: - with AsdfFile.open(filenames[1], _force_raw_types=True) as asdf1: + with asdf.open(filenames[0], _force_raw_types=True) as asdf0: + with asdf.open(filenames[1], _force_raw_types=True) as asdf1: diff_ctx = DiffContext(asdf0, asdf1, iostream, minimal=minimal) compare_trees(diff_ctx, asdf0.tree, asdf1.tree) except ValueError as error: diff --git a/asdf/commands/exploded.py b/asdf/commands/exploded.py index 01e3c7538..31a5404ee 100644 --- a/asdf/commands/exploded.py +++ b/asdf/commands/exploded.py @@ -8,6 +8,7 @@ import os +import asdf from .main import Command from .. 
import AsdfFile @@ -65,7 +66,7 @@ def implode(input, output=None, resolve_references=False): if output is None: base, ext = os.path.splitext(input) output = base + '_all' + '.asdf' - with AsdfFile.open(input) as ff: + with asdf.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() @@ -115,5 +116,5 @@ def explode(input, output=None): if output is None: base, ext = os.path.splitext(input) output = base + '_exploded' + '.asdf' - with AsdfFile.open(input) as ff: + with asdf.open(input) as ff: ff.write_to(output, all_array_storage='external') diff --git a/asdf/commands/tests/test_defragment.py b/asdf/commands/tests/test_defragment.py index 753edde56..77eee448f 100644 --- a/asdf/commands/tests/test_defragment.py +++ b/asdf/commands/tests/test_defragment.py @@ -8,6 +8,7 @@ import numpy as np import pytest +import asdf from ... import AsdfFile from .. import main from ...tests.helpers import get_file_sizes, assert_tree_match @@ -41,7 +42,7 @@ def _test_defragment(tmpdir, codec): assert files['original.defragment.asdf'] < files['original.asdf'] - with AsdfFile.open(os.path.join(str(tmpdir), 'original.defragment.asdf')) as ff: + with asdf.open(os.path.join(str(tmpdir), 'original.defragment.asdf')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 2 diff --git a/asdf/commands/tests/test_to_yaml.py b/asdf/commands/tests/test_to_yaml.py index bb672fa67..c57597404 100644 --- a/asdf/commands/tests/test_to_yaml.py +++ b/asdf/commands/tests/test_to_yaml.py @@ -6,6 +6,7 @@ import numpy as np +import asdf from ... import AsdfFile from .. import main from ...tests.helpers import get_file_sizes, assert_tree_match @@ -35,6 +36,6 @@ def test_to_yaml(tmpdir): assert 'original.asdf' in files assert 'original.yaml' in files - with AsdfFile.open(os.path.join(str(tmpdir), 'original.yaml')) as ff: + with asdf.open(os.path.join(str(tmpdir), 'original.yaml')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 0 diff --git a/asdf/commands/to_yaml.py b/asdf/commands/to_yaml.py index 7f15dbbb5..4fe153aa7 100644 --- a/asdf/commands/to_yaml.py +++ b/asdf/commands/to_yaml.py @@ -8,6 +8,7 @@ import os +import asdf from .main import Command from .. 
import AsdfFile @@ -63,7 +64,7 @@ def to_yaml(input, output=None, resolve_references=False): if output is None: base, ext = os.path.splitext(input) output = base + '.yaml' - with AsdfFile.open(input) as ff: + with asdf.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() diff --git a/asdf/tags/core/tests/test_complex.py b/asdf/tags/core/tests/test_complex.py index 7c25718c1..04c1f31ef 100644 --- a/asdf/tags/core/tests/test_complex.py +++ b/asdf/tags/core/tests/test_complex.py @@ -25,7 +25,7 @@ def make_complex_asdf(string): def test_invalid_complex(invalid): with pytest.raises(asdf.ValidationError): - with asdf.AsdfFile.open(make_complex_asdf(invalid)): + with asdf.open(make_complex_asdf(invalid)): pass @@ -36,7 +36,7 @@ def test_invalid_complex(invalid): ]) def test_valid_complex(valid): - with asdf.AsdfFile.open(make_complex_asdf(valid)) as af: + with asdf.open(make_complex_asdf(valid)) as af: assert af.tree['a'] == complex(re.sub(r'[iI]$', r'j', valid)) @@ -46,7 +46,7 @@ def test_valid_complex(valid): ]) def test_valid_nan_complex(valid): - with asdf.AsdfFile.open(make_complex_asdf(valid)) as af: + with asdf.open(make_complex_asdf(valid)) as af: # Don't compare values since NANs are never equal pass diff --git a/asdf/tags/core/tests/test_ndarray.py b/asdf/tags/core/tests/test_ndarray.py index 3c2777ad6..ab49b8b57 100644 --- a/asdf/tags/core/tests/test_ndarray.py +++ b/asdf/tags/core/tests/test_ndarray.py @@ -148,7 +148,7 @@ def test_dont_load_data(): ff.write_to(buff) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: ff.run_hook('reserve_blocks') # repr and str shouldn't load data @@ -208,7 +208,7 @@ def test_copy_inline(): buff = helpers.yaml_to_asdf(yaml) - with asdf.AsdfFile.open(buff) as infile: + with asdf.open(buff) as infile: with asdf.AsdfFile() as f: f.tree['a'] = infile.tree['x0'] f.tree['b'] = f.tree['a'] @@ -284,7 +284,7 @@ def test_inline(): ff.write_to(buff) buff.seek(0) - with asdf.AsdfFile.open(buff, mode='rw') as ff: + with asdf.open(buff, mode='rw') as ff: helpers.assert_tree_match(tree, ff.tree) assert len(list(ff.blocks.internal_blocks)) == 0 buff = io.BytesIO() @@ -297,7 +297,7 @@ def test_inline_bare(): content = "arr: !core/ndarray-1.0.0 [[1, 2, 3, 4], [5, 6, 7, 8]]" buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert_array_equal(ff.tree['arr'], [[1, 2, 3, 4], [5, 6, 7, 8]]) @@ -331,7 +331,7 @@ def test_mask_arbitrary(): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) @@ -345,7 +345,7 @@ def test_mask_nan(): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) @@ -372,7 +372,7 @@ def test_inline_string(): content = "arr: !core/ndarray-1.0.0 ['a', 'b', 'c']" buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert_array_equal(ff.tree['arr']._make_array(), ['a', 'b', 'c']) @@ -387,7 +387,7 @@ def test_inline_structured(): buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert ff.tree['arr']['f1'].dtype.char == 'H' @@ -426,7 +426,7 @@ def test_unicode_to_list(tmpdir): ff.write_to(fd) fd.seek(0) - with 
asdf.AsdfFile.open(fd) as ff: + with asdf.open(fd) as ff: ff.resolve_and_inline() ff.write_to(io.BytesIO()) @@ -440,7 +440,7 @@ def test_inline_masked_array(tmpdir): f.set_array_storage(tree['test'], 'inline') f.write_to(testfile) - with asdf.AsdfFile.open(testfile) as f2: + with asdf.open(testfile) as f2: assert len(list(f2.blocks.internal_blocks)) == 0 assert_array_equal(f.tree['test'], f2.tree['test']) @@ -464,7 +464,7 @@ def test_masked_array_stay_open_bug(tmpdir): orig_open = p.open_files() for i in range(3): - with asdf.AsdfFile.open(tmppath) as f2: + with asdf.open(tmppath) as f2: np.sum(f2.tree['test']) assert len(p.open_files()) == len(orig_open) @@ -480,7 +480,7 @@ def test_masked_array_repr(tmpdir): asdf.AsdfFile(tree).write_to(tmppath) - with asdf.AsdfFile.open(tmppath) as ff: + with asdf.open(tmppath) as ff: assert 'masked array' in repr(ff.tree['masked']) @@ -493,15 +493,15 @@ def test_operations_on_ndarray_proxies(tmpdir): asdf.AsdfFile(tree).write_to(tmppath) - with asdf.AsdfFile.open(tmppath) as ff: + with asdf.open(tmppath) as ff: x = ff.tree['array'] * 2 assert_array_equal(x, np.arange(10) * 2) - with asdf.AsdfFile.open(tmppath) as ff: + with asdf.open(tmppath) as ff: x = -ff.tree['array'] assert_array_equal(x, -np.arange(10)) - with asdf.AsdfFile.open(tmppath, mode='rw') as ff: + with asdf.open(tmppath, mode='rw') as ff: ff.tree['array'][2] = 4 x = np.arange(10) x[2] = 4 @@ -518,7 +518,7 @@ def test_mask_datatype(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: pass @@ -533,7 +533,7 @@ def test_invalid_mask_datatype(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: pass @@ -546,7 +546,7 @@ def test_ndim_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -556,7 +556,7 @@ def test_ndim_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -567,7 +567,7 @@ def test_ndim_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -577,7 +577,7 @@ def test_ndim_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -587,7 +587,7 @@ def test_ndim_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -598,7 +598,7 @@ def test_ndim_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass @@ -611,7 +611,7 @@ def test_datatype_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) 
as ff: pass content = """ @@ -623,7 +623,7 @@ def test_datatype_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -634,7 +634,7 @@ def test_datatype_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -646,7 +646,7 @@ def test_datatype_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -662,7 +662,7 @@ def test_datatype_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass @@ -679,7 +679,7 @@ def test_structured_datatype_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -695,7 +695,7 @@ def test_structured_datatype_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -713,7 +713,7 @@ def test_structured_datatype_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -724,7 +724,7 @@ def test_structured_datatype_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -740,7 +740,7 @@ def test_structured_datatype_validation(tmpdir): buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ @@ -755,7 +755,7 @@ def test_structured_datatype_validation(tmpdir): """ buff = helpers.yaml_to_asdf(content) - with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: + with asdf.open(buff, extensions=CustomExtension()) as ff: pass @@ -776,7 +776,7 @@ def test_inline_shape_mismatch(): buff = helpers.yaml_to_asdf(content) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: pass diff --git a/asdf/tags/wcs/tests/test_wcs.py b/asdf/tags/wcs/tests/test_wcs.py index bc55f5920..d273993b0 100644 --- a/asdf/tags/wcs/tests/test_wcs.py +++ b/asdf/tags/wcs/tests/test_wcs.py @@ -141,10 +141,10 @@ def test_backwards_compat_galcen(): """ % (declination, right_ascension, galcen_distance, roll, z_sun) old_buff = helpers.yaml_to_asdf(old_frame_yaml) - old_asdf = AsdfFile.open(old_buff) + old_asdf = asdf.open(old_buff) old_frame = old_asdf.tree['frames'][0] new_buff = 
helpers.yaml_to_asdf(new_frame_yaml) - new_asdf = AsdfFile.open(new_buff) + new_asdf = asdf.open(new_buff) new_frame = new_asdf.tree['frames'][0] # Poor man's frame comparison since it's not implemented by astropy @@ -206,13 +206,13 @@ def test_backwards_compat_gcrs(): """ % (obsgeovel + obsgeoloc) old_buff = helpers.yaml_to_asdf(old_frame_yaml) - old_asdf = AsdfFile.open(old_buff) + old_asdf = asdf.open(old_buff) old_frame = old_asdf.tree['frames'][0] old_loc = old_frame.reference_frame.obsgeoloc old_vel = old_frame.reference_frame.obsgeovel new_buff = helpers.yaml_to_asdf(new_frame_yaml) - new_asdf = AsdfFile.open(new_buff) + new_asdf = asdf.open(new_buff) new_frame = new_asdf.tree['frames'][0] new_loc = new_frame.reference_frame.obsgeoloc new_vel = new_frame.reference_frame.obsgeovel diff --git a/asdf/tests/helpers.py b/asdf/tests/helpers.py index 1d695f0c8..e38c08bca 100644 --- a/asdf/tests/helpers.py +++ b/asdf/tests/helpers.py @@ -20,6 +20,7 @@ except ImportError: CartesianDifferential = None +import asdf from ..asdf import AsdfFile, get_asdf_library_info from ..block import Block from .httpserver import RangeHTTPServer @@ -187,7 +188,7 @@ def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) assert not buff.closed buff.seek(0) - with AsdfFile.open(buff, mode='rw', extensions=extensions) as ff: + with asdf.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert 'asdf_library' in ff.tree @@ -198,7 +199,7 @@ def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, buff.seek(0) ff = AsdfFile(extensions=extensions, **init_options) - content = AsdfFile._open_impl(ff, buff, _get_yaml_content=True) + content = AsdfFile._open_impl(ff, buff, mode='r', _get_yaml_content=True) buff.close() # We *never* want to get any raw python objects out assert b'!!python' not in content @@ -210,7 +211,7 @@ def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, # Then, test writing/reading to a real file ff = AsdfFile(tree, extensions=extensions, **init_options) ff.write_to(fname, **write_options) - with AsdfFile.open(fname, mode='rw', extensions=extensions) as ff: + with asdf.open(fname, mode='rw', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) @@ -221,7 +222,7 @@ def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) assert not buff.closed buff.seek(0) - with AsdfFile.open(buff, mode='rw', extensions=extensions) as ff: + with asdf.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) @@ -234,7 +235,7 @@ def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, try: ff = AsdfFile(tree, extensions=extensions, **init_options) ff.write_to(os.path.join(server.tmpdir, 'test.asdf'), **write_options) - with AsdfFile.open(server.url + 'test.asdf', mode='r', + with asdf.open(server.url + 'test.asdf', mode='r', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: @@ -246,7 +247,7 @@ def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, with io.BytesIO() as buff: AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) buff.seek(0) 
- ff = AsdfFile.open(buff, extensions=extensions, copy_arrays=True, lazy_load=False) + ff = asdf.open(buff, extensions=extensions, copy_arrays=True, lazy_load=False) # Ensure that all the blocks are loaded for block in ff.blocks._internal_blocks: assert isinstance(block, Block) @@ -258,7 +259,7 @@ def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, # Now repeat with copy_arrays=False and a real file to test mmap() AsdfFile(tree, extensions=extensions, **init_options).write_to(fname, **write_options) - with AsdfFile.open(fname, mode='rw', extensions=extensions, copy_arrays=False, + with asdf.open(fname, mode='rw', extensions=extensions, copy_arrays=False, lazy_load=False) as ff: for block in ff.blocks._internal_blocks: assert isinstance(block, Block) diff --git a/asdf/tests/schema_tester.py b/asdf/tests/schema_tester.py index 514ce576f..432733fef 100644 --- a/asdf/tests/schema_tester.py +++ b/asdf/tests/schema_tester.py @@ -165,7 +165,7 @@ def runtest(self): try: with pytest.warns(None) as w: import warnings - ff._open_impl(ff, buff) + ff._open_impl(ff, buff, mode='rw') # Do not tolerate any warnings that occur during schema validation assert len(w) == 0, helpers.display_warnings(w) except Exception: diff --git a/asdf/tests/test_asdftypes.py b/asdf/tests/test_asdftypes.py index 5dd3ba302..9fd6e11a6 100644 --- a/asdf/tests/test_asdftypes.py +++ b/asdf/tests/test_asdftypes.py @@ -70,16 +70,14 @@ def check(tag): """ buff = helpers.yaml_to_asdf(yaml) - with asdf.AsdfFile.open( - buff, extensions=FractionExtension()) as ff: + with asdf.open(buff, extensions=FractionExtension()) as ff: assert ff.tree['a'] == fractions.Fraction(2, 3) buff = io.BytesIO() ff.write_to(buff) buff = helpers.yaml_to_asdf(yaml) - with asdf.AsdfFile.open( - buff, extensions=FractionCallable()) as ff: + with asdf.open(buff, extensions=FractionCallable()) as ff: assert ff.tree['a'] == fractions.Fraction(2, 3) buff = io.BytesIO() @@ -95,7 +93,7 @@ def test_version_mismatch(): buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as warning: - with asdf.AsdfFile.open(buff, ignore_version_mismatch=False) as ff: + with asdf.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 1 @@ -106,7 +104,7 @@ def test_version_mismatch(): # Make sure warning is repeatable buff.seek(0) with pytest.warns(None) as warning: - with asdf.AsdfFile.open(buff, ignore_version_mismatch=False) as ff: + with asdf.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 1 @@ -117,7 +115,7 @@ def test_version_mismatch(): # Make sure the warning does not occur if it is being ignored (default) buff.seek(0) with pytest.warns(None) as warning: - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 0, helpers.display_warnings(warning) @@ -131,7 +129,7 @@ def test_version_mismatch(): buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as warning: - with asdf.AsdfFile.open(buff, ignore_version_mismatch=False) as ff: + with asdf.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 0 @@ -151,7 +149,7 @@ def test_version_mismatch_file(tmpdir): handle.write(buff.read()) with pytest.warns(None) as w: - with asdf.AsdfFile.open(testfile, ignore_version_mismatch=False) as ff: + with asdf.open(testfile, ignore_version_mismatch=False) as ff: assert ff._fname == "file://{}".format(testfile) assert 
isinstance(ff.tree['a'], complex) @@ -200,7 +198,7 @@ def url_mapping(self): """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as w: - data = asdf.AsdfFile.open( + data = asdf.open( buff, ignore_version_mismatch=False, extensions=CustomFlowExtension()) assert len(w) == 1, helpers.display_warnings(w) @@ -332,7 +330,7 @@ def test_undefined_tag(): """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as warning: - afile = asdf.AsdfFile.open(buff) + afile = asdf.open(buff) missing = afile.tree['undefined_data'] assert missing[0] == 5 @@ -351,7 +349,7 @@ def test_undefined_tag(): # Make sure no warning occurs if explicitly ignored buff.seek(0) with pytest.warns(None) as warning: - afile = asdf.AsdfFile.open(buff, ignore_unrecognized_tag=True) + afile = asdf.open(buff, ignore_unrecognized_tag=True) assert len(warning) == 0 @@ -409,7 +407,7 @@ def url_mapping(self): d: 3.14 """ new_buff = helpers.yaml_to_asdf(new_yaml) - new_data = asdf.AsdfFile.open(new_buff, extensions=CustomFlowExtension()) + new_data = asdf.open(new_buff, extensions=CustomFlowExtension()) assert type(new_data.tree['flow_thing']) == CustomFlow old_yaml = """ @@ -420,7 +418,7 @@ def url_mapping(self): """ old_buff = helpers.yaml_to_asdf(old_yaml) with pytest.warns(None) as warning: - asdf.AsdfFile.open(old_buff, extensions=CustomFlowExtension()) + asdf.open(old_buff, extensions=CustomFlowExtension()) assert len(warning) == 1, helpers.display_warnings(warning) # We expect this warning since it will not be possible to convert version @@ -542,11 +540,11 @@ def url_mapping(self): b: 3.14 """ new_buff = helpers.yaml_to_asdf(new_yaml) - new_data = asdf.AsdfFile.open(new_buff, extensions=CustomFlowExtension()) + new_data = asdf.open(new_buff, extensions=CustomFlowExtension()) assert type(new_data.tree['flow_thing']) == CustomFlow old_buff = helpers.yaml_to_asdf(old_yaml) - old_data = asdf.AsdfFile.open(old_buff, extensions=CustomFlowExtension()) + old_data = asdf.open(old_buff, extensions=CustomFlowExtension()) assert type(old_data.tree['flow_thing']) == CustomFlow def test_unsupported_version_warning(): @@ -586,7 +584,7 @@ def url_mapping(self): buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as _warnings: - data = asdf.AsdfFile.open(buff, extensions=CustomFlowExtension()) + data = asdf.open(buff, extensions=CustomFlowExtension()) assert len(_warnings) == 1 assert str(_warnings[0].message) == ( diff --git a/asdf/tests/test_compression.py b/asdf/tests/test_compression.py index 47085ab53..78d3b917b 100644 --- a/asdf/tests/test_compression.py +++ b/asdf/tests/test_compression.py @@ -43,10 +43,10 @@ def _roundtrip(tmpdir, tree, compression=None, ff.set_array_compression(tree['science_data'], compression) ff.write_to(tmpfile, **write_options) - with asdf.AsdfFile.open(tmpfile, mode="rw") as ff: + with asdf.open(tmpfile, mode="rw") as ff: ff.update(**write_options) - with asdf.AsdfFile.open(tmpfile, **read_options) as ff: + with asdf.open(tmpfile, **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) # Also test saving to a buffer @@ -57,7 +57,7 @@ def _roundtrip(tmpdir, tree, compression=None, ff.write_to(buff, **write_options) buff.seek(0) - with asdf.AsdfFile.open(buff, **read_options) as ff: + with asdf.open(buff, **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) # Test saving to a non-seekable buffer @@ -68,7 +68,7 @@ def _roundtrip(tmpdir, tree, compression=None, ff.write_to(generic_io.OutputStream(buff), **write_options) buff.seek(0) - with 
asdf.AsdfFile.open(generic_io.InputStream(buff), **read_options) as ff: + with asdf.open(generic_io.InputStream(buff), **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) return ff @@ -129,11 +129,11 @@ def test_recompression(tmpdir): afile = asdf.AsdfFile(tree) afile.write_to(tmpfile, all_array_compression='zlib') afile.close() - afile = asdf.AsdfFile.open(tmpfile) + afile = asdf.open(tmpfile) tmpfile = os.path.join(str(tmpdir), 'test2.asdf') afile.write_to(tmpfile, all_array_compression='bzp2') afile.close() - afile = asdf.AsdfFile.open(tmpfile) + afile = asdf.open(tmpfile) helpers.assert_tree_match(tree, afile.tree) afile.close() @@ -144,11 +144,11 @@ def test_input(tmpdir): afile = asdf.AsdfFile(tree) afile.write_to(tmpfile, all_array_compression='zlib') afile.close() - afile = asdf.AsdfFile.open(tmpfile) + afile = asdf.open(tmpfile) tmpfile = os.path.join(str(tmpdir), 'test2.asdf') afile.write_to(tmpfile) afile.close() - afile = asdf.AsdfFile.open(tmpfile) + afile = asdf.open(tmpfile) helpers.assert_tree_match(tree, afile.tree) assert afile.get_array_compression(afile.tree['science_data']) == 'zlib' afile.close() @@ -163,15 +163,15 @@ def test_none(tmpdir): afile.write_to(tmpfile1) tmpfile2 = os.path.join(str(tmpdir), 'test2.asdf') - with asdf.AsdfFile.open(tmpfile1) as afile: + with asdf.open(tmpfile1) as afile: assert afile.get_array_compression(afile.tree['science_data']) is None afile.write_to(tmpfile2, all_array_compression='zlib') assert afile.get_array_compression(afile.tree['science_data']) == 'zlib' - with asdf.AsdfFile.open(tmpfile2) as afile: + with asdf.open(tmpfile2) as afile: afile.write_to(tmpfile1, all_array_compression=None) - with asdf.AsdfFile.open(tmpfile1) as afile: + with asdf.open(tmpfile1) as afile: helpers.assert_tree_match(tree, afile.tree) assert afile.get_array_compression(afile.tree['science_data']) is None diff --git a/asdf/tests/test_fits_embed.py b/asdf/tests/test_fits_embed.py index 15944fcbe..81e21180a 100644 --- a/asdf/tests/test_fits_embed.py +++ b/asdf/tests/test_fits_embed.py @@ -100,7 +100,7 @@ def test_embed_asdf_in_fits_file(tmpdir, backwards_compat): ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to(asdf_testfile) - with asdf.AsdfFile.open(asdf_testfile) as ff: + with asdf.open(asdf_testfile) as ff: assert_tree_match(tree, ff.tree) @@ -125,7 +125,7 @@ def test_embed_asdf_in_fits_file_anonymous_extensions(tmpdir): ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to(os.path.join(str(tmpdir), 'test.asdf')) - with asdf.AsdfFile.open(os.path.join(str(tmpdir), 'test.asdf')) as ff: + with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff: assert_tree_match(asdf_in_fits.tree, ff.tree) @@ -213,12 +213,12 @@ def test_create_in_tree_first(tmpdir): with asdf.AsdfFile(tree) as ff: ff.write_to(os.path.join(str(tmpdir), 'plain.asdf')) - with asdf.AsdfFile.open(os.path.join(str(tmpdir), 'plain.asdf')) as ff: + with asdf.open(os.path.join(str(tmpdir), 'plain.asdf')) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=np.float)) # This tests the changes that allow FITS files with ASDF extensions to be - # opened directly by the top-level AsdfFile.open API + # opened directly by the top-level asdf.open API with asdf_open(tmpfile) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=np.float)) @@ -289,13 +289,13 @@ def test_open_gzipped(): # Opening as an HDU should work with fits.open(testfile) as ff: - with asdf.AsdfFile.open(ff) as af: + with asdf.open(ff) as af: assert 
af.tree['stuff'].shape == (20, 20) with fits_embed.AsdfInFits.open(testfile) as af: assert af.tree['stuff'].shape == (20, 20) - with asdf.AsdfFile.open(testfile) as af: + with asdf.open(testfile) as af: assert af.tree['stuff'].shape == (20, 20) def test_bad_input(tmpdir): @@ -315,7 +315,7 @@ def test_version_mismatch_file(): testfile = str(get_test_data_path('version_mismatch.fits')) with pytest.warns(None) as w: - with asdf.AsdfFile.open(testfile, + with asdf.open(testfile, ignore_version_mismatch=False) as fits_handle: assert fits_handle.tree['a'] == complex(0j) # This is the warning that we expect from opening the FITS file @@ -326,7 +326,7 @@ def test_version_mismatch_file(): # Make sure warning does not occur when warning is ignored (default) with pytest.warns(None) as w: - with asdf.AsdfFile.open(testfile) as fits_handle: + with asdf.open(testfile) as fits_handle: assert fits_handle.tree['a'] == complex(0j) assert len(w) == 0, display_warnings(w) @@ -359,7 +359,7 @@ def test_serialize_table(tmpdir): with fits_embed.AsdfInFits(hdulist, tree) as ff: ff.write_to(tmpfile) - with asdf.AsdfFile.open(tmpfile) as ff: + with asdf.open(tmpfile) as ff: data = ff.tree['my_table'] assert data._source.startswith('fits:') @@ -367,7 +367,7 @@ def test_extension_check(): testfile = get_test_data_path('extension_check.fits') with pytest.warns(None) as warnings: - with asdf.AsdfFile.open(testfile) as ff: + with asdf.open(testfile) as ff: pass assert len(warnings) == 1, display_warnings(warnings) @@ -376,13 +376,13 @@ def test_extension_check(): # Make sure that suppressing the warning works as well with pytest.warns(None) as warnings: - with asdf.AsdfFile.open(testfile, ignore_missing_extensions=True) as ff: + with asdf.open(testfile, ignore_missing_extensions=True) as ff: pass assert len(warnings) == 0, display_warnings(warnings) with pytest.raises(RuntimeError): - with asdf.AsdfFile.open(testfile, strict_extension_check=True) as ff: + with asdf.open(testfile, strict_extension_check=True) as ff: pass def test_verify_with_astropy(tmpdir): diff --git a/asdf/tests/test_generic_io.py b/asdf/tests/test_generic_io.py index a4c034669..3e51f1b5c 100644 --- a/asdf/tests/test_generic_io.py +++ b/asdf/tests/test_generic_io.py @@ -41,7 +41,7 @@ def _roundtrip(tree, get_write_fd, get_read_fd, fd._fd.close() with get_read_fd() as fd: - ff = asdf.AsdfFile.open(fd, **read_options) + ff = asdf.open(fd, **read_options) helpers.assert_tree_match(tree, ff.tree) return ff @@ -76,7 +76,8 @@ def get_write_fd(): return f def get_read_fd(): - f = generic_io.get_file(path, mode='r') + # Must open with mode=rw in order to get memmapped data + f = generic_io.get_file(path, mode='rw') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) # This is to check for a "feature" in Python 3.x that reading zero @@ -101,7 +102,8 @@ def get_write_fd(): return f def get_read_fd(): - f = generic_io.get_file(open(path, 'rb'), mode='r', close=True) + # Must open with mode=rw in order to get memmapped data + f = generic_io.get_file(open(path, 'r+b'), mode='rw', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f @@ -340,7 +342,7 @@ def get_read_fd(): asdf.AsdfFile(tree).write_to(fd, all_array_storage='external') with get_read_fd() as fd: - with asdf.AsdfFile.open(fd) as ff: + with asdf.open(fd) as ff: with pytest.raises(ValueError): helpers.assert_tree_match(tree, ff.tree) @@ -386,7 +388,7 @@ def test_exploded_stream_read(tmpdir, small_tree): with open(path, 
'rb') as fd: # This should work, so we can get the tree content x = generic_io.InputStream(fd, 'r') - with asdf.AsdfFile.open(x) as ff: + with asdf.open(x) as ff: # It's only when trying to access external data that an error occurs with pytest.raises(ValueError): ff.tree['science_data'][:] @@ -401,7 +403,7 @@ def test_unicode_open(tmpdir, small_tree): with io.open(path, 'rt', encoding="utf-8") as fd: with pytest.raises(ValueError): - with asdf.AsdfFile.open(fd): + with asdf.open(fd): pass diff --git a/asdf/tests/test_low_level.py b/asdf/tests/test_low_level.py index 13bfe6ab4..2a3144e83 100644 --- a/asdf/tests/test_low_level.py +++ b/asdf/tests/test_low_level.py @@ -34,13 +34,13 @@ def test_no_yaml_end_marker(tmpdir): buff = io.BytesIO(content) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff): + with asdf.open(buff): pass buff.seek(0) fd = generic_io.InputStream(buff, 'r') with pytest.raises(ValueError): - with asdf.AsdfFile.open(fd): + with asdf.open(fd): pass with open(path, 'wb') as fd: @@ -48,7 +48,7 @@ def test_no_yaml_end_marker(tmpdir): with open(path, 'rb') as fd: with pytest.raises(ValueError): - with asdf.AsdfFile.open(fd): + with asdf.open(fd): pass @@ -63,19 +63,19 @@ def test_no_final_newline(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.tree) == 2 buff.seek(0) fd = generic_io.InputStream(buff, 'r') - with asdf.AsdfFile.open(fd) as ff: + with asdf.open(fd) as ff: assert len(ff.tree) == 2 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: - with asdf.AsdfFile.open(fd) as ff: + with asdf.open(fd) as ff: assert len(ff.tree) == 2 @@ -86,14 +86,14 @@ def test_no_asdf_header(tmpdir): buff = io.BytesIO(content) with pytest.raises(ValueError): - asdf.AsdfFile.open(buff) + asdf.open(buff) with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with pytest.raises(ValueError): - asdf.AsdfFile.open(fd) + asdf.open(fd) def test_no_asdf_blocks(tmpdir): @@ -109,19 +109,19 @@ def test_no_asdf_blocks(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 0 buff.seek(0) fd = generic_io.InputStream(buff, 'r') - with asdf.AsdfFile.open(fd) as ff: + with asdf.open(fd) as ff: assert len(ff.blocks) == 0 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: - with asdf.AsdfFile.open(fd) as ff: + with asdf.open(fd) as ff: assert len(ff.blocks) == 0 @@ -135,7 +135,7 @@ def test_invalid_source(small_tree): ff.write_to(buff, all_array_storage='internal') buff.seek(0) - with asdf.AsdfFile.open(buff) as ff2: + with asdf.open(buff) as ff2: ff2.blocks.get_block(0) with pytest.raises(ValueError): @@ -158,14 +158,14 @@ def test_empty_file(): buff = io.BytesIO(b"#ASDF 1.0.0\n") buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 buff = io.BytesIO(b"#ASDF 1.0.0\n#ASDF_STANDARD 1.0.0") buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 @@ -175,14 +175,14 @@ def test_not_asdf_file(): buff.seek(0) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff): + with asdf.open(buff): pass buff = io.BytesIO(b"SIMPLE\n") buff.seek(0) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff): + with asdf.open(buff): pass @@ -191,7 +191,7 @@ 
def test_junk_file(): buff.seek(0) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff): + with asdf.open(buff): pass @@ -204,7 +204,7 @@ def test_block_mismatch(): buff.seek(0) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff): + with asdf.open(buff): pass @@ -216,7 +216,7 @@ def test_block_header_too_small(): buff.seek(0) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff): + with asdf.open(buff): pass @@ -271,7 +271,7 @@ def test_transfer_array_sources(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.write_to(os.path.join(tmpdir, "test2.asdf")) # write_to should have no effect on getting the original data @@ -288,13 +288,13 @@ def test_write_to_same(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) - with asdf.AsdfFile.open( + with asdf.open( os.path.join(tmpdir, "test.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.tree['extra'] = [0] * 1000 ff.write_to(os.path.join(tmpdir, "test2.asdf")) - with asdf.AsdfFile.open( + with asdf.open( os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) @@ -313,7 +313,7 @@ def test_pad_blocks(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True) - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['my_array'], my_array) assert_array_equal(ff.tree['my_array2'], my_array2) @@ -337,13 +337,13 @@ def test_update_expand_tree(tmpdir): ff.set_array_storage(tree['arrays'][2], 'inline') assert len(list(ff.blocks.inline_blocks)) == 1 ff.write_to(testpath, pad_blocks=True) - with asdf.AsdfFile.open(testpath, mode='rw') as ff: + with asdf.open(testpath, mode='rw') as ff: assert_array_equal(ff.tree['arrays'][0], my_array) orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 6000 ff.update() - with asdf.AsdfFile.open(testpath) as ff: + with asdf.open(testpath) as ff: assert orig_offset <= ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' assert_array_equal(ff.tree['arrays'][0], my_array) @@ -353,12 +353,12 @@ def test_update_expand_tree(tmpdir): ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['arrays'][2], 'inline') ff.write_to(os.path.join(tmpdir, "test2.asdf"), pad_blocks=True) - with asdf.AsdfFile.open(os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: + with asdf.open(os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 2 ff.update() - with asdf.AsdfFile.open(os.path.join(tmpdir, "test2.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test2.asdf")) as ff: assert orig_offset == ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' assert_array_equal(ff.tree['arrays'][0], my_array) @@ -387,13 +387,13 @@ def test_update_delete_first_array(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][0] ff.update() assert os.stat(path).st_size <= original_size - with 
asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2]) @@ -410,13 +410,13 @@ def test_update_delete_last_array(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][-1] ff.update() assert os.stat(path).st_size <= original_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) @@ -433,14 +433,14 @@ def test_update_delete_middle_array(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][1] ff.update() assert len(ff.blocks._internal_blocks) == 2 assert os.stat(path).st_size <= original_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert len(ff.tree['arrays']) == 2 assert ff.tree['arrays'][0]._source == 0 assert ff.tree['arrays'][1]._source == 1 @@ -460,13 +460,13 @@ def test_update_replace_first_array(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][0] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], np.arange(32)) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) @@ -484,13 +484,13 @@ def test_update_replace_last_array(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][2] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], np.arange(32)) @@ -508,13 +508,13 @@ def test_update_replace_middle_array(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][1] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], np.arange(32)) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) @@ -532,11 +532,11 @@ def test_update_add_array(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, 
"test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(32)) ff.update() - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) @@ -555,14 +555,14 @@ def test_update_add_array_at_end(tmpdir): original_size = os.stat(path).st_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(2048)) ff.update() assert len(ff.blocks) == 4 assert os.stat(path).st_size >= original_size - with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: + with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) @@ -582,11 +582,11 @@ def test_update_replace_all_arrays(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(testpath, pad_blocks=True) - with asdf.AsdfFile.open(testpath, mode='rw') as ff: + with asdf.open(testpath, mode='rw') as ff: ff.tree['my_array'] = np.ones((64, 64)) * 2 ff.update() - with asdf.AsdfFile.open(testpath) as ff: + with asdf.open(testpath) as ff: assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2) @@ -603,12 +603,12 @@ def test_update_array_in_place(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(testpath, pad_blocks=True) - with asdf.AsdfFile.open(testpath, mode='rw') as ff: + with asdf.open(testpath, mode='rw') as ff: array = np.asarray(ff.tree['my_array']) array *= 2 ff.update() - with asdf.AsdfFile.open(testpath) as ff: + with asdf.open(testpath) as ff: assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2) @@ -628,7 +628,7 @@ def test_init_from_asdffile(tmpdir): ff.write_to(os.path.join(tmpdir, 'test.asdf')) - with asdf.AsdfFile().open(os.path.join(tmpdir, 'test.asdf')) as ff: + with asdf.open(os.path.join(tmpdir, 'test.asdf')) as ff: ff2 = asdf.AsdfFile(ff) assert not ff.tree['my_array'] is ff2.tree['my_array'] assert_array_equal(ff.tree['my_array'], ff2.tree['my_array']) @@ -647,7 +647,7 @@ def test_update_exceptions(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(path) - with asdf.AsdfFile().open(path) as ff: + with asdf.open(path) as ff: with pytest.raises(IOError): ff.update() @@ -656,7 +656,7 @@ def test_update_exceptions(tmpdir): ff.write_to(buff) buff.seek(0) - with asdf.AsdfFile.open(buff, mode='rw') as ff: + with asdf.open(buff, mode='rw') as ff: ff.update() with pytest.raises(ValueError): @@ -673,7 +673,7 @@ def test_get_data_from_closed_file(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(path) - with asdf.AsdfFile().open(path) as ff: + with asdf.open(path) as ff: pass with pytest.raises(IOError): @@ -695,12 +695,12 @@ def test_seek_until_on_block_boundary(): constants.BLOCK_MAGIC + b'\0\x30' + b'\0' * 50) buff = io.BytesIO(content) - ff = asdf.AsdfFile.open(buff) + ff = asdf.open(buff) assert len(ff.blocks) == 1 buff.seek(0) fd = generic_io.InputStream(buff, 'r') - ff = asdf.AsdfFile.open(fd) + ff = asdf.open(fd) assert len(ff.blocks) == 1 @@ -713,7 +713,7 @@ def test_checksum(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(path) - with asdf.AsdfFile.open(path, validate_checksums=True) as ff: + with asdf.open(path, 
validate_checksums=True) as ff: assert type(ff.blocks._internal_blocks[0].checksum) == bytes assert ff.blocks._internal_blocks[0].checksum == \ b'\xcaM\\\xb8t_L|\x00\n+\x01\xf1\xcfP1' @@ -729,13 +729,13 @@ def test_checksum_update(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(path) - with asdf.AsdfFile.open(path, mode='rw') as ff: + with asdf.open(path, mode='rw') as ff: ff.tree['my_array'][7, 7] = 0.0 # update() should update the checksum, even if the data itself # is memmapped and isn't expressly re-written. ff.update() - with asdf.AsdfFile.open(path, validate_checksums=True) as ff: + with asdf.open(path, validate_checksums=True) as ff: assert ff.blocks._internal_blocks[0].checksum == \ b'T\xaf~[\x90\x8a\x88^\xc2B\x96D,N\xadL' @@ -746,7 +746,7 @@ def test_atomic_write(tmpdir, small_tree): ff = asdf.AsdfFile(small_tree) ff.write_to(tmpfile) - with asdf.AsdfFile.open(tmpfile) as ff: + with asdf.open(tmpfile) as ff: ff.write_to(tmpfile) @@ -790,7 +790,7 @@ def test_copy(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, 'test.asdf')) - with asdf.AsdfFile.open(os.path.join(tmpdir, 'test.asdf')) as ff: + with asdf.open(os.path.join(tmpdir, 'test.asdf')) as ff: ff2 = ff.copy() ff2.tree['my_array'] *= 2 ff2.tree['foo']['bar'] = 'boo' @@ -812,7 +812,7 @@ def test_deferred_block_loading(small_tree): ff.write_to(buff, include_block_index=False, all_array_storage='internal') buff.seek(0) - with asdf.AsdfFile.open(buff) as ff2: + with asdf.open(buff) as ff2: assert len([x for x in ff2.blocks.blocks if isinstance(x, block.Block)]) == 1 x = ff2.tree['science_data'] * 2 x = ff2.tree['not_shared'] * 2 @@ -837,7 +837,7 @@ def test_block_index(): ff.write_to(buff) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff2: + with asdf.open(buff) as ff2: assert isinstance(ff2.blocks._internal_blocks[0], block.Block) assert len(ff2.blocks._internal_blocks) == 100 for i in range(2, 99): @@ -882,7 +882,7 @@ def test_large_block_index(): ff.write_to(buff, all_array_storage='internal') buff.seek(0) - with asdf.AsdfFile.open(buff) as ff2: + with asdf.open(buff) as ff2: assert isinstance(ff2.blocks._internal_blocks[0], block.Block) assert len(ff2.blocks._internal_blocks) == narrays @@ -925,7 +925,7 @@ def test_junk_after_index(): # This has junk after the block index, so it # should fall back to the skip method, which # only loads the first block. 
- with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 @@ -946,7 +946,7 @@ def test_short_file_find_block_index(): buff.write(b'0' * (io.DEFAULT_BUFFER_SIZE * 4)) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 @@ -971,7 +971,7 @@ def test_invalid_block_index_values(): ff.blocks.write_block_index(buff, ff) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 @@ -995,7 +995,7 @@ def test_invalid_last_block_index(): ff.blocks.write_block_index(buff, ff) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 @@ -1018,7 +1018,7 @@ def test_unordered_block_index(): ff.blocks.write_block_index(buff, ff) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 @@ -1042,7 +1042,7 @@ def test_invalid_block_index_first_block_value(): ff.blocks.write_block_index(buff, ff) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 @@ -1069,7 +1069,7 @@ def test_dots_but_no_block_index(): buff.write(b'...\n') buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 @@ -1084,7 +1084,7 @@ def test_open_no_memmap(tmpdir): ff.write_to(tmpfile) # Test that by default we use memmapped arrays when possible - with asdf.AsdfFile.open(tmpfile) as af: + with asdf.open(tmpfile) as af: array = af.tree['array'] # Make sure to access the block so that it gets loaded x = array[0] @@ -1092,7 +1092,7 @@ def test_open_no_memmap(tmpdir): assert isinstance(array.block._data, np.memmap) # Test that if we ask for copy, we do not get memmapped arrays - with asdf.AsdfFile.open(tmpfile, copy_arrays=True) as af: + with asdf.open(tmpfile, copy_arrays=True) as af: array = af.tree['array'] x = array[0] assert array.block._memmapped == False @@ -1110,7 +1110,7 @@ def test_invalid_version(tmpdir): ...""" buff = io.BytesIO(content) with pytest.raises(ValueError): - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: pass @@ -1122,7 +1122,7 @@ def test_valid_version(tmpdir): foo : bar ...""" buff = io.BytesIO(content) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: version = ff.file_format_version assert version.major == 1 @@ -1191,7 +1191,7 @@ def test_access_tree_outside_handler(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(str(tempname)) - with asdf.AsdfFile.open(tempname) as newf: + with asdf.open(tempname) as newf: pass # Accessing array data outside of handler should fail @@ -1209,7 +1209,7 @@ def test_context_handler_resolve_and_inline(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(str(tempname)) - with asdf.AsdfFile.open(tempname) as newf: + with asdf.open(tempname) as newf: newf.resolve_and_inline() with pytest.raises(OSError): diff --git a/asdf/tests/test_reference.py b/asdf/tests/test_reference.py index 74f70657f..43b7d0f39 100644 --- a/asdf/tests/test_reference.py +++ b/asdf/tests/test_reference.py @@ -114,10 +114,10 @@ def do_asserts(ff): internal_path = os.path.join(str(tmpdir), 'main.asdf') ff.write_to(internal_path) - with asdf.AsdfFile.open(internal_path) as ff: + with asdf.open(internal_path) as ff: do_asserts(ff) - with asdf.AsdfFile.open(internal_path) as ff: + with asdf.open(internal_path) as ff: assert len(ff._external_asdf_by_uri) == 0 ff.resolve_references() assert len(ff._external_asdf_by_uri) == 2 @@ -214,14 +214,14 
@@ def test_make_reference(tmpdir): ext = asdf.AsdfFile(exttree) ext.write_to(external_path) - with asdf.AsdfFile.open(external_path) as ext: + with asdf.open(external_path) as ext: ff = asdf.AsdfFile() ff.tree['ref'] = ext.make_reference(['f~o~o/', 'a']) assert_array_equal(ff.tree['ref'], ext.tree['f~o~o/']['a']) ff.write_to(os.path.join(str(tmpdir), 'source.asdf')) - with asdf.AsdfFile.open(os.path.join(str(tmpdir), 'source.asdf')) as ff: + with asdf.open(os.path.join(str(tmpdir), 'source.asdf')) as ff: assert ff.tree['ref']._uri == 'external.asdf#f~0o~0o~1/a' diff --git a/asdf/tests/test_schema.py b/asdf/tests/test_schema.py index 81b3b2c28..40cab4b84 100644 --- a/asdf/tests/test_schema.py +++ b/asdf/tests/test_schema.py @@ -75,7 +75,7 @@ def test_tagging_scalars(): from astropy import units as u buff = helpers.yaml_to_asdf(yaml) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert isinstance(ff.tree['unit'], u.UnitBase) assert not isinstance(ff.tree['not_unit'], u.UnitBase) assert isinstance(ff.tree['not_unit'], str) @@ -287,13 +287,13 @@ def types(self): # providing an extension, our custom type will not be recognized and will # simply be converted to a raw type. with pytest.warns(None) as warning: - with asdf.AsdfFile.open(buff): + with asdf.open(buff): pass assert len(warning) == 1 buff.seek(0) with pytest.raises(ValidationError): - with asdf.AsdfFile.open(buff, extensions=[CustomTypeExtension()]): + with asdf.open(buff, extensions=[CustomTypeExtension()]): pass # Make sure tags get validated inside of other tags that know @@ -306,7 +306,7 @@ def types(self): """ buff = helpers.yaml_to_asdf(yaml) with pytest.raises(ValidationError): - with asdf.AsdfFile.open(buff, extensions=[CustomTypeExtension()]): + with asdf.open(buff, extensions=[CustomTypeExtension()]): pass @@ -377,13 +377,13 @@ def types(self): b: {} """ buff = helpers.yaml_to_asdf(yaml) - with asdf.AsdfFile.open(buff, extensions=[DefaultTypeExtension()]) as ff: + with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: assert 'a' in ff.tree['custom'] assert ff.tree['custom']['a'] == 42 assert ff.tree['custom']['b']['c'] == 82 buff.seek(0) - with asdf.AsdfFile.open(buff, extensions=[DefaultTypeExtension()], + with asdf.open(buff, extensions=[DefaultTypeExtension()], do_not_fill_defaults=True) as ff: assert 'a' not in ff.tree['custom'] assert 'c' not in ff.tree['custom']['b'] @@ -412,7 +412,7 @@ def types(self): """ buff = helpers.yaml_to_asdf(yaml) - with asdf.AsdfFile.open(buff, extensions=[DefaultTypeExtension()]) as ff: + with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: custom = ff.tree['custom'] assert custom['name'] == "Something" assert_array_equal(custom['things'], [1, 2, 3]) @@ -452,7 +452,7 @@ def types(self): """ buff = helpers.yaml_to_asdf(yaml) - with asdf.AsdfFile.open(buff, extensions=ForeignTypeExtension()) as ff: + with asdf.open(buff, extensions=ForeignTypeExtension()) as ff: a = ff.tree['custom']['a'] b = ff.tree['custom']['b'] assert a['name'] == 'Something' @@ -623,7 +623,7 @@ def types(self): """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as w: - with asdf.AsdfFile.open(buff, extensions=[DefaultTypeExtension()]) as ff: + with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: assert ff.tree['custom']['b']['foo'] == 42 assert len(w) == 1 diff --git a/asdf/tests/test_stream.py b/asdf/tests/test_stream.py index f3cdf82b4..2c0eabecb 100644 --- a/asdf/tests/test_stream.py +++ b/asdf/tests/test_stream.py @@ -29,7 +29,7 @@ def test_stream(): 
buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (100, 6, 2) for i, row in enumerate(ff.tree['stream']): @@ -50,7 +50,7 @@ def test_stream_write_nothing(): buff.seek(0) - with asdf.AsdfFile().open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (0, 6, 2) @@ -72,7 +72,7 @@ def test_stream_twice(): buff.seek(0) - ff = asdf.AsdfFile().open(buff) + ff = asdf.open(buff) assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (100, 6, 2) assert ff.tree['stream2'].shape == (50, 12, 2) @@ -96,7 +96,7 @@ def test_stream_with_nonstream(): buff.seek(0) - with asdf.AsdfFile().open(buff) as ff: + with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) @@ -123,7 +123,7 @@ def test_stream_real_file(tmpdir): for i in range(100): fd.write(np.array([i] * 12, np.float64).tostring()) - with asdf.AsdfFile().open(path) as ff: + with asdf.open(path) as ff: assert len(ff.blocks) == 1 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) @@ -148,7 +148,7 @@ def test_stream_to_stream(): buff.seek(0) - with asdf.AsdfFile().open(generic_io.InputStream(buff, 'r')) as ff: + with asdf.open(generic_io.InputStream(buff, 'r')) as ff: assert len(ff.blocks) == 2 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) @@ -168,7 +168,7 @@ def test_array_to_stream(tmpdir): buff.write(np.array([5, 6, 7, 8], np.int64).tostring()) buff.seek(0) - ff = asdf.AsdfFile().open(generic_io.InputStream(buff)) + ff = asdf.open(generic_io.InputStream(buff)) assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8]) buff.seek(0) ff2 = asdf.AsdfFile(ff) @@ -181,7 +181,7 @@ def test_array_to_stream(tmpdir): ff.write_to(fd) fd.write(np.array([5, 6, 7, 8], np.int64).tostring()) - with asdf.AsdfFile().open(os.path.join(str(tmpdir), 'test.asdf')) as ff: + with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff: assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8]) ff2 = asdf.AsdfFile(ff) ff2.write_to(buff) diff --git a/asdf/tests/test_yaml.py b/asdf/tests/test_yaml.py index 8ce8d9158..a960a39f4 100644 --- a/asdf/tests/test_yaml.py +++ b/asdf/tests/test_yaml.py @@ -210,7 +210,7 @@ def test_explicit_tags(): # Check that fully qualified explicit tags work buff = helpers.yaml_to_asdf(yaml, yaml_headers=False) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert all(ff.tree['foo'] == [1, 2, 3]) @@ -249,7 +249,7 @@ def test_yaml_nan_inf(): ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) - with asdf.AsdfFile.open(buff) as ff: + with asdf.open(buff) as ff: assert np.isnan(ff.tree['a']) assert np.isinf(ff.tree['b']) assert np.isinf(ff.tree['c']) From 478b5409e149c52c631f6bd8f22483bd7a6084f2 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Fri, 26 Oct 2018 15:46:25 -0400 Subject: [PATCH 06/19] Fix test_update_exceptions test --- asdf/tests/test_low_level.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asdf/tests/test_low_level.py b/asdf/tests/test_low_level.py index 2a3144e83..077674d79 100644 --- a/asdf/tests/test_low_level.py +++ b/asdf/tests/test_low_level.py @@ -647,7 +647,7 @@ def test_update_exceptions(tmpdir): ff = asdf.AsdfFile(tree) ff.write_to(path) - with asdf.open(path) as 
ff: + with asdf.open(path, mode='r', copy_arrays=True) as ff: with pytest.raises(IOError): ff.update() From c9b0a82fa673fd6c35a64c6adf9ee8f50e68908a Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 10:22:31 -0400 Subject: [PATCH 07/19] Make sure to account for https schemes --- asdf/asdf.py | 2 +- asdf/generic_io.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/asdf/asdf.py b/asdf/asdf.py index c82762f58..c3bd01b38 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -1231,7 +1231,7 @@ def _check_and_set_mode(fileobj, asdf_mode): if asdf_mode is None: if isinstance(fileobj, str): parsed = generic_io.urlparse.urlparse(fileobj) - if parsed.scheme == 'http': + if parsed.scheme in ['http', 'https']: return 'r' return 'rw' if isinstance(fileobj, io.IOBase): diff --git a/asdf/generic_io.py b/asdf/generic_io.py index d56c85d9b..6eab89553 100644 --- a/asdf/generic_io.py +++ b/asdf/generic_io.py @@ -1173,7 +1173,7 @@ def get_file(init, mode='r', uri=None, close=False): elif isinstance(init, str): parsed = urlparse.urlparse(init) - if parsed.scheme == 'http': + if parsed.scheme in ['http', 'https']: if 'w' in mode: raise ValueError( "HTTP connections can not be opened for writing") From 82fcf387c362f57100bb5765fb8453a4f588d61e Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 11:13:34 -0400 Subject: [PATCH 08/19] Remove redundant asdf_open function from top-level namespace --- asdf/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/asdf/__init__.py b/asdf/__init__.py index 990758acb..f6daae3a1 100644 --- a/asdf/__init__.py +++ b/asdf/__init__.py @@ -44,3 +44,5 @@ from jsonschema import ValidationError open = open_asdf +# Avoid redundancy/confusion in the top-level namespace +del open_asdf From f972e613d80fc740aa34f93dab5681f64e9d950d Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 11:34:16 -0400 Subject: [PATCH 09/19] Enforce readonly arrays under certain circumstances --- asdf/asdf.py | 12 +++++++----- asdf/block.py | 14 +++++++++++--- asdf/fits_embed.py | 4 ++++ asdf/tags/core/ndarray.py | 2 ++ 4 files changed, 24 insertions(+), 8 deletions(-) diff --git a/asdf/asdf.py b/asdf/asdf.py index c3bd01b38..d2eef98d1 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -51,7 +51,8 @@ class AsdfFile(versioning.VersionedMixin): def __init__(self, tree=None, uri=None, extensions=None, version=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, ignore_implicit_conversion=False, copy_arrays=False, - lazy_load=True, custom_schema=None, inline_threshold=None): + lazy_load=True, custom_schema=None, inline_threshold=None, + _readonly=False): """ Parameters ---------- @@ -134,7 +135,7 @@ def __init__(self, tree=None, uri=None, extensions=None, version=None, self._external_asdf_by_uri = {} self._blocks = block.BlockManager( self, copy_arrays=copy_arrays, inline_threshold=inline_threshold, - lazy_load=lazy_load) + lazy_load=lazy_load, readonly=_readonly) self._uri = None if tree is None: self.tree = {} @@ -1326,18 +1327,19 @@ def open_asdf(fd, uri=None, mode=None, validate_checksums=False, The new AsdfFile object. 
""" + readonly = False + # For now retain backwards compatibility with the old API behavior, # specifically when being called from AsdfFile.open if not _compat: mode = _check_and_set_mode(fd, mode) - if mode == 'r' and not copy_arrays: - copy_arrays = True + readonly = (mode == 'r' and not copy_arrays) instance = AsdfFile(extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, copy_arrays=copy_arrays, lazy_load=lazy_load, - custom_schema=custom_schema) + custom_schema=custom_schema, _readonly=readonly) return AsdfFile._open_impl(instance, fd, uri=uri, mode=mode, diff --git a/asdf/block.py b/asdf/block.py index 1b29c93f6..9c82df97c 100644 --- a/asdf/block.py +++ b/asdf/block.py @@ -34,7 +34,7 @@ class BlockManager(object): Manages the `Block`s associated with a ASDF file. """ def __init__(self, asdffile, copy_arrays=False, inline_threshold=None, - lazy_load=True): + lazy_load=True, readonly=False): self._asdffile = weakref.ref(asdffile) self._internal_blocks = [] @@ -58,6 +58,7 @@ def __init__(self, asdffile, copy_arrays=False, inline_threshold=None, self._validate_checksums = False self._memmap = not copy_arrays self._lazy_load = lazy_load + self._readonly = readonly def __len__(self): """ @@ -533,7 +534,8 @@ def read_block_index(self, fd, ctx): for offset in offsets[1:-1]: self._internal_blocks.append( UnloadedBlock(fd, offset, - memmap=self.memmap, lazy_load=self.lazy_load)) + memmap=self.memmap, lazy_load=self.lazy_load, + readonly=self._readonly)) # We already read the last block in the file -- no need to read it again self._internal_blocks.append(block) @@ -834,6 +836,7 @@ def __init__(self, data=None, uri=None, array_storage='internal', self._should_memmap = memmap self._memmapped = False self._lazy_load = lazy_load + self._readonly = False self.update_size() self._allocated = self._size @@ -916,6 +919,10 @@ def output_compression(self, compression): def checksum(self): return self._checksum + @property + def readonly(self): + return self._readonly + def _set_checksum(self, checksum): if checksum == b'\0' * 16: self._checksum = None @@ -1215,7 +1222,7 @@ class UnloadedBlock(object): full-fledged block whenever the underlying data or more detail is requested. 
""" - def __init__(self, fd, offset, memmap=True, lazy_load=True): + def __init__(self, fd, offset, memmap=True, lazy_load=True, readonly=False): self._fd = fd self._offset = offset self._data = None @@ -1227,6 +1234,7 @@ def __init__(self, fd, offset, memmap=True, lazy_load=True): self._should_memmap = memmap self._memmapped = False self._lazy_load = lazy_load + self._readonly = readonly def __len__(self): self.load() diff --git a/asdf/fits_embed.py b/asdf/fits_embed.py index 2643f31e9..5d3a451e6 100644 --- a/asdf/fits_embed.py +++ b/asdf/fits_embed.py @@ -43,6 +43,10 @@ def __len__(self): def data(self): return self._hdu.data + @property + def readonly(self): + return False + @property def array_storage(self): return 'fits' diff --git a/asdf/tags/core/ndarray.py b/asdf/tags/core/ndarray.py index 9639a4bd3..e5f6a31c7 100644 --- a/asdf/tags/core/ndarray.py +++ b/asdf/tags/core/ndarray.py @@ -252,6 +252,8 @@ def _make_array(self): shape, self._dtype, block.data, self._offset, self._strides, self._order) self._array = self._apply_mask(self._array, self._mask) + if block.readonly: + self._array.setflags(write=False) return self._array def _apply_mask(self, array, mask): From 221c3c401c23ce99b30484a066f5893833940ced Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 11:40:07 -0400 Subject: [PATCH 10/19] Enforce readonly mode for external arrays --- asdf/asdf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asdf/asdf.py b/asdf/asdf.py index d2eef98d1..007d3838a 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -377,7 +377,7 @@ def open_external(self, uri, do_not_fill_defaults=False): if asdffile is None: asdffile = open_asdf( resolved_uri, - do_not_fill_defaults=do_not_fill_defaults) + mode='r', do_not_fill_defaults=do_not_fill_defaults) self._external_asdf_by_uri[resolved_uri] = asdffile return asdffile From 73e7255fb9a9159396d6090849a8a764a718dcf6 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 11:45:57 -0400 Subject: [PATCH 11/19] Add test for readonly behavior of ndarrays --- asdf/tags/core/tests/test_ndarray.py | 41 ++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/asdf/tags/core/tests/test_ndarray.py b/asdf/tags/core/tests/test_ndarray.py index ab49b8b57..f86186146 100644 --- a/asdf/tags/core/tests/test_ndarray.py +++ b/asdf/tags/core/tests/test_ndarray.py @@ -817,3 +817,44 @@ def test_fortran_order(tmpdir): array = np.array([[11,12,13], [21,22,23]], order='F') tree = dict(data=array) helpers.assert_roundtrip_tree(tree, tmpdir) + + +def test_readonly(tmpdir): + + tmpfile = str(tmpdir.join('data.asdf')) + tree = dict(data=np.ndarray((100))) + + with asdf.AsdfFile(tree) as af: + # Make sure we're actually writing to an internal array for this test + af.write_to(tmpfile, all_array_storage='internal') + + # This should be perfectly fine + with asdf.open(tmpfile) as af: + assert af['data'].flags.writeable == True + af['data'][0] = 40 + + # Opening in read mode should mean array is readonly + with asdf.open(tmpfile, mode='r') as af: + assert af['data'].flags.writeable == False + with pytest.raises(ValueError) as err: + af['data'][0] = 41 + assert str(err) == 'assignment destination is read-only' + + # Copying the arrays makes it safe to write to the underlying array + with asdf.open(tmpfile, mode='r', copy_arrays=True) as af: + assert af['data'].flags.writeable == True + af['data'][0] = 42 + + +def test_readonly_inline(tmpdir): + + tmpfile = str(tmpdir.join('data.asdf')) + tree = dict(data=np.ndarray((100))) + + 
with asdf.AsdfFile(tree) as af: + af.write_to(tmpfile, all_array_storage='inline') + + # This should be safe since it's an inline array + with asdf.open(tmpfile, mode='r') as af: + assert af['data'].flags.writeable == True + af['data'][0] = 42 From 85e12090ef7b289017c81fa09e5e286223ee9f00 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 12:11:08 -0400 Subject: [PATCH 12/19] Override ndarray __setitem__ to avoid segfault during exception handling --- asdf/tags/core/ndarray.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/asdf/tags/core/ndarray.py b/asdf/tags/core/ndarray.py index e5f6a31c7..0ab5dd188 100644 --- a/asdf/tags/core/ndarray.py +++ b/asdf/tags/core/ndarray.py @@ -346,6 +346,16 @@ def __getattr__(self, attr): raise AttributeError() return getattr(self._make_array(), attr) + def __setitem__(self, *args): + # This workaround appears to be necessary in order to avoid a segfault + # in the case that array assignment causes an exception. The segfault + # originates from the call to __repr__ inside the traceback report. + try: + self._make_array().__setitem__(*args) + except Exception: + self._array = None + raise + @classmethod def from_tree(cls, node, ctx): if isinstance(node, list): @@ -511,7 +521,7 @@ def __operation__(self, *args): '__imul__', '__idiv__', '__itruediv__', '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__', '__irshift__', '__iand__', '__ixor__', '__ior__', '__getitem__', - '__delitem__', '__contains__', '__setitem__']: + '__delitem__', '__contains__']: setattr(NDArrayType, op, _make_operation(op)) From 9167221bbb40062ad0241182356ee70403635af9 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 12:12:40 -0400 Subject: [PATCH 13/19] Update change log --- CHANGES.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index cac04dd7c..57b365462 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -24,6 +24,12 @@ types. This warning is converted to an error when using ``assert_roundtrip_tree`` for tests. [#583] +- Deprecate ``asdf.AsdfFile.open`` in favor of ``asdf.open``. [#579] + +- Add readonly protection to memory mapped arrays when the underlying file + handle is readonly. [#579] + + 2.1.1 (unreleased) ------------------ From 3f8954ec0c6fdc866d1dddfb4715a1819d263d73 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 12:39:30 -0400 Subject: [PATCH 14/19] Add docstring with deprecation notice to asdf.AsdfFile.open --- asdf/asdf.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/asdf/asdf.py b/asdf/asdf.py index 007d3838a..768b176ab 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -725,6 +725,12 @@ def open(cls, fd, uri=None, mode='r', custom_schema=None, strict_extension_check=False, ignore_missing_extensions=False): + """ + Open an existing ASDF file. + + .. deprecated:: 2.2 + Use `asdf.open` instead. + """ warnings.warn( "The method AsdfFile.open has been deprecated and will be removed " From f6c3e364c93c5a19bbfbf427021558061e3b826e Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Mon, 29 Oct 2018 12:48:52 -0400 Subject: [PATCH 15/19] Fix test failure on windows. 
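
Python 3 removed sys.maxint, so the collision-avoidance fallback in
atomic_rename raised AttributeError instead of renaming. That fallback is
only reached when os.rename refuses to overwrite an existing destination
(errno EEXIST), which in practice means Windows, hence the failure only
showing up there; sys.maxsize is available on both Python 2 and 3. A minimal
sketch of the fallback this one-line change touches (names follow
atomic_rename; the final cleanup of the parked file is an assumed step):

    import errno
    import os
    import random
    import sys

    def rename_over_existing(src, dst):
        try:
            # On POSIX, os.rename() replaces an existing destination
            # atomically; on Windows it raises instead.
            os.rename(src, dst)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
            # Park the existing destination under a randomized name and
            # retry.  sys.maxsize works on Python 2 and 3 alike, unlike
            # the removed sys.maxint.
            old = "%s-%08x" % (dst, random.randint(0, sys.maxsize))
            os.rename(dst, old)
            os.rename(src, dst)
            # Best-effort removal of the parked copy (assumed step).
            try:
                os.remove(old)
            except OSError:
                pass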
--- asdf/extern/atomicfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asdf/extern/atomicfile.py b/asdf/extern/atomicfile.py index e3198425f..34f4b6076 100644 --- a/asdf/extern/atomicfile.py +++ b/asdf/extern/atomicfile.py @@ -73,7 +73,7 @@ def atomic_rename(src, dst): except OSError as e: if e.errno != errno.EEXIST: raise - old = "%s-%08x" % (dst, random.randint(0, sys.maxint)) + old = "%s-%08x" % (dst, random.randint(0, sys.maxsize)) os.rename(dst, old) os.rename(src, dst) try: From 51993f10e6bf0d986e0c5f103ea1ad6c7671ef3a Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Tue, 30 Oct 2018 10:07:01 -0400 Subject: [PATCH 16/19] Attempt to fix file handle issue that appears on Windows --- asdf/asdf.py | 29 +++++++++++++++-------------- asdf/tests/test_low_level.py | 2 +- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/asdf/asdf.py b/asdf/asdf.py index 768b176ab..e692eef59 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -822,6 +822,7 @@ def _post_write(self, fd): if len(self._tree): self.run_hook('post_write') + # TODO: there has got to be a better way to do this... if hasattr(self, '_all_array_storage'): del self._all_array_storage if hasattr(self, '_all_array_compression'): @@ -1027,24 +1028,24 @@ def write_to(self, fd, all_array_storage=None, all_array_compression='input', write out in the latest version supported by asdf. """ - original_fd = self._fd - if version is not None: self.version = version - try: - with generic_io.get_file(fd, mode='w') as fd: - self._fd = fd - self._pre_write(fd, all_array_storage, all_array_compression, - auto_inline) - try: - self._serial_write(fd, pad_blocks, include_block_index) - fd.flush() - finally: - self._post_write(fd) - finally: - self._fd = original_fd + with generic_io.get_file(fd, mode='w') as fd: + # TODO: This is not ideal: we really should pass the URI through + # explicitly to wherever it is required instead of making it an + # attribute of the AsdfFile. + if self._uri is None: + self._uri = fd.uri + self._pre_write(fd, all_array_storage, all_array_compression, + auto_inline) + + try: + self._serial_write(fd, pad_blocks, include_block_index) + fd.flush() + finally: + self._post_write(fd) def find_references(self): """ diff --git a/asdf/tests/test_low_level.py b/asdf/tests/test_low_level.py index 077674d79..2ca488e70 100644 --- a/asdf/tests/test_low_level.py +++ b/asdf/tests/test_low_level.py @@ -746,7 +746,7 @@ def test_atomic_write(tmpdir, small_tree): ff = asdf.AsdfFile(small_tree) ff.write_to(tmpfile) - with asdf.open(tmpfile) as ff: + with asdf.open(tmpfile, mode='r') as ff: ff.write_to(tmpfile) From 07ef146b0ac341a398e73de6edde6fc4048fb51b Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Tue, 30 Oct 2018 11:57:32 -0400 Subject: [PATCH 17/19] Cleaner exception handling in ndarray.__setitem__ --- asdf/tags/core/ndarray.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/asdf/tags/core/ndarray.py b/asdf/tags/core/ndarray.py index 0ab5dd188..39e0b4423 100644 --- a/asdf/tags/core/ndarray.py +++ b/asdf/tags/core/ndarray.py @@ -352,9 +352,9 @@ def __setitem__(self, *args): # originates from the call to __repr__ inside the traceback report. 
try: self._make_array().__setitem__(*args) - except Exception: + except Exception as e: self._array = None - raise + raise e from None @classmethod def from_tree(cls, node, ctx): From 1411b4f328b522165bd20522b23b0dd647516e76 Mon Sep 17 00:00:00 2001 From: Daniel D'Avella Date: Tue, 30 Oct 2018 12:01:22 -0400 Subject: [PATCH 18/19] Replace references to AsdfFile.open in the documentation --- asdf/fits_embed.py | 2 +- docs/asdf/features.rst | 7 ++++--- docs/sphinxext/example.py | 3 ++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/asdf/fits_embed.py b/asdf/fits_embed.py index 5d3a451e6..08a109f6c 100644 --- a/asdf/fits_embed.py +++ b/asdf/fits_embed.py @@ -182,7 +182,7 @@ def open(cls, fd, uri=None, validate_checksums=False, extensions=None, The URI for this ASDF file. Used to resolve relative references against. If not provided, will be automatically determined from the associated file object, - if possible and if created from `AsdfFile.open`. + if possible and if created from `asdf.open`. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. diff --git a/docs/asdf/features.rst b/docs/asdf/features.rst index 77f916375..0f4cb42c4 100644 --- a/docs/asdf/features.rst +++ b/docs/asdf/features.rst @@ -235,6 +235,7 @@ First, we'll create a ASDF file with a couple of arrays in it: .. runcode:: + import asdf from asdf import AsdfFile import numpy as np @@ -259,7 +260,7 @@ to the target file. ff = AsdfFile() - with AsdfFile.open('target.asdf') as target: + with asdf.open('target.asdf') as target: ff.tree['my_ref_a'] = target.make_reference(['a']) ff.tree['my_ref_b'] = {'$ref': 'target.asdf#b'} @@ -275,7 +276,7 @@ references. .. runcode:: - with AsdfFile.open('source.asdf') as ff: + with asdf.open('source.asdf') as ff: ff.find_references() assert ff.tree['my_ref_b'].shape == (10,) @@ -286,7 +287,7 @@ literal content in its place. .. 
runcode::

-    with AsdfFile.open('source.asdf') as ff:
+    with asdf.open('source.asdf') as ff:
         ff.resolve_references()
         ff.write_to('resolved.asdf')

diff --git a/docs/sphinxext/example.py b/docs/sphinxext/example.py
index 6423bc379..6790d2657 100644
--- a/docs/sphinxext/example.py
+++ b/docs/sphinxext/example.py
@@ -15,6 +15,7 @@
 from sphinx.util.nodes import set_source_info

+import asdf
 from asdf import AsdfFile
 from asdf.constants import ASDF_MAGIC, BLOCK_FLAG_STREAMED
 from asdf import versioning, util

@@ -89,7 +90,7 @@ def run(self):
         kwargs['ignore_unrecognized_tag'] = 'ignore_unrecognized_tag' in self.arguments
         kwargs['ignore_missing_extensions'] = 'ignore_unrecognized_tag' in self.arguments

-        with AsdfFile.open(filename, **kwargs) as ff:
+        with asdf.open(filename, **kwargs) as ff:
             for i, block in enumerate(ff.blocks.internal_blocks):
                 data = codecs.encode(block.data.tostring(), 'hex')
                 if len(data) > 40:

From e17a3763947082ae15dc19b21a8ac7af9098bd41 Mon Sep 17 00:00:00 2001
From: Daniel D'Avella
Date: Wed, 31 Oct 2018 09:05:44 -0400
Subject: [PATCH 19/19] Add test for deprecated asdf.AsdfFile.open

---
 asdf/tests/test_low_level.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/asdf/tests/test_low_level.py b/asdf/tests/test_low_level.py
index 2ca488e70..85c5beba0 100644
--- a/asdf/tests/test_low_level.py
+++ b/asdf/tests/test_low_level.py
@@ -1306,3 +1306,16 @@ def test_no_warning_nan_array(tmpdir):
     with pytest.warns(None) as w:
         assert_roundtrip_tree(tree, tmpdir)
     assert len(w) == 0, display_warnings(w)
+
+
+def test_warning_deprecated_open(tmpdir):
+
+    tmpfile = str(tmpdir.join('foo.asdf'))
+
+    tree = dict(foo=42, bar='hello')
+    with asdf.AsdfFile(tree) as af:
+        af.write_to(tmpfile)
+
+    with pytest.warns(AsdfDeprecationWarning):
+        with asdf.AsdfFile.open(tmpfile) as af:
+            assert_tree_match(tree, af.tree)
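
A minimal usage sketch of where this series lands, distilled from the tests
above (the file name, tree contents, and keyword values here are
illustrative, not authoritative):

    import warnings

    import numpy as np

    import asdf

    # Write a small file containing one internal (binary-block) array.
    asdf.AsdfFile({'data': np.arange(10.0)}).write_to(
        'example.asdf', all_array_storage='internal')

    # asdf.open replaces AsdfFile.open.  With an explicit mode='r', blocks
    # that are memory mapped come back read-only, so stray writes raise
    # instead of silently changing the file on disk.
    with asdf.open('example.asdf', mode='r') as af:
        assert af['data'].flags.writeable == False

    # Requesting copies gives ordinary in-memory arrays that may be modified.
    with asdf.open('example.asdf', mode='r', copy_arrays=True) as af:
        af['data'][0] = 42.0

    # The old classmethod still works but now emits a deprecation warning.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        with asdf.AsdfFile.open('example.asdf'):
            pass
    assert any('deprecated' in str(w.message) for w in caught)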