diff --git a/CHANGES.rst b/CHANGES.rst
index ab77e1577..19cd11a62 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -34,6 +34,9 @@ The ASDF Standard is at v1.6.0
 - Require pytest 7+ and update asdf pytest plugin to be compatible
   with the current development version of pytest (8.1) [#1731]
 
+- Eliminate the use of the legacy ``tmpdir`` fixture in favor of
+  the new ``tmp_path`` fixture for temporary directory creation. [#1759]
+
 3.0.1 (2023-10-30)
 ------------------
 
diff --git a/asdf/_tests/commands/tests/test_defragment.py b/asdf/_tests/commands/tests/test_defragment.py
index e21b7f951..23cdc2a3b 100644
--- a/asdf/_tests/commands/tests/test_defragment.py
+++ b/asdf/_tests/commands/tests/test_defragment.py
@@ -9,7 +9,7 @@
 from asdf.commands import main
 
 
-def _test_defragment(tmpdir, codec):
+def _test_defragment(tmp_path, codec):
     x = np.arange(0, 1000, dtype=float)
 
     tree = {
@@ -19,8 +19,8 @@ def _test_defragment(tmpdir, codec):
         "not_shared": np.arange(100, 0, -1, dtype=np.uint8),
     }
 
-    path = os.path.join(str(tmpdir), "original.asdf")
-    out_path = os.path.join(str(tmpdir), "original.defragment.asdf")
+    path = os.path.join(str(tmp_path), "original.asdf")
+    out_path = os.path.join(str(tmp_path), "original.defragment.asdf")
     ff = AsdfFile(tree)
     ff.write_to(path)
     with asdf.open(path) as af:
@@ -30,26 +30,26 @@ def _test_defragment(tmpdir, codec):
 
     assert result == 0
 
-    files = get_file_sizes(str(tmpdir))
+    files = get_file_sizes(str(tmp_path))
 
     assert "original.asdf" in files
     assert "original.defragment.asdf" in files
 
     assert files["original.defragment.asdf"] < files["original.asdf"]
 
-    with asdf.open(os.path.join(str(tmpdir), "original.defragment.asdf")) as ff:
+    with asdf.open(os.path.join(str(tmp_path), "original.defragment.asdf")) as ff:
         assert_tree_match(ff.tree, tree)
         assert len(ff._blocks.blocks) == 2
 
 
-def test_defragment_zlib(tmpdir):
-    _test_defragment(tmpdir, "zlib")
+def test_defragment_zlib(tmp_path):
+    _test_defragment(tmp_path, "zlib")
 
 
-def test_defragment_bzp2(tmpdir):
-    _test_defragment(tmpdir, "bzp2")
+def test_defragment_bzp2(tmp_path):
+    _test_defragment(tmp_path, "bzp2")
 
 
-def test_defragment_lz4(tmpdir):
+def test_defragment_lz4(tmp_path):
     pytest.importorskip("lz4")
-    _test_defragment(tmpdir, "lz4")
+    _test_defragment(tmp_path, "lz4")
diff --git a/asdf/_tests/commands/tests/test_exploded.py b/asdf/_tests/commands/tests/test_exploded.py
index 1c08f4272..631432867 100644
--- a/asdf/_tests/commands/tests/test_exploded.py
+++ b/asdf/_tests/commands/tests/test_exploded.py
@@ -8,7 +8,7 @@
 from asdf.commands import main
 
 
-def test_explode_then_implode(tmpdir):
+def test_explode_then_implode(tmp_path):
     x = np.arange(0, 10, dtype=float)
 
     tree = {
@@ -18,7 +18,7 @@ def test_explode_then_implode(tmpdir):
         "not_shared": np.arange(10, 0, -1, dtype=np.uint8),
     }
 
-    path = os.path.join(str(tmpdir), "original.asdf")
+    path = os.path.join(str(tmp_path), "original.asdf")
     ff = AsdfFile(tree)
     # Since we're testing with small arrays, force all arrays to be stored
     # in internal blocks rather than letting some of them be automatically put
@@ -31,7 +31,7 @@ def test_explode_then_implode(tmpdir):
 
     assert result == 0
 
-    files = get_file_sizes(str(tmpdir))
+    files = get_file_sizes(str(tmp_path))
 
     assert "original.asdf" in files
     assert "original_exploded.asdf" in files
@@ -41,16 +41,16 @@ def test_explode_then_implode(tmpdir):
     assert files["original.asdf"] > files["original_exploded.asdf"]
 
-    path = os.path.join(str(tmpdir), "original_exploded.asdf")
+    path = os.path.join(str(tmp_path), "original_exploded.asdf")
     result = main.main_from_args(["implode", path])
 
     assert result == 0
 
-    with asdf.open(str(tmpdir.join("original_exploded_all.asdf"))) as af:
+    with asdf.open(str(tmp_path / "original_exploded_all.asdf")) as af:
         assert_tree_match(af.tree, tree)
         assert len(af._blocks.blocks) == 2
 
 
-def test_file_not_found(tmpdir):
-    path = os.path.join(str(tmpdir), "original.asdf")
+def test_file_not_found(tmp_path):
+    path = os.path.join(str(tmp_path), "original.asdf")
 
     assert main.main_from_args(["explode", path]) == 2
diff --git a/asdf/_tests/commands/tests/test_to_yaml.py b/asdf/_tests/commands/tests/test_to_yaml.py
index e2987beea..31b04fd61 100644
--- a/asdf/_tests/commands/tests/test_to_yaml.py
+++ b/asdf/_tests/commands/tests/test_to_yaml.py
@@ -8,7 +8,7 @@
 from asdf.commands import main
 
 
-def test_to_yaml(tmpdir):
+def test_to_yaml(tmp_path):
     x = np.arange(0, 10, dtype=float)
 
     tree = {
@@ -18,7 +18,7 @@ def test_to_yaml(tmpdir):
         "not_shared": np.arange(10, 0, -1, dtype=np.uint8),
     }
 
-    path = os.path.join(str(tmpdir), "original.asdf")
+    path = os.path.join(str(tmp_path), "original.asdf")
     ff = AsdfFile(tree)
     ff.write_to(path)
     with asdf.open(path) as ff2:
@@ -28,11 +28,11 @@ def test_to_yaml(tmpdir):
 
     assert result == 0
 
-    files = get_file_sizes(str(tmpdir))
+    files = get_file_sizes(str(tmp_path))
 
     assert "original.asdf" in files
     assert "original.yaml" in files
 
-    with asdf.open(os.path.join(str(tmpdir), "original.yaml")) as ff:
+    with asdf.open(os.path.join(str(tmp_path), "original.yaml")) as ff:
         assert_tree_match(ff.tree, tree)
         assert len(list(ff._blocks.blocks)) == 0
diff --git a/asdf/_tests/core/_converters/test_external_reference.py b/asdf/_tests/core/_converters/test_external_reference.py
index bbfb6e1c6..8db8af806 100644
--- a/asdf/_tests/core/_converters/test_external_reference.py
+++ b/asdf/_tests/core/_converters/test_external_reference.py
@@ -2,7 +2,7 @@
 from asdf.testing.helpers import roundtrip_object
 
 
-def test_roundtrip_external_array(tmpdir):
+def test_roundtrip_external_array(tmp_path):
     ref = ExternalArrayReference("./nonexistent.fits", 1, "np.float64", (100, 100))
     result = roundtrip_object(ref)
 
diff --git a/asdf/_tests/tags/core/tests/test_integer.py b/asdf/_tests/tags/core/tests/test_integer.py
index 82f5fa13b..e5e900e6c 100644
--- a/asdf/_tests/tags/core/tests/test_integer.py
+++ b/asdf/_tests/tags/core/tests/test_integer.py
@@ -22,18 +22,18 @@
         random.getrandbits(200),
     ],
 )
-def test_integer_value(tmpdir, value, sign):
+def test_integer_value(tmp_path, value, sign):
     if sign == "-":
         value = -value
     integer = IntegerType(value)
     tree = {"integer": integer}
 
-    helpers.assert_roundtrip_tree(tree, tmpdir)
+    helpers.assert_roundtrip_tree(tree, tmp_path)
 
 
 @pytest.mark.parametrize("inline", [False, True])
-def test_integer_storage(tmpdir, inline):
-    tmpfile = str(tmpdir.join("integer.asdf"))
+def test_integer_storage(tmp_path, inline):
+    tmpfile = str(tmp_path / "integer.asdf")
 
     kwargs = {}
     if inline:
diff --git a/asdf/_tests/tags/core/tests/test_ndarray.py b/asdf/_tests/tags/core/tests/test_ndarray.py
index 758a3bb6a..bab255ba3 100644
--- a/asdf/_tests/tags/core/tests/test_ndarray.py
+++ b/asdf/_tests/tags/core/tests/test_ndarray.py
@@ -152,7 +152,7 @@ def test_sharing():
     assert tree["skipping"][0] == 42
 
 
-def test_byteorder(tmpdir):
+def test_byteorder(tmp_path):
     tree = {
         "bigendian": np.arange(0, 10, dtype=">f8"),
         "little": np.arange(0, 10, dtype="<f8"),
", "<"):
         for dtype in ndarray._datatype_names.values():
@@ -210,7 +210,7 @@ def test_dont_load_data():
     assert callable(block._data)
 
 
-def test_table_inline(tmpdir):
+def test_table_inline(tmp_path):
     table = np.array(
         [(0, 1, (2, 3)), (4, 5, (6, 7))],
         dtype=[("MINE", np.int8), ("", np.float64), ("arr", ">i4", (2,))],
@@ -240,7 +240,7 @@ def test_table_inline(tmpdir):
     }
 
 
-def test_array_inline_threshold_recursive(tmpdir):
+def test_array_inline_threshold_recursive(tmp_path):
     """
     Test that setting the inline threshold works for objects
     that contain (and when serialized produce a ndarray)
@@ -280,7 +280,7 @@ class NDArrayContainerExtension:
     # using == which will fail
     # this test appears to be designed to test the inline threshold so we can
     # just look at the number of blocks
-    fn = str(tmpdir / "test.asdf")
+    fn = str(tmp_path / "test.asdf")
     af = asdf.AsdfFile(tree)
     af.write_to(fn)
     with asdf.open(fn) as af:
@@ -301,7 +301,7 @@ def test_copy_inline():
     ff.write_to(io.BytesIO())
 
 
-def test_table(tmpdir):
+def test_table(tmp_path):
     table = np.array([(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[("MINE", np.int8), ("", "<f8"), ("arr", ">i4", (2,))])
 
     tree = {"table_data": table}
@@ -324,7 +324,7 @@ def test_table(tmpdir):
     }
 
 
-def test_table_nested_fields(tmpdir):
+def test_table_nested_fields(tmp_path):
     table = np.array(
         [(0, (1, 2)), (4, (5, 6)), (7, (8, 9))],
         dtype=[("A", "
 5)
     tree = {"masked_array": m, "unmasked_array": x}
@@ -399,7 +399,7 @@ def test_mask_roundtrip(tmpdir):
         assert len(af._blocks.blocks) == 2
 
 
-def test_len_roundtrip(tmpdir):
+def test_len_roundtrip(tmp_path):
     sequence = np.arange(0, 10, dtype=int)
 
     tree = {"sequence": sequence}
@@ -432,7 +432,7 @@ def test_mask_nan():
     assert_array_equal(ff.tree["arr"].mask, [[False, False, False, True], [False, False, False, False]])
 
 
-def test_string(tmpdir):
+def test_string(tmp_path):
     tree = {
         "ascii": np.array([b"foo", b"bar", b"baz"]),
         "unicode": np.array(["სამეცნიერო", "данные", "வடிவம்"]),
@@ -443,7 +443,7 @@ def test_string(tmpdir):
         assert_array_equal(tree[k], af[k])
 
 
-def test_string_table(tmpdir):
+def test_string_table(tmp_path):
     tree = {"table": np.array([(b"foo", "სამეცნიერო", "42", "53.0")])}
 
     with roundtrip(tree) as af:
@@ -501,7 +501,7 @@ def test_simple_table():
     ff.write_to(io.BytesIO())
 
 
-def test_unicode_to_list(tmpdir):
+def test_unicode_to_list(tmp_path):
     arr = np.array(["", "𐀠"], dtype="
   a: !core/ndarray-1.0.0
@@ -724,7 +724,7 @@ def test_ndim_validation(tmpdir):
 
 
 @with_custom_extension()
-def test_datatype_validation(tmpdir):
+def test_datatype_validation(tmp_path):
     content = """
 obj: !
   a: !core/ndarray-1.0.0
@@ -801,7 +801,7 @@ def test_datatype_validation(tmpdir):
 
 
 @with_custom_extension()
-def test_structured_datatype_validation(tmpdir):
+def test_structured_datatype_validation(tmp_path):
     content = """
 obj: !
   c: !core/ndarray-1.0.0
@@ -930,14 +930,14 @@ def test_inline_shape_mismatch():
         pass
 
 
-def test_broadcasted_array(tmpdir):
+def test_broadcasted_array(tmp_path):
     attrs = np.broadcast_arrays(np.array([10, 20]), np.array(10), np.array(10))
     tree = {"one": attrs[1]}  # , 'two': attrs[1], 'three': attrs[2]}
     with roundtrip(tree) as af:
         assert_array_equal(tree["one"], af["one"])
 
 
-def test_broadcasted_offset_array(tmpdir):
+def test_broadcasted_offset_array(tmp_path):
     base = np.arange(10)
     offset = base[5:]
     broadcasted = np.broadcast_to(offset, (4, 5))
@@ -946,7 +946,7 @@ def test_broadcasted_offset_array(tmpdir):
         assert_array_equal(tree["broadcasted"], af["broadcasted"])
 
 
-def test_non_contiguous_base_array(tmpdir):
+def test_non_contiguous_base_array(tmp_path):
     base = np.arange(60).reshape(5, 4, 3).transpose(2, 0, 1) * 1
     contiguous = base.transpose(1, 2, 0)
     tree = {"contiguous": contiguous}
@@ -954,7 +954,7 @@ def test_non_contiguous_base_array(tmpdir):
         assert_array_equal(tree["contiguous"], af["contiguous"])
 
 
-def test_fortran_order(tmpdir):
+def test_fortran_order(tmp_path):
     array = np.array([[11, 12, 13], [21, 22, 23]], order="F", dtype=np.int64)
     tree = {"data": array}
 
@@ -967,8 +967,8 @@ def test_fortran_order(tmpdir):
         assert tree["data"]["strides"] == [8, 16]
 
 
-def test_memmap_write(tmpdir):
-    tmpfile = str(tmpdir.join("data.asdf"))
+def test_memmap_write(tmp_path):
+    tmpfile = str(tmp_path / "data.asdf")
 
     tree = {"data": np.zeros(100)}
     with asdf.AsdfFile(tree) as af:
@@ -988,8 +988,8 @@ def test_memmap_write(tmpdir):
         assert af["data"][0] == 42
 
 
-def test_readonly(tmpdir):
-    tmpfile = str(tmpdir.join("data.asdf"))
+def test_readonly(tmp_path):
+    tmpfile = str(tmp_path / "data.asdf")
 
     tree = {"data": np.ndarray(100)}
     with asdf.AsdfFile(tree) as af:
@@ -1019,8 +1019,8 @@ def test_readonly(tmpdir):
         af["data"][0] = 42
 
 
-def test_readonly_inline(tmpdir):
-    tmpfile = str(tmpdir.join("data.asdf"))
+def test_readonly_inline(tmp_path):
+    tmpfile = str(tmp_path / "data.asdf")
 
     tree = {"data": np.ndarray(100)}
     with asdf.AsdfFile(tree) as af:
@@ -1035,8 +1035,8 @@ def test_readonly_inline(tmpdir):
 # Confirm that NDArrayType's internal array is regenerated
 # following an update.
 @pytest.mark.parametrize("pad_blocks", [True, False])
-def test_block_data_change(pad_blocks, tmpdir):
-    tmpfile = str(tmpdir.join("data.asdf"))
+def test_block_data_change(pad_blocks, tmp_path):
+    tmpfile = str(tmp_path / "data.asdf")
     tree = {"data": np.zeros(10, dtype="uint8")}
     with asdf.AsdfFile(tree) as af:
         af.write_to(tmpfile, pad_blocks=pad_blocks)
diff --git a/asdf/conftest.py b/asdf/conftest.py
index ae1411407..9e1f92bbb 100644
--- a/asdf/conftest.py
+++ b/asdf/conftest.py
@@ -9,7 +9,7 @@
 
 
 @pytest.fixture(scope="session", autouse=True)
-def _temp_cwd(tmpdir_factory):
+def _temp_cwd(tmp_path_factory):
     """
     This fixture creates a temporary current working directory for the
     test session, so that docstring tests that write files
@@ -17,7 +17,7 @@ def _temp_cwd(tmpdir_factory):
     """
     original_cwd = os.getcwd()
     try:
-        os.chdir(tmpdir_factory.mktemp("cwd"))
+        os.chdir(tmp_path_factory.mktemp("cwd"))
         yield
     finally:
         os.chdir(original_cwd)
diff --git a/compatibility_tests/test_file_compatibility.py b/compatibility_tests/test_file_compatibility.py
index b9f8f34ff..d8c07d760 100644
--- a/compatibility_tests/test_file_compatibility.py
+++ b/compatibility_tests/test_file_compatibility.py
@@ -149,7 +149,7 @@ def env_path(asdf_version, tmp_path_factory):
 
 
 @pytest.fixture(autouse=True)
-def _pushd_tmpdir(tmpdir):
+def _pushd_tmp_path(tmp_path):
    """
    Change the working directory, in case the user is running these
    tests from the repo root. Python will import a module from the
@@ -157,13 +157,13 @@ def _pushd_tmpdir(tmpdir):
     from accidentally comparing the current library code to itself.
     """
     original_cwd = os.getcwd()
-    tmpdir.chdir()
+    os.chdir(str(tmp_path))
     yield
     os.chdir(original_cwd)
 
 
 @pytest.mark.remote_data()
-def test_file_compatibility(asdf_version, env_path, tmpdir):
+def test_file_compatibility(asdf_version, env_path, tmp_path):
     # Sanity check to ensure we're not accidentally comparing
     # the current code to itself.
     installed_version = get_installed_version(env_path)
@@ -188,7 +188,7 @@ def test_file_compatibility(asdf_version, env_path, tmpdir):
     # Confirm that a file generated by the current version of the code
     # can be read by the older version of the library.
     if asdf_version >= MIN_VERSION_NEW_FILES:
-        current_file_path = Path(str(tmpdir)) / "test-current.asdf"
+        current_file_path = Path(str(tmp_path)) / "test-current.asdf"
         generate_file(current_file_path, standard_version)
         assert env_run(env_path, "python3", ASSERT_SCRIPT_PATH, current_file_path, capture_output=True), (
             f"asdf library version {asdf_version} failed to read an ASDF Standard {standard_version} "
@@ -198,7 +198,7 @@ def test_file_compatibility(asdf_version, env_path, tmpdir):
     # Confirm that a file generated by the older version of the library
     # can be read by the current version of the code.
     if asdf_version >= MIN_VERSION_OLD_FILES:
-        old_file_path = Path(str(tmpdir)) / "test-old.asdf"
+        old_file_path = Path(str(tmp_path)) / "test-old.asdf"
         assert env_run(
             env_path,
             "python3",
diff --git a/pyproject.toml b/pyproject.toml
index 27ecf1480..c589773a7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -110,7 +110,12 @@ filterwarnings = [
 ]
 # Configuration for pytest-doctestplus
 text_file_format = 'rst'
-addopts = '--color=yes --doctest-rst -rsxfE'
+addopts = [
+    '--color=yes',
+    '--doctest-rst',
+    '-rsxfE',
+    '-p no:legacypath',
+]
 
 [tool.coverage.run]
 omit = [
diff --git a/pytest_asdf/plugin.py b/pytest_asdf/plugin.py
index bb86e70b5..58a39793c 100644
--- a/pytest_asdf/plugin.py
+++ b/pytest_asdf/plugin.py
@@ -299,7 +299,7 @@ def pytest_collect_file(file_path, parent):
     skip_tests = _parse_test_list(parent.config.getini("asdf_schema_skip_tests"))
     xfail_tests = _parse_test_list(parent.config.getini("asdf_schema_xfail_tests"))
 
-    schema_roots = [os.path.join(str(parent.config.rootdir), os.path.normpath(root)) for root in schema_roots]
+    schema_roots = [os.path.join(str(parent.config.rootpath), os.path.normpath(root)) for root in schema_roots]
 
     if file_path.suffix != ".yaml":
         return None
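
Reviewer note (not part of the patch): a minimal sketch of the migration pattern applied throughout the diff. The legacy ``tmpdir`` fixture yields a ``py.path.local`` object with a ``.join()``-based API, while ``tmp_path`` yields a standard ``pathlib.Path`` built with the ``/`` operator; with ``-p no:legacypath`` added to ``addopts``, the legacy fixture is no longer available. The test names and the ``example.asdf`` filename below are made up for illustration.

import asdf
import numpy as np


def test_write_with_legacy_tmpdir(tmpdir):
    # tmpdir is a py.path.local; children are built with .join() and
    # commonly wrapped in str() before being passed to library code.
    path = str(tmpdir.join("example.asdf"))
    asdf.AsdfFile({"data": np.arange(10)}).write_to(path)


def test_write_with_tmp_path(tmp_path):
    # tmp_path is a pathlib.Path; children are built with the "/" operator.
    path = tmp_path / "example.asdf"
    asdf.AsdfFile({"data": np.arange(10)}).write_to(str(path))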