From cb5eef1ad17e36626e2556bc2cfaf5c74aedf807 Mon Sep 17 00:00:00 2001 From: Spencer Clark Date: Tue, 29 Oct 2019 11:30:54 -0400 Subject: [PATCH 1/2] Remove outdated code related to compatibility with netcdftime (#3450) * Remove code leftover from the netcdftime -> cftime transition * Add a what's new note * black formatting * Add more detail to what's new note * More minor edits to what's new note --- doc/whats-new.rst | 5 + xarray/coding/times.py | 43 +------ xarray/tests/__init__.py | 4 - xarray/tests/test_accessor_dt.py | 30 ++--- xarray/tests/test_cftimeindex.py | 10 +- xarray/tests/test_coding_times.py | 184 ++++++++++++------------------ xarray/tests/test_conventions.py | 10 +- xarray/tests/test_utils.py | 13 +-- 8 files changed, 100 insertions(+), 199 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 73618782460..82355a6bda4 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -22,6 +22,11 @@ Breaking changes ~~~~~~~~~~~~~~~~ - Minimum cftime version is now 1.0.3. By `Deepak Cherian `_. +- All leftover support for dates from non-standard calendars through netcdftime, the + module included in versions of netCDF4 prior to 1.4 that eventually became the + cftime package, has been removed in favor of relying solely on the standalone + cftime package (:pull:`3450`). By `Spencer Clark + `_. 
New Features ~~~~~~~~~~~~ diff --git a/xarray/coding/times.py b/xarray/coding/times.py index 0174088064b..965ddd8f043 100644 --- a/xarray/coding/times.py +++ b/xarray/coding/times.py @@ -39,34 +39,6 @@ ) -def _import_cftime(): - """ - helper function handle the transition to netcdftime/cftime - as a stand-alone package - """ - try: - import cftime - except ImportError: - # in netCDF4 the num2date/date2num function are top-level api - try: - import netCDF4 as cftime - except ImportError: - raise ImportError("Failed to import cftime") - return cftime - - -def _require_standalone_cftime(): - """Raises an ImportError if the standalone cftime is not found""" - try: - import cftime # noqa: F401 - except ImportError: - raise ImportError( - "Decoding times with non-standard calendars " - "or outside the pandas.Timestamp-valid range " - "requires the standalone cftime package." - ) - - def _netcdf_to_numpy_timeunit(units): units = units.lower() if not units.endswith("s"): @@ -119,16 +91,11 @@ def _decode_cf_datetime_dtype(data, units, calendar, use_cftime): def _decode_datetime_with_cftime(num_dates, units, calendar): - cftime = _import_cftime() + import cftime - if cftime.__name__ == "cftime": - return np.asarray( - cftime.num2date(num_dates, units, calendar, only_use_cftime_datetimes=True) - ) - else: - # Must be using num2date from an old version of netCDF4 which - # does not have the only_use_cftime_datetimes option. - return np.asarray(cftime.num2date(num_dates, units, calendar)) + return np.asarray( + cftime.num2date(num_dates, units, calendar, only_use_cftime_datetimes=True) + ) def _decode_datetime_with_pandas(flat_num_dates, units, calendar): @@ -354,7 +321,7 @@ def _encode_datetime_with_cftime(dates, units, calendar): This method is more flexible than xarray's parsing using datetime64[ns] arrays but also slower because it loops over each element. 
""" - cftime = _import_cftime() + import cftime if np.issubdtype(dates.dtype, np.datetime64): # numpy's broken datetime conversion only works for us precision diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index f85a33f7a3c..6592360cdf2 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -78,10 +78,6 @@ def LooseVersion(vstring): requires_scipy_or_netCDF4 = pytest.mark.skipif( not has_scipy_or_netCDF4, reason="requires scipy or netCDF4" ) -has_cftime_or_netCDF4 = has_cftime or has_netCDF4 -requires_cftime_or_netCDF4 = pytest.mark.skipif( - not has_cftime_or_netCDF4, reason="requires cftime or netCDF4" -) try: import_seaborn() has_seaborn = True diff --git a/xarray/tests/test_accessor_dt.py b/xarray/tests/test_accessor_dt.py index 0058747db71..5fe5b8c3f59 100644 --- a/xarray/tests/test_accessor_dt.py +++ b/xarray/tests/test_accessor_dt.py @@ -7,10 +7,8 @@ from . import ( assert_array_equal, assert_equal, - has_cftime, - has_cftime_or_netCDF4, - has_dask, raises_regex, + requires_cftime, requires_dask, ) @@ -199,7 +197,7 @@ def times_3d(times): ) -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize( "field", ["year", "month", "day", "hour", "dayofyear", "dayofweek"] ) @@ -217,7 +215,7 @@ def test_field_access(data, field): assert_equal(result, expected) -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime def test_cftime_strftime_access(data): """ compare cftime formatting against datetime formatting """ date_format = "%Y%m%d%H" @@ -232,8 +230,8 @@ def test_cftime_strftime_access(data): assert_equal(result, expected) -@pytest.mark.skipif(not has_dask, reason="dask not installed") -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime +@requires_dask @pytest.mark.parametrize( "field", ["year", "month", "day", "hour", "dayofyear", "dayofweek"] ) @@ -254,8 +252,8 @@ def test_dask_field_access_1d(data, field): 
assert_equal(result.compute(), expected) -@pytest.mark.skipif(not has_dask, reason="dask not installed") -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime +@requires_dask @pytest.mark.parametrize( "field", ["year", "month", "day", "hour", "dayofyear", "dayofweek"] ) @@ -286,7 +284,7 @@ def cftime_date_type(calendar): return _all_cftime_date_types()[calendar] -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime def test_seasons(cftime_date_type): dates = np.array([cftime_date_type(2000, month, 15) for month in range(1, 13)]) dates = xr.DataArray(dates) @@ -307,15 +305,3 @@ def test_seasons(cftime_date_type): seasons = xr.DataArray(seasons) assert_array_equal(seasons.values, dates.dt.season.values) - - -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime or netCDF4 not installed") -def test_dt_accessor_error_netCDF4(cftime_date_type): - da = xr.DataArray( - [cftime_date_type(1, 1, 1), cftime_date_type(2, 1, 1)], dims=["time"] - ) - if not has_cftime: - with pytest.raises(TypeError): - da.dt.month - else: - da.dt.month diff --git a/xarray/tests/test_cftimeindex.py b/xarray/tests/test_cftimeindex.py index e49dc72abdd..a8ee3c97042 100644 --- a/xarray/tests/test_cftimeindex.py +++ b/xarray/tests/test_cftimeindex.py @@ -15,7 +15,7 @@ ) from xarray.tests import assert_array_equal, assert_identical -from . import has_cftime, has_cftime_or_netCDF4, raises_regex, requires_cftime +from . 
import raises_regex, requires_cftime from .test_coding_times import ( _ALL_CALENDARS, _NON_STANDARD_CALENDARS, @@ -653,7 +653,7 @@ def test_indexing_in_dataframe_iloc(df, index): assert result.equals(expected) -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime def test_concat_cftimeindex(date_type): da1 = xr.DataArray( [1.0, 2.0], coords=[[date_type(1, 1, 1), date_type(1, 2, 1)]], dims=["time"] @@ -663,11 +663,7 @@ def test_concat_cftimeindex(date_type): ) da = xr.concat([da1, da2], dim="time") - if has_cftime: - assert isinstance(da.indexes["time"], CFTimeIndex) - else: - assert isinstance(da.indexes["time"], pd.Index) - assert not isinstance(da.indexes["time"], CFTimeIndex) + assert isinstance(da.indexes["time"], CFTimeIndex) @requires_cftime diff --git a/xarray/tests/test_coding_times.py b/xarray/tests/test_coding_times.py index 021d76e2b11..d012fb36c35 100644 --- a/xarray/tests/test_coding_times.py +++ b/xarray/tests/test_coding_times.py @@ -8,7 +8,6 @@ from xarray import DataArray, Dataset, Variable, coding, decode_cf from xarray.coding.times import ( - _import_cftime, cftime_to_nptime, decode_cf_datetime, encode_cf_datetime, @@ -19,15 +18,7 @@ from xarray.core.common import contains_cftime_datetimes from xarray.testing import assert_equal -from . import ( - arm_xfail, - assert_array_equal, - has_cftime, - has_cftime_or_netCDF4, - has_dask, - requires_cftime, - requires_cftime_or_netCDF4, -) +from . 
import arm_xfail, assert_array_equal, has_cftime, requires_cftime, requires_dask _NON_STANDARD_CALENDARS_SET = { "noleap", @@ -79,10 +70,8 @@ def _all_cftime_date_types(): - try: - import cftime - except ImportError: - import netcdftime as cftime + import cftime + return { "noleap": cftime.DatetimeNoLeap, "365_day": cftime.DatetimeNoLeap, @@ -95,16 +84,14 @@ def _all_cftime_date_types(): } -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize(["num_dates", "units", "calendar"], _CF_DATETIME_TESTS) def test_cf_datetime(num_dates, units, calendar): - cftime = _import_cftime() - if cftime.__name__ == "cftime": - expected = cftime.num2date( - num_dates, units, calendar, only_use_cftime_datetimes=True - ) - else: - expected = cftime.num2date(num_dates, units, calendar) + import cftime + + expected = cftime.num2date( + num_dates, units, calendar, only_use_cftime_datetimes=True + ) min_y = np.ravel(np.atleast_1d(expected))[np.nanargmin(num_dates)].year max_y = np.ravel(np.atleast_1d(expected))[np.nanargmax(num_dates)].year if min_y >= 1678 and max_y < 2262: @@ -138,15 +125,12 @@ def test_cf_datetime(num_dates, units, calendar): assert_array_equal(num_dates, np.around(encoded, 1)) -@requires_cftime_or_netCDF4 +@requires_cftime def test_decode_cf_datetime_overflow(): # checks for # https://github.com/pydata/pandas/issues/14068 # https://github.com/pydata/xarray/issues/975 - try: - from cftime import DatetimeGregorian - except ImportError: - from netcdftime import DatetimeGregorian + from cftime import DatetimeGregorian datetime = DatetimeGregorian units = "days since 2000-01-01 00:00:00" @@ -171,7 +155,7 @@ def test_decode_cf_datetime_non_standard_units(): assert_array_equal(actual, expected) -@requires_cftime_or_netCDF4 +@requires_cftime def test_decode_cf_datetime_non_iso_strings(): # datetime strings that are _almost_ ISO compliant but not quite, # but which cftime.num2date can still parse correctly @@ 
-190,10 +174,10 @@ def test_decode_cf_datetime_non_iso_strings(): assert (abs_diff <= np.timedelta64(1, "s")).all() -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _STANDARD_CALENDARS) def test_decode_standard_calendar_inside_timestamp_range(calendar): - cftime = _import_cftime() + import cftime units = "days since 0001-01-01" times = pd.date_range("2001-04-01-00", end="2001-04-30-23", freq="H") @@ -210,21 +194,18 @@ def test_decode_standard_calendar_inside_timestamp_range(calendar): assert (abs_diff <= np.timedelta64(1, "s")).all() -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _NON_STANDARD_CALENDARS) def test_decode_non_standard_calendar_inside_timestamp_range(calendar): - cftime = _import_cftime() + import cftime + units = "days since 0001-01-01" times = pd.date_range("2001-04-01-00", end="2001-04-30-23", freq="H") non_standard_time = cftime.date2num(times.to_pydatetime(), units, calendar=calendar) - if cftime.__name__ == "cftime": - expected = cftime.num2date( - non_standard_time, units, calendar=calendar, only_use_cftime_datetimes=True - ) - else: - expected = cftime.num2date(non_standard_time, units, calendar=calendar) - + expected = cftime.num2date( + non_standard_time, units, calendar=calendar, only_use_cftime_datetimes=True + ) expected_dtype = np.dtype("O") actual = coding.times.decode_cf_datetime( @@ -238,24 +219,19 @@ def test_decode_non_standard_calendar_inside_timestamp_range(calendar): assert (abs_diff <= np.timedelta64(1, "s")).all() -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _ALL_CALENDARS) def test_decode_dates_outside_timestamp_range(calendar): + import cftime from datetime import datetime - cftime = _import_cftime() - units = "days since 0001-01-01" times = [datetime(1, 4, 1, h) for h 
in range(1, 5)] time = cftime.date2num(times, units, calendar=calendar) - if cftime.__name__ == "cftime": - expected = cftime.num2date( - time, units, calendar=calendar, only_use_cftime_datetimes=True - ) - else: - expected = cftime.num2date(time, units, calendar=calendar) - + expected = cftime.num2date( + time, units, calendar=calendar, only_use_cftime_datetimes=True + ) expected_date_type = type(expected[0]) with warnings.catch_warnings(): @@ -269,7 +245,7 @@ def test_decode_dates_outside_timestamp_range(calendar): assert (abs_diff <= np.timedelta64(1, "s")).all() -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _STANDARD_CALENDARS) def test_decode_standard_calendar_single_element_inside_timestamp_range(calendar): units = "days since 0001-01-01" @@ -280,7 +256,7 @@ def test_decode_standard_calendar_single_element_inside_timestamp_range(calendar assert actual.dtype == np.dtype("M8[ns]") -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _NON_STANDARD_CALENDARS) def test_decode_non_standard_calendar_single_element_inside_timestamp_range(calendar): units = "days since 0001-01-01" @@ -291,10 +267,11 @@ def test_decode_non_standard_calendar_single_element_inside_timestamp_range(cale assert actual.dtype == np.dtype("O") -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _NON_STANDARD_CALENDARS) def test_decode_single_element_outside_timestamp_range(calendar): - cftime = _import_cftime() + import cftime + units = "days since 0001-01-01" for days in [1, 1470376]: for num_time in [days, [days], [[days]]]: @@ -304,20 +281,16 @@ def test_decode_single_element_outside_timestamp_range(calendar): num_time, units, calendar=calendar ) - if cftime.__name__ == "cftime": - expected = cftime.num2date( - days, units, calendar, 
only_use_cftime_datetimes=True - ) - else: - expected = cftime.num2date(days, units, calendar) - + expected = cftime.num2date( + days, units, calendar, only_use_cftime_datetimes=True + ) assert isinstance(actual.item(), type(expected)) -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _STANDARD_CALENDARS) def test_decode_standard_calendar_multidim_time_inside_timestamp_range(calendar): - cftime = _import_cftime() + import cftime units = "days since 0001-01-01" times1 = pd.date_range("2001-04-01", end="2001-04-05", freq="D") @@ -343,10 +316,10 @@ def test_decode_standard_calendar_multidim_time_inside_timestamp_range(calendar) assert (abs_diff2 <= np.timedelta64(1, "s")).all() -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _NON_STANDARD_CALENDARS) def test_decode_nonstandard_calendar_multidim_time_inside_timestamp_range(calendar): - cftime = _import_cftime() + import cftime units = "days since 0001-01-01" times1 = pd.date_range("2001-04-01", end="2001-04-05", freq="D") @@ -382,13 +355,12 @@ def test_decode_nonstandard_calendar_multidim_time_inside_timestamp_range(calend assert (abs_diff2 <= np.timedelta64(1, "s")).all() -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", _ALL_CALENDARS) def test_decode_multidim_time_outside_timestamp_range(calendar): + import cftime from datetime import datetime - cftime = _import_cftime() - units = "days since 0001-01-01" times1 = [datetime(1, 4, day) for day in range(1, 6)] times2 = [datetime(1, 5, day) for day in range(1, 6)] @@ -398,16 +370,8 @@ def test_decode_multidim_time_outside_timestamp_range(calendar): mdim_time[:, 0] = time1 mdim_time[:, 1] = time2 - if cftime.__name__ == "cftime": - expected1 = cftime.num2date( - time1, units, calendar, only_use_cftime_datetimes=True - ) - 
expected2 = cftime.num2date( - time2, units, calendar, only_use_cftime_datetimes=True - ) - else: - expected1 = cftime.num2date(time1, units, calendar) - expected2 = cftime.num2date(time2, units, calendar) + expected1 = cftime.num2date(time1, units, calendar, only_use_cftime_datetimes=True) + expected2 = cftime.num2date(time2, units, calendar, only_use_cftime_datetimes=True) with warnings.catch_warnings(): warnings.filterwarnings("ignore", "Unable to decode time axis") @@ -424,46 +388,38 @@ def test_decode_multidim_time_outside_timestamp_range(calendar): assert (abs_diff2 <= np.timedelta64(1, "s")).all() -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("calendar", ["360_day", "all_leap", "366_day"]) def test_decode_non_standard_calendar_single_element(calendar): - cftime = _import_cftime() + import cftime + units = "days since 0001-01-01" - try: - dt = cftime.netcdftime.datetime(2001, 2, 29) - except AttributeError: - # Must be using the standalone cftime library - dt = cftime.datetime(2001, 2, 29) + dt = cftime.datetime(2001, 2, 29) num_time = cftime.date2num(dt, units, calendar) actual = coding.times.decode_cf_datetime(num_time, units, calendar=calendar) - if cftime.__name__ == "cftime": - expected = np.asarray( - cftime.num2date(num_time, units, calendar, only_use_cftime_datetimes=True) - ) - else: - expected = np.asarray(cftime.num2date(num_time, units, calendar)) + expected = np.asarray( + cftime.num2date(num_time, units, calendar, only_use_cftime_datetimes=True) + ) assert actual.dtype == np.dtype("O") assert expected == actual -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime def test_decode_360_day_calendar(): - cftime = _import_cftime() + import cftime + calendar = "360_day" # ensure leap year doesn't matter for year in [2010, 2011, 2012, 2013, 2014]: units = f"days since {year}-01-01" num_times = np.arange(100) - if cftime.__name__ == 
"cftime": - expected = cftime.num2date( - num_times, units, calendar, only_use_cftime_datetimes=True - ) - else: - expected = cftime.num2date(num_times, units, calendar) + expected = cftime.num2date( + num_times, units, calendar, only_use_cftime_datetimes=True + ) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") @@ -477,7 +433,7 @@ def test_decode_360_day_calendar(): @arm_xfail -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize( ["num_dates", "units", "expected_list"], [ @@ -499,7 +455,7 @@ def test_cf_datetime_nan(num_dates, units, expected_list): assert_array_equal(expected, actual) -@requires_cftime_or_netCDF4 +@requires_cftime def test_decoded_cf_datetime_array_2d(): # regression test for GH1229 variable = Variable( @@ -548,7 +504,7 @@ def test_infer_datetime_units(dates, expected): ] -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize( "calendar", _NON_STANDARD_CALENDARS + ["gregorian", "proleptic_gregorian"] ) @@ -622,7 +578,7 @@ def test_infer_timedelta_units(deltas, expected): assert expected == coding.times.infer_timedelta_units(deltas) -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize( ["date_args", "expected"], [ @@ -649,7 +605,7 @@ def test_decode_cf(calendar): ds[v].attrs["units"] = "days since 2001-01-01" ds[v].attrs["calendar"] = calendar - if not has_cftime_or_netCDF4 and calendar not in _STANDARD_CALENDARS: + if not has_cftime and calendar not in _STANDARD_CALENDARS: with pytest.raises(ValueError): ds = decode_cf(ds) else: @@ -703,7 +659,7 @@ def test_decode_cf_time_bounds(): _update_bounds_attributes(ds.variables) -@requires_cftime_or_netCDF4 +@requires_cftime def test_encode_time_bounds(): time = pd.date_range("2000-01-16", periods=1) @@ -749,7 +705,7 @@ def calendar(request): @pytest.fixture() def 
times(calendar): - cftime = _import_cftime() + import cftime return cftime.num2date( np.arange(4), @@ -779,24 +735,24 @@ def times_3d(times): ) -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime def test_contains_cftime_datetimes_1d(data): assert contains_cftime_datetimes(data.time) -@pytest.mark.skipif(not has_dask, reason="dask not installed") -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime +@requires_dask def test_contains_cftime_datetimes_dask_1d(data): assert contains_cftime_datetimes(data.time.chunk()) -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime def test_contains_cftime_datetimes_3d(times_3d): assert contains_cftime_datetimes(times_3d) -@pytest.mark.skipif(not has_dask, reason="dask not installed") -@pytest.mark.skipif(not has_cftime, reason="cftime not installed") +@requires_cftime +@requires_dask def test_contains_cftime_datetimes_dask_3d(times_3d): assert contains_cftime_datetimes(times_3d.chunk()) @@ -806,13 +762,13 @@ def test_contains_cftime_datetimes_non_cftimes(non_cftime_data): assert not contains_cftime_datetimes(non_cftime_data) -@pytest.mark.skipif(not has_dask, reason="dask not installed") +@requires_dask @pytest.mark.parametrize("non_cftime_data", [DataArray([]), DataArray([1, 2])]) def test_contains_cftime_datetimes_non_cftimes_dask(non_cftime_data): assert not contains_cftime_datetimes(non_cftime_data.chunk()) -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime @pytest.mark.parametrize("shape", [(24,), (8, 3), (2, 4, 3)]) def test_encode_cf_datetime_overflow(shape): # Test for fix to GH 2272 @@ -837,7 +793,7 @@ def test_encode_cf_datetime_pandas_min(): assert calendar == expected_calendar -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime def test_time_units_with_timezone_roundtrip(calendar): # Regression test for GH 2649 expected_units = 
"days since 2000-01-01T00:00:00-05:00" diff --git a/xarray/tests/test_conventions.py b/xarray/tests/test_conventions.py index 42b2a679347..09002e252b4 100644 --- a/xarray/tests/test_conventions.py +++ b/xarray/tests/test_conventions.py @@ -21,7 +21,7 @@ from . import ( assert_array_equal, raises_regex, - requires_cftime_or_netCDF4, + requires_cftime, requires_dask, requires_netCDF4, ) @@ -81,7 +81,7 @@ def test_decode_cf_with_conflicting_fill_missing_value(): assert_identical(actual, expected) -@requires_cftime_or_netCDF4 +@requires_cftime class TestEncodeCFVariable: def test_incompatible_attributes(self): invalid_vars = [ @@ -144,7 +144,7 @@ def test_string_object_warning(self): assert_identical(original, encoded) -@requires_cftime_or_netCDF4 +@requires_cftime class TestDecodeCF: def test_dataset(self): original = Dataset( @@ -226,7 +226,7 @@ def test_invalid_time_units_raises_eagerly(self): with raises_regex(ValueError, "unable to decode time"): decode_cf(ds) - @requires_cftime_or_netCDF4 + @requires_cftime def test_dataset_repr_with_netcdf4_datetimes(self): # regression test for #347 attrs = {"units": "days since 0001-01-01", "calendar": "noleap"} @@ -239,7 +239,7 @@ def test_dataset_repr_with_netcdf4_datetimes(self): ds = decode_cf(Dataset({"time": ("time", [0, 1], attrs)})) assert "(time) datetime64[ns]" in repr(ds) - @requires_cftime_or_netCDF4 + @requires_cftime def test_decode_cf_datetime_transition_to_invalid(self): # manually create dataset with not-decoded date from datetime import datetime diff --git a/xarray/tests/test_utils.py b/xarray/tests/test_utils.py index 5bb9deaf240..af87b94393d 100644 --- a/xarray/tests/test_utils.py +++ b/xarray/tests/test_utils.py @@ -9,7 +9,7 @@ from xarray.core import duck_array_ops, utils from xarray.core.utils import either_dict_or_kwargs -from . import assert_array_equal, has_cftime, has_cftime_or_netCDF4, requires_dask +from . 
import assert_array_equal, requires_cftime, requires_dask from .test_coding_times import _all_cftime_date_types @@ -39,17 +39,12 @@ def test_safe_cast_to_index(): assert expected.dtype == actual.dtype -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime def test_safe_cast_to_index_cftimeindex(): date_types = _all_cftime_date_types() for date_type in date_types.values(): dates = [date_type(1, 1, day) for day in range(1, 20)] - - if has_cftime: - expected = CFTimeIndex(dates) - else: - expected = pd.Index(dates) - + expected = CFTimeIndex(dates) actual = utils.safe_cast_to_index(np.array(dates)) assert_array_equal(expected, actual) assert expected.dtype == actual.dtype @@ -57,7 +52,7 @@ def test_safe_cast_to_index_cftimeindex(): # Test that datetime.datetime objects are never used in a CFTimeIndex -@pytest.mark.skipif(not has_cftime_or_netCDF4, reason="cftime not installed") +@requires_cftime def test_safe_cast_to_index_datetime_datetime(): dates = [datetime(1, 1, day) for day in range(1, 20)] From 278d2e6af6abd933dd1d43ac3ae70bc306412ae1 Mon Sep 17 00:00:00 2001 From: Maximilian Roos <5635139+max-sixty@users.noreply.github.com> Date: Tue, 29 Oct 2019 11:34:33 -0400 Subject: [PATCH 2/2] upgrade black version to 19.10b0 (#3456) --- xarray/backends/api.py | 2 +- xarray/core/alignment.py | 2 +- xarray/core/combine.py | 2 +- xarray/core/computation.py | 8 ++++---- xarray/core/concat.py | 4 ++-- xarray/core/dataarray.py | 2 +- xarray/core/dataset.py | 2 +- xarray/core/groupby.py | 6 +++--- xarray/core/indexing.py | 4 ++-- xarray/core/merge.py | 4 ++-- xarray/core/variable.py | 4 ++-- xarray/plot/plot.py | 8 ++++---- xarray/tests/test_cftime_offsets.py | 2 +- xarray/tests/test_dataarray.py | 8 ++++---- xarray/tests/test_dataset.py | 6 +++--- 15 files changed, 32 insertions(+), 32 deletions(-) diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 199516116b0..d23594fc675 100644 --- a/xarray/backends/api.py +++ 
b/xarray/backends/api.py @@ -677,7 +677,7 @@ def open_dataarray( "then select the variable you want." ) else: - data_array, = dataset.data_vars.values() + (data_array,) = dataset.data_vars.values() data_array._file_obj = dataset._file_obj diff --git a/xarray/core/alignment.py b/xarray/core/alignment.py index 1a33cb955c3..41ff5a3b32d 100644 --- a/xarray/core/alignment.py +++ b/xarray/core/alignment.py @@ -252,7 +252,7 @@ def align( if not indexes and len(objects) == 1: # fast path for the trivial case - obj, = objects + (obj,) = objects return (obj.copy(deep=copy),) all_indexes = defaultdict(list) diff --git a/xarray/core/combine.py b/xarray/core/combine.py index 19c327ec597..3308dcef285 100644 --- a/xarray/core/combine.py +++ b/xarray/core/combine.py @@ -954,7 +954,7 @@ def _auto_concat( "supply the ``concat_dim`` argument " "explicitly" ) - dim, = concat_dims + (dim,) = concat_dims return concat( datasets, dim=dim, diff --git a/xarray/core/computation.py b/xarray/core/computation.py index 1393d76f283..2ab2ab78416 100644 --- a/xarray/core/computation.py +++ b/xarray/core/computation.py @@ -145,7 +145,7 @@ def result_name(objects: list) -> Any: names = {getattr(obj, "name", _DEFAULT_NAME) for obj in objects} names.discard(_DEFAULT_NAME) if len(names) == 1: - name, = names + (name,) = names else: name = None return name @@ -187,7 +187,7 @@ def build_output_coords( if len(coords_list) == 1 and not exclude_dims: # we can skip the expensive merge - unpacked_coords, = coords_list + (unpacked_coords,) = coords_list merged_vars = dict(unpacked_coords.variables) else: # TODO: save these merged indexes, instead of re-computing them later @@ -237,7 +237,7 @@ def apply_dataarray_vfunc( for variable, coords in zip(result_var, result_coords) ) else: - coords, = result_coords + (coords,) = result_coords out = DataArray(result_var, coords, name=name, fastpath=True) return out @@ -384,7 +384,7 @@ def apply_dataset_vfunc( if signature.num_outputs > 1: out = 
tuple(_fast_dataset(*args) for args in zip(result_vars, list_of_coords)) else: - coord_vars, = list_of_coords + (coord_vars,) = list_of_coords out = _fast_dataset(result_vars, coord_vars) if keep_attrs and isinstance(first_obj, Dataset): diff --git a/xarray/core/concat.py b/xarray/core/concat.py index bcab136de8d..0d19990bdd0 100644 --- a/xarray/core/concat.py +++ b/xarray/core/concat.py @@ -148,10 +148,10 @@ def _calc_concat_dim_coord(dim): dim = dim_name elif not isinstance(dim, DataArray): coord = as_variable(dim).to_index_variable() - dim, = coord.dims + (dim,) = coord.dims else: coord = dim - dim, = coord.dims + (dim,) = coord.dims return dim, coord diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 33dcad13204..0c220acaee0 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -616,7 +616,7 @@ def _level_coords(self) -> Dict[Hashable, Hashable]: if var.ndim == 1 and isinstance(var, IndexVariable): level_names = var.level_names if level_names is not None: - dim, = var.dims + (dim,) = var.dims level_coords.update({lname: dim for lname in level_names}) return level_coords diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index 3ca9dd14fae..05d9772cb7a 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -4066,7 +4066,7 @@ def reduce( if len(reduce_dims) == 1: # unpack dimensions for the benefit of functions # like np.argmin which can't handle tuple arguments - reduce_dims, = reduce_dims + (reduce_dims,) = reduce_dims elif len(reduce_dims) == var.ndim: # prefer to aggregate over axis=None rather than # axis=(0, 1) if they will be equivalent, because diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py index c3f712b31ac..353566eb345 100644 --- a/xarray/core/groupby.py +++ b/xarray/core/groupby.py @@ -321,7 +321,7 @@ def __init__( raise ValueError("`group` must have a name") group, obj, stacked_dim, inserted_dims = _ensure_1d(group, obj) - group_dim, = group.dims + (group_dim,) = group.dims 
expected_size = obj.sizes[group_dim] if group.size != expected_size: @@ -470,7 +470,7 @@ def _infer_concat_args(self, applied_example): else: coord = self._unique_coord positions = None - dim, = coord.dims + (dim,) = coord.dims if isinstance(coord, _DummyGroup): coord = None return coord, dim, positions @@ -644,7 +644,7 @@ def _concat_shortcut(self, applied, dim, positions=None): def _restore_dim_order(self, stacked): def lookup_order(dimension): if dimension == self._group.name: - dimension, = self._group.dims + (dimension,) = self._group.dims if dimension in self._obj.dims: axis = self._obj.get_axis_num(dimension) else: diff --git a/xarray/core/indexing.py b/xarray/core/indexing.py index b9809a8d2b9..f48c9e72af1 100644 --- a/xarray/core/indexing.py +++ b/xarray/core/indexing.py @@ -212,7 +212,7 @@ def get_dim_indexers(data_obj, indexers): level_indexers = defaultdict(dict) dim_indexers = {} for key, label in indexers.items(): - dim, = data_obj[key].dims + (dim,) = data_obj[key].dims if key != dim: # assume here multi-index level indexer level_indexers[dim][key] = label @@ -1368,7 +1368,7 @@ def __getitem__( if isinstance(key, tuple) and len(key) == 1: # unpack key so it can index a pandas.Index object (pandas.Index # objects don't like tuples) - key, = key + (key,) = key if getattr(key, "ndim", 0) > 1: # Return np-array if multidimensional return NumpyIndexingAdapter(self.array.values)[indexer] diff --git a/xarray/core/merge.py b/xarray/core/merge.py index db5ef9531df..389ceb155f7 100644 --- a/xarray/core/merge.py +++ b/xarray/core/merge.py @@ -277,7 +277,7 @@ def append_all(variables, indexes): def collect_from_coordinates( - list_of_coords: "List[Coordinates]" + list_of_coords: "List[Coordinates]", ) -> Dict[Hashable, List[MergeElement]]: """Collect variables and indexes to be merged from Coordinate objects.""" grouped: Dict[Hashable, List[Tuple[Variable, pd.Index]]] = {} @@ -320,7 +320,7 @@ def merge_coordinates_without_align( def determine_coords( - 
list_of_mappings: Iterable["DatasetLike"] + list_of_mappings: Iterable["DatasetLike"], ) -> Tuple[Set[Hashable], Set[Hashable]]: """Given a list of dicts with xarray object values, identify coordinates. diff --git a/xarray/core/variable.py b/xarray/core/variable.py index 7d03fd58d39..b7abdc7c462 100644 --- a/xarray/core/variable.py +++ b/xarray/core/variable.py @@ -1526,7 +1526,7 @@ def concat(cls, variables, dim="concat_dim", positions=None, shortcut=False): along the given dimension. """ if not isinstance(dim, str): - dim, = dim.dims + (dim,) = dim.dims # can't do this lazily: we need to loop through variables at least # twice @@ -1996,7 +1996,7 @@ def concat(cls, variables, dim="concat_dim", positions=None, shortcut=False): arrays, if possible. """ if not isinstance(dim, str): - dim, = dim.dims + (dim,) = dim.dims variables = list(variables) first_var = variables[0] diff --git a/xarray/plot/plot.py b/xarray/plot/plot.py index a288f195e32..ca68f617144 100644 --- a/xarray/plot/plot.py +++ b/xarray/plot/plot.py @@ -83,8 +83,8 @@ def _infer_line_data(darray, x, y, hue): ) else: - xdim, = darray[xname].dims - huedim, = darray[huename].dims + (xdim,) = darray[xname].dims + (huedim,) = darray[huename].dims yplt = darray.transpose(xdim, huedim) else: @@ -102,8 +102,8 @@ def _infer_line_data(darray, x, y, hue): ) else: - ydim, = darray[yname].dims - huedim, = darray[huename].dims + (ydim,) = darray[yname].dims + (huedim,) = darray[huename].dims xplt = darray.transpose(ydim, huedim) huelabel = label_from_attrs(darray[huename]) diff --git a/xarray/tests/test_cftime_offsets.py b/xarray/tests/test_cftime_offsets.py index 142769dbbe7..343e059f53c 100644 --- a/xarray/tests/test_cftime_offsets.py +++ b/xarray/tests/test_cftime_offsets.py @@ -1187,5 +1187,5 @@ def test_dayofyear_after_cftime_range(freq): def test_cftime_range_standard_calendar_refers_to_gregorian(): from cftime import DatetimeGregorian - result, = cftime_range("2000", periods=1) + (result,) = 
cftime_range("2000", periods=1) assert isinstance(result, DatetimeGregorian) diff --git a/xarray/tests/test_dataarray.py b/xarray/tests/test_dataarray.py index ad474d533be..4b3ffdc021a 100644 --- a/xarray/tests/test_dataarray.py +++ b/xarray/tests/test_dataarray.py @@ -3125,11 +3125,11 @@ def test_align_copy(self): # Trivial align - 1 element x = DataArray([1, 2, 3], coords=[("a", [1, 2, 3])]) - x2, = align(x, copy=False) + (x2,) = align(x, copy=False) assert_identical(x, x2) assert source_ndarray(x2.data) is source_ndarray(x.data) - x2, = align(x, copy=True) + (x2,) = align(x, copy=True) assert_identical(x, x2) assert source_ndarray(x2.data) is not source_ndarray(x.data) @@ -3214,7 +3214,7 @@ def test_align_indexes(self): assert_identical(expected_x2, x2) assert_identical(expected_y2, y2) - x2, = align(x, join="outer", indexes={"a": [-2, 7, 10, -1]}) + (x2,) = align(x, join="outer", indexes={"a": [-2, 7, 10, -1]}) expected_x2 = DataArray([3, np.nan, 2, 1], coords=[("a", [-2, 7, 10, -1])]) assert_identical(expected_x2, x2) @@ -3293,7 +3293,7 @@ def test_broadcast_arrays_nocopy(self): assert source_ndarray(x2.data) is source_ndarray(x.data) # single-element broadcast (trivial case) - x2, = broadcast(x) + (x2,) = broadcast(x) assert_identical(x, x2) assert source_ndarray(x2.data) is source_ndarray(x.data) diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py index dfb3da89569..eab6040e17e 100644 --- a/xarray/tests/test_dataset.py +++ b/xarray/tests/test_dataset.py @@ -1945,7 +1945,7 @@ def test_align_nocopy(self): def test_align_indexes(self): x = Dataset({"foo": DataArray([1, 2, 3], dims="x", coords=[("x", [1, 2, 3])])}) - x2, = align(x, indexes={"x": [2, 3, 1]}) + (x2,) = align(x, indexes={"x": [2, 3, 1]}) expected_x2 = Dataset( {"foo": DataArray([2, 3, 1], dims="x", coords={"x": [2, 3, 1]})} ) @@ -1973,7 +1973,7 @@ def test_broadcast(self): }, {"c": ("x", [4])}, ) - actual, = broadcast(ds) + (actual,) = broadcast(ds) assert_identical(expected, 
actual) ds_x = Dataset({"foo": ("x", [1])}) @@ -1995,7 +1995,7 @@ def test_broadcast_nocopy(self): x = Dataset({"foo": (("x", "y"), [[1, 1]])}) y = Dataset({"bar": ("y", [2, 3])}) - actual_x, = broadcast(x) + (actual_x,) = broadcast(x) assert_identical(x, actual_x) assert source_ndarray(actual_x["foo"].data) is source_ndarray(x["foo"].data)