diff --git a/holoviews/operation/element.py b/holoviews/operation/element.py
index 50fea80769..e48d7ea047 100644
--- a/holoviews/operation/element.py
+++ b/holoviews/operation/element.py
@@ -38,6 +38,7 @@
 from ..element.raster import RGB, Image
 from ..element.util import categorical_aggregate2d  # noqa (API import)
 from ..streams import RangeXY
+from ..util.locator import MaxNLocator
 
 column_interfaces = [ArrayInterface, DictInterface, PandasInterface]
 
@@ -560,13 +561,9 @@ class contours(Operation):
 
     def _process(self, element, key=None):
         try:
-            from matplotlib.axes import Axes
-            from matplotlib.contour import QuadContourSet
-            from matplotlib.dates import date2num, num2date
-            from matplotlib.figure import Figure
+            from contourpy import FillType, LineType, contour_generator
         except ImportError:
-            raise ImportError("contours operation requires matplotlib.")
-        extent = element.range(0) + element.range(1)[::-1]
+            raise ImportError("contours operation requires contourpy.")
 
         xs = element.dimension_values(0, True, flat=False)
         ys = element.dimension_values(1, True, flat=False)
@@ -586,6 +583,15 @@ def _process(self, element, key=None):
         # if any data is a datetime, transform to matplotlib's numerical format
         data_is_datetime = tuple(isdatetime(arr) for k, arr in enumerate(data))
         if any(data_is_datetime):
+            if any(data_is_datetime[:2]) and self.p.filled:
+                raise RuntimeError("Datetime spatial coordinates are not supported "
+                                   "for filled contour calculations.")
+
+            try:
+                from matplotlib.dates import date2num, num2date
+            except ImportError:
+                raise ImportError("contours operation using datetimes requires matplotlib.") from None
+
             data = tuple(
                 date2num(d) if is_datetime else d
                 for d, is_datetime in zip(data, data_is_datetime)
@@ -598,61 +604,97 @@ def _process(self, element, key=None):
             contour_type = Contours
         vdims = element.vdims[:1]
 
-        kwargs = {}
         levels = self.p.levels
         zmin, zmax = element.range(2)
-        if isinstance(self.p.levels, int):
+        if isinstance(levels, int):
             if zmin == zmax:
                 contours = contour_type([], [xdim, ydim], vdims)
                 return (element * contours) if self.p.overlaid else contours
-            data += (levels,)
+            else:
+                # The +1 is consistent with Matplotlib's use of MaxNLocator for contours.
+                locator = MaxNLocator(levels + 1)
+                levels = locator.tick_values(zmin, zmax)
         else:
-            kwargs = {'levels': levels}
+            levels = np.array(levels)
+
+        if data_is_datetime[2]:
+            levels = date2num(levels)
 
-        fig = Figure()
-        ax = Axes(fig, [0, 0, 1, 1])
-        contour_set = QuadContourSet(ax, *data, filled=self.p.filled,
-                                     extent=extent, **kwargs)
-        levels = np.array(contour_set.get_array())
         crange = levels.min(), levels.max()
         if self.p.filled:
-            levels = levels[:-1] + np.diff(levels)/2.
             vdims = [vdims[0].clone(range=crange)]
 
+        cont_gen = contour_generator(
+            *data,
+            line_type=LineType.ChunkCombinedOffset,
+            fill_type=FillType.ChunkCombinedOffsetOffset,
+        )
+
+        def points_to_datetime(points):
+            # transform x/y coordinates back to datetimes
+            xs, ys = np.split(points, 2, axis=1)
+            if data_is_datetime[0]:
+                xs = np.array(num2date(xs))
+            if data_is_datetime[1]:
+                ys = np.array(num2date(ys))
+            return np.concatenate((xs, ys), axis=1)
+
         paths = []
-        empty = np.array([[np.nan, np.nan]])
-        for level, cset in zip(levels, contour_set.collections):
-            exteriors = []
-            interiors = []
-            for geom in cset.get_paths():
-                interior = []
-                polys = geom.to_polygons(closed_only=False)
-                for ncp, cp in enumerate(polys):
-                    if any(data_is_datetime[0:2]):
-                        # transform x/y coordinates back to datetimes
-                        xs, ys = np.split(cp, 2, axis=1)
-                        if data_is_datetime[0]:
-                            xs = np.array(num2date(xs))
-                        if data_is_datetime[1]:
-                            ys = np.array(num2date(ys))
-                        cp = np.concatenate((xs, ys), axis=1)
-                    if ncp == 0:
-                        exteriors.append(cp)
+        if self.p.filled:
+            empty = np.array([[np.nan, np.nan]])
+            for lower_level, upper_level in zip(levels[:-1], levels[1:]):
+                filled = cont_gen.filled(lower_level, upper_level)
+                # Only have to consider last index 0 as we are using contourpy without chunking
+                if (points := filled[0][0]) is None:
+                    continue
+
+                exteriors = []
+                interiors = []
+                if any(data_is_datetime[0:2]):
+                    points = points_to_datetime(points)
+
+                offsets = filled[1][0]
+                outer_offsets = filled[2][0]
+
+                # Loop through exterior polygon boundaries.
+                for jstart, jend in zip(outer_offsets[:-1], outer_offsets[1:]):
+                    if exteriors:
                         exteriors.append(empty)
-                    else:
-                        interior.append(cp)
-                if len(polys):
+                    exteriors.append(points[offsets[jstart]:offsets[jstart + 1]])
+
+                    # Loop over the (jend-jstart-1) interior boundaries.
+                    interior = [points[offsets[j]:offsets[j + 1]] for j in range(jstart+1, jend)]
                     interiors.append(interior)
-            if not exteriors:
-                continue
-            geom = {
-                element.vdims[0].name:
-                num2date(level) if data_is_datetime[2] else level,
-                (xdim, ydim): np.concatenate(exteriors[:-1])
-            }
-            if self.p.filled and interiors:
-                geom['holes'] = interiors
-            paths.append(geom)
+                level = (lower_level + upper_level) / 2
+                geom = {
+                    element.vdims[0].name:
+                    num2date(level) if data_is_datetime[2] else level,
+                    (xdim, ydim): np.concatenate(exteriors) if exteriors else [],
+                }
+                if interiors:
+                    geom['holes'] = interiors
+                paths.append(geom)
+        else:
+            for level in levels:
+                lines = cont_gen.lines(level)
+                # Only have to consider last index 0 as we are using contourpy without chunking
+                if (points := lines[0][0]) is None:
+                    continue
+
+                if any(data_is_datetime[0:2]):
+                    points = points_to_datetime(points)
+
+                offsets = lines[1][0]
+                if offsets is not None and len(offsets) > 2:
+                    # Casting offsets to int64 to avoid possible numpy UFuncOutputCastingError
+                    offsets = offsets[1:-1].astype(np.int64)
+                    points = np.insert(points, offsets, np.nan, axis=0)
+                geom = {
+                    element.vdims[0].name:
+                    num2date(level) if data_is_datetime[2] else level,
+                    (xdim, ydim): points if points is not None else [],
+                }
+                paths.append(geom)
 
         contours = contour_type(paths, label=element.label, kdims=element.kdims, vdims=vdims)
         if self.p.overlaid:
             contours = element * contours
diff --git a/holoviews/tests/operation/test_operation.py b/holoviews/tests/operation/test_operation.py
index ffabf420c4..0746560418 100644
--- a/holoviews/tests/operation/test_operation.py
+++ b/holoviews/tests/operation/test_operation.py
@@ -94,20 +94,126 @@ def test_image_gradient(self):
         self.assertEqual(op_img, img.clone(np.array([[3.162278, 3.162278], [3.162278, 3.162278]]), group='Gradient'))
 
     def test_image_contours(self):
-        img = Image(np.array([[0, 1, 0], [3, 4, 5.], [6, 7, 8]]))
+        img = Image(np.array([[0, 1, 0], [0, 1, 0]]))
         op_contours = contours(img, levels=[0.5])
-        contour = Contours([[(-0.166667, 0.333333, 0.5), (-0.333333, 0.277778, 0.5),
-                             (np.nan, np.nan, 0.5), (0.333333, 0.3, 0.5),
-                             (0.166667, 0.333333, 0.5)]],
+        # Note multiple lines which are nan-separated.
+        contour = Contours([[(-0.166667, 0.25, 0.5), (-0.1666667, -0.25, 0.5),
+                             (np.nan, np.nan, 0.5), (0.1666667, -0.25, 0.5),
+                             (0.1666667, 0.25, 0.5)]],
                            vdims=img.vdims)
         self.assertEqual(op_contours, contour)
 
+    def test_image_contours_empty(self):
+        img = Image(np.array([[0, 1, 0], [0, 1, 0]]))
+        # Contour level outside of data limits
+        op_contours = contours(img, levels=[23.0])
+        contour = Contours([], vdims=img.vdims)
+        self.assertEqual(op_contours, contour)
+
+    def test_image_contours_auto_levels(self):
+        z = np.array([[0, 1, 0], [3, 4, 5.], [6, 7, 8]])
+        img = Image(z)
+        for nlevels in range(3, 20):
+            op_contours = contours(img, levels=nlevels)
+            levels = [item['z'] for item in op_contours.data]
+            assert len(levels) <= nlevels + 2
+            assert np.min(levels) <= z.min()
+            assert np.max(levels) < z.max()
+
     def test_image_contours_no_range(self):
         img = Image(np.zeros((2, 2)))
         op_contours = contours(img, levels=2)
         contour = Contours([], vdims=img.vdims)
         self.assertEqual(op_contours, contour)
 
+    def test_image_contours_x_datetime(self):
+        x = np.array(['2023-09-01', '2023-09-03', '2023-09-05'], dtype='datetime64')
+        y = [14, 15]
+        z = np.array([[0, 1, 0], [0, 1, 0]])
+        img = Image((x, y, z))
+        op_contours = contours(img, levels=[0.5])
+        # Note multiple lines which are nan-separated.
+        tz = dt.timezone.utc
+        expected_x = np.array(
+            [dt.datetime(2023, 9, 2, tzinfo=tz), dt.datetime(2023, 9, 2, tzinfo=tz), np.nan,
+             dt.datetime(2023, 9, 4, tzinfo=tz), dt.datetime(2023, 9, 4, tzinfo=tz)],
+            dtype=object)
+
+        # Separately compare nans and datetimes
+        x = op_contours.dimension_values('x')
+        mask = np.array([True, True, False, True, True])  # Mask ignoring nans
+        np.testing.assert_array_equal(x[mask], expected_x[mask])
+        np.testing.assert_array_equal(x[~mask].astype(float), expected_x[~mask].astype(float))
+
+        np.testing.assert_array_almost_equal(op_contours.dimension_values('y').astype(float),
+                                             [15, 14, np.nan, 14, 15])
+        np.testing.assert_array_almost_equal(op_contours.dimension_values('z'), [0.5]*5)
+
+    def test_image_contours_y_datetime(self):
+        x = [14, 15, 16]
+        y = np.array(['2023-09-01', '2023-09-03'], dtype='datetime64')
+        z = np.array([[0, 1, 0], [0, 1, 0]])
+        img = Image((x, y, z))
+        op_contours = contours(img, levels=[0.5])
+        # Note multiple lines which are nan-separated.
+        np.testing.assert_array_almost_equal(op_contours.dimension_values('x').astype(float),
+                                             [14.5, 14.5, np.nan, 15.5, 15.5])
+
+        tz = dt.timezone.utc
+        expected_y = np.array(
+            [dt.datetime(2023, 9, 3, tzinfo=tz), dt.datetime(2023, 9, 1, tzinfo=tz), np.nan,
+             dt.datetime(2023, 9, 1, tzinfo=tz), dt.datetime(2023, 9, 3, tzinfo=tz)],
+            dtype=object)
+
+        # Separately compare nans and datetimes
+        y = op_contours.dimension_values('y')
+        mask = np.array([True, True, False, True, True])  # Mask ignoring nans
+        np.testing.assert_array_equal(y[mask], expected_y[mask])
+        np.testing.assert_array_equal(y[~mask].astype(float), expected_y[~mask].astype(float))
+
+        np.testing.assert_array_almost_equal(op_contours.dimension_values('z'), [0.5]*5)
+
+    def test_image_contours_xy_datetime(self):
+        x = np.array(['2023-09-01', '2023-09-03', '2023-09-05'], dtype='datetime64')
+        y = np.array(['2023-10-07', '2023-10-08'], dtype='datetime64')
+        z = np.array([[0, 1, 0], [0, 1, 0]])
+        img = Image((x, y, z))
+        op_contours = contours(img, levels=[0.5])
+        # Note multiple lines which are nan-separated.
+
+        tz = dt.timezone.utc
+        expected_x = np.array(
+            [dt.datetime(2023, 9, 2, tzinfo=tz), dt.datetime(2023, 9, 2, tzinfo=tz), np.nan,
+             dt.datetime(2023, 9, 4, tzinfo=tz), dt.datetime(2023, 9, 4, tzinfo=tz)],
+            dtype=object)
+        expected_y = np.array(
+            [dt.datetime(2023, 10, 8, tzinfo=tz), dt.datetime(2023, 10, 7, tzinfo=tz), np.nan,
+             dt.datetime(2023, 10, 7, tzinfo=tz), dt.datetime(2023, 10, 8, tzinfo=tz)],
+            dtype=object)
+
+        # Separately compare nans and datetimes
+        x = op_contours.dimension_values('x')
+        mask = np.array([True, True, False, True, True])  # Mask ignoring nans
+        np.testing.assert_array_equal(x[mask], expected_x[mask])
+        np.testing.assert_array_equal(x[~mask].astype(float), expected_x[~mask].astype(float))
+
+        y = op_contours.dimension_values('y')
+        np.testing.assert_array_equal(y[mask], expected_y[mask])
+        np.testing.assert_array_equal(y[~mask].astype(float), expected_y[~mask].astype(float))
+
+        np.testing.assert_array_almost_equal(op_contours.dimension_values('z'), [0.5]*5)
+
+    def test_image_contours_z_datetime(self):
+        z = np.array([['2023-09-10', '2023-09-10'], ['2023-09-10', '2023-09-12']], dtype='datetime64')
+        img = Image(z)
+        op_contours = contours(img, levels=[np.datetime64('2023-09-11')])
+        np.testing.assert_array_almost_equal(op_contours.dimension_values('x'), [0.25, 0.0])
+        np.testing.assert_array_almost_equal(op_contours.dimension_values('y'), [0.0, -0.25])
+        expected_z = np.array([
+            dt.datetime(2023, 9, 11, 0, 0, tzinfo=dt.timezone.utc),
+            dt.datetime(2023, 9, 11, 0, 0, tzinfo=dt.timezone.utc)], dtype=object)
+        np.testing.assert_array_equal(op_contours.dimension_values('z'), expected_z)
+
     def test_qmesh_contours(self):
         qmesh = QuadMesh(([0, 1, 2], [1, 2, 3], np.array([[0, 1, 0], [3, 4, 5.], [6, 7, 8]])))
         op_contours = contours(qmesh, levels=[0.5])
@@ -146,13 +252,70 @@ def test_qmesh_curvilinear_edges_contours(self):
         self.assertEqual(op_contours, contour)
 
     def test_image_contours_filled(self):
+        img = Image(np.array([[0, 2, 0], [0, 2, 0]]))
+        # Two polygons (nan-separated) without holes
+        op_contours = contours(img, filled=True, levels=[0.5, 1.5])
+        data = [[(-0.25, -0.25, 1), (-0.08333333, -0.25, 1), (-0.08333333, 0.25, 1),
+                 (-0.25, 0.25, 1), (-0.25, -0.25, 1), (np.nan, np.nan, 1), (0.08333333, -0.25, 1),
+                 (0.25, -0.25, 1), (0.25, 0.25, 1), (0.08333333, 0.25, 1), (0.08333333, -0.25, 1)]]
+        polys = Polygons(data, vdims=img.vdims[0].clone(range=(0.5, 1.5)))
+        self.assertEqual(op_contours, polys)
+
+    def test_image_contours_filled_with_hole(self):
+        img = Image(np.array([[0, 0, 0], [0, 1, 0.], [0, 0, 0]]))
+        # Single polygon with hole
+        op_contours = contours(img, filled=True, levels=[0.25, 0.75])
+        data = [[(-0.25, 0.0, 0.5), (0.0, -0.25, 0.5), (0.25, 0.0, 0.5), (0.0, 0.25, 0.5),
+                 (-0.25, 0.0, 0.5)]]
+        polys = Polygons(data, vdims=img.vdims[0].clone(range=(0.25, 0.75)))
+        self.assertEqual(op_contours, polys)
+        expected_holes = [[[np.array([[0.0, -0.08333333], [-0.08333333, 0.0], [0.0, 0.08333333],
+                                      [0.08333333, 0.0], [0.0, -0.08333333]])]]]
+        np.testing.assert_array_almost_equal(op_contours.holes(), expected_holes)
+
+    def test_image_contours_filled_multi_holes(self):
+        img = Image(np.array([[0, 0, 0, 0, 0], [0, 1, 0, 1, 0], [0, 0, 0, 0, 0]]))
+        # Single polygon with two holes
+        op_contours = contours(img, filled=True, levels=[-0.5, 0.5])
+        data = [[(-0.4, -0.3333333, 0), (-0.2, -0.3333333, 0), (0, -0.3333333, 0),
+                 (0.2, -0.3333333, 0), (0.4, -0.3333333, 0), (0.4, 0, 0), (0.4, 0.3333333, 0),
+                 (0.2, 0.3333333, 0), (0, 0.3333333, 0), (-0.2, 0.3333333, 0), (-0.4, 0.3333333, 0),
+                 (-0.4, 0, 0), (-0.4, -0.3333333, 0)]]
+        polys = Polygons(data, vdims=img.vdims[0].clone(range=(-0.5, 0.5)))
+        self.assertEqual(op_contours, polys)
+        expected_holes = [[[np.array([[-0.2, -0.16666667], [-0.3, 0], [-0.2, 0.16666667], [-0.1, 0],
+                                      [-0.2, -0.16666667]]),
+                            np.array([[0.2, -0.16666667], [0.1, 0], [0.2, 0.16666667], [0.3, 0],
+                                      [0.2, -0.16666667]])]]]
+        np.testing.assert_array_almost_equal(op_contours.holes(), expected_holes)
+
+    def test_image_contours_filled_empty(self):
         img = Image(np.array([[0, 1, 0], [3, 4, 5.], [6, 7, 8]]))
-        op_contours = contours(img, filled=True, levels=[2, 2.5])
-        data = [[(0., 0.166667, 2.25), (0.333333, 0.166667, 2.25), (0.333333, 0.2, 2.25), (0., 0.222222, 2.25),
-                 (-0.333333, 0.111111, 2.25), (-0.333333, 0.055556, 2.25), (0., 0.166667, 2.25)]]
-        polys = Polygons(data, vdims=img.vdims[0].clone(range=(2, 2.5)))
+        # Contour level outside of data limits
+        op_contours = contours(img, filled=True, levels=[20.0, 23.0])
+        polys = Polygons([], vdims=img.vdims[0].clone(range=(20.0, 23.0)))
         self.assertEqual(op_contours, polys)
 
+    def test_image_contours_filled_auto_levels(self):
+        z = np.array([[0, 1, 0], [3, 4, 5], [6, 7, 8]])
+        img = Image(z)
+        for nlevels in range(3, 20):
+            op_contours = contours(img, filled=True, levels=nlevels)
+            levels = [item['z'] for item in op_contours.data]
+            assert len(levels) <= nlevels + 1
+            delta = 0.5*(levels[1] - levels[0])
+            assert np.min(levels) <= z.min() + delta
+            assert np.max(levels) >= z.max() - delta
+
+    def test_image_contours_filled_x_datetime(self):
+        x = np.array(['2023-09-01', '2023-09-05', '2023-09-09'], dtype='datetime64')
+        y = np.array([6, 7])
+        z = np.array([[0, 2, 0], [0, 2, 0]])
+        img = Image((x, y, z))
+        msg = r'Datetime spatial coordinates are not supported for filled contour calculations.'
+        with pytest.raises(RuntimeError, match=msg):
+            _ = contours(img, filled=True, levels=[0.5, 1.5])
+
     def test_points_histogram(self):
         points = Points([float(i) for i in range(10)])
         op_hist = histogram(points, num_bins=3, normed=True)
diff --git a/holoviews/tests/util/test_locator.py b/holoviews/tests/util/test_locator.py
new file mode 100644
index 0000000000..987ef0e49c
--- /dev/null
+++ b/holoviews/tests/util/test_locator.py
@@ -0,0 +1,51 @@
+import numpy as np
+import pytest
+
+from ...util.locator import MaxNLocator
+
+
+@pytest.mark.parametrize(
+    "n, vmin, vmax, expected",
+    [
+        (1, 20, 100, [0, 80, 160]),
+        (2, 20, 100, [0, 40, 80, 120]),
+        (3, 20, 100, [0, 30, 60, 90, 120]),
+        (4, 20, 100, [20, 40, 60, 80, 100]),
+        (5, 20, 100, [20, 40, 60, 80, 100]),
+        (6, 20, 100, [15, 30, 45, 60, 75, 90, 105]),
+        (7, 20, 100, [15, 30, 45, 60, 75, 90, 105]),
+        (8, 20, 100, [20, 30, 40, 50, 60, 70, 80, 90, 100]),
+        (9, 20, 100, [20, 30, 40, 50, 60, 70, 80, 90, 100]),
+        (10, 20, 100, [16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104]),
+        (1, 1e-4, 1e-3, [0, 1e-3]),
+        (2, 1e-4, 1e-3, [0, 5e-4, 1e-3]),
+        (3, 1e-4, 1e-3, [0, 3e-4, 6e-4, 9e-4, 1.2e-3]),
+        (4, 1e-4, 1e-3, [0, 2.5e-4, 5e-4, 7.5e-4, 1e-3]),
+        (5, 1e-4, 1e-3, [0, 2e-4, 4e-4, 6e-4, 8e-4, 1e-3]),
+        (6, 1e-4, 1e-3, [0, 1.5e-4, 3e-4, 4.5e-4, 6e-4, 7.5e-4, 9e-4, 1.05e-3]),
+        (7, 1e-4, 1e-3, [0, 1.5e-4, 3e-4, 4.5e-4, 6e-4, 7.5e-4, 9e-4, 1.05e-3]),
+        (8, 1e-4, 1e-3, [0, 1.5e-4, 3e-4, 4.5e-4, 6e-4, 7.5e-4, 9e-4, 1.05e-3]),
+        (9, 1e-4, 1e-3, [1e-4, 2e-4, 3e-4, 4e-4, 5e-4, 6e-4, 7e-4, 8e-4, 9e-4, 1e-3]),
+        (10, 1e-4, 1e-3, [1e-4, 2e-4, 3e-4, 4e-4, 5e-4, 6e-4, 7e-4, 8e-4, 9e-4, 1e-3]),
+        (2, -1e15, 1e15, [-1e15, 0, 1e15]),
+        (4, -1e15, 1e15, [-1e15, -5e14, 0, 5e14, 1e15]),
+        (8, -1e15, 1e15, [-1e15, -7.5e14, -5e14, -2.5e14, 0, 2.5e14, 5e14, 7.5e14, 1e15]),
+        (5, 0, 0.85e-50, [0, 2e-51, 4e-51, 6e-51, 8e-51, 1e-50]),
+        (5, -0.85e-50, 0, [-1e-50, -8e-51, -6e-51, -4e-51, -2e-51, 0]),
+        (5, 1.23, 1.23, [1.23]*5),
+    ],
+)
+def test_max_n_locator(n, vmin, vmax, expected):
+    locator = MaxNLocator(n)
+    ticks = locator.tick_values(vmin, vmax)
+    np.testing.assert_almost_equal(ticks, expected)
+
+    # Same results if swap vmin and vmax
+    ticks = locator.tick_values(vmax, vmin)
+    np.testing.assert_almost_equal(ticks, expected)
+
+
+@pytest.mark.parametrize("n", (0, -1, -2))
+def test_max_n_locator_invalid_n(n):
+    with pytest.raises(ValueError):
+        _ = MaxNLocator(n)
diff --git a/holoviews/util/locator.py b/holoviews/util/locator.py
new file mode 100644
index 0000000000..6d9083220c
--- /dev/null
+++ b/holoviews/util/locator.py
@@ -0,0 +1,173 @@
+"""
+Minimal set of functionality of Matplotlib's MaxNLocator to choose contour
+levels without having to have Matplotlib installed.
+Taken from Matplotlib 3.8.0.
+"""
+import math
+
+import numpy as np
+
+
+class _Edge_integer:
+    """
+    Helper for `.MaxNLocator`, `.MultipleLocator`, etc.
+
+    Take floating-point precision limitations into account when calculating
+    tick locations as integer multiples of a step.
+    """
+    def __init__(self, step, offset):
+        """
+        Parameters
+        ----------
+        step : float > 0
+            Interval between ticks.
+        offset : float
+            Offset subtracted from the data limits prior to calculating tick
+            locations.
+        """
+        if step <= 0:
+            raise ValueError("'step' must be positive")
+        self.step = step
+        self._offset = abs(offset)
+
+    def closeto(self, ms, edge):
+        # Allow more slop when the offset is large compared to the step.
+        if self._offset > 0:
+            digits = np.log10(self._offset / self.step)
+            tol = max(1e-10, 10 ** (digits - 12))
+            tol = min(0.4999, tol)
+        else:
+            tol = 1e-10
+        return abs(ms - edge) < tol
+
+    def le(self, x):
+        """Return the largest n: n*step <= x."""
+        d, m = divmod(x, self.step)
+        if self.closeto(m / self.step, 1):
+            return d + 1
+        return d
+
+    def ge(self, x):
+        """Return the smallest n: n*step >= x."""
+        d, m = divmod(x, self.step)
+        if self.closeto(m / self.step, 0):
+            return d
+        return d + 1
+
+
+def nonsingular(vmin, vmax, expander=0.001, tiny=1e-15, increasing=True):
+    """
+    Modify the endpoints of a range as needed to avoid singularities.
+
+    Parameters
+    ----------
+    vmin, vmax : float
+        The initial endpoints.
+    expander : float, default: 0.001
+        Fractional amount by which *vmin* and *vmax* are expanded if
+        the original interval is too small, based on *tiny*.
+    tiny : float, default: 1e-15
+        Threshold for the ratio of the interval to the maximum absolute
+        value of its endpoints. If the interval is smaller than
+        this, it will be expanded. This value should be around
+        1e-15 or larger; otherwise the interval will be approaching
+        the double precision resolution limit.
+    increasing : bool, default: True
+        If True, swap *vmin*, *vmax* if *vmin* > *vmax*.
+
+    Returns
+    -------
+    vmin, vmax : float
+        Endpoints, expanded and/or swapped if necessary.
+        If either input is inf or NaN, or if both inputs are 0 or very
+        close to zero, it returns -*expander*, *expander*.
+    """
+
+    if (not np.isfinite(vmin)) or (not np.isfinite(vmax)):
+        return -expander, expander
+
+    swapped = False
+    if vmax < vmin:
+        vmin, vmax = vmax, vmin
+        swapped = True
+
+    # Expand vmin, vmax to float: if they were integer types, they can wrap
+    # around in abs (abs(np.int8(-128)) == -128) and vmax - vmin can overflow.
+    vmin, vmax = map(float, [vmin, vmax])
+
+    maxabsvalue = max(abs(vmin), abs(vmax))
+    if maxabsvalue < (1e6 / tiny) * np.finfo(float).tiny:
+        vmin = -expander
+        vmax = expander
+
+    elif vmax - vmin <= maxabsvalue * tiny:
+        if vmax == 0 and vmin == 0:
+            vmin = -expander
+            vmax = expander
+        else:
+            vmin -= expander*abs(vmin)
+            vmax += expander*abs(vmax)
+
+    if swapped and not increasing:
+        vmin, vmax = vmax, vmin
+    return vmin, vmax
+
+
+def scale_range(vmin, vmax, n=1, threshold=100):
+    dv = abs(vmax - vmin)  # > 0 as nonsingular is called before.
+    meanv = (vmax + vmin) / 2
+    if abs(meanv) / dv < threshold:
+        offset = 0
+    else:
+        offset = math.copysign(10 ** (math.log10(abs(meanv)) // 1), meanv)
+    scale = 10 ** (math.log10(dv / n) // 1)
+    return scale, offset
+
+
+class MaxNLocator:
+    _extended_steps = np.array([
+        0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5, 0.6, 0.8,
+        1., 1.5, 2., 2.5, 3., 4., 5., 6., 8., 10., 15.])
+    _min_n_ticks = 1
+
+    def __init__(self, nbins: int = 10):
+        if nbins < 1:
+            raise ValueError("MaxNLocator nbins must be an integer greater than zero")
+        self.nbins = nbins
+
+    def _raw_ticks(self, vmin, vmax):
+        scale, offset = scale_range(vmin, vmax, self.nbins)
+        _vmin = vmin - offset
+        _vmax = vmax - offset
+        steps = self._extended_steps * scale
+
+        raw_step = ((_vmax - _vmin) / self.nbins)
+        large_steps = steps >= raw_step
+
+        # Find index of smallest large step
+        istep = np.nonzero(large_steps)[0][0]
+
+        # Start at smallest of the steps greater than the raw step, and check
+        # if it provides enough ticks. If not, work backwards through
+        # smaller steps until one is found that provides enough ticks.
+        for step in steps[:istep+1][::-1]:
+            best_vmin = (_vmin // step) * step
+
+            # Find tick locations spanning the vmin-vmax range, taking into
+            # account degradation of precision when there is a large offset.
+            # The edge ticks beyond vmin and/or vmax are needed for the
+            # "round_numbers" autolimit mode.
+            edge = _Edge_integer(step, offset)
+            low = edge.le(_vmin - best_vmin)
+            high = edge.ge(_vmax - best_vmin)
+            ticks = np.arange(low, high + 1) * step + best_vmin
+            # Count only the ticks that will be displayed.
+            nticks = ((ticks <= _vmax) & (ticks >= _vmin)).sum()
+            if nticks >= self._min_n_ticks:
+                break
+        return ticks + offset
+
+    def tick_values(self, vmin, vmax):
+        vmin, vmax = nonsingular(vmin, vmax, expander=1e-13, tiny=1e-14)
+        locs = self._raw_ticks(vmin, vmax)
+        return locs
diff --git a/setup.py b/setup.py
index 47a1a19c00..e7d19bce39 100644
--- a/setup.py
+++ b/setup.py
@@ -39,6 +39,7 @@
     'pillow',
     'plotly >=4.0',
     'ipython >=5.4.0',
+    'contourpy',
 ]
 
 # Optional tests dependencies, i.e. one should be able
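
Not part of the patch itself: a minimal sketch of how the reworked operation and the vendored locator can be exercised once this change is applied. It assumes holoviews with this patch plus contourpy are installed; the array data and variable names below are arbitrary and purely illustrative.

    import numpy as np
    import holoviews as hv
    from holoviews.operation import contours
    from holoviews.util.locator import MaxNLocator

    z = np.array([[0, 1, 0], [3, 4, 5.0], [6, 7, 8]])
    img = hv.Image(z)

    # Explicit levels: iso-lines traced via contourpy's LineType.ChunkCombinedOffset path.
    line_contours = contours(img, levels=[0.5, 4.5])

    # Integer levels: boundaries chosen by the vendored MaxNLocator (levels + 1 bins),
    # then filled polygons built from FillType.ChunkCombinedOffsetOffset output.
    filled_contours = contours(img, filled=True, levels=5)

    # The same boundary selection can be reproduced directly with the locator.
    boundaries = MaxNLocator(5 + 1).tick_values(z.min(), z.max())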