diff --git a/holoviews/__init__.py b/holoviews/__init__.py index 49b55e3246..7d25931b24 100644 --- a/holoviews/__init__.py +++ b/holoviews/__init__.py @@ -139,7 +139,7 @@ def __call__(self, *args, **opts): # noqa (dummy signature) try: exec(code) except Exception as e: - print("Warning: Could not load %r [%r]" % (filename, str(e))) + print(f"Warning: Could not load {filename!r} [{str(e)!r}]") del f, code break del filename diff --git a/holoviews/annotators.py b/holoviews/annotators.py index 4d596ba4c0..80a308b619 100644 --- a/holoviews/annotators.py +++ b/holoviews/annotators.py @@ -224,7 +224,7 @@ def __init__(self, object=None, **params): super().__init__(None, **params) self.object = self._process_element(object) self._table_row = Row() - self.editor = Tabs(('%s' % param_name(self.name), self._table_row)) + self.editor = Tabs((f'{param_name(self.name)}', self._table_row)) self.plot = DynamicMap(self._get_plot) self.plot.callback.inputs[:] = [self.object] self._tables = [] @@ -336,19 +336,19 @@ class PathAnnotator(Annotator): def __init__(self, object=None, **params): self._vertex_table_row = Row() super().__init__(object, **params) - self.editor.append(('%s Vertices' % param_name(self.name), + self.editor.append((f'{param_name(self.name)} Vertices', self._vertex_table_row)) def _init_stream(self): name = param_name(self.name) self._stream = PolyDraw( source=self.plot, data={}, num_objects=self.num_objects, - show_vertices=self.show_vertices, tooltip='%s Tool' % name, + show_vertices=self.show_vertices, tooltip=f'{name} Tool', vertex_style=self.vertex_style, empty_value=self.empty_value ) if self.edit_vertices: self._vertex_stream = PolyEdit( - source=self.plot, tooltip='%s Edit Tool' % name, + source=self.plot, tooltip=f'{name} Edit Tool', vertex_style=self.vertex_style, ) @@ -379,7 +379,7 @@ def _process_element(self, element=None): # Validate annotations poly_data = {c: element.dimension_values(c, expanded=False) for c in validate} - if validate and 
len(set(len(v) for v in poly_data.values())) != 1: + if validate and len({len(v) for v in poly_data.values()}) != 1: raise ValueError('annotations must refer to value dimensions ' 'which vary per path while at least one of ' '%s varies by vertex.' % validate) @@ -419,7 +419,7 @@ def _update_table(self): self._table = Table(table_data, annotations, [], label=name).opts( show_title=False, **self.table_opts) self._vertex_table = Table( - [], table.kdims, list(self.vertex_annotations), label='%s Vertices' % name + [], table.kdims, list(self.vertex_annotations), label=f'{name} Vertices' ).opts(show_title=False, **self.table_opts) self._update_links() self._table_row[:] = [self._table] @@ -458,7 +458,7 @@ def _init_stream(self): name = param_name(self.name) self._stream = self._stream_type( source=self.plot, data={}, num_objects=self.num_objects, - tooltip='%s Tool' % name, empty_value=self.empty_value + tooltip=f'{name} Tool', empty_value=self.empty_value ) def _process_element(self, object): @@ -519,7 +519,7 @@ class CurveAnnotator(_GeomAnnotator): def _init_stream(self): name = param_name(self.name) self._stream = self._stream_type( - source=self.plot, data={}, tooltip='%s Tool' % name, + source=self.plot, data={}, tooltip=f'{name} Tool', style=self.vertex_style ) diff --git a/holoviews/core/__init__.py b/holoviews/core/__init__.py index 9b45a1c99b..441fc1e67e 100644 --- a/holoviews/core/__init__.py +++ b/holoviews/core/__init__.py @@ -1,5 +1,7 @@ from datetime import date, datetime +import pandas as pd + from .boundingregion import * # noqa (API import) from .data import * # noqa (API import) from .dimension import * # noqa (API import) @@ -29,12 +31,8 @@ Dimension.type_formatters[np.datetime64] = '%Y-%m-%d %H:%M:%S' Dimension.type_formatters[datetime] = '%Y-%m-%d %H:%M:%S' Dimension.type_formatters[date] = '%Y-%m-%d' +Dimension.type_formatters[pd.Timestamp] = "%Y-%m-%d %H:%M:%S" -try: - import pandas as pd - Dimension.type_formatters[pd.Timestamp] = "%Y-%m-%d 
%H:%M:%S" -except: - pass def public(obj): if not isinstance(obj, type): return False @@ -42,6 +40,6 @@ def public(obj): SheetCoordinateSystem, AttrTree] return any([issubclass(obj, bc) for bc in baseclasses]) -_public = list(set([_k for _k, _v in locals().items() if public(_v)])) +_public = list({_k for _k, _v in locals().items() if public(_v)}) __all__ = _public + ["boundingregion", "dimension", "layer", "layout", "ndmapping", "operation", "options", "sheetcoords", "tree", "element"] diff --git a/holoviews/core/accessors.py b/holoviews/core/accessors.py index 00ec3d7373..921902d5a2 100644 --- a/holoviews/core/accessors.py +++ b/holoviews/core/accessors.py @@ -86,7 +86,7 @@ def pipelined_call(*args, **kwargs): @add_metaclass(AccessorPipelineMeta) -class Apply(object): +class Apply: """ Utility to apply a function or operation to all viewable elements inside the object. @@ -289,7 +289,7 @@ def transform(self, *args, **kwargs): @add_metaclass(AccessorPipelineMeta) -class Redim(object): +class Redim: """ Utility that supports re-dimensioning any HoloViews object via the redim method. @@ -489,7 +489,7 @@ def values(self, specs=None, **ranges): @add_metaclass(AccessorPipelineMeta) -class Opts(object): +class Opts: def __init__(self, obj, mode=None): self._mode = mode diff --git a/holoviews/core/boundingregion.py b/holoviews/core/boundingregion.py index 7f22cea8bd..313cf4a263 100644 --- a/holoviews/core/boundingregion.py +++ b/holoviews/core/boundingregion.py @@ -12,7 +12,7 @@ from .util import datetime_types -class BoundingRegion(object): +class BoundingRegion: """ Abstract bounding region class, for any portion of a 2D plane. 
@@ -93,9 +93,9 @@ def __str__(self): l, b, r, t = self._aarect.lbrt() if (not isinstance(r, datetime_types) and r == -l and not isinstance(b, datetime_types) and t == -b and r == t): - return 'BoundingBox(radius=%s)' % (r) + return f'BoundingBox(radius={r})' else: - return 'BoundingBox(points=((%s,%s),(%s,%s)))' % (l, b, r, t) + return f'BoundingBox(points=(({l},{b}),({r},{t})))' def __repr__(self): @@ -106,7 +106,7 @@ def script_repr(self, imports=[], prefix=" "): # Generate import statement cls = self.__class__.__name__ mod = self.__module__ - imports.append("from %s import %s" % (mod, cls)) + imports.append(f"from {mod} import {cls}") return self.__str__() @@ -242,7 +242,7 @@ def contains(self, x, y): # JABALERT: Should probably remove top, bottom, etc. accessor functions, # and use the slot itself instead. ################################################### -class AARectangle(object): +class AARectangle: """ Axis-aligned rectangle class. diff --git a/holoviews/core/data/__init__.py b/holoviews/core/data/__init__.py index 86b54f2573..c7e705a39d 100644 --- a/holoviews/core/data/__init__.py +++ b/holoviews/core/data/__init__.py @@ -1,8 +1,3 @@ -try: - import itertools.izip as zip -except ImportError: - pass - import types import copy @@ -70,7 +65,7 @@ def concat(datasets, datatype=None): return Interface.concatenate(datasets, datatype) -class DataConversion(object): +class DataConversion: """ DataConversion is a very simple container object which can be given an existing Dataset Element and provides methods to convert @@ -129,7 +124,7 @@ def __call__(self, new_type, kdims=None, vdims=None, groupby=None, else: selected = self._element else: - if pd and issubclass(self._element.interface, PandasInterface): + if issubclass(self._element.interface, PandasInterface): ds_dims = self._element.dimensions() ds_kdims = [self._element.get_dimension(d) if d in ds_dims else d for d in groupby+kdims] @@ -283,15 +278,14 @@ class to each underlying element. 
""" if isinstance(data, DynamicMap): class_name = cls.__name__ - repr_kdims = 'kdims=%r' % kdims if kdims else None - repr_vdims = 'vdims=%r' % vdims if vdims else None - repr_kwargs = (', '.join('%s=%r' % (k,v) for k,v in kwargs.items()) + repr_kdims = f'kdims={kdims!r}' if kdims else None + repr_vdims = f'vdims={vdims!r}' if vdims else None + repr_kwargs = (', '.join(f'{k}={v!r}' for k,v in kwargs.items()) if kwargs else None) extras = ', '.join([el for el in [repr_kdims, repr_vdims, repr_kwargs] if el is not None]) extras = ', ' + extras if extras else '' - apply_args= 'hv.{class_name}{extras}'.format(class_name=class_name, - extras=extras) + apply_args= f'hv.{class_name}{extras}' msg = "Cannot construct a {class_name} from the supplied object of type DynamicMap. Implicitly creating a DynamicMap of {class_name} objects, but instead please explicitly call .apply({apply_args}) on the supplied DynamicMap." cls.param.warning(cls, msg.format(class_name=class_name, apply_args=apply_args)) return data.apply(cls, per_element=True, kdims=kdims, vdims=vdims, **kwargs) @@ -540,7 +534,7 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs): dimension = Dimension(dimension) if dimension.name in self.kdims: - raise Exception('{dim} dimension already defined'.format(dim=dimension.name)) + raise Exception(f'{dimension.name} dimension already defined') if vdim: dims = self.vdims[:] @@ -716,7 +710,7 @@ def __getitem__(self, slices): value_select = slices[self.ndims] elif len(slices) == self.ndims+1 and isinstance(slices[self.ndims], (Dimension,str)): - raise IndexError("%r is not an available value dimension" % slices[self.ndims]) + raise IndexError(f"{slices[self.ndims]!r} is not an available value dimension") else: selection = dict(zip(self.dimensions(label=True), slices)) data = self.select(**selection) @@ -821,7 +815,7 @@ def sample(self, samples=[], bounds=None, closest=True, **kwargs): return self.clone(selection, kdims=kdims, new_type=new_type, 
datatype=datatype) - lens = set(len(util.wrap_tuple(s)) for s in samples) + lens = {len(util.wrap_tuple(s)) for s in samples} if len(lens) > 1: raise IndexError('Sample coordinates must all be of the same length.') @@ -946,7 +940,7 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): try: # Should be checking the dimensions declared on the element are compatible return self.clone(aggregated, kdims=kdims, vdims=vdims) - except: + except Exception: datatype = self.param.objects('existing')['datatype'].default return self.clone(aggregated, kdims=kdims, vdims=vdims, new_type=new_type, datatype=datatype) @@ -1070,12 +1064,10 @@ def __len__(self): "Number of values in the Dataset." return self.interface.length(self) - def __nonzero__(self): + def __bool__(self): "Whether the Dataset contains any values" return self.interface.nonzero(self) - __bool__ = __nonzero__ - @property def shape(self): "Returns the shape of the data." diff --git a/holoviews/core/data/array.py b/holoviews/core/data/array.py index 3402e33a60..d96e07d71f 100644 --- a/holoviews/core/data/array.py +++ b/holoviews/core/data/array.py @@ -1,8 +1,3 @@ -try: - import itertools.izip as zip -except ImportError: - pass - import numpy as np from .interface import Interface, DataError @@ -35,7 +30,7 @@ def init(cls, eltype, data, kdims, vdims): if ((isinstance(data, dict) or util.is_dataframe(data)) and all(d in data for d in dimensions)): dataset = [d if isinstance(d, np.ndarray) else np.asarray(data[d]) for d in dimensions] - if len(set(d.dtype.kind for d in dataset)) > 1: + if len({d.dtype.kind for d in dataset}) > 1: raise ValueError('ArrayInterface expects all columns to be of the same dtype') data = np.column_stack(dataset) elif isinstance(data, dict) and not all(d in data for d in dimensions): @@ -45,7 +40,7 @@ def init(cls, eltype, data, kdims, vdims): data = np.column_stack(list(dataset)) elif isinstance(data, tuple): data = [d if isinstance(d, np.ndarray) else np.asarray(d) 
for d in data] - if len(set(d.dtype.kind for d in data)) > 1: + if len({d.dtype.kind for d in data}) > 1: raise ValueError('ArrayInterface expects all columns to be of the same dtype') elif cls.expanded(data): data = np.column_stack(data) @@ -57,7 +52,7 @@ def init(cls, eltype, data, kdims, vdims): data = np.array([], ndmin=2).T if data is None else list(data) try: data = np.array(data) - except: + except Exception: data = None if kdims is None: diff --git a/holoviews/core/data/cudf.py b/holoviews/core/data/cudf.py index 1ba3d7d69b..4a608a4fd6 100644 --- a/holoviews/core/data/cudf.py +++ b/holoviews/core/data/cudf.py @@ -1,14 +1,10 @@ import sys import warnings -try: - import itertools.izip as zip -except ImportError: - pass - from itertools import product import numpy as np +import pandas as pd from .. import util from ..dimension import dimension_name @@ -53,7 +49,6 @@ def applies(cls, obj): @classmethod def init(cls, eltype, data, kdims, vdims): import cudf - import pandas as pd element_params = eltype.param.objects() kdim_param = element_params['kdims'] @@ -199,10 +194,10 @@ def select_mask(cls, dataset, selection): if isinstance(sel, tuple): sel = slice(*sel) arr = cls.values(dataset, dim, keep_index=True) - if util.isdatetime(arr) and util.pd: + if util.isdatetime(arr): try: sel = util.parse_datetime_selection(sel) - except: + except Exception: pass new_masks = [] @@ -274,22 +269,22 @@ def aggregate(cls, dataset, dimensions, function, **kwargs): agg = agg_map.get(agg, agg) grouped = reindexed.groupby(cols, sort=False) if not hasattr(grouped, agg): - raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg) + raise ValueError(f'{agg} aggregation is not supported on cudf DataFrame.') df = getattr(grouped, agg)().reset_index() else: agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'} agg = agg_map.get(agg, agg) if not hasattr(reindexed, agg): - raise ValueError('%s aggregation is not supported on cudf DataFrame.' 
% agg) + raise ValueError(f'{agg} aggregation is not supported on cudf DataFrame.') agg = getattr(reindexed, agg)() try: - data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_numpy()))) + data = {col: [v] for col, v in zip(agg.index.values_host, agg.to_numpy())} except Exception: # Give FutureWarning: 'The to_array method will be removed in a future cuDF release. # Consider using `to_numpy` instead.' # Seen in cudf=21.12.01 - data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_array()))) - df = util.pd.DataFrame(data, columns=list(agg.index.values_host)) + data = {col: [v] for col, v in zip(agg.index.values_host, agg.to_array())} + df = pd.DataFrame(data, columns=list(agg.index.values_host)) dropped = [] for vd in vdims: diff --git a/holoviews/core/data/dask.py b/holoviews/core/data/dask.py index 24fa27ac36..478487084b 100644 --- a/holoviews/core/data/dask.py +++ b/holoviews/core/data/dask.py @@ -1,8 +1,4 @@ import sys -try: - import itertools.izip as zip -except ImportError: - pass import numpy as np import pandas as pd diff --git a/holoviews/core/data/dictionary.py b/holoviews/core/data/dictionary.py index 91a37802ed..8f93e6f9c8 100644 --- a/holoviews/core/data/dictionary.py +++ b/holoviews/core/data/dictionary.py @@ -1,15 +1,10 @@ from collections import OrderedDict, defaultdict -try: - import itertools.izip as zip -except ImportError: - pass import numpy as np from .interface import Interface, DataError from ..dimension import dimension_name from ..element import Element -from ..dimension import OrderedDict as cyODict from ..ndmapping import NdMapping, item_check, sorted_context from ..util import isscalar from .. import util @@ -23,7 +18,7 @@ class DictInterface(Interface): are collections representing the values in that column. 
""" - types = (dict, OrderedDict, cyODict) + types = (dict, OrderedDict) datatype = 'dictionary' @@ -36,7 +31,6 @@ def dimension_type(cls, dataset, dim): @classmethod def init(cls, eltype, data, kdims, vdims): - odict_types = (OrderedDict, cyODict) if kdims is None: kdims = eltype.kdims if vdims is None: @@ -115,7 +109,7 @@ def init(cls, eltype, data, kdims, vdims): if not cls.expanded([vs for d, vs in unpacked if d in dimensions and not isscalar(vs)]): raise ValueError('DictInterface expects data to be of uniform shape.') - if isinstance(data, odict_types): + if isinstance(data, OrderedDict): data.update(unpacked) else: data = OrderedDict(unpacked) @@ -308,9 +302,9 @@ def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): grouped_data = [] for unique_key in util.unique_iterator(keys): mask = cls.select_mask(dataset, dict(zip(dimensions, unique_key))) - group_data = OrderedDict(((d.name, dataset.data[d.name] if isscalar(dataset.data[d.name]) + group_data = OrderedDict((d.name, dataset.data[d.name] if isscalar(dataset.data[d.name]) else dataset.data[d.name][mask]) - for d in kdims+vdims)) + for d in kdims+vdims) group_data = group_type(group_data, **group_kwargs) grouped_data.append((unique_key, group_data)) diff --git a/holoviews/core/data/grid.py b/holoviews/core/data/grid.py index 5770bbd8f0..8876de54b5 100644 --- a/holoviews/core/data/grid.py +++ b/holoviews/core/data/grid.py @@ -1,17 +1,11 @@ from collections import OrderedDict, defaultdict -try: - import itertools.izip as zip -except ImportError: - pass - import numpy as np from .dictionary import DictInterface from .interface import Interface, DataError from ..dimension import dimension_name from ..element import Element -from ..dimension import OrderedDict as cyODict from ..ndmapping import NdMapping, item_check, sorted_context from .. 
import util from .util import finite_range, is_dask, dask_array_module, get_array_types @@ -34,7 +28,7 @@ class GridInterface(DictInterface): longitudes can specify the position of NxM temperature samples. """ - types = (dict, OrderedDict, cyODict) + types = (dict, OrderedDict) datatype = 'grid' @@ -108,7 +102,7 @@ def init(cls, eltype, data, kdims, vdims): for dim in validate_dims: name = dimension_name(dim) if name not in data: - raise ValueError("Values for dimension %s not found" % dim) + raise ValueError(f"Values for dimension {dim} not found") if not isinstance(data[name], get_array_types()): data[name] = np.array(data[name]) @@ -165,7 +159,7 @@ def concat_dim(cls, datasets, dim, vdims): new_data[dim.name] = np.array(values) for vdim in vdims: arrays = [grid[vdim.name] for grid in grids] - shapes = set(arr.shape for arr in arrays) + shapes = {arr.shape for arr in arrays} if len(shapes) > 1: raise DataError('When concatenating gridded data the shape ' 'of arrays must match. %s found that arrays ' @@ -451,7 +445,7 @@ def groupby(cls, dataset, dim_names, container_type, group_type, **kwargs): invalid = [d for d in dimensions if dataset.data[d.name].ndim > 1] if invalid: - if len(invalid) == 1: invalid = "'%s'" % invalid[0] + if len(invalid) == 1: invalid = f"'{invalid[0]}'" raise ValueError("Cannot groupby irregularly sampled dimension(s) %s." 
% invalid) @@ -504,7 +498,7 @@ def groupby(cls, dataset, dim_names, container_type, group_type, **kwargs): @classmethod def key_select_mask(cls, dataset, values, ind): - if util.pd and values.dtype.kind == 'M': + if values.dtype.kind == 'M': ind = util.parse_datetime_selection(ind) if isinstance(ind, tuple): ind = slice(*ind) @@ -544,7 +538,7 @@ def key_select_mask(cls, dataset, values, ind): @classmethod def select(cls, dataset, selection_mask=None, **selection): if selection_mask is not None: - raise ValueError("Masked selections currently not supported for {0}.".format(cls.__name__)) + raise ValueError(f"Masked selections currently not supported for {cls.__name__}.") dimensions = dataset.kdims val_dims = [vdim for vdim in dataset.vdims if vdim in selection] diff --git a/holoviews/core/data/ibis.py b/holoviews/core/data/ibis.py index 6213b81e6d..d17261bb1f 100644 --- a/holoviews/core/data/ibis.py +++ b/holoviews/core/data/ibis.py @@ -1,12 +1,8 @@ import sys import numpy +from collections.abc import Iterable from packaging.version import Version -try: - from collections.abc import Iterable -except ImportError: - from collections import Iterable - from .. 
import util from ..element import Element from ..ndmapping import NdMapping, item_check, sorted_context @@ -45,7 +41,7 @@ def is_rowid_zero_indexed(cls, data): try: from ibis.client import find_backends, validate_backends (backend,) = validate_backends(list(find_backends(data))) - except Exception: + except ImportError: backend = data._find_backend() return type(backend).__module__ in cls.zero_indexed_backend_modules diff --git a/holoviews/core/data/interface.py b/holoviews/core/data/interface.py index ac3d62e7aa..73800da3ac 100644 --- a/holoviews/core/data/interface.py +++ b/holoviews/core/data/interface.py @@ -1,7 +1,6 @@ import sys import warnings -import six import param import numpy as np @@ -20,7 +19,7 @@ def __init__(self, msg, interface=None): super().__init__(msg) -class Accessor(object): +class Accessor: def __init__(self, dataset): self.dataset = dataset @@ -224,7 +223,7 @@ def initialize(cls, eltype, data, kdims, vdims, datatype=None): for vd in data.vdims: new_data.append(interface.values(data, vd, flat=False, compute=False)) data = tuple(new_data) - elif 'dataframe' in datatype and util.pd: + elif 'dataframe' in datatype: data = data.dframe() else: data = tuple(data.columns().values()) @@ -263,10 +262,9 @@ def initialize(cls, eltype, data, kdims, vdims, datatype=None): "to support the supplied data format.") if priority_errors: intfc, e, _ = priority_errors[0] - priority_error = ("%s raised following error:\n\n %s" - % (intfc.__name__, e)) + priority_error = f"{intfc.__name__} raised following error:\n\n {e}" error = ' '.join([error, priority_error]) - raise six.reraise(DataError, DataError(error, intfc), sys.exc_info()[2]) + raise DataError(error, intfc).with_traceback(sys.exc_info()[2]) raise DataError(error) return data, interface, dims, extra_kws @@ -348,10 +346,10 @@ def select_mask(cls, dataset, selection): if isinstance(sel, tuple): sel = slice(*sel) arr = cls.values(dataset, dim) - if util.isdatetime(arr) and util.pd: + if 
util.isdatetime(arr): try: sel = util.parse_datetime_selection(sel) - except: + except Exception: pass if isinstance(sel, slice): with warnings.catch_warnings(): diff --git a/holoviews/core/data/multipath.py b/holoviews/core/data/multipath.py index 07e5c527fd..8105df4bc3 100644 --- a/holoviews/core/data/multipath.py +++ b/holoviews/core/data/multipath.py @@ -473,7 +473,7 @@ def split(cls, dataset, start, end, datatype, **kwargs): if gt is not None: obj['geom_type'] = gt else: - raise ValueError("%s datatype not support" % datatype) + raise ValueError(f"{datatype} datatype not supported") objs.append(obj) return objs diff --git a/holoviews/core/data/pandas.py b/holoviews/core/data/pandas.py index 835cc3d6ae..383593d21f 100644 --- a/holoviews/core/data/pandas.py +++ b/holoviews/core/data/pandas.py @@ -1,3 +1,4 @@ +from collections import OrderedDict from packaging.version import Version import numpy as np @@ -8,7 +9,6 @@ from .interface import Interface, DataError from ..dimension import dimension_name, Dimension from ..element import Element -from ..dimension import OrderedDict as cyODict from ..ndmapping import NdMapping, item_check, sorted_context from ..
import util from .util import finite_range @@ -16,7 +16,7 @@ class PandasInterface(Interface): - types = (pd.DataFrame if pd else None,) + types = (pd.DataFrame,) datatype = 'dataframe' @@ -99,7 +99,7 @@ def init(cls, eltype, data, kdims, vdims): columns = list(util.unique_iterator([dimension_name(d) for d in kdims+vdims])) if isinstance(data, dict) and all(c in data for c in columns): - data = cyODict(((d, data[d]) for d in columns)) + data = OrderedDict((d, data[d]) for d in columns) elif isinstance(data, list) and len(data) == 0: data = {c: np.array([]) for c in columns} elif isinstance(data, (list, dict)) and data in ([], {}): @@ -114,7 +114,7 @@ def init(cls, eltype, data, kdims, vdims): "values.") column_data = zip(*((util.wrap_tuple(k)+util.wrap_tuple(v)) for k, v in column_data)) - data = cyODict(((c, col) for c, col in zip(columns, column_data))) + data = OrderedDict(((c, col) for c, col in zip(columns, column_data))) elif isinstance(data, np.ndarray): if data.ndim == 1: if eltype._auto_indexable_1d and len(kdims)+len(vdims)>1: @@ -168,13 +168,13 @@ def range(cls, dataset, dimension): column = dataset.data[dimension.name] if column.dtype.kind == 'O': if (not isinstance(dataset.data, pd.DataFrame) or - util.LooseVersion(pd.__version__) < util.LooseVersion('0.17.0')): + util.pandas_version < Version('0.17.0')): column = column.sort(inplace=False) else: column = column.sort_values() try: column = column[~column.isin([None, pd.NA])] - except: + except Exception: pass if not len(column): return np.NaN, np.NaN @@ -191,7 +191,7 @@ def range(cls, dataset, dimension): @classmethod def concat_fn(cls, dataframes, **kwargs): - if util.pandas_version >= util.LooseVersion('0.23.0'): + if util.pandas_version >= Version('0.23.0'): kwargs['sort'] = False return pd.concat(dataframes, **kwargs) @@ -223,7 +223,7 @@ def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): group_kwargs['dataset'] = dataset.dataset group_by = [d.name for d in index_dims] - if 
len(group_by) == 1 and Version(pd.__version__) >= Version("1.5.0"): + if len(group_by) == 1 and util.pandas_version >= Version("1.5.0"): # Because of this deprecation warning from pandas 1.5.0: # In a future version of pandas, a length 1 tuple will be returned # when iterating over a groupby with a grouper equal to a list of length 1. @@ -269,7 +269,7 @@ def aggregate(cls, dataset, dimensions, function, **kwargs): df = grouped[numeric_cols].aggregate(fn, **kwargs).reset_index() else: agg = reindexed.apply(fn, **kwargs) - data = dict(((col, [v]) for col, v in zip(agg.index, agg.values))) + data = {col: [v] for col, v in zip(agg.index, agg.values)} df = pd.DataFrame(data, columns=list(agg.index)) dropped = [] @@ -312,11 +312,10 @@ def redim(cls, dataset, dimensions): @classmethod def sort(cls, dataset, by=[], reverse=False): - import pandas as pd cols = [dataset.get_dimension(d, strict=True).name for d in by] if (not isinstance(dataset.data, pd.DataFrame) or - util.LooseVersion(pd.__version__) < util.LooseVersion('0.17.0')): + util.pandas_version < Version('0.17.0')): return dataset.data.sort(columns=cols, ascending=not reverse) return dataset.data.sort_values(by=cols, ascending=not reverse) diff --git a/holoviews/core/data/spatialpandas.py b/holoviews/core/data/spatialpandas.py index d0bf9e50a6..9af858daf7 100644 --- a/holoviews/core/data/spatialpandas.py +++ b/holoviews/core/data/spatialpandas.py @@ -3,9 +3,10 @@ from collections import defaultdict import numpy as np +import pandas as pd from ..dimension import dimension_name -from ..util import isscalar, unique_iterator, pd, unique_array +from ..util import isscalar, unique_iterator, unique_array from .interface import DataError, Interface from .multipath import MultiInterface, ensure_ring from .pandas import PandasInterface @@ -94,7 +95,7 @@ def init(cls, eltype, data, kdims, vdims): elif isinstance(data, cls.array_type()): data = GeoDataFrame({'geometry': data}) elif not isinstance(data, cls.frame_type()): - 
raise ValueError("%s only support spatialpandas DataFrames." % cls.__name__) + raise ValueError(f"{cls.__name__} only supports spatialpandas DataFrames.") elif 'geometry' not in data: cls.geo_column(data) @@ -429,7 +430,7 @@ def split(cls, dataset, start, end, datatype, **kwargs): elif datatype == 'dataframe': obj = ds.dframe(**kwargs) else: - raise ValueError("%s datatype not support" % datatype) + raise ValueError(f"{datatype} datatype not supported") objs.append(obj) return objs diff --git a/holoviews/core/data/util.py b/holoviews/core/data/util.py index bdee313137..ce7c303f69 100644 --- a/holoviews/core/data/util.py +++ b/holoviews/core/data/util.py @@ -46,7 +46,7 @@ def dask_array_module(): try: import dask.array as da return da - except: + except ImportError: return None def is_dask(array): diff --git a/holoviews/core/data/xarray.py b/holoviews/core/data/xarray.py index 4971538b8c..70ed6f48f7 100644 --- a/holoviews/core/data/xarray.py +++ b/holoviews/core/data/xarray.py @@ -4,6 +4,7 @@ from collections import OrderedDict import numpy as np +import pandas as pd from .. import util from ..dimension import Dimension, asdim, dimension_name @@ -257,7 +258,7 @@ def validate(cls, dataset, vdims=True): if cls.irregular(dataset, kd): irregular.append((kd, dataset.data[kd.name].dims)) if irregular: - nonmatching = ['%s: %s' % (kd, dims) for kd, dims in irregular[1:] + nonmatching = [f'{kd}: {dims}' for kd, dims in irregular[1:] if set(dims) != set(irregular[0][1])] if nonmatching: nonmatching = ['%s: %s' % irregular[0]] + nonmatching @@ -319,7 +320,7 @@ def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): invalid = [d for d in index_dims if dataset.data[d.name].ndim > 1] if invalid: - if len(invalid) == 1: invalid = "'%s'" % invalid[0] + if len(invalid) == 1: invalid = f"'{invalid[0]}'" raise ValueError("Cannot groupby irregularly sampled dimension(s) %s."
% invalid) @@ -646,7 +647,7 @@ def sample(cls, dataset, samples=[]): names = [kd.name for kd in dataset.kdims] samples = [dataset.data.sel(**{k: [v] for k, v in zip(names, s)}).to_dataframe().reset_index() for s in samples] - return util.pd.concat(samples) + return pd.concat(samples) @classmethod def add_dimension(cls, dataset, dimension, dim_pos, values, vdim): diff --git a/holoviews/core/decollate.py b/holoviews/core/decollate.py index ee298161d1..42f8660902 100644 --- a/holoviews/core/decollate.py +++ b/holoviews/core/decollate.py @@ -177,7 +177,7 @@ def to_expr_extract_streams( elif isinstance(hvobj, Element): return hvobj.clone(link=False) else: - raise NotImplementedError("Type {typ} not implemented".format(typ=type(hvobj))) + raise NotImplementedError(f"Type {type(hvobj)} not implemented") def expr_to_fn_of_stream_contents(expr, nkdims): diff --git a/holoviews/core/dimension.py b/holoviews/core/dimension.py index 3a236a3f9e..0496c3ef48 100644 --- a/holoviews/core/dimension.py +++ b/holoviews/core/dimension.py @@ -3,6 +3,7 @@ axis or map dimension. Also supplies the Dimensioned abstract baseclass for classes that accept Dimension values. """ +import builtins import re import datetime as dt import weakref @@ -40,8 +41,8 @@ def param_aliases(d): Warning: We want to keep pickle hacking to a minimum! 
""" for old, new in ALIASES.items(): - old_param = '_%s_param_value' % old - new_param = '_%s_param_value' % new + old_param = f'_{old}_param_value' + new_param = f'_{new}_param_value' if old_param in d: d[new_param] = d.pop(old_param) return d @@ -254,9 +255,8 @@ def __init__(self, spec, **params): ) from exc if 'label' in params and params['label'] != all_params['label']: self.param.warning( - 'Using label as supplied by keyword ({!r}), ignoring ' - 'tuple value {!r}'.format(params['label'], all_params['label']) - ) + f'Using label as supplied by keyword ({params["label"]!r}), ' + f'ignoring tuple value {all_params["label"]!r}') elif isinstance(spec, dict): all_params.update(spec) try: @@ -327,8 +327,8 @@ def clone(self, spec=None, **overrides): elif 'label' in overrides and isinstance(spec, tuple) : if overrides['label'] != spec[1]: self.param.warning( - 'Using label as supplied by keyword ({!r}), ignoring ' - 'tuple value {!r}'.format(overrides['label'], spec[1])) + f'Using label as supplied by keyword ({overrides["label"]!r}), ' + f'ignoring tuple value {spec[1]!r}') spec = (spec[0], overrides['label']) return self.__class__(spec, **{k:v for k,v in settings.items() if k not in ['name', 'label']}) @@ -378,16 +378,16 @@ def pprint_label(self): def pprint(self): changed = dict(self.param.get_param_values(onlychanged=True)) - if len(set([changed.get(k, k) for k in ['name','label']])) == 1: - return 'Dimension({spec})'.format(spec=repr(self.name)) + if len({changed.get(k, k) for k in ['name','label']}) == 1: + return f'Dimension({repr(self.name)})' params = self.param.objects('existing') ordering = sorted( sorted(changed.keys()), key=lambda k: ( -float('inf') if params[k].precedence is None else params[k].precedence)) - kws = ", ".join('%s=%r' % (k, changed[k]) for k in ordering if k != 'name') - return 'Dimension({spec}, {kws})'.format(spec=repr(self.name), kws=kws) + kws = ", ".join(f'{k}={changed[k]!r}' for k in ordering if k != 'name') + return 
f'Dimension({repr(self.name)}, {kws})' def pprint_value(self, value, print_unit=False): @@ -489,7 +489,7 @@ def __init__(self, data, id=None, plot_id=None, **params): self._id = None self.id = id - self._plot_id = plot_id or util.builtins.id(self) + self._plot_id = plot_id or builtins.id(self) if isinstance(params.get('label',None), tuple): (alias, long_name) = params['label'] util.label_sanitizer.add_aliases(**{alias:long_name}) @@ -712,7 +712,7 @@ def __getstate__(self): if obj_dict['_id'] in s} else: obj_dict['_id'] = None - except: + except Exception: self.param.warning("Could not pickle custom style information.") return obj_dict @@ -747,7 +747,7 @@ def __setstate__(self, d): if opts_id is not None: opts_id += Store.load_counter_offset - except: + except Exception: self.param.warning("Could not unpickle custom style information.") d['_id'] = opts_id self.__dict__.update(d) @@ -874,7 +874,7 @@ def _valid_dimensions(self, dimensions): for dim in dimensions: if isinstance(dim, Dimension): dim = dim.name if dim not in self.kdims: - raise Exception("Supplied dimensions %s not found." 
% dim) + raise Exception(f"Supplied dimensions {dim} not found.") valid_dimensions.append(dim) return valid_dimensions @@ -924,7 +924,7 @@ def dimensions(self, selection='all', label=False): for dim in getattr(self, group)] elif isinstance(selection, list): dims = [dim for group in selection - for dim in getattr(self, '%sdims' % aliases.get(group))] + for dim in getattr(self, f'{aliases.get(group)}dims')] elif aliases.get(selection) in lambdas: selection = aliases.get(selection, selection) lmbd, kwargs = lambdas[selection] @@ -957,14 +957,14 @@ def get_dimension(self, dimension, default=None, strict=False): if 0 <= dimension < len(all_dims): return all_dims[dimension] elif strict: - raise KeyError("Dimension %r not found" % dimension) + raise KeyError(f"Dimension {dimension!r} not found") else: return default if isinstance(dimension, Dimension): dims = [d for d in all_dims if dimension == d] if strict and not dims: - raise KeyError("%r not found." % dimension) + raise KeyError(f"{dimension!r} not found.") elif dims: return dims[0] else: @@ -976,7 +976,7 @@ def get_dimension(self, dimension, default=None, strict=False): name_map.update({dim.label: dim for dim in all_dims}) name_map.update({util.dimension_sanitizer(dim.name): dim for dim in all_dims}) if strict and dimension not in name_map: - raise KeyError("Dimension %r not found." % dimension) + raise KeyError(f"Dimension {dimension!r} not found.") else: return name_map.get(dimension, default) @@ -1001,8 +1001,7 @@ def get_dimension_index(self, dimension): dimensions = self.kdims+self.vdims return [i for i, d in enumerate(dimensions) if d == dim][0] except IndexError: - raise Exception("Dimension %s not found in %s." 
% - (dim, self.__class__.__name__)) + raise Exception(f"Dimension {dim} not found in {self.__class__.__name__}.") def get_dimension_type(self, dim): @@ -1195,15 +1194,13 @@ def range(self, dimension, data_range=True, dimension_range=True): if not dimension_range: return lower, upper return util.dimension_range(lower, upper, dimension.range, dimension.soft_range) + def __repr__(self): return PrettyPrinter.pprint(self) def __str__(self): return repr(self) - def __unicode__(self): - return PrettyPrinter.pprint(self) - def options(self, *args, clone=True, **kwargs): """Applies simplified option definition returning a new object. diff --git a/holoviews/core/element.py b/holoviews/core/element.py index 85cb269d28..537d09c7e9 100644 --- a/holoviews/core/element.py +++ b/holoviews/core/element.py @@ -2,6 +2,7 @@ import numpy as np import param +import pandas as pd from .dimension import Dimensioned, ViewableElement, asdim from .layout import Composable, Layout, NdLayout @@ -77,7 +78,7 @@ def __getitem__(self, key): raise NotImplementedError("%s currently does not support getitem" % type(self).__name__) - def __nonzero__(self): + def __bool__(self): """Indicates whether the element is empty. Subclasses may override this to signal that the Element @@ -93,8 +94,6 @@ def __iter__(self): "Disable iterator interface." raise NotImplementedError('Iteration on Elements is not supported.') - __bool__ = __nonzero__ - def closest(self, coords, **kwargs): """Snap list or dict of coordinates to closest position. 
@@ -208,7 +207,6 @@ def dframe(self, dimensions=None, multi_index=False): Returns: DataFrame of columns corresponding to each dimension """ - import pandas as pd if dimensions is None: dimensions = [d.name for d in self.dimensions()] else: @@ -472,5 +470,5 @@ def _add_dimensions(self, item, dims, constant_keys): return new_item -__all__ = list(set([_k for _k, _v in locals().items() - if isinstance(_v, type) and issubclass(_v, Dimensioned)])) +__all__ = list({_k for _k, _v in locals().items() + if isinstance(_v, type) and issubclass(_v, Dimensioned)}) diff --git a/holoviews/core/io.py b/holoviews/core/io.py index c62c9ce38c..ade45f6880 100644 --- a/holoviews/core/io.py +++ b/holoviews/core/io.py @@ -122,7 +122,7 @@ def encode(cls, entry): def _filename(self_or_cls, filename): "Add the file extension if not already present" if not filename.endswith(self_or_cls.file_ext): - return '%s.%s' % (filename, self_or_cls.file_ext) + return f'{filename}.{self_or_cls.file_ext}' else: return filename @@ -132,7 +132,7 @@ def _merge_metadata(self_or_cls, obj, fn, *dicts): Returns a merged metadata info dictionary from the supplied function and additional dictionaries """ - merged = dict([(k,v) for d in dicts for (k,v) in d.items()]) + merged = {k:v for d in dicts for (k,v) in d.items()} return dict(merged, **fn(obj)) if fn else merged def __call__(self, obj, fmt=None): @@ -277,7 +277,8 @@ def load(self_or_cls, filename): data = self_or_cls.deserializer(f) try: data = self_or_cls.deserializer(f) - except: pass + except Exception: + pass return data @bothmethod @@ -343,7 +344,7 @@ def save(self_or_cls, obj, filename, key={}, info={}, **kwargs): components = list(obj.data.values()) entries = entries if len(entries) > 1 else [entries[0]+'(L)'] else: - entries = ['%s.%s' % (group_sanitizer(obj.group, False), + entries = ['{}.{}'.format(group_sanitizer(obj.group, False), label_sanitizer(obj.label, False))] components = [obj] @@ -379,7 +380,7 @@ def load(self_or_cls, filename,
entries=None): with zipfile.ZipFile(filename, 'r') as f: for entry in entries: if entry not in f.namelist(): - raise Exception("Entry %s not available" % entry) + raise Exception(f"Entry {entry} not available") components.append(Store.loads(f.read(entry))) single_layout = entry.endswith('(L)') @@ -395,7 +396,7 @@ def _load_metadata(self_or_cls, filename, name): raise Exception("No metadata available") metadata = pickle.loads(f.read('metadata')) if name not in metadata: - raise KeyError("Entry %s is missing from the metadata" % name) + raise KeyError(f"Entry {name} is missing from the metadata") return metadata[name] @bothmethod @@ -605,9 +606,9 @@ def parse_fields(cls, formatter): if formatter is None: return [] try: parse = list(string.Formatter().parse(formatter)) - return set(f for f in list(zip(*parse))[1] if f is not None) - except: - raise SyntaxError("Could not parse formatter %r" % formatter) + return {f for f in list(zip(*parse))[1] if f is not None} + except Exception: + raise SyntaxError(f"Could not parse formatter {formatter!r}") def __init__(self, **params): super().__init__(**params) @@ -631,7 +632,7 @@ def _dim_formatter(self, obj): if lower == upper: range = dim.pprint_value(lower) else: - range = "%s-%s" % (lower, upper) + range = f"{lower}-{upper}" formatters = {'name': dim.name, 'range': range, 'unit': dim.unit} dim_strings.append(self.dimension_formatter.format(**formatters)) @@ -640,11 +641,13 @@ def _dim_formatter(self, obj): def _validate_formatters(self): if not self.parse_fields(self.filename_formatter).issubset(self.ffields): - raise Exception("Valid filename fields are: %s" % ','.join(sorted(self.ffields))) + raise Exception(f"Valid filename fields are: {','.join(sorted(self.ffields))}") elif not self.parse_fields(self.export_name).issubset(self.efields): - raise Exception("Valid export fields are: %s" % ','.join(sorted(self.efields))) - try: time.strftime(self.timestamp_format, tuple(time.localtime())) - except: raise 
Exception("Timestamp format invalid") + raise Exception(f"Valid export fields are: {','.join(sorted(self.efields))}") + try: + time.strftime(self.timestamp_format, tuple(time.localtime())) + except Exception: + raise Exception("Timestamp format invalid") def add(self, obj=None, filename=None, data=None, info={}, **kwargs): @@ -723,21 +726,21 @@ def _zip_archive(self, export_name, files, root): with zipfile.ZipFile(os.path.join(root, archname), 'w') as zipf: for (basename, ext), entry in files: filename = self._truncate_name(basename, ext) - zipf.writestr(('%s/%s' % (export_name, filename)),Exporter.encode(entry)) + zipf.writestr(f'{export_name}/{filename}',Exporter.encode(entry)) def _tar_archive(self, export_name, files, root): archname = '.'.join(self._unique_name(export_name, 'tar', root)) with tarfile.TarFile(os.path.join(root, archname), 'w') as tarf: for (basename, ext), entry in files: filename = self._truncate_name(basename, ext) - tarinfo = tarfile.TarInfo('%s/%s' % (export_name, filename)) + tarinfo = tarfile.TarInfo(f'{export_name}/{filename}') filedata = Exporter.encode(entry) tarinfo.size = len(filedata) tarf.addfile(tarinfo, BytesIO(filedata)) def _single_file_archive(self, export_name, files, root): ((basename, ext), entry) = files[0] - full_fname = '%s_%s' % (export_name, basename) + full_fname = f'{export_name}_{basename}' (unique_name, ext) = self._unique_name(full_fname, ext, root) filename = self._truncate_name(self._normalize_name(unique_name), ext=ext) fpath = os.path.join(root, filename) @@ -788,7 +791,7 @@ def _truncate_name(self, basename, ext='', tail=10, join='...', maxlen=None): start = basename[:max_len-(tail + len(join))] end = basename[-tail:] basename = start + join + end - filename = '%s.%s' % (basename, ext) if ext else basename + filename = f'{basename}.{ext}' if ext else basename return filename @@ -839,14 +842,14 @@ def contents(self, maxlen=70): "Print the current (unexported) contents of the archive" lines = [] if
len(self._files) == 0: - print("Empty %s" % self.__class__.__name__) + print(f"Empty {self.__class__.__name__}") return fnames = [self._truncate_name(maxlen=maxlen, *k) for k in self._files] max_len = max([len(f) for f in fnames]) for name,v in zip(fnames, self._files.values()): mime_type = v[1].get('mime_type', 'no mime type') - lines.append('%s : %s' % (name.ljust(max_len), mime_type)) + lines.append(f'{name.ljust(max_len)} : {mime_type}') print('\n'.join(lines)) def listing(self): diff --git a/holoviews/core/layout.py b/holoviews/core/layout.py index 6a79da6efc..4219768c84 100644 --- a/holoviews/core/layout.py +++ b/holoviews/core/layout.py @@ -48,7 +48,7 @@ def __lshift__(self, other): elif isinstance(other, AdjointLayout): return AdjointLayout(other.data.values()+[self]) else: - raise TypeError('Cannot append {0} to a AdjointLayout'.format(type(other).__name__)) + raise TypeError(f'Cannot append {type(other).__name__} to a AdjointLayout') @@ -262,7 +262,7 @@ def __getitem__(self, key): return self if data_slice is None else self.clone([el[data_slice] for el in self]) else: - raise KeyError("Key {0} not found in AdjointLayout.".format(key)) + raise KeyError(f"Key {key} not found in AdjointLayout.") def __setitem__(self, key, value): @@ -272,7 +272,7 @@ def __setitem__(self, key, value): else: raise ValueError('AdjointLayout only accepts Element types.') else: - raise Exception('Position %s not valid in AdjointLayout.' 
% key) + raise Exception(f'Position {key} not valid in AdjointLayout.') def __lshift__(self, other): @@ -485,7 +485,7 @@ def __getitem__(self, key): idx = row * self._max_cols + col keys = list(self.data.keys()) if idx >= len(keys) or col >= self._max_cols: - raise KeyError('Index %s is outside available item range' % str(key)) + raise KeyError(f'Index {key} is outside available item range') key = keys[idx] return super().__getitem__(key) @@ -554,6 +554,6 @@ def __rmul__(self, other): return self.__mul__(other, reverse=True) -__all__ = list(set([_k for _k, _v in locals().items() +__all__ = list({_k for _k, _v in locals().items() if isinstance(_v, type) and (issubclass(_v, Dimensioned) - or issubclass(_v, Layout))])) + or issubclass(_v, Layout))}) diff --git a/holoviews/core/ndmapping.py b/holoviews/core/ndmapping.py index 061bae9fb8..0473b53b53 100644 --- a/holoviews/core/ndmapping.py +++ b/holoviews/core/ndmapping.py @@ -7,6 +7,7 @@ from itertools import cycle from operator import itemgetter import numpy as np +import pandas as pd import param @@ -17,7 +18,7 @@ process_ellipses, get_ndmapping_label ) -class item_check(object): +class item_check: """ Context manager to allow creating NdMapping types without performing the usual item_checks, providing significant @@ -37,7 +38,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): MultiDimensionalMapping._check_items = self._enabled -class sorted_context(object): +class sorted_context: """ Context manager to temporarily disable sorting on NdMapping types. 
Retains the current sort order, which can be useful as @@ -139,10 +140,11 @@ def _item_check(self, dim_vals, data): data_type = tuple(dt.__name__ for dt in self.data_type) else: data_type = self.data_type.__name__ - raise TypeError('{slf} does not accept {data} type, data elements have ' - 'to be a {restr}.'.format(slf=type(self).__name__, - data=type(data).__name__, - restr=data_type)) + + slf = type(self).__name__ + data = type(data).__name__ + raise TypeError(f'{slf} does not accept {data} type, data elements have ' + f'to be a {data_type}.') elif not len(dim_vals) == self.ndims: raise KeyError('The data contains keys of length %d, but the kdims ' 'only declare %d dimensions. Ensure that the number ' @@ -323,7 +325,7 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs): dimension = asdim(dimension) if dimension in self.dimensions(): - raise Exception('{dim} dimension already defined'.format(dim=dimension.name)) + raise Exception(f'{dimension.name} dimension already defined') if vdim and self._deep_indexable: raise Exception('Cannot add value dimension to object that is deep indexable') @@ -401,7 +403,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): vals = np.concatenate(values) return vals if expanded else util.unique_array(vals) else: - return super(MultiDimensionalMapping, self).dimension_values(dimension, expanded, flat) + return super().dimension_values(dimension, expanded, flat) def reindex(self, kdims=[], force=False): @@ -433,7 +435,7 @@ def reindex(self, kdims=[], force=False): keys = [tuple(k[i] for i in indices) for k in self.data.keys()] reindexed_items = OrderedDict( (k, v) for (k, v) in zip(keys, self.data.values())) - reduced_dims = set([d.name for d in self.kdims]).difference(kdims) + reduced_dims = {d.name for d in self.kdims}.difference(kdims) dimensions = [self.get_dimension(d) for d in kdims if d not in reduced_dims] @@ -481,12 +483,12 @@ def info(self): dimensions = getattr(self, group) if 
dimensions: group = aliases[group].split('_')[0] - info_str += '%s Dimensions: \n' % group.capitalize() + info_str += f'{group.capitalize()} Dimensions: \n' for d in dimensions: dmin, dmax = self.range(d.name) if d.value_format: dmin, dmax = d.value_format(dmin), d.value_format(dmax) - info_str += '\t %s: %s...%s \n' % (d.pprint_label, dmin, dmax) + info_str += f'\t {d.pprint_label}: {dmin}...{dmax} \n' return info_str @@ -881,7 +883,6 @@ def dframe(self, dimensions=None, multi_index=False): Returns: DataFrame of columns corresponding to each dimension """ - import pandas as pd if dimensions is None: outer_dimensions = self.kdims inner_dimensions = None diff --git a/holoviews/core/options.py b/holoviews/core/options.py index b0e6a4a0a8..cca3124be1 100644 --- a/holoviews/core/options.py +++ b/holoviews/core/options.py @@ -211,7 +211,7 @@ def print_traceback(self): traceback.print_exception(self.etype, self.value, self.traceback) -class abbreviated_exception(object): +class abbreviated_exception: """ Context manager used to to abbreviate tracebacks using an AbbreviatedException when a backend may raise an error due to @@ -291,7 +291,6 @@ def __repr__(self): def __str__(self): return str(self.values) def __iter__(self): return iter(self.values) def __bool__(self): return bool(self.values) - def __nonzero__(self): return bool(self.values) def __contains__(self, val): return val in self.values @@ -361,7 +360,7 @@ def __repr__(self): vrepr = repr(self.key) else: vrepr = [str(el) for el in self.values] - return "%s(%s)" % (type(self).__name__, vrepr) + return f"{type(self).__name__}({vrepr})" @@ -778,7 +777,7 @@ def closest(self, obj, group, defaults=True, backend=None): if cache_key in cache: return cache[cache_key] - target = '.'.join((c for c in opts_spec if c)) + target = '.'.join(c for c in opts_spec if c) options = self.find(opts_spec).options( group, target=target, defaults=defaults, backend=backend) cache[cache_key] = options @@ -836,7 +835,7 @@ def 
__repr__(self): if especs: format_kws = [ - (t, 'dict(%s)' % ', '.join(f'{k}={v}' for k, v in sorted(kws.items()))) + (t, f"dict({', '.join(f'{k}={v}' for k, v in sorted(kws.items()))})") for t, kws in especs ] ljust = max(len(t) for t,_ in format_kws) @@ -1119,7 +1118,7 @@ def apply(self, value, input_ranges, backend=None): return transformed -class Store(object): +class Store: """ The Store is what links up HoloViews objects to their corresponding options and to the appropriate classes of the chosen @@ -1288,7 +1287,7 @@ def lookup(cls, backend, obj): Given an object, lookup the corresponding customized option tree if a single custom tree is applicable. """ - ids = set([el for el in obj.traverse(lambda x: x.id) if el is not None]) + ids = {el for el in obj.traverse(lambda x: x.id) if el is not None} if len(ids) == 0: raise Exception("Object does not own a custom options tree") elif len(ids) != 1: @@ -1373,8 +1372,8 @@ def register(cls, associations, backend, style_aliases={}): for view_class, plot in cls.registry[backend].items(): expanded_opts = [opt for key in plot.style_opts for opt in style_aliases.get(key, [])] - style_opts = sorted(set(opt for opt in (expanded_opts + plot.style_opts) - if opt not in plot._disabled_opts)) + style_opts = sorted({opt for opt in (expanded_opts + plot.style_opts) + if opt not in plot._disabled_opts}) # Special handling for PlotSelector which just proxies parameters params = list(plot.param) if hasattr(plot, 'param') else plot.params() @@ -1432,7 +1431,7 @@ def render(cls, obj): return data, metadata -class StoreOptions(object): +class StoreOptions: """ A collection of utilities for advanced users for creating and setting customized option trees on the Store. 
Designed for use by @@ -1491,8 +1490,8 @@ def record_skipped_option(cls, error): @classmethod def get_object_ids(cls, obj): - return set(el for el - in obj.traverse(lambda x: getattr(x, 'id', None))) + return {el for el + in obj.traverse(lambda x: getattr(x, 'id', None))} @classmethod def tree_to_dict(cls, tree): @@ -1716,7 +1715,7 @@ def merge_options(cls, groups, options=None,**kwargs): ) options = {} if (options is None) else dict(**options) - all_keys = set(k for d in kwargs.values() for k in d) + all_keys = {k for d in kwargs.values() for k in d} for spec_key in all_keys: additions = {} for k, d in kwargs.items(): diff --git a/holoviews/core/overlay.py b/holoviews/core/overlay.py index d6e13403bb..e25cf2d4b0 100644 --- a/holoviews/core/overlay.py +++ b/holoviews/core/overlay.py @@ -252,8 +252,7 @@ def group(self): @group.setter def group(self, group): if not sanitize_identifier.allowable(group): - raise ValueError("Supplied group %s contains invalid characters." % - group) + raise ValueError(f"Supplied group {group} contains invalid characters.") else: self._group = group @@ -271,8 +270,7 @@ def label(self): @label.setter def label(self, label): if not sanitize_identifier.allowable(label): - raise ValueError("Supplied group %s contains invalid characters." 
% - label) + raise ValueError(f"Supplied group {label} contains invalid characters.") self._label = label @property @@ -326,5 +324,5 @@ def decollate(self): return decollate(self) -__all__ = list(set([_k for _k, _v in locals().items() - if isinstance(_v, type) and issubclass(_v, Dimensioned)])) + ['Overlayable'] +__all__ = list({_k for _k, _v in locals().items() + if isinstance(_v, type) and issubclass(_v, Dimensioned)}) + ['Overlayable'] diff --git a/holoviews/core/pprint.py b/holoviews/core/pprint.py index 0ef88b7c6c..199c1c0851 100644 --- a/holoviews/core/pprint.py +++ b/holoviews/core/pprint.py @@ -76,7 +76,7 @@ def inner_filter(name, p): return inner_filter -class InfoPrinter(object): +class InfoPrinter: """ Class for printing other information related to an object that is of use to the user. @@ -117,16 +117,16 @@ def heading(cls, heading_text, char='=', level=0, ansi=False): """ heading_color = cls.headings[level] if ansi else '%s' if char is None: - return heading_color % '%s\n' % heading_text + return heading_color % f'{heading_text}\n' else: heading_ul = char*len(heading_text) - return heading_color % '%s\n%s\n%s' % (heading_ul, heading_text, heading_ul) + return heading_color % f'{heading_ul}\n{heading_text}\n{heading_ul}' @classmethod def highlight(cls, pattern, string): if pattern is None: return string - return re.sub(pattern, '\033[43;1;30m\g<0>\x1b[0m', + return re.sub(pattern, '\033[43;1;30m\\g<0>\x1b[0m', string, flags=re.IGNORECASE) @@ -160,9 +160,7 @@ def info(cls, obj, ansi=False, backend='matplotlib', visualization=True, info = ansi_escape.sub('', info) return cls.highlight(pattern, info) - heading = name if isclass else '{name}: {group} {label}'.format(name=name, - group=obj.group, - label=obj.label) + heading = name if isclass else f'{name}: {obj.group} {obj.label}' prefix = heading lines = [prefix, cls.object_info(obj, name, backend=backend, ansi=ansi)] @@ -177,9 +175,7 @@ def get_target(cls, obj): objtype=obj.__class__.__name__ group = 
group_sanitizer(obj.group) label = ('.' + label_sanitizer(obj.label) if obj.label else '') - target = '{objtype}.{group}{label}'.format(objtype=objtype, - group=group, - label=label) + target = f'{objtype}.{group}{label}' return (None, target) if hasattr(obj, 'values') else (target, None) @@ -189,25 +185,25 @@ def target_info(cls, obj, ansi=False): targets = obj.traverse(cls.get_target) elements, containers = zip(*targets) - element_set = set(el for el in elements if el is not None) - container_set = set(c for c in containers if c is not None) + element_set = {el for el in elements if el is not None} + container_set = {c for c in containers if c is not None} element_info = None if len(element_set) == 1: - element_info = 'Element: %s' % list(element_set)[0] + element_info = f'Element: {list(element_set)[0]}' elif len(element_set) > 1: element_info = 'Elements:\n %s' % '\n '.join(sorted(element_set)) container_info = None if len(container_set) == 1: - container_info = 'Container: %s' % list(container_set)[0] + container_info = f'Container: {list(container_set)[0]}' elif len(container_set) > 1: container_info = 'Containers:\n %s' % '\n '.join(sorted(container_set)) heading = cls.heading('Target Specifications', ansi=ansi, char="-") target_header = '\nTargets in this object available for customization:\n' if element_info and container_info: - target_info = '%s\n\n%s' % (element_info, container_info) + target_info = f'{element_info}\n\n{container_info}' else: target_info = element_info if element_info else container_info @@ -237,9 +233,9 @@ def object_info(cls, obj, name, backend, ansi=False): def options_info(cls, plot_class, ansi=False, pattern=None): if plot_class.style_opts: backend_name = plot_class.backend - style_info = ("\n(Consult %s's documentation for more information.)" % backend_name) - style_keywords = '\t%s' % ', '.join(plot_class.style_opts) - style_msg = '%s\n%s' % (style_keywords, style_info) + style_info = f"\n(Consult {backend_name}'s documentation 
for more information.)" + style_keywords = f"\t{', '.join(plot_class.style_opts)}" + style_msg = f'{style_keywords}\n{style_info}' else: style_msg = '\t' @@ -254,7 +250,7 @@ def options_info(cls, plot_class, ansi=False, pattern=None): lines+= ['No %r parameters found matching specified pattern %r.' % (plot_class.__name__, pattern)] else: - lines+= ['No %r parameters found.' % plot_class.__name__] + lines+= [f'No {plot_class.__name__!r} parameters found.'] return '\n'.join(lines) @@ -355,9 +351,9 @@ def element_info(cls_or_slf, node, siblings, level, value_dims): """ info = cls_or_slf.component_type(node) if len(node.kdims) >= 1: - info += cls_or_slf.tab + '[%s]' % ','.join(d.name for d in node.kdims) + info += cls_or_slf.tab + f"[{','.join(d.name for d in node.kdims)}]" if value_dims and len(node.vdims) >= 1: - info += cls_or_slf.tab + '(%s)' % ','.join(d.name for d in node.vdims) + info += cls_or_slf.tab + f"({','.join(d.name for d in node.vdims)})" return level, [(level, info)] @bothmethod @@ -394,7 +390,7 @@ def adjointlayout_info(cls_or_slf, node, siblings, level, value_dims): @bothmethod def ndmapping_info(cls_or_slf, node, siblings, level, value_dims): - key_dim_info = '[%s]' % ','.join(d.name for d in node.kdims) + key_dim_info = f"[{','.join(d.name for d in node.kdims)}]" first_line = cls_or_slf.component_type(node) + cls_or_slf.tab + key_dim_info lines = [(level, first_line)] diff --git a/holoviews/core/sheetcoords.py b/holoviews/core/sheetcoords.py index 4a3cb1f559..3f0efb3216 100644 --- a/holoviews/core/sheetcoords.py +++ b/holoviews/core/sheetcoords.py @@ -115,7 +115,7 @@ -class SheetCoordinateSystem(object): +class SheetCoordinateSystem: """ Provides methods to allow conversion between sheet and matrix coordinates. 
diff --git a/holoviews/core/spaces.py b/holoviews/core/spaces.py index b58f192025..d596cffcb8 100644 --- a/holoviews/core/spaces.py +++ b/holoviews/core/spaces.py @@ -178,8 +178,8 @@ def _dynamic_mul(self, dimensions, other, keys): # If either is a HoloMap compute Dimension values if not isinstance(self, DynamicMap) or not isinstance(other, DynamicMap): keys = sorted((d, v) for k in keys for d, v in k) - grouped = dict([(g, [v for _, v in group]) - for g, group in groupby(keys, lambda x: x[0])]) + grouped = {g: [v for _, v in group] + for g, group in groupby(keys, lambda x: x[0])} dimensions = [d.clone(values=grouped[d.name]) for d in dimensions] map_obj = None @@ -301,7 +301,7 @@ def __lshift__(self, other): elif isinstance(other, AdjointLayout): return AdjointLayout(other.data+[self]) else: - raise TypeError('Cannot append {0} to a AdjointLayout'.format(type(other).__name__)) + raise TypeError(f'Cannot append {type(other).__name__} to a AdjointLayout') def collate(self, merge_type=None, drop=[], drop_constant=False): @@ -573,8 +573,7 @@ def __call__(self, *args, **kwargs): clashes = set(pos_kwargs.keys()) & set(kwargs.keys()) if clashes: self.param.warning( - 'Positional arguments %r overriden by keywords' - % list(clashes)) + f'Positional arguments {list(clashes)!r} overriden by keywords') args, kwargs = (), dict(pos_kwargs, **kwargs) try: @@ -584,8 +583,8 @@ def __call__(self, *args, **kwargs): # invalid keys on DynamicMap and should not warn raise except Exception as e: - posstr = ', '.join(['%r' % el for el in self.args]) if self.args else '' - kwstr = ', '.join('%s=%r' % (k,v) for k,v in self.kwargs.items()) + posstr = ', '.join([f'{el!r}' for el in self.args]) if self.args else '' + kwstr = ', '.join(f'{k}={v!r}' for k,v in self.kwargs.items()) argstr = ', '.join([el for el in [posstr, kwstr] if el]) message = ("Callable raised \"{e}\".\n" "Invoked as {name}({argstr})") @@ -669,7 +668,7 @@ def dynamicmap_memoization(callable_obj, streams): -class 
periodic(object): +class periodic: """ Implements the utility of the same name on DynamicMap. @@ -787,7 +786,7 @@ def __init__(self, callback, initial_items=None, streams=None, **params): if invalid: msg = ('The supplied streams list contains objects that ' 'are not Stream instances: {objs}') - raise TypeError(msg.format(objs = ', '.join('%r' % el for el in invalid))) + raise TypeError(msg.format(objs = ', '.join(f'{el!r}' for el in invalid))) super().__init__(initial_items, callback=callback, streams=valid, **params) @@ -883,7 +882,7 @@ def _initial_key(self): if undefined: msg = ('Dimension(s) {undefined_dims} do not specify range or values needed ' 'to generate initial key') - undefined_dims = ', '.join(['%r' % str(dim) for dim in undefined]) + undefined_dims = ', '.join(f'{str(dim)!r}' for dim in undefined) raise KeyError(msg.format(undefined_dims=undefined_dims)) return tuple(key) @@ -902,12 +901,10 @@ def _validate_key(self, key): low, high = util.max_range([kdim.range, kdim.soft_range]) if util.is_number(low) and util.isfinite(low): if val < low: - raise KeyError("Key value %s below lower bound %s" - % (val, low)) + raise KeyError(f"Key value {val} below lower bound {low}") if util.is_number(high) and util.isfinite(high): if val > high: - raise KeyError("Key value %s above upper bound %s" - % (val, high)) + raise KeyError(f"Key value {val} above upper bound {high}") def event(self, **kwargs): """Updates attached streams and triggers events @@ -932,7 +929,7 @@ def event(self, **kwargs): invalid = [k for k in kwargs.keys() if k not in stream_params] if invalid: msg = 'Key(s) {invalid} do not correspond to stream parameters' - raise KeyError(msg.format(invalid = ', '.join('%r' % i for i in invalid))) + raise KeyError(msg.format(invalid = ', '.join(f'{i!r}' for i in invalid))) streams = [] for stream in self.streams: @@ -1089,7 +1086,7 @@ def _cross_product(self, tuple_key, cache, data_slice): product = tuple_key[0] else: args = [set(el) if isinstance(el, 
(list,set)) - else set([el]) for el in tuple_key] + else {el} for el in tuple_key] product = itertools.product(*args) data = [] @@ -1374,10 +1371,10 @@ def split_overlay_callback(obj, overlay_key=key, overlay_el=el, **kwargs): for i, (k, v) in enumerate(items)] match = util.closest_match(spec, specs) if match is None: - raise KeyError('{spec} spec not found in {otype}. The split_overlays method ' + otype = type(obj).__name__ + raise KeyError(f'{spec} spec not found in {otype}. The split_overlays method ' 'only works consistently for a DynamicMap where the ' - 'layers of the {otype} do not change.'.format( - spec=spec, otype=type(obj).__name__)) + f'layers of the {otype} do not change.') return items[match][1] dmap = Dynamic(self, streams=self.streams, operation=split_overlay_callback) dmap.data = OrderedDict([(list(self.data.keys())[-1], self.last.data[key])]) @@ -1749,7 +1746,7 @@ def __lshift__(self, other): elif isinstance(other, AdjointLayout): return AdjointLayout(other.data+[self]) else: - raise TypeError('Cannot append {0} to a AdjointLayout'.format(type(other).__name__)) + raise TypeError(f'Cannot append {type(other).__name__} to a AdjointLayout') def _transform_indices(self, key): @@ -1843,7 +1840,7 @@ def shape(self): keys = self.keys() if self.ndims == 1: return (len(keys), 1) - return len(set(k[0] for k in keys)), len(set(k[1] for k in keys)) + return len({k[0] for k in keys}), len({k[1] for k in keys}) def decollate(self): """Packs GridSpace of DynamicMaps into a single DynamicMap that returns a diff --git a/holoviews/core/traversal.py b/holoviews/core/traversal.py index 00d1dc59da..618ee7ca49 100644 --- a/holoviews/core/traversal.py +++ b/holoviews/core/traversal.py @@ -10,11 +10,6 @@ from .dimension import Dimension from .util import merge_dimensions -try: - import itertools.izip as zip -except ImportError: - pass - def create_ndkey(length, indexes, values): key = [None] * length diff --git a/holoviews/core/tree.py b/holoviews/core/tree.py index 
d70befd38b..526c979fbe 100644 --- a/holoviews/core/tree.py +++ b/holoviews/core/tree.py @@ -4,7 +4,7 @@ from .pprint import PrettyPrinter -class AttrTree(object): +class AttrTree: """ An AttrTree offers convenient, multi-level attribute access for collections of objects. AttrTree objects may also be combined @@ -247,7 +247,7 @@ def __getattr__(self, identifier): # Attributes starting with __ get name mangled if identifier.startswith('_' + type(self).__name__) or identifier.startswith('__'): - raise AttributeError('Attribute %s not found.' % identifier) + raise AttributeError(f'Attribute {identifier} not found.') elif self.fixed==True: raise AttributeError(self._fixed_error % identifier) diff --git a/holoviews/core/util.py b/holoviews/core/util.py index b1133a28dd..9b2bc795d1 100644 --- a/holoviews/core/util.py +++ b/holoviews/core/util.py @@ -1,5 +1,4 @@ import sys, warnings, operator -import builtins as builtins # noqa (compatibility) import hashlib import json import time @@ -15,7 +14,7 @@ from collections.abc import Iterable # noqa from collections import defaultdict, OrderedDict, namedtuple # noqa (compatibility) from contextlib import contextmanager -from packaging.version import Version as LooseVersion +from packaging.version import Version from functools import partial from threading import Thread, Event from types import FunctionType @@ -32,8 +31,8 @@ get_keywords = operator.attrgetter('varkw') generator_types = (zip, range, types.GeneratorType) -numpy_version = LooseVersion(np.__version__) -param_version = LooseVersion(param.__version__) +numpy_version = Version(np.__version__) +param_version = Version(param.__version__) datetime_types = (np.datetime64, dt.datetime, dt.date, dt.time) timedelta_types = (np.timedelta64, dt.timedelta,) @@ -50,15 +49,15 @@ _PANDAS_ROWS_LARGE = 1_000_000 _PANDAS_SAMPLE_SIZE = 1_000_000 -pandas_version = LooseVersion(pd.__version__) +pandas_version = Version(pd.__version__) try: - if pandas_version >= LooseVersion('1.3.0'): + if 
pandas_version >= Version('1.3.0'): from pandas.core.dtypes.dtypes import DatetimeTZDtype as DatetimeTZDtypeType from pandas.core.dtypes.generic import ABCSeries, ABCIndex as ABCIndexClass - elif pandas_version >= LooseVersion('0.24.0'): + elif pandas_version >= Version('0.24.0'): from pandas.core.dtypes.dtypes import DatetimeTZDtype as DatetimeTZDtypeType from pandas.core.dtypes.generic import ABCSeries, ABCIndexClass - elif pandas_version > LooseVersion('0.20.0'): + elif pandas_version > Version('0.20.0'): from pandas.core.dtypes.dtypes import DatetimeTZDtypeType from pandas.core.dtypes.generic import ABCSeries, ABCIndexClass else: @@ -69,10 +68,10 @@ datetime_types = datetime_types + pandas_datetime_types timedelta_types = timedelta_types + pandas_timedelta_types arraylike_types = arraylike_types + (ABCSeries, ABCIndexClass) - if pandas_version > LooseVersion('0.23.0'): + if pandas_version > Version('0.23.0'): from pandas.core.dtypes.generic import ABCExtensionArray arraylike_types = arraylike_types + (ABCExtensionArray,) - if pandas_version > LooseVersion('1.0'): + if pandas_version > Version('1.0'): from pandas.core.arrays.masked import BaseMaskedArray masked_types = (BaseMaskedArray,) except Exception as e: @@ -83,9 +82,9 @@ import cftime cftime_types = (cftime.datetime,) datetime_types += cftime_types -except: +except ImportError: cftime_types = () -_STANDARD_CALENDARS = set(['standard', 'gregorian', 'proleptic_gregorian']) +_STANDARD_CALENDARS = {'standard', 'gregorian', 'proleptic_gregorian'} class VersionError(Exception): @@ -188,7 +187,7 @@ def default(self, obj): obj = state.choice(obj.flat, size=_NP_SAMPLE_SIZE) h.update(obj.tobytes()) return h.hexdigest() - if pd and isinstance(obj, (pd.Series, pd.DataFrame)): + if isinstance(obj, (pd.Series, pd.DataFrame)): if len(obj) > _PANDAS_ROWS_LARGE: obj = obj.sample(n=_PANDAS_SAMPLE_SIZE, random_state=0) try: @@ -216,7 +215,7 @@ def default(self, obj): return repr(obj) try: return hash(obj) - except: + except 
Exception: return id(obj) @@ -305,7 +304,7 @@ def __init__(self, period, count, callback, timeout=None, block=False): if isinstance(count, int): if count < 0: raise ValueError('Count value must be positive') - elif not type(count) is type(None): + elif count is not None: raise ValueError('Count value must be a positive integer or None') if block is False and count is None and timeout is None: @@ -338,9 +337,7 @@ def stop(self): self._completed.set() def __repr__(self): - return 'periodic(%s, %s, %s)' % (self.period, - self.count, - callable_name(self.callback)) + return f'periodic({self.period}, {self.count}, {callable_name(self.callback)})' def __str__(self): return repr(self) @@ -458,14 +455,13 @@ def validate_dynamic_argspec(callback, kdims, streams): unassigned_streams = set(stream_params) - set(argspec.args) if unassigned_streams: unassigned = ','.join(unassigned_streams) - raise KeyError('Callable {name!r} missing keywords to ' - 'accept stream parameters: {unassigned}'.format(name=name, - unassigned=unassigned)) + raise KeyError(f'Callable {name!r} missing keywords to ' + f'accept stream parameters: {unassigned}') if len(posargs) > len(kdims) + len(stream_params): - raise KeyError('Callable {name!r} accepts more positional arguments than ' - 'there are kdims and stream parameters'.format(name=name)) + raise KeyError(f'Callable {name!r} accepts more positional arguments than ' + 'there are kdims and stream parameters') if kdims == []: # Can be no posargs, stream kwargs already validated return [] if set(kdims) == set(posargs): # Posargs match exactly, can all be passed as kwargs @@ -473,16 +469,14 @@ def validate_dynamic_argspec(callback, kdims, streams): elif len(posargs) == len(kdims): # Posargs match kdims length, supplying names if argspec.args[:len(kdims)] != posargs: raise KeyError('Unmatched positional kdim arguments only allowed at ' - 'the start of the signature of {name!r}'.format(name=name)) + f'the start of the signature of {name!r}') return 
posargs elif argspec.varargs: # Posargs missing, passed to Callable directly return None elif set(posargs) - set(kdims): - raise KeyError('Callable {name!r} accepts more positional arguments {posargs} ' - 'than there are key dimensions {kdims}'.format(name=name, - posargs=posargs, - kdims=kdims)) + raise KeyError(f'Callable {name!r} accepts more positional arguments {posargs} ' + f'than there are key dimensions {kdims}') elif set(kdims).issubset(set(kwargs)): # Key dims can be supplied by keyword return kdims elif set(kdims).issubset(set(posargs+kwargs)): @@ -490,10 +484,9 @@ def validate_dynamic_argspec(callback, kdims, streams): elif argspec.keywords: return kdims else: - raise KeyError('Callback {name!r} signature over {names} does not accommodate ' - 'required kdims {kdims}'.format(name=name, - names=list(set(posargs+kwargs)), - kdims=kdims)) + names = list(set(posargs+kwargs)) + raise KeyError(f'Callback {name!r} signature over {names} does not accommodate ' + f'required kdims {kdims}') def callable_name(callable_obj): @@ -677,7 +670,7 @@ def allowable(self_or_cls, name, disable_leading_underscore=None): else disable_leading_underscore) if disabled_ and name.startswith('_'): return False - isrepr = any(('_repr_%s_' % el) == name for el in disabled_reprs) + isrepr = any(f'_repr_{el}_' == name for el in disabled_reprs) return (name not in self_or_cls.disallowed) and not isrepr @param.parameterized.bothmethod @@ -732,7 +725,7 @@ def __call__(self, name, escape=True): return self._lookup_table[name] name = bytes_to_unicode(name) if not self.allowable(name): - raise AttributeError("String %r is in the disallowed list of attribute names: %r" % (name, self.disallowed)) + raise AttributeError(f"String {name!r} is in the disallowed list of attribute names: {self.disallowed!r}") if self.capitalize and name and name[0] in string.ascii_lowercase: name = name[0].upper()+name[1:] @@ -809,7 +802,7 @@ def isnumeric(val): try: float(val) return True - except: + except 
Exception: return False @@ -827,7 +820,7 @@ def asarray(arraylike, strict=True): elif hasattr(arraylike, '__array__'): return np.asarray(arraylike) elif strict: - raise ValueError('Could not convert %s type to array' % type(arraylike)) + raise ValueError(f'Could not convert {type(arraylike)} type to array') return arraylike @@ -839,13 +832,13 @@ def isnat(val): """ if (isinstance(val, (np.datetime64, np.timedelta64)) or (isinstance(val, np.ndarray) and val.dtype.kind == 'M')): - if numpy_version >= LooseVersion('1.13'): + if numpy_version >= Version('1.13'): return np.isnat(val) else: return val.view('i8') == nat_as_integer - elif pd and val is pd.NaT: + elif val is pd.NaT: return True - elif pd and isinstance(val, pandas_datetime_types+pandas_timedelta_types): + elif isinstance(val, pandas_datetime_types+pandas_timedelta_types): return pd.isna(val) else: return False @@ -875,9 +868,9 @@ def isfinite(val): elif val.dtype.kind == 'O': return np.array([isfinite(v) for v in val], dtype=bool) elif val.dtype.kind in 'US': - return ~pd.isna(val) if pd else np.ones_like(val, dtype=bool) + return ~pd.isna(val) finite = np.isfinite(val) - if pd and pandas_version >= LooseVersion('1.0.0'): + if pandas_version >= Version('1.0.0'): finite &= ~pd.isna(val) return finite elif isinstance(val, datetime_types+timedelta_types): @@ -885,7 +878,7 @@ def isfinite(val): elif isinstance(val, (str, bytes)): return True finite = np.isfinite(val) - if pd and pandas_version >= LooseVersion('1.0.0'): + if pandas_version >= Version('1.0.0'): if finite is pd.NA: return False return finite & (~pd.isna(val)) @@ -913,7 +906,7 @@ def find_minmax(lims, olims): try: limzip = zip(list(lims), list(olims), [np.nanmin, np.nanmax]) limits = tuple([float(fn([l, ol])) for l, ol, fn in limzip]) - except: + except Exception: limits = (np.NaN, np.NaN) return limits @@ -934,11 +927,11 @@ def find_range(values, soft_range=[]): with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'All-NaN 
(slice|axis) encountered') return np.nanmin(values), np.nanmax(values) - except: + except Exception: try: values = sorted(values) return (values[0], values[-1]) - except: + except Exception: return (None, None) @@ -959,7 +952,7 @@ def max_range(ranges, combined=True): with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered') values = [tuple(np.NaN if v is None else v for v in r) for r in ranges] - if pd and any(isinstance(v, datetime_types) and not isinstance(v, cftime_types+(dt.time,)) + if any(isinstance(v, datetime_types) and not isinstance(v, cftime_types+(dt.time,)) for r in values for v in r): converted = [] for l, h in values: @@ -989,7 +982,7 @@ def max_range(ranges, combined=True): return (np.nanmin(arr), np.nanmax(arr)) else: return (np.nanmin(arr[:, 0]), np.nanmax(arr[:, 1])) - except: + except Exception: return (np.NaN, np.NaN) @@ -1105,8 +1098,8 @@ def int_to_alpha(n, upper=True): def int_to_roman(input): - if type(input) != type(1): - raise TypeError("expected integer, got %s" % type(input)) + if not isinstance(input, int): + raise TypeError(f"expected integer, got {type(input)}") if not 0 < input < 4000: raise ValueError("Argument must be between 1 and 3999") ints = (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1) @@ -1157,22 +1150,18 @@ def unique_array(arr): """ if not len(arr): return np.asarray(arr) - elif pd: - if isinstance(arr, np.ndarray) and arr.dtype.kind not in 'MO': - # Avoid expensive unpacking if not potentially datetime - return pd.unique(arr) - - values = [] - for v in arr: - if (isinstance(v, datetime_types) and - not isinstance(v, cftime_types)): - v = pd.Timestamp(v).to_datetime64() - values.append(v) - return pd.unique(values) - else: - arr = np.asarray(arr) - _, uniq_inds = np.unique(arr, return_index=True) - return arr[np.sort(uniq_inds)] + + if isinstance(arr, np.ndarray) and arr.dtype.kind not in 'MO': + # Avoid expensive unpacking if not potentially datetime + return 
pd.unique(arr) + + values = [] + for v in arr: + if (isinstance(v, datetime_types) and + not isinstance(v, cftime_types)): + v = pd.Timestamp(v).to_datetime64() + values.append(v) + return pd.unique(values) def match_spec(element, specification): @@ -1482,7 +1471,7 @@ def is_dataframe(data): dd = None if 'dask.dataframe' in sys.modules and 'pandas' in sys.modules: import dask.dataframe as dd - return((pd is not None and isinstance(data, pd.DataFrame)) or + return((isinstance(data, pd.DataFrame)) or (dd is not None and isinstance(data, dd.DataFrame))) @@ -1493,7 +1482,7 @@ def is_series(data): dd = None if 'dask.dataframe' in sys.modules: import dask.dataframe as dd - return((pd is not None and isinstance(data, pd.Series)) or + return (isinstance(data, pd.Series) or (dd is not None and isinstance(data, dd.Series))) @@ -1579,14 +1568,11 @@ def resolve_dependent_value(value): ) if 'panel' in sys.modules: - from panel.widgets import RangeSlider, Widget + from panel.depends import param_value_if_widget + from panel.widgets import RangeSlider range_widget = isinstance(value, RangeSlider) - try: - from panel.depends import param_value_if_widget - value = param_value_if_widget(value) - except Exception: - if isinstance(value, Widget): - value = value.param.value + value = param_value_if_widget(value) + if is_param_method(value, has_deps=True): value = value() elif isinstance(value, param.Parameter) and isinstance(value.owner, param.Parameterized): @@ -1872,12 +1858,7 @@ class ndmapping_groupby(param.ParameterizedFunction): def __call__(self, ndmapping, dimensions, container_type, group_type, sort=False, **kwargs): - try: - import pandas # noqa (optional import) - groupby = self.groupby_pandas - except: - groupby = self.groupby_python - return groupby(ndmapping, dimensions, container_type, + return self.groupby_pandas(ndmapping, dimensions, container_type, group_type, sort=sort, **kwargs) @param.parameterized.bothmethod @@ -1961,7 +1942,7 @@ def arglexsort(arrays): dtypes = 
','.join(array.dtype.str for array in arrays) recarray = np.empty(len(arrays[0]), dtype=dtypes) for i, array in enumerate(arrays): - recarray['f%s' % i] = array + recarray[f'f{i}'] = array return recarray.argsort() @@ -1973,7 +1954,7 @@ def dimensioned_streams(dmap): dimensioned = [] for stream in dmap.streams: stream_params = stream_parameters([stream]) - if set([str(k) for k in dmap.kdims]) & set(stream_params): + if {str(k) for k in dmap.kdims} & set(stream_params): dimensioned.append(stream) return dimensioned @@ -2011,7 +1992,7 @@ def is_nan(x): """ try: return np.isnan(x) - except: + except Exception: return False @@ -2077,7 +2058,7 @@ def date_range(start, end, length, time_unit='us'): of samples. """ step = (1./compute_density(start, end, length, time_unit)) - if pd and isinstance(start, pd.Timestamp): + if isinstance(start, pd.Timestamp): start = start.to_datetime64() step = np.timedelta64(int(round(step)), time_unit) return start+step/2.+np.arange(length)*step @@ -2087,8 +2068,6 @@ def parse_datetime(date): """ Parses dates specified as string or integer or pandas Timestamp """ - if pd is None: - raise ImportError('Parsing dates from strings requires pandas') return pd.to_datetime(date).to_datetime64() @@ -2112,15 +2091,14 @@ def dt_to_int(value, time_unit='us'): """ Converts a datetime type to an integer with the supplied time unit. 
""" - if pd: - if isinstance(value, pd.Period): - value = value.to_timestamp() - if isinstance(value, pd.Timestamp): - try: - value = value.to_datetime64() - except Exception: - value = np.datetime64(value.to_pydatetime()) - elif isinstance(value, cftime_types): + if isinstance(value, pd.Period): + value = value.to_timestamp() + if isinstance(value, pd.Timestamp): + try: + value = value.to_datetime64() + except Exception: + value = np.datetime64(value.to_pydatetime()) + if isinstance(value, cftime_types): return cftime_to_timestamp(value, time_unit) # date class is a parent for datetime class @@ -2142,16 +2120,11 @@ def dt_to_int(value, time_unit='us'): else: tscale = 1./np.timedelta64(1, time_unit).tolist().total_seconds() - try: - # Handle python3 - if value.tzinfo is None: - _epoch = dt.datetime(1970, 1, 1) - else: - _epoch = dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc) - return int((value - _epoch).total_seconds() * tscale) - except Exception: - # Handle python2 - return (time.mktime(value.timetuple()) + value.microsecond / 1e6) * tscale + if value.tzinfo is None: + _epoch = dt.datetime(1970, 1, 1) + else: + _epoch = dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc) + return int((value - _epoch).total_seconds() * tscale) def cftime_to_timestamp(date, time_unit='us'): @@ -2212,7 +2185,7 @@ def mimebundle_to_html(bundle): html = data.get('text/html', '') if 'application/javascript' in data: js = data['application/javascript'] - html += '\n'.format(js=js) + html += f'\n' return html diff --git a/holoviews/element/__init__.py b/holoviews/element/__init__.py index b44557f013..fb5198931a 100644 --- a/holoviews/element/__init__.py +++ b/holoviews/element/__init__.py @@ -118,4 +118,4 @@ def public(obj): return False return issubclass(obj, Element) -__all__ = list(set([_k for _k, _v in locals().items() if public(_v)])) +__all__ = list({_k for _k, _v in locals().items() if public(_v)}) diff --git a/holoviews/element/annotation.py b/holoviews/element/annotation.py index 
90a37d64ee..f439dc7680 100644 --- a/holoviews/element/annotation.py +++ b/holoviews/element/annotation.py @@ -43,8 +43,7 @@ def __getitem__(self, key): elif len(key) == 0: return self.clone() if not all(isinstance(k, slice) for k in key): - raise KeyError("%s only support slice indexing" % - self.__class__.__name__) + raise KeyError(f"{self.__class__.__name__} only support slice indexing") xkey, ykey = tuple(key[:len(self.kdims)]) xstart, xstop = xkey.start, xkey.stop ystart, ystop = ykey.start, ykey.stop diff --git a/holoviews/element/chart.py b/holoviews/element/chart.py index f34fbe48c1..cff1bcedc6 100644 --- a/holoviews/element/chart.py +++ b/holoviews/element/chart.py @@ -51,7 +51,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): super().__init__(data, **params) def __getitem__(self, index): - return super(Chart, self).__getitem__(index) + return super().__getitem__(index) class Scatter(Selection1DExpr, Chart): diff --git a/holoviews/element/comparison.py b/holoviews/element/comparison.py index 07c69faaa4..1ac05c7d05 100644 --- a/holoviews/element/comparison.py +++ b/holoviews/element/comparison.py @@ -19,6 +19,7 @@ """ from functools import partial import numpy as np +import pandas as pd from unittest.util import safe_repr from unittest import TestCase from numpy.testing import assert_array_equal, assert_array_almost_equal @@ -28,11 +29,11 @@ HoloMap, Dimensioned, Layout, NdLayout, NdOverlay, GridSpace, DynamicMap, GridMatrix, OrderedDict) from ..core.options import Options, Cycle -from ..core.util import (pd, cast_array_to_int64, datetime_types, dt_to_int, +from ..core.util import (cast_array_to_int64, datetime_types, dt_to_int, is_float) -class ComparisonInterface(object): +class ComparisonInterface: """ This class is designed to allow equality testing to work seamlessly with unittest.TestCase as a mix-in by implementing a @@ -53,7 +54,7 @@ def simple_equality(cls, first, second, msg=None): Classmethod equivalent to unittest.TestCase method 
(longMessage = False.) """ if not first==second: - standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second)) + standardMsg = f'{safe_repr(first)} != {safe_repr(second)}' raise cls.failureException(msg or standardMsg) @@ -115,8 +116,7 @@ def register(cls): cls.equality_type_funcs[np.ma.masked_array] = cls.compare_arrays # Pandas dataframe comparison - if pd: - cls.equality_type_funcs[pd.DataFrame] = cls.compare_dataframe + cls.equality_type_funcs[pd.DataFrame] = cls.compare_dataframe # Dimension objects cls.equality_type_funcs[Dimension] = cls.compare_dimensions @@ -212,10 +212,9 @@ def compare_dictionaries(cls, d1, d2, msg='Dictionaries'): keys2 = set(d2.keys()) symmetric_diff = keys ^ keys2 if symmetric_diff: - msg = ("Dictionaries have different sets of keys: %r\n\n" - % symmetric_diff) - msg += "Dictionary 1: %s\n" % d1 - msg += "Dictionary 2: %s" % d2 + msg = f"Dictionaries have different sets of keys: {symmetric_diff!r}\n\n" + msg += f"Dictionary 1: {d1}\n" + msg += f"Dictionary 2: {d2}" raise cls.failureException(msg) for k in keys: cls.assertEqual(d1[k], d2[k]) @@ -228,7 +227,7 @@ def compare_lists(cls, l1, l2, msg=None): for v1, v2 in zip(l1, l2): cls.assertEqual(v1, v2) except AssertionError: - raise AssertionError(msg or '%s != %s' % (repr(l1), repr(l2))) + raise AssertionError(msg or f'{l1!r} != {l2!r}') @classmethod @@ -238,7 +237,7 @@ def compare_tuples(cls, t1, t2, msg=None): for i1, i2 in zip(t1, t2): cls.assertEqual(i1, i2) except AssertionError: - raise AssertionError(msg or '%s != %s' % (repr(t1), repr(t2))) + raise AssertionError(msg or f'{t1!r} != {t2!r}') #=====================# @@ -257,7 +256,7 @@ def compare_arrays(cls, arr1, arr2, msg='Arrays'): if arr2.dtype.kind == 'M': arr2 = cast_array_to_int64(arr2.astype('datetime64[ns]')) assert_array_equal(arr1, arr2) - except: + except Exception: try: cls.assert_array_almost_equal_fn(arr1, arr2) except AssertionError as e: @@ -310,8 +309,8 @@ def compare_dimensions(cls, dim1, dim2, msg=None): 
try: # This is needed as two lists are not compared by contents using == cls.assertEqual(dim1_params[k], dim2_params[k], msg=None) except AssertionError as e: - msg = 'Dimension parameter %r mismatched: ' % k - raise cls.failureException("%s%s" % (msg, str(e))) + msg = f'Dimension parameter {k!r} mismatched: ' + raise cls.failureException(f"{msg}{str(e)}") @classmethod def compare_labelled_data(cls, obj1, obj2, msg=None): @@ -321,7 +320,7 @@ def compare_labelled_data(cls, obj1, obj2, msg=None): @classmethod def compare_dimension_lists(cls, dlist1, dlist2, msg='Dimension lists'): if len(dlist1) != len(dlist2): - raise cls.failureException('%s mismatched' % msg) + raise cls.failureException(f'{msg} mismatched') for d1, d2 in zip(dlist1, dlist2): cls.assertEqual(d1, d2) @@ -346,9 +345,9 @@ def compare_elements(cls, obj1, obj2, msg=None): @classmethod def compare_trees(cls, el1, el2, msg='Trees'): if len(el1.keys()) != len(el2.keys()): - raise cls.failureException("%s have mismatched path counts." % msg) + raise cls.failureException(f"{msg} have mismatched path counts.") if el1.keys() != el2.keys(): - raise cls.failureException("%s have mismatched paths." % msg) + raise cls.failureException(f"{msg} have mismatched paths.") for element1, element2 in zip(el1.values(), el2.values()): cls.assertEqual(element1, element2) @@ -376,14 +375,14 @@ def compare_overlays(cls, el1, el2, msg=None): def compare_ndmappings(cls, el1, el2, msg='NdMappings'): cls.compare_dimensioned(el1, el2) if len(el1.keys()) != len(el2.keys()): - raise cls.failureException("%s have different numbers of keys." % msg) + raise cls.failureException(f"{msg} have different numbers of keys.") if set(el1.keys()) != set(el2.keys()): diff1 = [el for el in el1.keys() if el not in el2.keys()] diff2 = [el for el in el2.keys() if el not in el1.keys()] - raise cls.failureException("%s have different sets of keys. " % msg - + "In first, not second %s. " % diff1 - + "In second, not first: %s." 
% diff2) + raise cls.failureException(f"{msg} have different sets of keys. " + + f"In first, not second {diff1}. " + + f"In second, not first: {diff2}.") for element1, element2 in zip(el1, el2): cls.assertEqual(element1, element2) @@ -482,9 +481,9 @@ def compare_paths(cls, el1, el2, msg='Path'): paths1 = el1.split() paths2 = el2.split() if len(paths1) != len(paths2): - raise cls.failureException("%s objects do not have a matching number of paths." % msg) + raise cls.failureException(f"{msg} objects do not have a matching number of paths.") for p1, p2 in zip(paths1, paths2): - cls.compare_dataset(p1, p2, '%s data' % msg) + cls.compare_dataset(p1, p2, f'{msg} data') @classmethod def compare_contours(cls, el1, el2, msg='Contours'): @@ -526,7 +525,7 @@ def compare_dataset(cls, el1, el2, msg='Dataset'): % (msg, el1.shape[0], el2.shape[0])) for dim, d1, d2 in dimension_data: if d1.dtype != d2.dtype: - cls.failureException("%s %s columns have different type." % (msg, dim.pprint_label) + cls.failureException(f"{msg} {dim.pprint_label} columns have different type." + " First has type %s, and second has type %s." % (d1, d2)) if d1.dtype.kind in 'SUOV': @@ -722,13 +721,13 @@ def compare_hextiles(cls, el1, el2, msg='HexTiles'): def _compare_grids(cls, el1, el2, name): if len(el1.keys()) != len(el2.keys()): - raise cls.failureException("%ss have different numbers of items." % name) + raise cls.failureException(f"{name}s have different numbers of items.") if set(el1.keys()) != set(el2.keys()): - raise cls.failureException("%ss have different keys." % name) + raise cls.failureException(f"{name}s have different keys.") if len(el1) != len(el2): - raise cls.failureException("%ss have different depths." 
% name) + raise cls.failureException(f"{name}s have different depths.") for element1, element2 in zip(el1, el2): cls.assertEqual(element1, element2) diff --git a/holoviews/element/graphs.py b/holoviews/element/graphs.py index 40fbcc1c23..781100a873 100644 --- a/holoviews/element/graphs.py +++ b/holoviews/element/graphs.py @@ -3,6 +3,7 @@ import param import numpy as np +import pandas as pd from ..core import Dimension, Dataset, Element2D from ..core.accessors import Redim @@ -10,7 +11,7 @@ from ..core.operation import Operation from .chart import Points from .path import Path -from .util import (split_path, pd, circular_layout, connect_edges, +from .util import (split_path, circular_layout, connect_edges_pd, quadratic_bezier, connect_tri_edges_pd) @@ -63,7 +64,6 @@ def _process(self, element, key=None): target = element.dimension_values(1, expanded=False) nodes = np.unique(np.concatenate([source, target])) if self.p.layout: - import pandas as pd df = pd.DataFrame({'index': nodes}) nodes = self.p.layout(df, element.dframe(), **self.p.kwargs) nodes = nodes[['x', 'y', 'index']] @@ -173,39 +173,24 @@ def _add_node_info(self, node_info): "dimension to allow the Graph to merge " "the data.") - if pd is None: - if node_info.kdims and len(node_info) != len(nodes): - raise ValueError("Graph cannot merge node data on index " - "dimension without pandas. 
Either ensure " - "the node data matches the order of nodes " - "as they appear in the edge data or install " - "pandas.") - dimensions = nodes.dimensions() - for d in node_info.vdims: - if d in dimensions: - continue - nodes = nodes.add_dimension(d, len(nodes.vdims), - node_info.dimension_values(d), - vdim=True) + left_on = nodes.kdims[-1].name + node_info_df = node_info.dframe() + node_df = nodes.dframe() + if node_info.kdims: + idx = node_info.kdims[-1] else: - left_on = nodes.kdims[-1].name - node_info_df = node_info.dframe() - node_df = nodes.dframe() - if node_info.kdims: - idx = node_info.kdims[-1] - else: - idx = Dimension('index') - node_info_df = node_info_df.reset_index() - if 'index' in node_info_df.columns and not idx.name == 'index': - node_df = node_df.rename(columns={'index': '__index'}) - left_on = '__index' - cols = [c for c in node_info_df.columns if c not in - node_df.columns or c == idx.name] - node_info_df = node_info_df[cols] - node_df = pd.merge(node_df, node_info_df, left_on=left_on, - right_on=idx.name, how='left') - nodes = nodes.clone(node_df, kdims=nodes.kdims[:2]+[idx], - vdims=node_info.vdims) + idx = Dimension('index') + node_info_df = node_info_df.reset_index() + if 'index' in node_info_df.columns and not idx.name == 'index': + node_df = node_df.rename(columns={'index': '__index'}) + left_on = '__index' + cols = [c for c in node_info_df.columns if c not in + node_df.columns or c == idx.name] + node_info_df = node_info_df[cols] + node_df = pd.merge(node_df, node_info_df, left_on=left_on, + right_on=idx.name, how='left') + nodes = nodes.clone(node_df, kdims=nodes.kdims[:2]+[idx], + vdims=node_info.vdims) self._nodes = nodes @@ -216,7 +201,7 @@ def _validate(self): mismatch = [] for kd1, kd2 in zip(self.nodes.kdims, self.edgepaths.kdims): if kd1 != kd2: - mismatch.append('%s != %s' % (kd1, kd2)) + mismatch.append(f'{kd1} != {kd2}') if mismatch: raise ValueError('Ensure that the first two key dimensions on ' 'Nodes and EdgePaths match: 
%s' % ', '.join(mismatch)) @@ -328,7 +313,7 @@ def select(self, selection_expr=None, selection_specs=None, selection_mode='edge edgepaths = self._split_edgepaths paths = edgepaths.clone(edgepaths.interface.select_paths(edgepaths, mask)) if len(self._edgepaths.data) == 1: - paths = paths.clone([paths.dframe() if pd else paths.array()]) + paths = paths.clone([paths.dframe()]) else: data = self.data paths = self._edgepaths @@ -386,10 +371,7 @@ def edgepaths(self): """ if self._edgepaths: return self._edgepaths - if pd is None: - paths = connect_edges(self) - else: - paths = connect_edges_pd(self) + paths = connect_edges_pd(self) return self.edge_type(paths, kdims=self.nodes.kdims[:2]) @classmethod @@ -560,7 +542,7 @@ def from_vertices(cls, data): """ try: from scipy.spatial import Delaunay - except: + except ImportError: raise ImportError("Generating triangles from points requires " "SciPy to be installed.") if not isinstance(data, Points): @@ -674,7 +656,7 @@ def _process(self, element, key=None): areas_in_radians = (weights_of_areas / weights_of_areas.sum()) * (2 * np.pi) # We add a zero in the begging for the cumulative sum - points = np.zeros((areas_in_radians.shape[0] + 1)) + points = np.zeros(areas_in_radians.shape[0] + 1) points[1:] = areas_in_radians points = points.cumsum() @@ -785,8 +767,7 @@ def __init__(self, data, kdims=None, vdims=None, compute=True, **params): self._angles = chord._angles else: if not isinstance(nodes, Nodes): - raise TypeError("Expected Nodes object in data, found %s." - % type(nodes)) + raise TypeError(f"Expected Nodes object in data, found {type(nodes)}.") self._nodes = nodes if not isinstance(edgepaths, EdgePaths): raise TypeError("Expected EdgePaths object in data, found %s." 
diff --git a/holoviews/element/path.py b/holoviews/element/path.py index a32468b0a7..f8c021dd49 100644 --- a/holoviews/element/path.py +++ b/holoviews/element/path.py @@ -92,8 +92,7 @@ def __getitem__(self, key): key = (key, slice(None)) elif len(key) == 0: return self.clone() if not all(isinstance(k, slice) for k in key): - raise KeyError("%s only support slice indexing" % - self.__class__.__name__) + raise KeyError(f"{self.__class__.__name__} only support slice indexing") xkey, ykey = key xstart, xstop = xkey.start, xkey.stop ystart, ystop = ykey.start, ykey.stop @@ -173,7 +172,7 @@ def split(self, start=None, end=None, datatype=None, **kwargs): elif datatype is None: obj = self.clone([self.data]) else: - raise ValueError("%s datatype not support" % datatype) + raise ValueError(f"{datatype} datatype not support") return [obj] return self.interface.split(self, start, end, datatype, **kwargs) diff --git a/holoviews/element/raster.py b/holoviews/element/raster.py index 16e596b92e..b09ad068b7 100644 --- a/holoviews/element/raster.py +++ b/holoviews/element/raster.py @@ -58,7 +58,7 @@ def __getitem__(self, slices): elif len(slices) > (2 + self.depth): raise KeyError("Can only slice %d dimensions" % 2 + self.depth) elif len(slices) == 3 and slices[-1] not in [self.vdims[0].name, slice(None)]: - raise KeyError("%r is the only selectable value dimension" % self.vdims[0].name) + raise KeyError(f"{self.vdims[0].name!r} is the only selectable value dimension") slc_types = [isinstance(sl, slice) for sl in slices[:2]] data = self.data.__getitem__(slices[:2][::-1]) @@ -133,7 +133,7 @@ def sample(self, samples=[], bounds=None, **sample_values): # Indices inverted for indexing sample_ind = self.get_dimension_index(dimension) if sample_ind is None: - raise Exception("Dimension %s not found during sampling" % dimension) + raise Exception(f"Dimension {dimension} not found during sampling") other_dimension = [d for i, d in enumerate(self.kdims) if i != sample_ind] @@ -329,13 +329,12 
@@ def _validate(self, data_bounds, supplied_bounds): if yvals.ndim > 1: invalid.append(ydim) if invalid: - dims = '%s and %s' % tuple(invalid) if len(invalid) > 1 else '%s' % invalid[0] - raise ValueError('{clsname} coordinates must be 1D arrays, ' - '{dims} dimension(s) were found to have ' + dims = '%s and %s' % tuple(invalid) if len(invalid) > 1 else f'{invalid[0]}' + raise ValueError(f'{clsname} coordinates must be 1D arrays, ' + f'{dims} dimension(s) were found to have ' 'multiple dimensions. Either supply 1D ' 'arrays or use the QuadMesh element for ' - 'curvilinear coordinates.'.format( - clsname=clsname, dims=dims)) + 'curvilinear coordinates.') xvalid = util.validate_regular_sampling(xvals, self.rtol) yvalid = util.validate_regular_sampling(yvals, self.rtol) @@ -346,9 +345,9 @@ def _validate(self, data_bounds, supplied_bounds): "{clsname} constructor.") dims = None if not xvalid: - dims = ' %s is ' % xdim if yvalid else '(s) %s and %s are' % (xdim, ydim) + dims = f' {xdim} is ' if yvalid else f'(s) {xdim} and {ydim} are' elif not yvalid: - dims = ' %s is' % ydim + dims = f' {ydim} is' if dims: self.param.warning( msg.format(clsname=clsname, dims=dims, rtol=self.rtol)) @@ -578,7 +577,7 @@ def load_image(cls, filename, height=1, array=False, bounds=None, bare=False, ** """ try: from PIL import Image - except: + except ImportError: raise ImportError("RGB.load_image requires PIL (or Pillow).") with open(filename, 'rb') as f: @@ -847,7 +846,7 @@ def range(self, dim, data_range=True, dimension_range=True): return super().range(dim, data_range, dimension_range) else: drange = self.gridded.range(dim, data_range, dimension_range) - except: + except Exception: drange = None finally: self.gridded._binned = False diff --git a/holoviews/element/sankey.py b/holoviews/element/sankey.py index 26534310e2..cdcc2ae221 100644 --- a/holoviews/element/sankey.py +++ b/holoviews/element/sankey.py @@ -429,7 +429,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): 
elif not isinstance(nodes, Dataset): try: nodes = Dataset(nodes) - except: + except Exception: nodes = Dataset(nodes, 'index') if not nodes.kdims: raise ValueError('Could not determine index in supplied node data. ' @@ -442,8 +442,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): self._sankey = graph else: if not isinstance(nodes, self.node_type): - raise TypeError("Expected Nodes object in data, found %s." - % type(nodes)) + raise TypeError(f"Expected Nodes object in data, found {type(nodes)}.") self._nodes = nodes if not isinstance(edgepaths, self.edge_type): raise TypeError("Expected EdgePaths object in data, found %s." diff --git a/holoviews/element/selection.py b/holoviews/element/selection.py index a8011d4cb6..19b34d1ace 100644 --- a/holoviews/element/selection.py +++ b/holoviews/element/selection.py @@ -14,7 +14,7 @@ from .annotation import HSpan, VSpan -class SelectionIndexExpr(object): +class SelectionIndexExpr: _selection_dims = None @@ -96,7 +96,7 @@ def spatial_select_columnar(xvals, yvals, geometry): geometry[:, 1], ) return result.values - except Exception: + except ImportError: xvals = np.asarray(xvals) yvals = np.asarray(yvals) if 'dask' in sys.modules: @@ -124,7 +124,7 @@ def spatial_select_columnar(xvals, yvals, geometry): points = PointArray((masked_xvals.astype('float'), masked_yvals.astype('float'))) poly = Polygon([np.concatenate([geometry, geometry[:1]]).flatten()]) geom_mask = points.intersects(poly) - except Exception: + except ImportError: try: from shapely.geometry import Point, Polygon points = (Point(x, y) for x, y in zip(masked_xvals, masked_yvals)) diff --git a/holoviews/element/stats.py b/holoviews/element/stats.py index 1b5d9a1d80..9cfb3624bb 100644 --- a/holoviews/element/stats.py +++ b/holoviews/element/stats.py @@ -32,8 +32,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): if not vdims: self.vdims = [Dimension('Density')] elif len(vdims) > 1: - raise ValueError("%s expects at most one vdim." 
% - type(self).__name__) + raise ValueError(f"{type(self).__name__} expects at most one vdim.") else: self.vdims = process_dimensions(None, vdims)['vdims'] diff --git a/holoviews/element/tabular.py b/holoviews/element/tabular.py index 5ebe99b6d9..ac5825f19e 100644 --- a/holoviews/element/tabular.py +++ b/holoviews/element/tabular.py @@ -1,8 +1,10 @@ +from collections import OrderedDict + import numpy as np import param -from ..core import OrderedDict, Element, Dataset, Tabular +from ..core import Element, Dataset, Tabular from ..core.dimension import Dimension, dimension_name from .selection import SelectionIndexExpr @@ -59,7 +61,7 @@ def __getitem__(self, heading): if heading == (): return self if heading not in self.vdims: - raise KeyError("%r not in available headings." % heading) + raise KeyError(f"{heading!r} not in available headings.") return np.array(self.data.get(heading, np.NaN)) def dimension_values(self, dimension, expanded=True, flat=True): diff --git a/holoviews/element/tiles.py b/holoviews/element/tiles.py index feaa24c929..e5965662f4 100644 --- a/holoviews/element/tiles.py +++ b/holoviews/element/tiles.py @@ -36,11 +36,8 @@ class Tiles(Element2D): group = param.String(default='Tiles', constant=True) def __init__(self, data=None, kdims=None, vdims=None, **params): - try: - from bokeh.models import MercatorTileSource - except: - MercatorTileSource = None - if MercatorTileSource and isinstance(data, MercatorTileSource): + from bokeh.models import MercatorTileSource + if isinstance(data, MercatorTileSource): data = data.url elif data is not None and not isinstance(data, (str, dict)): raise TypeError('%s data should be a tile service URL or ' @@ -133,8 +130,8 @@ def easting_northing_to_lon_lat(easting, northing): def deprecation_warning(name, url, reason): def deprecated_tilesource_warning(): if util.config.raise_deprecated_tilesource_exception: - raise DeprecationWarning('%s tile source is deprecated: %s' % (name, reason)) - param.main.param.warning('%s 
tile source is deprecated and is likely to be unusable: %s' % (name, reason)) + raise DeprecationWarning(f'{name} tile source is deprecated: {reason}') + param.main.param.warning(f'{name} tile source is deprecated and is likely to be unusable: {reason}') return Tiles(url, name=name) return deprecated_tilesource_warning diff --git a/holoviews/element/util.py b/holoviews/element/util.py index 3362c38bf4..88eac1e2db 100644 --- a/holoviews/element/util.py +++ b/holoviews/element/util.py @@ -2,10 +2,11 @@ import param import numpy as np +import pandas as pd from ..core import Dataset, OrderedDict from ..core.boundingregion import BoundingBox -from ..core.data import default_datatype +from ..core.data import default_datatype, PandasInterface from ..core.operation import Operation from ..core.sheetcoords import Slice from ..core.util import ( @@ -13,11 +14,6 @@ one_to_one, sort_topologically ) -try: - import pandas as pd - from ..core.data import PandasInterface -except: - pd = None def split_path(path): @@ -29,11 +25,11 @@ def split_path(path): values = path.dimension_values(0) splits = np.concatenate([[0], np.where(np.isnan(values))[0]+1, [None]]) subpaths = [] - data = PandasInterface.as_dframe(path) if pd else path.array() + data = PandasInterface.as_dframe(path) for i in range(len(splits)-1): end = splits[i+1] slc = slice(splits[i], None if end is None else end-1) - subpath = data.iloc[slc] if pd else data[slc] + subpath = data.iloc[slc] if len(subpath): subpaths.append(subpath) return subpaths @@ -95,7 +91,7 @@ def reduce_fn(x): """ Aggregation function to get the first non-zero value. 
""" - values = x.values if pd and isinstance(x, pd.Series) else x + values = x.values if isinstance(x, pd.Series) else x for v in values: if not is_nan(v): return v @@ -189,12 +185,9 @@ def _aggregate_dataset(self, obj): reindexed = concat_data.reindex([xdim, ydim], vdims) if not reindexed: agg = reindexed - elif pd: - df = PandasInterface.as_dframe(reindexed) - df = df.groupby([xdim, ydim], sort=False).first().reset_index() - agg = reindexed.clone(df) - else: - agg = reindexed.aggregate([xdim, ydim], reduce_fn) + df = PandasInterface.as_dframe(reindexed) + df = df.groupby([xdim, ydim], sort=False).first().reset_index() + agg = reindexed.clone(df) # Convert data to a gridded dataset for vdim in vdims: @@ -229,12 +222,8 @@ def _process(self, obj, key=None): raise ValueError("Must have at two dimensions to aggregate over" "and one value dimension to aggregate on.") - if pd: - obj = Dataset(obj, datatype=['dataframe']) - return self._aggregate_dataset_pandas(obj) - else: - obj = Dataset(obj, datatype=['dictionary']) - return self._aggregate_dataset(obj) + obj = Dataset(obj, datatype=['dataframe']) + return self._aggregate_dataset_pandas(obj) def circular_layout(nodes): diff --git a/holoviews/ipython/__init__.py b/holoviews/ipython/__init__.py index b18cdb1167..3fd09ebfed 100644 --- a/holoviews/ipython/__init__.py +++ b/holoviews/ipython/__init__.py @@ -4,7 +4,6 @@ import param import holoviews -from IPython import version_info from IPython.core.completer import IPCompleter from IPython.display import HTML, publish_display_data from param import ipython as param_ext @@ -55,7 +54,7 @@ def skip_comparison(self, obj1, obj2, msg): pass def get_object(self, name): obj = self.ip._object_find(name).obj if obj is None: - raise self.failureException("Could not find object %s" % name) + raise self.failureException(f"Could not find object {name}") return obj @@ -117,15 +116,13 @@ def __call__(self, *args, **params): # Abort if IPython not found try: ip = params.pop('ip', None) or 
get_ipython() # noqa (get_ipython) - except: + except Exception: return # Notebook archive relies on display hooks being set to work. try: - if version_info[0] >= 4: - import nbformat # noqa (ensures availability) - else: - from IPython import nbformat # noqa (ensures availability) + import nbformat # noqa: F401 + try: from .archive import notebook_archive holoviews.archive = notebook_archive @@ -155,7 +152,7 @@ def __call__(self, *args, **params): msg = ('Output magic unable to control displayed format ' 'as IPython notebook uses fixed precedence ' 'between %r' % p.display_formats) - display(HTML('Warning: %s' % msg)) + display(HTML(f'Warning: {msg}')) loaded = notebook_extension._loaded if loaded == False: @@ -171,7 +168,7 @@ def __call__(self, *args, **params): if p.width is not None: css += '' % p.width if p.css: - css += '' % p.css + css += f'' if css: display(HTML(css)) diff --git a/holoviews/ipython/archive.py b/holoviews/ipython/archive.py index a6ea96d86c..3d120082aa 100644 --- a/holoviews/ipython/archive.py +++ b/holoviews/ipython/archive.py @@ -94,9 +94,9 @@ def __init__(self, **params): self._timestamp = None self._tags = {MIME_TYPES[k]:v for k,v in HTML_TAGS.items() if k in MIME_TYPES} - keywords = ['%s=%s' % (k, v.__class__.__name__) + keywords = [f'{k}={v.__class__.__name__}' for k, v in self.param.objects().items()] - self.auto.__func__.__doc__ = 'auto(enabled=Boolean, %s)' % ', '.join(keywords) + self.auto.__func__.__doc__ = f"auto(enabled=Boolean, {', '.join(keywords)})" def get_namespace(self): @@ -109,7 +109,7 @@ def get_namespace(self): if not k.startswith('_') and v is sys.modules['holoviews']] if len(matches) == 0: raise Exception("Could not find holoviews module in namespace") - return '%s.archive' % matches[0] + return f'{matches[0]}.archive' def last_export_status(self): @@ -172,12 +172,12 @@ def export(self, timestamp=None): + r'var json_data = IPython.notebook.toJSON(); ' + r'var json_string = JSON.stringify(json_data); ' + capture_cmd - 
+ "var pycmd = capture + ';%s._export_with_html()'; " % name + + f"var pycmd = capture + ';{name}._export_with_html()'; " + r"kernel.execute(pycmd)") tstamp = time.strftime(self.timestamp_format, self._timestamp) export_name = self._format(self.export_name, {'timestamp':tstamp, 'notebook':self.notebook_name}) - print(('Export name: %r\nDirectory %r' % (export_name, + print(('Export name: {!r}\nDirectory {!r}'.format(export_name, os.path.join(os.path.abspath(self.root)))) + '\n\nIf no output appears, please check holoviews.archive.last_export_status()') display(Javascript(cmd)) @@ -236,7 +236,7 @@ def _export_with_html(self): # pragma: no cover if html_key is None: continue filename = self._format(basename, {'timestamp':tstamp, 'notebook':self.notebook_name}) - fpath = filename+(('.%s' % ext) if ext else '') + fpath = filename+(f'.{ext}' if ext else '') info = {'src':fpath, 'mime_type':info['mime_type']} # No mime type if 'mime_type' not in info: pass @@ -270,7 +270,7 @@ def _export_with_html(self): # pragma: no cover # If store cleared_notebook... 
save here super().export(timestamp=self._timestamp, info={'notebook':self.notebook_name}) - except: + except Exception: self.traceback = traceback.format_exc() else: self.export_success = True diff --git a/holoviews/ipython/display_hooks.py b/holoviews/ipython/display_hooks.py index d6ecfd118d..283f8f6f23 100644 --- a/holoviews/ipython/display_hooks.py +++ b/holoviews/ipython/display_hooks.py @@ -36,10 +36,10 @@ def max_frame_warning(max_frames): sys.stderr.write( - "Animation longer than the max_frames limit {max_frames};\n" + f"Animation longer than the max_frames limit {max_frames};\n" "skipping rendering to avoid unexpected lengthy computations.\n" "If desired, the limit can be increased using:\n" - "hv.output(max_frames=)".format(max_frames=max_frames) + "hv.output(max_frames=)" ) def process_object(obj): diff --git a/holoviews/ipython/magics.py b/holoviews/ipython/magics.py index d6b6378311..53e7dab4d2 100644 --- a/holoviews/ipython/magics.py +++ b/holoviews/ipython/magics.py @@ -3,7 +3,7 @@ try: from IPython.core.magic import Magics, magics_class, line_magic, line_cell_magic -except: +except ImportError: from unittest import SkipTest raise SkipTest("IPython extension requires IPython >= 0.13") @@ -51,7 +51,7 @@ def pprint(cls): """ current, count = '', 0 for k,v in Store.output_settings.options.items(): - keyword = '%s=%r' % (k,v) + keyword = f'{k}={v!r}' if len(current) + len(keyword) > 80: print(('%output' if count==0 else ' ') + current) count += 1 @@ -93,7 +93,7 @@ def cell_runner(cell,renderer): self.shell.run_cell(cell, store_history=STORE_HISTORY) def warnfn(msg): - display(HTML("Warning: %s" % msg)) + display(HTML(f"Warning: {msg}")) if line: @@ -151,7 +151,7 @@ def option_completer(cls, k,v): -class OptsCompleter(object): +class OptsCompleter: """ Implements the TAB-completion for the %%opts magic. 
""" @@ -332,7 +332,7 @@ def opts(self, line='', cell=None): for backend in Store.loaded_backends(): available_elements |= set(Store.options(backend).children) - spec_elements = set(k.split('.')[0] for k in spec.keys()) + spec_elements = {k.split('.')[0] for k in spec.keys()} unknown_elements = spec_elements - available_elements if unknown_elements: msg = ("WARNING: Unknown elements {unknown} not registered " @@ -402,7 +402,7 @@ def timer(self, line=''): elif line.strip() == 'start': TimerMagic.start_time = time.time() timestamp = time.strftime("%Y/%m/%d %H:%M:%S") - print("Timer start: %s" % timestamp) + print(f"Timer start: {timestamp}") return elif self.start_time is None: print("Please start timer with %timer start. For more information consult %timer?") diff --git a/holoviews/ipython/preprocessors.py b/holoviews/ipython/preprocessors.py index 164015559c..d40c0d2c59 100644 --- a/holoviews/ipython/preprocessors.py +++ b/holoviews/ipython/preprocessors.py @@ -133,8 +133,7 @@ def preprocess_cell(self, cell, resources, index): template='hv.util.output({line!r})') source, output_lines = filter_magic(source, '%%output') if output_lines: - template = 'hv.util.output({options!r}, {{expr}})'.format( - options=output_lines[-1]) + template = f'hv.util.output({output_lines[-1]!r}, {{expr}})' source = wrap_cell_expression(source, template) cell['source'] = source diff --git a/holoviews/ipython/widgets.py b/holoviews/ipython/widgets.py index b8c054f35d..a10df47c3d 100644 --- a/holoviews/ipython/widgets.py +++ b/holoviews/ipython/widgets.py @@ -4,7 +4,7 @@ try: import IPython from IPython.core.display import clear_output -except: +except ImportError: clear_output = None raise SkipTest("IPython extension requires IPython >= 0.12") @@ -86,7 +86,7 @@ def __call__(self, percentage): self.cache['socket'] = self._get_socket() if self.cache['socket'] is not None: - self.cache['socket'].send('%s|%s' % (percentage, self.label)) + self.cache['socket'].send(f'{percentage}|{self.label}') 
def _stdout_display(self, percentage, display=True): @@ -119,7 +119,7 @@ def _get_socket(self, min_port=8080, max_port=8100, max_tries=20): max_tries=max_tries) self.param.message("Progress broadcast bound to port %d" % port) return sock - except: + except Exception: self.param.message("No suitable port found for progress broadcast.") return None @@ -152,14 +152,13 @@ def __call__(self): [percent_str, label] = message.split('|') percent = float(percent_str) self.label = label - super(RemoteProgress, self).__call__(percent) + super().__call__(percent) except KeyboardInterrupt: if percent is not None: - self.param.message("Exited at %.3f%% completion" % percent) + self.param.message(f"Exited at {percent:.3f}% completion") break - except: - self.param.message("Could not process socket message: %r" - % message) + except Exception: + self.param.message(f"Could not process socket message: {message!r}") class RunProgress(ProgressBar): diff --git a/holoviews/operation/__init__.py b/holoviews/operation/__init__.py index e504ff341f..6b7506c903 100644 --- a/holoviews/operation/__init__.py +++ b/holoviews/operation/__init__.py @@ -10,7 +10,7 @@ def public(obj): return any([issubclass(obj, bc) for bc in baseclasses]) -_public = list(set([_k for _k, _v in locals().items() if public(_v)])) +_public = list({_k for _k, _v in locals().items() if public(_v)}) _current_locals = [el for el in locals().items()] for _k, _v in _current_locals: diff --git a/holoviews/operation/datashader.py b/holoviews/operation/datashader.py index 999eacf0eb..ce639f09af 100644 --- a/holoviews/operation/datashader.py +++ b/holoviews/operation/datashader.py @@ -14,18 +14,19 @@ from datashader.colors import color_lookup from param.parameterized import bothmethod +from packaging.version import Version try: from datashader.bundling import (directly_connect_edges as connect_edges, hammer_bundle) -except: +except ImportError: hammer_bundle, connect_edges = object, object from ..core import (Operation, Element, 
Dimension, NdOverlay, CompositeOverlay, Dataset, Overlay, OrderedDict, Store) from ..core.data import PandasInterface, XArrayInterface, DaskInterface, cuDFInterface from ..core.util import ( - Iterable, LooseVersion, cast_array_to_int64, cftime_types, cftime_to_timestamp, + Iterable, cast_array_to_int64, cftime_types, cftime_to_timestamp, datetime_types, dt_to_int, isfinite, get_param_values, max_range ) from ..element import (Image, Path, Curve, RGB, Graph, TriMesh, @@ -34,7 +35,7 @@ from ..element.util import connect_tri_edges_pd from ..streams import RangeXY, PlotSize, PointerXY -ds_version = LooseVersion(ds.__version__) +ds_version = Version(ds.__version__) class LinkableOperation(Operation): @@ -124,7 +125,7 @@ class ResamplingOperation(LinkableOperation): @bothmethod def instance(self_or_cls,**params): filtered = {k:v for k,v in params.items() if k in self_or_cls.param} - inst = super(ResamplingOperation, self_or_cls).instance(**filtered) + inst = super().instance(**filtered) inst._precomputed = {} return inst @@ -330,21 +331,21 @@ def _get_agg_params(self, element, x, y, agg_fn, bounds): "dimension." 
% (column,element)) if isinstance(agg_fn, (ds.count, ds.count_cat)): if vdim_prefix: - vdim_name = '%s%s Count' % (vdim_prefix, column) + vdim_name = f'{vdim_prefix}{column} Count' else: - vdim_name = '%s Count' % column + vdim_name = f'{column} Count' vdims = dims[0].clone(vdim_name, nodata=0) else: vdims = dims[0].clone(vdim_prefix + column) elif category: agg_name = type(agg_fn).__name__.title() - agg_label = '%s %s' % (category, agg_name) - vdims = Dimension('%s%s' % (vdim_prefix, agg_label), label=agg_label) + agg_label = f'{category} {agg_name}' + vdims = Dimension(f'{vdim_prefix}{agg_label}', label=agg_label) if agg_name in ('Count', 'Any'): vdims.nodata = 0 else: agg_name = type(agg_fn).__name__.title() - vdims = Dimension('%s%s' % (vdim_prefix, agg_name), label=agg_name, nodata=0) + vdims = Dimension(f'{vdim_prefix}{agg_name}', label=agg_name, nodata=0) params['vdims'] = vdims return params @@ -508,7 +509,7 @@ def _process(self, element, key=None): x_range=x_range, y_range=y_range) agg_kwargs = {} - if self.p.line_width and glyph == 'line' and ds_version >= LooseVersion('0.14.0'): + if self.p.line_width and glyph == 'line' and ds_version >= Version('0.14.0'): agg_kwargs['line_width'] = self.p.line_width dfdata = PandasInterface.as_dframe(data) @@ -529,13 +530,13 @@ def _process(self, element, key=None): if agg.ndim == 2: # Replacing x and y coordinates to avoid numerical precision issues - eldata = agg if ds_version > LooseVersion('0.5.0') else (xs, ys, agg.data) + eldata = agg if ds_version > Version('0.5.0') else (xs, ys, agg.data) return self.p.element_type(eldata, **params) else: layers = {} for c in agg.coords[agg_fn.column].data: cagg = agg.sel(**{agg_fn.column: c}) - eldata = cagg if ds_version > LooseVersion('0.5.0') else (xs, ys, cagg.data) + eldata = cagg if ds_version > Version('0.5.0') else (xs, ys, cagg.data) layers[c] = self.p.element_type(eldata, **params) return NdOverlay(layers, kdims=[data.get_dimension(agg_fn.column)]) @@ -787,7 +788,7 
@@ def _process(self, element, key=None): x_range=x_range, y_range=y_range) agg_kwargs = {} - if ds_version >= LooseVersion('0.14.0'): + if ds_version >= Version('0.14.0'): agg_kwargs['line_width'] = self.p.line_width agg = cvs.line(df, x.name, yagg, agg_fn, axis=1, **agg_kwargs).rename(rename_dict) @@ -846,13 +847,13 @@ def _process(self, element, key=None): if agg.ndim == 2: # Replacing x and y coordinates to avoid numerical precision issues - eldata = agg if ds_version > LooseVersion('0.5.0') else (xs, ys, agg.data) + eldata = agg if ds_version > Version('0.5.0') else (xs, ys, agg.data) return self.p.element_type(eldata, **params) else: layers = {} for c in agg.coords[agg_fn.column].data: cagg = agg.sel(**{agg_fn.column: c}) - eldata = cagg if ds_version > LooseVersion('0.5.0') else (xs, ys, cagg.data) + eldata = cagg if ds_version > Version('0.5.0') else (xs, ys, cagg.data) layers[c] = self.p.element_type(eldata, **params) return NdOverlay(layers, kdims=[element.get_dimension(agg_fn.column)]) @@ -864,7 +865,7 @@ class segments_aggregate(geom_aggregate, LineAggregationOperation): def _aggregate(self, cvs, df, x0, y0, x1, y1, agg_fn): agg_kwargs = {} - if ds_version >= LooseVersion('0.14.0'): + if ds_version >= Version('0.14.0'): agg_kwargs['line_width'] = self.p.line_width return cvs.line(df, [x0, x1], [y0, y1], agg_fn, axis=1, **agg_kwargs) @@ -954,7 +955,7 @@ def _get_xarrays(self, element, coords, xtype, ytype): def _process(self, element, key=None): - if ds_version <= LooseVersion('0.5.0'): + if ds_version <= Version('0.5.0'): raise RuntimeError('regrid operation requires datashader>=0.6.0') # Compute coords, anges and size @@ -1116,7 +1117,7 @@ def _process(self, element, key=None): precompute = self.p.precompute if interp == 'linear': interp = 'bilinear' wireframe = False - if (not (element.vdims or (isinstance(element, TriMesh) and element.nodes.vdims))) and ds_version <= LooseVersion('0.6.9'): + if (not (element.vdims or (isinstance(element, TriMesh) and 
element.nodes.vdims))) and ds_version <= Version('0.6.9'): self.p.aggregator = ds.any() if isinstance(agg, ds.any) or agg == 'any' else ds.count() return aggregate._process(self, element, key) elif ((not interp and (isinstance(agg, (ds.any, ds.count)) or @@ -1176,11 +1177,11 @@ class quadmesh_rasterize(trimesh_rasterize): """ def _precompute(self, element, agg): - if ds_version <= LooseVersion('0.7.0'): + if ds_version <= Version('0.7.0'): return super()._precompute(element.trimesh(), agg) def _process(self, element, key=None): - if ds_version <= LooseVersion('0.7.0'): + if ds_version <= Version('0.7.0'): return super()._process(element, key) if element.interface.datatype != 'xarray': @@ -1323,7 +1324,7 @@ def rgb2hex(cls, rgb): """ if len(rgb) > 3: rgb = rgb[:-1] - return "#{0:02x}{1:02x}{2:02x}".format(*(int(v*255) for v in rgb)) + return "#{:02x}{:02x}{:02x}".format(*(int(v*255) for v in rgb)) @classmethod @@ -1368,7 +1369,7 @@ def _process(self, element, key=None): shade_opts = dict( how=self.p.cnorm, min_alpha=self.p.min_alpha, alpha=self.p.alpha ) - if ds_version >= LooseVersion('0.14.0'): + if ds_version >= Version('0.14.0'): shade_opts['rescale_discrete_levels'] = self.p.rescale_discrete_levels # Compute shading options depending on whether @@ -1402,7 +1403,7 @@ def _process(self, element, key=None): if self.p.clims: shade_opts['span'] = self.p.clims - elif ds_version > LooseVersion('0.5.0') and self.p.cnorm != 'eq_hist': + elif ds_version > Version('0.5.0') and self.p.cnorm != 'eq_hist': shade_opts['span'] = element.range(vdim) params = dict(get_param_values(element), kdims=kdims, @@ -1473,7 +1474,7 @@ def _process(self, element, key=None): if isinstance(element, Polygons): agg = cvs.polygons(data, **agg_kwargs) elif isinstance(element, Path): - if self.p.line_width and ds_version >= LooseVersion('0.14.0'): + if self.p.line_width and ds_version >= Version('0.14.0'): agg_kwargs['line_width'] = self.p.line_width agg = cvs.line(data, **agg_kwargs) elif 
isinstance(element, Points): @@ -1660,7 +1661,7 @@ class SpreadingOperation(LinkableOperation): to make sparse plots more visible. """ - how = param.ObjectSelector(default='source' if ds_version <= LooseVersion('0.11.1') else None, + how = param.ObjectSelector(default='source' if ds_version <= Version('0.11.1') else None, objects=[None, 'source', 'over', 'saturate', 'add', 'max', 'min'], doc=""" The name of the compositing operator to use when combining pixels. Default of None uses 'over' operator for RGB elements @@ -1920,7 +1921,7 @@ def _update_hits(self, event): @bothmethod def instance(self_or_cls, **params): - inst = super(inspect, self_or_cls).instance(**params) + inst = super().instance(**params) inst._op = None return inst diff --git a/holoviews/operation/element.py b/holoviews/operation/element.py index 51a7bb9bf3..33a00def94 100644 --- a/holoviews/operation/element.py +++ b/holoviews/operation/element.py @@ -7,14 +7,15 @@ import numpy as np import param +from packaging.version import Version from param import _is_number from ..core import (Operation, NdOverlay, Overlay, GridMatrix, HoloMap, Dataset, Element, Collator, Dimension) -from ..core.data import ArrayInterface, DictInterface, default_datatype +from ..core.data import ArrayInterface, DictInterface, PandasInterface, default_datatype from ..core.data.util import dask_array_module from ..core.util import ( - LooseVersion, group_sanitizer, label_sanitizer, pd, datetime_types, isfinite, + group_sanitizer, label_sanitizer, datetime_types, isfinite, dt_to_int, isdatetime, is_dask_array, is_cupy_array, is_ibis_expr ) from ..element.chart import Histogram, Scatter @@ -23,10 +24,7 @@ from ..element.util import categorical_aggregate2d # noqa (API import) from ..streams import RangeXY -column_interfaces = [ArrayInterface, DictInterface] -if pd: - from ..core.data import PandasInterface - column_interfaces.append(PandasInterface) +column_interfaces = [ArrayInterface, DictInterface, PandasInterface] def 
identity(x,k): return x @@ -730,7 +728,7 @@ def _process(self, element, key=None): is_cupy = is_cupy_array(data) if is_cupy: import cupy - full_cupy_support = LooseVersion(cupy.__version__) > LooseVersion('8.0') + full_cupy_support = Version(cupy.__version__) > Version('8.0') if not full_cupy_support and (normed or self.p.weight_dimension): data = cupy.asnumpy(data) is_cupy = False @@ -837,7 +835,7 @@ def _process(self, element, key=None): params['vdims'] = [Dimension('Frequency', label=label)] else: label = 'Frequency' if normed else 'Count' - params['vdims'] = [Dimension('{0}_{1}'.format(dim.name, label.lower()), + params['vdims'] = [Dimension(f'{dim.name}_{label.lower()}', label=label)] if element.group != element.__class__.__name__: diff --git a/holoviews/operation/normalization.py b/holoviews/operation/normalization.py index fc6e37168a..16d94b92e9 100644 --- a/holoviews/operation/normalization.py +++ b/holoviews/operation/normalization.py @@ -111,7 +111,7 @@ def get_ranges(self, element, key): try: index = keys.index(key) specs = ranges[index] - except: + except Exception: raise KeyError("Could not match element key to defined keys") else: raise ValueError("Key list length must match length of supplied ranges") diff --git a/holoviews/operation/stats.py b/holoviews/operation/stats.py index 797e95e9c6..f85a28d18c 100644 --- a/holoviews/operation/stats.py +++ b/holoviews/operation/stats.py @@ -70,7 +70,7 @@ def _process(self, element, key=None): from scipy import stats from scipy.linalg import LinAlgError except ImportError: - raise ImportError('%s operation requires SciPy to be installed.' 
% type(self).__name__) + raise ImportError(f'{type(self).__name__} operation requires SciPy to be installed.') params = {} if isinstance(element, Distribution): @@ -79,7 +79,7 @@ def _process(self, element, key=None): params['group'] = element.group params['label'] = element.label vdim = element.vdims[0] - vdim_name = '{}_density'.format(selected_dim.name) + vdim_name = f'{selected_dim.name}_density' vdims = [vdim.clone(vdim_name, label='Density') if vdim.name == 'Density' else vdim] else: if self.p.dimension: @@ -91,7 +91,7 @@ def _process(self, element, key=None): "to compute the kernel density estimate on." % type(element).__name__) selected_dim = dimensions[0] - vdim_name = '{}_density'.format(selected_dim.name) + vdim_name = f'{selected_dim.name}_density' vdims = [Dimension(vdim_name, label='Density')] data = element.dimension_values(selected_dim) @@ -174,7 +174,7 @@ def _process(self, element, key=None): try: from scipy import stats except ImportError: - raise ImportError('%s operation requires SciPy to be installed.' 
% type(self).__name__) + raise ImportError(f'{type(self).__name__} operation requires SciPy to be installed.') if len(element.dimensions()) < 2: raise ValueError("bivariate_kde can only be computed on elements " diff --git a/holoviews/operation/timeseries.py b/holoviews/operation/timeseries.py index a91eb3dd87..677ec283b2 100644 --- a/holoviews/operation/timeseries.py +++ b/holoviews/operation/timeseries.py @@ -1,10 +1,11 @@ import param import numpy as np import pandas as pd +from packaging.version import Version from ..core import Operation, Element from ..core.data import PandasInterface -from ..core.util import pandas_version, LooseVersion +from ..core.util import pandas_version from ..element import Scatter @@ -50,7 +51,7 @@ def _process_layer(self, element, key=None): df = df.set_index(xdim).rolling(win_type=self.p.window_type, **self._roll_kwargs()) if self.p.window_type is None: - kwargs = {'raw': True} if pandas_version >= LooseVersion('0.23.0') else {} + kwargs = {'raw': True} if pandas_version >= Version('0.23.0') else {} rolled = df.apply(self.p.function, **kwargs) else: if self.p.function is np.mean: diff --git a/holoviews/plotting/__init__.py b/holoviews/plotting/__init__.py index 1942c59383..d1370c0dfe 100644 --- a/holoviews/plotting/__init__.py +++ b/holoviews/plotting/__init__.py @@ -55,5 +55,5 @@ def public(obj): is_renderer = any([issubclass(obj, bc) for bc in [Renderer]]) return (is_plot_or_cycle or is_renderer) -_public = list(set([_k for _k, _v in locals().items() if public(_v)])) +_public = list({_k for _k, _v in locals().items() if public(_v)}) __all__ = _public diff --git a/holoviews/plotting/bokeh/__init__.py b/holoviews/plotting/bokeh/__init__.py index 7cb90b0074..a80daa15f2 100644 --- a/holoviews/plotting/bokeh/__init__.py +++ b/holoviews/plotting/bokeh/__init__.py @@ -16,10 +16,6 @@ TriMesh, Violin, Chord, Div, HexTiles, Labels, Sankey, Tiles, Segments, Slope, Rectangles) from ...core.options import Options, Cycle, Palette -try: - from 
...interface import DFrame -except: - DFrame = None from .annotation import ( TextPlot, LineAnnotationPlot, BoxAnnotationPlot, SplinePlot, ArrowPlot, @@ -130,9 +126,6 @@ HexTiles: HexTilesPlot} -if DFrame is not None: - associations[DFrame] = TablePlot - Store.register(associations, 'bokeh') if config.no_padding: diff --git a/holoviews/plotting/bokeh/annotation.py b/holoviews/plotting/bokeh/annotation.py index 57d5e59658..3376bd4f53 100644 --- a/holoviews/plotting/bokeh/annotation.py +++ b/holoviews/plotting/bokeh/annotation.py @@ -1,9 +1,5 @@ from collections import defaultdict - -try: - from html import escape -except: - from cgi import escape +from html import escape import param import numpy as np @@ -16,7 +12,7 @@ arrow_start = {'<->': NormalHead, '<|-|>': NormalHead} arrow_end = {'->': NormalHead, '-[': TeeHead, '-|>': NormalHead, '-': None} -except: +except ImportError: from bokeh.models.arrow_heads import OpenHead, NormalHead arrow_start = {'<->': NormalHead, '<|-|>': NormalHead} arrow_end = {'->': NormalHead, '-[': OpenHead, '-|>': NormalHead, @@ -289,7 +285,7 @@ def get_data(self, element, ranges, style): class ArrowPlot(CompositeElementPlot, AnnotationPlot): - style_opts = (['arrow_%s' % p for p in line_properties+fill_properties+['size']] + + style_opts = ([f'arrow_{p}' for p in line_properties+fill_properties+['size']] + text_properties) _style_groups = {'arrow': 'arrow', 'text': 'text'} diff --git a/holoviews/plotting/bokeh/callbacks.py b/holoviews/plotting/bokeh/callbacks.py index 5cc75d692c..1cc1378c50 100644 --- a/holoviews/plotting/bokeh/callbacks.py +++ b/holoviews/plotting/bokeh/callbacks.py @@ -1,9 +1,7 @@ -from __future__ import absolute_import, division, unicode_literals - import asyncio import time -from collections import defaultdict +from collections import defaultdict, OrderedDict import numpy as np @@ -14,7 +12,6 @@ ) from panel.io.state import state -from ...core import OrderedDict from ...core.options import CallbackError from 
...core.util import ( datetime_types, dimension_sanitizer, dt64_to_dt @@ -30,7 +27,7 @@ from .util import convert_timestamp -class Callback(object): +class Callback: """ Provides a baseclass to define callbacks, which return data from bokeh model callbacks, events and attribute changes. The callback @@ -233,7 +230,7 @@ def resolve_attr_spec(cls, spec, cb_obj, model=None): be the same as the model. """ if not cb_obj: - raise Exception('Bokeh plot attribute %s could not be found' % spec) + raise Exception(f'Bokeh plot attribute {spec} could not be found') if model is None: model = cb_obj spec = spec.split('.') @@ -753,7 +750,7 @@ def _process_msg(self, msg): dtype = el.interface.dtype(el, xdim) try: xfactors = list(np.array(xfactors).astype(dtype)) - except: + except Exception: pass msg['x_selection'] = xfactors else: @@ -768,7 +765,7 @@ def _process_msg(self, msg): dtype = el.interface.dtype(el, ydim) try: yfactors = list(np.array(yfactors).astype(dtype)) - except: + except Exception: pass msg['y_selection'] = yfactors else: diff --git a/holoviews/plotting/bokeh/chart.py b/holoviews/plotting/bokeh/chart.py index 1c5481179b..1d847499fd 100644 --- a/holoviews/plotting/bokeh/chart.py +++ b/holoviews/plotting/bokeh/chart.py @@ -24,7 +24,7 @@ expand_batched_style, base_properties, line_properties, fill_properties, mpl_to_bokeh, rgb2hex ) -from .util import LooseVersion, bokeh_version, categorize_array +from .util import categorize_array class PointPlot(LegendPlot, ColorbarPlot): @@ -573,8 +573,8 @@ def _init_glyph(self, plot, mapping, properties): if prop not in properties: continue pval = properties.pop(prop) - line_prop = 'line_%s' % prop - fill_prop = 'fill_%s' % prop + line_prop = f'line_{prop}' + fill_prop = f'fill_{prop}' if line_prop not in properties: properties[line_prop] = pval if fill_prop not in properties and fill_prop in self.style_opts: @@ -859,7 +859,7 @@ def _add_color_data(self, ds, ranges, style, cdim, data, mapping, factors, color # Enable legend if 
colormapper is categorical cmapper = cmapping['color']['transform'] - legend_prop = 'legend_field' if bokeh_version >= LooseVersion('1.3.5') else 'legend' + legend_prop = 'legend_field' if ('color' in cmapping and self.show_legend and isinstance(cmapper, CategoricalColorMapper)): mapping[legend_prop] = cdim.name diff --git a/holoviews/plotting/bokeh/element.py b/holoviews/plotting/bokeh/element.py index 96cc668dee..45ae6d8578 100644 --- a/holoviews/plotting/bokeh/element.py +++ b/holoviews/plotting/bokeh/element.py @@ -11,7 +11,8 @@ from bokeh.core.properties import value from bokeh.document.events import ModelChangedEvent from bokeh.models import ( - ColorBar, ColorMapper, Legend, Renderer, Title, tools + BinnedTicker, ColorBar, ColorMapper, EqHistColorMapper, + Legend, Renderer, Title, tools, ) from bokeh.models.axes import CategoricalAxis, DatetimeAxis from bokeh.models.formatters import ( @@ -27,6 +28,8 @@ from bokeh.models.tools import Tool from bokeh.models.widgets import Panel, Tabs +from packaging.version import Version + from ...core import DynamicMap, CompositeOverlay, Element, Dimension, Dataset from ...core.options import abbreviated_exception, SkipRendering from ...core import util @@ -44,33 +47,19 @@ ) from .tabular import TablePlot from .util import ( - LooseVersion, TOOL_TYPES, bokeh_version, date_to_integer, decode_bytes, get_tab_title, + TOOL_TYPES, bokeh_version, date_to_integer, decode_bytes, get_tab_title, glyph_order, py2js_tickformatter, recursive_model_update, theme_attr_json, cds_column_replace, hold_policy, match_dim_specs, compute_layout_properties, wrap_formatter, match_ax_type, prop_is_none, remove_legend ) -try: - from bokeh.models import EqHistColorMapper -except ImportError: - EqHistColorMapper = None - -try: - from bokeh.models import BinnedTicker -except ImportError: - BinnedTicker = None -if bokeh_version >= LooseVersion('2.0.1'): - try: - TOOLS_MAP = Tool._known_aliases - except Exception: - TOOLS_MAP = TOOL_TYPES -elif 
bokeh_version >= LooseVersion('2.0.0'): - from bokeh.plotting._tools import TOOLS_MAP -else: - from bokeh.plotting.helpers import _known_tools as TOOLS_MAP +try: + TOOLS_MAP = Tool._known_aliases +except Exception: + TOOLS_MAP = TOOL_TYPES class ElementPlot(BokehPlot, GenericElementPlot): @@ -595,7 +584,6 @@ def _title_properties(self, key, plot, element): # this will override theme if not set to the default 12pt title_font = self._fontsize('title').get('fontsize') if title_font != '12pt': - title_font = title_font if bokeh_version > LooseVersion('2.2.3') else value(title_font) opts['text_font_size'] = title_font return opts @@ -631,18 +619,17 @@ def _axis_properties(self, axis, key, plot, dimension=None, if ((axis == 'x' and self.xaxis in ['bottom-bare', 'top-bare', 'bare']) or (axis == 'y' and self.yaxis in ['left-bare', 'right-bare', 'bare'])): - zero_pt = '0pt' if bokeh_version > LooseVersion('2.2.3') else value('0pt') + zero_pt = '0pt' axis_props['axis_label_text_font_size'] = zero_pt axis_props['major_label_text_font_size'] = zero_pt axis_props['major_tick_line_color'] = None axis_props['minor_tick_line_color'] = None else: - labelsize = self._fontsize('%slabel' % axis).get('fontsize') + labelsize = self._fontsize(f'{axis}label').get('fontsize') if labelsize: axis_props['axis_label_text_font_size'] = labelsize - ticksize = self._fontsize('%sticks' % axis, common=False).get('fontsize') + ticksize = self._fontsize(f'{axis}ticks', common=False).get('fontsize') if ticksize: - ticksize = ticksize if bokeh_version > LooseVersion('2.2.3') else value(ticksize) axis_props['major_label_text_font_size'] = ticksize rotation = self.xrotation if axis == 'x' else self.yrotation if rotation: @@ -715,7 +702,7 @@ def _axis_properties(self, axis, key, plot, dimension=None, # major ticks are actually minor ticks in a categorical # so if user inputs minor ticks sizes, then use that; # else keep major (group) == minor (subgroup) - msize = 
self._fontsize('minor_{0}ticks'.format(axis), + msize = self._fontsize(f'minor_{axis}ticks', common=False).get('fontsize') if msize is not None: axis_props['major_label_text_font_size'] = msize @@ -966,7 +953,7 @@ def _categorize_data(self, data, cols, dims): if self.invert_axes: cols = cols[::-1] dims = dims[:2][::-1] - ranges = [self.handles['%s_range' % ax] for ax in 'xy'] + ranges = [self.handles[f'{ax}_range'] for ax in 'xy'] for i, col in enumerate(cols): column = data[col] if (isinstance(ranges[i], FactorRange) and @@ -1205,8 +1192,7 @@ def _glyph_properties(self, plot, element, source, ranges, style, group=None): else: legend = element.label if legend and self.overlaid: - legend_prop = 'legend_label' if bokeh_version >= LooseVersion('1.3.5') else 'legend' - properties[legend_prop] = legend + properties['legend_label'] = legend return properties @@ -1233,7 +1219,7 @@ def _update_glyph(self, renderer, properties, mapping, glyph, source, data): allowed_properties = glyph.properties() properties = mpl_to_bokeh(properties) merged = dict(properties, **mapping) - legend_props = ('legend_field', 'legend_label') if bokeh_version >= LooseVersion('1.3.5') else ('legend',) + legend_props = ('legend_field', 'legend_label') for lp in legend_props: legend = merged.pop(lp, None) if legend is not None: @@ -1281,10 +1267,7 @@ def _update_glyph(self, renderer, properties, mapping, glyph, source, data): event = ModelChangedEvent(self.document, source, 'data', source.data, empty_data, empty_data, setter='empty') - if bokeh_version >= LooseVersion('2.4.0'): - self.document.callbacks._held_events.append(event) - else: - self.document._held_events.append(event) + self.document.callbacks._held_events.append(event) if legend is not None: for leg in self.state.legend: @@ -1788,7 +1771,7 @@ class ColorbarPlot(ElementPlot): def _draw_colorbar(self, plot, color_mapper, prefix=''): if CategoricalColorMapper and isinstance(color_mapper, CategoricalColorMapper): return - if 
EqHistColorMapper and isinstance(color_mapper, EqHistColorMapper) and BinnedTicker: + if isinstance(color_mapper, EqHistColorMapper): ticker = BinnedTicker(mapper=color_mapper) elif isinstance(color_mapper, LogColorMapper) and color_mapper.low > 0: ticker = LogTicker() @@ -1968,8 +1951,7 @@ def _get_color_data(self, element, ranges, style, name='color', factors=None, co data[field] = cdata if factors is not None and self.show_legend: - legend_prop = 'legend_field' if bokeh_version >= LooseVersion('1.3.5') else 'legend' - mapping[legend_prop] = field + mapping['legend_field'] = field mapping[name] = {'field': field, 'transform': mapper} return data, mapping @@ -1995,12 +1977,8 @@ def _get_cmapper_opts(self, low, high, factors, colors): "the `clim` option." ) elif self.cnorm == 'eq_hist': - if EqHistColorMapper is None: - raise ImportError("Could not import bokeh.models.EqHistColorMapper. " - "Note that the option cnorm='eq_hist' requires " - "bokeh 2.2.3 or higher.") colormapper = EqHistColorMapper - if bokeh_version > LooseVersion('2.4.2'): + if bokeh_version > Version('2.4.2'): opts['rescale_discrete_levels'] = self.rescale_discrete_levels if isinstance(low, (bool, np.bool_)): low = int(low) if isinstance(high, (bool, np.bool_)): high = int(high) @@ -2103,7 +2081,7 @@ def _process_legend(self, plot=None): -class AnnotationPlot(object): +class AnnotationPlot: """ Mix-in plotting subclass for AnnotationPlots which do not have a legend. 
""" diff --git a/holoviews/plotting/bokeh/graphs.py b/holoviews/plotting/bokeh/graphs.py index 5fbcb50846..f4ad007f68 100644 --- a/holoviews/plotting/bokeh/graphs.py +++ b/holoviews/plotting/bokeh/graphs.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from collections import defaultdict import param diff --git a/holoviews/plotting/bokeh/heatmap.py b/holoviews/plotting/bokeh/heatmap.py index 8e9d8e6f64..d667c387f2 100644 --- a/holoviews/plotting/bokeh/heatmap.py +++ b/holoviews/plotting/bokeh/heatmap.py @@ -72,7 +72,7 @@ def _element_transform(self, transform, element, ranges): return transform.apply(element.gridded, ranges=ranges, flat=False).T.flatten() def get_data(self, element, ranges, style): - x, y, z = [dimension_sanitizer(d) for d in element.dimensions(label=True)[:3]] + x, y, z = (dimension_sanitizer(d) for d in element.dimensions(label=True)[:3]) if self.invert_axes: x, y = y, x cmapper = self._get_colormapper(element.vdims[0], element, ranges, style) if 'line_alpha' not in style and 'line_width' not in style: @@ -474,7 +474,7 @@ def _get_ymarks_data(self, order_ann, bins_ann): def get_data(self, element, ranges, style): # dimension labels dim_labels = element.dimensions(label=True)[:3] - x, y, z = [dimension_sanitizer(d) for d in dim_labels] + x, y, z = (dimension_sanitizer(d) for d in dim_labels) if self.invert_axes: x, y = y, x # color mapper diff --git a/holoviews/plotting/bokeh/hex_tiles.py b/holoviews/plotting/bokeh/hex_tiles.py index 384a9a3ad3..110cd433e6 100644 --- a/holoviews/plotting/bokeh/hex_tiles.py +++ b/holoviews/plotting/bokeh/hex_tiles.py @@ -3,10 +3,7 @@ import param import numpy as np -try: - from bokeh.util.hex import cartesian_to_axial -except: - cartesian_to_axial = None +from bokeh.util.hex import cartesian_to_axial from ...core import Dimension, Operation from ...core.options import Compositor diff --git a/holoviews/plotting/bokeh/links.py b/holoviews/plotting/bokeh/links.py index 
64392e13a2..3e9e8a4a8b 100644 --- a/holoviews/plotting/bokeh/links.py +++ b/holoviews/plotting/bokeh/links.py @@ -1,6 +1,7 @@ import numpy as np from bokeh.models import CustomJS, ToolbarBox +from bokeh.models.tools import RangeTool from ...core.util import isscalar from ..links import ( @@ -10,7 +11,7 @@ from ..plot import GenericElementPlot, GenericOverlayPlot -class LinkCallback(object): +class LinkCallback: source_model = None target_model = None @@ -130,10 +131,6 @@ class RangeToolLinkCallback(LinkCallback): """ def __init__(self, root_model, link, source_plot, target_plot): - try: - from bokeh.models.tools import RangeTool - except: - raise Exception('RangeToolLink requires bokeh >= 0.13') toolbars = list(root_model.select({'type': ToolbarBox})) axes = {} if 'x' in link.axes: diff --git a/holoviews/plotting/bokeh/path.py b/holoviews/plotting/bokeh/path.py index 63331aaf7e..8f4ab6c78b 100644 --- a/holoviews/plotting/bokeh/path.py +++ b/holoviews/plotting/bokeh/path.py @@ -13,7 +13,7 @@ expand_batched_style, base_properties, line_properties, fill_properties, mpl_to_bokeh, validate ) -from .util import LooseVersion, bokeh_version, multi_polygons_data +from .util import multi_polygons_data class PathPlot(LegendPlot, ColorbarPlot): @@ -273,8 +273,7 @@ def get_data(self, element, ranges, style): cmapper = self._get_colormapper(cdim, element, ranges, style, factors) mapping[self._color_style] = {'field': dim_name, 'transform': cmapper} if self.show_legend: - legend_prop = 'legend_field' if bokeh_version >= LooseVersion('1.3.5') else 'legend' - mapping[legend_prop] = dim_name + mapping['legend_field'] = dim_name return data, mapping, style def _init_glyph(self, plot, mapping, properties): diff --git a/holoviews/plotting/bokeh/plot.py b/holoviews/plotting/bokeh/plot.py index 8d5f68503d..9869607cb7 100644 --- a/holoviews/plotting/bokeh/plot.py +++ b/holoviews/plotting/bokeh/plot.py @@ -237,7 +237,7 @@ def _fontsize(self, key, label='fontsize', common=True): fontsize in 
pt. """ size = super()._fontsize(key, label, common) - return {k: v if isinstance(v, str) else '%spt' % v + return {k: v if isinstance(v, str) else f'{v}pt' for k, v in size.items()} def _get_title_div(self, key, default_fontsize='15pt', width=450): @@ -720,7 +720,7 @@ def _init_layout(self, layout): if empty or view.main is None: continue elif not view.traverse(lambda x: x, [Element]): - self.param.warning('%s is empty, skipping subplot.' % view.main) + self.param.warning(f'{view.main} is empty, skipping subplot.') continue else: layout_count += 1 diff --git a/holoviews/plotting/bokeh/raster.py b/holoviews/plotting/bokeh/raster.py index cc37d8c956..c8a6290b08 100644 --- a/holoviews/plotting/bokeh/raster.py +++ b/holoviews/plotting/bokeh/raster.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import sys import numpy as np @@ -46,7 +44,7 @@ def _hover_opts(self, element): tooltips.append((vdims[0].pprint_label, '@image')) for vdim in vdims[1:]: vname = dimension_sanitizer(vdim.name) - tooltips.append((vdim.pprint_label, '@{0}'.format(vname))) + tooltips.append((vdim.pprint_label, f'@{vname}')) return tooltips, {} def _postprocess_hover(self, renderer, source): @@ -56,7 +54,7 @@ def _postprocess_hover(self, renderer, source): return element = self.current_frame - xdim, ydim = [dimension_sanitizer(kd.name) for kd in element.kdims] + xdim, ydim = (dimension_sanitizer(kd.name) for kd in element.kdims) xaxis = self.handles['xaxis'] yaxis = self.handles['yaxis'] @@ -145,7 +143,7 @@ class RGBPlot(LegendPlot): selection_display = BokehOverlaySelectionDisplay() def __init__(self, hmap, **params): - super(RGBPlot, self).__init__(hmap, **params) + super().__init__(hmap, **params) self._legend_plot = None def _hover_opts(self, element): @@ -154,7 +152,7 @@ def _hover_opts(self, element): ('RGBA', '@image')], {} def _init_glyphs(self, plot, element, ranges, source): - super(RGBPlot, self)._init_glyphs(plot, element, ranges, source) + 
super()._init_glyphs(plot, element, ranges, source) if not ('holoviews.operation.datashader' in sys.modules and self.show_legend): return try: @@ -275,8 +273,8 @@ def get_data(self, element, ranges, style): if irregular: dims = element.kdims if self.invert_axes: dims = dims[::-1] - X, Y = [element.interface.coords(element, d, expanded=True, edges=True) - for d in dims] + X, Y = (element.interface.coords(element, d, expanded=True, edges=True) + for d in dims) X, Y = colormesh(X, Y) zvals = zdata.T.flatten() if self.invert_axes else zdata.flatten() XS, YS = [], [] diff --git a/holoviews/plotting/bokeh/sankey.py b/holoviews/plotting/bokeh/sankey.py index 8d2668963c..d91d21bc2d 100644 --- a/holoviews/plotting/bokeh/sankey.py +++ b/holoviews/plotting/bokeh/sankey.py @@ -181,7 +181,7 @@ def _compute_labels(self, element, data, mapping): if self.show_values: value = value_dim.pprint_value(node['value'], print_unit=True) if label: - label = '%s - %s' % (label, value) + label = f'{label} - {value}' else: label = value if label: @@ -220,7 +220,7 @@ def _patch_hover(self, element, data): if not (self.inspection_policy == 'edges' and 'hover' in self.handles): return lidx = element.nodes.get_dimension(self.label_index) - src, tgt = [dimension_sanitizer(kd.name) for kd in element.kdims[:2]] + src, tgt = (dimension_sanitizer(kd.name) for kd in element.kdims[:2]) if src == 'start': src += '_values' if tgt == 'end': tgt += '_values' lookup = dict(zip(*(element.nodes.dimension_values(d) for d in (2, lidx)))) diff --git a/holoviews/plotting/bokeh/stats.py b/holoviews/plotting/bokeh/stats.py index c3a3843268..1c10048bbf 100644 --- a/holoviews/plotting/bokeh/stats.py +++ b/holoviews/plotting/bokeh/stats.py @@ -20,7 +20,7 @@ from .element import CompositeElementPlot, ColorbarPlot, LegendPlot from .path import PolygonPlot from .styles import base_properties, fill_properties, line_properties -from .util import LooseVersion, bokeh_version, decode_bytes +from .util import decode_bytes class 
DistributionPlot(AreaPlot): @@ -102,7 +102,7 @@ def _get_axis_dims(self, element): def _glyph_properties(self, plot, element, source, ranges, style, group=None): properties = dict(style, source=source) if self.show_legend and not element.kdims and self.overlaid: - legend_prop = 'legend_label' if bokeh_version >= LooseVersion('1.3.5') else 'legend' + legend_prop = 'legend_label' properties[legend_prop] = element.label return properties @@ -309,8 +309,7 @@ def get_data(self, element, ranges, style): factors = list(unique_iterator(factors)) if self.show_legend: - legend_prop = 'legend_field' if bokeh_version >= LooseVersion('1.3.5') else 'legend' - vbar_map[legend_prop] = cdim.name + vbar_map['legend_field'] = cdim.name return data, mapping, style @@ -404,7 +403,7 @@ def _kde_data(self, element, el, key, split_dim, split_cats, **kwargs): if len(split_cats) > 2: raise ValueError( 'The number of categories for split violin plots cannot be ' - 'greater than 2. Found {0} categories: {1}'.format( + 'greater than 2. 
Found {} categories: {}'.format( len(split_cats), ', '.join(split_cats))) el = el.add_dimension(repr(split_dim), len(el.kdims), all_cats) kdes = univariate_kde(el, dimension=vdim.name, groupby=repr(split_dim), **kwargs) @@ -592,8 +591,7 @@ def get_data(self, element, ranges, style): group='violin', factors=factors) style['violin_fill_color'] = {'field': repr(split_dim), 'transform': cmapper} if self.show_legend: - legend_prop = 'legend_field' if bokeh_version >= LooseVersion('1.3.5') else 'legend' - kde_map[legend_prop] = repr(split_dim) + kde_map['legend_field'] = repr(split_dim) for k, v in list(style.items()): if k.startswith('violin_line'): diff --git a/holoviews/plotting/bokeh/util.py b/holoviews/plotting/bokeh/util.py index 37c0c949a2..b8a0d3a447 100644 --- a/holoviews/plotting/bokeh/util.py +++ b/holoviews/plotting/bokeh/util.py @@ -26,22 +26,19 @@ from bokeh.models.widgets import DataTable, Tabs, Div from bokeh.plotting import Figure from bokeh.themes.theme import Theme - -try: - from bokeh.themes import built_in_themes -except: - built_in_themes = {} +from bokeh.themes import built_in_themes +from packaging.version import Version from ...core.ndmapping import NdMapping from ...core.overlay import Overlay from ...core.util import ( - LooseVersion, arraylike_types, callable_name, cftime_types, + arraylike_types, callable_name, cftime_types, cftime_to_timestamp, isnumeric, pd, unique_array ) from ...core.spaces import get_nested_dmaps, DynamicMap from ..util import dim_axis_label -bokeh_version = LooseVersion(bokeh.__version__) # noqa +bokeh_version = Version(bokeh.__version__) # noqa TOOL_TYPES = { @@ -577,13 +574,13 @@ def py2js_tickformatter(formatter, msg=''): try: jscode = py2js(formatter, 'formatter') except Exception as e: - error = 'Pyscript raised an error: {0}'.format(e) + error = f'Pyscript raised an error: {e}' error = error.replace('%', '%%') param.main.param.warning(msg+error) return args = inspect.getfullargspec(formatter).args - arg_define = 
'var %s = tick;' % args[0] if args else '' + arg_define = f'var {args[0]} = tick;' if args else '' return_js = 'return formatter();\n' jsfunc = '\n'.join([arg_define, jscode, return_js]) match = re.search(r'(formatter \= function flx_formatter \(.*\))', jsfunc) @@ -644,7 +641,7 @@ def filter_batched_data(data, mapping): if len(unique_array(values)) == 1: mapping[k] = values[0] del data[v] - except: + except Exception: pass def cds_column_replace(source, data): @@ -665,21 +662,15 @@ def hold_policy(document, policy, server=False): """ Context manager to temporary override the hold policy. """ - if bokeh_version >= LooseVersion('2.4'): - old_policy = document.callbacks.hold_value - document.callbacks._hold = policy - else: - old_policy = document._hold - document._hold = policy + old_policy = document.callbacks.hold_value + document.callbacks._hold = policy try: yield finally: if server and not old_policy: document.unhold() - elif bokeh_version >= LooseVersion('2.4'): - document.callbacks._hold = old_policy else: - document._hold = old_policy + document.callbacks._hold = old_policy def recursive_model_update(model, props): @@ -726,7 +717,7 @@ def wrapper(self, *args, **kwargs): for source in shared_sources: source.data.clear() if doc: - event_obj = doc.callbacks if bokeh_version >= LooseVersion('2.4') else doc + event_obj = doc.callbacks event_obj._held_events = event_obj._held_events[:-1] ret = f(self, *args, **kwargs) @@ -750,7 +741,7 @@ def categorize_array(array, dim): return np.array([dim.pprint_value(x) for x in array]) -class periodic(object): +class periodic: """ Mocks the API of periodic Thread in hv.core.util, allowing a smooth API transition on bokeh server. 
@@ -811,9 +802,7 @@ def stop(self): self._pcb = None def __repr__(self): - return 'periodic(%s, %s, %s)' % (self.period, - self.count, - callable_name(self.callback)) + return f'periodic({self.period}, {self.count}, {callable_name(self.callback)})' def __str__(self): return repr(self) @@ -842,10 +831,10 @@ def date_to_integer(date): Returns: Milliseconds since 1970-01-01 00:00:00 """ - if pd and isinstance(date, pd.Timestamp): + if isinstance(date, pd.Timestamp): try: date = date.to_datetime64() - except: + except Exception: date = date.to_datetime() if isinstance(date, np.datetime64): @@ -969,8 +958,7 @@ def wrap_formatter(formatter, axis): if isinstance(formatter, TickFormatter): pass elif isinstance(formatter, FunctionType): - msg = ('%sformatter could not be ' - 'converted to tick formatter. ' % axis) + msg = f'{axis}formatter could not be converted to tick formatter. ' jsfunc = py2js_tickformatter(formatter, msg) if jsfunc: formatter = FuncTickFormatter(code=jsfunc) diff --git a/holoviews/plotting/links.py b/holoviews/plotting/links.py index 8496207481..2abd20fb6c 100644 --- a/holoviews/plotting/links.py +++ b/holoviews/plotting/links.py @@ -36,9 +36,9 @@ class Link(param.Parameterized): def __init__(self, source, target=None, **params): if source is None: - raise ValueError('%s must define a source' % type(self).__name__) + raise ValueError(f'{type(self).__name__} must define a source') if self._requires_target and target is None: - raise ValueError('%s must define a target.' 
% type(self).__name__) + raise ValueError(f'{type(self).__name__} must define a target.') # Source is stored as a weakref to allow it to be garbage collected self._source = None if source is None else weakref.ref(source) diff --git a/holoviews/plotting/mixins.py b/holoviews/plotting/mixins.py index 7f576cccda..6258957637 100644 --- a/holoviews/plotting/mixins.py +++ b/holoviews/plotting/mixins.py @@ -6,7 +6,7 @@ from .util import get_axis_padding -class GeomMixin(object): +class GeomMixin: def get_extents(self, element, ranges, range_type='combined'): """ @@ -36,7 +36,7 @@ def get_extents(self, element, ranges, range_type='combined'): return super().get_extents(element, ranges, range_type) -class ChordMixin(object): +class ChordMixin: def get_extents(self, element, ranges, range_type='combined'): """ @@ -53,7 +53,7 @@ def get_extents(self, element, ranges, range_type='combined'): return (x0, y0, x1, y1) -class HeatMapMixin(object): +class HeatMapMixin: def get_extents(self, element, ranges, range_type='combined'): if range_type in ('data', 'combined'): @@ -74,7 +74,7 @@ def get_extents(self, element, ranges, range_type='combined'): return super().get_extents(element, ranges, range_type) -class SpikesMixin(object): +class SpikesMixin: def get_extents(self, element, ranges, range_type='combined'): opts = self.lookup_options(element, 'plot').options @@ -109,7 +109,7 @@ def get_extents(self, element, ranges, range_type='combined'): ydim=proxy_dim) -class AreaMixin(object): +class AreaMixin: def get_extents(self, element, ranges, range_type='combined'): vdims = element.vdims[:2] @@ -128,7 +128,7 @@ def get_extents(self, element, ranges, range_type='combined'): return super().get_extents(element, ranges, range_type) -class BarsMixin(object): +class BarsMixin: def get_extents(self, element, ranges, range_type='combined'): """ diff --git a/holoviews/plotting/mpl/__init__.py b/holoviews/plotting/mpl/__init__.py index 970914bd0d..272f6de6a5 100644 --- 
a/holoviews/plotting/mpl/__init__.py +++ b/holoviews/plotting/mpl/__init__.py @@ -4,11 +4,11 @@ from matplotlib.colors import ListedColormap, LinearSegmentedColormap from param import concrete_descendents from colorcet import kbc, register_cmap +from packaging.version import Version from ...core import Layout, Collator, GridMatrix, config from ...core.options import Cycle, Palette, Options from ...core.overlay import NdOverlay, Overlay -from ...core.util import LooseVersion, pd from ...element import * # noqa (API import) from ..plot import PlotSelector from ..util import fire_colors @@ -30,15 +30,14 @@ from .renderer import MPLRenderer -mpl_ge_150 = LooseVersion(mpl.__version__) >= LooseVersion('1.5.0') +mpl_ge_150 = Version(mpl.__version__) >= Version('1.5.0') -if pd: - try: - from pandas.plotting import register_matplotlib_converters - register_matplotlib_converters() - except ImportError: - from pandas.tseries import converter - converter.register() +try: + from pandas.plotting import register_matplotlib_converters + register_matplotlib_converters() +except ImportError: + from pandas.tseries import converter + converter.register() def set_style(key): diff --git a/holoviews/plotting/mpl/chart.py b/holoviews/plotting/mpl/chart.py index 5b03a929c7..4d16b65d9a 100644 --- a/holoviews/plotting/mpl/chart.py +++ b/holoviews/plotting/mpl/chart.py @@ -1,6 +1,7 @@ import param import numpy as np import matplotlib as mpl +from packaging.version import Version from matplotlib import cm from matplotlib.collections import LineCollection @@ -9,7 +10,7 @@ from ...core.dimension import Dimension, dimension_name from ...core.options import Store, abbreviated_exception from ...core.util import ( - LooseVersion, match_spec, isfinite, dt_to_int, dt64_to_dt, search_indices, + match_spec, isfinite, dt_to_int, dt64_to_dt, search_indices, unique_array, isscalar, isdatetime ) from ...element import Raster, HeatMap @@ -123,7 +124,7 @@ class ErrorPlot(ColorbarPlot): def init_artists(self, 
ax, plot_data, plot_kwargs): handles = ax.errorbar(*plot_data, **plot_kwargs) bottoms, tops = None, None - if mpl_version >= LooseVersion('2.0'): + if mpl_version >= Version('2.0'): _, caps, verts = handles if caps: bottoms, tops = caps @@ -147,11 +148,9 @@ def get_data(self, element, ranges, style): with abbreviated_exception(): raise ValueError('Mapping a continuous or categorical ' 'dimension to a color on a ErrorBarPlot ' - 'is not supported by the {backend} backend. ' + f'is not supported by the {self.renderer.backend} backend. ' 'To map a dimension to a color supply ' - 'an explicit list of rgba colors.'.format( - backend=self.renderer.backend - ) + 'an explicit list of rgba colors.' ) style['fmt'] = 'none' @@ -384,7 +383,7 @@ def _compute_ticks(self, element, edges, widths, lims): if self.cyclic: x0, x1, _, _ = lims xvals = np.linspace(x0, x1, self.xticks) - labels = ["%.0f" % np.rad2deg(x) + '\N{DEGREE SIGN}' for x in xvals] + labels = [f"{np.rad2deg(x):.0f}\N{DEGREE SIGN}" for x in xvals] elif self.xticks: dim = element.get_dimension(0) inds = np.linspace(0, len(edges), self.xticks, dtype=np.int) diff --git a/holoviews/plotting/mpl/chart3d.py b/holoviews/plotting/mpl/chart3d.py index 3354dce326..ced30ffb46 100644 --- a/holoviews/plotting/mpl/chart3d.py +++ b/holoviews/plotting/mpl/chart3d.py @@ -2,6 +2,7 @@ import param import matplotlib.cm as cm from mpl_toolkits.mplot3d.art3d import Line3DCollection +from packaging.version import Version from ...core import Dimension from ...core.options import abbreviated_exception @@ -10,7 +11,7 @@ from .element import ColorbarPlot from .chart import PointPlot from .path import PathPlot -from .util import LooseVersion, mpl_version +from .util import mpl_version class Plot3D(ColorbarPlot): @@ -83,7 +84,7 @@ def _finalize_axis(self, key, **kwargs): if self.disable_axes: axis.set_axis_off() - if mpl_version <= LooseVersion('1.5.9'): + if mpl_version <= Version('1.5.9'): axis.set_axis_bgcolor(self.bgcolor) else: 
axis.set_facecolor(self.bgcolor) @@ -248,5 +249,5 @@ class TriSurfacePlot(Plot3D): def get_data(self, element, ranges, style): dims = element.dimensions() self._norm_kwargs(element, ranges, style, dims[2]) - x, y, z = [element.dimension_values(d) for d in dims] + x, y, z = (element.dimension_values(d) for d in dims) return (x, y, z), style, {} diff --git a/holoviews/plotting/mpl/element.py b/holoviews/plotting/mpl/element.py index e88f499719..e7fb27b197 100644 --- a/holoviews/plotting/mpl/element.py +++ b/holoviews/plotting/mpl/element.py @@ -10,6 +10,7 @@ from matplotlib import ticker from matplotlib.dates import date2num from matplotlib.image import AxesImage +from packaging.version import Version from ...core import util from ...core import (OrderedDict, NdOverlay, DynamicMap, Dataset, @@ -21,7 +22,7 @@ from ..plot import GenericElementPlot, GenericOverlayPlot from ..util import process_cmap, color_intervals, dim_range_key from .plot import MPLPlot, mpl_rc_context -from .util import LooseVersion, EqHistNormalize, mpl_version, validate, wrap_formatter +from .util import EqHistNormalize, mpl_version, validate, wrap_formatter class ElementPlot(GenericElementPlot, MPLPlot): @@ -130,7 +131,7 @@ def _finalize_axis(self, key, element=None, title=None, dimensions=None, ranges= subplots = list(self.subplots.values()) if self.subplots else [] if self.zorder == 0 and key is not None: if self.bgcolor: - if mpl_version <= LooseVersion('1.5.9'): + if mpl_version <= Version('1.5.9'): axis.set_axis_bgcolor(self.bgcolor) else: axis.set_facecolor(self.bgcolor) @@ -236,7 +237,7 @@ def _finalize_ticks(self, axis, dimensions, xticks, yticks, zticks): axes_list.append(axis.zaxis) for ax, ax_obj in zip(axes_str, axes_list): - tick_fontsize = self._fontsize('%sticks' % ax,'labelsize',common=False) + tick_fontsize = self._fontsize(f'{ax}ticks','labelsize',common=False) if tick_fontsize: ax_obj.set_tick_params(**tick_fontsize) def _finalize_artist(self, element): @@ -902,7 +903,7 @@ def 
_norm_kwargs(self, element, ranges, opts, vdim, values=None, prefix=''): with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered') clim = (np.nanmin(values), np.nanmax(values)) - except: + except Exception: clim = np.NaN, np.NaN else: clim = element.range(vdim) diff --git a/holoviews/plotting/mpl/graphs.py b/holoviews/plotting/mpl/graphs.py index 172226cfc8..b4eaea9af3 100644 --- a/holoviews/plotting/mpl/graphs.py +++ b/holoviews/plotting/mpl/graphs.py @@ -144,7 +144,7 @@ def get_data(self, element, ranges, style): return {'nodes': (pxs, pys), 'edges': paths}, style, {'dimensions': dims} def get_extents(self, element, ranges, range_type='combined'): - return super(GraphPlot, self).get_extents(element.nodes, ranges, range_type) + return super().get_extents(element.nodes, ranges, range_type) def init_artists(self, ax, plot_args, plot_kwargs): # Draw edges @@ -362,7 +362,7 @@ def _update_labels(self, ax, element, data, style): for label in labels: try: label.remove() - except: + except Exception: pass if 'text' not in data: self.handles['labels'] = [] diff --git a/holoviews/plotting/mpl/heatmap.py b/holoviews/plotting/mpl/heatmap.py index 8ed87af04e..bd4d12d31b 100644 --- a/holoviews/plotting/mpl/heatmap.py +++ b/holoviews/plotting/mpl/heatmap.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from itertools import product import numpy as np @@ -309,7 +307,7 @@ def get_extents(self, view, ranges, range_type='combined'): def get_data(self, element, ranges, style): # dimension labels dim_labels = element.dimensions(label=True)[:3] - x, y, z = [dimension_sanitizer(d) for d in dim_labels] + x, y, z = (dimension_sanitizer(d) for d in dim_labels) if self.invert_axes: x, y = y, x diff --git a/holoviews/plotting/mpl/plot.py b/holoviews/plotting/mpl/plot.py index a64ef99184..46483ef4e7 100644 --- a/holoviews/plotting/mpl/plot.py +++ b/holoviews/plotting/mpl/plot.py @@ -513,7 +513,7 @@ def 
_layout_axis(self, layout, axis): layout_axis.patch.set_visible(False) for ax, ax_obj in zip(['x', 'y'], [layout_axis.xaxis, layout_axis.yaxis]): - tick_fontsize = self._fontsize('%sticks' % ax,'labelsize', common=False) + tick_fontsize = self._fontsize(f'{ax}ticks','labelsize', common=False) if tick_fontsize: ax_obj.set_tick_params(**tick_fontsize) # Set labels @@ -938,7 +938,7 @@ def _compute_gridspec(self, layout): elif empty: obj = AdjointLayout([]) elif not view.traverse(lambda x: x, [Element]): - self.param.warning('%s is empty, skipping subplot.' % obj.main) + self.param.warning(f'{obj.main} is empty, skipping subplot.') continue elif self.transpose: layout_count = (c*self.rows+(r+1)) diff --git a/holoviews/plotting/mpl/raster.py b/holoviews/plotting/mpl/raster.py index 77c9d3dffd..bab605baee 100644 --- a/holoviews/plotting/mpl/raster.py +++ b/holoviews/plotting/mpl/raster.py @@ -2,6 +2,7 @@ import param import numpy as np +from packaging.version import Version from ...core import CompositeOverlay, Element from ...core import traversal @@ -11,7 +12,7 @@ from .chart import PointPlot from .element import ElementPlot, ColorbarPlot, LegendPlot, OverlayPlot from .plot import MPLPlot, GridPlot, mpl_rc_context -from .util import LooseVersion, get_raster_array, mpl_version +from .util import get_raster_array, mpl_version class RasterBasePlot(ElementPlot): @@ -58,7 +59,7 @@ class RasterPlot(RasterBasePlot, ColorbarPlot): 'filterrad', 'clims', 'norm'] def __init__(self, *args, **kwargs): - super(RasterPlot, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if self.hmap.type == Raster: self.invert_yaxis = not self.invert_yaxis @@ -124,7 +125,7 @@ def get_data(self, element, ranges, style): return [data], style, {'xticks': xticks, 'yticks': yticks} def init_artists(self, ax, plot_args, plot_kwargs): - handles = super(RGBPlot, self).init_artists(ax, plot_args, plot_kwargs) + handles = super().init_artists(ax, plot_args, plot_kwargs) if 
'holoviews.operation.datashader' not in sys.modules or not self.show_legend: return handles try: @@ -191,7 +192,7 @@ def init_artists(self, ax, plot_args, plot_kwargs): locs = plot_kwargs.pop('locs', None) artist = ax.pcolormesh(*plot_args, **plot_kwargs) colorbar = self.handles.get('cbar') - if colorbar and mpl_version < LooseVersion('3.1'): + if colorbar and mpl_version < Version('3.1'): colorbar.set_norm(artist.norm) if hasattr(colorbar, 'set_array'): # Compatibility with mpl < 3 diff --git a/holoviews/plotting/mpl/renderer.py b/holoviews/plotting/mpl/renderer.py index babb769c9a..6de871b76c 100644 --- a/holoviews/plotting/mpl/renderer.py +++ b/holoviews/plotting/mpl/renderer.py @@ -94,7 +94,7 @@ def show(self, obj): for o in objects: plots.append(self.get_plot(o)) plt.show() - except: + except Exception: raise finally: MPLPlot._close_figures = True @@ -167,7 +167,7 @@ def _figure_data(self, plot, fmt, bbox_inches='tight', as_script=False, **kwargs # Attempts to precompute the tight bounding box try: kw = self._compute_bbox(fig, kw) - except: + except Exception: pass bytes_io = BytesIO() fig.canvas.print_figure(bytes_io, **kw) @@ -196,7 +196,7 @@ def _anim_data(self, anim, fmt): if self.dpi is not None: anim_kwargs['dpi'] = self.dpi if not hasattr(anim, '_encoded_video'): # Windows will throw PermissionError with auto-delete - with NamedTemporaryFile(suffix='.%s' % fmt, delete=False) as f: + with NamedTemporaryFile(suffix=f'.{fmt}', delete=False) as f: anim.save(f.name, writer=writer, **anim_kwargs) video = f.read() f.close() diff --git a/holoviews/plotting/mpl/sankey.py b/holoviews/plotting/mpl/sankey.py index febc9a7b6e..875d2d7c41 100644 --- a/holoviews/plotting/mpl/sankey.py +++ b/holoviews/plotting/mpl/sankey.py @@ -104,7 +104,7 @@ def get_data(self, element, ranges, style): if self.show_values: value = value_dim.pprint_value(node['value'], print_unit=True) if label: - label = '%s - %s' % (label, value) + label = f'{label} - {value}' else: label = value if 
label: @@ -120,7 +120,7 @@ def _update_labels(self, ax, data, style): for label in labels: try: label.remove() - except: + except Exception: pass if 'text' not in data: return [] diff --git a/holoviews/plotting/mpl/util.py b/holoviews/plotting/mpl/util.py index 07c6e33a7b..3b438f1b91 100644 --- a/holoviews/plotting/mpl/util.py +++ b/holoviews/plotting/mpl/util.py @@ -14,29 +14,30 @@ from matplotlib.transforms import Bbox, TransformedBbox, Affine2D from matplotlib.rcsetup import ( validate_fontsize, validate_fonttype, validate_hatch) +from packaging.version import Version try: # starting Matplotlib 3.4.0 from matplotlib._enums import CapStyle as validate_capstyle from matplotlib._enums import JoinStyle as validate_joinstyle -except: # before Matplotlib 3.4.0 +except ImportError: # before Matplotlib 3.4.0 from matplotlib.rcsetup import ( validate_capstyle, validate_joinstyle) try: from nc_time_axis import NetCDFTimeConverter, CalendarDateTime nc_axis_available = True -except: +except ImportError: from matplotlib.dates import DateConverter NetCDFTimeConverter = DateConverter nc_axis_available = False from ...core.util import ( - LooseVersion, arraylike_types, cftime_types, is_number + arraylike_types, cftime_types, is_number ) from ...element import Raster, RGB, Polygons from ..util import COLOR_ALIASES, RGB_HEX_REGEX -mpl_version = LooseVersion(matplotlib.__version__) +mpl_version = Version(matplotlib.__version__) def is_color(color): @@ -85,7 +86,7 @@ def get_old_rcparams(): ] old_rcparams = { k: v for k, v in matplotlib.rcParams.items() - if mpl_version < LooseVersion('3.0') or k not in deprecated_rcparams + if mpl_version < Version('3.0') or k not in deprecated_rcparams } return old_rcparams @@ -122,7 +123,7 @@ def validate(style, value, vectorized=True): try: valid = validator(value) return False if valid == False else True - except: + except Exception: return False @@ -283,7 +284,7 @@ def fix_aspect(fig, nrows, ncols, title=None, extra_artists=[], bbox = 
get_tight_bbox(fig, extra_artists) top = bbox.intervaly[1] if title and title.get_text(): - title.set_y((top/(w*aspect))) + title.set_y(top/(w*aspect)) def get_tight_bbox(fig, bbox_extra_artists=[], pad=None): diff --git a/holoviews/plotting/plot.py b/holoviews/plotting/plot.py index c5b6348a7c..e1cd6a588b 100644 --- a/holoviews/plotting/plot.py +++ b/holoviews/plotting/plot.py @@ -6,7 +6,7 @@ import uuid import warnings -from collections import Counter, defaultdict +from collections import Counter, defaultdict, OrderedDict from functools import partial from itertools import groupby, product @@ -14,16 +14,11 @@ import param from panel.config import config +from panel.io.document import unlocked from panel.io.notebook import push from panel.io.state import state -try: - from panel.io.document import unlocked -except Exception: - from panel.io.server import unlocked from pyviz_comms import JupyterComm - from ..selection import NoOpSelectionDisplay -from ..core import OrderedDict from ..core import util, traversal from ..core.data import Dataset, disable_pipeline from ..core.element import Element, Element3D @@ -32,7 +27,7 @@ from ..core.options import Store, Compositor, SkipRendering, lookup_options from ..core.overlay import NdOverlay from ..core.spaces import HoloMap, DynamicMap -from ..core.util import LooseVersion, stream_parameters, isfinite +from ..core.util import stream_parameters, isfinite from ..element import Table, Graph from ..streams import Stream, RangeXY, RangeX, RangeY from ..util.transform import dim @@ -113,19 +108,12 @@ def document(self, doc): self.root is self.handles.get('plot') and not isinstance(self, GenericAdjointLayoutPlot)): doc.on_session_destroyed(self._session_destroy) - from .bokeh.util import bokeh_version - if self._document and bokeh_version >= LooseVersion('2.4.0'): + if self._document: if isinstance(self._document.callbacks._session_destroyed_callbacks, set): 
self._document.callbacks._session_destroyed_callbacks.discard(self._session_destroy) else: self._document.callbacks._session_destroyed_callbacks.pop(self._session_destroy, None) - elif self._document: - if isinstance(self._document._session_destroyed_callbacks, set): - self._document._session_destroyed_callbacks.discard(self._session_destroy) - else: - self._document._session_destroyed_callbacks.pop(self._session_destroy, None) - self._document = doc if self.subplots: for plot in self.subplots.values(): @@ -288,7 +276,7 @@ def lookup_options(cls, obj, group): -class PlotSelector(object): +class PlotSelector: """ Proxy that allows dynamic selection of a plotting class based on a function of the plotted object. Behaves like a Plot class and @@ -340,7 +328,7 @@ def get_plot_class(self, obj): def __setattr__(self, label, value): try: return super().__setattr__(label, value) - except: + except Exception: raise Exception("Please set class parameters directly on classes %s" % ', '.join(str(cls) for cls in self.__dict__['plot_classes'].values())) @@ -428,7 +416,7 @@ def __getitem__(self, frame): Get the state of the Plot for a given frame number. 
""" if isinstance(frame, int) and frame > len(self): - self.param.warning("Showing last frame available: %d" % len(self)) + self.param.warning(f"Showing last frame available: {len(self)}") if not self.drawn: self.handles['fig'] = self.initialize_plot() if not isinstance(frame, tuple): frame = self.keys[frame] @@ -763,7 +751,7 @@ def _compute_group_range(cls, group, elements, ranges, framewise, with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered') drange = (np.nanmin(values), np.nanmax(values)) - except: + except Exception: factors = util.unique_array(values) if dim_name not in group_ranges: group_ranges[dim_name] = { @@ -957,7 +945,7 @@ def __len__(self): return len(self.keys) -class CallbackPlot(object): +class CallbackPlot: backend = None @@ -1875,7 +1863,7 @@ def __init__(self, layout, keys=None, dimensions=None, **params): dimensions=dimensions, **params) nested_streams = layout.traverse(lambda x: get_nested_streams(x), [DynamicMap]) - self.streams = list(set([s for streams in nested_streams for s in streams])) + self.streams = list({s for streams in nested_streams for s in streams}) self._link_dimensioned_streams() def _link_dimensioned_streams(self): diff --git a/holoviews/plotting/plotly/__init__.py b/holoviews/plotting/plotly/__init__.py index 00217e4f98..eb42d7dbe9 100644 --- a/holoviews/plotting/plotly/__init__.py +++ b/holoviews/plotting/plotly/__init__.py @@ -1,12 +1,13 @@ import plotly from param import concrete_descendents +from packaging.version import Version from ...core import ( Overlay, NdOverlay, Layout, NdLayout, GridSpace, GridMatrix, config ) from ...core.options import Store, Cycle, Options -from ...core.util import LooseVersion, VersionError +from ...core.util import VersionError from ...element import * # noqa (Element import for registration) from .element import ElementPlot @@ -25,7 +26,7 @@ from .shapes import * # noqa (API import) from .images import * # noqa (API import) -if 
LooseVersion(plotly.__version__) < LooseVersion('4.0.0'): +if Version(plotly.__version__) < Version('4.0.0'): raise VersionError( "The plotly extension requires a plotly version >=4.0.0, " "please upgrade from plotly %s to a more recent version." diff --git a/holoviews/plotting/plotly/callbacks.py b/holoviews/plotting/plotly/callbacks.py index 10c8c370a4..4380d83837 100644 --- a/holoviews/plotting/plotly/callbacks.py +++ b/holoviews/plotting/plotly/callbacks.py @@ -39,7 +39,7 @@ def __call__(cls, *args, **kwargs): @add_metaclass(PlotlyCallbackMetaClass) -class PlotlyCallback(object): +class PlotlyCallback: def __init__(self, plot, streams, source, **params): self.plot = plot @@ -235,8 +235,8 @@ def build_event_data_from_viewport(cls, traces, property_value): xaxis = trace.get('xaxis', 'x').replace('x', 'xaxis') yaxis = trace.get('yaxis', 'y').replace('y', 'yaxis') - xprop = '{xaxis}.range'.format(xaxis=xaxis) - yprop = '{yaxis}.range'.format(yaxis=yaxis) + xprop = f'{xaxis}.range' + yprop = f'{yaxis}.range' if not property_value: x_range = None diff --git a/holoviews/plotting/plotly/chart3d.py b/holoviews/plotting/plotly/chart3d.py index 6d870d3d15..e5e13619f2 100644 --- a/holoviews/plotting/plotly/chart3d.py +++ b/holoviews/plotting/plotly/chart3d.py @@ -102,7 +102,7 @@ class TriSurfacePlot(Chart3DPlot, ColorbarPlot): def get_data(self, element, ranges, style, **kwargs): try: from scipy.spatial import Delaunay - except: + except ImportError: raise SkipRendering("SciPy not available, cannot plot TriSurface") x, y, z = (element.dimension_values(i) for i in range(3)) points2D = np.vstack([x, y]).T diff --git a/holoviews/plotting/plotly/dash.py b/holoviews/plotting/plotly/dash.py index 61fdd53eda..d2b12714ae 100644 --- a/holoviews/plotting/plotly/dash.py +++ b/holoviews/plotting/plotly/dash.py @@ -139,7 +139,7 @@ def to_function_spec(hvobj): # Check for unbounded dimensions if isinstance(hvobj, DynamicMap) and hvobj.unbounded: - dims = ', '.join('%r' % dim for dim in 
hvobj.unbounded) + dims = ', '.join(f'{dim!r}' for dim in hvobj.unbounded) msg = ('DynamicMap cannot be displayed without explicit indexing ' 'as {dims} dimension(s) are unbounded. ' '\nSet dimensions bounds with the DynamicMap redim.range ' @@ -618,9 +618,7 @@ def update_figure(*args): [Input(component_id=kdim_slider_id, component_property="value")] ) def update_kdim_label(value, kdim_label=kdim_label): - return "{kdim_label}: {value:.2f}".format( - kdim_label=kdim_label, value=value - ) + return f"{kdim_label}: {value:.2f}" # Collect Dash components into DashComponents namedtuple components = DashComponents( diff --git a/holoviews/plotting/plotly/element.py b/holoviews/plotting/plotly/element.py index 5097b537e7..66d397e74d 100644 --- a/holoviews/plotting/plotly/element.py +++ b/holoviews/plotting/plotly/element.py @@ -140,8 +140,8 @@ def generate_plot(self, key, ranges, element=None, is_geo=False): if is_geo and not self._supports_geo: raise ValueError( - "Elements of type {typ} cannot be overlaid with Tiles elements " - "using the plotly backend".format(typ=type(element)) + f"Elements of type {type(element)} cannot be overlaid " + "with Tiles elements using the plotly backend" ) if element is None: @@ -447,10 +447,10 @@ def init_layout(self, key, element, ranges, is_geo=False): # Create dimension string used to compute matching axes if isinstance(xdim, (list, tuple)): - dim_str = "-".join(["%s^%s^%s" % (d.name, d.label, d.unit) + dim_str = "-".join([f"{d.name}^{d.label}^{d.unit}" for d in xdim]) else: - dim_str = "%s^%s^%s" % (xdim.name, xdim.label, xdim.unit) + dim_str = f"{xdim.name}^{xdim.label}^{xdim.unit}" xaxis['_dim'] = dim_str @@ -485,10 +485,10 @@ def init_layout(self, key, element, ranges, is_geo=False): # Create dimension string used to compute matching axes if isinstance(ydim, (list, tuple)): - dim_str = "-".join(["%s^%s^%s" % (d.name, d.label, d.unit) + dim_str = "-".join([f"{d.name}^{d.label}^{d.unit}" for d in ydim]) else: - dim_str = "%s^%s^%s" 
% (ydim.name, ydim.label, ydim.unit) + dim_str = f"{ydim.name}^{ydim.label}^{ydim.unit}" yaxis['_dim'] = dim_str, if 'bare' in self.yaxis: diff --git a/holoviews/plotting/plotly/images.py b/holoviews/plotting/plotly/images.py index c4d513794d..bb55caac29 100644 --- a/holoviews/plotting/plotly/images.py +++ b/holoviews/plotting/plotly/images.py @@ -79,8 +79,7 @@ def get_data(self, element, ranges, style, is_geo=False, **kwargs): img = np.zeros((1, 1, 3), dtype=np.uint8) if img.ndim != 3 or img.shape[2] not in (3, 4): - raise ValueError("Unsupported image array with shape: {shape}" - .format(shape=img.shape)) + raise ValueError(f"Unsupported image array with shape: {img.shape}") # Ensure axis inversions are handled correctly l, b, r, t = element.bounds.lbrt() diff --git a/holoviews/plotting/plotly/raster.py b/holoviews/plotting/plotly/raster.py index b27b76955d..2ff5366523 100644 --- a/holoviews/plotting/plotly/raster.py +++ b/holoviews/plotting/plotly/raster.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import numpy as np import param diff --git a/holoviews/plotting/plotly/renderer.py b/holoviews/plotting/plotly/renderer.py index 1290e89ea0..895636c9a0 100644 --- a/holoviews/plotting/plotly/renderer.py +++ b/holoviews/plotting/plotly/renderer.py @@ -131,7 +131,7 @@ def _figure_data(self, plot, fmt, as_script=False, **kwargs): if fmt == 'svg': data = data.decode('utf-8') else: - raise ValueError("Unsupported format: {fmt}".format(fmt=fmt)) + raise ValueError(f"Unsupported format: {fmt}") if as_script: b64 = base64.b64encode(data).decode("utf-8") diff --git a/holoviews/plotting/plotly/selection.py b/holoviews/plotting/plotly/selection.py index 087d939a4a..49c82ed5ba 100644 --- a/holoviews/plotting/plotly/selection.py +++ b/holoviews/plotting/plotly/selection.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from ...core.overlay import NdOverlay, Overlay from ...selection import OverlaySelectionDisplay diff --git 
a/holoviews/plotting/plotly/shapes.py b/holoviews/plotting/plotly/shapes.py index 8a3d854742..2865148141 100644 --- a/holoviews/plotting/plotly/shapes.py +++ b/holoviews/plotting/plotly/shapes.py @@ -35,10 +35,9 @@ def init_graph(self, datum, options, index=0, is_geo=False, **kwargs): @staticmethod def build_path(xs, ys, closed=True): - line_tos = ''.join(['L{x} {y}'.format(x=x, y=y) + line_tos = ''.join([f'L{x} {y}' for x, y in zip(xs[1:], ys[1:])]) - path = 'M{x0} {y0}{line_tos}'.format( - x0=xs[0], y0=ys[0], line_tos=line_tos) + path = f'M{xs[0]} {ys[0]}{line_tos}' if closed: path += 'Z' diff --git a/holoviews/plotting/plotly/util.py b/holoviews/plotting/plotly/util.py index 51cb49105c..caf36d6049 100644 --- a/holoviews/plotting/plotly/util.py +++ b/holoviews/plotting/plotly/util.py @@ -749,8 +749,8 @@ def figure_grid(figures_grid, if responsive: scale_x = 1./ncols scale_y = 1./nrows - px = ((0.2/(ncols) if ncols > 1 else 0)) - py = ((0.2/(nrows) if nrows > 1 else 0)) + px = (0.2/(ncols) if ncols > 1 else 0) + py = (0.2/(nrows) if nrows > 1 else 0) sx = scale_x-px sy = scale_y-py _scale_translate(fig, sx, sy, scale_x*c+px/2., scale_y*r+py/2.) diff --git a/holoviews/plotting/renderer.py b/holoviews/plotting/renderer.py index a304191712..70c1ea0203 100644 --- a/holoviews/plotting/renderer.py +++ b/holoviews/plotting/renderer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Public API for all plotting renderers supported by HoloViews, regardless of plotting package or backend. 
@@ -6,11 +5,7 @@ import base64 import os -from io import BytesIO -try: - from StringIO import StringIO -except ImportError: - from io import StringIO +from io import BytesIO, StringIO from contextlib import contextmanager from functools import partial @@ -21,6 +16,7 @@ from bokeh.io import curdoc from bokeh.embed import file_html from bokeh.resources import CDN, INLINE +from packaging.version import Version from panel import config from panel.io.notebook import ipywidget, load_notebook, render_model, render_mimebundle from panel.io.state import state @@ -34,14 +30,14 @@ from ..core.data import disable_pipeline from ..core.io import Exporter from ..core.options import Store, StoreOptions, SkipRendering, Compositor -from ..core.util import unbound_dimensions, LooseVersion +from ..core.util import unbound_dimensions from ..streams import Stream from . import Plot from .util import displayable, collate, initialize_dynamic from param.parameterized import bothmethod -panel_version = LooseVersion(panel.__version__) +panel_version = Version(panel.__version__) # Tags used when visual output is to be embedded in HTML IMAGE_TAG = "" @@ -206,7 +202,7 @@ def get_plot(self_or_cls, obj, doc=None, renderer=None, comm=None, **kwargs): Given a HoloViews Viewable return a corresponding plot instance. """ if isinstance(obj, DynamicMap) and obj.unbounded: - dims = ', '.join('%r' % dim for dim in obj.unbounded) + dims = ', '.join(f'{dim!r}' for dim in obj.unbounded) msg = ('DynamicMap cannot be displayed without explicit indexing ' 'as {dims} dimension(s) are unbounded. 
' '\nSet dimensions bounds with the DynamicMap redim.range ' @@ -358,7 +354,7 @@ def html(self, obj, fmt=None, css=None, resources='CDN', **kwargs): css['height'] = '%dpx' % (h*self.dpi*1.15) if isinstance(css, dict): - css = '; '.join("%s: %s" % (k, v) for k, v in css.items()) + css = '; '.join(f"{k}: {v}" for k, v in css.items()) else: raise ValueError("CSS must be supplied as Python dictionary") @@ -541,8 +537,7 @@ class needed to render it with the current renderer. try: plotclass = Store.registry[cls.backend][element_type] except KeyError: - raise SkipRendering("No plotting class for {0} " - "found".format(element_type.__name__)) + raise SkipRendering(f"No plotting class for {element_type.__name__} found") return plotclass @classmethod @@ -603,7 +598,7 @@ def save(self_or_cls, obj, basename, fmt='auto', key={}, info={}, basename.write(encoded) basename.seek(0) else: - filename ='%s.%s' % (basename, info['file-ext']) + filename =f"{basename}.{info['file-ext']}" with open(filename, 'wb') as f: f.write(encoded) @@ -649,7 +644,7 @@ def load_nb(cls, inline=True): with param.logging_level('ERROR'): try: ip = get_ipython() # noqa - except: + except Exception: ip = None if not ip or not hasattr(ip, 'kernel'): return diff --git a/holoviews/plotting/util.py b/holoviews/plotting/util.py index 59e8aed8aa..ee49ab06ef 100644 --- a/holoviews/plotting/util.py +++ b/holoviews/plotting/util.py @@ -92,7 +92,7 @@ def collate(obj): collated_layout = Layout(el.collate()) expanded.extend(collated_layout.values()) return Layout(expanded) - except: + except Exception: raise Exception(undisplayable_info(obj)) else: raise Exception(undisplayable_info(obj)) @@ -320,14 +320,14 @@ def undisplayable_info(obj, html=False): collate = 'collate' if html else 'collate' info = "For more information, please consult the Composing Data tutorial (http://git.io/vtIQh)" if isinstance(obj, HoloMap): - error = "HoloMap of %s objects cannot be displayed." 
% obj.type.__name__ - remedy = "Please call the %s method to generate a displayable object" % collate + error = f"HoloMap of {obj.type.__name__} objects cannot be displayed." + remedy = f"Please call the {collate} method to generate a displayable object" elif isinstance(obj, Layout): error = "Layout containing HoloMaps of Layout or GridSpace objects cannot be displayed." - remedy = "Please call the %s method on the appropriate elements." % collate + remedy = f"Please call the {collate} method on the appropriate elements." elif isinstance(obj, GridSpace): error = "GridSpace containing HoloMaps of Layouts cannot be displayed." - remedy = "Please call the %s method on the appropriate elements." % collate + remedy = f"Please call the {collate} method on the appropriate elements." if not html: return '\n'.join([error, remedy, info]) @@ -350,8 +350,8 @@ def compute_sizes(sizes, size_fn, scaling_factor, scaling_method, base_size): scaling_factor = scaling_factor**2 else: raise ValueError( - 'Invalid value for argument "scaling_method": "{}". ' - 'Valid values are: "width", "area".'.format(scaling_method)) + f'Invalid value for argument "scaling_method": "{scaling_method}". 
' + 'Valid values are: "width", "area".') sizes = size_fn(sizes) return (base_size*scaling_factor*sizes) @@ -456,7 +456,7 @@ def validate_unbounded_mode(holomaps, dynmaps): composite = HoloMap(enumerate(holomaps), kdims=['testing_kdim']) holomap_kdims = set(unique_iterator([kd.name for dm in holomaps for kd in dm.kdims])) hmranges = {d: composite.range(d) for d in holomap_kdims} - if any(not set(d.name for d in dm.kdims) <= holomap_kdims + if any(not {d.name for d in dm.kdims} <= holomap_kdims for dm in dynmaps): raise Exception('DynamicMap that are unbounded must have key dimensions that are a ' 'subset of dimensions of the HoloMap(s) defining the keys.') @@ -563,7 +563,7 @@ def mplcmap_to_palette(cmap, ncolors=None, categorical=False): import matplotlib.cm as cm try: cmap = cm.get_cmap(cmap) - except: + except Exception: cmap = cm.get_cmap(cmap.lower()) else: from matplotlib import colormaps @@ -626,7 +626,7 @@ def bokeh_palette_to_palette(cmap, ncolors=None, categorical=False): else: palette = getattr(palettes, cmap, getattr(palettes, cmap.capitalize(), None)) if palette is None: - raise ValueError("Supplied palette %s not found among bokeh palettes" % cmap) + raise ValueError(f"Supplied palette {cmap} not found among bokeh palettes") elif isinstance(palette, dict) and (cmap in palette or cmap.capitalize() in palette): # Some bokeh palettes are doubly nested palette = palette.get(cmap, palette.get(cmap.capitalize())) @@ -707,7 +707,7 @@ def info(provider,names): cmaps += info('matplotlib', mpl_cmaps) cmaps += info('matplotlib', [cmap+'_r' for cmap in mpl_cmaps if not cmap.endswith('_r')]) - except: + except ImportError: pass if 'bokeh' in provider: try: @@ -715,7 +715,7 @@ def info(provider,names): cmaps += info('bokeh', palettes.all_palettes) cmaps += info('bokeh', [p+'_r' for p in palettes.all_palettes if not p.endswith('_r')]) - except: + except ImportError: pass if 'colorcet' in provider: try: @@ -724,7 +724,7 @@ def info(provider,names): 
cet_maps['glasbey_hv'] = glasbey_hv # Add special hv-specific map cmaps += info('colorcet', cet_maps) cmaps += info('colorcet', [p+'_r' for p in cet_maps if not p.endswith('_r')]) - except: + except ImportError: pass return sorted(unique_iterator(cmaps)) @@ -924,7 +924,7 @@ def process_cmap(cmap, ncolors=None, provider=None, categorical=False): try: # Try processing as matplotlib colormap palette = mplcmap_to_palette(cmap, ncolors) - except: + except Exception: palette = None if not isinstance(palette, list): raise TypeError("cmap argument %s expects a list, Cycle or valid %s colormap or palette." @@ -1009,7 +1009,7 @@ def scale_fontsize(size, scaling): size = size * scaling if ext is not None: - size = ('%.3f' % size).rstrip('0').rstrip('.') + ext + size = f'{size:.3f}'.rstrip('0').rstrip('.') + ext return size @@ -1056,7 +1056,7 @@ def get_min_distance(element): try: from scipy.spatial.distance import pdist return pdist(element.array([0, 1])).min() - except: + except Exception: return _get_min_distance_numpy(element) @@ -1088,7 +1088,7 @@ def rgb2hex(rgb): """ if len(rgb) > 3: rgb = rgb[:-1] - return "#{0:02x}{1:02x}{2:02x}".format(*(int(v*255) for v in rgb)) + return "#{:02x}{:02x}{:02x}".format(*(int(v*255) for v in rgb)) def dim_range_key(eldim): @@ -1293,7 +1293,7 @@ def _process(self, element, key=None): [1, 0.99989, 0.93683 ], [1, 1, 1 ]] # Bokeh palette -fire = [str('#{0:02x}{1:02x}{2:02x}'.format(int(r*255),int(g*255),int(b*255))) +fire = [f'#{int(r * 255):02x}{int(g * 255):02x}{int(b * 255):02x}' for r,g,b in fire_colors] diff --git a/holoviews/selection.py b/holoviews/selection.py index d8da1c6e2e..0ea6ce0cd5 100644 --- a/holoviews/selection.py +++ b/holoviews/selection.py @@ -181,7 +181,7 @@ def _selection_transform(self, hvobj, operations=()): # This is a DynamicMap that we don't know how to recurse into. 
self.param.warning( "linked selection: Encountered DynamicMap that we don't know " - "how to recurse into:\n{!r}".format(hvobj) + f"how to recurse into:\n{hvobj!r}" ) return hvobj elif isinstance(hvobj, Element): @@ -464,7 +464,7 @@ def selected_cmap(self): return None if self.selected_color is None else _color_to_cmap(self.selected_color) -class SelectionDisplay(object): +class SelectionDisplay: """ Base class for selection display classes. Selection display classes are responsible for transforming an element (or DynamicMap that produces an diff --git a/holoviews/streams.py b/holoviews/streams.py index d5353695a9..383a1efaf6 100644 --- a/holoviews/streams.py +++ b/holoviews/streams.py @@ -12,8 +12,10 @@ from functools import partial from itertools import groupby from types import FunctionType +from packaging.version import Version import param +import pandas as pd import numpy as np from .core import util @@ -53,7 +55,7 @@ def streams_list_from_dict(streams): if isinstance(v, param.Parameter) and v.owner is not None: params[k] = v else: - raise TypeError('Cannot handle value %r in streams dictionary' % v) + raise TypeError(f'Cannot handle value {v!r} in streams dictionary') return Params.from_params(params) @@ -166,7 +168,7 @@ def trigger(cls, streams): if key_count > 1 and key_count > value_count and k not in key_clashes: key_clashes.append(k) if key_clashes: - print('Parameter name clashes for keys %r' % key_clashes) + print(f'Parameter name clashes for keys {key_clashes!r}') # Group subscribers by precedence while keeping the ordering # within each group @@ -301,7 +303,7 @@ def clear(self, policy='all'): """ policies = ['all', 'user', 'internal'] if policy not in policies: - raise ValueError('Policy for clearing subscribers must be one of %s' % policies) + raise ValueError(f'Policy for clearing subscribers must be one of {policies}') if policy == 'all': remaining = [] elif policy == 'user': @@ -342,7 +344,7 @@ def _validate_rename(self, mapping): param_names = 
[k for k in self.param if k != 'name'] for k, v in mapping.items(): if k not in param_names: - raise KeyError('Cannot rename %r as it is not a stream parameter' % k) + raise KeyError(f'Cannot rename {k!r} as it is not a stream parameter') if k != v and v in param_names: raise KeyError('Cannot rename to %r as it clashes with a ' 'stream parameter of the same name' % v) @@ -444,12 +446,12 @@ def update(self, **kwargs): def __repr__(self): cls_name = self.__class__.__name__ - kwargs = ','.join('%s=%r' % (k, v) + kwargs = ','.join(f'{k}={v!r}' for (k, v) in self.param.get_param_values() if k != 'name') if not self._rename: - return '%s(%s)' % (cls_name, kwargs) + return f'{cls_name}({kwargs})' else: - return '%s(%r, %s)' % (cls_name, self._rename, kwargs) + return f'{cls_name}({self._rename!r}, {kwargs})' def __str__(self): @@ -524,7 +526,7 @@ class Buffer(Pipe): Arbitrary data being streamed to a DynamicMap callback.""") def __init__(self, data, length=1000, index=True, following=True, **params): - if (util.pd and isinstance(data, util.pd.DataFrame)): + if isinstance(data, pd.DataFrame): example = data elif isinstance(data, np.ndarray): if data.ndim != 2: @@ -533,7 +535,7 @@ def __init__(self, data, length=1000, index=True, following=True, **params): elif isinstance(data, dict): if not all(isinstance(v, np.ndarray) for v in data.values()): raise ValueError("Data in dictionary must be of array types.") - elif len(set(len(v) for v in data.values())) > 1: + elif len({len(v) for v in data.values()}) > 1: raise ValueError("Columns in dictionary must all be the same length.") example = data else: @@ -555,7 +557,7 @@ def __init__(self, data, length=1000, index=True, following=True, **params): data.stream.sink(self.send) self.sdf = data - if index and (util.pd and isinstance(example, util.pd.DataFrame)): + if index and isinstance(example, pd.DataFrame): example = example.reset_index() params['data'] = example super().__init__(**params) @@ -577,14 +579,14 @@ def verify(self, 
x): elif x.shape[1] != self.data.shape[1]: raise ValueError("Streamed array data expeced to have %d columns, " "got %d." % (self.data.shape[1], x.shape[1])) - elif util.pd and isinstance(x, util.pd.DataFrame) and list(x.columns) != list(self.data.columns): + elif isinstance(x, pd.DataFrame) and list(x.columns) != list(self.data.columns): raise IndexError("Input expected to have columns %s, got %s" % (list(self.data.columns), list(x.columns))) elif isinstance(x, dict): if any(c not in x for c in self.data): raise IndexError("Input expected to have columns %s, got %s" % (sorted(self.data.keys()), sorted(x.keys()))) - elif len(set(len(v) for v in x.values())) > 1: + elif len({len(v) for v in x.values()}) > 1: raise ValueError("Input columns expected to have the " "same number of rows.") @@ -593,7 +595,7 @@ def clear(self): "Clears the data in the stream" if isinstance(self.data, np.ndarray): data = self.data[:, :0] - elif util.pd and isinstance(self.data, util.pd.DataFrame): + elif isinstance(self.data, pd.DataFrame): data = self.data.iloc[:0] elif isinstance(self.data, dict): data = {k: v[:0] for k, v in self.data.items()} @@ -616,13 +618,13 @@ def _concat(self, data): data = np.concatenate([prev_chunk, data]) elif data_length > self.length: data = data[-self.length:] - elif util.pd and isinstance(data, util.pd.DataFrame): + elif isinstance(data, pd.DataFrame): data_length = len(data) if not self.length: - data = util.pd.concat([self.data, data]) + data = pd.concat([self.data, data]) elif data_length < self.length: prev_chunk = self.data.iloc[-(self.length-data_length):] - data = util.pd.concat([prev_chunk, data]) + data = pd.concat([prev_chunk, data]) elif data_length > self.length: data = data.iloc[-self.length:] elif isinstance(data, dict) and data: @@ -649,7 +651,7 @@ def update(self, **kwargs): """ data = kwargs.get('data') if data is not None: - if (util.pd and isinstance(data, util.pd.DataFrame) and + if (isinstance(data, pd.DataFrame) and list(data.columns) 
!= list(self.data.columns) and self._index): data = data.reset_index() self.verify(data) @@ -679,7 +681,7 @@ class Params(Stream): Parameters on the parameterized to watch.""") def __init__(self, parameterized=None, parameters=None, watch=True, watch_only=False, **params): - if util.param_version < util.LooseVersion('1.8.0') and watch: + if util.param_version < Version('1.8.0') and watch: raise RuntimeError('Params stream requires param version >= 1.8.0, ' 'to support watching parameters.') if parameters is None: @@ -749,7 +751,7 @@ def _validate_rename(self, mapping): for k, v in mapping.items(): n = k[1] if isinstance(k, tuple) else k if n not in pnames: - raise KeyError('Cannot rename %r as it is not a stream parameter' % n) + raise KeyError(f'Cannot rename {n!r} as it is not a stream parameter') if n != v and v in pnames: raise KeyError('Cannot rename to %r as it clashes with a ' 'stream parameter of the same name' % v) @@ -759,8 +761,6 @@ def _watcher(self, *events): try: self._events = list(events) self.trigger([self]) - except: - raise finally: self._events = [] @@ -996,8 +996,7 @@ def __init__(self, source, include_region=True, **params): raise ValueError( "The source of SelectionExpr must be an instance of an " "Element subclass or a DynamicMap that returns such an " - "instance. Received value of type {typ}: {val}".format( - typ=type(source), val=source) + f"instance. 
Received value of type {type(source)}: {source}" ) input_streams = self._build_selection_streams(source) diff --git a/holoviews/tests/__init__.py b/holoviews/tests/__init__.py index 42232d07ca..43fc66b0de 100644 --- a/holoviews/tests/__init__.py +++ b/holoviews/tests/__init__.py @@ -2,5 +2,5 @@ # Standardize backend due to random inconsistencies from matplotlib import pyplot pyplot.switch_backend('agg') -except: +except ImportError: pass diff --git a/holoviews/tests/core/data/base.py b/holoviews/tests/core/data/base.py index 932373fb20..2fae546cbd 100644 --- a/holoviews/tests/core/data/base.py +++ b/holoviews/tests/core/data/base.py @@ -18,7 +18,7 @@ import pandas as pd -class DatatypeContext(object): +class DatatypeContext: def __init__(self, datatypes, dataset_type=Dataset): self.datatypes = datatypes @@ -73,7 +73,7 @@ def init_data(self): -class HomogeneousColumnTests(object): +class HomogeneousColumnTests: """ Tests for data formats that require all dataset to have the same type (e.g. numpy arrays) @@ -863,7 +863,7 @@ def test_dataset_transform_add_ht(self): -class ScalarColumnTests(object): +class ScalarColumnTests: """ Tests for interfaces that allow on or more columns to be of scalar types. 
@@ -924,7 +924,7 @@ def test_dataset_scalar_iloc(self): -class GriddedInterfaceTests(object): +class GriddedInterfaceTests: """ Tests for the grid interfaces """ @@ -1107,12 +1107,12 @@ def test_dataset_dim_vals_grid_kdims_expanded_ys_inv(self): expanded_ys) def test_dataset_dim_vals_dimensions_match_shape(self): - self.assertEqual(len(set(self.dataset_grid.dimension_values(i, flat=False).shape - for i in range(3))), 1) + self.assertEqual(len({self.dataset_grid.dimension_values(i, flat=False).shape + for i in range(3)}), 1) def test_dataset_dim_vals_dimensions_match_shape_inv(self): - self.assertEqual(len(set(self.dataset_grid_inv.dimension_values(i, flat=False).shape - for i in range(3))), 1) + self.assertEqual(len({self.dataset_grid_inv.dimension_values(i, flat=False).shape + for i in range(3)}), 1) def test_dataset_dim_vals_grid_vdims_zs_flat(self): expanded_zs = np.array([0, 2, 4, 1, 3, 5]) diff --git a/holoviews/tests/core/data/test_binneddatasets.py b/holoviews/tests/core/data/test_binneddatasets.py index ea0fbcf792..6e8339c667 100644 --- a/holoviews/tests/core/data/test_binneddatasets.py +++ b/holoviews/tests/core/data/test_binneddatasets.py @@ -188,7 +188,7 @@ def test_construct_from_dict(self): def test_construct_from_xarray(self): try: import xarray as xr - except: + except ImportError: raise SkipTest("Test requires xarray") coords = OrderedDict([('lat', (('y', 'x'), self.ys)), ('lon', (('y', 'x'), self.xs))]) @@ -208,7 +208,7 @@ def test_construct_from_xarray(self): def test_construct_3d_from_xarray(self): try: import xarray as xr - except: + except ImportError: raise SkipTest("Test requires xarray") zs = np.arange(48).reshape(2, 4, 6) da = xr.DataArray(zs, dims=['z', 'y', 'x'], @@ -224,7 +224,7 @@ def test_construct_3d_from_xarray(self): def test_construct_from_xarray_with_invalid_irregular_coordinate_arrays(self): try: import xarray as xr - except: + except ImportError: raise SkipTest("Test requires xarray") zs = np.arange(48*6).reshape(2, 4, 6, 6) da 
= xr.DataArray(zs, dims=['z', 'y', 'x', 'b'], @@ -237,7 +237,7 @@ def test_construct_from_xarray_with_invalid_irregular_coordinate_arrays(self): def test_3d_xarray_with_constant_dim_canonicalized_to_2d(self): try: import xarray as xr - except: + except ImportError: raise SkipTest("Test requires xarray") zs = np.arange(24).reshape(1, 4, 6) # Construct DataArray with additional constant dimension @@ -253,7 +253,7 @@ def test_3d_xarray_with_constant_dim_canonicalized_to_2d(self): def test_groupby_3d_from_xarray(self): try: import xarray as xr - except: + except ImportError: raise SkipTest("Test requires xarray") zs = np.arange(48).reshape(2, 4, 6) da = xr.DataArray(zs, dims=['z', 'y', 'x'], diff --git a/holoviews/tests/core/data/test_cudfinterface.py b/holoviews/tests/core/data/test_cudfinterface.py index a8195856a2..5bf9bdd9e5 100644 --- a/holoviews/tests/core/data/test_cudfinterface.py +++ b/holoviews/tests/core/data/test_cudfinterface.py @@ -6,7 +6,7 @@ try: import cudf -except: +except ImportError: raise SkipTest("Could not import cuDF, skipping cuDFInterface tests.") from holoviews.core.data import Dataset diff --git a/holoviews/tests/core/data/test_daskinterface.py b/holoviews/tests/core/data/test_daskinterface.py index 75e6696dea..43ffa9c7e6 100644 --- a/holoviews/tests/core/data/test_daskinterface.py +++ b/holoviews/tests/core/data/test_daskinterface.py @@ -5,7 +5,7 @@ try: import dask.dataframe as dd -except: +except ImportError: raise SkipTest("Could not import dask, skipping DaskInterface tests.") from holoviews.core.data import Dataset diff --git a/holoviews/tests/core/data/test_dictinterface.py b/holoviews/tests/core/data/test_dictinterface.py index 002be888c3..5e647fc469 100644 --- a/holoviews/tests/core/data/test_dictinterface.py +++ b/holoviews/tests/core/data/test_dictinterface.py @@ -2,7 +2,6 @@ import numpy as np -from holoviews.core.dimension import OrderedDict as cyODict from holoviews.core.data import Dataset from .base import 
HeterogeneousColumnTests, ScalarColumnTests, InterfaceTests @@ -14,7 +13,7 @@ class DictDatasetTest(HeterogeneousColumnTests, ScalarColumnTests, InterfaceTest """ datatype = 'dictionary' - data_type = (OrderedDict, cyODict) + data_type = (OrderedDict,) __test__ = True diff --git a/holoviews/tests/core/data/test_ibisinterface.py b/holoviews/tests/core/data/test_ibisinterface.py index b5b192d464..5ae3ca03fa 100644 --- a/holoviews/tests/core/data/test_ibisinterface.py +++ b/holoviews/tests/core/data/test_ibisinterface.py @@ -6,7 +6,7 @@ try: import ibis from ibis import sqlite -except: +except ImportError: raise SkipTest("Could not import ibis, skipping IbisInterface tests.") import numpy as np diff --git a/holoviews/tests/core/data/test_multiinterface.py b/holoviews/tests/core/data/test_multiinterface.py index 29ce076fb4..9428742621 100644 --- a/holoviews/tests/core/data/test_multiinterface.py +++ b/holoviews/tests/core/data/test_multiinterface.py @@ -14,7 +14,7 @@ try: import dask.dataframe as dd -except: +except ImportError: dd = None diff --git a/holoviews/tests/core/data/test_spatialpandas.py b/holoviews/tests/core/data/test_spatialpandas.py index 09dd223d7d..aaa5c49ddb 100644 --- a/holoviews/tests/core/data/test_spatialpandas.py +++ b/holoviews/tests/core/data/test_spatialpandas.py @@ -11,12 +11,12 @@ MultiPolygonArray, LineDtype, PointDtype, PolygonDtype, MultiLineDtype, MultiPointDtype, MultiPolygonDtype ) -except Exception: +except ImportError: spatialpandas = None try: import dask.dataframe as dd -except Exception: +except ImportError: dd = None from holoviews.core.data import ( diff --git a/holoviews/tests/core/data/test_xarrayinterface.py b/holoviews/tests/core/data/test_xarrayinterface.py index 62dad0d396..1d2e70ddfa 100644 --- a/holoviews/tests/core/data/test_xarrayinterface.py +++ b/holoviews/tests/core/data/test_xarrayinterface.py @@ -8,7 +8,7 @@ try: import xarray as xr -except: +except ImportError: raise SkipTest("Could not import xarray, skipping 
XArrayInterface tests.") from holoviews.core.data import Dataset, concat @@ -295,7 +295,7 @@ class DaskXArrayInterfaceTest(XArrayInterfaceTests): def setUp(self): try: import dask.array # noqa - except: + except ImportError: raise SkipTest('Dask could not be imported, cannot test ' 'dask arrays with XArrayInterface') super().setUp() diff --git a/holoviews/tests/core/test_archives.py b/holoviews/tests/core/test_archives.py index 5f00d9b345..2e8b02bd7e 100644 --- a/holoviews/tests/core/test_archives.py +++ b/holoviews/tests/core/test_archives.py @@ -39,7 +39,7 @@ def test_filearchive_image_pickle(self): self.assertEqual(archive.listing(), filenames) archive.export() if not os.path.isdir(export_name): - raise AssertionError("No directory %r created on export." % export_name) + raise AssertionError(f"No directory {export_name!r} created on export.") self.assertEqual(sorted(filenames), sorted(os.listdir(export_name))) self.assertEqual(archive.listing(), []) @@ -54,9 +54,9 @@ def test_filearchive_image_pickle_zip(self): self.assertEqual(archive.listing(), filenames) archive.export() if not os.path.isfile(export_name+'.zip'): - raise AssertionError("No zip file %r created on export." % export_name) + raise AssertionError(f"No zip file {export_name!r} created on export.") - namelist = ['archive_image/%s' % f for f in filenames] + namelist = [f'archive_image/{f}' for f in filenames] with zipfile.ZipFile(export_name+'.zip', 'r') as f: self.assertEqual(sorted(namelist), sorted(f.namelist())) self.assertEqual(archive.listing(), []) @@ -73,9 +73,9 @@ def test_filearchive_image_pickle_tar(self): self.assertEqual(archive.listing(), filenames) archive.export() if not os.path.isfile(export_name+'.tar'): - raise AssertionError("No tar file %r created on export." 
% export_name) + raise AssertionError(f"No tar file {export_name!r} created on export.") - namelist = ['archive_image/%s' % f for f in filenames] + namelist = [f'archive_image/{f}' for f in filenames] with tarfile.TarFile(export_name+'.tar', 'r') as f: self.assertEqual(sorted(namelist), sorted([el.path for el in f.getmembers()])) @@ -92,7 +92,7 @@ def test_filearchive_image_serialize(self): self.assertEqual(archive.listing(), filenames) archive.export() if not os.path.isdir(export_name): - raise AssertionError("No directory %r created on export." % export_name) + raise AssertionError(f"No directory {export_name!r} created on export.") self.assertEqual(sorted(filenames), sorted(os.listdir(export_name))) self.assertEqual(archive.listing(), []) @@ -106,7 +106,7 @@ def test_filearchive_image_pickle_name_clash(self): self.assertEqual(archive.listing(), filenames) archive.export() if not os.path.isdir(export_name): - raise AssertionError("No directory %r created on export." % export_name) + raise AssertionError(f"No directory {export_name!r} created on export.") self.assertEqual(sorted(filenames), sorted(os.listdir(export_name))) self.assertEqual(archive.listing(), []) @@ -119,10 +119,10 @@ def test_filearchive_json_single_file(self): self.assertEqual(len(archive), 1) self.assertEqual(archive.listing(), ['metadata.json']) archive.export() - fname = '%s_%s' % (export_name, 'metadata.json') + fname = f"{export_name}_metadata.json" if not os.path.isfile(fname): - raise AssertionError("No file %r created on export." 
% fname) - self.assertEqual(json.load(open(fname, 'r')), data) + raise AssertionError(f"No file {fname!r} created on export.") + self.assertEqual(json.load(open(fname)), data) self.assertEqual(archive.listing(), []) """ diff --git a/holoviews/tests/core/test_callable.py b/holoviews/tests/core/test_callable.py index 744d7fed31..e35403bbdb 100644 --- a/holoviews/tests/core/test_callable.py +++ b/holoviews/tests/core/test_callable.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Unit tests of the Callable object that wraps user callbacks. Also test how DynamicMap validates and invokes Callable based on its signature. @@ -16,7 +15,7 @@ from ..utils import LoggingComparisonTestCase -class CallableClass(object): +class CallableClass: @staticmethod def somestaticmethod(): pass @@ -52,7 +51,7 @@ def test_partial_name(self): self.assertEqual(cb.name.startswith('functools.partial('), True) def test_generator_expression_name(self): - cb = Generator((i for i in range(10))) + cb = Generator(i for i in range(10)) self.assertEqual(cb.name, '') def test_generator_name(self): diff --git a/holoviews/tests/core/test_datasetproperty.py b/holoviews/tests/core/test_datasetproperty.py index 1c6324a75f..6ce698feab 100644 --- a/holoviews/tests/core/test_datasetproperty.py +++ b/holoviews/tests/core/test_datasetproperty.py @@ -5,7 +5,7 @@ try: import dask.dataframe as dd -except: +except ImportError: dd = None from holoviews import Dataset, Curve, Dimension, Scatter, Distribution @@ -15,7 +15,7 @@ try: from holoviews.operation.datashader import dynspread, datashade, rasterize -except: +except ImportError: dynspread = datashade = rasterize = None diff --git a/holoviews/tests/core/test_decollation.py b/holoviews/tests/core/test_decollation.py index 460e832152..9cd02c9c1e 100644 --- a/holoviews/tests/core/test_decollation.py +++ b/holoviews/tests/core/test_decollation.py @@ -9,7 +9,7 @@ try: from holoviews.operation.datashader import spread, datashade -except: +except ImportError: spread = 
datashade = None datashade_skip = skipIf(datashade is None, "datashade is not available") diff --git a/holoviews/tests/core/test_dimensioned.py b/holoviews/tests/core/test_dimensioned.py index 9ae712e483..dbd4743bc9 100644 --- a/holoviews/tests/core/test_dimensioned.py +++ b/holoviews/tests/core/test_dimensioned.py @@ -10,7 +10,7 @@ class ExampleElement(Element): pass -class MockRenderer(object): +class MockRenderer: def __init__(self, backend): self.backend = backend diff --git a/holoviews/tests/core/test_dynamic.py b/holoviews/tests/core/test_dynamic.py index 26eed93022..815158a0c7 100644 --- a/holoviews/tests/core/test_dynamic.py +++ b/holoviews/tests/core/test_dynamic.py @@ -965,7 +965,7 @@ def hashkey(self): return {'hash': uuid.uuid4().hex} class TestPeriodicStreamUpdate(ComparisonTestCase): def test_periodic_counter_blocking(self): - class Counter(object): + class Counter: def __init__(self): self.count = 0 def __call__(self): diff --git a/holoviews/tests/core/test_layouts.py b/holoviews/tests/core/test_layouts.py index 287258775f..a08491d0fd 100644 --- a/holoviews/tests/core/test_layouts.py +++ b/holoviews/tests/core/test_layouts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Tests of Layout and related classes """ diff --git a/holoviews/tests/core/test_options.py b/holoviews/tests/core/test_options.py index 26a5d9c9b0..b5b0dd5376 100644 --- a/holoviews/tests/core/test_options.py +++ b/holoviews/tests/core/test_options.py @@ -815,7 +815,7 @@ def tearDown(self): if self.plotly_options is not None: Store._options['plotly'] = self.plotly_options - super(TestCrossBackendOptions, self).tearDown() + super().tearDown() def test_mpl_bokeh_mpl(self): @@ -1149,7 +1149,7 @@ def tearDown(self): for f in self.cleanup: try: os.remove(f) - except: + except Exception: pass def test_raw_pickle(self): diff --git a/holoviews/tests/core/test_utils.py b/holoviews/tests/core/test_utils.py index 55464e7bbf..178aea4f27 100644 --- a/holoviews/tests/core/test_utils.py +++ 
b/holoviews/tests/core/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Unit tests of the helper functions in core.utils """ @@ -39,10 +38,10 @@ def test_deephash_list_inequality(self): self.assertNotEqual(deephash(obj1), deephash(obj2)) def test_deephash_set_equality(self): - self.assertEqual(deephash(set([1,2,3])), deephash(set([1,3,2]))) + self.assertEqual(deephash({1,2,3}), deephash({1,3,2})) def test_deephash_set_inequality(self): - self.assertNotEqual(deephash(set([1,2,3])), deephash(set([1,3,4]))) + self.assertNotEqual(deephash({1,2,3}), deephash({1,3,4})) def test_deephash_dict_equality_v1(self): self.assertEqual(deephash({1:'a',2:'b'}), deephash({2:'b', 1:'a'})) @@ -128,22 +127,22 @@ def test_deephash_nested_native_inequality(self): self.assertNotEqual(deephash(obj1), deephash(obj2)) def test_deephash_nested_mixed_equality(self): - obj1 = [datetime.datetime(1,2,3), set([1,2,3]), + obj1 = [datetime.datetime(1,2,3), {1,2,3}, pd.DataFrame({'a':[1,2],'b':[3,4]}), np.array([1,2,3]), {'a':'b', '1':True}, OrderedDict([(1,'a'),(2,'b')]), np.int64(34)] - obj2 = [datetime.datetime(1,2,3), set([1,2,3]), + obj2 = [datetime.datetime(1,2,3), {1,2,3}, pd.DataFrame({'a':[1,2],'b':[3,4]}), np.array([1,2,3]), {'a':'b', '1':True}, OrderedDict([(1,'a'),(2,'b')]), np.int64(34)] self.assertEqual(deephash(obj1), deephash(obj2)) def test_deephash_nested_mixed_inequality(self): - obj1 = [datetime.datetime(1,2,3), set([1,2,3]), + obj1 = [datetime.datetime(1,2,3), {1,2,3}, pd.DataFrame({'a':[1,2],'b':[3,4]}), np.array([1,2,3]), {'a':'b', '2':True}, OrderedDict([(1,'a'),(2,'b')]), np.int64(34)] - obj2 = [datetime.datetime(1,2,3), set([1,2,3]), + obj2 = [datetime.datetime(1,2,3), {1,2,3}, pd.DataFrame({'a':[1,2],'b':[3,4]}), np.array([1,2,3]), {'a':'b', '1':True}, OrderedDict([(1,'a'),(2,'b')]), np.int64(34)] diff --git a/holoviews/tests/element/test_apiconsistency.py b/holoviews/tests/element/test_apiconsistency.py index 63bce83ed1..497bdfc061 100644 --- 
a/holoviews/tests/element/test_apiconsistency.py +++ b/holoviews/tests/element/test_apiconsistency.py @@ -12,7 +12,7 @@ def test_element_group_parameter_declared_constant(self): for element_name in all_elements: el = getattr(element, element_name) self.assertEqual(el.param['group'].constant, True, - msg='Group parameter of element %s not constant' % element_name) + msg=f'Group parameter of element {element_name} not constant') def test_element_label_parameter_declared_constant(self): """ @@ -21,4 +21,4 @@ def test_element_label_parameter_declared_constant(self): for element_name in all_elements: el = getattr(element, element_name) self.assertEqual(el.param['label'].constant, True, - msg='Label parameter of element %s not constant' % element_name) + msg=f'Label parameter of element {element_name} not constant') diff --git a/holoviews/tests/element/test_comparisonchart.py b/holoviews/tests/element/test_comparisonchart.py index 64304ec4b9..f4785e1267 100644 --- a/holoviews/tests/element/test_comparisonchart.py +++ b/holoviews/tests/element/test_comparisonchart.py @@ -56,7 +56,7 @@ def test_bars_unequal_1(self): self.assertEqual(self.bars1, self.bars2) except AssertionError as e: if not 'not almost equal' in str(e): - raise Exception('Bars mismatched data error not raised. %s' % e) + raise Exception(f'Bars mismatched data error not raised. 
{e}') def test_bars_unequal_keydims(self): try: diff --git a/holoviews/tests/element/test_elementconstructors.py b/holoviews/tests/element/test_elementconstructors.py index 12cf362671..9da202ad9a 100644 --- a/holoviews/tests/element/test_elementconstructors.py +++ b/holoviews/tests/element/test_elementconstructors.py @@ -36,7 +36,7 @@ def test_empty_element_constructor(self): continue try: el([]) - except: + except Exception: failed_elements.append(name) self.assertEqual(failed_elements, []) diff --git a/holoviews/tests/element/test_graphelement.py b/holoviews/tests/element/test_graphelement.py index 4fd19ecf3c..222f03230b 100644 --- a/holoviews/tests/element/test_graphelement.py +++ b/holoviews/tests/element/test_graphelement.py @@ -9,10 +9,10 @@ from holoviews.core.data import Dataset from holoviews.element.chart import Points from holoviews.element.graphs import ( - Graph, Nodes, TriMesh, Chord, circular_layout, connect_edges, - connect_edges_pd) + Graph, Nodes, TriMesh, Chord) from holoviews.element.sankey import Sankey from holoviews.element.comparison import ComparisonTestCase +from holoviews.element.util import connect_edges, connect_edges_pd, circular_layout class GraphTests(ComparisonTestCase): @@ -134,7 +134,7 @@ class FromNetworkXTests(ComparisonTestCase): def setUp(self): try: import networkx as nx # noqa - except: + except ImportError: raise SkipTest('Test requires networkx to be installed') def test_from_networkx_with_node_attrs(self): diff --git a/holoviews/tests/element/test_selection.py b/holoviews/tests/element/test_selection.py index c2242e7767..31e030a8b3 100644 --- a/holoviews/tests/element/test_selection.py +++ b/holoviews/tests/element/test_selection.py @@ -20,22 +20,22 @@ try: import datashader as ds -except: +except ImportError: ds = None try: import spatialpandas as spd -except: +except ImportError: spd = None try: import shapely -except: +except ImportError: shapely = None try: import dask.dataframe as dd -except: +except ImportError: dd 
= None spd_available = skipIf(spd is None, "spatialpandas is not available") diff --git a/holoviews/tests/element/test_statselements.py b/holoviews/tests/element/test_statselements.py index 754c88eb5f..3232cab861 100644 --- a/holoviews/tests/element/test_statselements.py +++ b/holoviews/tests/element/test_statselements.py @@ -112,7 +112,7 @@ class StatisticalCompositorTest(ComparisonTestCase): def setUp(self): try: import scipy # noqa - except: + except ImportError: raise SkipTest('SciPy not available') self.renderer = hv.renderer('matplotlib') np.random.seed(42) diff --git a/holoviews/tests/ipython/test_notebooks.py b/holoviews/tests/ipython/test_notebooks.py index bdb0691e77..fe31f6dbda 100644 --- a/holoviews/tests/ipython/test_notebooks.py +++ b/holoviews/tests/ipython/test_notebooks.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Unit tests relating to notebook processing """ diff --git a/holoviews/tests/operation/test_datashader.py b/holoviews/tests/operation/test_datashader.py index 5281d90b30..5da5e0ea12 100644 --- a/holoviews/tests/operation/test_datashader.py +++ b/holoviews/tests/operation/test_datashader.py @@ -14,28 +14,29 @@ from holoviews.streams import Tap from holoviews.element.comparison import ComparisonTestCase from numpy import nan +from packaging.version import Version try: import datashader as ds import dask.dataframe as dd import xarray as xr from holoviews.operation.datashader import ( - LooseVersion, aggregate, regrid, ds_version, stack, directly_connect_edges, + aggregate, regrid, ds_version, stack, directly_connect_edges, shade, spread, rasterize, datashade, AggregationOperation, inspect, inspect_points, inspect_polygons ) -except: +except ImportError: raise SkipTest('Datashader not available') try: import cudf import cupy -except: +except ImportError: cudf = None try: import spatialpandas -except: +except ImportError: spatialpandas = None spatialpandas_skip = skipIf(spatialpandas is None, "SpatialPandas not available") @@ -721,7 +722,7 
@@ def test_multi_poly_rasterize(self): class DatashaderCatAggregateTests(ComparisonTestCase): def setUp(self): - if ds_version < LooseVersion('0.11.0'): + if ds_version < Version('0.11.0'): raise SkipTest('Regridding operations require datashader>=0.11.0') def test_aggregate_points_categorical(self): @@ -807,7 +808,7 @@ class DatashaderRegridTests(ComparisonTestCase): """ def setUp(self): - if ds_version <= LooseVersion('0.5.0'): + if ds_version <= Version('0.5.0'): raise SkipTest('Regridding operations require datashader>=0.6.0') def test_regrid_mean(self): @@ -885,7 +886,7 @@ class DatashaderRasterizeTests(ComparisonTestCase): """ def setUp(self): - if ds_version <= LooseVersion('0.6.4'): + if ds_version <= Version('0.6.4'): raise SkipTest('Regridding operations require datashader>=0.7.0') self.simplexes = [(0, 1, 2), (3, 2, 1)] @@ -1164,7 +1165,7 @@ def test_spread_rgb_1px(self): self.assertEqual(spreaded, RGB(arr)) def test_spread_img_1px(self): - if ds_version < LooseVersion('0.12.0'): + if ds_version < Version('0.12.0'): raise SkipTest('Datashader does not support DataArray yet') arr = np.array([[0, 0, 0], [0, 0, 0], [1, 1, 1]]).T spreaded = spread(Image(arr)) @@ -1211,7 +1212,7 @@ def test_stack_saturate_compositor_reverse(self): class GraphBundlingTests(ComparisonTestCase): def setUp(self): - if ds_version <= LooseVersion('0.7.0'): + if ds_version <= Version('0.7.0'): raise SkipTest('Regridding operations require datashader>=0.7.0') self.source = np.arange(8) self.target = np.zeros(8) diff --git a/holoviews/tests/operation/test_operation.py b/holoviews/tests/operation/test_operation.py index d97854cdbe..0b1101dc3d 100644 --- a/holoviews/tests/operation/test_operation.py +++ b/holoviews/tests/operation/test_operation.py @@ -6,7 +6,7 @@ try: import dask.array as da -except: +except ImportError: da = None from holoviews import (HoloMap, NdOverlay, NdLayout, GridSpace, Image, diff --git a/holoviews/tests/operation/test_statsoperations.py 
b/holoviews/tests/operation/test_statsoperations.py index 0237917938..525aa6af45 100644 --- a/holoviews/tests/operation/test_statsoperations.py +++ b/holoviews/tests/operation/test_statsoperations.py @@ -2,7 +2,7 @@ try: import scipy # noqa -except: +except ImportError: raise SkipTest('SciPy not available') import numpy as np diff --git a/holoviews/tests/operation/test_timeseriesoperations.py b/holoviews/tests/operation/test_timeseriesoperations.py index 7e00086071..0a245aba6e 100644 --- a/holoviews/tests/operation/test_timeseriesoperations.py +++ b/holoviews/tests/operation/test_timeseriesoperations.py @@ -4,7 +4,7 @@ try: import scipy # noqa -except: +except ImportError: scipy = None scipy_skip = skipIf(scipy is None, "SciPy is not available.") diff --git a/holoviews/tests/plotting/bokeh/test_elementplot.py b/holoviews/tests/plotting/bokeh/test_elementplot.py index b1cf64e9df..88a30d30b2 100644 --- a/holoviews/tests/plotting/bokeh/test_elementplot.py +++ b/holoviews/tests/plotting/bokeh/test_elementplot.py @@ -21,9 +21,7 @@ from bokeh.models import tools from bokeh.models import (FuncTickFormatter, PrintfTickFormatter, NumeralTickFormatter, LogTicker, - LinearColorMapper, LogColorMapper) -from holoviews.plotting.bokeh.util import LooseVersion, bokeh_version - + LinearColorMapper, LogColorMapper, EqHistColorMapper) class TestElementPlot(LoggingComparisonTestCase, TestBokehPlot): @@ -38,20 +36,11 @@ def test_element_font_scaling(self): fig = plot.state xaxis = plot.handles['xaxis'] yaxis = plot.handles['yaxis'] - if bokeh_version > LooseVersion('2.2.3'): - self.assertEqual(fig.title.text_font_size, '24pt') - else: - self.assertEqual(fig.title.text_font_size, {'value': '24pt'}) - if bokeh_version < LooseVersion('2.0.2'): - self.assertEqual(xaxis.axis_label_text_font_size, '20pt') - self.assertEqual(yaxis.axis_label_text_font_size, '20pt') - self.assertEqual(xaxis.major_label_text_font_size, '16pt') - self.assertEqual(yaxis.major_label_text_font_size, '16pt') - else: 
- self.assertEqual(xaxis.axis_label_text_font_size, '26px') - self.assertEqual(yaxis.axis_label_text_font_size, '26px') - self.assertEqual(xaxis.major_label_text_font_size, '22px') - self.assertEqual(yaxis.major_label_text_font_size, '22px') + self.assertEqual(fig.title.text_font_size, '24pt') + self.assertEqual(xaxis.axis_label_text_font_size, '26px') + self.assertEqual(yaxis.axis_label_text_font_size, '26px') + self.assertEqual(xaxis.major_label_text_font_size, '22px') + self.assertEqual(yaxis.major_label_text_font_size, '22px') def test_element_font_scaling_fontsize_override_common(self): curve = Curve(range(10)).opts(fontscale=2, fontsize='14pt', title='A title') @@ -59,18 +48,11 @@ def test_element_font_scaling_fontsize_override_common(self): fig = plot.state xaxis = plot.handles['xaxis'] yaxis = plot.handles['yaxis'] - if bokeh_version > LooseVersion('2.2.3'): - self.assertEqual(fig.title.text_font_size, '28pt') - else: - self.assertEqual(fig.title.text_font_size, {'value': '28pt'}) + self.assertEqual(fig.title.text_font_size, '28pt') self.assertEqual(xaxis.axis_label_text_font_size, '28pt') self.assertEqual(yaxis.axis_label_text_font_size, '28pt') - if bokeh_version < LooseVersion('2.0.2'): - self.assertEqual(xaxis.major_label_text_font_size, '16pt') - self.assertEqual(yaxis.major_label_text_font_size, '16pt') - else: - self.assertEqual(xaxis.major_label_text_font_size, '22px') - self.assertEqual(yaxis.major_label_text_font_size, '22px') + self.assertEqual(xaxis.major_label_text_font_size, '22px') + self.assertEqual(yaxis.major_label_text_font_size, '22px') def test_element_font_scaling_fontsize_override_specific(self): curve = Curve(range(10)).opts( @@ -80,18 +62,11 @@ def test_element_font_scaling_fontsize_override_specific(self): fig = plot.state xaxis = plot.handles['xaxis'] yaxis = plot.handles['yaxis'] - if bokeh_version > LooseVersion('2.2.3'): - self.assertEqual(fig.title.text_font_size, '200%') - else: - self.assertEqual(fig.title.text_font_size, 
{'value': '200%'}) + self.assertEqual(fig.title.text_font_size, '200%') self.assertEqual(xaxis.axis_label_text_font_size, '24pt') self.assertEqual(xaxis.major_label_text_font_size, '2.4em') - if bokeh_version < LooseVersion('2.0.2'): - self.assertEqual(yaxis.axis_label_text_font_size, '20pt') - self.assertEqual(yaxis.major_label_text_font_size, '16pt') - else: - self.assertEqual(yaxis.axis_label_text_font_size, '26px') - self.assertEqual(yaxis.major_label_text_font_size, '22px') + self.assertEqual(yaxis.axis_label_text_font_size, '26px') + self.assertEqual(yaxis.major_label_text_font_size, '22px') def test_element_xaxis_top(self): curve = Curve(range(10)).opts(xaxis='top') @@ -210,7 +185,7 @@ def test_element_yformatter_string(self): def test_element_xformatter_function(self): try: import pscript # noqa - except: + except ImportError: raise SkipTest('Test requires pscript') def formatter(value): return str(value) + ' %' @@ -222,7 +197,7 @@ def formatter(value): def test_element_yformatter_function(self): try: import pscript # noqa - except: + except ImportError: raise SkipTest('Test requires pscript') def formatter(value): return str(value) + ' %' @@ -306,7 +281,7 @@ def get_img(test): global data data *= test return img - stream = Stream.define(str('Test'), test=1)() + stream = Stream.define('Test', test=1)() dmap = DynamicMap(get_img, streams=[stream]) plot = bokeh_renderer.get_plot(dmap, doc=Document()) source = plot.handles['source'] @@ -317,7 +292,7 @@ def get_img(test): self.assertNotIn(source, plot.current_handles) def test_stream_cleanup(self): - stream = Stream.define(str('Test'), test=1)() + stream = Stream.define('Test', test=1)() dmap = DynamicMap(lambda test: Curve([]), streams=[stream]) plot = bokeh_renderer.get_plot(dmap) self.assertTrue(bool(stream._subscribers)) @@ -327,10 +302,10 @@ def test_stream_cleanup(self): def test_element_formatter_xaxis(self): try: import pscript # noqa - except: + except ImportError: raise SkipTest('Test requires 
pscript') def formatter(x): - return '%s' % x + return f'{x}' curve = Curve(range(10), kdims=[Dimension('x', value_format=formatter)]) plot = bokeh_renderer.get_plot(curve).state self.assertIsInstance(plot.xaxis[0].formatter, FuncTickFormatter) @@ -338,10 +313,10 @@ def formatter(x): def test_element_formatter_yaxis(self): try: import pscript # noqa - except: + except ImportError: raise SkipTest('Test requires pscript') def formatter(x): - return '%s' % x + return f'{x}' curve = Curve(range(10), vdims=[Dimension('y', value_format=formatter)]) plot = bokeh_renderer.get_plot(curve).state self.assertIsInstance(plot.yaxis[0].formatter, FuncTickFormatter) @@ -420,7 +395,7 @@ def test_categorical_axis_fontsize_both(self): def test_cftime_transform_gregorian_no_warn(self): try: import cftime - except: + except ImportError: raise SkipTest('Test requires cftime library') gregorian_dates = [cftime.DatetimeGregorian(2000, 2, 28), cftime.DatetimeGregorian(2000, 3, 1), @@ -434,7 +409,7 @@ def test_cftime_transform_gregorian_no_warn(self): def test_cftime_transform_noleap_warn(self): try: import cftime - except: + except ImportError: raise SkipTest('Test requires cftime library') gregorian_dates = [cftime.DatetimeNoLeap(2000, 2, 28), cftime.DatetimeNoLeap(2000, 3, 1), @@ -859,10 +834,6 @@ def test_colormapper_cnorm_log(self): self.assertTrue(cmapper, LogColorMapper) def test_colormapper_cnorm_eqhist(self): - try: - from bokeh.models import EqHistColorMapper - except: - raise SkipTest("Option cnorm='eq_hist' requires EqHistColorMapper") img = Image(np.array([[0, 1], [2, 3]])).opts(cnorm='eq_hist') plot = bokeh_renderer.get_plot(img) cmapper = plot.handles['color_mapper'] @@ -887,12 +858,8 @@ def test_colorbar_fontsize_scaling(self): img = Image(np.array([[0, 1], [2, 3]])).opts(colorbar=True, fontscale=2) plot = bokeh_renderer.get_plot(img) colorbar = plot.handles['colorbar'] - if bokeh_version < LooseVersion('2.0.2'): - self.assertEqual(colorbar.title_text_font_size, '20pt') - 
self.assertEqual(colorbar.major_label_text_font_size, '16pt') - else: - self.assertEqual(colorbar.title_text_font_size, '26px') - self.assertEqual(colorbar.major_label_text_font_size, '22px') + self.assertEqual(colorbar.title_text_font_size, '26px') + self.assertEqual(colorbar.major_label_text_font_size, '22px') def test_explicit_categorical_cmap_on_integer_data(self): explicit_mapping = OrderedDict([(0, 'blue'), (1, 'red'), (2, 'green'), (3, 'purple')]) diff --git a/holoviews/tests/plotting/bokeh/test_gridplot.py b/holoviews/tests/plotting/bokeh/test_gridplot.py index 3be78463d3..6c20060b5d 100644 --- a/holoviews/tests/plotting/bokeh/test_gridplot.py +++ b/holoviews/tests/plotting/bokeh/test_gridplot.py @@ -10,8 +10,6 @@ from bokeh.layouts import Column from bokeh.models import Div, ToolbarBox -from holoviews.plotting.bokeh.util import LooseVersion, bokeh_version - class TestGridPlot(TestBokehPlot): @@ -63,10 +61,7 @@ def test_gridspace_sparse(self): for j in range(2,4) if not (i==1 and j == 2)}) plot = bokeh_renderer.get_plot(grid) size = bokeh_renderer.get_size(plot.state) - if bokeh_version < LooseVersion('2.0.2'): - self.assertEqual(size, (318, 310)) - else: - self.assertEqual(size, (320, 311)) + self.assertEqual(size, (320, 311)) def test_grid_shared_source_synced_update(self): hmap = HoloMap({i: Dataset({chr(65+j): np.random.rand(i+2) diff --git a/holoviews/tests/plotting/bokeh/test_labels.py b/holoviews/tests/plotting/bokeh/test_labels.py index bbdbc4bb36..7dfdae686c 100644 --- a/holoviews/tests/plotting/bokeh/test_labels.py +++ b/holoviews/tests/plotting/bokeh/test_labels.py @@ -37,7 +37,7 @@ def test_labels_empty(self): self.assertEqual(glyph.text, 'Label') def test_labels_formatter(self): - vdim = Dimension('text', value_format=lambda x: '%.1f' % x) + vdim = Dimension('text', value_format=lambda x: f'{x:.1f}') labels = Labels([(0, 1, 0.33333), (1, 0, 0.66666)], vdims=vdim) plot = bokeh_renderer.get_plot(labels) source = plot.handles['source'] diff --git 
a/holoviews/tests/plotting/bokeh/test_layoutplot.py b/holoviews/tests/plotting/bokeh/test_layoutplot.py index 42f73e8eb8..83cdb5c749 100644 --- a/holoviews/tests/plotting/bokeh/test_layoutplot.py +++ b/holoviews/tests/plotting/bokeh/test_layoutplot.py @@ -25,7 +25,7 @@ def test_layout_update_visible(self): hmap = HoloMap({i: Curve(np.arange(i), label='A') for i in range(1, 3)}) hmap2 = HoloMap({i: Curve(np.arange(i), label='B') for i in range(3, 5)}) plot = bokeh_renderer.get_plot(hmap+hmap2) - subplot1, subplot2 = [p for k, p in sorted(plot.subplots.items())] + subplot1, subplot2 = (p for k, p in sorted(plot.subplots.items())) subplot1 = subplot1.subplots['main'] subplot2 = subplot2.subplots['main'] self.assertTrue(subplot1.handles['glyph_renderer'].visible) diff --git a/holoviews/tests/plotting/bokeh/test_links.py b/holoviews/tests/plotting/bokeh/test_links.py index c9286c7ca0..7e442fb2db 100644 --- a/holoviews/tests/plotting/bokeh/test_links.py +++ b/holoviews/tests/plotting/bokeh/test_links.py @@ -125,5 +125,5 @@ def test_data_link_nan(self): DataLink(a, b) try: bokeh_renderer.get_plot(a+b) - except: + except Exception: self.fail() diff --git a/holoviews/tests/plotting/bokeh/test_pathplot.py b/holoviews/tests/plotting/bokeh/test_pathplot.py index 05791d9106..c9770f536f 100644 --- a/holoviews/tests/plotting/bokeh/test_pathplot.py +++ b/holoviews/tests/plotting/bokeh/test_pathplot.py @@ -1,6 +1,7 @@ import datetime as dt import numpy as np +import pandas as pd from holoviews.core import NdOverlay, HoloMap from holoviews.core.options import Cycle @@ -421,7 +422,6 @@ def test_contours_linear_color_op(self): self.assertEqual(cmapper.high, 7) def test_contours_empty_path(self): - import pandas as pd contours = Contours([ pd.DataFrame([], columns=['x', 'y', 'color', 'line_width']), pd.DataFrame({'x': np.random.rand(10), 'y': np.random.rand(10), diff --git a/holoviews/tests/plotting/bokeh/test_radialheatmap.py b/holoviews/tests/plotting/bokeh/test_radialheatmap.py 
index 2946cbce6f..63a9080a00 100644 --- a/holoviews/tests/plotting/bokeh/test_radialheatmap.py +++ b/holoviews/tests/plotting/bokeh/test_radialheatmap.py @@ -16,8 +16,8 @@ class BokehRadialHeatMapPlotTests(TestBokehPlot): def setUp(self): super().setUp() # set up dummy data for convenient tests - x = ["Seg {}".format(idx) for idx in range(2)] - y = ["Ann {}".format(idx) for idx in range(2)] + x = [f"Seg {idx}" for idx in range(2)] + y = [f"Ann {idx}" for idx in range(2)] self.z = list(range(4)) self.x, self.y = zip(*product(x, y)) @@ -155,7 +155,7 @@ def test_get_default_mapping(self): glyphs = self.plot._style_groups.keys() glyphs_mapped = self.plot.get_default_mapping(None, None).keys() - glyphs_plain = set([x[:-2] for x in glyphs_mapped]) + glyphs_plain = {x[:-2] for x in glyphs_mapped} self.assertTrue(all([x in glyphs_plain for x in glyphs])) @@ -259,7 +259,7 @@ def test_get_data(self): for check in [data, mapping]: glyphs_mapped = check.keys() - glyphs_plain = set([x[:-2] for x in glyphs_mapped]) + glyphs_plain = {x[:-2] for x in glyphs_mapped} self.assertTrue(all([x in glyphs_plain for x in glyphs])) def test_plot_data_source(self): diff --git a/holoviews/tests/plotting/bokeh/test_renderer.py b/holoviews/tests/plotting/bokeh/test_renderer.py index 1a24355258..23bfc7e323 100644 --- a/holoviews/tests/plotting/bokeh/test_renderer.py +++ b/holoviews/tests/plotting/bokeh/test_renderer.py @@ -15,12 +15,10 @@ from bokeh.io import curdoc from holoviews.plotting.bokeh import BokehRenderer -from holoviews.plotting.bokeh.util import LooseVersion, bokeh_version from bokeh.themes.theme import Theme from panel.widgets import DiscreteSlider, Player, FloatSlider - class BokehRendererTest(ComparisonTestCase): def setUp(self): @@ -67,10 +65,7 @@ def test_get_size_grid_plot(self): grid = GridSpace({(i, j): self.image1 for i in range(3) for j in range(3)}) plot = self.renderer.get_plot(grid) w, h = self.renderer.get_size(plot) - if bokeh_version < LooseVersion('2.0.2'): - 
self.assertEqual((w, h), (444, 436)) - else: - self.assertEqual((w, h), (446, 437)) + self.assertEqual((w, h), (446, 437)) def test_get_size_table(self): table = Table(range(10), kdims=['x']) @@ -180,7 +175,7 @@ def test_render_dynamicmap_with_dims(self): self.assertEqual(cds.data['y'][2], 3.1) def test_render_dynamicmap_with_stream(self): - stream = Stream.define(str('Custom'), y=2)() + stream = Stream.define('Custom', y=2)() dmap = DynamicMap(lambda y: Curve([1, 2, y]), kdims=['y'], streams=[stream]) obj, _ = self.renderer._validate(dmap, None) self.renderer.components(obj) @@ -192,7 +187,7 @@ def test_render_dynamicmap_with_stream(self): self.assertEqual(cds.data['y'][2], 3) def test_render_dynamicmap_with_stream_dims(self): - stream = Stream.define(str('Custom'), y=2)() + stream = Stream.define('Custom', y=2)() dmap = DynamicMap(lambda x, y: Curve([x, 1, y]), kdims=['x', 'y'], streams=[stream]).redim.values(x=[1, 2, 3]) obj, _ = self.renderer._validate(dmap, None) diff --git a/holoviews/tests/plotting/bokeh/test_vectorfieldplot.py b/holoviews/tests/plotting/bokeh/test_vectorfieldplot.py index 94c5754f32..eb6eafd0da 100644 --- a/holoviews/tests/plotting/bokeh/test_vectorfieldplot.py +++ b/holoviews/tests/plotting/bokeh/test_vectorfieldplot.py @@ -7,7 +7,7 @@ try: from bokeh.models import LinearColorMapper, CategoricalColorMapper -except: +except ImportError: pass diff --git a/holoviews/tests/plotting/bokeh/test_violinplot.py b/holoviews/tests/plotting/bokeh/test_violinplot.py index 1038457cc6..c84da7b420 100644 --- a/holoviews/tests/plotting/bokeh/test_violinplot.py +++ b/holoviews/tests/plotting/bokeh/test_violinplot.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from unittest import SkipTest import numpy as np @@ -18,7 +16,7 @@ class TestBokehViolinPlot(TestBokehPlot): def setUp(self): try: import scipy # noqa - except: + except ImportError: raise SkipTest('Violin plot requires SciPy to compute kde') super().setUp() @@ -71,7 +69,7 @@ def 
test_violin_inner_quartiles(self): self.assertIn('segment_1_glyph_renderer', plot.handles) seg_source = plot.handles['segment_1_source'] q1, q2, q3 = (np.percentile(values, q=q) for q in range(25,100,25)) - y0, y1, y2 = [xs[np.argmin(np.abs(xs-v))] for v in (q1, q2, q3)] + y0, y1, y2 = (xs[np.argmin(np.abs(xs-v))] for v in (q1, q2, q3)) self.assertEqual(seg_source.data['x'], np.array([y0, y1, y2])) def test_violin_inner_stick(self): diff --git a/holoviews/tests/plotting/matplotlib/test_elementplot.py b/holoviews/tests/plotting/matplotlib/test_elementplot.py index c96195b7dd..f9122d0118 100644 --- a/holoviews/tests/plotting/matplotlib/test_elementplot.py +++ b/holoviews/tests/plotting/matplotlib/test_elementplot.py @@ -14,7 +14,7 @@ class TestElementPlot(TestMPLPlot): def test_stream_cleanup(self): - stream = Stream.define(str('Test'), test=1)() + stream = Stream.define('Test', test=1)() dmap = DynamicMap(lambda test: Curve([]), streams=[stream]) plot = mpl_renderer.get_plot(dmap) self.assertTrue(bool(stream._subscribers)) diff --git a/holoviews/tests/plotting/matplotlib/test_graphplot.py b/holoviews/tests/plotting/matplotlib/test_graphplot.py index 06da5cf61c..11cd13fce7 100644 --- a/holoviews/tests/plotting/matplotlib/test_graphplot.py +++ b/holoviews/tests/plotting/matplotlib/test_graphplot.py @@ -3,10 +3,10 @@ from holoviews.core.data import Dataset from holoviews.core.options import Cycle from holoviews.core.spaces import HoloMap -from holoviews.core.util import LooseVersion from holoviews.element import Graph, Nodes, TriMesh, Chord, circular_layout from holoviews.util.transform import dim from matplotlib.collections import LineCollection, PolyCollection +from packaging.version import Version from .test_plot import TestMPLPlot, mpl_renderer @@ -202,7 +202,7 @@ def test_graph_op_node_alpha(self): nodes = Nodes([(0, 0, 0, 0.2), (0, 1, 1, 0.6), (1, 1, 2, 1)], vdims='alpha') graph = Graph((edges, nodes)).opts(node_alpha='alpha') - if LooseVersion(mpl.__version__) < 
LooseVersion("3.4.0"): + if Version(mpl.__version__) < Version("3.4.0"): # Python 3.6 only support up to matplotlib 3.3 with self.assertRaises(Exception): mpl_renderer.get_plot(graph) @@ -395,7 +395,7 @@ def test_trimesh_op_node_alpha(self): nodes = [(-1, -1, 0, 0.2), (0, 0, 1, 0.6), (0, 1, 2, 1), (1, 0, 3, 0.3)] trimesh = TriMesh((edges, Nodes(nodes, vdims='alpha'))).opts(node_alpha='alpha') - if LooseVersion(mpl.__version__) < LooseVersion("3.4.0"): + if Version(mpl.__version__) < Version("3.4.0"): # Python 3.6 only support up to matplotlib 3.3 with self.assertRaises(Exception): mpl_renderer.get_plot(trimesh) diff --git a/holoviews/tests/plotting/matplotlib/test_labels.py b/holoviews/tests/plotting/matplotlib/test_labels.py index 5f1d39f281..040f31e4a7 100644 --- a/holoviews/tests/plotting/matplotlib/test_labels.py +++ b/holoviews/tests/plotting/matplotlib/test_labels.py @@ -28,7 +28,7 @@ def test_labels_empty(self): self.assertEqual(artist, []) def test_labels_formatter(self): - vdim = Dimension('text', value_format=lambda x: '%.1f' % x) + vdim = Dimension('text', value_format=lambda x: f'{x:.1f}') labels = Labels([(0, 1, 0.33333), (1, 0, 0.66666)], vdims=vdim) plot = mpl_renderer.get_plot(labels) artist = plot.handles['artist'] diff --git a/holoviews/tests/plotting/matplotlib/test_overlayplot.py b/holoviews/tests/plotting/matplotlib/test_overlayplot.py index af857f6d14..7e0611cf91 100644 --- a/holoviews/tests/plotting/matplotlib/test_overlayplot.py +++ b/holoviews/tests/plotting/matplotlib/test_overlayplot.py @@ -8,7 +8,7 @@ try: from holoviews.plotting.mpl import OverlayPlot -except: +except ImportError: pass diff --git a/holoviews/tests/plotting/matplotlib/test_radialheatmap.py b/holoviews/tests/plotting/matplotlib/test_radialheatmap.py index 58f6879e53..fe379a7b9e 100644 --- a/holoviews/tests/plotting/matplotlib/test_radialheatmap.py +++ b/holoviews/tests/plotting/matplotlib/test_radialheatmap.py @@ -7,7 +7,7 @@ try: from holoviews.plotting.mpl import 
RadialHeatMapPlot -except: +except ImportError: pass from .test_plot import TestMPLPlot, mpl_renderer @@ -19,8 +19,8 @@ def setUp(self): super().setUp() # set up dummy data for convenient tests - x = ["Seg {}".format(idx) for idx in range(2)] - y = ["Ann {}".format(idx) for idx in range(2)] + x = [f"Seg {idx}" for idx in range(2)] + y = [f"Ann {idx}" for idx in range(2)] self.z = list(range(4)) self.x, self.y = zip(*product(x, y)) diff --git a/holoviews/tests/plotting/matplotlib/test_renderer.py b/holoviews/tests/plotting/matplotlib/test_renderer.py index ee7989a3d9..15b4d563b2 100644 --- a/holoviews/tests/plotting/matplotlib/test_renderer.py +++ b/holoviews/tests/plotting/matplotlib/test_renderer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Test cases for rendering exporters """ @@ -86,7 +85,7 @@ def test_render_mp4(self): devnull = subprocess.DEVNULL try: subprocess.call(['ffmpeg', '-h'], stdout=devnull, stderr=devnull) - except: + except Exception: raise SkipTest('ffmpeg not available, skipping mp4 export test') data, metadata = self.renderer.components(self.map1, 'mp4') self.assertIn("= 1.8.0') class Inner(param.Parameterized): @@ -426,7 +426,7 @@ def test(x): def test_panel_param_steams_dict(self): try: import panel - except: + except ImportError: raise SkipTest('Panel required for widget support in streams dict') widget = panel.widgets.FloatSlider(value=1) @@ -531,7 +531,7 @@ def test_dynamicmap_param_method_action_param(self): def subscriber(**kwargs): values.append(kwargs) self.assertEqual(set(stream.hashkey), - {'%s action' % id(inner), '_memoize_key'}) + {f'{id(inner)} action', '_memoize_key'}) stream.add_subscriber(subscriber) inner.action(inner) @@ -551,7 +551,7 @@ def subscriber(**kwargs): values.append(kwargs) self.assertEqual( set(stream.hashkey), - {'%s action' % id(inner), '%s x' % id(inner), '_memoize_key'}) + {f'{id(inner)} action', f'{id(inner)} x', '_memoize_key'}) stream.add_subscriber(subscriber) stream.add_subscriber(lambda **kwargs: 
dmap[()]) @@ -1170,7 +1170,7 @@ def test_selection_expr_stream_1D_elements(self): # Check SelectionExpr values self.assertEqual( repr(expr_stream.selection_expr), - repr(((dim('x')>=1)&(dim('x')<=3))) + repr((dim('x')>=1)&(dim('x')<=3)) ) self.assertEqual( expr_stream.bbox, @@ -1225,7 +1225,7 @@ def test_selection_expr_stream_invert_axes_1D_elements(self): # Check SelectionExpr values self.assertEqual( repr(expr_stream.selection_expr), - repr(((dim('x')>=1)&(dim('x')<=4))) + repr((dim('x')>=1)&(dim('x')<=4)) ) self.assertEqual( expr_stream.bbox, @@ -1287,7 +1287,7 @@ def test_selection_expr_stream_invert_xaxis_yaxis_1D_elements(self): # Check SelectionExpr values self.assertEqual( repr(expr_stream.selection_expr), - repr(((dim('x')>=1)&(dim('x')<=3))) + repr((dim('x')>=1)&(dim('x')<=3)) ) self.assertEqual( expr_stream.bbox, @@ -1393,9 +1393,9 @@ def test_selection_expr_stream_hist_invert_xaxis_yaxis(self): def test_selection_expr_stream_polygon_index_cols(self): # Create SelectionExpr on element try: import shapely # noqa - except: + except ImportError: try: import spatialpandas # noqa - except: raise SkipTest('Shapely required for polygon selection') + except ImportError: raise SkipTest('Shapely required for polygon selection') poly = Polygons([ [(0, 0, 'a'), (2, 0, 'a'), (1, 1, 'a')], [(2, 0, 'b'), (4, 0, 'b'), (3, 1, 'b')], @@ -1492,7 +1492,7 @@ def test_selection_expr_stream_dynamic_map_1D_elements(self): # Check SelectionExpr values self.assertEqual( repr(expr_stream.selection_expr), - repr(((dim('x')>=1)&(dim('x')<=3))) + repr((dim('x')>=1)&(dim('x')<=3)) ) self.assertEqual( expr_stream.bbox, diff --git a/holoviews/tests/util/test_transform.py b/holoviews/tests/util/test_transform.py index d023a7aa5b..7bd43e0b12 100644 --- a/holoviews/tests/util/test_transform.py +++ b/holoviews/tests/util/test_transform.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Unit tests for dim transforms """ @@ -15,12 +14,12 @@ try: import dask.dataframe as dd import dask.array as 
da -except: +except ImportError: da, dd = None, None try: import xarray as xr -except: +except ImportError: xr = None xr_skip = skipIf(xr is None, "xarray not available") diff --git a/holoviews/tests/util/test_utils.py b/holoviews/tests/util/test_utils.py index 8c3dadbcb4..f31af83f5f 100644 --- a/holoviews/tests/util/test_utils.py +++ b/holoviews/tests/util/test_utils.py @@ -1,12 +1,12 @@ -# -*- coding: utf-8 -*- """ Unit tests of the helper functions in utils """ +from collections import OrderedDict + from holoviews import notebook_extension from holoviews.element.comparison import ComparisonTestCase from holoviews import Store from holoviews.util import output, opts, OutputSettings, Options -from holoviews.core import OrderedDict from holoviews.core.options import OptionTree from pyviz_comms import CommManager diff --git a/holoviews/util/__init__.py b/holoviews/util/__init__.py index 4cfa63561b..7e38817d1b 100644 --- a/holoviews/util/__init__.py +++ b/holoviews/util/__init__.py @@ -37,7 +37,7 @@ def examples(path='holoviews-examples', verbose=False, force=False, root=__file_ example_dir = os.path.join(filepath, '../examples') if os.path.exists(path): if not force: - print('%s directory already exists, either delete it or set the force flag' % path) + print(f'{path} directory already exists, either delete it or set the force flag') return shutil.rmtree(path) ignore = shutil.ignore_patterns('.ipynb_checkpoints','*.pyc','*~') @@ -45,7 +45,7 @@ def examples(path='holoviews-examples', verbose=False, force=False, root=__file_ if os.path.isdir(tree_root): shutil.copytree(tree_root, path, ignore=ignore, symlinks=True) else: - print('Cannot find %s' % tree_root) + print(f'Cannot find {tree_root}') class OptsMeta(param.parameterized.ParameterizedMetaclass): @@ -120,7 +120,7 @@ def _group_kwargs_to_options(cls, obj, kwargs): "Format option group kwargs into canonical options format" groups = Options._option_groups if set(kwargs.keys()) - set(groups): - raise 
Exception("Keyword options %s must be one of %s" % (groups, + raise Exception("Keyword options {} must be one of {}".format(groups, ','.join(repr(g) for g in groups))) elif not all(isinstance(v, dict) for v in kwargs.values()): raise Exception("The %s options must be specified using dictionary groups" % @@ -134,11 +134,11 @@ def _group_kwargs_to_options(cls, obj, kwargs): # Not targets specified - add current object as target sanitized_group = util.group_sanitizer(obj.group) if obj.label: - identifier = ('%s.%s.%s' % ( + identifier = ('{}.{}.{}'.format( obj.__class__.__name__, sanitized_group, util.label_sanitizer(obj.label))) elif sanitized_group != obj.__class__.__name__: - identifier = '%s.%s' % (obj.__class__.__name__, sanitized_group) + identifier = f'{obj.__class__.__name__}.{sanitized_group}' else: identifier = obj.__class__.__name__ @@ -176,7 +176,7 @@ def _grouped_backends(cls, options, backend): for spec, groups in options.items(): if 'output' not in groups.keys() or len(groups['output'])==0: dfltdict[backend or Store.current_backend][spec.strip()] = groups - elif set(groups['output'].keys()) - set(['backend']): + elif set(groups['output'].keys()) - {'backend'}: dfltdict[groups['output']['backend']][spec.strip()] = groups elif ['backend'] == list(groups['output'].keys()): filtered = {k:v for k,v in groups.items() if k != 'output'} @@ -232,8 +232,7 @@ def apply_groups(cls, obj, options=None, backend=None, clone=True, **kwargs): options = OptsSpec.parse(options) except SyntaxError: options = OptsSpec.parse( - '{clsname} {options}'.format(clsname=obj.__class__.__name__, - options=options)) + f'{obj.__class__.__name__} {options}') if kwargs: options = cls._group_kwargs_to_options(obj, kwargs) @@ -246,7 +245,7 @@ def _process_magic(cls, options, strict, backends=None): if isinstance(options, str): from .parser import OptsSpec try: ns = get_ipython().user_ns # noqa - except: ns = globals() + except Exception: ns = globals() options = OptsSpec.parse(options, 
ns=ns) errmsg = StoreOptions.validation_error_message(options, backends=backends) @@ -337,7 +336,7 @@ def _expand_options(cls, options, backend=None): try: backend_options = Store.options(backend=backend or current_backend) except KeyError as e: - raise Exception('The %s backend is not loaded. Please load the backend using hv.extension.' % str(e)) + raise Exception(f'The {e} backend is not loaded. Please load the backend using hv.extension.') expanded = {} if isinstance(options, list): options = merge_options_to_dict(options) @@ -345,8 +344,7 @@ def _expand_options(cls, options, backend=None): for objspec, options in options.items(): objtype = objspec.split('.')[0] if objtype not in backend_options: - raise ValueError('%s type not found, could not apply options.' - % objtype) + raise ValueError(f'{objtype} type not found, could not apply options.') obj_options = backend_options[objtype] expanded[objspec] = {g: {} for g in obj_options.groups} for opt, value in options.items(): @@ -425,15 +423,15 @@ def _builder_reprs(cls, options, namespace=None, ns=None): from .parser import OptsSpec if ns is None: try: ns = get_ipython().user_ns # noqa - except: ns = globals() + except Exception: ns = globals() options = options.replace('%%opts','').replace('%opts','') options = OptsSpec.parse_options(options, ns=ns) reprs = [] - ns = '{namespace}.'.format(namespace=namespace) if namespace else '' + ns = f'{namespace}.' if namespace else '' for option in options: - kws = ', '.join('%s=%r' % (k,option.kwargs[k]) for k in sorted(option.kwargs)) + kws = ', '.join(f'{k}={option.kwargs[k]!r}' for k in sorted(option.kwargs)) if '.' 
in option.key: element = option.key.split('.')[0] spec = repr('.'.join(option.key.split('.')[1:])) + ', ' @@ -448,8 +446,8 @@ def _builder_reprs(cls, options, namespace=None, ns=None): @classmethod def _create_builder(cls, element, completions): def builder(cls, spec=None, **kws): - spec = element if spec is None else '%s.%s' % (element, spec) - prefix = 'In opts.{element}(...), '.format(element=element) + spec = element if spec is None else f'{element}.{spec}' + prefix = f'In opts.{element}(...), ' backend = kws.get('backend', None) keys = set(kws.keys()) if backend: @@ -589,8 +587,8 @@ def info(cls): if ':' in pairs['backend']: pairs['backend'] = pairs['backend'].split(':')[0] - keywords = ', '.join('%s=%r' % (k,pairs[k]) for k in sorted(pairs.keys())) - print('output({kws})'.format(kws=keywords)) + keywords = ', '.join(f'{k}={pairs[k]!r}' for k in sorted(pairs.keys())) + print(f'output({keywords})') def __call__(self, *args, **options): @@ -610,12 +608,12 @@ def __call__(self, *args, **options): options = Store.output_settings.extract_keywords(line, {}) for k in options.keys(): if k not in Store.output_settings.allowed: - raise KeyError('Invalid keyword: %s' % k) + raise KeyError(f'Invalid keyword: {k}') def display_fn(obj, renderer): try: from IPython.display import display - except: + except ImportError: return display(obj) @@ -684,11 +682,11 @@ def __call__(self, *args, **params): for backend, imp in imports: try: __import__(backend) - except: + except ImportError: self.param.warning("%s could not be imported, ensure %s is installed." 
% (backend, backend)) try: - __import__('holoviews.plotting.%s' % imp) + __import__(f'holoviews.plotting.{imp}') if selected_backend is None: selected_backend = backend except util.VersionError as e: @@ -976,7 +974,7 @@ def _get_streams(self, map_obj, watch=True): if invalid: msg = ('The supplied streams list contains objects that ' 'are not Stream instances: {objs}') - raise TypeError(msg.format(objs = ', '.join('%r' % el for el in invalid))) + raise TypeError(msg.format(objs = ', '.join(f'{el!r}' for el in invalid))) return valid def _process(self, element, key=None, kwargs={}): diff --git a/holoviews/util/command.py b/holoviews/util/command.py index 72bc563d2a..6adc6080e9 100755 --- a/holoviews/util/command.py +++ b/holoviews/util/command.py @@ -12,13 +12,13 @@ try: import nbformat, nbconvert -except: +except ImportError: print('nbformat, nbconvert and ipython need to be installed to use the holoviews command') sys.exit() try: from ..ipython.preprocessors import OptsMagicProcessor, OutputMagicProcessor from ..ipython.preprocessors import StripMagicsProcessor -except: +except ImportError: from holoviews.ipython.preprocessors import OptsMagicProcessor, OutputMagicProcessor from holoviews.ipython.preprocessors import StripMagicsProcessor diff --git a/holoviews/util/parser.py b/holoviews/util/parser.py index f46b5a155b..ccb2bc70fd 100644 --- a/holoviews/util/parser.py +++ b/holoviews/util/parser.py @@ -29,7 +29,7 @@ class ParserWarning(param.Parameterized):pass parsewarning = ParserWarning(name='Warning') -class Parser(object): +class Parser: """ Base class for magic line parsers, designed for forgiving parsing of keyword lists. 
@@ -54,7 +54,7 @@ def recurse_token(cls, token, inner): new_tok = [s for t in tok for s in (cls.recurse_token(t, inner) if isinstance(t, list) else [t])] - recursed.append((inner % ''.join(new_tok))) + recursed.append(inner % ''.join(new_tok)) else: recursed.append(tok) return inner % ''.join(recursed) @@ -109,12 +109,11 @@ def todict(cls, parseresult, mode='parens', ns={}): (',.', '.')]: keyword = keyword.replace(fst, snd) try: - kwargs.update(eval('dict(%s)' % keyword, + kwargs.update(eval(f'dict({keyword})', dict(cls.namespace, **ns))) - except: + except Exception: if cls.abort_on_eval_failure: - raise SyntaxError("Could not evaluate keyword: %r" - % keyword) + raise SyntaxError(f"Could not evaluate keyword: {keyword!r}") msg = "Ignoring keyword pair that fails to evaluate: '%s'" parsewarning.warning(msg % keyword) @@ -235,8 +234,7 @@ def process_normalization(cls, parse_group): " contain repeated %r" % normopt) if not all(opt in options for opt in opts): - raise SyntaxError("Normalization option not one of %s" - % ", ".join(options)) + raise SyntaxError(f"Normalization option not one of {', '.join(options)}") excluded = [('+framewise', '-framewise'), ('+axiswise', '-axiswise')] for pair in excluded: if all(exclude in opts for exclude in pair): @@ -313,7 +311,7 @@ def parse(cls, line, ns={}): e = parses[0][2] processed = line[:e] if (processed.strip() != line.strip()): - raise SyntaxError("Failed to parse remainder of string: %r" % line[e:]) + raise SyntaxError(f"Failed to parse remainder of string: {line[e:]!r}") grouped_paths = cls._group_paths_without_options(cls.opts_spec.parseString(line)) parse = {} @@ -414,7 +412,7 @@ def parse(cls, line, ns={}): e = parses[0][2] processed = line[:e] if (processed.strip() != line.strip()): - raise SyntaxError("Failed to parse remainder of string: %r" % line[e:]) + raise SyntaxError(f"Failed to parse remainder of string: {line[e:]!r}") opmap = {op.__name__:op for op in Compositor.operations} for group in 
cls.compositor_spec.parseString(line): diff --git a/holoviews/util/settings.py b/holoviews/util/settings.py index 7c0bcffe55..80647cfe87 100644 --- a/holoviews/util/settings.py +++ b/holoviews/util/settings.py @@ -1,9 +1,8 @@ -from collections import defaultdict -from ..core import OrderedDict +from collections import defaultdict, OrderedDict from ..core import Store -class KeywordSettings(object): +class KeywordSettings: """ Base class for options settings used to specified collections of keyword options. @@ -39,7 +38,7 @@ def get_options(cls, items, options, warnfn): if isinstance(allowed, set): pass elif isinstance(allowed, dict): if not isinstance(value, dict): - raise ValueError("Value %r not a dict type" % value) + raise ValueError(f"Value {value!r} not a dict type") disallowed = set(value.keys()) - set(allowed.keys()) if disallowed: raise ValueError("Keywords %r for %r option not one of %s" @@ -83,7 +82,7 @@ def extract_keywords(cls, line, items): if chunk.strip() in cls.allowed: key = chunk.strip() else: - raise SyntaxError("Invalid keyword: %s" % chunk.strip()) + raise SyntaxError(f"Invalid keyword: {chunk.strip()}") # The next chunk may end in a subsequent keyword value = unprocessed.pop().strip() if len(unprocessed) != 0: @@ -94,12 +93,12 @@ def extract_keywords(cls, line, items): unprocessed.append(option) break else: - raise SyntaxError("Invalid keyword: %s" % value.split()[-1]) - keyword = '%s=%s' % (key, value) + raise SyntaxError(f"Invalid keyword: {value.split()[-1]}") + keyword = f'{key}={value}' try: - items.update(eval('dict(%s)' % keyword)) - except: - raise SyntaxError("Could not evaluate keyword: %s" % keyword) + items.update(eval(f'dict({keyword})')) + except Exception: + raise SyntaxError(f"Could not evaluate keyword: {keyword}") return items @@ -111,7 +110,7 @@ def list_backends(): renderer = Store.renderers[backend] modes = [mode for mode in renderer.param.objects('existing')['mode'].objects if mode != 'default'] - backends += ['%s:%s' % 
(backend, mode) for mode in modes] + backends += [f'{backend}:{mode}' for mode in modes] return backends @@ -197,13 +196,13 @@ class OutputSettings(KeywordSettings): backend_list = [] # List of possible backends def missing_dependency_exception(value, keyword, allowed): - raise Exception("Format %r does not appear to be supported." % value) + raise Exception(f"Format {value!r} does not appear to be supported.") def missing_backend_exception(value, keyword, allowed): if value in OutputSettings.backend_list: - raise ValueError("Backend %r not available. Has it been loaded with the notebook_extension?" % value) + raise ValueError(f"Backend {value!r} not available. Has it been loaded with the notebook_extension?") else: - raise ValueError("Backend %r does not exist" % value) + raise ValueError(f"Backend {value!r} does not exist") custom_exceptions = {'holomap':missing_dependency_exception, 'backend': missing_backend_exception} @@ -256,10 +255,10 @@ def _validate(cls, options, items, warnfn): if 'html' in Store.display_formats: pass elif 'fig' in items and items['fig'] not in Store.display_formats: - msg = ("Requesting output figure format %r " % items['fig'] - + "not in display formats %r" % Store.display_formats) + msg = (f"Requesting output figure format {items['fig']!r} " + + f"not in display formats {Store.display_formats!r}") if warnfn is None: - print('Warning: {msg}'.format(msg=msg)) + print(f'Warning: {msg}') else: warnfn(msg) @@ -326,7 +325,7 @@ def output(cls, line=None, cell=None, cell_runner=None, "has not been loaded, ensure you load it " "with hv.extension({ext}) before using " "hv.output.".format(ext=repr(backend))) - print('Error: %s' % str(e)) + print(f'Error: {e}') if help_prompt: print(help_prompt) return diff --git a/holoviews/util/transform.py b/holoviews/util/transform.py index 45be351628..4fd948359f 100644 --- a/holoviews/util/transform.py +++ b/holoviews/util/transform.py @@ -4,11 +4,12 @@ from types import BuiltinFunctionType, 
BuiltinMethodType, FunctionType, MethodType import numpy as np +import pandas as pd import param from ..core.data import PandasInterface from ..core.dimension import Dimension -from ..core.util import flatten, pd, resolve_dependent_value, unique_iterator +from ..core.util import flatten, resolve_dependent_value, unique_iterator def _maybe_map(numpy_fn): @@ -69,7 +70,7 @@ def lognorm(values, min=None, max=None): return (np.log(values) - min) / (max-min) -class iloc(object): +class iloc: """Implements integer array indexing for dim expressions. """ @@ -90,7 +91,7 @@ def __call__(self, values): return values[resolve_dependent_value(self.index)] -class loc(object): +class loc: """Implements loc for dim expressions. """ @@ -199,7 +200,7 @@ def _python_isin(array, values): ) -class dim(object): +class dim: """ dim transform objects are a way to express deferred transforms on Datasets. dim transforms support all mathematical and bitwise @@ -318,7 +319,7 @@ def __getattribute__(self, attr): # transform itself, so set namespace to None ns = None extras = {ns_attr for ns_attr in dir(ns) if not ns_attr.startswith('_')} - if attr in extras and attr not in super(dim, self).__dir__(): + if attr in extras and attr not in super().__dir__(): return type(self)(self, attr, accessor=True) else: return super().__getattribute__(attr) @@ -329,7 +330,7 @@ def __dir__(self): ns = getattr(ns, self._current_accessor) extras = {attr for attr in dir(ns) if not attr.startswith('_')} try: - return sorted(set(super(dim, self).__dir__()) | extras) + return sorted(set(super().__dir__()) | extras) except Exception: return sorted(set(dir(type(self))) | set(self.__dict__) | extras) @@ -779,7 +780,7 @@ def apply(self, dataset, flat=False, expanded=None, ranges={}, all_values=False, return data def __repr__(self): - op_repr = "'%s'" % self.dimension + op_repr = f"'{self.dimension}'" accessor = False for i, o in enumerate(self.ops): if i == 0: @@ -825,9 +826,9 @@ def __repr__(self): fn_name = 
self._numpy_funcs[fn] format_string = prev+').{fn}(' elif isinstance(fn, iloc): - format_string = prev+').iloc[{0}]'.format(repr(fn.index)) + format_string = prev+f').iloc[{fn.index!r}]' elif isinstance(fn, loc): - format_string = prev+').loc[{0}]'.format(repr(fn.index)) + format_string = prev+f').loc[{fn.index!r}]' elif fn in self._custom_funcs: fn_name = self._custom_funcs[fn] format_string = prev+').{fn}(' @@ -867,7 +868,7 @@ def __repr__(self): if op_repr.count('(') - op_repr.count(')') > 0: op_repr += ')' if not self.ops: - op_repr = 'dim({repr})'.format(repr=op_repr) + op_repr = f'dim({op_repr})' if op_repr.count('(') - op_repr.count(')') > 0: op_repr += ')' return op_repr