Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Maintenance of the code #5509

Merged
merged 28 commits into from
Dec 27, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
3416921
Automatic updates to f-string with flynt
hoxbro Nov 4, 2022
4711233
Update to f-string with small manual changes
hoxbro Nov 4, 2022
f2ce53e
Manual added f-string conversion
hoxbro Nov 4, 2022
bcd6cda
Removed bare except:
hoxbro Nov 7, 2022
c4a94a2
Pandas is a required dependency
hoxbro Nov 7, 2022
6dc7315
Removed old import for python dependencies
hoxbro Nov 7, 2022
223eea1
Removed old imports for bokeh import
hoxbro Nov 7, 2022
74201a9
Remove python2 code in dt_to_int
hoxbro Nov 7, 2022
1d8dbcd
Merge branch 'master' into maintenance
hoxbro Dec 23, 2022
69a0bdb
Run pyupgrade with py37-plus
hoxbro Dec 23, 2022
71d05df
Remove super(X, self)
hoxbro Dec 23, 2022
bfac9ad
Fix none if statement
hoxbro Dec 23, 2022
a9f2c83
Remove cyODict
hoxbro Dec 23, 2022
2856ef5
Change Exception to ImportError
hoxbro Dec 23, 2022
9f28b86
Remove old IPython version guard
hoxbro Dec 23, 2022
7d04e53
Remove non-existing DFrame import
hoxbro Dec 23, 2022
930c5f3
Remove panel safeguard; param_value_if_widget is available since version 0.13.1
hoxbro Dec 23, 2022
8c44ab4
Rename LooseVersion to Version and use explicit import
hoxbro Dec 23, 2022
17a40d9
Remove bokeh safeguard when below 2.4.0
hoxbro Dec 23, 2022
7bd63fa
Explicit import of OrderedDict
hoxbro Dec 23, 2022
33989f8
Remove panel safeguard; panel.io.document is available since version 0.13.1
hoxbro Dec 23, 2022
c6e420e
Use explicit pandas import as it is a required dependency
hoxbro Dec 23, 2022
849042c
Second round of pyupgrade
hoxbro Dec 24, 2022
761a2a1
Remove pandas import for functions
hoxbro Dec 24, 2022
6ba0a04
Move builtins to correct file
hoxbro Dec 24, 2022
77d66f5
Use pandas_version
hoxbro Dec 24, 2022
121a45d
Remove __nonzero__ and __unicode__
hoxbro Dec 25, 2022
3fb9eba
Remove a pd
hoxbro Dec 26, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion holoviews/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ def __call__(self, *args, **opts): # noqa (dummy signature)
try:
exec(code)
except Exception as e:
print("Warning: Could not load %r [%r]" % (filename, str(e)))
print(f"Warning: Could not load {filename!r} [{str(e)!r}]")
del f, code
break
del filename
Expand Down
16 changes: 8 additions & 8 deletions holoviews/annotators.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ def __init__(self, object=None, **params):
super().__init__(None, **params)
self.object = self._process_element(object)
self._table_row = Row()
self.editor = Tabs(('%s' % param_name(self.name), self._table_row))
self.editor = Tabs((f'{param_name(self.name)}', self._table_row))
self.plot = DynamicMap(self._get_plot)
self.plot.callback.inputs[:] = [self.object]
self._tables = []
Expand Down Expand Up @@ -336,19 +336,19 @@ class PathAnnotator(Annotator):
def __init__(self, object=None, **params):
self._vertex_table_row = Row()
super().__init__(object, **params)
self.editor.append(('%s Vertices' % param_name(self.name),
self.editor.append((f'{param_name(self.name)} Vertices',
self._vertex_table_row))

def _init_stream(self):
name = param_name(self.name)
self._stream = PolyDraw(
source=self.plot, data={}, num_objects=self.num_objects,
show_vertices=self.show_vertices, tooltip='%s Tool' % name,
show_vertices=self.show_vertices, tooltip=f'{name} Tool',
vertex_style=self.vertex_style, empty_value=self.empty_value
)
if self.edit_vertices:
self._vertex_stream = PolyEdit(
source=self.plot, tooltip='%s Edit Tool' % name,
source=self.plot, tooltip=f'{name} Edit Tool',
vertex_style=self.vertex_style,
)

Expand Down Expand Up @@ -379,7 +379,7 @@ def _process_element(self, element=None):
# Validate annotations
poly_data = {c: element.dimension_values(c, expanded=False)
for c in validate}
if validate and len(set(len(v) for v in poly_data.values())) != 1:
if validate and len({len(v) for v in poly_data.values()}) != 1:
raise ValueError('annotations must refer to value dimensions '
'which vary per path while at least one of '
'%s varies by vertex.' % validate)
Expand Down Expand Up @@ -419,7 +419,7 @@ def _update_table(self):
self._table = Table(table_data, annotations, [], label=name).opts(
show_title=False, **self.table_opts)
self._vertex_table = Table(
[], table.kdims, list(self.vertex_annotations), label='%s Vertices' % name
[], table.kdims, list(self.vertex_annotations), label=f'{name} Vertices'
).opts(show_title=False, **self.table_opts)
self._update_links()
self._table_row[:] = [self._table]
Expand Down Expand Up @@ -458,7 +458,7 @@ def _init_stream(self):
name = param_name(self.name)
self._stream = self._stream_type(
source=self.plot, data={}, num_objects=self.num_objects,
tooltip='%s Tool' % name, empty_value=self.empty_value
tooltip=f'{name} Tool', empty_value=self.empty_value
)

def _process_element(self, object):
Expand Down Expand Up @@ -519,7 +519,7 @@ class CurveAnnotator(_GeomAnnotator):
def _init_stream(self):
name = param_name(self.name)
self._stream = self._stream_type(
source=self.plot, data={}, tooltip='%s Tool' % name,
source=self.plot, data={}, tooltip=f'{name} Tool',
style=self.vertex_style
)

Expand Down
10 changes: 4 additions & 6 deletions holoviews/core/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
from datetime import date, datetime

import pandas as pd

from .boundingregion import * # noqa (API import)
from .data import * # noqa (API import)
from .dimension import * # noqa (API import)
Expand Down Expand Up @@ -29,19 +31,15 @@
Dimension.type_formatters[np.datetime64] = '%Y-%m-%d %H:%M:%S'
Dimension.type_formatters[datetime] = '%Y-%m-%d %H:%M:%S'
Dimension.type_formatters[date] = '%Y-%m-%d'
Dimension.type_formatters[pd.Timestamp] = "%Y-%m-%d %H:%M:%S"

try:
import pandas as pd
Dimension.type_formatters[pd.Timestamp] = "%Y-%m-%d %H:%M:%S"
except:
pass

def public(obj):
if not isinstance(obj, type): return False
baseclasses = [Dimension, Dimensioned, Operation, BoundingBox,
SheetCoordinateSystem, AttrTree]
return any([issubclass(obj, bc) for bc in baseclasses])

_public = list(set([_k for _k, _v in locals().items() if public(_v)]))
_public = list({_k for _k, _v in locals().items() if public(_v)})
__all__ = _public + ["boundingregion", "dimension", "layer", "layout",
"ndmapping", "operation", "options", "sheetcoords", "tree", "element"]
6 changes: 3 additions & 3 deletions holoviews/core/accessors.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def pipelined_call(*args, **kwargs):


@add_metaclass(AccessorPipelineMeta)
class Apply(object):
class Apply:
"""
Utility to apply a function or operation to all viewable elements
inside the object.
Expand Down Expand Up @@ -289,7 +289,7 @@ def transform(self, *args, **kwargs):


@add_metaclass(AccessorPipelineMeta)
class Redim(object):
class Redim:
"""
Utility that supports re-dimensioning any HoloViews object via the
redim method.
Expand Down Expand Up @@ -489,7 +489,7 @@ def values(self, specs=None, **ranges):


@add_metaclass(AccessorPipelineMeta)
class Opts(object):
class Opts:

def __init__(self, obj, mode=None):
self._mode = mode
Expand Down
10 changes: 5 additions & 5 deletions holoviews/core/boundingregion.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from .util import datetime_types


class BoundingRegion(object):
class BoundingRegion:
"""
Abstract bounding region class, for any portion of a 2D plane.

Expand Down Expand Up @@ -93,9 +93,9 @@ def __str__(self):
l, b, r, t = self._aarect.lbrt()
if (not isinstance(r, datetime_types) and r == -l and
not isinstance(b, datetime_types) and t == -b and r == t):
return 'BoundingBox(radius=%s)' % (r)
return f'BoundingBox(radius={r})'
else:
return 'BoundingBox(points=((%s,%s),(%s,%s)))' % (l, b, r, t)
return f'BoundingBox(points=(({l},{b}),({r},{t})))'


def __repr__(self):
Expand All @@ -106,7 +106,7 @@ def script_repr(self, imports=[], prefix=" "):
# Generate import statement
cls = self.__class__.__name__
mod = self.__module__
imports.append("from %s import %s" % (mod, cls))
imports.append(f"from {mod} import {cls}")
return self.__str__()


Expand Down Expand Up @@ -242,7 +242,7 @@ def contains(self, x, y):
# JABALERT: Should probably remove top, bottom, etc. accessor functions,
# and use the slot itself instead.
###################################################
class AARectangle(object):
class AARectangle:
"""
Axis-aligned rectangle class.

Expand Down
30 changes: 11 additions & 19 deletions holoviews/core/data/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,3 @@
try:
import itertools.izip as zip
except ImportError:
pass

import types
import copy

Expand Down Expand Up @@ -70,7 +65,7 @@ def concat(datasets, datatype=None):
return Interface.concatenate(datasets, datatype)


class DataConversion(object):
class DataConversion:
"""
DataConversion is a very simple container object which can be
given an existing Dataset Element and provides methods to convert
Expand Down Expand Up @@ -129,7 +124,7 @@ def __call__(self, new_type, kdims=None, vdims=None, groupby=None,
else:
selected = self._element
else:
if pd and issubclass(self._element.interface, PandasInterface):
if issubclass(self._element.interface, PandasInterface):
ds_dims = self._element.dimensions()
ds_kdims = [self._element.get_dimension(d) if d in ds_dims else d
for d in groupby+kdims]
Expand Down Expand Up @@ -283,15 +278,14 @@ class to each underlying element.
"""
if isinstance(data, DynamicMap):
class_name = cls.__name__
repr_kdims = 'kdims=%r' % kdims if kdims else None
repr_vdims = 'vdims=%r' % vdims if vdims else None
repr_kwargs = (', '.join('%s=%r' % (k,v) for k,v in kwargs.items())
repr_kdims = f'kdims={kdims!r}' if kdims else None
repr_vdims = f'vdims={vdims!r}' if vdims else None
repr_kwargs = (', '.join(f'{k}={v!r}' for k,v in kwargs.items())
if kwargs else None)
extras = ', '.join([el for el in [repr_kdims, repr_vdims, repr_kwargs]
if el is not None])
extras = ', ' + extras if extras else ''
apply_args= 'hv.{class_name}{extras}'.format(class_name=class_name,
extras=extras)
apply_args= f'hv.{class_name}{extras}'
msg = "Cannot construct a {class_name} from the supplied object of type DynamicMap. Implicitly creating a DynamicMap of {class_name} objects, but instead please explicitly call .apply({apply_args}) on the supplied DynamicMap."
cls.param.warning(cls, msg.format(class_name=class_name, apply_args=apply_args))
return data.apply(cls, per_element=True, kdims=kdims, vdims=vdims, **kwargs)
Expand Down Expand Up @@ -540,7 +534,7 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs):
dimension = Dimension(dimension)

if dimension.name in self.kdims:
raise Exception('{dim} dimension already defined'.format(dim=dimension.name))
raise Exception(f'{dimension.name} dimension already defined')

if vdim:
dims = self.vdims[:]
Expand Down Expand Up @@ -716,7 +710,7 @@ def __getitem__(self, slices):
value_select = slices[self.ndims]
elif len(slices) == self.ndims+1 and isinstance(slices[self.ndims],
(Dimension,str)):
raise IndexError("%r is not an available value dimension" % slices[self.ndims])
raise IndexError(f"{slices[self.ndims]!r} is not an available value dimension")
else:
selection = dict(zip(self.dimensions(label=True), slices))
data = self.select(**selection)
Expand Down Expand Up @@ -821,7 +815,7 @@ def sample(self, samples=[], bounds=None, closest=True, **kwargs):
return self.clone(selection, kdims=kdims, new_type=new_type,
datatype=datatype)

lens = set(len(util.wrap_tuple(s)) for s in samples)
lens = {len(util.wrap_tuple(s)) for s in samples}
if len(lens) > 1:
raise IndexError('Sample coordinates must all be of the same length.')

Expand Down Expand Up @@ -946,7 +940,7 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs):
try:
# Should be checking the dimensions declared on the element are compatible
return self.clone(aggregated, kdims=kdims, vdims=vdims)
except:
except Exception:
datatype = self.param.objects('existing')['datatype'].default
return self.clone(aggregated, kdims=kdims, vdims=vdims,
new_type=new_type, datatype=datatype)
Expand Down Expand Up @@ -1070,12 +1064,10 @@ def __len__(self):
"Number of values in the Dataset."
return self.interface.length(self)

def __nonzero__(self):
def __bool__(self):
"Whether the Dataset contains any values"
return self.interface.nonzero(self)

__bool__ = __nonzero__

@property
def shape(self):
"Returns the shape of the data."
Expand Down
11 changes: 3 additions & 8 deletions holoviews/core/data/array.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,3 @@
try:
import itertools.izip as zip
except ImportError:
pass

import numpy as np

from .interface import Interface, DataError
Expand Down Expand Up @@ -35,7 +30,7 @@ def init(cls, eltype, data, kdims, vdims):
if ((isinstance(data, dict) or util.is_dataframe(data)) and
all(d in data for d in dimensions)):
dataset = [d if isinstance(d, np.ndarray) else np.asarray(data[d]) for d in dimensions]
if len(set(d.dtype.kind for d in dataset)) > 1:
if len({d.dtype.kind for d in dataset}) > 1:
raise ValueError('ArrayInterface expects all columns to be of the same dtype')
data = np.column_stack(dataset)
elif isinstance(data, dict) and not all(d in data for d in dimensions):
Expand All @@ -45,7 +40,7 @@ def init(cls, eltype, data, kdims, vdims):
data = np.column_stack(list(dataset))
elif isinstance(data, tuple):
data = [d if isinstance(d, np.ndarray) else np.asarray(d) for d in data]
if len(set(d.dtype.kind for d in data)) > 1:
if len({d.dtype.kind for d in data}) > 1:
raise ValueError('ArrayInterface expects all columns to be of the same dtype')
elif cls.expanded(data):
data = np.column_stack(data)
Expand All @@ -57,7 +52,7 @@ def init(cls, eltype, data, kdims, vdims):
data = np.array([], ndmin=2).T if data is None else list(data)
try:
data = np.array(data)
except:
except Exception:
data = None

if kdims is None:
Expand Down
21 changes: 8 additions & 13 deletions holoviews/core/data/cudf.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,10 @@
import sys
import warnings

try:
import itertools.izip as zip
except ImportError:
pass

from itertools import product

import numpy as np
import pandas as pd

from .. import util
from ..dimension import dimension_name
Expand Down Expand Up @@ -53,7 +49,6 @@ def applies(cls, obj):
@classmethod
def init(cls, eltype, data, kdims, vdims):
import cudf
import pandas as pd

element_params = eltype.param.objects()
kdim_param = element_params['kdims']
Expand Down Expand Up @@ -199,10 +194,10 @@ def select_mask(cls, dataset, selection):
if isinstance(sel, tuple):
sel = slice(*sel)
arr = cls.values(dataset, dim, keep_index=True)
if util.isdatetime(arr) and util.pd:
if util.isdatetime(arr):
try:
sel = util.parse_datetime_selection(sel)
except:
except Exception:
pass

new_masks = []
Expand Down Expand Up @@ -274,22 +269,22 @@ def aggregate(cls, dataset, dimensions, function, **kwargs):
agg = agg_map.get(agg, agg)
grouped = reindexed.groupby(cols, sort=False)
if not hasattr(grouped, agg):
raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
raise ValueError(f'{agg} aggregation is not supported on cudf DataFrame.')
df = getattr(grouped, agg)().reset_index()
else:
agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'}
agg = agg_map.get(agg, agg)
if not hasattr(reindexed, agg):
raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
raise ValueError(f'{agg} aggregation is not supported on cudf DataFrame.')
agg = getattr(reindexed, agg)()
try:
data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_numpy())))
data = {col: [v] for col, v in zip(agg.index.values_host, agg.to_numpy())}
except Exception:
# Give FutureWarning: 'The to_array method will be removed in a future cuDF release.
# Consider using `to_numpy` instead.'
# Seen in cudf=21.12.01
data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_array())))
df = util.pd.DataFrame(data, columns=list(agg.index.values_host))
data = {col: [v] for col, v in zip(agg.index.values_host, agg.to_array())}
df = pd.DataFrame(data, columns=list(agg.index.values_host))

dropped = []
for vd in vdims:
Expand Down
4 changes: 0 additions & 4 deletions holoviews/core/data/dask.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
import sys
try:
import itertools.izip as zip
except ImportError:
pass

import numpy as np
import pandas as pd
Expand Down
Loading