Completely rewrote geometry handling (#244)
philippjfr authored Oct 25, 2018
1 parent 45a7ea7 commit af2d3b0
Showing 15 changed files with 714 additions and 449 deletions.
9 changes: 4 additions & 5 deletions examples/gallery/bokeh/filled_contours.ipynb
@@ -32,7 +32,7 @@
" \"\"\"Returns ``lons``, ``lats`` and ``data`` of some fake data.\"\"\"\n",
" nlats, nlons = shape\n",
" ys = np.linspace(-np.pi / 2, np.pi / 2, nlats)\n",
" xs = np.linspace(-np.pi, np.pi, nlons)\n",
" xs = np.linspace(0, 2*np.pi, nlons)\n",
" lons, lats = np.meshgrid(xs, ys)\n",
" wave = 0.75 * (np.sin(2 * lats) ** 8) * np.cos(4 * lons)\n",
" mean = 0.5 * np.cos(2 * lats) * ((np.sin(2 * lats)) ** 2 + 2)\n",
@@ -44,8 +44,7 @@
" return lons, lats, data\n",
"\n",
"lons, lats, data = sample_data()\n",
"contours = hv.operation.contours(gv.Image((lons, lats, data)),\n",
" filled=True, levels=8).redim.range(z=(-1.1, 1.1))"
"contours = hv.operation.contours(gv.Image((lons, lats, data)), filled=True, levels=8).redim.range(z=(-1.1, 1.1))"
]
},
{
@@ -61,8 +60,8 @@
"metadata": {},
"outputs": [],
"source": [
"%%opts Polygons [colorbar=True width=600 height=300 color_levels=12 infer_projection=True] (alpha=0.8 cmap='nipy_spectral') \n",
"gv.project(contours, projection=crs.Mollweide()) * gf.coastline"
"%%opts Polygons [colorbar=True width=600 height=300 color_levels=10 projection=crs.Mollweide()] (cmap='nipy_spectral') \n",
"contours * gf.coastline"
]
}
],
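Assembled for readability, the new version of these two notebook cells reads as follows. This is only a sketch of the diff above, assuming the notebook's earlier imports (numpy as np, holoviews as hv, geoviews as gv, geoviews.feature as gf, and cartopy's crs module as crs) and the sample_data() helper shown in the hunk; the key change is that the Mollweide projection is now passed as a plot option instead of reprojecting the data with gv.project beforehand.

lons, lats, data = sample_data()
contours = hv.operation.contours(gv.Image((lons, lats, data)), filled=True, levels=8).redim.range(z=(-1.1, 1.1))

And in the following cell:

%%opts Polygons [colorbar=True width=600 height=300 color_levels=10 projection=crs.Mollweide()] (cmap='nipy_spectral')
contours * gf.coastline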
8 changes: 4 additions & 4 deletions examples/gallery/bokeh/katrina_track.ipynb
@@ -86,11 +86,11 @@
"metadata": {},
"outputs": [],
"source": [
"%%opts Shape [width=700 height=500 infer_projection=True] Polygons Path [show_legend=True apply_ranges=False]\n",
"# Add proxy artists for legend\n",
"direct_hit = gv.Polygons([[(0,0)]], label='State directly intersects\\nwith track').opts(style=dict(color='red'))\n",
"within_2_deg = gv.Polygons([[(0,0)]], label='State is within \\n2 degrees of track').opts(style=dict(color='#FF7E00'))\n",
"gv.Overlay(shapes).relabel(title) * direct_hit * within_2_deg"
"direct_hit = gv.Polygons([[(0,0)]], label='State directly intersects with track').options(color='red', show_legend=True, apply_ranges=False)\n",
"within_2_deg = gv.Polygons([[(0,0)]], label='State is within 2 degrees of track').options(color='#FF7E00', show_legend=True, apply_ranges=False)\n",
"\n",
"(gv.Overlay(shapes) * direct_hit * within_2_deg).relabel(title).options(width=700, height=500, infer_projection=True)"
]
}
],
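The pattern above recurs throughout this commit: the %%opts cell magic and the plot/style split of .opts(style=dict(...)) are replaced by a single flat .options() call that accepts plot and style options together. A minimal before/after sketch using the proxy legend entry from this notebook (both forms are taken from the hunk above):

# Before: plot options via the cell magic, style options via .opts(style=...)
# %%opts Polygons Path [show_legend=True apply_ranges=False]
direct_hit = gv.Polygons([[(0,0)]], label='State directly intersects\nwith track').opts(style=dict(color='red'))

# After: one flat call carrying color, legend and ranging options
direct_hit = gv.Polygons([[(0,0)]], label='State directly intersects with track').options(color='red', show_legend=True, apply_ranges=False)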
12 changes: 4 additions & 8 deletions examples/gallery/matplotlib/katrina_track.ipynb
@@ -69,9 +69,9 @@
" facecolor = 'red'\n",
" elif state.intersects(track_buffer):\n",
" facecolor = '#FF7E00'\n",
" shapes.append(gv.Shape(state).opts(style=dict(facecolor=facecolor)))\n",
"shapes.append(gv.Shape(track_buffer).opts(style=dict(alpha=0.5)))\n",
"shapes.append(gv.Shape(track).opts(style=dict(facecolor='none')))"
" shapes.append(gv.Shape(state).options(facecolor=facecolor))\n",
"shapes.append(gv.Shape(track_buffer).options(alpha=0.5))\n",
"shapes.append(gv.Shape(track).options(facecolor='none'))"
]
},
{
@@ -87,11 +87,7 @@
"metadata": {},
"outputs": [],
"source": [
"%%opts Polygons [show_legend=True apply_ranges=False]\n",
"# Add proxy artists for legend\n",
"direct_hit = gv.Polygons([[(0,0)]], label='State directly intersects\\nwith track').opts(style=dict(facecolor='red'))\n",
"within_2_deg = gv.Polygons([[(0,0)]], label='State is within \\n2 degrees of track').opts(style=dict(facecolor='#FF7E00'))\n",
"gv.Overlay(shapes).relabel(title) * direct_hit * within_2_deg"
"gv.Overlay(shapes).relabel(title)"
]
}
],
14 changes: 7 additions & 7 deletions examples/user_guide/Geometries.ipynb
@@ -127,7 +127,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"We can also iterate over the geometries and wrap them all in an NdOverlay of ``gv.Shape`` Elements:"
"We can also supply a list of geometries directly to a Polygons or Path element:"
]
},
{
@@ -136,8 +136,7 @@
"metadata": {},
"outputs": [],
"source": [
"%%opts NdOverlay [aspect=2]\n",
"hv.NdOverlay({i: gv.Shape(s, crs=ccrs.PlateCarree()) for i, s in enumerate(land_geoms)})"
"gv.Polygons(land_geoms) + gv.Path(land_geoms)"
]
},
{
@@ -196,7 +195,9 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"The ``from_records`` function optionally also supports merging the records and dataset directly. To merge them, supply the name of the shared attribute on which the merge is based via the ``on`` argument. If the name of attribute in the records and the dimension in the dataset match exactly, you can simply supply it as a string, otherwise supply a dictionary mapping between the attribute and column name. In this case we want to color the choropleth by the `'leaveVoteshare'`, which we define via the `value` argument. By default, the resulting `NdOverlay` of shapes will be indexed by an integer index. To draw the index from values in the dataset instead, you can request one or more indexes using the ``index`` argument. Finally we will declare the coordinate reference system in which this data is stored, which will in most cases be the simple Plate Carree projection. We can then view the choropleth, with each shape colored by the specified value (the percentage who voted to leave the EU):"
"The ``from_records`` function optionally also supports merging the records and dataset directly. To merge them, supply the name of the shared attribute on which the merge is based via the ``on`` argument. If the name of attribute in the records and the dimension in the dataset match exactly, you can simply supply it as a string, otherwise supply a dictionary mapping between the attribute and column name. In this case we want to color the choropleth by the `'leaveVoteshare'`, which we define via the `value` argument.\n",
"\n",
"Additionally we can request one or more indexes using the ``index`` argument. Finally we will declare the coordinate reference system in which this data is stored, which will in most cases be the simple Plate Carree projection. We can then view the choropleth, with each shape colored by the specified value (the percentage who voted to leave the EU):"
]
},
{
@@ -205,10 +206,9 @@
"metadata": {},
"outputs": [],
"source": [
"%%opts Shape (cmap='viridis')\n",
"%output backend='bokeh'\n",
"gv.Shape.from_records(shapes.records(), referendum, on='code', value='leaveVoteshare',\n",
" index=['name', 'regionName'], crs=ccrs.PlateCarree())"
" index=['name', 'regionName']).options(tools=['hover'], width=350, height=500)"
]
},
{
@@ -267,7 +267,7 @@
"outputs": [],
"source": [
"%%opts Polygons [width=600 height=400 tools=['hover'] infer_projection=True] (cmap='tab20')\n",
"gv.Polygons(world, vdims=['continent', 'name', 'pop_est']).redim.range(Latitude=(-60, 90))"
"gv.Polygons(world, vdims=['continent', 'name', 'pop_est'])"
]
},
{
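As a consolidated sketch of the from_records usage described in this notebook (assuming the shapes reader, the referendum table and cartopy.crs imported as ccrs are defined earlier in the user guide), the call merges the shapefile records with the referendum data and returns an object that can be styled directly with .options():

choropleth = gv.Shape.from_records(
    shapes.records(), referendum,   # shapefile records and the dataset to merge
    on='code',                      # shared attribute/dimension the merge is based on
    value='leaveVoteshare',         # value dimension used to color each shape
    index=['name', 'regionName'],   # draw the index from these dataset columns
    crs=ccrs.PlateCarree())         # coordinate reference system of the source data, as described above
choropleth.options(tools=['hover'], width=350, height=500)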
22 changes: 3 additions & 19 deletions geoviews/data/__init__.py
@@ -1,22 +1,6 @@
from __future__ import absolute_import

import param
from holoviews.core.data import Dataset

try:
from . import geopandas # noqa (API import)
except ImportError:
pass
except Exception as e:
param.main.warning('GeoPandas interface failed to import with '
'following error: %s' % e)

try:
from . import iris # noqa (API import)
Dataset.datatype.append('cube')
except ImportError:
pass
except Exception as e:
param.main.warning('Iris interface failed to import with '
'following error: %s' % e)
from .geom_dict import GeomDictInterface # noqa (API import)
from .geopandas import GeoPandasInterface # noqa (API import)
from .iris import CubeInterface # noqa (API import)

161 changes: 161 additions & 0 deletions geoviews/data/geom_dict.py
@@ -0,0 +1,161 @@
import sys
from collections import OrderedDict

import numpy as np
from holoviews.core.data import Interface, DictInterface, MultiInterface
from holoviews.core.dimension import OrderedDict as cyODict, dimension_name
from holoviews.core.util import isscalar

from ..util import geom_types, geom_to_array, geom_length


class GeomDictInterface(DictInterface):

datatype = 'geom_dictionary'

@classmethod
def applies(cls, obj):
if 'shapely' not in sys.modules:
return False
return ((isinstance(obj, cls.types) and 'geometry' in obj
and isinstance(obj['geometry'], geom_types)) or
isinstance(obj, geom_types))

@classmethod
def init(cls, eltype, data, kdims, vdims):
odict_types = (OrderedDict, cyODict)
if kdims is None:
kdims = eltype.kdims
if vdims is None:
vdims = eltype.vdims

dimensions = [dimension_name(d) for d in kdims + vdims]
if isinstance(data, geom_types):
data = {'geometry': data}

if not cls.applies(data):
raise ValueError("GeomDictInterface only handles dictionary types "
"containing a 'geometry' key and shapely geometry "
"value.")

unpacked = []
for d, vals in data.items():
if isinstance(d, tuple):
vals = np.asarray(vals)
if vals.shape == (0,):
for sd in d:
unpacked.append((sd, np.array([], dtype=vals.dtype)))
elif not vals.ndim == 2 and vals.shape[1] == len(d):
raise ValueError("Values for %s dimensions did not have "
"the expected shape.")
else:
for i, sd in enumerate(d):
unpacked.append((sd, vals[:, i]))
elif d not in dimensions:
unpacked.append((d, vals))
else:
if not isscalar(vals):
vals = np.asarray(vals)
if not vals.ndim == 1 and d in dimensions:
raise ValueError('DictInterface expects data for each column to be flat.')
unpacked.append((d, vals))

if not cls.expanded([vs for d, vs in unpacked if d in dimensions and not isscalar(vs)]):
raise ValueError('DictInterface expects data to be of uniform shape.')
if isinstance(data, odict_types):
data.update(unpacked)
else:
data = OrderedDict(unpacked)

return data, {'kdims':kdims, 'vdims':vdims}, {}

@classmethod
def validate(cls, dataset, validate_vdims):
assert len([d for d in dataset.kdims + dataset.vdims
if d.name not in dataset.data]) == 2

@classmethod
def has_holes(cls, dataset):
from shapely.geometry import Polygon, MultiPolygon
geom = dataset.data['geometry']
if isinstance(geom, Polygon) and geom.interiors:
return True
elif isinstance(geom, MultiPolygon):
for g in geom:
if isinstance(g, Polygon) and g.interiors:
return True
return False

@classmethod
def holes(cls, dataset):
from shapely.geometry import Polygon, MultiPolygon
geom = dataset.data['geometry']
if isinstance(geom, Polygon):
return [[[geom_to_array(h) for h in geom.interiors]]]
elif isinstance(geom, MultiPolygon):
return [[[geom_to_array(h) for h in g.interiors] for g in geom]]
return []

@classmethod
def dimension_type(cls, dataset, dim):
name = dataset.get_dimension(dim, strict=True).name
if name in cls.geom_dims(dataset):
return float
values = dataset.data[name]
return type(values) if isscalar(values) else values.dtype.type

@classmethod
def range(cls, dataset, dim):
dim = dataset.get_dimension(dim)
geom_dims = cls.geom_dims(dataset)
if dim in geom_dims:
bounds = dataset.data['geometry'].bounds
if geom_dims.index(dim) == 0:
return bounds[0], bounds[2]
else:
return bounds[1], bounds[3]
else:
return DictInterface.range(dataset, dim)

@classmethod
def length(cls, dataset):
return geom_length(dataset.data['geometry'])

@classmethod
def geom_dims(cls, dataset):
return [d for d in dataset.kdims + dataset.vdims
if d.name not in dataset.data]

@classmethod
def values(cls, dataset, dim, expanded=True, flat=True):
d = dataset.get_dimension(dim)
geom_dims = cls.geom_dims(dataset)
if d in geom_dims:
array = geom_to_array(dataset.data['geometry'])
idx = geom_dims.index(d)
return array[:, idx]
return DictInterface.values(dataset, dim, expanded, flat)

@classmethod
def select(cls, dataset, selection_mask=None, **selection):
raise NotImplementedError('select operation not implemented on geometries')

@classmethod
def iloc(cls, dataset, index):
raise NotImplementedError('iloc operation not implemented for geometries.')

@classmethod
def sample(cls, dataset, samples=[]):
raise NotImplementedError('sampling operation not implemented for geometries.')

@classmethod
def aggregate(cls, dataset, kdims, function, **kwargs):
raise NotImplementedError('aggregate operation not implemented for geometries.')

@classmethod
def concat(cls, datasets, dimensions, vdims):
raise NotImplementedError('concat operation not implemented for geometries.')


MultiInterface.subtypes.insert(0, 'geom_dictionary')
Interface.register(GeomDictInterface)
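A minimal usage sketch (not part of the commit) of what the new interface enables, assuming shapely and geoviews are importable: a dictionary pairing a shapely geometry with scalar value columns can be passed straight to a geometry element, with the missing coordinate dimensions resolved from the geometry itself.

from shapely.geometry import Polygon
import geoviews as gv

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])

# 'Longitude' and 'Latitude' are not keys in the dict, so geom_dims() treats them
# as geometry-backed dimensions; 'value' is stored as an ordinary scalar column.
polys = gv.Polygons([{'geometry': square, 'value': 1}], vdims=['value'])

# range() on a geometry dimension is read off the shapely bounds, as implemented above
print(polys.range('Longitude'))   # -> (0.0, 1.0)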