Merge pull request #654 from djhoese/pyproject-precommit-overhaul
djhoese authored Nov 27, 2023
2 parents 73fa52e + 4e14d03 commit b7e8e8f
Showing 52 changed files with 132 additions and 212 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/deploy-sdist.yaml
@@ -18,7 +18,7 @@ jobs:
shell: bash -l {0}
run: |
python -m pip install -U build pip
python -m build --sdist
python -m build
- name: Publish package to PyPI
# upload to PyPI on every release for a tag starting with 'v'
20 changes: 5 additions & 15 deletions .pre-commit-config.yaml
@@ -1,22 +1,12 @@
exclude: '^$'
fail_fast: false
repos:
- repo: https://github.com/psf/black
rev: 23.11.0 # Replace by any tag/version: https://github.com/psf/black/tags
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.1.6'
hooks:
- id: black
language_version: python3 # Should be a command that runs python3.6+
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
language_version: python3
- repo: https://github.com/PyCQA/flake8
rev: 6.1.0
hooks:
- id: flake8
additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe]
args: [--max-complexity, "10"]
- id: ruff
args: ["--fix"]
- id: ruff-format
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
4 changes: 2 additions & 2 deletions MANIFEST.in
@@ -2,8 +2,8 @@ recursive-include doc/source *
recursive-include polar2grid/tests/etc *
include polar2grid/grids/grids.yaml
recursive-include polar2grid/fonts *
recursive-include etc *
recursive-include polar2grid/etc *
include LICENSE.txt
include README.rst
recursive-exclude doc/source/_static/example_images *.png *.jpeg *.jpg
recursive-exclude doc/source/_static/example_images *.png *.jpg
recursive-exclude doc/source/_static *.psd *.xcf
9 changes: 5 additions & 4 deletions create_conda_software_bundle.sh
@@ -63,7 +63,6 @@ cd ${SB_NAME} || oops "Couldn't change to software bundle directory"
echo "Copying user grid directory to software bundle"
cp -r ${BUNDLE_SCRIPTS_DIR}/grid_configs .
cp -r ${BUNDLE_SCRIPTS_DIR}/example_enhancements .
ln -s etc/polar2grid/colormaps .

mkdir -p gshhg_data || oops "Could not make GSHHG data directory"
pushd gshhg_data
@@ -75,10 +74,13 @@ chmod 444 `find . -type f` || oops "Could not make GSHHG shapefiles readable by
popd

echo "Copying bash scripts to software bundle bin"
cd "$SB_NAME"
cd "${SB_NAME}"
mkdir -p bin || oops "Couldn't make 'bin' directory"
mkdir -p etc || oops "Couldn't make 'etc' directory"
ln -s ../libexec/python_runtime/etc/polar2grid etc/polar2grid
# expand glob pattern
P2G_ETC_DIR=$(echo libexec/python_runtime/lib/python*/site-package/polar2grid/etc)
ln -s ../${P2G_ETC_DIR} etc/polar2grid || oops "Couldn't link to package etc directory"
ln -s etc/polar2grid/colormaps . || oops "Couldn't create softlink for colormaps directory"
cp -P ${BUNDLE_SCRIPTS_DIR}/*.sh ${BUNDLE_SCRIPTS_DIR}/*.txt bin/ || echo "Couldn't copy scripts to bin/ directory"
# clean up readmes and add release notes
if [[ $PROJECT == "P2G" ]]; then
@@ -101,7 +103,6 @@ PSP_CONFIG_FILE=etc/polar2grid/pyspectral.yaml PSP_DATA_ROOT=pyspectral_data PSP
echo "Downloading Satpy auxiliary data..."
AUX_CACHE_DIR="${CACHE_DIR}/satpy_aux_data_${USER}"
SATPY_DATA_DIR="${SB_NAME}/share/polar2grid/data"
P2G_ETC_DIR="${SB_NAME}/libexec/python_runtime/etc/polar2grid"
SATPY_CONFIG_PATH="${P2G_ETC_DIR}" \
${PYTHON_RUNTIME_BASE}/bin/satpy_retrieve_all_aux_data \
--data-dir ${AUX_CACHE_DIR} || oops "Could not download Satpy auxiliary data"
2 changes: 1 addition & 1 deletion doc/source/toctree_filter.py
@@ -49,7 +49,7 @@ def filter_entries(self, entries):
for e in entries:
m = self.hasPat.match(e)
if m is not None:
if not m.groups()[0] in excl:
if m.groups()[0] not in excl:
filtered.append(m.groups()[1])
else:
filtered.append(e)
14 changes: 8 additions & 6 deletions polar2grid/_glue_argparser.py
@@ -163,7 +163,7 @@ def _separate_scene_init_load_args(self, reader_subgroups) -> None:
def _parse_reader_args(self, reader_subgroups: list) -> tuple[dict, dict]:
reader_args = {}
load_args = {}
for reader_name, (sgrp1, sgrp2) in zip(self._reader_names, reader_subgroups):
for reader_name, (sgrp1, sgrp2) in zip(self._reader_names, reader_subgroups, strict=True):
if sgrp1 is None:
continue
rargs = _args_to_dict(self._args, sgrp1._group_actions)
@@ -175,7 +175,7 @@ def _parse_one_writer_args(self, writer_subgroups: list) -> dict:
def _parse_one_writer_args(self, writer_subgroups: list) -> dict:
writer_names: list[str] = self._writer_args["writers"]
writer_specific_args = {}
for writer_name, (sgrp1, sgrp2) in zip(writer_names, writer_subgroups):
for writer_name, (sgrp1, sgrp2) in zip(writer_names, writer_subgroups, strict=True):
wargs = _args_to_dict(self._args, sgrp1._group_actions)
if sgrp2 is not None:
wargs.update(_args_to_dict(self._args, sgrp2._group_actions))
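A quick, self-contained illustration of the `strict=True` behavior added to the `zip()` calls in this file (Python 3.10+): mismatched iterable lengths now raise instead of silently truncating. This is a generic sketch with made-up values, not Polar2Grid code.

```python
readers = ["viirs_sdr", "abi_l1b"]
subgroups = [("grp1a", "grp1b")]  # one entry too few

# Plain zip() silently stops at the shorter iterable.
print(list(zip(readers, subgroups)))  # [('viirs_sdr', ('grp1a', 'grp1b'))]

# strict=True raises ValueError on a length mismatch, surfacing bugs where
# readers and their argument subgroups get out of sync.
try:
    list(zip(readers, subgroups, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is shorter than argument 1
```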
@@ -283,8 +283,9 @@ def _validate_reader_writer_args(parser, args, use_polar2grid_defaults):
parser.print_usage()
parser.exit(
1,
"\nERROR: Reader must be provided (-r flag).\n"
"Supported readers:\n\t{}\n".format("\n\t".join(_supported_readers(use_polar2grid_defaults))),
"\nERROR: Reader must be provided (-r flag).\n" "Supported readers:\n\t{}\n".format(
"\n\t".join(_supported_readers(use_polar2grid_defaults))
),
)
elif len(args.readers) > 1:
parser.print_usage()
@@ -297,8 +298,9 @@
parser.print_usage()
parser.exit(
1,
"\nERROR: Writer must be provided (-w flag) with one or more writer.\n"
"Supported writers:\n\t{}\n".format("\n\t".join(_supported_writers(use_polar2grid_defaults))),
"\nERROR: Writer must be provided (-w flag) with one or more writer.\n" "Supported writers:\n\t{}\n".format(
"\n\t".join(_supported_writers(use_polar2grid_defaults))
),
)
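The reformatted `parser.exit()` calls above lean on implicit concatenation of adjacent string literals; a minimal reminder (with a placeholder reader name) that the resulting message is unchanged:

```python
# Adjacent string literals are joined into a single string before .format()
# is applied, so the error text is identical to the pre-reformatting version.
msg = "\nERROR: Reader must be provided (-r flag).\n" "Supported readers:\n\t{}\n".format("viirs_sdr")
assert msg == "\nERROR: Reader must be provided (-r flag).\nSupported readers:\n\tviirs_sdr\n"
```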


6 changes: 3 additions & 3 deletions polar2grid/add_coastlines.py
@@ -351,7 +351,7 @@ def main(argv=sys.argv[1:]):
# gather all options into a single dictionary that we can pass to pycoast
pycoast_options = _args_to_pycoast_dict(args)
colorbar_kwargs = _args_to_colorbar_kwargs(args) if args.add_colorbar else {}
for input_tiff, output_filename in zip(args.input_tiff, args.output_filename):
for input_tiff, output_filename in zip(args.input_tiff, args.output_filename, strict=True):
_process_one_image(input_tiff, output_filename, pycoast_options, args.shapes_dir, colorbar_kwargs)
return 0

@@ -484,10 +484,10 @@ def find_font(font_name, size):
try:
font = ImageFont.truetype(font_name, size)
return font.path
except IOError:
except IOError as err:
font_path = get_resource_filename("polar2grid.fonts", font_name)
if not os.path.exists(font_path):
raise ValueError("Font path does not exist: {}".format(font_path))
raise ValueError("Font path does not exist: {}".format(font_path)) from err
return font_path
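The `raise ... from err` additions here (and in `dtype.py` and `resample_decisions.py` below) chain the original exception onto the new one instead of discarding it. A generic sketch of the pattern with a hypothetical helper, not Polar2Grid code:

```python
def load_font(name: str) -> str:
    # Hypothetical lookup: re-raise a failure as a friendlier error while
    # keeping the original exception attached as __cause__.
    fonts = {"Vera.ttf": "/usr/share/fonts/Vera.ttf"}
    try:
        return fonts[name]
    except KeyError as err:
        raise ValueError(f"Font path does not exist: {name}") from err


try:
    load_font("missing.ttf")
except ValueError as exc:
    # The chained KeyError is preserved for debugging instead of being lost.
    print(type(exc.__cause__).__name__)  # KeyError
```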


6 changes: 6 additions & 0 deletions polar2grid/compare.py
@@ -105,6 +105,12 @@ def isclose_array(array1, array2, atol=0.0, rtol=0.0, margin_of_error=0.0, **kwa
Args:
array1: numpy array for comparison
array2: numpy array for comparison
atol: absolute tolerance (see numpy ``isclose``)
rtol: relative tolerance (see numpy ``isclose``)
margin_of_error: percentage of pixels that can be different and still
be considered a passing amount.
kwargs: Unused.
Returns:
1 if more than margin_of_error pixels are different, 0 otherwise.
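The expanded docstring above describes the comparison knobs; here is a rough sketch of how `atol`/`rtol` and a pixel-percentage margin of error could combine. It illustrates the documented behavior only and is not the actual `compare.py` implementation.

```python
import numpy as np


def mismatch_exceeds_margin(array1, array2, atol=0.0, rtol=0.0, margin_of_error=0.0):
    """Return 1 if more pixels differ than margin_of_error (percent) allows, else 0."""
    close = np.isclose(array1, array2, atol=atol, rtol=rtol)
    percent_different = 100.0 * (~close).sum() / close.size
    return int(percent_different > margin_of_error)


a = np.array([1.0, 2.0, 3.0, 4.0])
b = np.array([1.0, 2.0, 3.0, 4.5])
print(mismatch_exceeds_margin(a, b, atol=0.1, margin_of_error=30.0))  # 0 (25% of pixels differ)
print(mismatch_exceeds_margin(a, b, atol=0.1, margin_of_error=10.0))  # 1 (25% > 10%)
```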
8 changes: 4 additions & 4 deletions polar2grid/core/dtype.py
@@ -103,8 +103,8 @@ def str_to_dtype(dtype_str):

try:
return str2dtype[dtype_str]
except KeyError:
raise ValueError("Not a valid data type string: %s" % (dtype_str,))
except KeyError as err:
raise ValueError("Not a valid data type string: %s" % (dtype_str,)) from err


def dtype_to_str(numpy_dtype):
@@ -114,8 +114,8 @@ def dtype_to_str(numpy_dtype):

try:
return dtype2str[np.dtype(numpy_dtype).type]
except KeyError:
raise ValueError("Unsupported np data type: %r" % (numpy_dtype,))
except KeyError as err:
raise ValueError("Unsupported np data type: %r" % (numpy_dtype,)) from err


def clip_to_data_type(data, data_type):
28 files renamed without changes.
6 changes: 1 addition & 5 deletions polar2grid/filters/_utils.py
@@ -36,11 +36,7 @@
from typing import Union

from pyresample.boundary import AreaBoundary, AreaDefBoundary, Boundary
from pyresample.geometry import (
AreaDefinition,
SwathDefinition,
get_geostationary_bounding_box,
)
from pyresample.geometry import AreaDefinition, SwathDefinition, get_geostationary_bounding_box
from pyresample.spherical import SphPolygon

logger = logging.getLogger(__name__)
12 changes: 4 additions & 8 deletions polar2grid/glue.py
@@ -51,11 +51,7 @@
from satpy.writers import compute_writer_results

from polar2grid._glue_argparser import GlueArgumentParser, get_p2g_defaults_env_var
from polar2grid.core.script_utils import (
create_exc_handler,
rename_log_file,
setup_logging,
)
from polar2grid.core.script_utils import create_exc_handler, rename_log_file, setup_logging
from polar2grid.filters import filter_scene
from polar2grid.readers._base import ReaderProxyBase
from polar2grid.resample import resample_scene
@@ -150,7 +146,7 @@ def _write_scene_with_writer(scn: Scene, writer_name: str, data_ids: list[DataID
res = scn.save_datasets(writer=writer_name, compute=False, datasets=data_ids, **wargs)
if res and isinstance(res[0], (tuple, list)):
# list of (dask-array, file-obj) tuples
to_save.extend(zip(*res))
to_save.extend(zip(*res, strict=True))
else:
# list of delayed objects
to_save.extend(res)
@@ -472,12 +468,12 @@ def _persist_swath_definition_in_scene(scn: Scene) -> None:
if not to_persist_swath_defs:
return scn

to_update_data_arrays, to_persist_lonlats = zip(*to_persist_swath_defs.values())
to_update_data_arrays, to_persist_lonlats = zip(*to_persist_swath_defs.values(), strict=True)
LOG.info("Loading swath geolocation into memory...")
persisted_lonlats = dask.persist(*to_persist_lonlats)
persisted_swath_defs = [SwathDefinition(plons, plats) for plons, plats in persisted_lonlats]
new_scn = scn.copy()
for arrays_to_update, persisted_swath_def in zip(to_update_data_arrays, persisted_swath_defs):
for arrays_to_update, persisted_swath_def in zip(to_update_data_arrays, persisted_swath_defs, strict=True):
for array_to_update in arrays_to_update:
array_to_update.attrs["area"] = persisted_swath_def
new_scn._datasets[array_to_update.attrs["_satpy_id"]] = array_to_update
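The geolocation-persisting hunk above relies on `dask.persist`: evaluate the lon/lat graphs once, keep the results in memory, and reuse them for the new `SwathDefinition`s. A small generic sketch of that idea with made-up array sizes, not the actual glue.py code:

```python
import dask
import dask.array as da

# Stand-ins for lazy swath longitude/latitude arrays.
lons = da.random.uniform(-180, 180, size=(1000, 3200), chunks=(250, 3200))
lats = da.random.uniform(-90, 90, size=(1000, 3200), chunks=(250, 3200))

# dask.persist computes the graphs once and returns arrays backed by
# in-memory results, so later resampling steps don't recompute geolocation.
plons, plats = dask.persist(lons, lats)
print(type(plons), plons.shape)
```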
6 changes: 4 additions & 2 deletions polar2grid/resample/resample_decisions.py
@@ -94,6 +94,8 @@ def find_match(self, **query_dict):

try:
return super().find_match(**query_dict)
except KeyError:
except KeyError as err:
# give a more understandable error message
raise KeyError(f"No resampling configuration found for {query_dict['area_type']=} | {query_dict['name']=}")
raise KeyError(
f"No resampling configuration found for {query_dict['area_type']=} | {query_dict['name']=}"
) from err
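The clarified `KeyError` above uses the f-string `=` specifier (Python 3.8+), which prints both the expression text and its value. A tiny illustration with placeholder values:

```python
query_dict = {"area_type": "swath", "name": "C07"}
message = f"No resampling configuration found for {query_dict['area_type']=} | {query_dict['name']=}"
print(message)
# No resampling configuration found for query_dict['area_type']='swath' | query_dict['name']='C07'
```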
4 changes: 2 additions & 2 deletions polar2grid/tests/etc/enhancements/generic.yaml
@@ -20,7 +20,7 @@ enhancements:
method: !!python/name:polar2grid.enhancements.palettize
kwargs:
palettes:
- filename: $POLAR2GRID_HOME/../etc/colormaps/amsr2_36h.cmap
- filename: $POLAR2GRID_HOME/../polar2grid/etc/colormaps/amsr2_36h.cmap
min_value: 180
max_value: 280
test_p2g_palettize2:
@@ -62,6 +62,6 @@ enhancements:
method: !!python/name:polar2grid.enhancements.colorize
kwargs:
palettes:
- filename: $POLAR2GRID_HOME/../etc/colormaps/amsr2_36h.cmap
- filename: $POLAR2GRID_HOME/../polar2grid/etc/colormaps/amsr2_36h.cmap
min_value: 180
max_value: 280
1 change: 1 addition & 0 deletions polar2grid/tests/test_enhancements.py
@@ -58,6 +58,7 @@ def setup_method(self):
add_polar2grid_config_paths()
# add test specific configs
curr_path = satpy.config.get("config_path")
print(curr_path, TEST_ETC_DIR)
satpy.config.set(config_path=[TEST_ETC_DIR] + curr_path)

def teardown_method(self):
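The test setup change above prepends the test `etc` directory to Satpy's config search path so test configs win over installed ones. A minimal sketch of that pattern, assuming a local test directory path:

```python
import satpy

TEST_ETC_DIR = "/path/to/polar2grid/tests/etc"  # hypothetical location

# Prepend the test configs so they take priority over the installed ones.
current_paths = satpy.config.get("config_path")
satpy.config.set(config_path=[TEST_ETC_DIR] + current_paths)
print(satpy.config.get("config_path"))
```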
2 changes: 1 addition & 1 deletion polar2grid/tests/test_utils/test_convert_grids_conf.py
@@ -64,5 +64,5 @@ def test_conf_conversion(tmpdir, capsys, conf_content, num_areas, area_types):
s.seek(0)
areas = parse_area_file([s])
assert len(areas) == num_areas
for area_obj, area_type in zip(areas, area_types):
for area_obj, area_type in zip(areas, area_types, strict=True):
assert isinstance(area_obj, area_type)
4 changes: 2 additions & 2 deletions polar2grid/utils/config.py
@@ -32,8 +32,8 @@
def get_polar2grid_etc():
p2g_pkg_location = impr.files("polar2grid")
if _is_editable_installation():
return str(p2g_pkg_location.parent / "etc")
return os.path.join(sys.prefix, "etc", "polar2grid")
return str(p2g_pkg_location / "etc")
return p2g_pkg_location / "etc" / "polar2grid"


def _is_editable_installation():
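`config.py` above now locates the packaged `etc` directory with `importlib.resources` instead of `sys.prefix`. A standalone sketch of that lookup; the directory layout shown is an assumption and requires polar2grid to be installed:

```python
import importlib.resources as impr

# files() returns a Traversable for the installed package directory;
# joining path parts mirrors how get_polar2grid_etc() builds the etc path.
pkg_root = impr.files("polar2grid")
etc_dir = pkg_root / "etc"
print(etc_dir, etc_dir.is_dir())
```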
6 changes: 3 additions & 3 deletions polar2grid/utils/legacy_compat.py
@@ -129,7 +129,7 @@ def remove_unknown_user_products(
"""
satpy_names = self.convert_p2g_name_to_satpy(self._user_products)
new_user_products = []
for user_name, satpy_name in zip(self._user_products, satpy_names):
for user_name, satpy_name in zip(self._user_products, satpy_names, strict=True):
# convert DataID/DataQuery to string
satpy_name = satpy_name if isinstance(satpy_name, str) else satpy_name["name"]
if satpy_name not in known_dataset_names:
@@ -212,7 +212,7 @@ def apply_p2g_name_to_scene(
"""
all_ids = list(scn.keys())
all_p2g_names = list(self.convert_satpy_to_p2g_name(all_ids))
for data_id, p2g_name in zip(all_ids, all_p2g_names):
for data_id, p2g_name in zip(all_ids, all_p2g_names, strict=True):
if p2g_name is None:
# the Satpy ID doesn't have a Polar2Grid compatible name
logger.debug("Satpy DataID %s does not have a compatible polar2grid name.", data_id)
@@ -225,7 +225,7 @@ def available_product_names(
) -> tuple[list[str], list[str], list[str]]:
"""Get separate lists of available Satpy products and Polar2Grid products."""
available_ids_as_p2g_names = list(self.convert_satpy_to_p2g_name(available_satpy_ids, all_p2g_products))
satpy_id_to_p2g_name = dict(zip(available_satpy_ids, available_ids_as_p2g_names))
satpy_id_to_p2g_name = dict(zip(available_satpy_ids, available_ids_as_p2g_names, strict=True))
available_p2g_names = []
available_custom_names = []
available_satpy_names = []
8 changes: 1 addition & 7 deletions polar2grid/writers/binary.py
@@ -42,13 +42,7 @@
import xarray as xr
from satpy.writers import ImageWriter, get_enhanced_image

from polar2grid.core.dtype import (
NUMPY_DTYPE_STRS,
clip_to_data_type,
dtype_to_str,
int_or_float,
str_to_dtype,
)
from polar2grid.core.dtype import NUMPY_DTYPE_STRS, clip_to_data_type, dtype_to_str, int_or_float, str_to_dtype
from polar2grid.core.script_utils import NumpyDtypeList
from polar2grid.utils.legacy_compat import convert_p2g_pattern_to_satpy

1 change: 1 addition & 0 deletions polar2grid/writers/hdf5.py
@@ -121,6 +121,7 @@ def iter_by_area(self, datasets: list[xr.DataArray]):
Args:
datasets (list[xr.DataArray]): A list of dataArray objects stored in Scene.
Returns:
dictionary: a dictionary of {AreaDef: list[xr.DataArray]}
"""
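The docstring fix above says `iter_by_area` groups datasets by their `AreaDefinition`; a generic sketch of that kind of grouping with simplified stand-in objects, not the writer's actual code:

```python
from collections import defaultdict

# Stand-ins: each "dataset" records the name of the area it is mapped to.
datasets = [
    {"name": "C07", "area": "goes_east_1km"},
    {"name": "C13", "area": "goes_east_2km"},
    {"name": "C14", "area": "goes_east_2km"},
]

by_area = defaultdict(list)
for data_arr in datasets:
    by_area[data_arr["area"]].append(data_arr["name"])

print(dict(by_area))
# {'goes_east_1km': ['C07'], 'goes_east_2km': ['C13', 'C14']}
```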
