Merge pull request #258 from CSHS-CWRA/update-black
Update black
Zeitsperre authored Feb 1, 2023
2 parents 3414841 + 77a01f5 commit b5c6c0e
Showing 26 changed files with 4 additions and 61 deletions.
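Nearly every deletion below is a blank line that sat directly after a block opener (`def`, `class`, `for`, `if`/`else`); the style applied by black 23.1.0 (its 2023 stable style) strips those lines, which is what this version bump reformats. A minimal before/after sketch, illustrative only and not code from the repository:

```python
# black 22.12.0 left a blank line directly after a block opener alone:
#
#     def aggregate(values):
#
#         total = 0
#         ...
#
# black 23.1.0 removes that leading blank line, accounting for nearly
# every deletion in this commit:
def aggregate(values):
    total = 0
    for value in values:
        total += value
    return total


print(aggregate([1.5, 2.0, 3.5]))  # 7.0
```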
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -28,7 +28,7 @@ repos:
- id: flake8
args: [ '--config=setup.cfg' ]
- repo: https://github.com/psf/black
rev: 22.12.0
rev: 23.1.0
hooks:
- id: black
exclude: ^docs/
5 changes: 0 additions & 5 deletions ravenpy/cli/aggregate_forcings_to_hrus.py
@@ -116,7 +116,6 @@ def aggregate_forcings_to_hrus(
# create all variables in output NC (incl. time) and copy over all attributes
for name, variable in nc_in.variables.items():
if name in variables_to_aggregate + ("time",):

if name != "time":
dims = ["time", "nHRU"]
else:
@@ -131,7 +130,6 @@ def aggregate_forcings_to_hrus(
nc_out[name][:] = nc_in[name][:]

for variable_to_aggregate in variables_to_aggregate:

# read 3D variable
input_var = nc_in.variables[variable_to_aggregate]

@@ -179,7 +177,6 @@ def aggregate_forcings_to_hrus(
agg_var = np.zeros([ntime, nHRU])
new_weights = []
for ihru, hru in enumerate(hrus):

hru = int(hru)

# filter all weights for current HRU
@@ -195,7 +192,6 @@ def aggregate_forcings_to_hrus(

# go through all time steps and zero out weights where grid cell is NODATA
for iii, ii in enumerate(idx):

# bring idx for input_var in appropriate order
idx_input = [slice(0, ntime, 1), slice(0, ntime, 1), slice(0, ntime, 1)]
idx_input[idx_lon_dim] = int(weights_data_lon_lat_ids[ii, 1]) - min_lon # type: ignore
@@ -218,7 +214,6 @@ def aggregate_forcings_to_hrus(

# derive aggregate
for iii, ii in enumerate(idx):

# bring idx for input_var in appropriate order
idx_input = [slice(0, ntime, 1), slice(0, ntime, 1), slice(0, ntime, 1)]
idx_input[idx_lon_dim] = int(weights_data_lon_lat_ids[ii, 1]) - min_lon # type: ignore
2 changes: 0 additions & 2 deletions ravenpy/cli/generate_hrus_from_routing_product.py
@@ -81,7 +81,6 @@ def generate_hrus_from_routing_product(input_file, output):
"""

def assign_hru_attributes(i_sub, i_hru, hru_id, is_lake_HRU):

# fist copy subbasin attribute to hru table
for sub_col in subbasin_info.columns:
if sub_col == "geometry":
@@ -157,7 +156,6 @@ def assign_hru_attributes(i_sub, i_hru, hru_id, is_lake_HRU):
i_hru = 0

for i_sub in range(len(subbasin_info)):

sub_id = subbasin_info["SubId"].values[i_sub]

if subbasin_info["Lake_Cat"].values[i_sub] > 0:
1 change: 0 additions & 1 deletion ravenpy/config/commands.py
@@ -867,7 +867,6 @@ def to_rv(self):

@dataclass
class BasinStateVariablesCommand(RavenCommand):

basin_states: Dict[int, BasinIndexCommand] = field(default_factory=dict)

@classmethod
10 changes: 0 additions & 10 deletions ravenpy/config/rvs.py
@@ -44,7 +44,6 @@


class RV(ABC):

# This header will be prepended to all RV files when they are rendered
tmpl_header = """
###########################################################################################################
@@ -134,7 +133,6 @@ def to_rv(self, s: str, rv_type: str) -> str:


class RVC(RV):

tmpl = """
{hru_states}
@@ -183,7 +181,6 @@ def to_rv(self):


class RVH(RV):

tmpl = """
{subbasins}
@@ -240,7 +237,6 @@ def to_rv(self):


class RVI(RV):

_pre_tmpl = """
:Calendar {calendar}
:RunName {run_name}-{run_index}
@@ -309,7 +305,6 @@ def __init__(self, config):
self._custom_output = []

def configure_from_nc_data(self, fns):

with xr.open_mfdataset(fns, combine="by_coords") as ds:
start, end = ds.indexes["time"][0], ds.indexes["time"][-1]
cal = ds.time.encoding.get("calendar", "standard")
@@ -382,7 +377,6 @@ def evaluation_metrics(self):

@evaluation_metrics.setter
def evaluation_metrics(self, values):

if not is_sequence(values):
values = [values]
ms = []
@@ -496,7 +490,6 @@ def _dt2cf(self, date):
return cftime._cftime.DATE_TYPES[self.calendar.lower()](*date.timetuple()[:6])

def to_rv(self):

# Attributes (not starting with "_")
a = list(filter(lambda x: not x.startswith("_"), self.__dict__))

@@ -530,7 +523,6 @@ def to_rv(self):


class RVP(RV):

# This is expected to be defined by the emulators.
tmpl = """
"""
@@ -583,7 +575,6 @@ def to_rv(self):


class RVT(RV):

tmpl = """
{gauge}
@@ -892,7 +883,6 @@ def to_rv(self):


class OST(RV):

tmpl = """
"""

17 changes: 0 additions & 17 deletions ravenpy/extractors/routing_product.py
@@ -101,7 +101,6 @@ def extract(self, model=None):
subbasin_ids = {int(row["SubId"]) for _, row in self._df.iterrows()}

for _, row in self._df.iterrows():

# HRU
hru_recs.append(self._extract_hru(row))

@@ -257,7 +256,6 @@ def _extract_channel_profile(self, row) -> ChannelProfileCommand:
)

def _extract_hru(self, row) -> HRUsCommand.Record:

aspect = row["HRU_A_mean"]

if self.hru_aspect_convention == "GRASS":
@@ -460,7 +458,6 @@ def keep_only_valid_downsubid_and_obs_nm(g):
grid_weights = []

for _, row in self._routing_data.iterrows():

poly = ogr.CreateGeometryFromWkt(wkt.dumps(row.geometry))

area_basin = poly.Area()
@@ -474,7 +471,6 @@ def keep_only_valid_downsubid_and_obs_nm(g):

for ilat in range(self._nlat):
for ilon in range(self._nlon):

# bounding box around grid-cell (for easy check of proximity)
enve_gridcell = grid_cell_geom_gpd_wkt[ilat][ilon].GetEnvelope()

@@ -528,9 +524,7 @@ def keep_only_valid_downsubid_and_obs_nm(g):
)

def _prepare_input_data(self):

if self._input_is_netcdf:

# Raven numbering is:
#
# [ 1 2 3 ... 1*nlon
@@ -576,7 +570,6 @@ def _prepare_input_data(self):
self._nlat = np.shape(lat_var)[0]

else:

# input data is a shapefile

self._input_data = self._input_data.to_crs(
@@ -589,20 +582,16 @@ def _prepare_input_data(self):
self._nlat = self._input_data.geometry.count() # only for consistency

def _compute_grid_cell_polygons(self):

grid_cell_geom_gpd_wkt: List[List[List[ogr.Geometry]]] = [
[[] for ilon in range(self._nlon)] for ilat in range(self._nlat)
]

if self._input_is_netcdf:

lath = self._lath
lonh = self._lonh

for ilat in range(self._nlat):

for ilon in range(self._nlon):

# -------------------------
# EPSG:3035 needs a swap before and after transform ...
# -------------------------
@@ -652,9 +641,7 @@ def _compute_grid_cell_polygons(self):
grid_cell_geom_gpd_wkt[ilat][ilon] = tmp

else:

for ishape in range(self._nlat):

idx = np.where(self._input_data[self._netcdf_input_field] == ishape)[0]
if len(idx) == 0:
# print(
@@ -681,7 +668,6 @@ def _compute_grid_cell_polygons(self):
return grid_cell_geom_gpd_wkt

def _create_gridcells_from_centers(self, lat, lon):

# create array of edges where (x,y) are always center cells
nlon = np.shape(lon)[1]
nlat = np.shape(lat)[0]
@@ -725,7 +711,6 @@ def _create_gridcells_from_centers(self, lat, lon):
return [lath, lonh]

def _shape_to_geometry(self, shape_from_jsonfile, epsg=None):

# converts shape read from shapefile to geometry
# epsg :: integer EPSG code

@@ -753,7 +738,6 @@ def _shape_to_geometry(self, shape_from_jsonfile, epsg=None):
return poly_shape

def _check_proximity_of_envelops(self, gridcell_envelop, shape_envelop):

# checks if two envelops are in proximity (intersect)

# minX --> env[0]
@@ -771,7 +755,6 @@ def _check_proximity_of_envelops(self, gridcell_envelop, shape_envelop):
def _check_gridcell_in_proximity_of_shape(
self, gridcell_edges, shape_from_jsonfile
):

# checks if a grid cell falls into the bounding box of the shape
# does not mean it intersects but it is a quick and cheap way to
# determine cells that might intersect
2 changes: 1 addition & 1 deletion ravenpy/models/base.py
@@ -621,7 +621,7 @@ def diagnostics(self):
reader = csv.reader(f.readlines())
header = next(reader)
for row in reader:
for (key, val) in zip(header, row):
for key, val in zip(header, row):
if "DIAG" in key:
val = float(val) # type: ignore
out[key].append(val)
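The one wording change in `ravenpy/models/base.py` above is not a blank line: the new black release also drops the redundant parentheses around the tuple target of the `for` statement. A small self-contained illustration (hypothetical data, not repository code):

```python
# Hypothetical header/row pair, only to illustrate the reformat shown above.
header = ["run_name", "DIAG_NASH_SUTCLIFFE"]
row = ["raven-gr4j", "0.73"]

# black 22.12.0 kept:   for (key, val) in zip(header, row):
# black 23.1.0 writes:  for key, val in zip(header, row):
for key, val in zip(header, row):
    print(f"{key} = {val}")
```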
1 change: 0 additions & 1 deletion ravenpy/models/emulators/blended.py
@@ -292,7 +292,6 @@ def derived_parameters(self):


class BLENDED_OST(Ostrich, BLENDED):

ostrich_to_raven_param_conversion = {
"sum_x09_x10": "par_x10",
"sum_x13_x14": "par_x14",
2 changes: 0 additions & 2 deletions ravenpy/models/emulators/canadianshield.py
@@ -292,7 +292,6 @@ def derived_parameters(self):


class CANADIANSHIELD_OST(Ostrich, CANADIANSHIELD):

ostrich_to_raven_param_conversion = {
"par_sum_x05_x06": "par_x06",
"par_sum_x16_x17": "par_x17",
@@ -572,7 +571,6 @@ def __init__(self, *args, **kwds):
self.config.ost.set_tmpl(ost_tmpl)

def derived_parameters(self):

self.config.ost.set_extra_attributes(area=self.config.rvh.hrus[0].area)

# Here we are abusing the pydantic.dataclass type checking
2 changes: 0 additions & 2 deletions ravenpy/models/emulators/gr4jcn.py
@@ -184,7 +184,6 @@ def __init__(self, *args, **kwds):
self.config.rvi.evaporation = "PET_OUDIN"

def derived_parameters(self):

params = cast(GR4JCN.Params, self.config.rvp.params)

self.config.rvp.set_extra_attributes(
@@ -224,7 +223,6 @@ def derived_parameters(self):


class GR4JCN_OST(Ostrich, GR4JCN):

ostrich_to_raven_param_conversion = {
"par_x1": "GR4J_X1",
"par_x2": "GR4J_X2",
1 change: 0 additions & 1 deletion ravenpy/models/emulators/hbvec.py
@@ -273,7 +273,6 @@ def derived_parameters(self):

# TODO: Support index specification and unit changes.
def _monthly_average(self):

if (
self.config.rvi.evaporation == "PET_FROMMONTHLY"
or self.config.rvi.ow_evaporation == "PET_FROMMONTHLY"
1 change: 0 additions & 1 deletion ravenpy/models/emulators/hmets.py
@@ -210,7 +210,6 @@ def derived_parameters(self):


class HMETS_OST(Ostrich, HMETS):

ostrich_to_raven_param_conversion = {
"par_x01": "GAMMA_SHAPE",
"par_x02": "GAMMA_SCALE",
2 changes: 0 additions & 2 deletions ravenpy/models/emulators/hypr.py
@@ -279,7 +279,6 @@ def derived_parameters(self):

# TODO: Support index specification and unit changes.
def _monthly_average(self):

if (
self.config.rvi.evaporation == "PET_FROMMONTHLY"
or self.config.rvi.ow_evaporation == "PET_FROMMONTHLY"
@@ -314,7 +313,6 @@ def _monthly_average(self):


class HYPR_OST(Ostrich, HYPR):

# Since the `par_x05` and `par_x06` values that Ostrich have found are the base-10 logarithms of the
# corresponding Raven values, we perform the transformation here, so that Raven receives 10^par_x05
# and 10^par_x06 for its own `Params.par_x05` and `Params.par_x06`, respectively.
1 change: 0 additions & 1 deletion ravenpy/models/emulators/sacsma.py
@@ -284,7 +284,6 @@ def derived_parameters(self):


class SACSMA_OST(Ostrich, SACSMA):

ostrich_to_raven_param_conversion = {
"pow_x01": "par_x01",
"pow_x02": "par_x02",
1 change: 0 additions & 1 deletion ravenpy/utilities/data_assimilation.py
@@ -430,7 +430,6 @@ def sequential_assimilation(
assim_dates = math.floor(number_days / assim_step_days) * [assim_step_days]

for i, ndays in enumerate(assim_dates):

dates = [sd + dt.timedelta(days=x) for x in range(ndays)]
model.config.rvi.end_date = dates[-1]
model.config.rvi.run_name = f"assim_{i}"
3 changes: 1 addition & 2 deletions ravenpy/utilities/forecasting.py
@@ -32,6 +32,7 @@

# TODO: Complete docstrings


# This function gets model states after running the model (i.e. states at the end of the run).
def get_raven_states(model, workdir=None, **kwds):
"""Get the RAVEN states file (.rvc file) after a model run.
@@ -169,7 +170,6 @@ def perform_climatology_esp(

# We will iterate this for all forecast years
for years in avail_years:

# Replace the forecast period start and end dates with the climatological ESP dates for the
# current member (year)
forecast_date = forecast_date.replace(year=years)
@@ -452,7 +452,6 @@ def make_ESP_hindcast_dataset(

# Repeat the process for all hindcast years required. Could be parallelized by a pro!
for i in included_years[1:]:

qsims_tmp = perform_climatology_esp(
model_name, forecast_date.replace(year=i), forecast_duration, **kwargs
)
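The single addition in the commit is the extra blank line at the top of `ravenpy/utilities/forecasting.py` above: the new style keeps a leading comment attached to the function it documents and enforces the usual two blank lines above that comment instead. A minimal sketch of the pattern, with placeholder names rather than repository code:

```python
import math


# A leading comment like this one is "sticky": it belongs to the function below.
# black 23.1.0 keeps it attached to the def and places the two blank lines above
# the comment, which is the blank line added in forecasting.py.
def circle_area(radius: float) -> float:
    """Return the area of a circle (placeholder example, not repository code)."""
    return math.pi * radius**2


print(circle_area(2.0))
```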