Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Further plugin improvements to align with CLI #2049

Draft
wants to merge 3 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion improver/api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@
"OccurrenceBetweenThresholds": "improver.between_thresholds",
"OccurrenceWithinVicinity": "improver.utilities.spatial",
"OpticalFlow": "improver.nowcasting.optical_flow",
"OrographicEnhancement": "improver.orographic_enhancement",
"MetaOrographicEnhancement": "improver.orographic_enhancement",
"OrographicSmoothingCoefficients": "improver.generate_ancillaries.generate_orographic_smoothing_coefficients",
"PercentileConverter": "improver.percentile",
"PhaseChangeLevel": "improver.psychrometric_calculations.psychrometric_calculations",
Expand Down
134 changes: 77 additions & 57 deletions improver/blending/calculate_weights_and_blend.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,12 @@ def __init__(
ynval: Optional[float] = None,
cval: Optional[float] = None,
inverse_ordering: bool = False,
cycletime: Optional[str] = None,
model_id_attr: Optional[str] = None,
record_run_attr: Optional[str] = None,
spatial_weights: bool = False,
fuzzy_length: float = 20000,
attributes_dict: Optional[Dict[str, str]] = None,
) -> None:
"""
Initialise central parameters
Expand Down Expand Up @@ -90,6 +96,38 @@ def __init__(
Option to invert weighting order for non-linear weights plugin
so that higher blend coordinate values get higher weights (eg
if cycle blending over forecast reference time).
cycletime:
The forecast reference time to be used after blending has been
applied, in the format YYYYMMDDTHHMMZ. If not provided, the
blended file takes the latest available forecast reference time
from the input datasets supplied.
model_id_attr:
The name of the dataset attribute to be used to identify the source
model when blending data from different models.
record_run_attr:
The name of the dataset attribute to be used to store model and
cycle sources in metadata, e.g. when blending data from different
models. Requires model_id_attr.
spatial_weights:
If True, this option will result in the generation of spatially
varying weights based on the masks of the data we are blending.
The one dimensional weights are first calculated using the chosen
weights calculation method, but the weights will then be adjusted
spatially based on where there is masked data in the data we are
blending. The spatial weights are calculated using the
SpatiallyVaryingWeightsFromMask plugin.
fuzzy_length:
When calculating spatially varying weights we can smooth the
weights so that areas close to areas that are masked have lower
weights than those further away. This fuzzy length controls the
scale over which the weights are smoothed. The fuzzy length is in
terms of m, the default is 20km. This distance is then converted
into a number of grid squares, which does not have to be an
integer. Assumes the grid spacing is the same in the x and y
directions and raises an error if this is not true. See
SpatiallyVaryingWeightsFromMask for more details.
attributes_dict:
Dictionary describing required changes to attributes after blending
"""
self.blend_coord = blend_coord
self.wts_calc_method = wts_calc_method
Expand All @@ -110,6 +148,30 @@ def __init__(
self.wts_calc_method
)
)

self._cycletime = cycletime
self._model_id_attr = model_id_attr
self._record_run_attr = record_run_attr
self._spatial_weights = spatial_weights
self._fuzzy_length = fuzzy_length
self._attributes_dict = attributes_dict

if record_run_attr is not None and model_id_attr is None:
raise ValueError(
"record_run_attr can only be used with model_id_attr, which "
"has not been provided."
)

if (wts_calc_method == "linear") and cval:
raise RuntimeError("Method: linear does not accept arguments: cval")
if (wts_calc_method == "nonlinear") and any([y0val, ynval]):
raise RuntimeError("Method: non-linear does not accept arguments: y0val, ynval")
if (wts_calc_method == "dict") and wts_dict is None:
raise RuntimeError('Dictionary is required if wts_calc_method="dict"')
if "model" in blend_coord and model_id_attr is None:
raise RuntimeError("model_id_attr must be specified for model blending")
if record_run_attr is not None and model_id_attr is None:
raise RuntimeError("model_id_attr must be specified for blend model recording")

def _calculate_blending_weights(self, cube: Cube) -> Cube:
"""
Expand Down Expand Up @@ -206,54 +268,16 @@ def _remove_zero_weighted_slices(

def process(
self,
cubelist: Union[List[Cube], CubeList],
cycletime: Optional[str] = None,
model_id_attr: Optional[str] = None,
record_run_attr: Optional[str] = None,
spatial_weights: bool = False,
fuzzy_length: float = 20000,
attributes_dict: Optional[Dict[str, str]] = None,
*cubes: Union[Cube, CubeList],
) -> Cube:
"""
Merge a cubelist, calculate appropriate blend weights and compute the
weighted mean. Returns a single cube collapsed over the dimension
given by self.blend_coord.

Args:
cubelist:
List of cubes to be merged and blended
cycletime:
The forecast reference time to be used after blending has been
applied, in the format YYYYMMDDTHHMMZ. If not provided, the
blended file takes the latest available forecast reference time
from the input datasets supplied.
model_id_attr:
The name of the dataset attribute to be used to identify the source
model when blending data from different models.
record_run_attr:
The name of the dataset attribute to be used to store model and
cycle sources in metadata, e.g. when blending data from different
models. Requires model_id_attr.
spatial_weights:
If True, this option will result in the generation of spatially
varying weights based on the masks of the data we are blending.
The one dimensional weights are first calculated using the chosen
weights calculation method, but the weights will then be adjusted
spatially based on where there is masked data in the data we are
blending. The spatial weights are calculated using the
SpatiallyVaryingWeightsFromMask plugin.
fuzzy_length:
When calculating spatially varying weights we can smooth the
weights so that areas close to areas that are masked have lower
weights than those further away. This fuzzy length controls the
scale over which the weights are smoothed. The fuzzy length is in
terms of m, the default is 20km. This distance is then converted
into a number of grid squares, which does not have to be an
integer. Assumes the grid spacing is the same in the x and y
directions and raises an error if this is not true. See
SpatiallyVaryingWeightsFromMask for more details.
attributes_dict:
Dictionary describing required changes to attributes after blending
*cubes:
            One or more cubes to be merged and blended

Returns:
Cube of blended data.
Expand All @@ -266,11 +290,7 @@ def process(
UserWarning: If blending masked data without spatial weights.
This has not been fully tested.
"""
if record_run_attr is not None and model_id_attr is None:
raise ValueError(
"record_run_attr can only be used with model_id_attr, which "
"has not been provided."
)
cubes = as_cubelist(cubes)

# Prepare cubes for weighted blending, including creating custom metadata
# for multi-model blending. The merged cube has a monotonically ascending
Expand All @@ -279,10 +299,10 @@ def process(
merger = MergeCubesForWeightedBlending(
self.blend_coord,
weighting_coord=self.weighting_coord,
model_id_attr=model_id_attr,
record_run_attr=record_run_attr,
model_id_attr=self._model_id_attr,
record_run_attr=self._record_run_attr,
)
cube = merger(cubelist, cycletime=cycletime)
cube = merger(cubes, cycletime=self._cycletime)

if "model" in self.blend_coord:
self.blend_coord = copy(MODEL_BLEND_COORD)
Expand All @@ -296,15 +316,15 @@ def process(
weights = self._calculate_blending_weights(cube)
cube, weights = self._remove_zero_weighted_slices(cube, weights)

if record_run_attr is not None and weights is not None:
if self._record_run_attr is not None and weights is not None:
cube = update_record_run_weights(cube, weights, self.blend_coord)

# Deal with case of only one input cube or non-zero-weighted slice
if len(cube.coord(self.blend_coord).points) == 1:
result = cube
else:
if spatial_weights:
weights = self._update_spatial_weights(cube, weights, fuzzy_length)
if self._spatial_weights:
weights = self._update_spatial_weights(cube, weights, self._fuzzy_length)
elif np.ma.is_masked(cube.data):
# Raise warning if blending masked arrays using non-spatial weights.
warnings.warn(
Expand All @@ -316,8 +336,8 @@ def process(
BlendingPlugin = WeightedBlendAcrossWholeDimension(self.blend_coord)
result = BlendingPlugin(cube, weights=weights)

if record_run_attr is not None:
record_run_coord_to_attr(result, cube, record_run_attr)
if self._record_run_attr is not None:
record_run_coord_to_attr(result, cube, self._record_run_attr)

# Remove custom metadata and and update time-type coordinates. Remove
# non-time-type coordinate that were previously associated with the blend
Expand All @@ -327,9 +347,9 @@ def process(
result,
self.blend_coord,
coords_to_remove=coords_to_remove,
cycletime=cycletime,
attributes_dict=attributes_dict,
model_id_attr=model_id_attr,
cycletime=self._cycletime,
attributes_dict=self._attributes_dict,
model_id_attr=self._model_id_attr,
)

return result
5 changes: 2 additions & 3 deletions improver/cli/blend_cycles_and_realizations.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,5 @@ def process(
for cube in cubes:
cubelist.append(collapse_realizations(cube))

plugin = WeightAndBlend("forecast_reference_time", "linear", y0val=0.5, ynval=0.5,)
cube = plugin(cubelist, cycletime=cycletime,)
return cube
plugin = WeightAndBlend("forecast_reference_time", "linear", y0val=0.5, ynval=0.5, cycletime=cycletime)
return plugin(cubelist)
5 changes: 1 addition & 4 deletions improver/cli/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,5 @@ def process(*cubes: cli.inputcube):
iris.cube.Cube:
A merged cube.
"""
from iris.cube import CubeList

from improver.utilities.cube_manipulation import MergeCubes

return MergeCubes()(CubeList(cubes))
return MergeCubes()(*cubes)
20 changes: 8 additions & 12 deletions improver/cli/nowcast_optical_flow_from_winds.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@
def process(
steering_flow: inputflow,
orographic_enhancement: cli.inputcube,
*cubes: cli.inputcube,
radar_precip_1: cli.inputcube,
radar_precip_2: cli.inputcube,
):
"""Calculate optical flow components as perturbations from the model
steering flow. Advects the older of the two input radar observations to
Expand All @@ -34,24 +35,19 @@ def process(
have names: "grid_eastward_wind" and "grid_northward_wind".
orographic_enhancement (iris.cube.Cube):
Cube containing the orographic enhancement fields.
cubes (tuple of iris.cube.Cube):
Two radar precipitation observation cubes.
radar_precip_1 (iris.cube.Cube):
First radar precipitation observation cube.
radar_precip_2 (iris.cube.Cube):
            Second radar precipitation observation cube.

Returns:
iris.cube.CubeList:
List of u- and v- advection velocities
"""
from iris.cube import CubeList

from improver.nowcasting.optical_flow import (
generate_advection_velocities_from_winds,
)

if len(cubes) != 2:
raise ValueError("Expected 2 radar cubes - got {}".format(len(cubes)))

advection_velocities = generate_advection_velocities_from_winds(
CubeList(cubes), steering_flow, orographic_enhancement
return generate_advection_velocities_from_winds(
radar_precip_1, radar_precip_2, steering_flow, orographic_enhancement
)

return advection_velocities
62 changes: 10 additions & 52 deletions improver/cli/orographic_enhancement.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,44 +4,9 @@
# This file is part of IMPROVER and is released under a BSD 3-Clause license.
# See LICENSE in the root of the repository for full licensing details.
"""Script to calculate orographic enhancement."""


from improver import cli


def extract_and_check(cube, height_value, units):
    """
    Extract a single height level from a cube, raising an error if no
    matching level is available.

    Args:
        cube (iris.cube.Cube):
            Cube from which the height level is to be extracted.
        height_value (float):
            The boundary height to be extracted, in the given units.
        units (str):
            The units of the height level to be extracted.

    Returns:
        iris.cube.Cube:
            A cube containing the extracted height level.

    Raises:
        ValueError: If the height level is not found in the input cube.
    """
    from improver.utilities.cube_extraction import extract_subcube

    # Use a small range constraint rather than an exact value so the
    # comparison is robust to floating point representation of the height.
    height_constraint = [f"height=[{height_value - 0.1}:{height_value + 0.1}]"]
    result = extract_subcube(cube, height_constraint, units=[units])

    if result is None:
        raise ValueError(f"No data available at height {height_value}{units}")
    return result


@cli.clizefy
@cli.with_output
def process(
Expand Down Expand Up @@ -85,20 +50,13 @@ def process(
Precipitation enhancement due to orography on the high resolution
input orography grid.
"""
from improver.orographic_enhancement import OrographicEnhancement
from improver.wind_calculations.wind_components import ResolveWindComponents

constraint_info = (boundary_height, boundary_height_units)

temperature = extract_and_check(temperature, *constraint_info)
humidity = extract_and_check(humidity, *constraint_info)
pressure = extract_and_check(pressure, *constraint_info)
wind_speed = extract_and_check(wind_speed, *constraint_info)
wind_direction = extract_and_check(wind_direction, *constraint_info)

# resolve u and v wind components
u_wind, v_wind = ResolveWindComponents()(wind_speed, wind_direction)
# calculate orographic enhancement
return OrographicEnhancement()(
temperature, humidity, pressure, u_wind, v_wind, orography
)
from improver.orographic_enhancement import MetaOrographicEnhancement

return MetaOrographicEnhancement(boundary_height, boundary_height_units)(
temperature,
humidity,
pressure,
wind_speed,
wind_direction,
orography,
)
6 changes: 2 additions & 4 deletions improver/cli/weighted_blending.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,14 +135,12 @@ def process(
y0val=y0val,
ynval=ynval,
cval=cval,
)

return plugin(
cubes,
cycletime=cycletime,
model_id_attr=model_id_attr,
record_run_attr=record_run_attr,
spatial_weights=spatial_weights_from_mask,
fuzzy_length=fuzzy_length,
attributes_dict=attributes_config,
)

return plugin(cubes)
Loading
Loading