Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
934 changes: 934 additions & 0 deletions .tmp-srcCSEToperators_colorbar_definition.json-sorted.json

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions src/CSET/cset_workflow/meta/diagnostics/rose-meta.conf
Original file line number Diff line number Diff line change
Expand Up @@ -432,6 +432,18 @@ type=real,real
compulsory=true
sort-key=1pressure8b

[template variables=PLEVEL_TRANSECT_AGGREGATION]
ns=Diagnostics/Pressure
description=Aggregate transects for each time.
Select all options required.
Option1: Aggregate by lead time.
Option2: Aggregate by hour of day.
Option3: Aggregate by validity time.
Option4: All cases aggregated to single profile.
type=python_boolean,python_boolean,python_boolean,python_boolean
compulsory=true
sort-key=1pressure8c

[template variables=SPECTRUM_PLEVEL_FIELD]
ns=Diagnostics/Pressure
description=Create spectrum of specified pressure level fields.
Expand Down
1 change: 1 addition & 0 deletions src/CSET/cset_workflow/rose-suite.conf.example
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ MODERATE_RAIN_PRESENCE_SPATIAL_PLOT=False
!!MULTI_OVERLAY_MASK_VALUE=0.0
!!ONE_TO_ONE=False
PLACEHOLDER_OBS=False
!!PLEVEL_TRANSECT_AGGREGATION=True,True,True,True
!!PLEVEL_TRANSECT_FINISHCOORDS=
!!PLEVEL_TRANSECT_STARTCOORDS=
PLOTTING_PROJECTION=""
Expand Down
25 changes: 25 additions & 0 deletions src/CSET/loaders/transects.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,31 @@ def load(conf: Config):
aggregation=False,
)

# Create a list of case aggregation types.
AGGREGATION_TYPES = ["lead_time", "hour_of_day", "validity_time", "all"]

# Transect aggregation
for model, atype, field in itertools.product(
models, AGGREGATION_TYPES, conf.PRESSURE_LEVEL_FIELDS
):
if conf.PLEVEL_TRANSECT_AGGREGATION[AGGREGATION_TYPES.index(atype)]:
yield RawRecipe(
recipe=f"transect_case_aggregation_{atype}.yaml",
variables={
"VARNAME": field,
"VERTICAL_COORDINATE": "pressure",
"MODEL_NAME": model["name"],
"START_COORDS": conf.PLEVEL_TRANSECT_STARTCOORDS,
"FINISH_COORDS": conf.PLEVEL_TRANSECT_FINISHCOORDS,
"SUBAREA_TYPE": conf.SUBAREA_TYPE if conf.SELECT_SUBAREA else None,
"SUBAREA_EXTENT": conf.SUBAREA_EXTENT
if conf.SELECT_SUBAREA
else None,
},
model_ids=model["id"],
aggregation=True,
)

# Model level fields
if conf.EXTRACT_MLEVEL_TRANSECT:
for model, field in itertools.product(
Expand Down
71 changes: 69 additions & 2 deletions src/CSET/operators/plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -583,6 +583,7 @@ def _plot_and_save_spatial_plot(
# Plot the field.
if method == "contourf":
# Filled contour plot of the field.
logging.info("testing!")
plot = iplt.contourf(cube, cmap=cmap, levels=levels, norm=norm)
elif method == "pcolormesh":
try:
Expand Down Expand Up @@ -653,15 +654,80 @@ def _plot_and_save_spatial_plot(
fontsize=16,
)

# Inset code
import cartopy.feature as cfeature
from cartopy.mpl.geoaxes import GeoAxes
from mpl_toolkits.axes_grid1.inset_locator import inset_axes

axins = inset_axes(
axes,
width="20%",
height="20%",
loc="upper right",
axes_class=GeoAxes,
axes_kwargs=dict(map_projection=ccrs.PlateCarree()),
)

axins.coastlines(resolution="50m")
axins.add_feature(cfeature.BORDERS, linewidth=0.3)

SLat = float(cube.attributes["transect_coords"].split("_")[0])
SLon = float(cube.attributes["transect_coords"].split("_")[1])
ELat = float(cube.attributes["transect_coords"].split("_")[2])
ELon = float(cube.attributes["transect_coords"].split("_")[3])

# Plot points (note: lon, lat order for Cartopy)
axins.plot(SLon, SLat, marker="x", color="green", transform=ccrs.PlateCarree())
axins.plot(ELon, ELat, marker="x", color="red", transform=ccrs.PlateCarree())

# Draw line between them
axins.plot(
[SLon, ELon], [SLat, ELat], color="black", transform=ccrs.PlateCarree()
)

lon_min, lon_max = sorted([SLon, ELon])
lat_min, lat_max = sorted([SLat, ELat])

# Midpoints
lon_mid = (lon_min + lon_max) / 2
lat_mid = (lat_min + lat_max) / 2

# Maximum half-range
half_range = max(lon_max - lon_min, lat_max - lat_min) / 2
if half_range == 0: # points identical → provide small default
half_range = 1

# Set square extent
axins.set_extent(
[
lon_mid - half_range,
lon_mid + half_range,
lat_mid - half_range,
lat_mid + half_range,
],
crs=ccrs.PlateCarree(),
)

# Ensure square aspect
axins.set_aspect("equal")

else:
# Add title.
axes.set_title(title, fontsize=16)

# Adjust padding if spatial plot or transect
if is_transect(cube):
yinfopad = -0.1
ycbarpad = 0.1
else:
yinfopad = -0.05
ycbarpad = 0.042

# Add watermark with min/max/mean. Currently not user togglable.
# In the bbox dictionary, fc and ec are hex colour codes for grey shade.
axes.annotate(
f"Min: {np.min(cube.data):.3g} Max: {np.max(cube.data):.3g} Mean: {np.mean(cube.data):.3g}",
xy=(1, -0.05),
xy=(1, yinfopad),
xycoords="axes fraction",
xytext=(-5, 5),
textcoords="offset points",
Expand All @@ -687,8 +753,9 @@ def _plot_and_save_spatial_plot(

# Add main colour bar.
cbar = fig.colorbar(
plot, orientation="horizontal", location="bottom", pad=0.042, shrink=0.7
plot, orientation="horizontal", location="bottom", pad=ycbarpad, shrink=0.7
)

cbar.set_label(label=f"{cube.name()} ({cube.units})", size=14)
# add ticks and tick_labels for every levels if less than 20 levels exist
if levels is not None and len(levels) < 20:
Expand Down
17 changes: 17 additions & 0 deletions src/CSET/operators/read.py
Original file line number Diff line number Diff line change
Expand Up @@ -374,6 +374,7 @@ def _loading_callback(cube: iris.cube.Cube, field, filename: str) -> iris.cube.C
_proleptic_gregorian_fix(cube)
_lfric_time_callback(cube)
_lfric_forecast_period_standard_name_callback(cube)
_normalise_ML_varname(cube)
return cube


Expand Down Expand Up @@ -430,6 +431,9 @@ def _lfric_normalise_callback(cube: iris.cube.Cube, field, filename):
cube.attributes.pop("timeStamp", None)
cube.attributes.pop("uuid", None)
cube.attributes.pop("name", None)
cube.attributes.pop("source", None)
cube.attributes.pop("analysis_source", None)
cube.attributes.pop("history", None)

# Sort STASH code list.
stash_list = cube.attributes.get("um_stash_source")
Expand Down Expand Up @@ -968,7 +972,20 @@ def _lfric_forecast_period_standard_name_callback(cube: iris.cube.Cube):
"""Add forecast_period standard name if missing."""
try:
coord = cube.coord("forecast_period")
if coord.units != "hours":
cube.coord("forecast_period").convert_units("hours")
if not coord.standard_name:
coord.standard_name = "forecast_period"
except iris.exceptions.CoordinateNotFoundError:
pass


def _normalise_ML_varname(cube: iris.cube.Cube):
"""Fix variable names in ML models to standard names."""
if cube.coords("pressure"):
if cube.name() == "x_wind":
cube.long_name = "zonal_wind_at_pressure_levels"
if cube.name() == "y_wind":
cube.long_name = "meridional_wind_at_pressure_levels"
if cube.name() == "air_temperature":
cube.long_name = "temperature_at_pressure_levels"
42 changes: 42 additions & 0 deletions src/CSET/recipes/level_fields/transect_case_aggregation_all.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
category: Transect
title: $MODEL_NAME Transect of $VARNAME Aggregation over all cases.
description: |
  Extracts a $VERTICAL_COORDINATE transect for $VARNAME between two points,
  averages it over all cases, and plots the resulting single transect.

Start coordinate: `$START_COORDS`
End coordinate: `$FINISH_COORDS`

steps:
- operator: read.read_cubes
file_paths: $INPUT_PATHS
constraint:
operator: constraints.combine_constraints
cell_method_constraint:
operator: constraints.generate_cell_methods_constraint
cell_methods: []
var_constraint:
operator: constraints.generate_var_constraint
varname: $VARNAME
level_constraint:
operator: constraints.generate_level_constraint
coordinate: $VERTICAL_COORDINATE
levels: '*'
subarea_type: $SUBAREA_TYPE
subarea_extent: $SUBAREA_EXTENT

- operator: aggregate.ensure_aggregatable_across_cases

- operator: collapse.collapse
coordinate: time
method: MEAN

- operator: transect.calc_transect
startcoords: $START_COORDS
endcoords: $FINISH_COORDS

- operator: plot.spatial_contour_plot
sequence_coordinate: time

- operator: write.write_cube_to_nc
overwrite: True
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
category: Transect
title: $MODEL_NAME Transect of $VARNAME Aggregation by hour of day.
description: |
  Extracts a $VERTICAL_COORDINATE transect for $VARNAME between two points,
  averages it across cases by hour of day, and plots a transect for each hour.

Start coordinate: `$START_COORDS`
End coordinate: `$FINISH_COORDS`

steps:
- operator: read.read_cubes
file_paths: $INPUT_PATHS
constraint:
operator: constraints.combine_constraints
cell_method_constraint:
operator: constraints.generate_cell_methods_constraint
cell_methods: []
var_constraint:
operator: constraints.generate_var_constraint
varname: $VARNAME
level_constraint:
operator: constraints.generate_level_constraint
coordinate: $VERTICAL_COORDINATE
levels: '*'
subarea_type: $SUBAREA_TYPE
subarea_extent: $SUBAREA_EXTENT

- operator: aggregate.ensure_aggregatable_across_cases

- operator: collapse.collapse_by_hour_of_day
method: MEAN

- operator: transect.calc_transect
startcoords: $START_COORDS
endcoords: $FINISH_COORDS

- operator: plot.spatial_contour_plot
sequence_coordinate: hour

- operator: write.write_cube_to_nc
overwrite: True
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
category: Transect
title: $MODEL_NAME Transect of $VARNAME Aggregation by lead time.
description: |
  Extracts a $VERTICAL_COORDINATE transect for $VARNAME between two points,
  averages it across cases at each lead time, and plots a transect for each
  lead time.

Start coordinate: `$START_COORDS`
End coordinate: `$FINISH_COORDS`

steps:
- operator: read.read_cubes
file_paths: $INPUT_PATHS
constraint:
operator: constraints.combine_constraints
cell_method_constraint:
operator: constraints.generate_cell_methods_constraint
cell_methods: []
var_constraint:
operator: constraints.generate_var_constraint
varname: $VARNAME
level_constraint:
operator: constraints.generate_level_constraint
coordinate: $VERTICAL_COORDINATE
levels: '*'
subarea_type: $SUBAREA_TYPE
subarea_extent: $SUBAREA_EXTENT

- operator: aggregate.ensure_aggregatable_across_cases

- operator: collapse.collapse
coordinate: "forecast_reference_time"
method: MEAN

- operator: transect.calc_transect
startcoords: $START_COORDS
endcoords: $FINISH_COORDS

- operator: plot.spatial_contour_plot
sequence_coordinate: forecast_period

- operator: write.write_cube_to_nc
overwrite: True
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
category: Transect
title: $MODEL_NAME Transect of $VARNAME Aggregation by validity time.
description: |
  Extracts a $VERTICAL_COORDINATE transect for $VARNAME between two points,
  averages it across cases at each validity time, and plots a transect for
  each validity time.

Start coordinate: `$START_COORDS`
End coordinate: `$FINISH_COORDS`

steps:
- operator: read.read_cubes
file_paths: $INPUT_PATHS
constraint:
operator: constraints.combine_constraints
cell_method_constraint:
operator: constraints.generate_cell_methods_constraint
cell_methods: []
var_constraint:
operator: constraints.generate_var_constraint
varname: $VARNAME
level_constraint:
operator: constraints.generate_level_constraint
coordinate: $VERTICAL_COORDINATE
levels: '*'
subarea_type: $SUBAREA_TYPE
subarea_extent: $SUBAREA_EXTENT

- operator: aggregate.ensure_aggregatable_across_cases

- operator: collapse.collapse_by_validity_time
method: MEAN

- operator: transect.calc_transect
startcoords: $START_COORDS
endcoords: $FINISH_COORDS

- operator: plot.spatial_contour_plot
sequence_coordinate: time

- operator: write.write_cube_to_nc
overwrite: True
Loading