diff --git a/bluemath_tk/additive/greensurge.py b/bluemath_tk/additive/greensurge.py index f88aba7..e034924 100644 --- a/bluemath_tk/additive/greensurge.py +++ b/bluemath_tk/additive/greensurge.py @@ -1,7 +1,7 @@ +import struct import warnings from datetime import datetime -from functools import partial -from multiprocessing import Pool, cpu_count +from functools import lru_cache from typing import List, Tuple import cartopy.crs as ccrs @@ -12,15 +12,12 @@ from matplotlib.axes import Axes from matplotlib.figure import Figure from matplotlib.path import Path -from netCDF4 import Dataset from tqdm import tqdm from ..core.operations import get_degrees_from_uv from ..core.plotting.colors import hex_colors_land, hex_colors_water from ..core.plotting.utils import join_colormaps -# from ..topo_bathy.mesh_utils import read_adcirc_grd - def read_adcirc_grd(grd_file: str) -> Tuple[np.ndarray, np.ndarray, List[str]]: """ @@ -48,7 +45,7 @@ def read_adcirc_grd(grd_file: str) -> Tuple[np.ndarray, np.ndarray, List[str]]: """ with open(grd_file, "r") as f: - _header0 = f.readline() + f.readline() # Skip header line header1 = f.readline() header_nums = list(map(float, header1.split())) nelmts = int(header_nums[0]) @@ -61,286 +58,66 @@ def read_adcirc_grd(grd_file: str) -> Tuple[np.ndarray, np.ndarray, List[str]]: return Nodes, Elmts, lines -def calculate_edges(Elmts: np.ndarray) -> np.ndarray: +def get_regular_grid( + node_computation_longitude: np.ndarray, + node_computation_latitude: np.ndarray, + node_computation_elements: np.ndarray, + factor: float = 10.0, + margin_deg: float = 0, +) -> Tuple[np.ndarray, np.ndarray]: """ - Calculates the unique edges from the given triangle elements. + Generate a regular lon/lat grid slightly larger than the bounds of the node coordinates. + Grid resolution is derived from the smallest element size scaled by a factor. Parameters ---------- - Elmts : np.ndarray - A 2D array of shape (nelmts, 3) containing the node indices for each triangle element. + node_computation_longitude : np.ndarray + 1D array of longitudes for the nodes. + node_computation_latitude : np.ndarray + 1D array of latitudes for the nodes. + node_computation_elements : np.ndarray + 2D array of indices defining the triangular elements. + factor : float, optional + Resolution scaling factor: higher means coarser grid. + margin_deg : float, optional + Margin to add (in degrees) to each side of the bounding box. Returns ------- - np.ndarray - A 2D array of shape (n_edges, 2) containing the unique edges, - each represented by a pair of node indices. - """ - - perc = 0 - Links = np.zeros((len(Elmts) * 3, 2), dtype=int) - tel = 0 - for ii, elmt in enumerate(Elmts): - if round(100 * (ii / len(Elmts))) != perc: - perc = round(100 * (ii / len(Elmts))) - Links[tel] = [elmt[0], elmt[1]] - tel += 1 - Links[tel] = [elmt[1], elmt[2]] - tel += 1 - Links[tel] = [elmt[2], elmt[0]] - tel += 1 - - Links_sorted = np.sort(Links, axis=1) - Links_unique = np.unique(Links_sorted, axis=0) - - return Links_unique - - -def adcirc2DFlowFM(Path_grd: str, netcdf_path: str) -> None: - """ - Converts ADCIRC grid data to a NetCDF Delft3DFM format. - - Parameters - ---------- - Path_grd : str - Path to the ADCIRC grid file. - netcdf_path : str - Path where the resulting NetCDF file will be saved. - - Examples - -------- - >>> adcirc2DFlowFM("path/to/grid.grd", "path/to/output.nc") - >>> print("NetCDF file created successfully.") + lon_grid : np.ndarray + 1D array of longitudes defining the grid. 
+ lat_grid : np.ndarray + 1D array of latitudes defining the grid. """ - Nodes_full, Elmts_full, lines = read_adcirc_grd(Path_grd) - NODE = Nodes_full[:, [1, 2, 3]] - EDGE = Elmts_full[:, [2, 3, 4]] - edges = calculate_edges(EDGE) + 1 - EDGE_S = np.sort(EDGE, axis=1) - EDGE_S = EDGE_S[EDGE_S[:, 2].argsort()] - EDGE_S = EDGE_S[EDGE_S[:, 1].argsort()] - face_node = np.array(EDGE_S[EDGE_S[:, 0].argsort()], dtype=np.int32) - edge_node = np.zeros([len(edges), 2], dtype="i4") - edge_face = np.zeros([len(edges), 2], dtype=np.double) - edge_x = np.zeros(len(edges)) - edge_y = np.zeros(len(edges)) - - edge_node = np.array( - edge_node, - dtype=np.int32, - ) - - face_x = ( - NODE[EDGE[:, 0].astype(int), 0] - + NODE[EDGE[:, 1].astype(int), 0] - + NODE[EDGE[:, 2].astype(int), 0] - ) / 3 - face_y = ( - NODE[EDGE[:, 0].astype(int), 1] - + NODE[EDGE[:, 1].astype(int), 1] - + NODE[EDGE[:, 2].astype(int), 1] - ) / 3 - - edge_x = (NODE[edges[:, 0] - 1, 0] + NODE[edges[:, 1] - 1, 0]) / 2 - edge_y = (NODE[edges[:, 0] - 1, 1] + NODE[edges[:, 1] - 1, 1]) / 2 - - face_node_dict = {} - - for idx, face in enumerate(face_node): - for node in face: - if node not in face_node_dict: - face_node_dict[node] = [] - face_node_dict[node].append(idx) - - for i, edge in enumerate(edges): - node1, node2 = map(int, edge) - - edge_node[i, 0] = node1 - edge_node[i, 1] = node2 - - faces_node1 = face_node_dict.get(node1 - 1, []) - faces_node2 = face_node_dict.get(node2 - 1, []) - - faces = list(set(faces_node1) & set(faces_node2)) + # Bounding box with margin + lon_min = node_computation_longitude.min() - margin_deg + lon_max = node_computation_longitude.max() + margin_deg + lat_min = node_computation_latitude.min() - margin_deg + lat_max = node_computation_latitude.max() + margin_deg - if len(faces) < 2: - edge_face[i, 0] = faces[0] + 1 if faces else 0 - edge_face[i, 1] = 0 - else: - edge_face[i, 0] = faces[0] + 1 - edge_face[i, 1] = faces[1] + 1 - - face_x = np.array(face_x, dtype=np.double) - face_y = np.array(face_y, dtype=np.double) - - node_x = np.array(NODE[:, 0], dtype=np.double) - node_y = np.array(NODE[:, 1], dtype=np.double) - node_z = np.array(NODE[:, 2], dtype=np.double) - - face_x_bnd = np.array(node_x[face_node], dtype=np.double) - face_y_bnd = np.array(node_y[face_node], dtype=np.double) - - num_nodes = NODE.shape[0] - num_faces = EDGE.shape[0] - num_edges = edges.shape[0] - - with Dataset(netcdf_path, "w", format="NETCDF4") as dataset: - _mesh2d_nNodes = dataset.createDimension("mesh2d_nNodes", num_nodes) - _mesh2d_nEdges = dataset.createDimension("mesh2d_nEdges", num_edges) - _mesh2d_nFaces = dataset.createDimension("mesh2d_nFaces", num_faces) - _mesh2d_nMax_face_nodes = dataset.createDimension("mesh2d_nMax_face_nodes", 3) - _two_dim = dataset.createDimension("Two", 2) - - mesh2d_node_x = dataset.createVariable( - "mesh2d_node_x", "f8", ("mesh2d_nNodes",) - ) - mesh2d_node_x.standard_name = "projection_x_coordinate" - mesh2d_node_x.long_name = "x-coordinate of mesh nodes" - - mesh2d_node_y = dataset.createVariable( - "mesh2d_node_y", "f8", ("mesh2d_nNodes",) - ) - mesh2d_node_y.standard_name = "projection_y_coordinate" - mesh2d_node_y.long_name = "y-coordinate of mesh nodes" - - mesh2d_node_z = dataset.createVariable( - "mesh2d_node_z", "f8", ("mesh2d_nNodes",) - ) - mesh2d_node_z.units = "m" - mesh2d_node_z.standard_name = "altitude" - mesh2d_node_z.long_name = "z-coordinate of mesh nodes" - - mesh2d_edge_x = dataset.createVariable( - "mesh2d_edge_x", "f8", ("mesh2d_nEdges",) - ) - mesh2d_edge_x.standard_name = 
"projection_x_coordinate" - mesh2d_edge_x.long_name = ( - "Characteristic x-coordinate of the mesh edge (e.g., midpoint)" - ) - - mesh2d_edge_y = dataset.createVariable( - "mesh2d_edge_y", "f8", ("mesh2d_nEdges",) - ) - mesh2d_edge_y.standard_name = "projection_y_coordinate" - mesh2d_edge_y.long_name = ( - "Characteristic y-coordinate of the mesh edge (e.g., midpoint)" - ) - - mesh2d_edge_nodes = dataset.createVariable( - "mesh2d_edge_nodes", "i4", ("mesh2d_nEdges", "Two") - ) - mesh2d_edge_nodes.cf_role = "edge_node_connectivity" - mesh2d_edge_nodes.long_name = "Start and end nodes of mesh edges" - mesh2d_edge_nodes.start_index = 1 - - mesh2d_edge_faces = dataset.createVariable( - "mesh2d_edge_faces", "f8", ("mesh2d_nEdges", "Two") - ) - mesh2d_edge_faces.cf_role = "edge_face_connectivity" - mesh2d_edge_faces.long_name = "Start and end nodes of mesh edges" - mesh2d_edge_faces.start_index = 1 - - mesh2d_face_nodes = dataset.createVariable( - "mesh2d_face_nodes", "i4", ("mesh2d_nFaces", "mesh2d_nMax_face_nodes") - ) - mesh2d_face_nodes.long_name = "Vertex node of mesh face (counterclockwise)" - mesh2d_face_nodes.start_index = 1 - - mesh2d_face_x = dataset.createVariable( - "mesh2d_face_x", "f8", ("mesh2d_nFaces",) - ) - mesh2d_face_x.standard_name = "projection_x_coordinate" - mesh2d_face_x.long_name = "characteristic x-coordinate of the mesh face" - mesh2d_face_x.start_index = 1 - - mesh2d_face_y = dataset.createVariable( - "mesh2d_face_y", "f8", ("mesh2d_nFaces",) - ) - mesh2d_face_y.standard_name = "projection_y_coordinate" - mesh2d_face_y.long_name = "characteristic y-coordinate of the mesh face" - mesh2d_face_y.start_index = 1 + # Get triangle node coordinates + lon_tri = node_computation_longitude[node_computation_elements] + lat_tri = node_computation_latitude[node_computation_elements] - mesh2d_face_x_bnd = dataset.createVariable( - "mesh2d_face_x_bnd", "f8", ("mesh2d_nFaces", "mesh2d_nMax_face_nodes") - ) - mesh2d_face_x_bnd.long_name = ( - "x-coordinate bounds of mesh faces (i.e. corner coordinates)" - ) + # Estimate resolution from max side of each triangle + dlon01 = np.abs(lon_tri[:, 0] - lon_tri[:, 1]) + dlon12 = np.abs(lon_tri[:, 1] - lon_tri[:, 2]) + dlon20 = np.abs(lon_tri[:, 2] - lon_tri[:, 0]) + max_dlon = np.stack([dlon01, dlon12, dlon20], axis=1).max(axis=1) + min_dx = np.min(max_dlon) * factor - mesh2d_face_y_bnd = dataset.createVariable( - "mesh2d_face_y_bnd", "f8", ("mesh2d_nFaces", "mesh2d_nMax_face_nodes") - ) - mesh2d_face_y_bnd.long_name = ( - "y-coordinate bounds of mesh faces (i.e. 
corner coordinates)" - ) + dlat01 = np.abs(lat_tri[:, 0] - lat_tri[:, 1]) + dlat12 = np.abs(lat_tri[:, 1] - lat_tri[:, 2]) + dlat20 = np.abs(lat_tri[:, 2] - lat_tri[:, 0]) + max_dlat = np.stack([dlat01, dlat12, dlat20], axis=1).max(axis=1) + min_dy = np.min(max_dlat) * factor - mesh2d_node_x.units = "longitude" - mesh2d_node_y.units = "latitude" - mesh2d_edge_x.units = "longitude" - mesh2d_edge_y.units = "latitude" - mesh2d_face_x.units = "longitude" - mesh2d_face_y.units = "latitude" - mesh2d_face_x_bnd.units = "grados" - mesh2d_face_y_bnd.units = "grados" - mesh2d_face_x_bnd.standard_name = "longitude" - mesh2d_face_y_bnd.standard_name = "latitude" - mesh2d_face_nodes.coordinates = "mesh2d_node_x mesh2d_node_y" - - wgs84 = dataset.createVariable("wgs84", "int32") - wgs84.setncatts( - { - "name": "WGS 84", - "epsg": np.int32(4326), - "grid_mapping_name": "latitude_longitude", - "longitude_of_prime_meridian": 0.0, - "semi_major_axis": 6378137.0, - "semi_minor_axis": 6356752.314245, - "inverse_flattening": 298.257223563, - "EPSG_code": "value is equal to EPSG code", - "proj4_params": "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs", - "projection_name": "unknown", - "wkt": 'GEOGCS["WGS 84",\n DATUM["WGS_1984",\n SPHEROID["WGS 84",6378137,298.257223563,\n AUTHORITY["EPSG","7030"]],\n AUTHORITY["EPSG","6326"]],\n PRIMEM["Greenwich",0,\n AUTHORITY["EPSG","8901"]],\n UNIT["degree",0.0174532925199433,\n AUTHORITY["EPSG","9122"]],\n AXIS["Latitude",NORTH],\n AXIS["Longitude",EAST],\n AUTHORITY["EPSG","4326"]]', - } - ) + # Create regular grid + lon_grid = np.arange(lon_min, lon_max + min_dx, min_dx) + lat_grid = np.arange(lat_min, lat_max + min_dy, min_dy) - mesh2d_node_x[:] = node_x - mesh2d_node_y[:] = node_y - mesh2d_node_z[:] = -node_z - - mesh2d_edge_x[:] = edge_x - mesh2d_edge_y[:] = edge_y - mesh2d_edge_nodes[:, :] = edge_node - - mesh2d_edge_faces[:] = edge_face - mesh2d_face_nodes[:] = face_node + 1 - mesh2d_face_x[:] = face_x - mesh2d_face_y[:] = face_y - - mesh2d_face_x_bnd[:] = face_x_bnd - mesh2d_face_y_bnd[:] = face_y_bnd - - dataset.institution = "GeoOcean" - dataset.references = "https://github.com/GeoOcean/BlueMath_tk" - dataset.source = f"BlueMath tk {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}" - dataset.history = "Created with OCSmesh" - dataset.Conventions = "CF-1.8 UGRID-1.0 Deltares-0.10" - - dataset.createDimension("str_dim", 1) - mesh2d = dataset.createVariable("mesh2d", "i4", ("str_dim",)) - mesh2d.cf_role = "mesh_topology" - mesh2d.long_name = "Topology data of 2D mesh" - mesh2d.topology_dimension = 2 - mesh2d.node_coordinates = "mesh2d_node_x mesh2d_node_y" - mesh2d.node_dimension = "mesh2d_nNodes" - mesh2d.edge_node_connectivity = "mesh2d_edge_nodes" - mesh2d.edge_dimension = "mesh2d_nEdges" - mesh2d.edge_coordinates = "mesh2d_edge_x mesh2d_edge_y" - mesh2d.face_node_connectivity = "mesh2d_face_nodes" - mesh2d.face_dimension = "mesh2d_nFaces" - mesh2d.face_coordinates = "mesh2d_face_x mesh2d_face_y" - mesh2d.max_face_nodes_dimension = "mesh2d_nMax_face_nodes" - mesh2d.edge_face_connectivity = "mesh2d_edge_faces" + return lon_grid, lat_grid def generate_structured_points( @@ -419,7 +196,7 @@ def plot_GS_input_wind_partition( Figure size. Default is (10, 8). """ - simple_quiver = 5 + simple_quiver = 20 scale = 30 width = 0.003 @@ -554,14 +331,14 @@ def plot_greensurge_setup( Axes object. 
""" - # Extracting data from the dataset - Conectivity = info_ds.triangle_forcing_connectivity.values + # Extract data from the dataset + connectivity = info_ds.triangle_forcing_connectivity.values node_forcing_longitude = info_ds.node_forcing_longitude.values node_forcing_latitude = info_ds.node_forcing_latitude.values node_computation_longitude = info_ds.node_computation_longitude.values node_computation_latitude = info_ds.node_computation_latitude.values - num_elements = len(Conectivity) + num_elements = len(connectivity) if fig is None or ax is None: fig, ax = plt.subplots( subplot_kw={"projection": ccrs.PlateCarree()}, @@ -576,13 +353,13 @@ def plot_greensurge_setup( color="grey", linestyle="-", marker="", - linewidth=1 / 2, + linewidth=0.5, label="Computational mesh", ) ax.triplot( node_forcing_longitude, node_forcing_latitude, - Conectivity, + connectivity, color="green", linestyle="-", marker="", @@ -590,22 +367,6 @@ def plot_greensurge_setup( label=f"Forcing mesh ({num_elements} elements)", ) - for t in range(num_elements): - node0, node1, node2 = Conectivity[t] - _x = ( - node_forcing_longitude[int(node0)] - + node_forcing_longitude[int(node1)] - + node_forcing_longitude[int(node2)] - ) / 3 - _y = ( - node_forcing_latitude[int(node0)] - + node_forcing_latitude[int(node1)] - + node_forcing_latitude[int(node2)] - ) / 3 - plt.text( - _x, _y, f"T{t}", fontsize=10, ha="center", va="center", fontweight="bold" - ) - bnd = [ min(node_computation_longitude.min(), node_forcing_longitude.min()), max(node_computation_longitude.max(), node_forcing_longitude.max()), @@ -622,38 +383,6 @@ def plot_greensurge_setup( return fig, ax -def create_triangle_mask( - lon_grid: np.ndarray, lat_grid: np.ndarray, triangle: np.ndarray -) -> np.ndarray: - """ - Create a mask for a triangle defined by its vertices. - - Parameters - ---------- - lon_grid : np.ndarray - The longitude grid. - lat_grid : np.ndarray - The latitude grid. - triangle : np.ndarray - The triangle vertices. - - Returns - ------- - np.ndarray - The mask for the triangle. - """ - - triangle_path = Path(triangle) - # if lon_grid.ndim == 1: - # lon_grid, lat_grid = np.meshgrid(lon_grid, lat_grid) - lon_grid, lat_grid = np.meshgrid(lon_grid, lat_grid) - points = np.vstack([lon_grid.flatten(), lat_grid.flatten()]).T - inside_mask = triangle_path.contains_points(points) - mask = inside_mask.reshape(lon_grid.shape) - - return mask - - def create_triangle_mask_from_points( lon: np.ndarray, lat: np.ndarray, triangle: np.ndarray ) -> np.ndarray: @@ -780,347 +509,137 @@ def plot_GS_vs_dynamic_windsetup_swath( return fig, axs -def GS_windsetup_reconstruction_with_postprocess( - greensurge_dataset: xr.Dataset, +def plot_GS_vs_dynamic_windsetup( + ds_WL_GS_WindSetUp: xr.Dataset, + ds_WL_dynamic_WindSetUp: xr.Dataset, ds_gfd_metadata: xr.Dataset, - wind_direction_input: xr.Dataset, - velocity_thresholds: np.ndarray = np.array([0, 100, 100]), - drag_coefficients: np.ndarray = np.array([0.00063, 0.00723, 0.00723]), -) -> xr.Dataset: + time: datetime, + vmin: float = None, + vmax: float = None, + figsize: tuple = (10, 8), +) -> None: """ - Reconstructs the GreenSurge wind setup using the provided wind direction input and metadata. + Plot the GreenSurge and dynamic wind setup from the provided datasets. Parameters ---------- - greensurge_dataset : xr.Dataset - xarray Dataset containing the GreenSurge mesh and forcing data. + ds_WL_GS_WindSetUp: xr.Dataset + xarray Dataset containing the GreenSurge wind setup data. 
+ ds_WL_dynamic_WindSetUp: xr.Dataset + xarray Dataset containing the dynamic wind setup data. ds_gfd_metadata: xr.Dataset - xarray Dataset containing metadata for the GFD mesh. - wind_direction_input: xr.Dataset - xarray Dataset containing wind direction and speed data. - velocity_thresholds : np.ndarray - Array of velocity thresholds for drag coefficient calculation. - drag_coefficients : np.ndarray - Array of drag coefficients corresponding to the velocity thresholds. - - Returns - ------- - xr.Dataset - xarray Dataset containing the reconstructed wind setup. + xarray Dataset containing the metadata for the GFD mesh. + time: datetime.datetime + The time point at which to plot the data. + vmin: float, optional + Minimum value for the color scale. Default is None. + vmax: float, optional + Maximum value for the color scale. Default is None. + figsize: tuple, optional + Tuple specifying the figure size. Default is (10, 8). """ - velocity_thresholds = np.asarray(velocity_thresholds) - drag_coefficients = np.asarray(drag_coefficients) + warnings.filterwarnings("ignore", message="All-NaN slice encountered") + + X = ds_gfd_metadata.node_computation_longitude.values + Y = ds_gfd_metadata.node_computation_latitude.values + triangles = ds_gfd_metadata.triangle_computation_connectivity.values - direction_bins = ds_gfd_metadata.wind_directions.values - forcing_cell_indices = greensurge_dataset.forcing_cell.values - wind_speed_reference = ds_gfd_metadata.wind_speed.values.item() - base_drag_coeff = GS_LinearWindDragCoef( - wind_speed_reference, drag_coefficients, velocity_thresholds + Longitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values + Latitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values + + xds_GS = ds_WL_GS_WindSetUp["WL"].sel(time=time).values + xds_DY = ds_WL_dynamic_WindSetUp["mesh2d_s1"].sel(time=time).values + if vmin is None or vmax is None: + vmax = float(np.nanmax(xds_GS)) * 0.5 + vmin = -vmax + + fig, axs = plt.subplots( + nrows=1, + ncols=2, + figsize=figsize, + subplot_kw={"projection": ccrs.PlateCarree()}, + constrained_layout=True, ) - time_step_hours = ds_gfd_metadata.time_step_hours.values - time_start = wind_direction_input.time.values.min() - time_end = wind_direction_input.time.values.max() - duration_in_steps = ( - int((ds_gfd_metadata.simulation_duration_hours.values) / time_step_hours) + 1 + axs[0].tripcolor( + Longitude_dynamic, + Latitude_dynamic, + ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values - 1, + facecolors=xds_DY, + cmap="bwr", + vmin=vmin, + vmax=vmax, + transform=ccrs.PlateCarree(), ) - output_time_vector = np.arange( - time_start, time_end, np.timedelta64(int(60 * time_step_hours.item()), "m") + + pm = axs[1].tripcolor( + X, + Y, + triangles, + facecolors=xds_GS, + cmap="bwr", + vmin=vmin, + vmax=vmax, + transform=ccrs.PlateCarree(), + ) + cbar = fig.colorbar(pm, ax=axs, orientation="horizontal", pad=0.03, aspect=50) + cbar.set_label( + "WL ({})".format("m"), rotation=0, va="bottom", fontweight="bold", labelpad=15 + ) + fig.suptitle( + f"Wind SetUp for {time.astype('datetime64[s]').astype(str)}", + fontsize=16, + fontweight="bold", ) - num_output_times = len(output_time_vector) - direction_data = wind_direction_input.Dir.values - wind_speed_data = wind_direction_input.W.values + axs[0].set_title("Dynamic Wind SetUp") + axs[1].set_title("GreenSurge Wind SetUp") - n_faces = greensurge_dataset["mesh2d_s1"].isel(forcing_cell=0, direction=0).shape - wind_setup_output = np.zeros((num_output_times, n_faces[1])) - water_level_accumulator = 
np.zeros(n_faces) + lon_min = np.nanmin(Longitude_dynamic) + lon_max = np.nanmax(Longitude_dynamic) + lat_min = np.nanmin(Latitude_dynamic) + lat_max = np.nanmax(Latitude_dynamic) + for ax in axs: + ax.set_extent([lon_min, lon_max, lat_min, lat_max]) - for time_index in tqdm(range(num_output_times), desc="Processing time steps"): - water_level_accumulator[:] = 0 - for cell_index in forcing_cell_indices.astype(int): - current_dir = direction_data[cell_index, time_index] % 360 - adjusted_bins = np.where(direction_bins == 0, 360, direction_bins) - closest_direction_index = np.abs(adjusted_bins - current_dir).argmin() - water_level_case = ( - greensurge_dataset["mesh2d_s1"] - .sel(forcing_cell=cell_index, direction=closest_direction_index) - .values - ) - water_level_case = np.nan_to_num(water_level_case, nan=0) +def plot_GS_TG_validation_timeseries( + ds_WL_GS_WindSetUp: xr.Dataset, + ds_WL_GS_IB: xr.Dataset, + ds_WL_dynamic_WindSetUp: xr.Dataset, + tide_gauge: xr.Dataset, + ds_GFD_info: xr.Dataset, + figsize: tuple = (20, 7), + WLmin: float = None, + WLmax: float = None, +) -> None: + """ + Plot a time series comparison of GreenSurge, dynamic wind setup, and tide gauge data with a bathymetry map. - wind_speed_value = wind_speed_data[cell_index, time_index] - drag_coeff_value = GS_LinearWindDragCoef( - wind_speed_value, drag_coefficients, velocity_thresholds - ) + Parameters + ---------- + ds_WL_GS_WindSetUp : xr.Dataset + Dataset containing GreenSurge wind setup data with dimensions (nface, time). + ds_WL_GS_IB : xr.Dataset + Dataset containing inverse barometer data with dimensions (lat, lon, time). + ds_WL_dynamic_WindSetUp : xr.Dataset + Dataset containing dynamic wind setup data with dimensions (mesh2d_nFaces, time). + tide_gauge : xr.Dataset + Dataset containing tide gauge data with dimensions (time). + ds_GFD_info : xr.Dataset + Dataset containing grid information with longitude and latitude coordinates. + figsize : tuple, optional + Size of the figure for the plot. Default is (15, 7). + WLmin : float, optional + Minimum water level for the plot. Default is None. + WLmax : float, optional + Maximum water level for the plot. Default is None. + """ - scaling_factor = (wind_speed_value**2 / wind_speed_reference**2) * ( - drag_coeff_value / base_drag_coeff - ) - water_level_accumulator += water_level_case * scaling_factor - - step_window = min(duration_in_steps, num_output_times - time_index) - if (num_output_times - time_index) > step_window: - wind_setup_output[time_index : time_index + step_window] += ( - water_level_accumulator - ) - else: - shift_counter = step_window - (num_output_times - time_index) - wind_setup_output[ - time_index : time_index + step_window - shift_counter - ] += water_level_accumulator[: step_window - shift_counter] - - ds_wind_setup = xr.Dataset( - {"WL": (["time", "nface"], wind_setup_output)}, - coords={ - "time": output_time_vector, - "nface": np.arange(wind_setup_output.shape[1]), - }, - ) - ds_wind_setup.attrs["description"] = "Wind setup from GreenSurge methodology" - - return ds_wind_setup - - -def GS_LinearWindDragCoef_mat( - Wspeed: np.ndarray, CD_Wl_abc: np.ndarray, Wl_abc: np.ndarray -) -> np.ndarray: - """ - Calculate the linear drag coefficient based on wind speed and specified thresholds. - - Parameters - ---------- - Wspeed : np.ndarray - Wind speed values (1D array). - CD_Wl_abc : np.ndarray - Coefficients for the drag coefficient calculation, should be a 1D array of length 3. 
- Wl_abc : np.ndarray - Wind speed thresholds for the drag coefficient calculation, should be a 1D array of length 3. - - Returns - ------- - np.ndarray - Calculated drag coefficient values based on the input wind speed. - """ - - Wspeed = np.atleast_1d(Wspeed).astype(np.float64) - was_scalar = Wspeed.ndim == 1 and Wspeed.size == 1 - - Wla, Wlb, Wlc = Wl_abc - CDa, CDb, CDc = CD_Wl_abc - - if Wla != Wlb: - a_ab = (CDa - CDb) / (Wla - Wlb) - b_ab = CDb - a_ab * Wlb - else: - a_ab = 0 - b_ab = CDa - - if Wlb != Wlc: - a_bc = (CDb - CDc) / (Wlb - Wlc) - b_bc = CDc - a_bc * Wlc - else: - a_bc = 0 - b_bc = CDb - - a_cinf = 0 - b_cinf = CDc - - CD = a_cinf * Wspeed + b_cinf - CD[Wspeed <= Wlb] = a_ab * Wspeed[Wspeed <= Wlb] + b_ab - mask_bc = (Wspeed > Wlb) & (Wspeed <= Wlc) - CD[mask_bc] = a_bc * Wspeed[mask_bc] + b_bc - - return CD.item() if was_scalar else CD - - -def GS_LinearWindDragCoef( - Wspeed: np.ndarray, CD_Wl_abc: np.ndarray, Wl_abc: np.ndarray -) -> np.ndarray: - """ - Calculate the linear drag coefficient based on wind speed and specified thresholds. - - Parameters - ---------- - Wspeed : np.ndarray - Wind speed values (1D array). - CD_Wl_abc : np.ndarray - Coefficients for the drag coefficient calculation, should be a 1D array of length 3. - Wl_abc : np.ndarray - Wind speed thresholds for the drag coefficient calculation, should be a 1D array of length 3. - - Returns - ------- - np.ndarray - Calculated drag coefficient values based on the input wind speed. - """ - - Wla = Wl_abc[0] - Wlb = Wl_abc[1] - Wlc = Wl_abc[2] - CDa = CD_Wl_abc[0] - CDb = CD_Wl_abc[1] - CDc = CD_Wl_abc[2] - - # coefs lines y=ax+b - if not Wla == Wlb: - a_CDline_ab = (CDa - CDb) / (Wla - Wlb) - b_CDline_ab = CDb - a_CDline_ab * Wlb - else: - a_CDline_ab = 0 - b_CDline_ab = CDa - if not Wlb == Wlc: - a_CDline_bc = (CDb - CDc) / (Wlb - Wlc) - b_CDline_bc = CDc - a_CDline_bc * Wlc - else: - a_CDline_bc = 0 - b_CDline_bc = CDb - a_CDline_cinf = 0 - b_CDline_cinf = CDc - - if Wspeed <= Wlb: - CD = a_CDline_ab * Wspeed + b_CDline_ab - elif Wspeed > Wlb and Wspeed <= Wlc: - CD = a_CDline_bc * Wspeed + b_CDline_bc - else: - CD = a_CDline_cinf * Wspeed + b_CDline_cinf - - return CD - - -def plot_GS_vs_dynamic_windsetup( - ds_WL_GS_WindSetUp: xr.Dataset, - ds_WL_dynamic_WindSetUp: xr.Dataset, - ds_gfd_metadata: xr.Dataset, - time: datetime, - vmin: float = None, - vmax: float = None, - figsize: tuple = (10, 8), -) -> None: - """ - Plot the GreenSurge and dynamic wind setup from the provided datasets. - - Parameters - ---------- - ds_WL_GS_WindSetUp: xr.Dataset - xarray Dataset containing the GreenSurge wind setup data. - ds_WL_dynamic_WindSetUp: xr.Dataset - xarray Dataset containing the dynamic wind setup data. - ds_gfd_metadata: xr.Dataset - xarray Dataset containing the metadata for the GFD mesh. - time: datetime.datetime - The time point at which to plot the data. - vmin: float, optional - Minimum value for the color scale. Default is None. - vmax: float, optional - Maximum value for the color scale. Default is None. - figsize: tuple, optional - Tuple specifying the figure size. Default is (10, 8). 
- """ - - warnings.filterwarnings("ignore", message="All-NaN slice encountered") - - X = ds_gfd_metadata.node_computation_longitude.values - Y = ds_gfd_metadata.node_computation_latitude.values - triangles = ds_gfd_metadata.triangle_computation_connectivity.values - - Longitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values - Latitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values - - xds_GS = ds_WL_GS_WindSetUp["WL"].sel(time=time).values - xds_DY = ds_WL_dynamic_WindSetUp["mesh2d_s1"].sel(time=time).values - if vmin is None or vmax is None: - vmax = float(np.nanmax(xds_GS)) * 0.5 - vmin = -vmax - - fig, axs = plt.subplots( - nrows=1, - ncols=2, - figsize=figsize, - subplot_kw={"projection": ccrs.PlateCarree()}, - constrained_layout=True, - ) - - axs[0].tripcolor( - Longitude_dynamic, - Latitude_dynamic, - ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values - 1, - facecolors=xds_DY, - cmap="bwr", - vmin=vmin, - vmax=vmax, - transform=ccrs.PlateCarree(), - ) - - pm = axs[1].tripcolor( - X, - Y, - triangles, - facecolors=xds_GS, - cmap="bwr", - vmin=vmin, - vmax=vmax, - transform=ccrs.PlateCarree(), - ) - cbar = fig.colorbar(pm, ax=axs, orientation="horizontal", pad=0.03, aspect=50) - cbar.set_label( - "WL ({})".format("m"), rotation=0, va="bottom", fontweight="bold", labelpad=15 - ) - fig.suptitle( - f"Wind SetUp for {time.astype('datetime64[s]').astype(str)}", - fontsize=16, - fontweight="bold", - ) - - axs[0].set_title("Dynamic Wind SetUp") - axs[1].set_title("GreenSurge Wind SetUp") - - lon_min = np.nanmin(Longitude_dynamic) - lon_max = np.nanmax(Longitude_dynamic) - lat_min = np.nanmin(Latitude_dynamic) - lat_max = np.nanmax(Latitude_dynamic) - for ax in axs: - ax.set_extent([lon_min, lon_max, lat_min, lat_max]) - - -def plot_GS_TG_validation_timeseries( - ds_WL_GS_WindSetUp: xr.Dataset, - ds_WL_GS_IB: xr.Dataset, - ds_WL_dynamic_WindSetUp: xr.Dataset, - tide_gauge: xr.Dataset, - ds_GFD_info: xr.Dataset, - figsize: tuple = (20, 7), - WLmin: float = None, - WLmax: float = None, -) -> None: - """ - Plot a time series comparison of GreenSurge, dynamic wind setup, and tide gauge data with a bathymetry map. - - Parameters - ---------- - ds_WL_GS_WindSetUp : xr.Dataset - Dataset containing GreenSurge wind setup data with dimensions (nface, time). - ds_WL_GS_IB : xr.Dataset - Dataset containing inverse barometer data with dimensions (lat, lon, time). - ds_WL_dynamic_WindSetUp : xr.Dataset - Dataset containing dynamic wind setup data with dimensions (mesh2d_nFaces, time). - tide_gauge : xr.Dataset - Dataset containing tide gauge data with dimensions (time). - ds_GFD_info : xr.Dataset - Dataset containing grid information with longitude and latitude coordinates. - figsize : tuple, optional - Size of the figure for the plot. Default is (15, 7). - WLmin : float, optional - Minimum water level for the plot. Default is None. - WLmax : float, optional - Maximum water level for the plot. Default is None. 
- """ - - lon_obs = tide_gauge.lon.values - lat_obs = tide_gauge.lat.values - lon_obs = np.where(lon_obs > 180, lon_obs - 360, lon_obs) + lon_obs = tide_gauge.lon.values + lat_obs = tide_gauge.lat.values + lon_obs = np.where(lon_obs > 180, lon_obs - 360, lon_obs) nface_index = extract_pos_nearest_points_tri(ds_GFD_info, lon_obs, lat_obs) mesh2d_nFaces = extract_pos_nearest_points_tri( @@ -1245,14 +764,6 @@ def extract_pos_nearest_points_tri( """ if "node_forcing_latitude" in ds_mesh_info.variables: - # elements = ds_mesh_info.triangle_computation_connectivity.values - # lon_mesh = np.mean( - # ds_mesh_info.node_computation_longitude.values[elements], axis=1 - # ) - # lat_mesh = np.mean( - # ds_mesh_info.node_computation_latitude.values[elements], axis=1 - # ) - lon_mesh = ds_mesh_info.node_computation_longitude.values lat_mesh = ds_mesh_info.node_computation_latitude.values type_ds = 0 @@ -1261,7 +772,7 @@ def extract_pos_nearest_points_tri( lat_mesh = ds_mesh_info.mesh2d_face_y.values type_ds = 1 - nface_index = [] # np.zeros(len(lon_points)) + nface_index = [] for i in range(len(lon_points)): lon = lon_points[i] @@ -1271,12 +782,10 @@ def extract_pos_nearest_points_tri( min_idx = np.argmin(distances) if type_ds == 0: - # nface_index[i] = ds_mesh_info.node_cumputation_index.values[min_idx].astype(int) nface_index.append( ds_mesh_info.node_cumputation_index.values[min_idx].astype(int) ) elif type_ds == 1: - # nface_index[i] = ds_mesh_info.mesh2d_nFaces.values[min_idx].astype(int) nface_index.append(ds_mesh_info.mesh2d_nFaces.values[min_idx].astype(int)) return nface_index @@ -1308,8 +817,8 @@ def extract_pos_nearest_points( lon_mesh = ds_mesh_info.lon.values lat_mesh = ds_mesh_info.lat.values - pos_lon_points_mesh = [] # = np.zeros(len(lon_points)) - pos_lat_points_mesh = [] # = np.zeros(len(lat_points)) + pos_lon_points_mesh = [] + pos_lat_points_mesh = [] for i in range(len(lon_points)): lon = lon_points[i] @@ -1318,8 +827,6 @@ def extract_pos_nearest_points( lat_index = np.nanargmin((lat - lat_mesh) ** 2) lon_index = np.nanargmin((lon - lon_mesh) ** 2) - # pos_lon_points_mesh[i] = lon_index.astype(int) - # pos_lat_points_mesh[i] = lat_index.astype(int) pos_lon_points_mesh.append(lon_index.astype(int)) pos_lat_points_mesh.append(lat_index.astype(int)) @@ -1350,198 +857,21 @@ def pressure_to_IB(xds_presure: xr.Dataset) -> xr.Dataset: return xds_presure_modified -def compute_water_level_for_time( - time_index: int, - direction_data: np.ndarray, - wind_speed_data: np.ndarray, - direction_bins: np.ndarray, - forcing_cell_indices: np.ndarray, - greensurge_dataset: xr.Dataset, - wind_speed_reference: float, - base_drag_coeff: float, - drag_coefficients: np.ndarray, - velocity_thresholds: np.ndarray, - duration_in_steps: int, - num_output_times: int, -) -> np.ndarray: +def build_greensurge_infos_dataset( + path_grd_calc: str, + path_grd_forz: str, + site: str, + wind_speed: float, + direction_step: float, + simulation_duration_hours: float, + simulation_time_step_hours: float, + forcing_time_step: float, + reference_date_dt: datetime, + Eddy: float, + Chezy: float, +) -> xr.Dataset: """ - Compute the water level for a specific time index based on wind direction and speed. - - Parameters - ---------- - time_index : int - The index of the time step to compute the water level for. - direction_data : np.ndarray - 2D array of wind direction data with shape (n_cells, n_times). - wind_speed_data : np.ndarray - 2D array of wind speed data with shape (n_cells, n_times). 
- direction_bins : np.ndarray - 1D array of wind direction bins. - forcing_cell_indices : np.ndarray - 1D array of indices for the forcing cells. - greensurge_dataset : xr.Dataset - xarray Dataset containing the GreenSurge mesh and forcing data. - wind_speed_reference : float - Reference wind speed value for scaling. - base_drag_coeff : float - Base drag coefficient value for scaling. - drag_coefficients : np.ndarray - 1D array of drag coefficients corresponding to the velocity thresholds. - velocity_thresholds : np.ndarray - 1D array of velocity thresholds for drag coefficient calculation. - duration_in_steps : int - Total duration of the simulation in steps. - num_output_times : int - Total number of output time steps. - - Returns - ------- - np.ndarray - 2D array of computed water levels for the specified time index. - """ - - adjusted_bins = np.where(direction_bins == 0, 360, direction_bins) - n_faces = greensurge_dataset["mesh2d_s1"].isel(forcing_cell=0, direction=0).shape - water_level_accumulator = np.zeros(n_faces) - - for cell_index in forcing_cell_indices.astype(int): - current_dir = direction_data[cell_index, time_index] % 360 - closest_direction_index = np.abs(adjusted_bins - current_dir).argmin() - - water_level_case = ( - greensurge_dataset["mesh2d_s1"] - .sel(forcing_cell=cell_index, direction=closest_direction_index) - .values - ) - water_level_case = np.nan_to_num(water_level_case, nan=0) - - wind_speed_value = wind_speed_data[cell_index, time_index] - drag_coeff_value = GS_LinearWindDragCoef( - wind_speed_value, drag_coefficients, velocity_thresholds - ) - - scaling_factor = (wind_speed_value**2 / wind_speed_reference**2) * ( - drag_coeff_value / base_drag_coeff - ) - water_level_accumulator += water_level_case * scaling_factor - - step_window = min(duration_in_steps, num_output_times - time_index) - result = np.zeros((num_output_times, n_faces[1])) - if (num_output_times - time_index) > step_window: - result[time_index : time_index + step_window] += water_level_accumulator - else: - shift_counter = step_window - (num_output_times - time_index) - result[time_index : time_index + step_window - shift_counter] += ( - water_level_accumulator[: step_window - shift_counter] - ) - return result - - -def GS_windsetup_reconstruction_with_postprocess_parallel( - greensurge_dataset: xr.Dataset, - ds_gfd_metadata: xr.Dataset, - wind_direction_input: xr.Dataset, - num_workers: int = None, - velocity_thresholds: np.ndarray = np.array([0, 100, 100]), - drag_coefficients: np.ndarray = np.array([0.00063, 0.00723, 0.00723]), -) -> xr.Dataset: - """ - Reconstructs the GreenSurge wind setup using the provided wind direction input and metadata in parallel. - - Parameters - ---------- - greensurge_dataset : xr.Dataset - xarray Dataset containing the GreenSurge mesh and forcing data. - ds_gfd_metadata: xr.Dataset - xarray Dataset containing metadata for the GFD mesh. - wind_direction_input: xr.Dataset - xarray Dataset containing wind direction and speed data. - velocity_thresholds : np.ndarray - Array of velocity thresholds for drag coefficient calculation. - drag_coefficients : np.ndarray - Array of drag coefficients corresponding to the velocity thresholds. - - Returns - ------- - xr.Dataset - xarray Dataset containing the reconstructed wind setup. 
- """ - - if num_workers is None: - num_workers = cpu_count() - - direction_bins = ds_gfd_metadata.wind_directions.values - forcing_cell_indices = greensurge_dataset.forcing_cell.values - wind_speed_reference = ds_gfd_metadata.wind_speed.values.item() - base_drag_coeff = GS_LinearWindDragCoef( - wind_speed_reference, drag_coefficients, velocity_thresholds - ) - time_step_hours = ds_gfd_metadata.time_step_hours.values - - time_start = wind_direction_input.time.values.min() - time_end = wind_direction_input.time.values.max() - duration_in_steps = ( - int((ds_gfd_metadata.simulation_duration_hours.values) / time_step_hours) + 1 - ) - output_time_vector = np.arange( - time_start, time_end, np.timedelta64(int(60 * time_step_hours.item()), "m") - ) - num_output_times = len(output_time_vector) - - direction_data = wind_direction_input.Dir.values - wind_speed_data = wind_direction_input.W.values - - n_faces = greensurge_dataset["mesh2d_s1"].isel(forcing_cell=0, direction=0).shape[1] - - args = partial( - compute_water_level_for_time, - direction_data=direction_data, - wind_speed_data=wind_speed_data, - direction_bins=direction_bins, - forcing_cell_indices=forcing_cell_indices, - greensurge_dataset=greensurge_dataset, - wind_speed_reference=wind_speed_reference, - base_drag_coeff=base_drag_coeff, - drag_coefficients=drag_coefficients, - velocity_thresholds=velocity_thresholds, - duration_in_steps=duration_in_steps, - num_output_times=num_output_times, - ) - - with Pool(processes=num_workers) as pool: - results = list( - tqdm(pool.imap(args, range(num_output_times)), total=num_output_times) - ) - - wind_setup_output = np.sum(results, axis=0) - - ds_wind_setup = xr.Dataset( - {"WL": (["time", "nface"], wind_setup_output)}, - coords={ - "time": output_time_vector, - "nface": np.arange(n_faces), - }, - ) - ds_wind_setup.attrs["description"] = "Wind setup from GreenSurge methodology" - - return ds_wind_setup - - -def build_greensurge_infos_dataset( - path_grd_calc: str, - path_grd_forz: str, - site: str, - wind_speed: float, - direction_step: float, - simulation_duration_hours: float, - simulation_time_step_hours: float, - forcing_time_step: float, - reference_date_dt: datetime, - Eddy: float, - Chezy: float, -) -> xr.Dataset: - """ - Build a structured dataset containing simulation parameters for hybrid modeling. + Build a structured dataset containing simulation parameters for hybrid modeling. Parameters ---------- @@ -1573,8 +903,8 @@ def build_greensurge_infos_dataset( A structured dataset containing simulation parameters for hybrid modeling. """ - Nodes_calc, Elmts_calc, lines_calc = read_adcirc_grd(path_grd_calc) - Nodes_forz, Elmts_forz, lines_forz = read_adcirc_grd(path_grd_forz) + Nodes_calc, Elmts_calc, _ = read_adcirc_grd(path_grd_calc) + Nodes_forz, Elmts_forz, _ = read_adcirc_grd(path_grd_forz) num_elements = Elmts_forz.shape[0] @@ -1764,7 +1094,7 @@ def plot_greensurge_setup_with_raster( projections and matplotlib for plotting. 
""" - Nodes_calc, Elmts_calc, lines_calc = read_adcirc_grd(path_grd_calc) + Nodes_calc, Elmts_calc, _ = read_adcirc_grd(path_grd_calc) fig, ax = plt.subplots( subplot_kw={"projection": ccrs.PlateCarree()}, @@ -1772,7 +1102,6 @@ def plot_greensurge_setup_with_raster( constrained_layout=True, ) - # ax.set_facecolor("#518134") Longitude_nodes_calc = Nodes_calc[:, 1] Latitude_nodes_calc = Nodes_calc[:, 2] Elements_calc = Elmts_calc[:, 2:5].astype(int) @@ -1805,69 +1134,15 @@ def plot_greensurge_setup_with_raster( plot_greensurge_setup(simulation_dataset, figsize=(7, 7), ax=ax, fig=fig) -def plot_triangle_points( - lon_all: np.ndarray, - lat_all: np.ndarray, - i: int, - ds_GFD_info: xr.Dataset, - figsize: tuple = (7, 7), -) -> None: - """ - Plot a triangle and points selection for GreenSurge. - Parameters - ---------- - lon_all : array-like - Longitudes of the points. - lat_all : array-like - Latitudes of the points. - i : int - Index of the triangle to plot. - ds_GFD_info : xarray.Dataset - Dataset containing GreenSurge information. - figsize : tuple, optional - Size of the figure, by default (7, 7). - """ - - lon_points = lon_all[i] - lat_points = lat_all[i] - triangle = np.array( - [ - [lon_points[0], lat_points[0]], - [lon_points[1], lat_points[1]], - [lon_points[2], lat_points[2]], - [lon_points[0], lat_points[0]], - ] - ) - - fig, ax = plot_greensurge_setup(ds_GFD_info, figsize=figsize) - ax.fill( - triangle[:, 0], - triangle[:, 1], - color="green", - alpha=0.5, - transform=ccrs.PlateCarree(), - ) - ax.scatter( - lon_points, - lat_points, - color="red", - marker="o", - transform=ccrs.PlateCarree(), - label="Points selection", - ) - ax.set_title("Exemple of point selection for GreenSurge") - ax.legend() - fig.show() - - def interp_vortex_to_triangles( xds_vortex_GS: xr.Dataset, lon_all: np.ndarray, lat_all: np.ndarray, - type: str = "tri_mean", + method: str = "tri_mean", ) -> xr.Dataset: """ - Interpolates the vortex model data to the triangle points. + Interpolate vortex model data to triangle points. + Parameters ---------- xds_vortex_GS : xr.Dataset @@ -1876,21 +1151,25 @@ def interp_vortex_to_triangles( Longitudes of the triangle points. lat_all : np.ndarray Latitudes of the triangle points. + method : str, optional + Interpolation method: "tri_mean" (default) or "tri_points". + Returns ------- - xds_vortex_interp : xr.Dataset + xr.Dataset Dataset containing the interpolated vortex model data at the triangle points. - ----------- - This function interpolates the vortex model data (wind speed, direction, and pressure) + + Notes + ----- + This function interpolates vortex model data (wind speed, direction, and pressure) to the triangle points defined by `lon_all` and `lat_all`. It reshapes the data to match the number of triangles and points, and computes the mean values for each triangle. 
""" - - if type == "tri_mean": + if method == "tri_mean": n_tri, n_pts = lat_all.shape lat_interp = lat_all.reshape(-1) lon_interp = lon_all.reshape(-1) - elif type == "tri_points": + elif method == "tri_points": n_tri = lat_all.shape lat_interp = lat_all lon_interp = lon_all @@ -1898,7 +1177,7 @@ def interp_vortex_to_triangles( lat_interp = xr.DataArray(lat_interp, dims="point") lon_interp = xr.DataArray(lon_interp, dims="point") - if type == "tri_mean": + if method == "tri_mean": W_interp = xds_vortex_GS.W.interp(lat=lat_interp, lon=lon_interp) Dir_interp = xds_vortex_GS.Dir.interp(lat=lat_interp, lon=lon_interp) p_interp = xds_vortex_GS.p.interp(lat=lat_interp, lon=lon_interp) @@ -1915,9 +1194,8 @@ def interp_vortex_to_triangles( Dir_out = (np.rad2deg(np.arctan2(v_mean, u_mean))) % 360 W_out = W_interp.mean(axis=1) p_out = p_interp.mean(axis=1) - elif type == "tri_points": - xds_vortex_interp = xds_vortex_GS.interp(lat=lat_interp, lon=lon_interp) - return xds_vortex_interp + elif method == "tri_points": + return xds_vortex_GS.interp(lat=lat_interp, lon=lon_interp) xds_vortex_interp = xr.Dataset( data_vars={ @@ -1931,60 +1209,6 @@ def interp_vortex_to_triangles( return xds_vortex_interp -def load_GS_database( - xds_vortex_interp: xr.Dataset, ds_GFD_info: xr.Dataset, p_GFD_libdir: str -) -> xr.Dataset: - """ - Load the Green Surge database based on the interpolated vortex data. - Parameters - ---------- - xds_vortex_interp : xarray.Dataset - Interpolated vortex data on the structured grid. - ds_GFD_info : xarray.Dataset - Dataset containing information about the Green Surge database. - p_GFD_libdir : str - Path to the Green Surge database directory. - Returns - ------- - xarray.Dataset - Dataset containing the Green Surge data for the specified wind directions. 
- """ - - wind_direction_interp = xds_vortex_interp.Dir - - wind_direction_database = ds_GFD_info.wind_directions.values - wind_direction_step = np.mean(np.diff(wind_direction_database)) - wind_direction_indices = ( - (np.round((wind_direction_interp.values % 360) / wind_direction_step)) - % len(wind_direction_database) - ).astype(int) - unique_direction_indices = np.unique(wind_direction_indices).astype(str) - - green_surge_file_paths = np.char.add( - np.char.add(p_GFD_libdir + "/GreenSurge_DB_", unique_direction_indices), ".nc" - ) - - def preprocess(dataset): - file_name = dataset.encoding.get("source", "Unknown") - direction_string = file_name.split("_DB_")[-1].split(".")[0] - direction_index = int(direction_string) - return ( - dataset[["mesh2d_s1"]] - .expand_dims("direction") - .assign_coords(direction=[direction_index]) - ) - - greensurge_dataset = xr.open_mfdataset( - green_surge_file_paths, - parallel=False, - combine="by_coords", - preprocess=preprocess, - engine="netcdf4", - ) - - return greensurge_dataset - - def plot_GS_validation_timeseries( ds_WL_GS_WindSetUp: xr.Dataset, ds_WL_GS_IB: xr.Dataset, @@ -2084,10 +1308,7 @@ def plot_GS_validation_timeseries( ax_ts = gridspec.GridSpecFromSubplotSpec( n_series, 1, subplot_spec=gs[0, 1], hspace=0.3 ) - if WLmin is None or WLmax is None: - typee = 1 - else: - typee = 0 + auto_limits = WLmin is None or WLmax is None axes_right = [] for i in range(n_series): @@ -2115,7 +1336,7 @@ def plot_GS_validation_timeseries( ax.legend() if i != n_series - 1: ax.set_xticklabels([]) - if typee == 1: + if auto_limits: WLmax = ( max( np.nanmax(WL_SS_dyn[:, i]), @@ -2141,52 +1362,410 @@ def plot_GS_validation_timeseries( plt.show() -def get_regular_grid( - node_computation_longitude: np.ndarray, - node_computation_latitude: np.ndarray, - node_computation_elements: np.ndarray, - factor: float = 10, -) -> tuple: +@lru_cache(maxsize=256) +def read_raw_with_header(raw_path: str) -> np.ndarray: + """ + Read a .raw file with a 256-byte header and return a numpy float32 array. + + Parameters + ---------- + raw_path : str + Path to the .raw file. + + Returns + ------- + np.ndarray + The data array reshaped according to the header dimensions. + """ + with open(raw_path, "rb") as f: + header_bytes = f.read(256) + dims = list(struct.unpack("4i", header_bytes[:16])) + dims = [d for d in dims if d > 0] + if len(dims) == 0: + raise ValueError(f"{raw_path}: invalid header, no dimension > 0 found") + data = np.fromfile(f, dtype=np.float32) + expected_size = np.prod(dims) + if data.size != expected_size: + raise ValueError( + f"{raw_path}: size mismatch (data={data.size}, expected={expected_size}, shape={dims})" + ) + return np.reshape(data, dims) + + +def greensurge_wind_setup_reconstruction_raw( + greensurge_dataset: str, + ds_GFD_info_update: xr.Dataset, + xds_vortex_interp: xr.Dataset, +) -> xr.Dataset: """ - Generate a regular grid based on the node computation longitude and latitude. - The grid is defined by the minimum and maximum longitude and latitude values, - and the minimum distance between nodes in both dimensions. - The grid is generated with a specified factor to adjust the resolution. - Parameters: - - node_computation_longitude: 1D array of longitudes for the nodes. - - node_computation_latitude: 1D array of latitudes for the nodes. - - node_computation_elements: 2D array of indices defining the elements (triangles). - - factor: A scaling factor to adjust the resolution of the grid. - Returns: - - lon_grid: 1D array of longitudes defining the grid. 
- - lat_grid: 1D array of latitudes defining the grid. + Compute the GreenSurge wind contribution and return an xarray Dataset with the results. + + Parameters + ---------- + greensurge_dataset : str + Path to the GreenSurge dataset directory. + ds_GFD_info_update : xr.Dataset + Updated GreenSurge information dataset. + xds_vortex_interp : xr.Dataset + Interpolated vortex dataset. + + Returns + ------- + xr.Dataset + Dataset containing the GreenSurge wind setup contribution. """ - lon_min, lon_max = ( - node_computation_longitude.min(), - node_computation_longitude.max(), + ds_gfd_metadata = ds_GFD_info_update + wind_direction_input = xds_vortex_interp + velocity_thresholds = np.array([0, 100, 100]) + drag_coefficients = np.array([0.00063, 0.00723, 0.00723]) + + direction_bins = ds_gfd_metadata.wind_directions.values + forcing_cell_indices = ds_gfd_metadata.element_forcing_index.values + wind_speed_reference = ds_gfd_metadata.wind_speed.values.item() + base_drag_coeff = GS_LinearWindDragCoef( + wind_speed_reference, drag_coefficients, velocity_thresholds ) - lat_min, lat_max = node_computation_latitude.min(), node_computation_latitude.max() + time_step_hours = ds_gfd_metadata.time_step_hours.values - lon_tri = node_computation_longitude[node_computation_elements] - lat_tri = node_computation_latitude[node_computation_elements] + time_start = wind_direction_input.time.values.min() + time_end = wind_direction_input.time.values.max() + duration_in_steps = ( + int((ds_gfd_metadata.simulation_duration_hours.values) / time_step_hours) + 1 + ) + output_time_vector = np.arange( + time_start, time_end, np.timedelta64(int(time_step_hours * 60), "m") + ) + num_output_times = len(output_time_vector) - dlon01 = np.abs(lon_tri[:, 0] - lon_tri[:, 1]) - dlon12 = np.abs(lon_tri[:, 1] - lon_tri[:, 2]) - dlon20 = np.abs(lon_tri[:, 2] - lon_tri[:, 0]) - min_dx = np.min(np.stack([dlon01, dlon12, dlon20], axis=1).max(axis=1)) * factor + direction_data = wind_direction_input.Dir.values + wind_speed_data = wind_direction_input.W.values - dlat01 = np.abs(lat_tri[:, 0] - lat_tri[:, 1]) - dlat12 = np.abs(lat_tri[:, 1] - lat_tri[:, 2]) - dlat20 = np.abs(lat_tri[:, 2] - lat_tri[:, 0]) - min_dy = np.min(np.stack([dlat01, dlat12, dlat20], axis=1).max(axis=1)) * factor + sample_path = ( + f"{greensurge_dataset}/GF_T_0_D_0/dflowfmoutput/GreenSurge_GFDcase_map.raw" + ) + sample_data = read_raw_with_header(sample_path) + n_faces = sample_data.shape[-1] + wind_setup_output = np.zeros((num_output_times, n_faces), dtype=np.float32) + water_level_accumulator = np.zeros(sample_data.shape, dtype=np.float32) + + for time_index in tqdm(range(num_output_times), desc="Processing time steps"): + water_level_accumulator[:] = 0 + for cell_index in forcing_cell_indices.astype(int): + current_dir = direction_data[cell_index, time_index] % 360 + adjusted_bins = np.where(direction_bins == 0, 360, direction_bins) + closest_direction_index = np.abs(adjusted_bins - current_dir).argmin() + + raw_path = f"{greensurge_dataset}/GF_T_{cell_index}_D_{closest_direction_index}/dflowfmoutput/GreenSurge_GFDcase_map.raw" + water_level_case = read_raw_with_header(raw_path) + + water_level_case = np.nan_to_num(water_level_case, nan=0) + + wind_speed_value = wind_speed_data[cell_index, time_index] + drag_coeff_value = GS_LinearWindDragCoef( + wind_speed_value, drag_coefficients, velocity_thresholds + ) + + scaling_factor = (wind_speed_value**2 / wind_speed_reference**2) * ( + drag_coeff_value / base_drag_coeff + ) + water_level_accumulator += 
water_level_case * scaling_factor + + step_window = min(duration_in_steps, num_output_times - time_index) + if (num_output_times - time_index) > step_window: + wind_setup_output[time_index : time_index + step_window] += ( + water_level_accumulator + ) + else: + shift_counter = step_window - (num_output_times - time_index) + wind_setup_output[ + time_index : time_index + step_window - shift_counter + ] += water_level_accumulator[: step_window - shift_counter] + + ds_wind_setup = xr.Dataset( + {"WL": (["time", "nface"], wind_setup_output)}, + coords={ + "time": output_time_vector, + "nface": np.arange(wind_setup_output.shape[1]), + }, + ) + return ds_wind_setup - lon_grid = np.arange(lon_min, lon_max + min_dx, min_dx) - lat_grid = np.arange(lat_min, lat_max + min_dy, min_dy) - return lon_grid, lat_grid + +def build_greensurge_infos_dataset_pymesh2d( + Nodes_calc, + Elmts_calc, + Nodes_forz, + Elmts_forz, + site, + wind_speed, + direction_step, + simulation_duration_hours, + simulation_time_step_hours, + forcing_time_step, + reference_date_dt, + Eddy, + Chezy, +): + """Build a structured dataset for GreenSurge hybrid modeling. + + Parameters + ---------- + Nodes_calc : array + Computational mesh vertices (lon, lat) + Elmts_calc : array + Computational mesh triangles + Nodes_forz : array + Forcing mesh vertices (lon, lat) + Elmts_forz : array + Forcing mesh triangles + site : str + Study site name + wind_speed : float + Wind speed for each direction (m/s) + direction_step : float + Wind direction discretization step (degrees) + simulation_duration_hours : float + Total simulation duration (hours) + simulation_time_step_hours : float + Simulation time step (hours) + forcing_time_step : float + Forcing data time step (hours) + reference_date_dt : datetime + Reference date for simulation + Eddy : float + Eddy viscosity (m2/s) + Chezy : float + Chezy friction coefficient + + Returns + ------- + xr.Dataset + Structured dataset for GreenSurge + """ + num_elements = Elmts_forz.shape[0] + num_directions = int(360 / direction_step) + wind_directions = np.arange(0, 360, direction_step) + reference_date_str = reference_date_dt.strftime("%Y-%m-%d %H:%M:%S") + + time_forcing_index = [ + 0, + forcing_time_step, + forcing_time_step + 0.001, + simulation_duration_hours - 1, + ] + + ds = xr.Dataset( + coords=dict( + wind_direction_index=("wind_direction_index", np.arange(num_directions)), + time_forcing_index=("time_forcing_index", time_forcing_index), + node_computation_longitude=("node_cumputation_index", Nodes_calc[:, 0]), + node_computation_latitude=("node_cumputation_index", Nodes_calc[:, 1]), + triangle_nodes=("triangle_forcing_nodes", np.arange(3)), + node_forcing_index=("node_forcing_index", np.arange(len(Nodes_forz))), + element_forcing_index=("element_forcing_index", np.arange(num_elements)), + node_cumputation_index=( + "node_cumputation_index", + np.arange(len(Nodes_calc)), + ), + element_computation_index=( + "element_computation_index", + np.arange(len(Elmts_calc)), + ), + ), + data_vars=dict( + triangle_computation_connectivity=( + ("element_computation_index", "triangle_forcing_nodes"), + Elmts_calc.astype(int), + {"description": "Computational mesh triangle connectivity"}, + ), + node_forcing_longitude=( + "node_forcing_index", + Nodes_forz[:, 0], + {"units": "degrees_east", "description": "Forcing mesh node longitude"}, + ), + node_forcing_latitude=( + "node_forcing_index", + Nodes_forz[:, 1], + {"units": "degrees_north", "description": "Forcing mesh node latitude"}, + ), + 
triangle_forcing_connectivity=( + ("element_forcing_index", "triangle_forcing_nodes"), + Elmts_forz.astype(int), + {"description": "Forcing mesh triangle connectivity"}, + ), + wind_directions=( + "wind_direction_index", + wind_directions, + {"units": "degrees", "description": "Discretized wind directions"}, + ), + total_elements=( + (), + num_elements, + {"description": "Number of forcing elements"}, + ), + simulation_duration_hours=( + (), + simulation_duration_hours, + {"units": "hours"}, + ), + time_step_hours=((), simulation_time_step_hours, {"units": "hours"}), + wind_speed=((), wind_speed, {"units": "m/s"}), + location_name=((), site), + eddy_viscosity=((), Eddy, {"units": "m2/s"}), + chezy_coefficient=((), Chezy), + reference_date=((), reference_date_str), + forcing_time_step=((), forcing_time_step, {"units": "hour"}), + ), + attrs={ + "title": "GreenSurge Simulation Input Dataset", + "institution": "GeoOcean", + "model": "GreenSurge", + "created": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + }, + ) + + # Add coordinate attributes + ds["time_forcing_index"].attrs = { + "standard_name": "time", + "units": f"hours since {reference_date_str} +00:00", + "calendar": "gregorian", + } + ds["node_computation_longitude"].attrs = { + "standard_name": "longitude", + "units": "degrees_east", + } + ds["node_computation_latitude"].attrs = { + "standard_name": "latitude", + "units": "degrees_north", + } + + return ds + + +def point_to_segment_distance_vectorized( + px: np.ndarray, + py: np.ndarray, + ax: float, + ay: float, + bx: float, + by: float, +) -> np.ndarray: + """ + Compute vectorized distance from points (px, py) to segment [A, B]. + + Parameters + ---------- + px, py : np.ndarray + Arrays of point coordinates. + ax, ay : float + Coordinates of segment start point A. + bx, by : float + Coordinates of segment end point B. + + Returns + ------- + np.ndarray + Array of distances from each point to the segment. + """ + ab_x = bx - ax + ab_y = by - ay + ab_len_sq = ab_x**2 + ab_y**2 + + if ab_len_sq == 0: + return np.sqrt((px - ax) ** 2 + (py - ay) ** 2) + + t = np.clip(((px - ax) * ab_x + (py - ay) * ab_y) / ab_len_sq, 0, 1) + closest_x = ax + t * ab_x + closest_y = ay + t * ab_y + + return np.sqrt((px - closest_x) ** 2 + (py - closest_y) ** 2) + + +def generate_structured_points_vectorized( + triangle_connectivity: np.ndarray, + node_lon: np.ndarray, + node_lat: np.ndarray, +) -> Tuple[np.ndarray, np.ndarray]: + """ + Generate structured points for triangles (vectorized version, eliminates Python loop). + + Parameters + ---------- + triangle_connectivity : np.ndarray + Array of shape (n_triangles, 3) with vertex indices. + node_lon, node_lat : np.ndarray + Arrays of node coordinates. + + Returns + ------- + lon_all, lat_all : np.ndarray + Arrays of shape (n_triangles, 10) with structured point coordinates. 
+ """ + A_lon = node_lon[triangle_connectivity[:, 0]] + A_lat = node_lat[triangle_connectivity[:, 0]] + B_lon = node_lon[triangle_connectivity[:, 1]] + B_lat = node_lat[triangle_connectivity[:, 1]] + C_lon = node_lon[triangle_connectivity[:, 2]] + C_lat = node_lat[triangle_connectivity[:, 2]] + + G_lon = (A_lon + B_lon + C_lon) / 3 + G_lat = (A_lat + B_lat + C_lat) / 3 + + M_AB_lon, M_AB_lat = (A_lon + B_lon) / 2, (A_lat + B_lat) / 2 + M_BC_lon, M_BC_lat = (B_lon + C_lon) / 2, (B_lat + C_lat) / 2 + M_CA_lon, M_CA_lat = (C_lon + A_lon) / 2, (C_lat + A_lat) / 2 + M_AG_lon, M_AG_lat = (A_lon + G_lon) / 2, (A_lat + G_lat) / 2 + M_BG_lon, M_BG_lat = (B_lon + G_lon) / 2, (B_lat + G_lat) / 2 + M_CG_lon, M_CG_lat = (C_lon + G_lon) / 2, (C_lat + G_lat) / 2 + + lon_all = np.column_stack( + [ + A_lon, + B_lon, + C_lon, + G_lon, + M_AB_lon, + M_BC_lon, + M_CA_lon, + M_AG_lon, + M_BG_lon, + M_CG_lon, + ] + ) + lat_all = np.column_stack( + [ + A_lat, + B_lat, + C_lat, + G_lat, + M_AB_lat, + M_BC_lat, + M_CA_lat, + M_AG_lat, + M_BG_lat, + M_CG_lat, + ] + ) + + return lon_all, lat_all def GS_wind_partition_tri(ds_GFD_info, xds_vortex): + """ + Interpolate vortex model data to triangle elements using GreenSurge wind partitioning. + Parameters + ---------- + ds_GFD_info : xr.Dataset + Dataset containing GreenSurge grid information. + xds_vortex : xr.Dataset + Dataset containing vortex model data. + + Returns + ------- + xr.Dataset + Dataset containing interpolated vortex model data at triangle elements. + """ element_forcing_index = ds_GFD_info.element_forcing_index.values num_element = len(element_forcing_index) triangle_forcing_connectivity = ds_GFD_info.triangle_forcing_connectivity.values @@ -2254,3 +1833,124 @@ def GS_wind_partition_tri(ds_GFD_info, xds_vortex): }, ) return xds_vortex_interp + + +def create_triangle_mask( + lon_grid: np.ndarray, lat_grid: np.ndarray, triangle: np.ndarray +) -> np.ndarray: + """ + Create a mask for a triangle defined by its vertices. + + Parameters + ---------- + lon_grid : np.ndarray + The longitude grid. + lat_grid : np.ndarray + The latitude grid. + triangle : np.ndarray + The triangle vertices. + + Returns + ------- + np.ndarray + The mask for the triangle. + """ + + triangle_path = Path(triangle) + lon_grid, lat_grid = np.meshgrid(lon_grid, lat_grid) + points = np.vstack([lon_grid.flatten(), lat_grid.flatten()]).T + inside_mask = triangle_path.contains_points(points) + mask = inside_mask.reshape(lon_grid.shape) + + return mask + + +def GS_LinearWindDragCoef( + Wspeed: np.ndarray, CD_Wl_abc: np.ndarray, Wl_abc: np.ndarray +) -> np.ndarray: + """ + Calculate the linear drag coefficient based on wind speed and specified thresholds. + + Parameters + ---------- + Wspeed : np.ndarray + Wind speed values (1D array). + CD_Wl_abc : np.ndarray + Coefficients for the drag coefficient calculation, should be a 1D array of length 3. + Wl_abc : np.ndarray + Wind speed thresholds for the drag coefficient calculation, should be a 1D array of length 3. + + Returns + ------- + np.ndarray + Calculated drag coefficient values based on the input wind speed. 
+ """ + + Wla = Wl_abc[0] + Wlb = Wl_abc[1] + Wlc = Wl_abc[2] + CDa = CD_Wl_abc[0] + CDb = CD_Wl_abc[1] + CDc = CD_Wl_abc[2] + + # coefs lines y=ax+b + if not Wla == Wlb: + a_CDline_ab = (CDa - CDb) / (Wla - Wlb) + b_CDline_ab = CDb - a_CDline_ab * Wlb + else: + a_CDline_ab = 0 + b_CDline_ab = CDa + if not Wlb == Wlc: + a_CDline_bc = (CDb - CDc) / (Wlb - Wlc) + b_CDline_bc = CDc - a_CDline_bc * Wlc + else: + a_CDline_bc = 0 + b_CDline_bc = CDb + a_CDline_cinf = 0 + b_CDline_cinf = CDc + + if Wspeed <= Wlb: + CD = a_CDline_ab * Wspeed + b_CDline_ab + elif Wspeed > Wlb and Wspeed <= Wlc: + CD = a_CDline_bc * Wspeed + b_CDline_bc + else: + CD = a_CDline_cinf * Wspeed + b_CDline_cinf + + return CD + + +def actualize_grid_info( + path_ds_origin: str, + ds_GFD_calc_info: xr.Dataset, +) -> None: + """ + Actualizes the grid information in the GFD calculation info dataset + by adding the node coordinates and triangle connectivity from the original dataset. + Parameters + ---------- + path_ds_origin : str + Path to the original dataset containing the mesh2d node coordinates. + ds_GFD_calc_info : xr.Dataset + The dataset containing the GFD calculation information to be updated. + Returns + ------- + ds_GFD_calc_info : xr.Dataset + The updated dataset with the node coordinates and triangle connectivity. + """ + + ds_ori = xr.open_dataset(path_ds_origin) + + ds_GFD_calc_info["node_computation_longitude"] = ( + ("node_cumputation_index",), + ds_ori.mesh2d_node_x.values, + ) + ds_GFD_calc_info["node_computation_latitude"] = ( + ("node_cumputation_index",), + ds_ori.mesh2d_node_y.values, + ) + ds_GFD_calc_info["triangle_computation_connectivity"] = ( + ("element_computation_index", "triangle_forcing_nodes"), + (ds_ori.mesh2d_face_nodes.values - 1).astype("int32"), + ) + + return ds_GFD_calc_info diff --git a/bluemath_tk/topo_bathy/OCSMod.py b/bluemath_tk/topo_bathy/OCSMod.py deleted file mode 100644 index 6722afb..0000000 --- a/bluemath_tk/topo_bathy/OCSMod.py +++ /dev/null @@ -1,62 +0,0 @@ -from ocsmesh.mesh import EuclideanMesh2D - - -class EuclideanMesh2D(EuclideanMesh2D): - """ - A subclass of EuclideanMesh2D that adds some @property / @setter methods - """ - - @property - def value(self): - """Reference to the value property of the internal mesh object""" - return self.msh_t.value - - @value.setter - def value(self, value): - self.msh_t.value = value - - @property - def ndims(self): - """Reference to the number of dimensions of the internal mesh object""" - return self.msh_t.ndims - - @property - def tria3(self): - """Reference to TRI3 (triangular 3-node elements) of the internal mesh object""" - return self.msh_t.tria3 - - @tria3.setter - def tria3(self, value): - self.msh_t.tria3 = value - - @property - def quad4(self): - """Reference to QUAD4 (quadrilateral 4-node elements) of the internal mesh object""" - return self.msh_t.quad4 - - @quad4.setter - def quad4(self, value): - self.msh_t.quad4 = value - - @property - def mshID(self): - """Reference to the mesh ID of the internal mesh object""" - return self.msh_t.mshID - - @property - def hexa8(self): - """Reference to HEXA8 (hexahedral 8-node elements) of the internal mesh object""" - return self.msh_t.hexa8 - - @hexa8.setter - def hexa8(self, value): - self.msh_t.hexa8 = value - - @property - def vert2(self): - """Reference to underlying mesh 2D vertices structure""" - return self.msh_t.vert2 - - @vert2.setter - def vert2(self, value): - self.msh_t.vert2 = value diff --git a/bluemath_tk/topo_bathy/mesh_utils.py 
b/bluemath_tk/topo_bathy/mesh_utils.py deleted file mode 100644 index 33a7293..0000000 --- a/bluemath_tk/topo_bathy/mesh_utils.py +++ /dev/null @@ -1,776 +0,0 @@ -import re -from collections import defaultdict -from copy import deepcopy -from typing import Dict, List, Tuple - -import cartopy.crs as ccrs -import geopandas as gpd -import matplotlib.pyplot as plt -import numpy as np - -# import ocsmesh -import pandas as pd -import rasterio - -# from jigsawpy.msh_t import jigsaw_msh_t -from matplotlib.axes import Axes -from pyproj.enums import TransformDirection -from rasterio.mask import mask -from shapely.geometry import LineString, MultiLineString, Polygon, mapping -from shapely.ops import transform - -from ..core.geo import buffer_area_for_polygon -from ..core.plotting.colors import hex_colors_land, hex_colors_water -from ..core.plotting.utils import join_colormaps - - -def plot_mesh_edge( - msh_t: jigsaw_msh_t, ax: Axes = None, to_geo: callable = None, **kwargs -) -> None: - """ - Plots the edges of a triangular mesh on a given set of axes. - - Parameters - ---------- - msh_t : jigsaw_msh_t - An object containing mesh data. It must have: - - 'vert2['coord']' containing the coordinates of the mesh vertices - - 'tria3['index']' containing the indices of the triangles - ax : Axes, optional - The axes to plot on. If None, a new plot is created. Default is None. - to_geo : callable, optional - A function to transform (x, y) coordinates from projected to geographic CRS. - **kwargs : keyword arguments, optional - Additional keyword arguments passed to the `triplot` function. - These can be used to customize the plot (e.g., color, line style). - """ - - crd = np.array(msh_t.vert2["coord"], copy=True) - cnn = msh_t.tria3["index"] - - if to_geo is not None: - crd[:, 0], crd[:, 1] = to_geo(crd[:, 0], crd[:, 1]) - bnd = [crd[:, 0].min(), crd[:, 0].max(), crd[:, 1].min(), crd[:, 1].max()] - - ax.triplot(crd[:, 0], crd[:, 1], cnn, **kwargs) - ax.set_extent([*bnd], crs=ccrs.PlateCarree()) - gl = ax.gridlines(draw_labels=True) - gl.top_labels = False - gl.right_labels = False - - -def plot_mesh_vals( - msh_t: jigsaw_msh_t, - ax: Axes = None, - colorbar: bool = True, - clim: Tuple[float, float] = None, - to_geo: callable = None, - **kwargs, -) -> Axes: - """ - Plots the mesh values on a triangular mesh, with optional transformation - from UTM to geographic coordinates. - - Parameters - ---------- - msh_t : jigsaw_msh_t - An object containing the mesh data. Must include: - - vert2['coord']: coordinates of mesh vertices (N, 2) - - tria3['index']: triangle connectivity (M, 3) - - value: values to plot (length M or Mx1) - ax : matplotlib Axes, optional - Axes to draw on. If None, a new one is created. - colorbar : bool, optional - If True, show colorbar. Default is True. - clim : tuple, optional - Color limits (vmin, vmax). If None, autoscale. - to_geo : callable, optional - Function to transform (x, y) in projected coordinates to (lon, lat), - **kwargs : additional keyword args for tricontourf - - Returns - ------- - ax : matplotlib Axes - The axes with the plot. 
- """ - - crd = np.array(msh_t.vert2["coord"], copy=True) - cnn = msh_t.tria3["index"] - val = msh_t.value.flatten() - - if to_geo is not None: - crd[:, 0], crd[:, 1] = to_geo(crd[:, 0], crd[:, 1]) - - if ax is None: - _, ax = plt.subplots() - - mappable = ax.tricontourf(crd[:, 0], crd[:, 1], cnn, val, **kwargs) - - if colorbar: - if clim is not None: - mappable.set_clim(*clim) - cbar = plt.colorbar(mappable, ax=ax) - cbar.set_label("Mesh spacing conditioning (m)") - - gl = ax.gridlines(draw_labels=True) - gl.top_labels = False - gl.right_labels = False - return ax - - -def plot_bathymetry(rasters_path: List[str], polygon: Polygon, ax: Axes) -> Axes: - """ - Plots bathymetric raster data and overlays a polygon on top of it. - - Parameters - ---------- - rasters_path : List[str] - A list of file paths to the raster files. - polygon : Polygon - A polygon to overlay on the raster data. - ax : Axes - The axes on which to plot the data. - - Returns - ------- - ax : Axes - The axes object with the plotted raster data and polygon. - """ - - data = [] - for path in rasters_path: - with rasterio.open(path) as src: - raster_data = src.read(1) - no_data_value = src.nodata - if no_data_value is not None: - raster_data = np.ma.masked_equal(raster_data, no_data_value) - data.append(raster_data) - transform = src.transform - - x_polygon, y_polygon = polygon.exterior.xy - - height, width = data[0].shape - cols, rows = np.meshgrid(np.arange(width), np.arange(height)) - xs, ys = rasterio.transform.xy(transform, rows, cols) - - vmin = np.nanmin(data[0]) - vmax = np.nanmax(data[0]) - - cmap, norm = join_colormaps( - cmap1=hex_colors_water, - cmap2=hex_colors_land, - value_range1=(vmin, 0.0), - value_range2=(0.0, vmax), - name="raster_cmap", - ) - - im = ax.imshow( - data[0], - cmap=cmap, - norm=norm, - extent=(np.min(xs), np.max(xs), np.min(ys), np.max(ys)), - ) - cbar = plt.colorbar(im, ax=ax) - cbar.set_label("Depth (m)") - ax.set_title("Raster") - - ax.plot(x_polygon, y_polygon, color="red", linewidth=1) - gl = ax.gridlines(draw_labels=True) - gl.top_labels = False - gl.right_labels = False - - return ax - - -def clip_bathymetry( - input_raster_paths: List[str], - output_path: str, - domain: Polygon, - margin: float, -) -> float: - """ - Clips bathymetric raster data using a specified domain polygon, - applies a margin buffer, and saves the clipped raster. - - Parameters - ---------- - input_raster_paths : List[str] - List of file paths to the input raster files. - output_path : str - Destination file path to save the clipped raster. - domain : Polygon - Polygon geometry used to clip the raster. - margin : float - Buffer factor applied to the domain before clipping. - - Returns - ------- - float - The mean resolution of the raster. 
- """ - - buffered_polygon = buffer_area_for_polygon(domain, margin) - - for path in input_raster_paths: - with rasterio.open(path) as src: - domain_gdf = gpd.GeoDataFrame( - index=[0], geometry=[buffered_polygon], crs="EPSG:4326" - ).to_crs(src.crs) - - out_image, out_transform = mask( - src, [mapping(domain_gdf.geometry[0])], crop=True - ) - - out_meta = src.meta.copy() - out_meta.update( - { - "driver": "GTiff", - "height": out_image.shape[1], - "width": out_image.shape[2], - "transform": out_transform, - } - ) - mean_raster_resolution = get_raster_resolution(path) - with rasterio.open(output_path, "w", **out_meta) as dest: - dest.write(out_image) - return mean_raster_resolution - - -def get_raster_resolution(raster_path: str) -> float: - """ - Get the mean resolution of a raster in meters. - - Parameters - ---------- - raster_path : str - Path to the raster file. - Returns - ------- - float - Mean resolution of the raster in meters. - ---------- - Notes - This function uses rasterio to open the raster file and extract its resolution. - The mean resolution is calculated as the average of the absolute values of the x and y resolutions. - """ - - with rasterio.open(raster_path) as src: - res_x, res_y = src.res - mean_resolution = (abs(res_x) + abs(res_y)) / 2 - return mean_resolution - - -def clip_bati_manning( - rasters_path: List[str], - output_path: str, - domain: Polygon, - mas: float, - UTM: bool, - manning: float, -) -> None: - """ - Clips bathymetric raster data using a specified domain polygon and applies - Manning's coefficient. - - Parameters - ---------- - rasters_path : List[str] - A list of file paths to the raster files to be clipped. - output_path : str - The file path to save the clipped raster data. - domain : Polygon - The domain polygon used to clip the rasters. - mas : float - A buffer factor applied to the domain polygon based on its area and length. - UTM : bool - If True, assumes the coordinate reference system is EPSG:4326; - otherwise, assumes EPSG:32630 (UTM projection). - manning : float - The Manning's coefficient to apply to the raster data. - """ - - original_polygon = buffer_area_for_polygon(domain, mas) - - if UTM: - crrs = "EPSG:4326" - else: - crrs = "EPSG:32630" - for path in rasters_path: - with rasterio.open(path) as src: - gdf_polygon = gpd.GeoDataFrame( - index=[0], geometry=[original_polygon], crs=crrs - ) - gdf_polygon = gdf_polygon.to_crs(src.crs) - - out_image, out_transform = mask( - src, [mapping(gdf_polygon.geometry[0])], crop=True - ) - - out_meta = src.meta.copy() - out_meta.update( - { - "driver": "GTiff", - "height": out_image.shape[1], - "width": out_image.shape[2], - "transform": out_transform, - } - ) - - out_image = np.ones(out_image.shape) * (-manning) - with rasterio.open(output_path, "w", **out_meta) as dest: - dest.write(out_image) - - -def plot_boundaries(mesh: jigsaw_msh_t, ax: Axes, to_geo: callable = None) -> None: - """ - Plots the boundaries of a mesh, including ocean, interior (islands), and land areas. - - Parameters - ---------- - mesh : jigsaw_msh_t - The mesh object containing the mesh data and boundaries. - ax : Axes - The axes on which to plot the boundaries. - to_geo : callable, optional - A function to transform coordinates from projected to geographic CRS. 
- """ - - plot_mesh_edge(mesh.msh_t, to_geo=to_geo, ax=ax, color="gray", lw=0.5) - - def plot_boundary(gdf, color, label): - try: - if to_geo: - gdf = gdf.copy() - gdf["geometry"] = gdf["geometry"].apply( - lambda geom: transform(to_geo, geom) - ) - gdf.plot(ax=ax, color=color, label=label) - except Exception as _e: - print(f"No {label} boundaries available") - - plot_boundary(mesh.boundaries.ocean(), color="b", label="Ocean") - plot_boundary(mesh.boundaries.interior(), color="g", label="Islands") - plot_boundary(mesh.boundaries.land(), color="r", label="Land") - - ax.set_title("Mesh Boundaries") - ax.legend() - - -def plot_bathymetry_interp(mesh: jigsaw_msh_t, to_geo, ax: Axes) -> None: - """ - Plots the interpolated bathymetry data on a mesh. - - Parameters - ---------- - mesh : jigsaw_msh_t - The mesh object containing the bathymetry values and mesh structure. - ax : Axes - The axes on which to plot the interpolated bathymetry. - to_geo : callable - A function to transform coordinates from projected to geographic CRS. - """ - - crd = np.array(mesh.msh_t.vert2["coord"], copy=True) - - if to_geo is not None: - crd[:, 0], crd[:, 1] = to_geo(crd[:, 0], crd[:, 1]) - bnd = [crd[:, 0].min(), crd[:, 0].max(), crd[:, 1].min(), crd[:, 1].max()] - - triangle = mesh.msh_t.tria3["index"] - Z = np.mean(mesh.msh_t.value.flatten()[triangle], axis=1) - vmin = np.nanmin(Z) - vmax = np.nanmax(Z) - - cmap, norm = join_colormaps( - cmap1=hex_colors_water, - cmap2=hex_colors_land, - value_range1=(vmin, 0.0), - value_range2=(0.0, vmax), - name="raster_cmap", - ) - - im = ax.tripcolor( - crd[:, 0], - crd[:, 1], - triangle, - facecolors=Z, - cmap=cmap, - norm=norm, - ) - ax.set_title("Interpolated Bathymetry") - gl = ax.gridlines(draw_labels=True) - gl.top_labels = False - gl.right_labels = False - cbar = plt.colorbar(im, ax=ax) - cbar.set_label("Depth (m)") - ax.set_extent([*bnd], crs=ccrs.PlateCarree()) - - -def simply_polygon(base_shape: Polygon, simpl_UTM: float, project) -> Polygon: - """ - Simplifies a polygon by transforming it to a projected coordinate system (e.g., UTM), - applying geometric simplification, and transforming it back to geographic coordinates. - - Parameters - ---------- - base_shape : Polygon - The input polygon in geographic coordinates (e.g., WGS84). - simpl_UTM : float - Tolerance for simplification (in projected units, typically meters). Higher values result in simpler shapes. - project : pyproj.Transformer.transform - A projection function that converts coordinates from geographic to projected (e.g., WGS84 → UTM). - - Returns - ------- - Polygon - The simplified polygon in geographic coordinates. - """ - - base_shape_utm = transform(project, base_shape) - - simple_shape_utm = base_shape_utm.simplify(simpl_UTM) - - simple_shape = transform( - lambda x, y: project(x, y, direction=TransformDirection.INVERSE), - simple_shape_utm, - ) - - return simple_shape - - -def remove_islands(base_shape: Polygon, threshold_area: float, project) -> Polygon: - """ - Transforms a polygon to a projected coordinate system (e.g., UTM), filters out small interior rings - (holes) based on a minimum area threshold, and transforms the simplified polygon back to geographic coordinates. - - Parameters - ---------- - base_shape : Polygon - The input polygon in geographic coordinates (e.g., WGS84). - threshold_area : float - Minimum area (in projected units, e.g., square meters) for interior rings (holes) to be preserved. - Interior rings smaller than this threshold will be removed. 
- project : callable - A projection function, typically `pyproj.Transformer.transform`, that converts coordinates - from geographic to projected CRS. - - Returns - ------- - Polygon - The polygon with small interior rings removed, transformed back to geographic coordinates. - """ - - base_shape_utm = transform(project, base_shape) - interior_shape_utm = [ - interior - for interior in base_shape_utm.interiors - if Polygon(interior).area >= threshold_area - ] - simple_shape_utm = Polygon(base_shape_utm.exterior, interior_shape_utm) - simple_shape = transform( - lambda x, y: project(x, y, direction=TransformDirection.INVERSE), - simple_shape_utm, - ) - - return simple_shape - - -def decode_open_boundary_data(data: List[str]) -> dict: - """ - Decodes open boundary data from a given list of strings and - returns a dictionary containing boundary information. - - Parameters - ---------- - data : List[str] - List of strings containing boundary data. - - Returns - ------- - dict - A dictionary with keys corresponding to open boundary identifiers (e.g., 'open_boundary_1') - and values as lists of integers representing boundary node indices. - - Examples - -------- - >>> data = [ - ... "100! 200! 300!", - ... "open_boundary_1", - ... "open_boundary_2", - ... "open_boundary_3", - ... "land boundaries", - ... "open_boundary_1! 10", - ... "open_boundary_2! 20", - ... "open_boundary_3! 30", - ... ] - >>> boundaries = decode_open_boundary_data(data) - >>> print(boundaries) - {'open_boundary_1': [10], 'open_boundary_2': [20], 'open_boundary_3': [30]} - """ - - N_obd = int(data[0].split("!")[0]) - boundary_info = {} - key = data[2][-16:-1] - boundary_info[key] = [] - - for line in data[3:-1]: - line = line.strip() - if "!" not in line: - N = int(line) - boundary_info[key].append(N) - else: - if "land boundaries" in line: - if len(boundary_info) != N_obd: - print("reading error") - return boundary_info - match = re.search(r"open_boundary_\d+", line) - key = match.group(0) - boundary_info[key] = [] - - return boundary_info - - -def compute_circumcenter(p0: np.ndarray, p1: np.ndarray, p2: np.ndarray) -> np.ndarray: - """ - Technical Reference Manual D-Flow Flexible Mesh 13 May 2025 Revision: 80268 - Compute the circumcenter of a triangle from its 3 vertices. - Ref: Figure 3.12, Equation (3.6), D-Flow Flexible Mesh Technical Reference Manual. - - Parameters - ---------- - p0, p1, p2 : np.ndarray - 2D coordinates of the triangle vertices. - - Returns - ------- - np.ndarray - 2D coordinates of the circumcenter. - """ - - A = p1 - p0 - B = p2 - p0 - AB_perp = np.array([A[1], -A[0]]) - AC_perp = np.array([B[1], -B[0]]) - mid_AB = (p0 + p1) / 2 - mid_AC = (p0 + p2) / 2 - M = np.array([AB_perp, -AC_perp]).T - b = mid_AC - mid_AB - try: - t = np.linalg.solve(M, b) - center = mid_AB + t[0] * AB_perp - except np.linalg.LinAlgError: - center = (p0 + p1 + p2) / 3 - return center - - -def build_edge_to_cells(elements: np.ndarray) -> Dict[Tuple[int, int], List[int]]: - """ - Technical Reference Manual D-Flow Flexible Mesh 13 May 2025 Revision: 80268 - Build edge -> list of adjacent element indices (max 2). - Ref: Connectivity structure implied in Section 3.5.1 and Fig 3.2a. - - Parameters - ---------- - elements : np.ndarray - Array of triangle elements (indices of vertices). - - Returns - ------- - edge_to_cells : Dict[Tuple[int, int], List[int]] - Dictionary mapping edges to the list of adjacent element indices. 
- """ - - edge_to_cells = defaultdict(list) - for idx, elem in enumerate(elements): - for i in range(3): - a = elem[i] - b = elem[(i + 1) % 3] - edge = tuple(sorted((a, b))) - edge_to_cells[edge].append(idx) - - return edge_to_cells - - -def detect_circumcenter_too_close( - X: np.ndarray, Y: np.ndarray, elements: np.ndarray, aj_threshold: float = 0.1 -) -> np.ndarray: - """ - Technical Reference Manual D-Flow Flexible Mesh 13 May 2025 Revision: 80268 - Detect elements where the distance between adjacent circumcenters is small compared - to the shared edge length: aj = ||xRj - xLj|| / ||x1j - x0|| - Ref: Equation (3.6) in Section 3.5.1 (Grid Orthogonalization), D-Flow Flexible Mesh Manual. - - Parameters - ---------- - X, Y : np.ndarray - 1D arrays of x and y coordinates of the nodes. - elements : np.ndarray - 2D array of shape (nelmts, 3) containing the node indices for each triangle element. - aj_threshold : float, optional - Threshold for the ratio of circumcenter distance to edge length. Default is 0.1. - - Returns - ------- - bad_elements_mask : np.ndarray - Boolean mask indicating which elements are problematic (True if bad). - """ - - nodes = np.column_stack((X, Y)) - centers = np.array( - [ - compute_circumcenter(nodes[i0], nodes[i1], nodes[i2]) - for i0, i1, i2 in elements - ] - ) # Ref: Fig 3.12 - - edge_to_cells = build_edge_to_cells(elements) - bad_elements_mask = np.zeros(len(elements), dtype=bool) - - for edge, cells in edge_to_cells.items(): - if len(cells) != 2: - continue # Internal edges only (Ref: Ji in Eq. 3.7) - - idx0, idx1 = cells - c0 = centers[idx0] # x_Lj - c1 = centers[idx1] # x_Rj - node0 = nodes[edge[0]] # x0 - node1 = nodes[edge[1]] # x1j - - edge_length = np.linalg.norm(node1 - node0) # Denominator of aj (||x1j - x0||) - center_dist = np.linalg.norm(c1 - c0) # Numerator of aj (||xRj - xLj||) - - aj = center_dist / edge_length if edge_length > 0 else 0 # Equation (3.6) - - if aj < aj_threshold: - # If the ratio is too low, mark both elements as problematic - bad_elements_mask[idx0] = True - bad_elements_mask[idx1] = True - - return bad_elements_mask - - -def define_mesh_target_size( - rasters: List[rasterio.io.DatasetReader], - raster_resolution_meters: float, - depth_ranges: dict, - nprocs: int = 1, -) -> ocsmesh.Hfun: - """ - Define the mesh target size based on depth ranges and their corresponding values. - - Parameters - ---------- - rasters : List[rasterio.io.DatasetReader] - List of raster objects. - raster_resolution_meters : float - Resolution of the raster in meters. - depth_ranges : dict, optional - Dictionary containing depth ranges and their corresponding mesh sizes and rates. - Format: {(lower_bound, upper_bound): {'value': mesh_size, 'rate': expansion_rate}} - nprocs : int, optional - Number of processors to use for the mesh generation. Default is 1. - - Returns - ------- - mesh_spacing : ocsmesh.Hfun - Hfun object with the defined mesh target size. 
- """ - - if depth_ranges is None: - # Default depth-to-mesh size mapping - depth_ranges = { - (-200_000, -250): {"value": 26_000, "rate": 0.0001}, # Very deep ocean - (-250, -100): {"value": 13_000, "rate": 0.0001}, # Continental slope - (-100, -75): {"value": 6_500, "rate": 0.0001}, # Outer shelf - (-75, -25): {"value": 3_250, "rate": 0.0001}, # Mid shelf - (-25, 2.5): {"value": 1_700, "rate": 0.0001}, # Coastal zone - } - - all_values = [zone["value"] for zone in depth_ranges.values()] - - points_by_cell = 1 # Number of depth points per minimum size cell for the final cell size definition - - rasters_copy = deepcopy(rasters) - for raster in rasters_copy: - raster.resample( - scaling_factor=raster_resolution_meters * points_by_cell / min(all_values) - ) - - mesh_spacing = ocsmesh.Hfun( - rasters_copy, - hmin=min(all_values), - hmax=max(all_values), - nprocs=nprocs, - ) - - for (lower, upper), params in depth_ranges.items(): - mesh_spacing.add_topo_bound_constraint( - value=params["value"], - lower_bound=lower, - upper_bound=upper, - value_type="max", - rate=params["rate"], - ) - - return mesh_spacing - - -def read_lines(poly_line: str) -> MultiLineString: - """ - Reads a CSV file containing coordinates of a polyline and returns a MultiLineString. - The CSV file should have two columns for x and y coordinates, with NaN values indicating breaks in the line. - Parameters - ---------- - poly_line : str - Path to the CSV file containing the polyline coordinates - Returns - ------- - MultiLineString - A MultiLineString object representing the polyline segments - """ - - coords_line = pd.read_csv(poly_line, sep=",", header=None) - segments = [] - current_segment = [] - for index, row in coords_line.iterrows(): - if row.isna().any(): - if current_segment: - segments.append(LineString(current_segment)) - current_segment = [] - else: - current_segment.append(tuple(row)) - - if current_segment: - segments.append(LineString(current_segment)) - return MultiLineString(segments) - - -def get_raster_resolution_meters(lon_center, lat_center, raster_resolution, project): - """ - Calculate the raster resolution in meters based on the center coordinates and the raster resolution in degrees. - - Parameters - ---------- - lon_center : float - Longitude of the center point. - lat_center : float - Latitude of the center point. - raster_resolution : float - Raster resolution in degrees. - Returns - ------- - float - Raster resolution in meters. 
- """ - x_center, y_center = project(lon_center, lat_center) - x_center_raster_resolution, y_center_raster_resolution = project( - lon_center + raster_resolution / np.sqrt(2), - lat_center + raster_resolution / np.sqrt(2), - ) - raster_resolution_meters = np.mean( - [ - abs(x_center - x_center_raster_resolution), - abs(y_center - y_center_raster_resolution), - ] - ) - return raster_resolution_meters diff --git a/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py b/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py index 1c004e6..2f2e611 100644 --- a/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py +++ b/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py @@ -1,579 +1,620 @@ -import os -import os.path as op -from copy import deepcopy -from typing import Union - -import numpy as np -import pandas as pd -import xarray as xr - -from ...additive.greensurge import ( - create_triangle_mask, - create_triangle_mask_from_points, - get_regular_grid, -) -from ...core.operations import nautical_to_mathematical -from .._base_wrappers import BaseModelWrapper - -sbatch_file_example = """#!/bin/bash -#SBATCH --ntasks=1 # Number of tasks (MPI processes) -#SBATCH --partition=geocean # Standard output and error log -#SBATCH --nodes=1 # Number of nodes to use -#SBATCH --mem=4gb # Memory per node in GB (see also --mem-per-cpu) -#SBATCH --time=24:00:00 - -source /home/grupos/geocean/faugeree/miniforge3/etc/profile.d/conda.sh -conda activate GreenSurge - -case_dir=$(ls | awk "NR == $SLURM_ARRAY_TASK_ID") -launchDelft3d.sh --case-dir $case_dir - -output_file="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map.nc" -output_file_compressed="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map_compressed.nc" -output_file_compressed_tmp="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map_compressed_tmp.nc" - -ncap2 -s 'mesh2d_s1=float(mesh2d_s1)' -v -O "$output_file" "$output_file_compressed_tmp" && { - ncks -4 -L 4 "$output_file_compressed_tmp" "$output_file_compressed" - rm "$output_file_compressed_tmp" - [[ "$SLURM_ARRAY_TASK_ID" -ne 1 ]] && rm "$output_file" -} -""" - - -class Delft3dModelWrapper(BaseModelWrapper): - """ - Wrapper for the Delft3d model. - - Attributes - ---------- - default_parameters : dict - The default parameters type for the wrapper. - available_launchers : dict - The available launchers for the wrapper. - """ - - default_parameters = {} - - available_launchers = { - "geoocean-cluster": "launchDelft3d.sh", - "docker_serial": "docker run --rm -v .:/case_dir -w /case_dir geoocean/rocky8 dimr dimr_config.xml", - } - - def __init__( - self, - templates_dir: str, - metamodel_parameters: dict, - fixed_parameters: dict, - output_dir: str, - templates_name: dict = "all", - debug: bool = True, - ) -> None: - """ - Initialize the Delft3d model wrapper. 
- """ - - super().__init__( - templates_dir=templates_dir, - metamodel_parameters=metamodel_parameters, - fixed_parameters=fixed_parameters, - output_dir=output_dir, - templates_name=templates_name, - default_parameters=self.default_parameters, - ) - self.set_logger_name( - name=self.__class__.__name__, level="DEBUG" if debug else "INFO" - ) - - self.sbatch_file_example = sbatch_file_example - - forcing_type = self.fixed_parameters.get("forcing_type", None) - if forcing_type == "ASCII": - self.fixed_parameters.setdefault( - "ExtForceFile", "GreenSurge_GFDcase_wind_ASCII.ext" - ) - elif forcing_type == "netCDF": - self.fixed_parameters.setdefault( - "ExtForceFile", "GreenSurge_GFDcase_wind_netCDF.ext" - ) - - def run_case( - self, - case_dir: str, - launcher: str, - output_log_file: str = "wrapper_out.log", - error_log_file: str = "wrapper_error.log", - postprocess: bool = False, - ) -> None: - """ - Run the case based on the launcher specified. - - Parameters - ---------- - case_dir : str - The case directory. - launcher : str - The launcher to run the case. - output_log_file : str, optional - The name of the output log file. Default is "wrapper_out.log". - error_log_file : str, optional - The name of the error log file. Default is "wrapper_error.log". - """ - - # Get launcher command from the available launchers - launcher = self.list_available_launchers().get(launcher, launcher) - - # Run the case in the case directory - self.logger.info(f"Running case in {case_dir} with launcher={launcher}.") - output_log_file = op.join(case_dir, output_log_file) - error_log_file = op.join(case_dir, error_log_file) - self._exec_bash_commands( - str_cmd=launcher, - out_file=output_log_file, - err_file=error_log_file, - cwd=case_dir, - ) - if postprocess: - self.postprocess_case(case_dir=case_dir) - - def monitor_cases( - self, dia_file_name: str, value_counts: str = None - ) -> Union[pd.DataFrame, dict]: - """ - Monitor the cases based on the status of the .dia files. - - Parameters - ---------- - dia_file_name : str - The name of the .dia file to monitor. - """ - - cases_status = {} - - for case_dir in self.cases_dirs: - case_dir_name = op.basename(case_dir) - case_dia_file = op.join(case_dir, dia_file_name) - if op.exists(case_dia_file): - with open(case_dia_file, "r") as f: - lines = f.readlines() - if any("finished" in line for line in lines[-15:]): - cases_status[case_dir_name] = "FINISHED" - else: - cases_status[case_dir_name] = "RUNNING" - else: - cases_status[case_dir_name] = "NOT STARTED" - - return super().monitor_cases( - cases_status=cases_status, value_counts=value_counts - ) - - -def format_matrix(mat): - return "\n".join( - " ".join(f"{x:.1f}" if abs(x) > 0.01 else "0" for x in line) for line in mat - ) - - -def format_zeros(mat_shape): - return "\n".join("0 " * mat_shape[1] for _ in range(mat_shape[0])) - - -def actualize_grid_info( - path_ds_origin: str, - ds_GFD_calc_info: xr.Dataset, -) -> None: - """ - Actualizes the grid information in the GFD calculation info dataset - by adding the node coordinates and triangle connectivity from the original dataset. - Parameters - ---------- - path_ds_origin : str - Path to the original dataset containing the mesh2d node coordinates. - ds_GFD_calc_info : xr.Dataset - The dataset containing the GFD calculation information to be updated. - Returns - ------- - ds_GFD_calc_info : xr.Dataset - The updated dataset with the node coordinates and triangle connectivity. 
- """ - - ds_ori = xr.open_dataset(path_ds_origin) - - ds_GFD_calc_info["node_computation_longitude"] = ( - ("node_cumputation_index",), - ds_ori.mesh2d_node_x.values, - ) - ds_GFD_calc_info["node_computation_latitude"] = ( - ("node_cumputation_index",), - ds_ori.mesh2d_node_y.values, - ) - ds_GFD_calc_info["triangle_computation_connectivity"] = ( - ("element_computation_index", "triangle_forcing_nodes"), - (ds_ori.mesh2d_face_nodes.values - 1).astype("int32"), - ) - - return ds_GFD_calc_info - - -class GreenSurgeModelWrapper(Delft3dModelWrapper): - """ - Wrapper for the Delft3d model for Greensurge. - """ - - def generate_grid_forcing_file_D3DFM( - self, - case_context: dict, - case_dir: str, - ds_GFD_info: xr.Dataset, - ): - """ - Generate the wind files for a case. - - Parameters - ---------- - case_context : dict - The case context. - case_dir : str - The case directory. - ds_GFD_info : xr.Dataset - The dataset with the GFD information. - """ - dir_steps = case_context.get("dir_steps") - real_dirs = np.linspace(0, 360, dir_steps + 1)[:-1] - i_tes = case_context.get("tesela") - i_dir = case_context.get("direction") - real_dir = real_dirs[i_dir] - dt_forz = case_context.get("dt_forz") - wind_magnitude = case_context.get("wind_magnitude") - simul_time = case_context.get("simul_time") - - node_triangle = ds_GFD_info.triangle_forcing_connectivity.isel( - element_forcing_index=i_tes - ) - lon_teselas = ds_GFD_info.node_forcing_longitude.isel( - node_forcing_index=node_triangle - ).values - lat_teselas = ds_GFD_info.node_forcing_latitude.isel( - node_forcing_index=node_triangle - ).values - - lon_grid = ds_GFD_info.lon_grid.values - lat_grid = ds_GFD_info.lat_grid.values - - x_llcenter = lon_grid[0] - y_llcenter = lat_grid[0] - - n_cols = len(lon_grid) - n_rows = len(lat_grid) - - dx = (lon_grid[-1] - lon_grid[0]) / n_cols - dy = (lat_grid[-1] - lat_grid[0]) / n_rows - X0, X1, X2 = lon_teselas - Y0, Y1, Y2 = lat_teselas - - triangle = [(X0, Y0), (X1, Y1), (X2, Y2)] - mask = create_triangle_mask(lon_grid, lat_grid, triangle).astype(int) - mask_int = np.flip(mask, axis=0) # Ojo - - u = -np.cos(nautical_to_mathematical(real_dir) * np.pi / 180) * wind_magnitude - v = -np.sin(nautical_to_mathematical(real_dir) * np.pi / 180) * wind_magnitude - u_mat = mask_int * u - v_mat = mask_int * v - - self.logger.info( - f"Creating Tecelda {i_tes} direction {int(real_dir)} with u = {u} and v = {v}" - ) - - file_name_u = op.join(case_dir, "GFD_wind_file.amu") - file_name_v = op.join(case_dir, "GFD_wind_file.amv") - - with open(file_name_u, "w+") as fu, open(file_name_v, "w+") as fv: - fu.write( - "### START OF HEADER\n" - + "### This file is created by Deltares\n" - + "### Additional commments\n" - + "FileVersion = 1.03\n" - + "filetype = meteo_on_equidistant_grid\n" - + "NODATA_value = -9999.0\n" - + f"n_cols = {n_cols}\n" - + f"n_rows = {n_rows}\n" - + "grid_unit = degree\n" - + f"x_llcenter = {x_llcenter}\n" - + f"y_llcenter = {y_llcenter}\n" - + f"dx = {dx}\n" - + f"dy = {dy}\n" - + "n_quantity = 1\n" - + "quantity1 = x_wind\n" - + "unit1 = m s-1\n" - + "### END OF HEADER\n" - ) - fv.write( - "### START OF HEADER\n" - + "### This file is created by Deltares\n" - + "### Additional commments\n" - + "FileVersion = 1.03\n" - + "filetype = meteo_on_equidistant_grid\n" - + "NODATA_value = -9999.0\n" - + f"n_cols = {n_cols}\n" - + f"n_rows = {n_rows}\n" - + "grid_unit = degree\n" - + f"x_llcenter = {x_llcenter}\n" - + f"y_llcenter = {y_llcenter}\n" - + f"dx = {dx}\n" - + f"dy = {dy}\n" - + "n_quantity = 1\n" - + 
"quantity1 = y_wind\n" - + "unit1 = m s-1\n" - + "### END OF HEADER\n" - ) - for time in range(4): - if time == 0: - time_real = time - elif time == 1: - time_real = dt_forz - elif time == 2: - time_real = dt_forz + 0.01 - elif time == 3: - time_real = simul_time - fu.write(f"TIME = {time_real} hours since 2022-01-01 00:00:00 +00:00\n") - fv.write(f"TIME = {time_real} hours since 2022-01-01 00:00:00 +00:00\n") - if time in [0, 1]: - fu.write(format_matrix(u_mat) + "\n") - fv.write(format_matrix(v_mat) + "\n") - else: - fu.write(format_zeros(u_mat.shape) + "\n") - fv.write(format_zeros(v_mat.shape) + "\n") - - def generate_grid_forcing_file_netCDF_D3DFM( - self, - case_context: dict, - case_dir: str, - ds_GFD_info: xr.Dataset, - ): - """ - Generate the wind files for a case. - - Parameters - ---------- - case_context : dict - The case context. - case_dir : str - The case directory. - ds_GFD_info : xr.Dataset - The dataset with the GFD information. - """ - - triangle_index = case_context.get("tesela") - direction_index = case_context.get("direction") - wind_direction = ds_GFD_info.wind_directions.values[direction_index] - wind_speed = case_context.get("wind_magnitude") - - connectivity = ds_GFD_info.triangle_forcing_connectivity - triangle_longitude = ds_GFD_info.node_forcing_longitude.isel( - node_forcing_index=connectivity - ).values - triangle_latitude = ds_GFD_info.node_forcing_latitude.isel( - node_forcing_index=connectivity - ).values - - longitude_points_computation = ds_GFD_info.node_computation_longitude.values - latitude_points_computation = ds_GFD_info.node_computation_latitude.values - - x0, x1, x2 = triangle_longitude[triangle_index, :] - y0, y1, y2 = triangle_latitude[triangle_index, :] - - triangle_vertices = [(x0, y0), (x1, y1), (x2, y2)] - triangle_mask = create_triangle_mask_from_points( - longitude_points_computation, latitude_points_computation, triangle_vertices - ) - - angle_rad = nautical_to_mathematical(wind_direction) * np.pi / 180 - wind_u = -np.cos(angle_rad) * wind_speed - wind_v = -np.sin(angle_rad) * wind_speed - - windx = np.zeros((4, len(longitude_points_computation))) - windy = np.zeros((4, len(longitude_points_computation))) - - windx[0:2, triangle_mask] = wind_u - windy[0:2, triangle_mask] = wind_v - - ds_forcing = ds_GFD_info[ - [ - "time_forcing_index", - "node_cumputation_index", - "node_computation_longitude", - "node_computation_latitude", - ] - ] - ds_forcing = ds_forcing.rename( - { - "time_forcing_index": "time", - "node_cumputation_index": "node", - "node_computation_longitude": "longitude", - "node_computation_latitude": "latitude", - } - ) - ds_forcing.attrs = {} - ds_forcing["windx"] = (("time", "node"), windx) - ds_forcing["windy"] = (("time", "node"), windy) - ds_forcing["windx"].attrs = { - "coordinates": "time node", - "long_name": "Wind speed in x direction", - "standard_name": "windx", - "units": "m s-1", - } - ds_forcing["windy"].attrs = { - "coordinates": "time node", - "long_name": "Wind speed in y direction", - "standard_name": "windy", - "units": "m s-1", - } - ds_forcing.to_netcdf(op.join(case_dir, "forcing.nc")) - - self.logger.info( - f"Creating triangle {triangle_index} direction {int(wind_direction)} with u = {wind_u} and v = {wind_v}" - ) - - def build_case( - self, - case_context: dict, - case_dir: str, - ) -> None: - """ - Build the input files for a case. - - Parameters - ---------- - case_context : dict - The case context. - case_dir : str - The case directory. 
- """ - - if case_context.get("forcing_type") == "netCDF": - self.generate_grid_forcing_file_netCDF_D3DFM( - case_context=case_context, - case_dir=case_dir, - ds_GFD_info=case_context.get("ds_GFD_info"), - ) - elif case_context.get("forcing_type") == "ASCII": - if case_context.get("case_num") == 0: - ds_GFD_info = case_context.get("ds_GFD_info") - lon_grid, lat_grid = get_regular_grid( - node_computation_longitude=ds_GFD_info.node_computation_longitude.values, - node_computation_latitude=ds_GFD_info.node_computation_latitude.values, - node_computation_elements=ds_GFD_info.triangle_computation_connectivity.values, - ) - self.ds_GFD_info = deepcopy(case_context.get("ds_GFD_info")) - self.ds_GFD_info["lon_grid"] = np.flip(lon_grid) - self.ds_GFD_info["lat_grid"] = lat_grid - - self.generate_grid_forcing_file_D3DFM( - case_context=case_context, - case_dir=case_dir, - ds_GFD_info=self.ds_GFD_info, - ) - else: - raise ("Unknown forcing type") - - def postprocess_case(self, case_dir: str) -> None: - """ - Postprocess the case output file. - - Parameters - ---------- - case_dir : str - The case directory. - """ - - output_file = op.join(case_dir, "dflowfmoutput/GreenSurge_GFDcase_map.nc") - output_file_compressed = op.join( - case_dir, "dflowfmoutput/GreenSurge_GFDcase_map_compressed.nc" - ) - output_file_compressed_tmp = op.join( - case_dir, "dflowfmoutput/GreenSurge_GFDcase_map_compressed_tmp.nc" - ) - if case_dir == self.output_dir[0]: - # If the case_dir is the output_dir, we do not remove the original file - postprocess_command = f""" - ncap2 -s 'mesh2d_s1=float(mesh2d_s1)' -v -O "{output_file}" "{output_file_compressed_tmp}" - ncks -4 -L 4 "{output_file_compressed_tmp}" "{output_file_compressed}" - rm "{output_file_compressed_tmp}" - """ - else: - postprocess_command = f""" - ncap2 -s 'mesh2d_s1=float(mesh2d_s1)' -v -O "{output_file}" "{output_file_compressed_tmp}" - ncks -4 -L 4 "{output_file_compressed_tmp}" "{output_file_compressed}" - rm "{output_file_compressed_tmp}" - rm "{output_file}" - """ - - self._exec_bash_commands( - str_cmd=postprocess_command, - cwd=case_dir, - ) - - def postprocess_cases(self, ds_GFD_info: xr.Dataset, parallel: bool = False): - """ - Postprocess the cases output files. - - Parameters - ---------- - ds_GFD_info : xr.Dataset - The dataset with the GFD information. - parallel : bool, optional - Whether to run the postprocessing in parallel. Default is False. - """ - - if ( - self.monitor_cases( - dia_file_name="dflowfmoutput/GreenSurge_GFDcase.dia", - value_counts="percentage", - ) - .loc["FINISHED"] - .values - != 100.0 - ): - raise ValueError( - "Not all cases are finished. Please check the status of the cases." 
- ) - - path_computation = op.join( - self.cases_dirs[0], "dflowfmoutput/GreenSurge_GFDcase_map.nc" - ) - ds_GFD_info = actualize_grid_info(path_computation, ds_GFD_info) - dirname, basename = os.path.split(ds_GFD_info.attrs["source"]) - name, ext = os.path.splitext(basename) - new_filepath = os.path.join(dirname, f"{name}_updated{ext}") - ds_GFD_info.to_netcdf(new_filepath) - - # case_ext = "dflowfmoutput/GreenSurge_GFDcase_map_compressed.nc" - case_ext = "dflowfmoutput/GreenSurge_GFDcase_map.nc" - - def preprocess(dataset): - file_name = dataset.encoding.get("source", None) - dir_i = int(file_name.split("_D_")[-1].split("/")[0]) - tes_i = int(file_name.split("_T_")[-1].split("_D_")[0]) - dataset = ( - dataset[["mesh2d_s1"]] - .expand_dims(["forcing_cell"]) - .assign_coords(forcing_cell=[tes_i]) - ) - self.logger.info( - f"Loaded {file_name} with forcing_cell={tes_i} and dir={dir_i}" - ) - return dataset - - folder_postprocess = op.join(self.output_dir, "GreenSurge_Postprocess") - os.makedirs(folder_postprocess, exist_ok=True) - - dir_steps = self.fixed_parameters["dir_steps"] - for idx in range(dir_steps): - paths = self.cases_dirs[idx::dir_steps] - file_paths = [op.join(case_dir, case_ext) for case_dir in paths] - DS = xr.open_mfdataset( - file_paths, - parallel=parallel, - combine="by_coords", - preprocess=preprocess, - ) - DS.load().to_netcdf(op.join(folder_postprocess, f"GreenSurge_DB_{idx}.nc")) +import os +import os.path as op +from copy import deepcopy +from typing import Union + +import numpy as np +import pandas as pd +import xarray as xr + +from ...additive.greensurge import ( + actualize_grid_info, + create_triangle_mask_from_points, + generate_structured_points_vectorized, + get_regular_grid, + point_to_segment_distance_vectorized, +) +from ...core.operations import nautical_to_mathematical +from ...tcs.vortex import vortex2delft_3D_FM_nc +from .._base_wrappers import BaseModelWrapper + +sbatch_file_example = """#!/bin/bash +#SBATCH --ntasks=1 # Number of tasks (MPI processes) +#SBATCH --partition=geocean # Standard output and error log +#SBATCH --nodes=1 # Number of nodes to use +#SBATCH --mem=4gb # Memory per node in GB (see also --mem-per-cpu) +#SBATCH --time=24:00:00 + +source /nfs/home/geocean/faugeree/miniforge3/etc/profile.d/conda.sh +conda activate work + +case_dir=$(ls | awk "NR == $SLURM_ARRAY_TASK_ID") +launchDelft3dcomp.sh --case-dir $case_dir + +output_file="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map.nc" +output_file_raw="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map.raw" + +python3 - < None: + """ + Initialize the Delft3d model wrapper. 
+ """ + + super().__init__( + templates_dir=templates_dir, + metamodel_parameters=metamodel_parameters, + fixed_parameters=fixed_parameters, + output_dir=output_dir, + templates_name=templates_name, + default_parameters=self.default_parameters, + ) + self.set_logger_name( + name=self.__class__.__name__, level="DEBUG" if debug else "INFO" + ) + + self.sbatch_file_example = sbatch_file_example + + forcing_type = self.fixed_parameters.get("forcing_type", None) + if forcing_type == "ASCII": + self.fixed_parameters.setdefault( + "ExtForceFile", "GreenSurge_GFDcase_wind_ASCII.ext" + ) + elif forcing_type == "netCDF": + self.fixed_parameters.setdefault( + "ExtForceFile", "GreenSurge_GFDcase_wind_netCDF.ext" + ) + + def run_case( + self, + case_dir: str, + launcher: str, + output_log_file: str = "wrapper_out.log", + error_log_file: str = "wrapper_error.log", + postprocess: bool = False, + ) -> None: + """ + Run the case based on the launcher specified. + + Parameters + ---------- + case_dir : str + The case directory. + launcher : str + The launcher to run the case. + output_log_file : str, optional + The name of the output log file. Default is "wrapper_out.log". + error_log_file : str, optional + The name of the error log file. Default is "wrapper_error.log". + """ + + # Get launcher command from the available launchers + launcher = self.list_available_launchers().get(launcher, launcher) + + # Run the case in the case directory + self.logger.info(f"Running case in {case_dir} with launcher={launcher}.") + output_log_file = op.join(case_dir, output_log_file) + error_log_file = op.join(case_dir, error_log_file) + self._exec_bash_commands( + str_cmd=launcher, + out_file=output_log_file, + err_file=error_log_file, + cwd=case_dir, + ) + if postprocess: + self.postprocess_case(case_dir=case_dir) + + def monitor_cases( + self, dia_file_name: str, value_counts: str = None + ) -> Union[pd.DataFrame, dict]: + """ + Monitor the cases based on the status of the .dia files. + + Parameters + ---------- + dia_file_name : str + The name of the .dia file to monitor. + """ + + cases_status = {} + + for case_dir in self.cases_dirs: + case_dir_name = op.basename(case_dir) + case_dia_file = op.join(case_dir, dia_file_name) + if op.exists(case_dia_file): + with open(case_dia_file, "r") as f: + lines = f.readlines() + if any("finished" in line for line in lines[-15:]): + cases_status[case_dir_name] = "FINISHED" + else: + cases_status[case_dir_name] = "RUNNING" + else: + cases_status[case_dir_name] = "NOT STARTED" + + return super().monitor_cases( + cases_status=cases_status, value_counts=value_counts + ) + + +def format_matrix(mat): + return "\n".join( + " ".join(f"{x:.1f}" if abs(x) > 0.01 else "0" for x in line) for line in mat + ) + + +def format_zeros(mat_shape): + return "\n".join("0 " * mat_shape[1] for _ in range(mat_shape[0])) + + +class GreenSurgeModelWrapper(Delft3dModelWrapper): + """ + Wrapper for the Delft3d model for Greensurge. + """ + + def generate_grid_forcing_file_D3DFM( + self, + case_context: dict, + case_dir: str, + ds_GFD_info: xr.Dataset, + ): + """ + Generate the wind files for a case. + + Parameters + ---------- + case_context : dict + The case context. + case_dir : str + The case directory. + ds_GFD_info : xr.Dataset + The dataset with the GFD information. 
+ """ + dir_steps = case_context.get("dir_steps") + real_dirs = np.linspace(0, 360, dir_steps + 1)[:-1] + i_tes = case_context.get("tesela") + i_dir = case_context.get("direction") + real_dir = real_dirs[i_dir] + dt_forz = case_context.get("dt_forz") + wind_magnitude = case_context.get("wind_magnitude") + simul_time = case_context.get("simul_time") + + node_triangle = ds_GFD_info.triangle_forcing_connectivity.isel( + element_forcing_index=i_tes + ) + lon_teselas = ds_GFD_info.node_forcing_longitude.isel( + node_forcing_index=node_triangle + ).values + lat_teselas = ds_GFD_info.node_forcing_latitude.isel( + node_forcing_index=node_triangle + ).values + + lon_grid = ds_GFD_info.lon_grid.values + lat_grid = ds_GFD_info.lat_grid.values + + x_llcenter = lon_grid[0] + y_llcenter = lat_grid[0] + + n_cols = len(lon_grid) + n_rows = len(lat_grid) + + dx = (lon_grid[-1] - lon_grid[0]) / n_cols + dy = (lat_grid[-1] - lat_grid[0]) / n_rows + X0, X1, X2 = lon_teselas + Y0, Y1, Y2 = lat_teselas + + triangle = [(X0, Y0), (X1, Y1), (X2, Y2)] + mask = create_triangle_mask_from_points(lon_grid, lat_grid, triangle).astype( + int + ) + mask_int = np.flip(mask, axis=0) # Flip to match grid orientation + + u = -np.cos(nautical_to_mathematical(real_dir) * np.pi / 180) * wind_magnitude + v = -np.sin(nautical_to_mathematical(real_dir) * np.pi / 180) * wind_magnitude + u_mat = mask_int * u + v_mat = mask_int * v + + self.logger.info( + f"Creating cell {i_tes} direction {int(real_dir)} with u = {u} and v = {v}" + ) + + file_name_u = op.join(case_dir, "GFD_wind_file.amu") + file_name_v = op.join(case_dir, "GFD_wind_file.amv") + + with open(file_name_u, "w+") as fu, open(file_name_v, "w+") as fv: + fu.write( + "### START OF HEADER\n" + + "### This file is created by Deltares\n" + + "### Additional commments\n" + + "FileVersion = 1.03\n" + + "filetype = meteo_on_equidistant_grid\n" + + "NODATA_value = -9999.0\n" + + f"n_cols = {n_cols}\n" + + f"n_rows = {n_rows}\n" + + "grid_unit = degree\n" + + f"x_llcenter = {x_llcenter}\n" + + f"y_llcenter = {y_llcenter}\n" + + f"dx = {dx}\n" + + f"dy = {dy}\n" + + "n_quantity = 1\n" + + "quantity1 = x_wind\n" + + "unit1 = m s-1\n" + + "### END OF HEADER\n" + ) + fv.write( + "### START OF HEADER\n" + + "### This file is created by Deltares\n" + + "### Additional commments\n" + + "FileVersion = 1.03\n" + + "filetype = meteo_on_equidistant_grid\n" + + "NODATA_value = -9999.0\n" + + f"n_cols = {n_cols}\n" + + f"n_rows = {n_rows}\n" + + "grid_unit = degree\n" + + f"x_llcenter = {x_llcenter}\n" + + f"y_llcenter = {y_llcenter}\n" + + f"dx = {dx}\n" + + f"dy = {dy}\n" + + "n_quantity = 1\n" + + "quantity1 = y_wind\n" + + "unit1 = m s-1\n" + + "### END OF HEADER\n" + ) + for time in range(4): + if time == 0: + time_real = time + elif time == 1: + time_real = dt_forz + elif time == 2: + time_real = dt_forz + 0.01 + elif time == 3: + time_real = simul_time + fu.write(f"TIME = {time_real} hours since 2022-01-01 00:00:00 +00:00\n") + fv.write(f"TIME = {time_real} hours since 2022-01-01 00:00:00 +00:00\n") + if time in [0, 1]: + fu.write(format_matrix(u_mat) + "\n") + fv.write(format_matrix(v_mat) + "\n") + else: + fu.write(format_zeros(u_mat.shape) + "\n") + fv.write(format_zeros(v_mat.shape) + "\n") + + def generate_grid_forcing_file_netCDF_D3DFM( + self, + case_context: dict, + case_dir: str, + ds_GFD_info: xr.Dataset, + ): + """ + Generate the wind forcing files for a case in netCDF format (optimized version). 
+ """ + triangle_index = case_context.get("tesela") + direction_index = case_context.get("direction") + wind_direction = ds_GFD_info.wind_directions.values[direction_index] + wind_speed = case_context.get("wind_magnitude") + + connectivity = ds_GFD_info.triangle_forcing_connectivity + triangle_longitude = ds_GFD_info.node_forcing_longitude.isel( + node_forcing_index=connectivity + ).values + triangle_latitude = ds_GFD_info.node_forcing_latitude.isel( + node_forcing_index=connectivity + ).values + + connectivity_compo = ds_GFD_info.triangle_computation_connectivity.values + node_lon = ds_GFD_info.node_computation_longitude.values.ravel() + node_lat = ds_GFD_info.node_computation_latitude.values.ravel() + + # Selection triangle vertices + x0, x1, x2 = triangle_longitude[triangle_index, :] + y0, y1, y2 = triangle_latitude[triangle_index, :] + triangle_vertices = np.array([(x0, y0), (x1, y1), (x2, y2)], dtype=float) + + # Compute centroid with NumPy (avoids shapely dependency) + centroid = triangle_vertices.mean(axis=0) + scale_factor = 1.001 + verts_buffered = centroid + (triangle_vertices - centroid) * scale_factor + + # Tolerance based on edge size + edge_lengths = np.linalg.norm( + np.roll(triangle_vertices, -1, axis=0) - triangle_vertices, axis=1 + ) + tol = np.mean(edge_lengths) * 0.001 + + # Vectorized distance to 3 edges (~100x faster than shapely loop) + dist_edge_01 = point_to_segment_distance_vectorized( + node_lon, node_lat, x0, y0, x1, y1 + ) + dist_edge_12 = point_to_segment_distance_vectorized( + node_lon, node_lat, x1, y1, x2, y2 + ) + dist_edge_20 = point_to_segment_distance_vectorized( + node_lon, node_lat, x2, y2, x0, y0 + ) + dist_to_boundary = np.minimum( + np.minimum(dist_edge_01, dist_edge_12), dist_edge_20 + ) + + # Points on the boundary + mask_on_edge = dist_to_boundary < tol + + # Triangles with at least one node on the boundary + mask_tri_to_refine = np.any(mask_on_edge[connectivity_compo], axis=1) + + # Vectorized version of generate_structured_points + lon_structured, lat_structured = generate_structured_points_vectorized( + connectivity_compo[mask_tri_to_refine], + node_lon, + node_lat, + ) + + # Concatenate points + longitude_points_computation = np.concatenate( + [node_lon, lon_structured.ravel()] + ) + latitude_points_computation = np.concatenate([node_lat, lat_structured.ravel()]) + + # Triangle mask (uses matplotlib Path) + triangle_mask = create_triangle_mask_from_points( + longitude_points_computation, + latitude_points_computation, + verts_buffered, + ) + + # Wind computation + angle_rad = nautical_to_mathematical(wind_direction) * np.pi / 180 + wind_u = -np.cos(angle_rad) * wind_speed + wind_v = -np.sin(angle_rad) * wind_speed + + # Initialize and assign wind arrays + n_points = len(longitude_points_computation) + windx = np.zeros((4, n_points)) + windy = np.zeros((4, n_points)) + windx[:2, triangle_mask] = wind_u + windy[:2, triangle_mask] = wind_v + + # Build forcing dataset + ds_forcing = xr.Dataset( + { + "time": ds_GFD_info["time_forcing_index"], + "node": xr.DataArray(np.arange(n_points), dims=["node"]), + "longitude": xr.DataArray( + longitude_points_computation, + dims=["node"], + attrs={ + "description": "Longitude of each mesh node of the computational grid", + "standard_name": "longitude", + "long_name": "longitude", + "units": "degrees_east", + }, + ), + "latitude": xr.DataArray( + latitude_points_computation, + dims=["node"], + attrs={ + "description": "Latitude of each mesh node of the computational grid", + "standard_name": "latitude", + 
"long_name": "latitude", + "units": "degrees_north", + }, + ), + "windx": xr.DataArray( + windx, + dims=["time", "node"], + attrs={ + "coordinates": "time node", + "long_name": "Wind speed in x direction", + "standard_name": "windx", + "units": "m s-1", + }, + ), + "windy": xr.DataArray( + windy, + dims=["time", "node"], + attrs={ + "coordinates": "time node", + "long_name": "Wind speed in y direction", + "standard_name": "windy", + "units": "m s-1", + }, + ), + } + ) + + ds_forcing.to_netcdf(op.join(case_dir, "forcing.nc")) + + self.logger.info( + f"Creating triangle {triangle_index} direction {int(wind_direction)} with u = {wind_u} and v = {wind_v}" + ) + + def build_case( + self, + case_context: dict, + case_dir: str, + ) -> None: + """ + Build the input files for a case. + + Parameters + ---------- + case_context : dict + The case context. + case_dir : str + The case directory. + """ + if case_context.get("SetupType") == "GreenSurge": + if case_context.get("forcing_type") == "netCDF": + self.generate_grid_forcing_file_netCDF_D3DFM( + case_context=case_context, + case_dir=case_dir, + ds_GFD_info=case_context.get("ds_GFD_info"), + ) + elif case_context.get("forcing_type") == "ASCII": + if case_context.get("case_num") == 0: + ds_GFD_info = case_context.get("ds_GFD_info") + lon_grid, lat_grid = get_regular_grid( + node_computation_longitude=ds_GFD_info.node_computation_longitude.values, + node_computation_latitude=ds_GFD_info.node_computation_latitude.values, + node_computation_elements=ds_GFD_info.triangle_computation_connectivity.values, + ) + self.ds_GFD_info = deepcopy(case_context.get("ds_GFD_info")) + self.ds_GFD_info["lon_grid"] = np.flip(lon_grid) + self.ds_GFD_info["lat_grid"] = lat_grid + + self.generate_grid_forcing_file_D3DFM( + case_context=case_context, + case_dir=case_dir, + ds_GFD_info=self.ds_GFD_info, + ) + elif case_context.get("SetupType") == "Dynamic": + mesh = xr.open_dataset(case_context.get("mesh_path")) + vortex = case_context.get("vortex") + forcing = vortex2delft_3D_FM_nc(mesh, vortex) + forcing.to_netcdf(op.join(case_dir, "forcing.nc")) + + def postprocess_case(self, case_dir: str) -> None: + """ + Postprocess the case output file. + + Parameters + ---------- + case_dir : str + The case directory. + """ + + output_file = op.join(case_dir, "dflowfmoutput/GreenSurge_GFDcase_map.nc") + output_file_compressed = op.join( + case_dir, "dflowfmoutput/GreenSurge_GFDcase_map_compressed.nc" + ) + output_file_compressed_tmp = op.join( + case_dir, "dflowfmoutput/GreenSurge_GFDcase_map_compressed_tmp.nc" + ) + if case_dir == self.output_dir[0]: + # If the case_dir is the output_dir, we do not remove the original file + postprocess_command = f""" + ncap2 -s 'mesh2d_s1=float(mesh2d_s1)' -v -O "{output_file}" "{output_file_compressed_tmp}" + ncks -4 -L 4 "{output_file_compressed_tmp}" "{output_file_compressed}" + rm "{output_file_compressed_tmp}" + """ + else: + postprocess_command = f""" + ncap2 -s 'mesh2d_s1=float(mesh2d_s1)' -v -O "{output_file}" "{output_file_compressed_tmp}" + ncks -4 -L 4 "{output_file_compressed_tmp}" "{output_file_compressed}" + rm "{output_file_compressed_tmp}" + rm "{output_file}" + """ + + self._exec_bash_commands( + str_cmd=postprocess_command, + cwd=case_dir, + ) + + def postprocess_cases(self, ds_GFD_info: xr.Dataset, parallel: bool = False): + """ + Postprocess the cases output files. + + Parameters + ---------- + ds_GFD_info : xr.Dataset + The dataset with the GFD information. 
+        parallel : bool, optional
+            Whether to run the postprocessing in parallel. Default is False.
+        """
+
+        finished_percentage = (
+            self.monitor_cases(
+                dia_file_name="dflowfmoutput/GreenSurge_GFDcase.dia",
+                value_counts="percentage",
+            )
+            .loc["FINISHED"]
+            .values
+        )
+        if finished_percentage != 100.0:
+            raise ValueError(
+                "Not all cases are finished. Please check the status of the cases."
+            )
+
+        # Update the grid information from the first case output and save the
+        # updated dataset next to its original source file.
+        path_computation = op.join(
+            self.cases_dirs[0], "dflowfmoutput/GreenSurge_GFDcase_map.nc"
+        )
+        ds_GFD_info = actualize_grid_info(path_computation, ds_GFD_info)
+        dirname, basename = op.split(ds_GFD_info.attrs["source"])
+        name, ext = op.splitext(basename)
+        new_filepath = op.join(dirname, f"{name}_updated{ext}")
+        ds_GFD_info.to_netcdf(new_filepath)
+
+        case_ext = "dflowfmoutput/GreenSurge_GFDcase_map.nc"
+
+        def preprocess(dataset):
+            # Recover the forcing-cell and direction indices from the case path,
+            # which is expected to contain "_T_<tesela>_D_<direction>" (e.g. a
+            # hypothetical ".../case_T_12_D_3/dflowfmoutput/..." gives
+            # tes_i = 12 and dir_i = 3).
+            file_name = dataset.encoding.get("source", None)
+            dir_i = int(file_name.split("_D_")[-1].split("/")[0])
+            tes_i = int(file_name.split("_T_")[-1].split("_D_")[0])
+            dataset = (
+                dataset[["mesh2d_s1"]]
+                .expand_dims(["forcing_cell"])
+                .assign_coords(forcing_cell=[tes_i])
+            )
+            self.logger.info(
+                f"Loaded {file_name} with forcing_cell={tes_i} and dir={dir_i}"
+            )
+            return dataset
+
+        folder_postprocess = op.join(self.output_dir, "GreenSurge_Postprocess")
+        os.makedirs(folder_postprocess, exist_ok=True)
+
+        # Cases are ordered with the direction index varying fastest, so taking
+        # every dir_steps-th case gathers all forcing cells for one direction.
+        dir_steps = self.fixed_parameters["dir_steps"]
+        for idx in range(dir_steps):
+            paths = self.cases_dirs[idx::dir_steps]
+            file_paths = [op.join(case_dir, case_ext) for case_dir in paths]
+            DS = xr.open_mfdataset(
+                file_paths,
+                parallel=parallel,
+                combine="by_coords",
+                preprocess=preprocess,
+            )
+            DS.load().to_netcdf(op.join(folder_postprocess, f"GreenSurge_DB_{idx}.nc"))
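+
+        # Usage sketch (illustrative only, not executed here): the per-direction
+        # databases written above can later be re-opened and stacked along a new
+        # "direction" dimension with xarray, e.g.:
+        #
+        #     ds_db = xr.open_mfdataset(
+        #         op.join(folder_postprocess, "GreenSurge_DB_*.nc"),
+        #         combine="nested",
+        #         concat_dim="direction",
+        #     )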