13 changes: 11 additions & 2 deletions bluemath_tk/wrappers/_base_wrappers.py
@@ -864,15 +864,24 @@ def postprocess_cases(
cases_dir_to_postprocess = [
self.cases_dirs[case] for case in cases_to_postprocess
]
cases_context_to_postprocess = [
self.cases_context[case] for case in cases_to_postprocess
]
else:
cases_to_postprocess = list(range(len(self.cases_dirs)))
cases_dir_to_postprocess = copy.deepcopy(self.cases_dirs)
cases_context_to_postprocess = copy.deepcopy(self.cases_context)

postprocessed_files = []
for case_num, case_dir in zip(cases_to_postprocess, cases_dir_to_postprocess):
for case_num, case_dir, case_context in zip(
cases_to_postprocess, cases_dir_to_postprocess, cases_context_to_postprocess
):
try:
postprocessed_file = self.postprocess_case(
case_num=case_num, case_dir=case_dir, **kwargs
case_num=case_num,
case_dir=case_dir,
case_context=case_context,
**kwargs,
)
postprocessed_files.append(postprocessed_file)
except Exception as e:
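With this change, postprocess_cases looks up each case's context dictionary and forwards it, so every postprocess_case override has to accept a case_context argument; the SWAN, SWASH and XBeach wrappers below are updated accordingly. A minimal sketch of the expected override signature (the subclass name is hypothetical and the import path is assumed from this repository's layout):

import xarray as xr

from bluemath_tk.wrappers._base_wrappers import BaseModelWrapper


class MyModelWrapper(BaseModelWrapper):  # hypothetical subclass, for illustration only
    def postprocess_case(
        self, case_num: int, case_dir: str, case_context: dict, **kwargs
    ) -> xr.Dataset:
        # case_context holds the parameter values this case was built with
        # (e.g. case_context.get("Hs")), so postprocessing can depend on them.
        return xr.open_dataset(f"{case_dir}/output.nc")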
13 changes: 8 additions & 5 deletions bluemath_tk/wrappers/swan/swan_wrapper.py
@@ -265,30 +265,33 @@ def postprocess_case(
self,
case_num: int,
case_dir: str,
case_context: dict,
output_vars: List[str] = ["Hsig", "Tm02", "Dir"],
) -> xr.Dataset:
"""
Convert mat output files to a netCDF file.

Parameters
----------
case_num : int
The case number.
case_dir : str
The case directory.
case_context : dict
The case context.
output_vars : list, optional
The output variables to postprocess. Default is ["Hsig", "Tm02", "Dir"].

Returns
-------
xr.Dataset
The postprocessed Dataset.
"""

if output_vars is None:
self.logger.info("Postprocessing all available variables.")
output_vars = list(self.output_variables.keys())

output_nc_path = os.path.join(case_dir, "output.nc")
if not os.path.exists(output_nc_path):
# Convert tab files to netCDF file
@@ -302,7 +305,7 @@ def postprocess_case(
else:
self.logger.info("Reading existing output.nc file.")
output_nc = xr.open_dataset(output_nc_path)

return output_nc

def join_postprocessed_files(
3 changes: 3 additions & 0 deletions bluemath_tk/wrappers/swash/swash_wrapper.py
@@ -341,6 +341,7 @@ def postprocess_case(
self,
case_num: int,
case_dir: str,
case_context: dict,
output_vars: List[str] = None,
overwrite_output: bool = True,
overwrite_output_postprocessed: bool = True,
@@ -356,6 +357,8 @@
The case number.
case_dir : str
The case directory.
case_context : dict
The case context.
output_vars : list, optional
The output variables to postprocess. Default is None.
overwrite_output : bool, optional
70 changes: 68 additions & 2 deletions bluemath_tk/wrappers/xbeach/xbeach_wrapper.py
@@ -5,6 +5,8 @@
import numpy as np
import pandas as pd
import xarray as xr
from wavespectra.construct.direction import cartwright
from wavespectra.construct.frequency import jonswap

from .._base_wrappers import BaseModelWrapper

@@ -37,6 +39,7 @@ class XBeachModelWrapper(BaseModelWrapper):

available_launchers = {
"geoocean-cluster": "launchXbeach.sh",
"docker_serial": "docker run --rm -v .:/case_dir -w /case_dir geoocean/rocky8 xbeach",
}

def __init__(
@@ -64,6 +67,30 @@ def __init__(
name=self.__class__.__name__, level="DEBUG" if debug else "INFO"
)

def create_vardens(self, ds: xr.Dataset) -> str:
    """
    Build the text block of an XBeach vardens spectrum file from a dataset
    with "freq" and "dir" coordinates and an "efth" variable: the number of
    frequencies and their values, the number of directions and their sorted
    values, then one tab-separated row of variance density per frequency
    (NaN entries are written as 0.0).
    """

    t = ""

    # Frequencies
    t += "{0} \n".format(len(ds.freq))
    for freq in ds.freq.values:
        t += "{0}\n".format(freq)

    # Directions
    t += "{0} \n".format(len(ds.dir))
    for dirt in sorted(ds.dir.values):
        t += "{0}\n".format(dirt)

    # Sea_surface_wave_directional_variance_spectral_density:
    # one row per frequency, tab-separated over the sorted directions
    for freq in ds.freq.values:
        for dirt in sorted(ds.dir.values):
            var = ds.sel(freq=freq, dir=dirt).efth.values
            if np.isnan(var):
                var = 0.0
            t += "{0}\t".format(var)
        t += "\n"

    return t

def build_case(
self,
case_context: dict,
@@ -81,11 +108,47 @@
"""

if case_context["wbctype"] == "jonstable":
# Convert the directional spread SPR (degrees) to the spreading coefficient s expected by XBeach's jonstable forcing (https://xbeach.readthedocs.io/en/latest/xbeach_manual.html)
spr_rad = np.radians(np.array(case_context["SPR"]))
s = (2 / (spr_rad**2)) - 1

with open(f"{case_dir}/jonswap.txt", "w") as f:
for _i in range(math.ceil(case_context["comptime"] / 3600)):
for _i in range(math.ceil(case_context["tstop"] / 3600)):
f.write(
f"{case_context['Hs']} {case_context['Tp']} {case_context['Dir']} 3.300000 30.000000 3600.000000 1.000000 \n"
f"{case_context['Hs']} {case_context['Tp']} {case_context['Dir']} 3.300000 {s} 3600.000000 1.000000 \n"
)

if case_context["wbctype"] == "vardens":
ef = jonswap(
freq=case_context["freqs"],
fp=1 / case_context["Tp"],
gamma=case_context["gamma"],
hs=case_context["Hs"],
)
gth = cartwright(
dir=case_context["dirs"],
dm=case_context["Dir"],
dspr=case_context["SPR"],
)
efth = ef * gth

spectrum = xr.Dataset(
{
"efth": (
["freq", "dir"],
efth.data,
)
},
coords={
"dir": case_context["dirs"],
"freq": case_context["freqs"],
},
).sortby(["freq", "dir"])

spectrum["dir"] = (270 - (spectrum["dir"])) % 360
spec = self.create_vardens(spectrum)
with open(f"{case_dir}/vardens.txt", "w") as f:
f.write(spec)
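Two details of the new build_case branches, spelled out for review. For wbctype == "jonstable", XBeach takes the directional spreading coefficient s rather than the spread angle itself, so the wrapper converts SPR (degrees) to radians and applies s = 2 / sigma**2 - 1; a quick round-trip check of that relation (assuming the cos^{2s} directional model, with an illustrative spread value):

import numpy as np

spr_deg = 30.0                   # illustrative directional spread in degrees
sigma = np.radians(spr_deg)      # spread in radians, as in build_case above
s = 2.0 / sigma**2 - 1.0         # spreading coefficient written to jonswap.txt
# Inverting the relation (sigma**2 == 2 / (s + 1)) recovers the original spread.
assert np.isclose(np.degrees(np.sqrt(2.0 / (s + 1.0))), spr_deg)

For wbctype == "vardens", the 2D spectrum is assembled as a frequency spectrum times a directional distribution, E(f, theta) = S_jonswap(f) * D_cartwright(theta), using wavespectra's jonswap and cartwright constructors, and then written to vardens.txt through create_vardens.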

def _get_average_var(self, case_nc: xr.Dataset, var: str) -> np.ndarray:
"""
@@ -176,6 +239,7 @@ def postprocess_case(
self,
case_num: int,
case_dir: str,
case_context: dict,
output_vars: List[str] = None,
overwrite_output: bool = True,
) -> xr.Dataset:
Expand All @@ -188,6 +252,8 @@ def postprocess_case(
The case number.
case_dir : str
The case directory.
case_context : dict
The case context.
output_vars : list, optional
The output variables to postprocess. Default is None.
overwrite_output : bool, optional