In [1]:
import io
import math
import os
import pathlib

import holoviews as hv
import hvplot.pandas
import numpy as np
import pandas as pd
import pymap3d
import pyposeidon.mesh as pmesh

hv.extension("bokeh")
np.set_printoptions(suppress=True)
In [2]:
def parse_hgrid_nodes(path: os.PathLike[str] | str) -> pd.DataFrame:
    with open(path, "rb") as fd:
        _ = fd.readline()
        _, no_points = map(int, fd.readline().strip().split(b" "))
        content = io.BytesIO(b''.join(next(fd) for _ in range(no_points)))
        nodes = pd.read_csv(
            content,
            engine="pyarrow",
            sep="\t",
            header=None,
            names=["lon", "lat", "depth"],
            index_col=0
        )
    nodes = nodes.reset_index(drop=True)
    return nodes
    
def parse_hgrid_elements3(path: os.PathLike[str] | str) -> pd.DataFrame:
    with open(path, "rb") as fd:
        _ = fd.readline()
        no_elements, no_points = map(int, fd.readline().strip().split(b" "))
        for _ in range(no_points):
            next(fd) 
        content = io.BytesIO(b''.join(next(fd) for _ in range(no_elements)))
        elements = pd.read_csv(
            content,
            engine="pyarrow",
            sep="\t",
            header=None,
            names=["no_nodes", "n1", "n2", "n3"],
            index_col=0
        )
    elements = elements.assign(
        n1=elements.n1 - 1,
        n2=elements.n2 - 1,
        n3=elements.n3 - 1,
    ).reset_index(drop=True)
    return elements
In [3]:
def get_skews_and_base_cfls(lons, lats, depths) -> np.ndarray:
    # The shape of each one of the input arrays needs to be (3, <no_triangles>)
    #ell = pymap3d.Ellipsoid.from_name("wgs84")
    ell = pymap3d.Ellipsoid(6378206.4, 6378206.4, "schism", "schism")
    local_x, local_y, _ = pymap3d.geodetic2enu(lats, lons, depths, lats[0], lons[0], depths[0], ell=ell)
    areas = (local_x[1] * local_y[2] - local_x[2] * local_y[1]) * 0.5
    rhos = np.sqrt(areas / np.pi)
    max_sides = np.maximum(
        np.sqrt(local_x[1] ** 2 + local_y[1] ** 2),
        np.sqrt(local_x[2] ** 2 + local_y[2] ** 2),
        np.sqrt((local_x[2] - local_x[1]) ** 2 + (local_y[2] - local_y[1]) ** 2),
    )
    skews = max_sides / rhos
    base_cfls = np.sqrt(9.81 * np.maximum(0.1, depths.mean(axis=0))) / rhos / 2
    return skews, base_cfls

def get_skews_and_base_cfls_from_path(path: os.PathLike[str] | str) -> np.ndarray:
    nodes = parse_hgrid_nodes(path)
    elements = parse_hgrid_elements3(path)
    tri = elements[["n1", "n2", "n3"]].values
    lons = nodes.lon.values[tri].T
    lats = nodes.lat.values[tri].T
    depths = nodes.depth.values[tri].T
    skews, base_cfls = get_skews_and_base_cfls(lons=lons, lats=lats, depths=depths)
    return skews, base_cfls
In [4]:
path = "/home/panos/Prog/poseidon/seareport_meshes/meshes/global-v0.1.gr3"
path = "/home/panos/Prog/poseidon/seareport_meshes/meshes/global-v0.gr3"
path = "/home/panos/Prog/git/schism/src/Utility/Grid_Scripts/hgrid.gr3"
path = "/home/panos/Prog/poseidon/seareport_meshes/meshes/global-v0.2.gr3"
skews, base_cfls = get_skews_and_base_cfls_from_path(path)
In [5]:
CFL_THRESHOLD = 0.4
for dt in (1, 50, 75, 100, 120, 150, 200, 300, 400, 600, 900, 1200, 1800, 3600):
    violations = (base_cfls * dt < CFL_THRESHOLD).sum()
    print(f"{dt:>4d} {violations:>12d} {violations / len(base_cfls) * 100:>8.2f}%")
   1       716169   100.00%
  50       438858    61.28%
  75       173842    24.27%
 100       122625    17.12%
 120       107230    14.97%
 150        87782    12.26%
 200        67136     9.37%
 300        39294     5.49%
 400        21114     2.95%
 600          564     0.08%
 900            0     0.00%
1200            0     0.00%
1800            0     0.00%
3600            0     0.00%
In [6]:
pd.DataFrame({"skew": skews}).describe()
Out[6]:
skew
count 716169.000000
mean 2.819506
std 0.144765
min 2.507587
25% 2.721086
50% 2.780963
75% 2.885701
max 6.666141
In [7]:
df = pd.DataFrame({"cfl": base_cfls * 400})
df[df.cfl < 0.4].describe()
df[df.cfl < 0.4].hvplot.hist()
Out[7]:
cfl
count 21114.000000
mean 0.330807
std 0.036809
min 0.199615
25% 0.302369
50% 0.327259
75% 0.360951
max 0.400000
Out[7]:
In [8]:
nodes = parse_hgrid_nodes(path)
elements = parse_hgrid_elements3(path)
elements = elements.assign(base_cfl=base_cfls)
elements.head()
Out[8]:
no_nodes n1 n2 n3 base_cfl
0 3 189444 0 244205 0.003675
1 3 0 189444 335357 0.004649
2 3 0 207657 218301 0.007291
3 3 207657 0 319429 0.006633
4 3 0 218301 361609 0.006373
In [9]:
min_cfl_per_node = pd.concat([
    elements[["n1", "base_cfl"]].groupby(["n1"]).base_cfl.min(),
    elements[["n2", "base_cfl"]].groupby(["n2"]).base_cfl.min(),
    elements[["n3", "base_cfl"]].groupby(["n3"]).base_cfl.min(),
], axis=1).min(axis=1)
min_cfl_per_node.head()
Out[9]:
0    0.003675
1    0.007158
2    0.009419
3    0.002360
4    0.007654
dtype: float64
In [15]:
dt = 600
df = nodes.assign(
    cfl=min_cfl_per_node * dt,
    # CFL_violation nodes have a value of 1 if there is no violation and 4 if there is a violation. 
    # We do this in order to plot the points with a different size
    cfl_violation=((min_cfl_per_node * dt < CFL_THRESHOLD) * 3) + 1   
)
df.head()
Out[15]:
lon lat depth cfl cfl_violation
0 0.000000 90.000000 4228.0 2.205227 1
1 -23.447414 -19.913775 4603.0 4.294965 1
2 118.483781 -15.726201 5477.0 5.651691 1
3 15.495643 -28.939963 187.0 1.416196 1
4 94.479055 -14.068985 5710.0 4.592690 1
In [16]:
df.hvplot.points(
    'lon', 
    'lat',
    c="cfl_violation",
    cmap="colorblind",
    geo=True,
    tiles=True,
).options(
    width=900, height=600
).opts(
    hv.opts.Points(size=hv.dim('cfl_violation'))
)
Out[16]:
In [ ]: