Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Update gwdatalens #85

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,6 @@ bro_connector/qc_tool/app/assets/.mapbox_access_token
bro_connector/static/dash/.mapbox_access_token
**/.venv
bro_connector/.pi_cache/
/fieldforms/**
/fieldforms/**

.pixi
28 changes: 22 additions & 6 deletions bro_connector/gwdatalens/app/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import logging
import os

# from pathlib import Path
import dash_bootstrap_components as dbc
import i18n
import pastastore as pst
Expand All @@ -11,7 +12,12 @@
from gwdatalens.app.settings import CUSTOM_CSS_PATH, LOCALE_PATH, config, settings
from gwdatalens.app.src.cache import cache
from gwdatalens.app.src.components.layout import create_layout
from gwdatalens.app.src.data import DataInterface, DataSource, TravalInterface
from gwdatalens.app.src.data import (
DataInterface,
# HydropandasDataSource,
PostgreSQLDataSource,
TravalInterface,
)

logging.basicConfig()
logger = logging.getLogger()
Expand All @@ -29,13 +35,19 @@
i18n.set("locale", settings["LOCALE"])
i18n.load_path.append(LOCALE_PATH)

# %% Set up backend
# %% Connect to database

# postgreql database
db = PostgreSQLDataSource(config=config["database"])

# connect to database
db = DataSource(config=config["database"])
# hydropandas 'database'
# db = HydropandasDataSource(extent=[116500, 120000, 439000, 442000], source="bro")
# db = HydropandasDataSource(
# fname=Path(__file__).parent / ".." / "data" / "example_obscollection.zip",
# source="bro",
# )

# load pastastore
# name = "zeeland"
# %% load pastastore
name = config["pastastore"]["name"]
pastastore_path = config["pastastore"]["path"]

Expand All @@ -53,6 +65,8 @@
pstore = pst.PastaStore(conn)
print(pstore)

# %% traval interface

# load ruleset
traval_interface = TravalInterface(db, pstore)

Expand Down Expand Up @@ -141,3 +155,5 @@
"CACHE_DIR": ".cache",
},
)

# %%
45 changes: 37 additions & 8 deletions bro_connector/gwdatalens/app/callbacks/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,16 @@
import pastas as ps
from dash import Input, Output, State, ctx, no_update
from dash.exceptions import PreventUpdate
from packaging.version import parse
from pastas.extensions import register_plotly
from pastas.io.pas import PastasEncoder
from pastastore.version import __version__ as PASTASTORE_VERSION

from gwdatalens.app.src.components import ids

register_plotly()

PASTASTORE_GT_1_7_1 = parse(PASTASTORE_VERSION) > parse("1.7.1")

# %% MODEL TAB

Expand Down Expand Up @@ -71,15 +74,42 @@ def generate_model(n_clicks, value, tmin, tmax, use_only_validated):
tmin = pd.Timestamp(tmin)
tmax = pd.Timestamp(tmax)
# get time series
gmw_id, tube_id = value.split("-")
if "-" in value:
gmw_id, tube_id = value.split("-")
elif "_" in value:
gmw_id, tube_id = value.split("_")
else:
raise ValueError(
"Error splitting name into monitoring well ID "
f"and tube number: {value}"
)
ts = data.db.get_timeseries(gmw_id, tube_id)
if use_only_validated:
mask = ts.loc[:, data.db.qualifier_column] == "goedgekeurd"
ts = ts.loc[mask, data.db.value_column]
ts = ts.loc[mask, data.db.value_column].dropna()
else:
ts = ts.loc[:, data.db.value_column].dropna()

if value in data.pstore.oseries_names:
# update stored copy
data.pstore.update_oseries(ts, value)
else:
ts = ts.loc[:, data.db.value_column]
# update stored copy
data.pstore.update_oseries(ts, value)
# add series to database
metadata = data.db.gmw_gdf.loc[value].to_dict()
data.pstore.add_oseries(ts, value, metadata)
print(
f"Head time series '{value}' added to pastastore database."
)

if pd.isna(tmin):
tmin = ts.index[0]
if pd.isna(tmax):
tmax = ts.index[-1]

# get meteorological info, if need be, and pastastore is up-to-date
if PASTASTORE_GT_1_7_1:
data.get_knmi_data(value)

# create model
ml = ps.Model(ts)
data.pstore.add_recharge(ml)
Expand All @@ -92,9 +122,8 @@ def generate_model(n_clicks, value, tmin, tmax, use_only_validated):
report=False,
initial=False,
)
mljson = json.dumps(
ml.to_dict(), cls=PastasEncoder
) # store generated model
# store generated model
mljson = json.dumps(ml.to_dict(), cls=PastasEncoder)
return (
ml.plotly.results(tmin=tmin, tmax=tmax),
ml.plotly.diagnostics(),
Expand Down
35 changes: 17 additions & 18 deletions bro_connector/gwdatalens/app/callbacks/overview.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,29 +222,28 @@ def highlight_point_on_map_from_table(selected_cells, table):
pts = loc["id"].tolist()

dfm = data.db.gmw_gdf.reset_index().loc[pts].copy()
dfm["curveNumber"] = 0
mask = dfm.loc[:, "metingen"] == 1
dfm.loc[mask, "curveNumber"] = 1

dfm["curveNumber"] = 1 # all locs plotted in trace 1 for map highlighting
mask = dfm.loc[:, "metingen"] > 0
# update selected points
mappatch = Patch()
mappatch["data"][1]["selectedpoints"] = dfm.loc[mask, "id"].tolist()
mappatch["data"][1]["selectedpoints"] = dfm.loc[:, "id"].tolist()
mappatch["data"][0]["selectedpoints"] = dfm.loc[~mask, "id"].tolist()

selectedData = {
"points": [
{
"curveNumber": dfm["curveNumber"].loc[i],
"pointNumber": dfm["id"].loc[i],
"pointIndex": dfm["id"].loc[i],
"lon": dfm["lon"].loc[i],
"lat": dfm["lat"].loc[i],
"text": dfm["name"].loc[i],
}
for i in loc["id"]
]
}
return (
{
"points": [
{
"curveNumber": dfm["curveNumber"].iloc[i],
"pointNumber": dfm["id"].iloc[i],
"pointIndex": dfm["id"].iloc[i],
"lon": dfm["lon"].iloc[i],
"lat": dfm["lat"].iloc[i],
"text": dfm["name"].iloc[i],
}
for i in range(loc.index.size)
]
},
selectedData,
mappatch,
(pd.Timestamp.now().isoformat(), True),
)
10 changes: 9 additions & 1 deletion bro_connector/gwdatalens/app/callbacks/qc.py
Original file line number Diff line number Diff line change
Expand Up @@ -707,7 +707,15 @@ def run_traval(n_clicks, name, tmin, tmax, only_unvalidated):

set_props(ids.LOADING_QC_CHART, {"display": "show"})

gmw_id, tube_id = name.split("-")
if "-" in name:
gmw_id, tube_id = name.split("-")
elif "_" in name:
gmw_id, tube_id = name.split("_")
else:
raise ValueError(
"Error splitting name into monitoring well ID"
f" and tube number: {name}"
)
try:
result, figure = data.traval.run_traval(
gmw_id,
Expand Down
8 changes: 4 additions & 4 deletions bro_connector/gwdatalens/app/config.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ PORT = 8050 # default port for the Dash app
BACKGROUND_CALLBACKS = false # false is recommended at this moment

[pastastore]
name = "zeeland_bro"
path = "./gwdatalens/pastasdb/" # path to pastas model database
connector = "pas" # database connector
update_knmi = false # update knmi data in pastas database on startup
name = "zeeland_bro" # name of the pastastore database
path = "./gwdatalens/pastasdb/" # path to pastastore database
connector = "pas" # database connector
update_knmi = true # update knmi data in pastas database on startup
13 changes: 9 additions & 4 deletions bro_connector/gwdatalens/app/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,15 @@
"port": localsecret.port,
}
else:
with open(DATALENS_APP_PATH / "database.toml", "rb") as f:
dbase = tomli.load(f)
config["database"] = dbase["database"]
try:
with open(DATALENS_APP_PATH / "database.toml", "rb") as f:
dbase = tomli.load(f)
config["database"] = dbase["database"]
except FileNotFoundError:
print(
f"No {DATALENS_APP_PATH}/database.toml file found. Ignore this message if using "
"HydropandasDataSource."
)

# %% set paths accordingly

Expand All @@ -36,4 +42,3 @@

LOCALE_PATH = ASSETS_PATH / "locale"
CUSTOM_CSS_PATH = str(ASSETS_PATH / "custom.css")
MAPBOX_ACCESS_TOKEN = str(ASSETS_PATH / ".mapbox_access_token")
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,7 @@ def render(data, selected_data):
"""
locs = data.db.list_locations()

options = [
{"label": f"{i} ({data.db.gmw_gdf.at[i, 'nitg_code']})", "value": i}
for i in locs
]
options = [{"label": f"{i}{data.db.get_nitg_code(i)}", "value": i} for i in locs]

if selected_data is not None and len(selected_data) == 1:
value = selected_data[0]
Expand Down
29 changes: 18 additions & 11 deletions bro_connector/gwdatalens/app/src/components/overview_chart.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,37 +75,42 @@ def plot_obs(names, data):
return {"layout": {"title": i18n.t("general.no_plot")}}

hasobs = list(data.db.list_locations())
title = None
no_data = []

traces = []
for name in names:
# split into monitoringwell and tube_number
monitoring_well, tube_nr = name.split("-")
if "-" in name:
monitoring_well, tube_nr = name.split("-")
elif "_" in name:
monitoring_well, tube_nr = name.split("_")
else:
raise ValueError(
f"Error splitting name into monitoring well ID and tube number: {name}"
)
tube_nr = int(tube_nr)

# no obs
if name not in hasobs:
title = i18n.t("general.no_plot")
no_data.append(True)
continue

df = data.db.get_timeseries(gmw_id=monitoring_well, tube_id=tube_nr)

if df is None:
continue

df.loc[:, data.db.qualifier_column] = df.loc[
:, data.db.qualifier_column
].fillna("")
df[data.db.qualifier_column] = df.loc[:, data.db.qualifier_column].fillna("")

if len(names) == 1:
title = None
no_data.append(False)
ts = df[data.db.value_column]
trace_i = go.Scattergl(
x=ts.index,
y=ts.values,
mode="lines",
line={"width": 1, "color": "gray"},
name=name + f" ({data.db.gmw_gdf.at[name, 'nitg_code']})",
name=name + data.db.get_nitg_code(name),
legendgroup=f"{name}-{tube_nr}",
showlegend=True,
)
Expand Down Expand Up @@ -156,6 +161,7 @@ def plot_obs(names, data):
)
traces.append(trace_mo)
else:
no_data.append(False)
ts = df[data.db.value_column]
trace_i = go.Scattergl(
x=ts.index,
Expand All @@ -169,7 +175,6 @@ def plot_obs(names, data):
)
traces.append(trace_i)
layout = {
"title": title,
# "xaxis": {"range": [sim.index[0], sim.index[-1]]},
"yaxis": {"title": "(m NAP)"},
"legend": {
Expand All @@ -184,5 +189,7 @@ def plot_obs(names, data):
# "margin": dict(t=20, b=20, l=50, r=20),
"margin-top": 0,
}

return {"data": traces, "layout": layout}
if all(no_data):
return None
else:
return {"data": traces, "layout": layout}
Loading