Skip to content
Snippets Groups Projects
Commit 18eafcaf authored by Rudolf, Michael's avatar Rudolf, Michael
Browse files

Some fixes for webservices and plots:

 - Fixed a bug causing a missing folder
 - Fixed warnings for empty geodataframes
 - Added client description to restapi calls
 - Fixed the PSI timeseries plot
 - Fixed the scalebar placement
 - Added fallback if internal server is unreachable
 - Minor formatting changes
parent 8d97f767
Branches
Tags
No related merge requests found
...@@ -63,6 +63,7 @@ def query_hlnug( ...@@ -63,6 +63,7 @@ def query_hlnug(
else: else:
gdf = _save_features(features, out_fname) gdf = _save_features(features, out_fname)
else: else:
os.makedirs(out_folder, exist_ok=True)
out_fname = os.path.join(out_folder, f"{layer_name}{suffix}") out_fname = os.path.join(out_folder, f"{layer_name}{suffix}")
if os.path.exists(out_fname + ".gpkg"): if os.path.exists(out_fname + ".gpkg"):
logging.info("Found locally cached file.") logging.info("Found locally cached file.")
...@@ -235,6 +236,7 @@ def _save_features( ...@@ -235,6 +236,7 @@ def _save_features(
gdf = gp.read_file(geojson).to_crs("EPSG:32632") gdf = gp.read_file(geojson).to_crs("EPSG:32632")
if len(region) > 0: if len(region) > 0:
gdf = gdf.clip(region) gdf = gdf.clip(region)
if len(gdf) > 0:
gdf.to_file(out_fname + ".gpkg") gdf.to_file(out_fname + ".gpkg")
return gdf return gdf
...@@ -263,7 +265,9 @@ def _query_server( ...@@ -263,7 +265,9 @@ def _query_server(
# Query webservice to find layer # Query webservice to find layer
logging.info("Querying HLNUG for geology_data") logging.info("Querying HLNUG for geology_data")
map_url = f"{HLNUG_URL}/{map_server_suffix}" map_url = f"{HLNUG_URL}/{map_server_suffix}"
feat_serv = restapi.MapService(map_url) feat_serv = restapi.MapService(
map_url, client="u4py, rudolf@geo.tu-darmstadt.de"
)
lyr_types = [lyr.type for lyr in feat_serv.layers] lyr_types = [lyr.type for lyr in feat_serv.layers]
lyr_names = [lyr.name for lyr in feat_serv.layers] lyr_names = [lyr.name for lyr in feat_serv.layers]
...@@ -278,7 +282,9 @@ def _query_server( ...@@ -278,7 +282,9 @@ def _query_server(
# Assemble url to layer and get data # Assemble url to layer and get data
lyr_url = f"{map_url}/{ii}" lyr_url = f"{map_url}/{ii}"
ms_lyr = restapi.MapServiceLayer(lyr_url) ms_lyr = restapi.MapServiceLayer(
lyr_url, client="u4py, rudolf@geo.tu-darmstadt.de"
)
if len(region) > 0: if len(region) > 0:
restgeom = polygon_to_restapi( restgeom = polygon_to_restapi(
region.geometry[0], region.crs.to_string() region.geometry[0], region.crs.to_string()
......
...@@ -298,10 +298,10 @@ def add_scalebar( ...@@ -298,10 +298,10 @@ def add_scalebar(
""" """
if ( if (
(not "left" in loc) ("left" not in loc)
and (not "right" in loc) and ("right" not in loc)
and (not "top" in loc) and ("top" not in loc)
and (not "bottom" in loc) and ("bottom" not in loc)
): ):
raise ValueError( raise ValueError(
"Invalid location string, allowed values are: 'top', 'bottom', " "Invalid location string, allowed values are: 'top', 'bottom', "
...@@ -362,12 +362,13 @@ def add_scalebar( ...@@ -362,12 +362,13 @@ def add_scalebar(
fontweight="bold", fontweight="bold",
color="w", color="w",
path_effects=[path_effects.withStroke(linewidth=2, foreground="k")], path_effects=[path_effects.withStroke(linewidth=2, foreground="k")],
zorder=99,
) )
for ii in range(div): for ii in range(div):
ax.add_patch( ax.add_patch(
mpatches.Rectangle( mpatches.Rectangle(
(x, y), width, height, facecolor="w", edgecolor="k", zorder=10 (x, y), width, height, facecolor="w", edgecolor="k", zorder=99
) )
) )
_label_scalebar(ax, x, y, width, height) _label_scalebar(ax, x, y, width, height)
...@@ -378,7 +379,7 @@ def add_scalebar( ...@@ -378,7 +379,7 @@ def add_scalebar(
height, height,
facecolor="k", facecolor="k",
edgecolor="k", edgecolor="k",
zorder=10, zorder=99,
) )
) )
if ii < div - 1: if ii < div - 1:
......
...@@ -576,6 +576,7 @@ def geology_map( ...@@ -576,6 +576,7 @@ def geology_map(
geometry=[u4spatial.bounds_to_polygon(ax)], crs=crs geometry=[u4spatial.bounds_to_polygon(ax)], crs=crs
) )
if use_internal: if use_internal:
try:
bounds = region.bounds.iloc[0] bounds = region.bounds.iloc[0]
geology_data = u4web.query_internal( geology_data = u4web.query_internal(
"gk25-hessen:GK25_f_GK3", "gk25-hessen:GK25_f_GK3",
...@@ -584,6 +585,15 @@ def geology_map( ...@@ -584,6 +585,15 @@ def geology_map(
suffix=f"{row[1].group:05}", suffix=f"{row[1].group:05}",
out_folder=shp_path, out_folder=shp_path,
) )
except TimeoutError:
logging.debug("Internal Server timed out, using fallback.")
geology_data = u4web.query_hlnug(
"geologie/gk25/MapServer",
"Geologie (Kartiereinheiten)",
region=region,
suffix=f"_{row[1].group:05}",
out_folder=shp_path,
)
else: else:
geology_data = u4web.query_hlnug( geology_data = u4web.query_hlnug(
"geologie/gk25/MapServer", "geologie/gk25/MapServer",
...@@ -655,13 +665,28 @@ def geology_map( ...@@ -655,13 +665,28 @@ def geology_map(
# Load tectonic data # Load tectonic data
if use_internal: if use_internal:
try:
fault_data = u4web.query_internal( fault_data = u4web.query_internal(
"gk25-hessen:GK25_l-tek_GK3", "gk25-hessen:GK25_l-tek_GK3",
region=[bounds.minx, bounds.miny, bounds.maxx, bounds.maxy], region=[
bounds.minx,
bounds.miny,
bounds.maxx,
bounds.maxy,
],
region_crs=region.crs, region_crs=region.crs,
suffix=f"{row[1].group:05}", suffix=f"{row[1].group:05}",
out_folder=shp_path, out_folder=shp_path,
) )
except TimeoutError:
logging.debug("Internal Server timed out, using fallback.")
fault_data = u4web.query_hlnug(
"geologie/gk25/MapServer",
"Tektonik (Liniendaten)",
region=region,
suffix=f"_{row[1].group:05}",
out_folder=shp_path,
)
else: else:
fault_data = u4web.query_hlnug( fault_data = u4web.query_hlnug(
"geologie/gk25/MapServer", "geologie/gk25/MapServer",
...@@ -670,16 +695,16 @@ def geology_map( ...@@ -670,16 +695,16 @@ def geology_map(
suffix=f"_{row[1].group:05}", suffix=f"_{row[1].group:05}",
out_folder=shp_path, out_folder=shp_path,
) )
if fault_data: if len(fault_data) > 0:
fault_data.plot(ax=ax, color="k") fault_data.plot(ax=ax, color="k")
# Formatting and other stuff # Formatting and other stuff
u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4) u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
# u4ax.add_basemap( u4ax.add_basemap(
# ax=ax, ax=ax,
# crs=geology_data.crs, crs=geology_data.crs,
# # source=contextily.providers.CartoDB.Positron, source=contextily.providers.TopPlusOpen.Grey,
# ) )
for ftype in GLOBAL_TYPES: for ftype in GLOBAL_TYPES:
fig.savefig( fig.savefig(
...@@ -1556,6 +1581,7 @@ def timeseries_map( ...@@ -1556,6 +1581,7 @@ def timeseries_map(
:param plot_buffer: The buffer width around the area of interest. :param plot_buffer: The buffer width around the area of interest.
:type plot_buffer: float :type plot_buffer: float
""" """
output_path = os.path.join(output_path, suffix)
fexists = [ fexists = [
os.path.exists( os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_psi.{ftype}") os.path.join(output_path, f"{row[1].group:05}_psi.{ftype}")
...@@ -1570,7 +1596,6 @@ def timeseries_map( ...@@ -1570,7 +1596,6 @@ def timeseries_map(
f"Plotting timeseries and psi map of group {row[1].group:05}." f"Plotting timeseries and psi map of group {row[1].group:05}."
) )
# Set paths # Set paths
output_path = os.path.join(output_path, suffix)
psi_data_path = os.path.join(output_path, "psi_inv_data") psi_data_path = os.path.join(output_path, "psi_inv_data")
os.makedirs(output_path, exist_ok=True) os.makedirs(output_path, exist_ok=True)
os.makedirs(psi_data_path, exist_ok=True) os.makedirs(psi_data_path, exist_ok=True)
...@@ -1579,8 +1604,8 @@ def timeseries_map( ...@@ -1579,8 +1604,8 @@ def timeseries_map(
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs) shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
# Create figure and axes # Create figure and axes
fig = plt.figure(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI) fig = plt.figure(figsize=(13, 5), dpi=GLOBAL_DPI)
gs = fig.add_gridspec(ncols=2, width_ratios=(1, 2)) gs = fig.add_gridspec(ncols=2, width_ratios=(1, 3))
ax_map = fig.add_subplot(gs[0]) ax_map = fig.add_subplot(gs[0])
ax_ts = fig.add_subplot(gs[1]) ax_ts = fig.add_subplot(gs[1])
...@@ -1589,26 +1614,38 @@ def timeseries_map( ...@@ -1589,26 +1614,38 @@ def timeseries_map(
shp_gdf.buffer(plot_buffer).plot(ax=ax_map, fc="None", ec="None") shp_gdf.buffer(plot_buffer).plot(ax=ax_map, fc="None", ec="None")
# Load PSI data # Load PSI data
reg_gdf = gp.GeoDataFrame(
geometry=[u4spatial.bounds_to_polygon(ax_map)],
crs=crs,
)
psi_local = u4psi.get_region_data( psi_local = u4psi.get_region_data(
shp_gdf, shp_gdf,
f"grp_{row[1].group:05}_local", f"grp_{row[1].group:05}_local",
os.path.join(psi_path, "hessen_l3_clipped.gpkg"), os.path.join(psi_path, "hessen_l3_clipped.gpkg"),
) )
psi_regional = u4psi.get_region_data(
reg_gdf,
f"grp_{row[1].group:05}_regional",
os.path.join(psi_path, "hessen_l3_clipped.gpkg"),
)
results = u4proc.get_psi_dict_inversion( results = u4proc.get_psi_dict_inversion(
psi_local, psi_local,
save_path=os.path.join(psi_data_path, f"grp_{row[1].group:05}.pkl"), save_path=os.path.join(psi_data_path, f"grp_{row[1].group:05}.pkl"),
) )
# Convert PSI data for plotting # Add PSI in Map
ax_map.axis("equal")
u4ax.add_gpkg_data_where(
contour_path,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
ax=ax_map,
edgecolor="k",
facecolor="None",
alpha=0.75,
linewidth=1,
)
fig.tight_layout()
reg_gdf = gp.GeoDataFrame(
geometry=[u4spatial.bounds_to_polygon(ax_map)],
crs=crs,
)
psi_regional = u4psi.get_region_data(
reg_gdf,
f"grp_{row[1].group:05}_regional",
os.path.join(psi_path, "hessen_l3_clipped.gpkg"),
)
u4spatial.xy_data_to_gdf( u4spatial.xy_data_to_gdf(
psi_regional["x"], psi_regional["x"],
psi_regional["y"], psi_regional["y"],
...@@ -1621,7 +1658,7 @@ def timeseries_map( ...@@ -1621,7 +1658,7 @@ def timeseries_map(
vmin=-5, vmin=-5,
vmax=5, vmax=5,
zorder=2, zorder=2,
markersize=10, markersize=30,
edgecolors="k", edgecolors="k",
legend=True, legend=True,
legend_kwds={ legend_kwds={
...@@ -1632,32 +1669,21 @@ def timeseries_map( ...@@ -1632,32 +1669,21 @@ def timeseries_map(
"pad": 0.1, "pad": 0.1,
}, },
) )
u4ax.add_gpkg_data_where(
contour_path,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
ax=ax_map,
edgecolor="k",
facecolor="None",
alpha=0.75,
linewidth=1,
)
u4plotfmt.add_scalebar(ax=ax_map, width=plot_buffer * 4, div=1) u4plotfmt.add_scalebar(ax=ax_map, width=plot_buffer * 4, div=1)
ax_map.axis("off")
fig.tight_layout()
plt.axis("equal")
plt.draw()
u4ax.add_basemap( u4ax.add_basemap(
ax=ax_map, crs=crs, source=contextily.providers.CartoDB.Voyager ax=ax_map, crs=crs, source=contextily.providers.CartoDB.Voyager
) )
ax_map.axis("off")
plt.draw()
# Plot Timeseries # Plot Timeseries
u4ax.plot_timeseries_fit( u4ax.plot_timeseries_fit(
ax=ax_ts, results=results, fit_num=1, annotate=True, show_errors=True ax=ax_ts, results=results, fit_num=1, annotate=True, show_errors=True
) )
mylim = np.max(np.abs((ax_ts.get_ylim()))) * 1.4
ax_ts.set_ylim(-mylim, mylim)
ax_ts.set_xlabel("Time") ax_ts.set_xlabel("Time")
ax_ts.set_ylabel("Displacement (mm)") ax_ts.set_ylabel("Displacement (mm)")
fig.tight_layout() fig.tight_layout()
for ftype in GLOBAL_TYPES: for ftype in GLOBAL_TYPES:
fig.savefig( fig.savefig(
......
0% Loading or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment