From 18eafcaf1e5af27ea101711e472a69b7ffd8638e Mon Sep 17 00:00:00 2001
From: Michael Rudolf <rudolf@geo.tu-darmstadt.de>
Date: Mon, 17 Feb 2025 17:45:41 +0100
Subject: [PATCH] Some fixes for webservices and plots:

- Fixed a bug causing a missing folder
- Fixed warnings for empty geodataframes
- Added client description to restapi calls
- Fixed the PSI timeseries plot
- Fixed the scalebar placement
- Added fallback if internal server is unreachable
- Minor formatting changes
---
 u4py/addons/web_services.py |  16 +++--
 u4py/plotting/formatting.py |  13 ++--
 u4py/plotting/plots.py      | 126 ++++++++++++++++++++++--------
 3 files changed, 94 insertions(+), 61 deletions(-)

diff --git a/u4py/addons/web_services.py b/u4py/addons/web_services.py
index c9c8ce1..38c2cdc 100644
--- a/u4py/addons/web_services.py
+++ b/u4py/addons/web_services.py
@@ -63,6 +63,7 @@ def query_hlnug(
             else:
                 gdf = _save_features(features, out_fname)
     else:
+        os.makedirs(out_folder, exist_ok=True)
         out_fname = os.path.join(out_folder, f"{layer_name}{suffix}")
         if os.path.exists(out_fname + ".gpkg"):
             logging.info("Found locally cached file.")
@@ -124,7 +125,7 @@ def query_internal(
     if not out_folder:
         with tempfile.TemporaryDirectory() as out_folder:
             out_fname = os.path.join(
-                out_folder, f"{layer_name.replace(':','_')}{suffix}"
+                out_folder, f"{layer_name.replace(':', '_')}{suffix}"
             )
             features = _query_internal_server(
                 layer_name, region, region_crs, login_path, maxfeatures
@@ -133,7 +134,7 @@
     else:
         os.makedirs(out_folder, exist_ok=True)
         out_fname = os.path.join(
-            out_folder, f"{layer_name.replace(':','_')}{suffix}"
+            out_folder, f"{layer_name.replace(':', '_')}{suffix}"
         )
         if os.path.exists(out_fname + ".gpkg"):
             logging.info("Found locally cached file.")
@@ -235,7 +236,8 @@ def _save_features(
     gdf = gp.read_file(geojson).to_crs("EPSG:32632")
     if len(region) > 0:
         gdf = gdf.clip(region)
-    gdf.to_file(out_fname + ".gpkg")
+    if len(gdf) > 0:
+        gdf.to_file(out_fname + ".gpkg")
     return gdf
 
 
@@ -263,7 +265,9 @@ def _query_server(
     # Query webservice to find layer
     logging.info("Querying HLNUG for geology_data")
     map_url = f"{HLNUG_URL}/{map_server_suffix}"
-    feat_serv = restapi.MapService(map_url)
+    feat_serv = restapi.MapService(
+        map_url, client="u4py, rudolf@geo.tu-darmstadt.de"
+    )
     lyr_types = [lyr.type for lyr in feat_serv.layers]
     lyr_names = [lyr.name for lyr in feat_serv.layers]
 
@@ -278,7 +282,9 @@
 
         # Assemble url to layer and get data
         lyr_url = f"{map_url}/{ii}"
-        ms_lyr = restapi.MapServiceLayer(lyr_url)
+        ms_lyr = restapi.MapServiceLayer(
+            lyr_url, client="u4py, rudolf@geo.tu-darmstadt.de"
+        )
         if len(region) > 0:
             restgeom = polygon_to_restapi(
                 region.geometry[0], region.crs.to_string()
diff --git a/u4py/plotting/formatting.py b/u4py/plotting/formatting.py
index 41f642b..580393b 100644
--- a/u4py/plotting/formatting.py
+++ b/u4py/plotting/formatting.py
@@ -298,10 +298,10 @@ def add_scalebar(
     """
 
     if (
-        (not "left" in loc)
-        and (not "right" in loc)
-        and (not "top" in loc)
-        and (not "bottom" in loc)
+        ("left" not in loc)
+        and ("right" not in loc)
+        and ("top" not in loc)
+        and ("bottom" not in loc)
     ):
         raise ValueError(
             "Invalid location string, allowed values are: 'top', 'bottom', "
@@ -362,12 +362,13 @@
         fontweight="bold",
         color="w",
         path_effects=[path_effects.withStroke(linewidth=2, foreground="k")],
+        zorder=99,
     )
 
     for ii in range(div):
         ax.add_patch(
             mpatches.Rectangle(
-                (x, y), width, height, facecolor="w", edgecolor="k", zorder=10
+                (x, y), width, height, facecolor="w", edgecolor="k", zorder=99
             )
         )
         _label_scalebar(ax, x, y, width, height)
@@ -378,7 +379,7 @@
                 height,
                 facecolor="k",
                 edgecolor="k",
-                zorder=10,
+                zorder=99,
             )
         )
         if ii < div - 1:
diff --git a/u4py/plotting/plots.py b/u4py/plotting/plots.py
index 91221b4..bcb8c2c 100644
--- a/u4py/plotting/plots.py
+++ b/u4py/plotting/plots.py
@@ -576,14 +576,24 @@ def geology_map(
         geometry=[u4spatial.bounds_to_polygon(ax)], crs=crs
     )
     if use_internal:
-        bounds = region.bounds.iloc[0]
-        geology_data = u4web.query_internal(
-            "gk25-hessen:GK25_f_GK3",
-            region=[bounds.minx, bounds.miny, bounds.maxx, bounds.maxy],
-            region_crs=region.crs,
-            suffix=f"{row[1].group:05}",
-            out_folder=shp_path,
-        )
+        try:
+            bounds = region.bounds.iloc[0]
+            geology_data = u4web.query_internal(
+                "gk25-hessen:GK25_f_GK3",
+                region=[bounds.minx, bounds.miny, bounds.maxx, bounds.maxy],
+                region_crs=region.crs,
+                suffix=f"{row[1].group:05}",
+                out_folder=shp_path,
+            )
+        except TimeoutError:
+            logging.debug("Internal Server timed out, using fallback.")
+            geology_data = u4web.query_hlnug(
+                "geologie/gk25/MapServer",
+                "Geologie (Kartiereinheiten)",
+                region=region,
+                suffix=f"_{row[1].group:05}",
+                out_folder=shp_path,
+            )
     else:
         geology_data = u4web.query_hlnug(
@@ -655,13 +665,28 @@
 
     # Load tectonic data
     if use_internal:
-        fault_data = u4web.query_internal(
-            "gk25-hessen:GK25_l-tek_GK3",
-            region=[bounds.minx, bounds.miny, bounds.maxx, bounds.maxy],
-            region_crs=region.crs,
-            suffix=f"{row[1].group:05}",
-            out_folder=shp_path,
-        )
+        try:
+            fault_data = u4web.query_internal(
+                "gk25-hessen:GK25_l-tek_GK3",
+                region=[
+                    bounds.minx,
+                    bounds.miny,
+                    bounds.maxx,
+                    bounds.maxy,
+                ],
+                region_crs=region.crs,
+                suffix=f"{row[1].group:05}",
+                out_folder=shp_path,
+            )
+        except TimeoutError:
+            logging.debug("Internal Server timed out, using fallback.")
+            fault_data = u4web.query_hlnug(
+                "geologie/gk25/MapServer",
+                "Tektonik (Liniendaten)",
+                region=region,
+                suffix=f"_{row[1].group:05}",
+                out_folder=shp_path,
+            )
     else:
         fault_data = u4web.query_hlnug(
@@ -670,16 +695,16 @@
             "geologie/gk25/MapServer",
             "Tektonik (Liniendaten)",
             region=region,
             suffix=f"_{row[1].group:05}",
             out_folder=shp_path,
         )
-    if fault_data:
+    if len(fault_data) > 0:
         fault_data.plot(ax=ax, color="k")
 
     # Formatting and other stuff
     u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
-    # u4ax.add_basemap(
-    #     ax=ax,
-    #     crs=geology_data.crs,
-    #     # source=contextily.providers.CartoDB.Positron,
-    # )
+    u4ax.add_basemap(
+        ax=ax,
+        crs=geology_data.crs,
+        source=contextily.providers.TopPlusOpen.Grey,
+    )
 
     for ftype in GLOBAL_TYPES:
         fig.savefig(
@@ -1556,6 +1581,7 @@ def timeseries_map(
     :param plot_buffer: The buffer width around the area of interest.
    :type plot_buffer: float
     """
+    output_path = os.path.join(output_path, suffix)
     fexists = [
         os.path.exists(
             os.path.join(output_path, f"{row[1].group:05}_psi.{ftype}")
@@ -1570,7 +1596,6 @@
             f"Plotting timeseries and psi map of group {row[1].group:05}."
         )
         # Set paths
-        output_path = os.path.join(output_path, suffix)
         psi_data_path = os.path.join(output_path, "psi_inv_data")
         os.makedirs(output_path, exist_ok=True)
         os.makedirs(psi_data_path, exist_ok=True)
@@ -1579,8 +1604,8 @@
         shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
 
         # Create figure and axes
-        fig = plt.figure(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
-        gs = fig.add_gridspec(ncols=2, width_ratios=(1, 2))
+        fig = plt.figure(figsize=(13, 5), dpi=GLOBAL_DPI)
+        gs = fig.add_gridspec(ncols=2, width_ratios=(1, 3))
         ax_map = fig.add_subplot(gs[0])
         ax_ts = fig.add_subplot(gs[1])
 
@@ -1589,26 +1614,38 @@
         shp_gdf.buffer(plot_buffer).plot(ax=ax_map, fc="None", ec="None")
 
         # Load PSI data
-        reg_gdf = gp.GeoDataFrame(
-            geometry=[u4spatial.bounds_to_polygon(ax_map)],
-            crs=crs,
-        )
         psi_local = u4psi.get_region_data(
             shp_gdf,
             f"grp_{row[1].group:05}_local",
             os.path.join(psi_path, "hessen_l3_clipped.gpkg"),
         )
-        psi_regional = u4psi.get_region_data(
-            reg_gdf,
-            f"grp_{row[1].group:05}_regional",
-            os.path.join(psi_path, "hessen_l3_clipped.gpkg"),
-        )
         results = u4proc.get_psi_dict_inversion(
             psi_local,
             save_path=os.path.join(psi_data_path, f"grp_{row[1].group:05}.pkl"),
         )
 
-        # Convert PSI data for plotting
+        # Add PSI in Map
+        ax_map.axis("equal")
+        u4ax.add_gpkg_data_where(
+            contour_path,
+            table="thresholded_contours_all_shapes",
+            where=f"groups=={row[1].group}",
+            ax=ax_map,
+            edgecolor="k",
+            facecolor="None",
+            alpha=0.75,
+            linewidth=1,
+        )
+        fig.tight_layout()
+        reg_gdf = gp.GeoDataFrame(
+            geometry=[u4spatial.bounds_to_polygon(ax_map)],
+            crs=crs,
+        )
+        psi_regional = u4psi.get_region_data(
+            reg_gdf,
+            f"grp_{row[1].group:05}_regional",
+            os.path.join(psi_path, "hessen_l3_clipped.gpkg"),
+        )
         u4spatial.xy_data_to_gdf(
             psi_regional["x"],
             psi_regional["y"],
@@ -1621,7 +1658,7 @@
             vmin=-5,
             vmax=5,
             zorder=2,
-            markersize=10,
+            markersize=30,
             edgecolors="k",
             legend=True,
             legend_kwds={
@@ -1632,32 +1669,21 @@
                 "pad": 0.1,
             },
         )
-        u4ax.add_gpkg_data_where(
-            contour_path,
-            table="thresholded_contours_all_shapes",
-            where=f"groups=={row[1].group}",
-            ax=ax_map,
-            edgecolor="k",
-            facecolor="None",
-            alpha=0.75,
-            linewidth=1,
-        )
         u4plotfmt.add_scalebar(ax=ax_map, width=plot_buffer * 4, div=1)
-        ax_map.axis("off")
-        fig.tight_layout()
-        plt.axis("equal")
-        plt.draw()
         u4ax.add_basemap(
             ax=ax_map, crs=crs, source=contextily.providers.CartoDB.Voyager
         )
+        ax_map.axis("off")
+        plt.draw()
 
         # Plot Timeseries
        u4ax.plot_timeseries_fit(
             ax=ax_ts, results=results, fit_num=1, annotate=True, show_errors=True
         )
+        mylim = np.max(np.abs((ax_ts.get_ylim()))) * 1.4
+        ax_ts.set_ylim(-mylim, mylim)
         ax_ts.set_xlabel("Time")
         ax_ts.set_ylabel("Displacement (mm)")
-        fig.tight_layout()
 
         for ftype in GLOBAL_TYPES:
             fig.savefig(
--
GitLab
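
Note (illustration, not part of the patch): a minimal sketch of the timeout fallback that geology_map gains above, assuming the u4py.addons.web_services call signatures shown in the diff. The helper name query_geology_with_fallback and its parameters are hypothetical stand-ins for the surrounding code in plots.py.

    import logging

    import u4py.addons.web_services as u4web


    def query_geology_with_fallback(region, group, shp_path):
        # Hypothetical helper mirroring the try/except added in geology_map.
        try:
            # Internal GeoServer layer, only reachable from inside the network.
            bounds = region.bounds.iloc[0]
            return u4web.query_internal(
                "gk25-hessen:GK25_f_GK3",
                region=[bounds.minx, bounds.miny, bounds.maxx, bounds.maxy],
                region_crs=region.crs,
                suffix=f"{group:05}",
                out_folder=shp_path,
            )
        except TimeoutError:
            # Public HLNUG map service as fallback when the internal server times out.
            logging.debug("Internal Server timed out, using fallback.")
            return u4web.query_hlnug(
                "geologie/gk25/MapServer",
                "Geologie (Kartiereinheiten)",
                region=region,
                suffix=f"_{group:05}",
                out_folder=shp_path,
            )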