Commit aaa90a52 authored by Rudolf, Michael

Report Update

 - Removed geohazards, shapes (from classifier), difference map
 - Changed the text according to new requirements
 - Added drill sites, roads and map information
 - Minor fixes and refactoring
 - Changed plot behaviours for HLNUG or Umwelt4.0 data
parent d6926ece
......@@ -10,19 +10,20 @@ import docx.parts
import docx.shared
import geopandas as gp
import humanize
import numpy as np
import uncertainties as unc
from docx.document import Document
from docx.enum.text import WD_ALIGN_PARAGRAPH
import u4py.io.human_text as u4human
from u4py.utils.types import U4ResDict
FIGURENUM = 1
TABLENUM = 1
humanize.activate("de")
def site_report(
row: tuple,
row: U4ResDict,
output_path: os.PathLike,
suffix: str,
hlnug_data: gp.GeoDataFrame,
......@@ -43,6 +44,13 @@ def site_report(
output_path_docx = os.path.join(output_path, suffix)
os.makedirs(output_path_docx, exist_ok=True)
img_path = os.path.join(output_path, "known_features", f"{group:05}")
site_path = os.path.split(os.path.split(output_path)[0])[0]
web_query_path = os.path.join(
site_path,
"Places",
"Classifier_shapes",
"Web_Queries_" + suffix.split("_")[-1],
)
# Create Document and apply style
document = docx.Document()
......@@ -73,17 +81,19 @@ def site_report(
document = details_and_satellite(img_path, img_fmt, document)
# Manual Classification or HLNUG data
document = hlnug_description(
hlnug_data[hlnug_data.AMT_NR_ == group], document
hlnug_data[hlnug_data.AMT_NR_ == group],
row[1],
document,
web_query_path,
)
document = shape(row[1], document)
document = landuse(row[1], document)
# Volumina
document = moved_volumes(row[1], document)
# Difference maps
if os.path.exists(img_path + f"_diffplan.{img_fmt}"):
document = difference(img_path, img_fmt, document)
if os.path.exists(img_path + f"_dem.{img_fmt}"):
document = dem(img_path, img_fmt, document)
# Topographie
if os.path.exists(img_path + f"_slope.{img_fmt}") or os.path.exists(
......@@ -95,9 +105,6 @@ def site_report(
if os.path.exists(img_path + f"_psi.{img_fmt}"):
document = psi_map(img_path, img_fmt, document)
# Geohazard
document = geohazard(row[1], document)
# Geologie etc...
if os.path.exists(img_path + f"_GK25.{img_fmt}"):
document = geology(img_path, img_fmt, document)
......@@ -119,11 +126,6 @@ def location(series: gp.GeoSeries, document: Document) -> Document:
:rtype: str
"""
wgs_point = gp.GeoDataFrame(
geometry=[series.geometry.centroid], crs="EPSG:32632"
).to_crs("EPSG:4326")
lat = np.round(float(wgs_point.geometry.y.iloc[0]), 6)
lng = np.round(float(wgs_point.geometry.x.iloc[0]), 6)
document.add_heading("Lokalität:", level=1)
prgph = document.add_paragraph()
......@@ -134,27 +136,6 @@ def location(series: gp.GeoSeries, document: Document) -> Document:
prgph.add_run("Koordinaten (UTM 32N): ").bold = True
prgph.add_run(f"{int(series.geometry.centroid.y)} N ")
prgph.add_run(f"{int(series.geometry.centroid.x)} E")
prgph = document.add_paragraph()
prgph.alignment = WD_ALIGN_PARAGRAPH.LEFT
prgph.add_run("Google Maps: ").bold = True
prgph.add_run(
f"https://www.google.com/maps/place/{lat},{lng}/@{lat},{lng}/data=!3m1!1e3"
)
prgph = document.add_paragraph()
prgph.alignment = WD_ALIGN_PARAGRAPH.LEFT
prgph.add_run("Bing Maps: ").bold = True
prgph.add_run(
f"https://bing.com/maps/default.aspx?cp={lat}~{lng}&style=h&lvl=15"
)
prgph = document.add_paragraph()
prgph.alignment = WD_ALIGN_PARAGRAPH.LEFT
prgph.add_run("OpenStreetMap: ").bold = True
prgph.add_run(
f"http://www.openstreetmap.org/?lat={lat}&lon={lng}&zoom=17&layers=M"
)
return document
......@@ -178,12 +159,10 @@ def details_and_satellite(
prgph = document.add_paragraph()
prgph.add_run(f"Abbildung {FIGURENUM}: ").bold = True
FIGURENUM += 1
prgph.add_run("Lokalität der Anomalie. ")
prgph.add_run("Lokalität der Rutschung. ")
prgph.add_run("Links: ").italic = True
prgph.add_run(
"Übersicht über das Gebiet der Gruppe inklusive verschiedener "
+ "Geogefahren und der detektierten Anomalien "
+ "(Kartengrundlage OpenStreetMap). "
"Übersicht über das Gebiet der Rutschung inklusive weiterer Rutschungen (Basiskarte OpenStreetMap). "
)
prgph.add_run("Rechts: ").italic = True
prgph.add_run("Luftbild basierend auf ESRI Imagery.")
......@@ -191,7 +170,12 @@ def details_and_satellite(
return document
def hlnug_description(hld: gp.GeoDataFrame, document: Document) -> Document:
def hlnug_description(
hld: gp.GeoDataFrame,
row: U4ResDict,
document: Document,
web_query_path: os.PathLike,
) -> Document:
"""Adds a description based on HLNUG data
:param hld: The dataset
......@@ -199,160 +183,117 @@ def hlnug_description(hld: gp.GeoDataFrame, document: Document) -> Document:
:return: The description
:rtype: str
"""
def kart_str(in_str):
if "ja" in in_str:
spl = in_str.split(" ")
if len(spl) > 2:
return f"von {spl[1]} am {spl[2]}"
else:
return f"von {spl[1]}"
else:
return "aus dem DGM"
document.add_heading("Beschreibung", level=1)
prgph = document.add_paragraph()
prgph.add_run(f"Es handelt sich hierbei um eine {hld.OBJEKT.values[0]} ")
if hld.HERKUNFT.values[0]:
prgph.add_run(f"welche durch {hld.HERKUNFT.values[0]} ")
if hld.KARTIERT.values[0]:
prgph.add_run(f"{kart_str(hld.KARTIERT.values[0])} ")
prgph.add_run("kartiert wurde")
prgph.add_run(". ")
if hld.KLASSI_DGM.values[0]:
prgph.add_run(f"Der Befund im DGM ist {hld.KLASSI_DGM.values[0]}. ")
if hld.RU_SCHICHT.values[0]:
# Add geological structural area
structure_string = u4human.listed_strings(row["structural_region"])
if isinstance(row["structural_region"], list):
prgph.add_run(
f"Die betroffenen Einheiten sind {hld.RU_SCHICHT.values[0]} "
)
if hld.RU_SCHIC_2.values[0]:
prgph.add_run(f"und {hld.RU_SCHIC_2.values[0]} ")
if hld.GEOLOGIE.values[0]:
prgph.add_run(f"auf {hld.GEOLOGIE.values[0]} ")
if hld.STR_SYSTEM.values[0]:
prgph.add_run(f"({hld.STR_SYSTEM.values[0]})")
prgph.add_run(". ")
f"Die Rutschung liegt in den geologischen Strukturräumen {structure_string} "
)
else:
if hld.GEOLOGIE.values[0]:
prgph.add_run(
f"Die Geologie besteht aus {hld.GEOLOGIE.values[0]} "
f'Die Rutschung liegt im geologischen Strukturraum "{structure_string}" '
)
if hld.STR_SYSTEM.values[0]:
prgph.add_run(f"({hld.STR_SYSTEM.values[0]})")
prgph.add_run(". ")
if hld.FLAECHE_M2.values[0]:
# Add number and name of geological map
if isinstance(row["geology_mapnum"], list):
map_list = [
f"{mpnum} {mpname}"
for mpnum, mpname in zip(
row["geology_mapnum"], row["geology_mapname"]
)
]
mapnum_string = u4human.listed_strings(map_list)
prgph.add_run(f"auf den Kartenblättern {mapnum_string}. ")
else:
prgph.add_run(
f"Die betroffene Fläche beträgt {np.round(hld.FLAECHE_M2.values[0], -2)}. "
f"auf dem Kartenblatt {row['geology_mapnum']} {row['geology_mapname']}. "
)
if hld.LAENGE_M.values[0] and hld.BREITE_M.values[0]:
# Add dimensions
prgph.add_run(
f"Sie ist {hld.LAENGE_M.values[0]}\u00a0m lang und {hld.BREITE_M.values[0]}\u00a0m breit"
f"Sie hat eine Länge von ca. {hld['LAENGE_M'].values[0]}\u00a0m, eine Breite von ca. {hld['BREITE_M'].values[0]}\u00a0m und verläuft nach {u4human.direction_to_text(hld['EXPOSITION'].values[0], in_lang='de')}. "
)
if (
hld.H_MAX_MNN.values[0]
and hld.H_MIN_MNN.values[0]
and hld.H_DIFF_M.values[0]
):
# Add sliding layers
prgph.add_run(
f" und erstreckt sich von {hld.H_MAX_MNN.values[0]}\u00a0m NN bis {hld.H_MIN_MNN.values[0]}\u00a0m NN über {hld.H_DIFF_M.values[0]}\u00a0m Höhendifferenz"
f"Die an der Rutschung beteiligten Schichten sind {hld['RU_SCHICHT'].values[0]}"
)
if hld["RU_SCHIC_2"].values[0]:
prgph.add_run(f" und {hld['RU_SCHIC_2'].values[0]}")
prgph.add_run(". ")
if hld.EXPOSITION.values[0]:
if hld.EXPOSITION.values[0] != "n.b.":
prgph.add_run(
f"Das Gelände fällt nach {u4human.direction_to_text(hld.EXPOSITION.values[0], in_lang='de')} ein. "
# Add roads
if (
row["roads_has_motorway"]
or row["roads_has_primary"]
or row["roads_has_secondary"]
):
road_list = []
if row["roads_has_motorway"]:
if row["roads_motorway_names"].startswith("["):
motorway_names = eval(row["roads_motorway_names"])
else:
motorway_names = row["roads_motorway_names"]
motorway_lengths = eval(row["roads_motorway_length"])
if isinstance(motorway_lengths, list):
for rname, rlen in zip(motorway_names, motorway_lengths):
road_list.append(
f"der {rname} auf einer Länge von {rlen:.1f}\u00a0m"
)
if hld.LANDNUTZUN.values[0]:
prgph.add_run(
f"Im wesentlichen ist das Gebiet von {hld.LANDNUTZUN.values[0]} bedeckt. "
else:
road_list.append(
f"der {motorway_names} auf einer Länge von {motorway_lengths:.1f}\u00a0m"
)
if hld.URSACHE.values[0]:
prgph.add_run(f"Eine mögliche Ursache ist {hld.URSACHE.values[0]}. ")
if hld.SCHUTZ_OBJ.values[0]:
if hld.SCHUTZ_OBJ.values[0] == "nicht bekannt":
prgph.add_run("Eine potentielle Gefährdung ist nicht bekannt. ")
if row["roads_has_primary"]:
if row["roads_primary_names"].startswith("["):
primary_names = eval(row["roads_primary_names"])
else:
prgph.add_run(
f"Eine potentielle Gefährdung für {hld.SCHUTZ_OBJ.values[0]} könnte vorliegen. "
primary_names = row["roads_primary_names"]
primary_lengths = eval(row["roads_primary_length"])
if isinstance(primary_lengths, list):
for rname, rlen in zip(primary_names, primary_lengths):
road_list.append(
f"der {rname} auf einer Länge von {rlen:.1f}\u00a0m"
)
if hld.AKTIVITAET.values[0]:
if hld.AKTIVITAET.values[0] == "nicht bekannt":
prgph.add_run("Eine mögliche Aktivität ist nicht bekannt. ")
if hld.AKTIVITAET.values[0] == "aktiv":
prgph.add_run(f"Die {hld.OBJEKT.values[0]} ist aktiv. ")
if hld.MASSNAHME.values[0]:
if hld.MASSNAHME.values[0] == "nicht bekannt":
prgph.add_run("Über unternommene Maßnahmen ist nichts bekannt. ")
else:
prgph.add_run("")
if hld.BEMERKUNG.values[0]:
prgph = document.add_paragraph()
prgph.add_run(
"Kommentar: " + hld.BEMERKUNG.values[0] + "."
).italic = True
return document
def shape(series: gp.GeoSeries, document: Document) -> Document:
"""Adds shape information to the document
:param series: The GeoSeries object extracted from the row.
:type series: gp.GeoSeries
:return: The tex code.
:rtype: str
"""
humanize.activate("de")
prgph = document.add_paragraph()
prgph.add_run("Größe und Form: ").bold = True
long_ax = eval(series["shape_ellipse_a"])
short_ax = eval(series["shape_ellipse_b"])
if isinstance(long_ax, list):
areas = [np.pi * a * b for a, b in zip(long_ax, short_ax)]
if len(areas) > 0:
imax = np.argmax(areas)
imin = np.argmin(areas)
if len(long_ax) > 2:
lax = (
f"zwischen {round(np.min(long_ax))} und "
+ f"{round(np.max(long_ax))}"
)
sax = (
f"zwischen {round(np.min(short_ax))} und "
+ f"{round(np.max(short_ax))}"
road_list.append(
f"der {primary_names} auf einer Länge von {primary_lengths:.1f}\u00a0m"
)
if row["roads_has_secondary"]:
if row["roads_secondary_names"].startswith("["):
secondary_names = eval(row["roads_secondary_names"])
else:
lax = f"{round(long_ax[0])} und {round(long_ax[1])}"
sax = f"{round(short_ax[0])} und {round(short_ax[1])}"
prgph.add_run(
f"Es handelt sich um {humanize.apnumber(len(long_ax))} "
+ f"Anomalien. Die Anomalien sind {lax}\u00a0m lang und {sax}"
+ "\u00a0m breit. "
secondary_names = row["roads_secondary_names"]
secondary_lengths = eval(row["roads_secondary_length"])
if isinstance(secondary_lengths, list):
for rname, rlen in zip(secondary_names, secondary_lengths):
road_list.append(
f"der {rname} auf einer Länge von {rlen:.1f}\u00a0m"
)
else:
road_list.append(
f"der {secondary_names} auf einer Länge von {secondary_lengths:.1f}\u00a0m"
)
if len(long_ax) > 2:
prgph.add_run(
"Die flächenmäßig kleinste Anomalie ist hierbei "
+ f"{round(long_ax[imin])}\u00a0m lang und "
+ f"{round(short_ax[imin])}\u00a0m breit, die größte "
+ f"{round(long_ax[imax])}\u00a0m lang und "
+ f"{round(short_ax[imax])}\u00a0m breit. "
f"Die Rutschung wird von {u4human.listed_strings(road_list)} durchkreuzt. "
)
if isinstance(long_ax, float):
lax = f"{round(long_ax)}"
sax = f"{round(short_ax)}"
prgph.add_run(f"Die Anomalie ist {lax} m lang und {sax} m breit. ")
# Add drill sites
drill_path = os.path.join(
web_query_path,
f"Archivbohrungen, Endteufe [m]_bohr_{row['group']:05}.gpkg",
)
if os.path.exists(drill_path):
num_wells = len(gp.read_file(drill_path))
if num_wells > 1:
prgph.add_run(
f"Im Umfeld der Rutschung sind {humanize.apnumber(num_wells)} Bohrungen bekannt. "
)
elif num_wells == 1:
prgph.add_run("Im Umfeld der Rutschung ist eine Bohrung bekannt. ")
return document
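The road name and length columns may hold either a plain value or a stringified Python list, which is why the branches above check for a leading "[" before calling eval. A minimal sketch of that parsing step, using ast.literal_eval as a safer stand-in for eval; the helper name parse_maybe_list is illustrative and not part of the module:

    import ast

    def parse_maybe_list(value):
        """Return the parsed value of a column that may hold a stringified Python list."""
        if isinstance(value, str) and value.startswith("["):
            # e.g. "['A 3', 'A 5']" -> ['A 3', 'A 5']
            return ast.literal_eval(value)
        return value

    # Hypothetical usage mirroring the motorway branch above:
    # motorway_names = parse_maybe_list(row["roads_motorway_names"])
    # motorway_lengths = ast.literal_eval(row["roads_motorway_length"])  # float or list of floats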
......@@ -376,7 +317,7 @@ def landuse(series: gp.GeoSeries, document: Document) -> Document:
prgph.add_run(
"Der überwiegende Teil wird durch "
+ f"{u4human.landuse_str(series.landuse_major)} bedeckt. "
+ "Die Anteile der Landnutzung sind: "
+ "Die Anteile der Landnutzung nach OpenStreetMap sind: "
)
if isinstance(landuse, list):
lnd_str = ""
......@@ -402,7 +343,8 @@ def moved_volumes(series: gp.GeoSeries, document: Document) -> Document:
"""
document.add_heading("Höhenveränderungen", level=1)
document.add_paragraph(
"Im Gebiet um die detektierte Anomalie wurde insgesamt "
"Im Gebiet um die detektierte Anomalie wurde laut vorliegendem "
+ "Differenzenplan (HVBG) insgesamt "
+ f"{series.volumes_moved}\u00a0m³ Material bewegt, "
+ f"wovon {series.volumes_added}\u00a0m³ hinzugefügt und "
+ f"{abs(series.volumes_removed)}\u00a0m³ abgetragen wurde. "
......@@ -412,10 +354,8 @@ def moved_volumes(series: gp.GeoSeries, document: Document) -> Document:
return document
def difference(
img_path: os.PathLike, img_fmt: str, document: Document
) -> Document:
"""Adds the difference and slope maps.
def dem(img_path: os.PathLike, img_fmt: str, document: Document) -> Document:
"""Adds the dem maps.
:param img_path: The path to the image folder including group name.
:type img_path: os.PathLike
......@@ -423,24 +363,19 @@ def difference(
:rtype: str
"""
global FIGURENUM
if os.path.exists(img_path + f"_diffplan.{img_fmt}") and os.path.exists(
img_path + f"_dem.{img_fmt}"
):
if os.path.exists(img_path + f"_dem.{img_fmt}"):
prgph = document.add_paragraph()
prgph.alignment = WD_ALIGN_PARAGRAPH.CENTER
run = prgph.add_run()
run.add_picture(
img_path + f"_diffplan.{img_fmt}", width=docx.shared.Mm(70)
img_path + f"_dem.{img_fmt}", width=docx.shared.Mm(140)
)
run.add_picture(img_path + f"_dem.{img_fmt}", width=docx.shared.Mm(70))
prgph = document.add_paragraph()
prgph.add_run(f"Abbildung {FIGURENUM}: ").bold = True
FIGURENUM += 1
prgph.add_run("Höhendaten im Bereich der Anomalie. ")
prgph.add_run("Links: ").italic = True
prgph.add_run("Differenzenplan im Gebiet.")
prgph.add_run("Rechts: ").italic = True
prgph.add_run("Digitales Höhenmodell (Schummerung).")
prgph.add_run(
"Digitales Höhenmodell DGM 1 (HVBG) und vorhandene Bohrungen."
)
return document
......@@ -461,8 +396,8 @@ def topography(
document.add_heading("Topographie", level=1)
document.add_paragraph(
"In den folgenden Jahren war die Topographie im Bereich der "
+ "Anomalien wie folgt:"
"In den Jahren 2014, 2019 und 2021 war die Topographie anhand des DGM1"
+ " (HVBG) im Bereich der Rutschung wie folgt:"
)
for yy in ["14", "19", "21"]:
year = f"20{yy}"
......@@ -478,7 +413,7 @@ def topography(
if isinstance(sl_m, list) or isinstance(sl_m, float):
usl, usl_str = u4human.topo_text(sl_m, sl_s, "%", is_tex=False)
prgph.add_run(
f"Im Bereich der Anomalie {u4human.slope_std_str(usl.s)} "
f"Im Bereich der Rutschung {u4human.slope_std_str(usl.s)} "
+ f"und {u4human.slope_str(usl.n)} ({usl_str}). "
)
if as_m:
......@@ -491,7 +426,7 @@ def topography(
)
else:
prgph.add_run(
"Es liegen für den inneren Bereich der Anomalie keine Daten vor (außerhalb DEM). "
"Es liegen für den inneren Bereich der Rutschung keine Daten vor (außerhalb DEM). "
)
# Values for the hull around all anomalies
......@@ -519,7 +454,7 @@ def topography(
)
else:
prgph.add_run(
"Es liegen für das nähere Umfeld der Anomalien keine Werte für die Steigung vor. "
"Es liegen für das nähere Umfeld der Rutschung keine Werte für die Steigung vor. "
)
if os.path.exists(img_path + f"_slope.{img_fmt}") and os.path.exists(
......@@ -582,141 +517,7 @@ def psi_map(
FIGURENUM += 1
prgph.add_run(
"Persistent scatterer und Zeitreihe der Deformation "
+ "im Gebiet der Gruppe."
)
print(img_path)
return document
def geohazard(series: gp.GeoSeries, document: Document) -> Document:
"""Converts the known geohazards into a descriptive text.
:param series: The GeoSeries object extracted from the row.
:type series: gp.GeoSeries
:return: The tex code.
:rtype: str
"""
global TABLENUM
document.add_heading("Geogefahren", level=1)
prgph = document.add_paragraph()
prgph.add_run(f"Tabelle {TABLENUM}: ").bold = True
prgph.add_run("Bekannte Geogefahren im Bereich der Rutschung.")
table = document.add_table(rows=4, cols=3, style="Light Shading Accent 1")
table.cell(0, 0).text = "Typ"
table.cell(0, 1).text = "Innerhalb des Areals"
table.cell(0, 2).text = "Im Umkreis von 1 km"
table.cell(1, 0).text = "Hangrutschungen"
table.cell(1, 1).text = f"{series.landslides_num_inside}"
table.cell(1, 2).text = f"{series.landslides_num_1km}"
table.cell(2, 0).text = "Karsterscheinungen"
table.cell(2, 1).text = f"{series.karst_num_inside}"
table.cell(2, 2).text = f"{series.karst_num_1km}"
table.cell(3, 0).text = "Steinschläge"
table.cell(3, 1).text = f"{series.rockfall_num_inside}"
table.cell(3, 2).text = f"{series.rockfall_num_1km}"
document = landslide_risk(series, document)
document = karst_risk(series, document)
document = subsidence_risk(series, document)
return document
def landslide_risk(series: gp.GeoSeries, document: Document) -> Document:
"""Converts the known landslides into a descriptive text.
:param series: The GeoSeries object extracted from the row.
:type series: gp.GeoSeries
:return: The tex code.
:rtype: str
"""
lsar = series.landslide_total
document.add_heading("Rutschungsgefährdung", level=2)
prgph = document.add_paragraph()
if lsar > 0:
prgph.add_run(
f"Das Gebiet liegt {u4human.part_str(lsar)} ({lsar:.0f}%) in "
+ "einem gefährdeten Bereich mit rutschungsanfälligen Schichten. "
)
try:
landslide_units = eval(series.landslide_units)
except (NameError, SyntaxError):
landslide_units = series.landslide_units
if isinstance(landslide_units, list):
text = "Die Einheiten sind: "
for unit in landslide_units:
text += f"{unit}, "
prgph.add_run(text[:-2] + ".")
else:
prgph.add_run(f"Wichtigste Einheiten sind {landslide_units}.")
else:
prgph.add_run(
"Es liegen keine Informationen zur Rutschungsgefährdung vor."
)
return document
def karst_risk(series: gp.GeoSeries, document: Document) -> Document:
"""Converts the known karst phenomena into a descriptive text.
:param series: The GeoSeries object extracted from the row.
:type series: gp.GeoSeries
:return: The tex code.
:rtype: str
"""
ksar = series.karst_total
document.add_heading("Karstgefährdung", level=2)
prgph = document.add_paragraph()
if ksar > 0:
prgph.add_run(
f"Das Gebiet liegt {u4human.part_str(ksar)} ({ksar:.0f}\%) in "
+ "einem Bereich bekannter verkarsteter Schichten. "
)
try:
karst_units = eval(series.karst_units)
except (NameError, SyntaxError):
karst_units = series.karst_units
if isinstance(karst_units, list):
text = "Die Einheiten sind: "
for unit in karst_units:
text += f"{unit}, "
prgph.add_run(text[:-2] + ".")
else:
prgph.add_run(f"Wichtigste Einheiten sind {karst_units}.")
else:
prgph.add_run("Es liegen keine Informationen zur Karstgefährdung vor.")
return document
def subsidence_risk(series: gp.GeoSeries, document: Document) -> Document:
"""Converts the area of known subsidence into a descriptive text.
:param series: The GeoSeries object extracted from the row.
:type series: gp.GeoSeries
:return: The tex code.
:rtype: str
"""
subsar = series.subsidence_total
document.add_heading("Setzungsgefährdung", level=2)
prgph = document.add_paragraph()
if subsar > 0:
prgph.add_run(
f"Das Gebiet liegt {u4human.part_str(subsar)} ({subsar:.0f}\%) in "
+ "einem Bereich bekannter setzungsgefährdeter Schichten. "
)
try:
subsidence_units = eval(series.subsidence_units)
except (NameError, SyntaxError):
subsidence_units = series.subsidence_units
if isinstance(subsidence_units, list):
text = "Die Einheiten sind: "
for unit in subsidence_units:
text += f"{unit}, "
prgph.add_run(text[:-2] + ".")
else:
prgph.add_run(f"Wichtigste Einheiten sind {subsidence_units}.")
else:
prgph.add_run(
"Es liegen keine Informationen zur Setzungsgefährdung vor."
+ "im Gebiet der Rutschung."
)
return document
......@@ -724,7 +525,7 @@ def subsidence_risk(series: gp.GeoSeries, document: Document) -> Document:
def geology(
img_path: os.PathLike, img_fmt: str, document: Document
) -> Document:
"""Adds information for geology to the docum
"""Adds information for geology to the document.
:param img_path: The path to the geology image file.
:type img_path: os.PathLike
......@@ -746,7 +547,7 @@ def geology(
prgph.alignment = WD_ALIGN_PARAGRAPH.CENTER
run = prgph.add_run()
run.add_picture(
img_path + f"_GK25_leg.{img_fmt}", width=docx.shared.Mm(150)
img_path + f"_GK25_leg.{img_fmt}", width=docx.shared.Mm(70)
)
prgph = document.add_paragraph()
......@@ -784,7 +585,7 @@ def hydrogeology(
prgph.alignment = WD_ALIGN_PARAGRAPH.CENTER
run = prgph.add_run()
run.add_picture(
img_path + f"_HUEK200_leg.{img_fmt}", width=docx.shared.Mm(150)
img_path + f"_HUEK200_leg.{img_fmt}", width=docx.shared.Mm(70)
)
prgph = document.add_paragraph()
......@@ -798,7 +599,7 @@ def hydrogeology(
def soils(img_path: os.PathLike, img_fmt: str, document: Document) -> Document:
"""Adds information for soils to the documen
"""Adds information for soils to the document.
:param img_path: The path to the soils image file.
:type img_path: os.PathLike
......@@ -820,7 +621,7 @@ def soils(img_path: os.PathLike, img_fmt: str, document: Document) -> Document:
prgph.alignment = WD_ALIGN_PARAGRAPH.CENTER
run = prgph.add_run()
run.add_picture(
img_path + f"_BFD50_leg.{img_fmt}", width=docx.shared.Mm(150)
img_path + f"_BFD50_leg.{img_fmt}", width=docx.shared.Mm(70)
)
prgph = document.add_paragraph()
......
......@@ -232,7 +232,7 @@ def direction_to_text(
if out_lang != "abbrev":
if in_lang == "en":
desc = translate_abbrev_en[out_lang][desc]
if in_lang == "de":
elif in_lang == "de":
desc = translate_abbrev_de[out_lang][desc]
return desc
......@@ -282,3 +282,38 @@ def topo_text(
ustr = ustr.replace("+/-", "±")
ustr = ustr.replace("\\,", "\u00a0")
return usl, ustr
def listed_strings(entry: str | list) -> str:
"""
Converts a possible list of strings into a more human-readable enumeration
of strings.
:param entry: A single string (returned immediately) or a list of strings to build the enumeration.
:type entry: str | list
:return: The list of strings with commas
:rtype: str
"""
# Sometimes we have strings that are lists??
if isinstance(entry, str):
if entry.startswith("["):
entry = eval(entry)
else:
return entry
if isinstance(entry, list):
if len(entry) < 2:
return entry[0]
if len(entry) > 2:
region_str = ""
for ii, ent in enumerate(entry):
if ii + 1 != len(entry):
region_str += f"{ent}, "
else:
region_str = region_str[:-2] + f" und {ent}"
return region_str
else:
return f"{entry[0]} und {entry[1]}"
else:
raise TypeError(f"Type of entry is invalid: {type(entry)}!")
......@@ -926,7 +926,7 @@ def add_gpkg_data_in_axis(
ax: Axes,
ax_crs: str = "EPSG:32632",
**plot_kwargs,
):
) -> mpatches.Patch:
"""Adds data from a gpkg file to the plot using the boundaries of the axis as the extend of the geometry
:param gpkg_path: The path to the geodatabase with HLNUG data.
......@@ -937,6 +937,8 @@ def add_gpkg_data_in_axis(
:type ax: Axes
:param **plot_kwargs: Keyword arguments passed to the `GeoDataFrame.plot()` function.
:type **plot_kwargs: dict
:return: A legend handle for the data
:rtype: mpatches.Patch
"""
crs = u4sql.get_crs(gpkg_path, table)[0]
region = gp.GeoDataFrame(
......@@ -948,6 +950,42 @@ def add_gpkg_data_in_axis(
if "column" in plot_kwargs.keys():
if plot_kwargs["column"] not in data.keys():
plot_kwargs["column"] = None
# Special plotting for roads
if "fclass" in plot_kwargs.keys():
data = data[data["fclass"] == plot_kwargs["fclass"]]
if not data.empty:
if plot_kwargs["fclass"] == "motorway":
# Orange buffered shape with black edges and black center line
plot_kwargs["facecolor"] = "#ffb300"
plot_kwargs["edgecolor"] = "k"
plot_kwargs["linewidth"] = 0.5
plot_kwargs["zorder"] = 4
del plot_kwargs["fclass"]
gdf = gp.GeoDataFrame(geometry=[data.buffer(25).unary_union])
gdf.plot(ax=ax, **plot_kwargs)
data.plot(ax=ax, color="k", linewidth=0.5)
return mpatches.Patch(label="Autobahn", **plot_kwargs)
elif plot_kwargs["fclass"] == "primary":
# Yellow buffered shape with black edges
plot_kwargs["facecolor"] = "#ffff00"
plot_kwargs["edgecolor"] = "k"
plot_kwargs["linewidth"] = 0.5
plot_kwargs["zorder"] = 4
del plot_kwargs["fclass"]
gdf = gp.GeoDataFrame(geometry=[data.buffer(10).unary_union])
gdf.plot(ax=ax, **plot_kwargs)
return mpatches.Patch(label="Bundesstraße", **plot_kwargs)
elif plot_kwargs["fclass"] == "secondary":
# White buffered shape with black edges
plot_kwargs["facecolor"] = "#ffffff"
plot_kwargs["edgecolor"] = "k"
plot_kwargs["linewidth"] = 0.5
plot_kwargs["zorder"] = 4
del plot_kwargs["fclass"]
gdf = gp.GeoDataFrame(geometry=[data.buffer(10).unary_union])
gdf.plot(ax=ax, **plot_kwargs)
return mpatches.Patch(label="Landstraße", **plot_kwargs)
else:
if len(data) > 0:
data.plot(ax=ax, **plot_kwargs)
else:
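The returned mpatches.Patch serves as a legend handle for the road layer, as used by detailed_map further down. A minimal usage sketch; places_path, ax and leg_handles are placeholders from the calling code:

    handle = add_gpkg_data_in_axis(
        places_path,
        ax=ax,
        table="gis_osm_roads_free_1",
        fclass="motorway",
    )
    if handle:  # None when no matching roads intersect the axis extent
        leg_handles.append(handle)
    ax.legend(handles=leg_handles, loc="upper right")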
......@@ -1347,20 +1385,20 @@ def add_geo_pts(
@_add_or_create
def add_drill_sites(
drill_sites: gp.GeoDataFrame,
leg_handles: list,
leg_labels: list,
ax: Axes,
leg_handles: list = [],
leg_labels: list = [],
) -> Tuple[list, list]:
"""Adds drill sites from the HLNUG server to the axis
:param drill_sites: The drill sites data loaded from HLNUG
:type drill_sites: gp.GeoDataFrame
:param leg_handles: The legend handles for symbology
:type leg_handles: list
:param leg_labels: The legend labels
:type leg_labels: list
:param ax: The axis to add the data to
:type ax: Axes
:param leg_handles: The legend handles for symbology, defaults to [].
:type leg_handles: list
:param leg_labels: The legend labels, defaults to [].
:type leg_labels: list
:return: A tuple with the updated legend handles and labels
:rtype: Tuple[list, list]
"""
......
......@@ -26,8 +26,11 @@ import u4py.io.tiff as u4tiff
import u4py.plotting.axes as u4ax
import u4py.plotting.formatting as u4plotfmt
GLOBAL_DPI = 72 # DPI setting for plots
GLOBAL_TYPES = ["pdf"] # List of filetypes to export for figures
warnings.filterwarnings("ignore", module="matplotlib")
IMAGE_DPI = 150 # DPI setting for plots
IMAGE_FORMATS = ["pdf"] # List of filetypes to export for figures
IS_HLNUG = False
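These module-level settings are meant to be overridden by the driver script when HLNUG data is processed; the same pattern appears in main() near the end of this diff (the u4plots import alias is assumed to match the one used there):

    import u4py.plotting.plots as u4plots

    if project.getboolean("config", "is_hlnug"):
        u4plots.IMAGE_FORMATS = ["png"]  # HLNUG reports embed PNG figures
        u4plots.IS_HLNUG = True          # plot the Classified_Shapes outlines instead of contours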
def plot_inversion_results(
......@@ -352,7 +355,7 @@ def plot_hotspots(
if title:
ax.set_title(title)
fig.tight_layout()
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(f"{fpath_woex}.{ftype}")
return (h.get_offsets(), h.get_array(), crs)
......@@ -401,7 +404,7 @@ def plot_GroundMotionAnalyzer(
overwrite=overwrite,
)
if output_filepath:
fig, ax = plt.subplots(figsize=(6, 10), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(6, 10), dpi=IMAGE_DPI)
u4ax.add_tile(gma_tile_path, ax=ax, cmap="Reds", vm=(-2, -1), zorder=3)
fpath_woex = os.path.splitext(output_filepath)[0]
u4ax.add_basemap(ax=ax, crs=crs)
......@@ -417,7 +420,7 @@ def plot_GroundMotionAnalyzer(
ax.set_title(title)
fig.tight_layout()
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(f"{fpath_woex}.{ftype}")
plt.close(fig)
......@@ -447,7 +450,7 @@ def plot_site_statistics(
res[y_key],
)
fig.tight_layout()
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(
site_dir, f"{x_key}_vs_{y_key}_{res['group']}.{ftype}"
......@@ -505,7 +508,7 @@ def plot_shape(
ax=ax,
crs=sub_set.crs, # source=contextily.providers.CartoDB.Positron
)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(os.path.join(save_folder, f"Site_{group}.{ftype}"))
plt.close(fig)
......@@ -553,7 +556,7 @@ def geology_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_GK25.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -563,7 +566,7 @@ def geology_map(
# Make geodataframe and add to plot
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C0", zorder=5, linewidth=3)
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -640,12 +643,23 @@ def geology_map(
)
# Plot other anomalies
u4ax.add_gpkg_data_where(
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="C0",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
linewidth=1,
)
......@@ -773,7 +787,7 @@ def geology_map(
source=contextily.providers.TopPlusOpen.Grey,
)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_GK25.{ftype}")
)
......@@ -821,7 +835,7 @@ def hydrogeology_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_HUEK200.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -833,7 +847,7 @@ def hydrogeology_map(
# Make geodataframe and add to plot
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C0", zorder=5, linewidth=3)
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -878,12 +892,23 @@ def hydrogeology_map(
)
# Plot other anomalies
u4ax.add_gpkg_data_where(
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
edgecolor="C0",
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
linewidth=1,
)
......@@ -895,7 +920,7 @@ def hydrogeology_map(
crs=hydro_units_data.crs,
source=contextily.providers.TopPlusOpen.Grey,
)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_HUEK200.{ftype}")
)
......@@ -945,7 +970,7 @@ def topsoil_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_BFD50.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -955,7 +980,7 @@ def topsoil_map(
# Make geodataframe and add to plot
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C0", zorder=5, linewidth=3)
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -1004,12 +1029,23 @@ def topsoil_map(
)
# Plot other anomalies
u4ax.add_gpkg_data_where(
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
edgecolor="C0",
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
linewidth=1,
)
......@@ -1021,7 +1057,7 @@ def topsoil_map(
crs=soil_data.crs,
source=contextily.providers.TopPlusOpen.Grey,
)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_BFD50.{ftype}")
)
......@@ -1065,7 +1101,7 @@ def satimg_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_satimg.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1075,7 +1111,7 @@ def satimg_map(
# Make gdf and plot data
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C0")
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -1086,21 +1122,32 @@ def satimg_map(
u4ax.add_basemap(
ax=ax, crs=crs, source=contextily.providers.Esri.WorldImagery
)
u4ax.add_gpkg_data_where(
# Plot other anomalies
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="w",
facecolor="None",
linewidth=2,
)
else:
# For HLNUG data use the
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
edgecolor="C1",
table="Classified_Shapes",
edgecolor="w",
facecolor="None",
alpha=0.75,
linewidth=1,
linewidth=2,
)
# Formatting and save
u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_satimg.{ftype}")
)
......@@ -1116,6 +1163,7 @@ def dem_map(
contour_path: os.PathLike,
plot_buffer: float,
overwrite: bool,
shp_path: os.PathLike = "",
):
"""Creates a hillshade map of the digital elevation model in the area of interest.
......@@ -1141,7 +1189,7 @@ def dem_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_dem.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1153,7 +1201,7 @@ def dem_map(
# Loading data and plot it
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C0")
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -1166,22 +1214,51 @@ def dem_map(
)
u4ax.add_dem(region, dem_path, ax=ax)
# Add contours of individual anomalies
u4ax.add_gpkg_data_where(
# Plot other anomalies
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
buffer=5,
edgecolor="C1",
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
alpha=0.5,
linewidth=1,
)
if shp_path:
drill_sites = u4web.query_hlnug(
"geologie/bohrdatenportal/MapServer",
"Archivbohrungen, Endteufe [m]",
region=region,
suffix=f"_bohr_{row[1].group:05}",
out_folder=shp_path,
)
if not drill_sites.empty:
u4ax.add_drill_sites(
drill_sites=drill_sites,
ax=ax,
)
leg_handles, leg_labels = u4ax.add_drill_sites(
drill_sites=drill_sites,
leg_handles=[],
leg_labels=[],
ax=ax,
)
ax.legend(leg_handles, leg_labels)
# Formatting and other stuff
u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_dem.{ftype}")
)
......@@ -1223,7 +1300,7 @@ def slope_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_slope.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1233,7 +1310,7 @@ def slope_map(
# Make geodataframe and add to plot
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C0")
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -1245,21 +1322,31 @@ def slope_map(
geometry=[u4spatial.bounds_to_polygon(ax)], crs=crs
)
u4ax.add_slope(region, dem_path, ax=ax)
u4ax.add_gpkg_data_where(
# Plot other anomalies
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="w",
facecolor="None",
linewidth=2,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
buffer=5,
table="Classified_Shapes",
edgecolor="w",
facecolor="None",
alpha=0.75,
linewidth=1,
linewidth=2,
)
# Formatting and other stuff
u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_slope.{ftype}")
)
......@@ -1305,7 +1392,7 @@ def aspect_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_aspect.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1315,7 +1402,7 @@ def aspect_map(
# Make geodataframe and add to plot
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="k")
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -1325,19 +1412,30 @@ def aspect_map(
geometry=[u4spatial.bounds_to_polygon(ax)], crs=crs
)
u4ax.add_aspect(region, dem_path, ax=ax)
u4ax.add_gpkg_data_where(
# Plot other anomalies
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
buffer=5,
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
linewidth=0.5,
linewidth=1,
)
# Formatting and other stuff
u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_aspect.{ftype}")
)
......@@ -1380,7 +1478,7 @@ def aspect_slope_map(
output_path, f"{row[1].group:05}_aspect_slope.{ftype}"
)
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1389,7 +1487,7 @@ def aspect_slope_map(
logging.info(f"Plotting aspect-slope map of group {row[1].group:05}.")
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="k")
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -1399,21 +1497,31 @@ def aspect_slope_map(
geometry=[u4spatial.bounds_to_polygon(ax)], crs=crs
)
u4ax.add_aspect_slope(region, dem_path, ax=ax)
u4ax.add_gpkg_data_where(
# Plot other anomalies
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
buffer=5,
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
linewidth=0.5,
zorder=5,
linewidth=1,
)
# Formatting and other stuff
u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(
output_path, f"{row[1].group:05}_aspect_slope.{ftype}"
......@@ -1455,7 +1563,7 @@ def diffplan_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_diffplan.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1467,7 +1575,7 @@ def diffplan_map(
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
# Plotting
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C0")
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
......@@ -1482,7 +1590,7 @@ def diffplan_map(
# Format and save
u4plotfmt.add_scalebar(ax=ax, width=2 * plot_buffer)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_diffplan.{ftype}")
)
......@@ -1498,6 +1606,7 @@ def detailed_map(
contour_path: os.PathLike,
plot_buffer: float,
overwrite: bool,
places_path: os.PathLike,
) -> Tuple[tuple, tuple]:
"""Makes a detailed overview map of the region including some geological features.
......@@ -1515,6 +1624,8 @@ def detailed_map(
:type contour_path: os.PathLike
:param plot_buffer: The buffer width around the area of interest.
:type plot_buffer: float
:param places_path: The path where other shapefiles are found.
:type places_path: os.PathLike
"""
# Setup Paths
output_path = os.path.join(output_path, suffix)
......@@ -1524,7 +1635,7 @@ def detailed_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_map.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1534,13 +1645,15 @@ def detailed_map(
# Make geodataframe and add to plot
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=GLOBAL_DPI)
fig, ax = plt.subplots(figsize=(16 / 2.54, 11.3 / 2.54), dpi=IMAGE_DPI)
shp_gdf.plot(ax=ax, fc="None", ec="C1", label="Region")
shp_gdf.buffer(plot_buffer).plot(ax=ax, fc="None", ec="None")
# Make plot full image size and fix axis to current extent
u4plotfmt.full_screen_map(ax=ax)
# Plot other anomalies
if not IS_HLNUG:
# Add additional geological data to plot.
u4ax.add_gpkg_data_in_axis(
hlnug_path,
......@@ -1578,13 +1691,42 @@ def detailed_map(
linestyle=":",
label="Senkungsmulden",
)
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
"thresholded_contours_all_shapes",
ax=ax,
fc="None",
column="color_levels",
cmap="RdBu",
table="thresholded_contours_all_shapes",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the other shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax,
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
linewidth=1,
)
handle_motorway = u4ax.add_gpkg_data_in_axis(
places_path,
ax=ax,
table="gis_osm_roads_free_1",
fclass="motorway",
)
handle_primary = u4ax.add_gpkg_data_in_axis(
places_path,
ax=ax,
table="gis_osm_roads_free_1",
fclass="primary",
)
handle_secondary = u4ax.add_gpkg_data_in_axis(
places_path,
ax=ax,
table="gis_osm_roads_free_1",
fclass="secondary",
)
ax.annotate(
row[1].locations.replace(", ", "\n"),
......@@ -1598,19 +1740,23 @@ def detailed_map(
# Catches UserWarning for unsupported handles.
warnings.simplefilter("ignore")
h, _ = ax.get_legend_handles_labels()
if not IS_HLNUG:
h.append(mpatches.Patch(ec=mcm.RdBu(0), fc="None", label="Senkung"))
h.append(mpatches.Patch(ec=mcm.RdBu(256), fc="None", label="Hebung"))
ax.legend(
handles=h,
loc="upper right",
)
else:
h.append(mpatches.Patch(ec="k", fc="None", label="Rutschung"))
for handle in [handle_motorway, handle_primary, handle_secondary]:
if handle:
h.append(handle)
if len(h) > 0:
ax.legend(handles=h, loc="upper right").set_zorder(10)
# Formatting and saving
u4ax.add_basemap(
ax=ax, crs=crs, source=contextily.providers.CartoDB.Voyager
)
u4plotfmt.add_scalebar(ax=ax, width=plot_buffer * 4)
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_map.{ftype}")
)
......@@ -1648,7 +1794,7 @@ def timeseries_map(
os.path.exists(
os.path.join(output_path, f"{row[1].group:05}_psi.{ftype}")
)
for ftype in GLOBAL_TYPES
for ftype in IMAGE_FORMATS
]
if np.all(fexists) and not overwrite:
logging.info(f"Skipping existing plot {row[1].group:05}")
......@@ -1666,7 +1812,7 @@ def timeseries_map(
shp_gdf = gp.GeoDataFrame(geometry=[row[1].geometry], crs=crs)
# Create figure and axes
fig = plt.figure(figsize=(13, 5), dpi=GLOBAL_DPI)
fig = plt.figure(figsize=(13, 5), dpi=IMAGE_DPI)
gs = fig.add_gridspec(ncols=2, width_ratios=(1, 3))
ax_map = fig.add_subplot(gs[0])
ax_ts = fig.add_subplot(gs[1])
......@@ -1688,14 +1834,25 @@ def timeseries_map(
# Add PSI in Map
ax_map.axis("equal")
u4ax.add_gpkg_data_where(
# Plot other anomalies
if not IS_HLNUG:
# For non HLNUG data use the thresholded contours
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax_map,
table="thresholded_contours_all_shapes",
where=f"groups=={row[1].group}",
edgecolor="k",
facecolor="None",
linewidth=1,
)
else:
# For HLNUG data use the classified shapes
u4ax.add_gpkg_data_in_axis(
contour_path,
ax=ax_map,
table="Classified_Shapes",
edgecolor="k",
facecolor="None",
alpha=0.75,
linewidth=1,
)
fig.tight_layout()
......@@ -1747,7 +1904,7 @@ def timeseries_map(
ax_ts.set_xlabel("Time")
ax_ts.set_ylabel("Displacement (mm)")
fig.tight_layout()
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
fig.savefig(
os.path.join(output_path, f"{row[1].group:05}_psi.{ftype}")
)
......@@ -1788,7 +1945,7 @@ def plot_legend(
logging.info("Creating legend")
figl = plt.figure(
figsize=(16 / 2.54, len(leg_labels) * 0.25), dpi=GLOBAL_DPI
figsize=(16 / 2.54, len(leg_labels) * 0.25), dpi=IMAGE_DPI
)
legend = figl.legend(
handles=leg_handles, labels=leg_labels, framealpha=1, frameon=False
......@@ -1797,7 +1954,7 @@ def plot_legend(
bbox = bbox.from_extents(*(bbox.extents + np.array([-5, -5, 5, 5])))
bbox = bbox.transformed(figl.dpi_scale_trans.inverted())
for ftype in GLOBAL_TYPES:
for ftype in IMAGE_FORMATS:
figl.savefig(
os.path.join(output_path, f"{grp}_{suffix}_leg.{ftype}"),
bbox_inches=bbox,
......
......@@ -28,10 +28,10 @@ def main():
proj_path = args.input
else:
# proj_path = r"~\Documents\ArcGIS\U4_projects\PostProcess_ClassifiedShapesHLNUG.u4project"
# proj_path = (
# "~/Documents/umwelt4/PostProcess_ClassifiedShapesHLNUG.u4project"
# )
proj_path = "~/Documents/umwelt4/PostProcess_ClassifiedShapes_onlyLarge.u4project"
proj_path = (
"~/Documents/umwelt4/PostProcess_ClassifiedShapesHLNUG.u4project"
)
# proj_path = "~/Documents/umwelt4/PostProcess_ClassifiedShapes_onlyLarge.u4project"
# proj_path = (
# "~/Documents/umwelt4/PostProcess_ClassifiedShapes_hazard.u4project"
# )
......@@ -48,7 +48,8 @@ def main():
)
if project.getboolean("config", "is_hlnug"):
u4plots.GLOBAL_TYPES = ["png"]
u4plots.IMAGE_FORMATS = ["png"]
u4plots.IS_HLNUG = True
# Setting up paths
output_path = os.path.join(
......@@ -73,6 +74,9 @@ def main():
"DGM1_2021",
"raster_2021",
)
if project.getboolean("config", "is_hlnug"):
contour_path = class_shp_fp
else:
contour_path = os.path.join(
project["paths"]["results_path"],
"thresholded_contours_all_shapes.gpkg",
......@@ -166,20 +170,17 @@ def main():
else:
u4tex.multi_report(output_path)
else:
ii = 0
for row in tqdm(
gdf_filtered.iterrows(),
desc="Generating docx files",
total=len(gdf_filtered),
):
if ii < 20:
u4docx.site_report(
row,
output_path,
project["metadata"]["report_suffix"],
"docx_reports" + project["metadata"]["report_suffix"],
hlnug_data,
)
ii += 1
def filter_shapes(
......@@ -275,6 +276,15 @@ def map_worker(
:type contour_path: os.PathLike
"""
shp_path = os.path.join(
project["paths"]["places_path"],
"Classifier_shapes",
"Web_Queries" + suffix,
)
places_path = os.path.join(
project["paths"]["places_path"], "OSM_shapes", "all_shapes.gpkg"
)
u4plots.detailed_map(
row,
crs,
......@@ -284,6 +294,7 @@ def map_worker(
contour_path,
plot_buffer=250,
overwrite=overwrite_plots,
places_path=places_path,
)
u4plots.satimg_map(
row,
......@@ -294,6 +305,7 @@ def map_worker(
plot_buffer=100,
overwrite=overwrite_plots,
)
if not u4plots.IS_HLNUG:
u4plots.diffplan_map(
row,
crs,
......@@ -313,6 +325,18 @@ def map_worker(
plot_buffer=100,
overwrite=overwrite_plots,
)
else:
u4plots.dem_map(
row,
crs,
output_path,
"known_features",
dem_path,
contour_path,
plot_buffer=100,
overwrite=overwrite_plots,
shp_path=shp_path,
)
u4plots.slope_map(
row,
crs,
......@@ -343,11 +367,6 @@ def map_worker(
plot_buffer=100,
overwrite=overwrite_plots,
)
shp_path = os.path.join(
project["paths"]["places_path"],
"Classifier_shapes",
"Web_Queries" + suffix,
)
u4plots.geology_map(
row,
crs,
......