diff --git a/u4py/analysis/spatial.py b/u4py/analysis/spatial.py
index 56503b3e4b57749660db5de6c37e9c835291113f..0c175eed4d94af798386b21f7a315976c67ad417 100644
--- a/u4py/analysis/spatial.py
+++ b/u4py/analysis/spatial.py
@@ -364,7 +364,7 @@ def select_points_region(
 def select_points_point(
     point: list | Tuple | gp.GeoDataFrame | shapely.Point,
     radius: float,
-    psi_file_path: os.PathLike,
+    psi_file_path: os.PathLike | List[os.PathLike],
     split_points: bool = False,
     crs: str = "EPSG:32632",
 ) -> gp.GeoDataFrame | List[gp.GeoDataFrame]:
@@ -377,7 +377,7 @@ def select_points_point(
     :param radius: The radius to calculate the buffer
     :type radius: float
     :param psi_file_path: The file or folder to select from
-    :type psi_file_path: os.PathLike
+    :type psi_file_path: os.PathLike | List[os.PathLike]
     :param split_points: Splits the output into a list of points based on the selection, defaults to False
     :type split_points: bool, optional
     :param crs: The coordinate system of the output points, defaults to "EPSG:32632".
diff --git a/u4py/io/gpkg.py b/u4py/io/gpkg.py
index 8d1896af5c5ad48059977a90c5f20e8af39aead8..5f2a90ce02b420d24b7e40a3aabfede7bcd5201f 100644
--- a/u4py/io/gpkg.py
+++ b/u4py/io/gpkg.py
@@ -15,6 +15,7 @@ from osgeo import ogr
 import u4py.analysis.spatial as u4spatial
 import u4py.io.sql as u4sql
 import u4py.io.tiff as u4tiff
+import u4py.utils.convert as u4conv
 
 ogr.UseExceptions()
 
@@ -175,13 +176,12 @@ def load_gpkg_data_point(
     gpkg_crs = u4sql.get_crs(gpkg_file_path, table)[0]
     region = u4spatial.region_around_point(point, radius, crs=gpkg_crs)
 
-    data = u4sql.table_to_dict(gpkg_file_path, table, region.bounds)
-    clipped_data = u4spatial.clip_data_points(
-        data, region, split_points=split_points, crs=gpkg_crs
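+    # Delegate the spatial selection to the OGR-based region loader and
+    # reformat the returned GeoDataFrame into the dict structure used downstream.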
+    points = load_gpkg_data_region_ogr(
+        region, gpkg_file_path, table, crs=region.crs
     )
-    if clipped_data:
-        clipped_data["crs"] = gpkg_crs
-    return clipped_data, region
+    data = u4conv.reformat_gdf_to_dict(points)
+
+    return data, region
 
 
 def load_gpkg_data_osm(
diff --git a/u4py/io/psi.py b/u4py/io/psi.py
index 675ffc38aace1207a3642270725c9c49986f5789..3f89d075fd2312bbd0a3300e387a29c27df005dc 100644
--- a/u4py/io/psi.py
+++ b/u4py/io/psi.py
@@ -116,6 +116,7 @@ def get_point_data(
     radius: float,
     region_name: str,
     source_fpath: os.PathLike,
+    table: str = "vertikal",
     overwrite: bool = False,
 ) -> dict:
     """Returns the points with data from `source_fpath` that are within
@@ -137,13 +138,13 @@ def get_point_data(
     if isinstance(point, shapely.Point):
         point = point.xy
     _, pkl_fpath = u4files.set_data_file_paths(
-        source_fpath, region_name, "point"
+        source_fpath, region_name, f"point_{table}"
     )
     _, shp_fpath = u4files.set_point_file_paths(
-        source_fpath, region_name, "point"
+        source_fpath, region_name, f"point_{table}"
     )
     _, region_fpath = u4files.set_point_file_paths(
-        source_fpath, region_name, "point_region"
+        source_fpath, region_name, f"point_region_{table}"
     )
 
     if (
@@ -152,12 +153,16 @@ def get_point_data(
         or overwrite
     ):
         data, region = u4gpkg.load_gpkg_data_point(
-            point=point, radius=radius, gpkg_file_path=source_fpath
+            point=point,
+            radius=radius,
+            gpkg_file_path=source_fpath,
+            table=table,
         )
-        u4spatial.xy_data_to_gdf(data["x"], data["y"]).to_file(shp_fpath)
-        region.to_file(region_fpath)
-        with open(pkl_fpath, "wb") as pkl_file:
-            pkl.dump(data, pkl_file)
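+        # Only cache the selection when points were found, so empty results
+        # do not leave empty files behind.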
+        if data:
+            u4spatial.xy_data_to_gdf(data["x"], data["y"]).to_file(shp_fpath)
+            region.to_file(region_fpath)
+            with open(pkl_fpath, "wb") as pkl_file:
+                pkl.dump(data, pkl_file)
     else:
         with open(pkl_fpath, "rb") as pkl_file:
             data = pkl.load(pkl_file)
diff --git a/u4py/io/tiff.py b/u4py/io/tiff.py
index 70012e65b0a85d8ea861450bc4a6bdebae5e4819..ff8513934f6febc3ea2c9443a3259afd24e8f7fb 100644
--- a/u4py/io/tiff.py
+++ b/u4py/io/tiff.py
@@ -15,7 +15,6 @@ import shapely
 from osgeo import gdal
 from tqdm import tqdm
 
-
 import u4py.analysis.processing as u4proc
 import u4py.analysis.spatial as u4spatial
 import u4py.io.files as u4files
@@ -160,17 +159,59 @@ def get_point_tiff(
     :param source_file_path: The path to the folder where the tiffs are stored (in `TB` folders)
     :type source_file_path: os.PathLike
     :param overwrite: Whether to overwrite the output shapefile, defaults to False
-    :return: A list of paths to the tiff files within the osm region.
+    :return: A list of paths to the tiff files within the region.
     :rtype: list[os.PathLike]
     """
     logging.info("Getting tiffs around point")
-
     file_list = u4files.get_file_list_tiff(source_file_path)
+    if not file_list:
+        file_list = u4files.get_file_list_adf(source_file_path)
+    if not file_list:
+        raise FileNotFoundError("No suitable DEM folders or tiffs found.")
     points = u4spatial.select_points_point(point, radius, file_list)
     out_file_list = np.array(file_list)[points.source_ind]
     return np.unique(out_file_list).tolist()
 
 
+def get_pointlist_tiff(
+    points: gp.GeoDataFrame,
+    source_file_path: os.PathLike,
+) -> list[Tuple[int, os.PathLike]]:
+    """Returns paths of tiff files where the points are located.
+
+    :param points: A geodataframe of points
+    :type points: gp.GeoDataFrame
+    :param source_file_path: A path where tiffs or adf raster data is found
+    :type source_file_path: os.PathLike
+    :raises FileNotFoundError: When no suitable folder was given
+    :return: A list of tuples pairing each point's index with the tiff file that contains it
+    :rtype: list[Tuple[int, os.PathLike]]
+    """
+    logging.info("Getting tiffs around point")
+    file_list = u4files.get_file_list_tiff(source_file_path)
+    if not file_list:
+        file_list = u4files.get_file_list_adf(source_file_path)
+    if not file_list:
+        raise FileNotFoundError("No suitable DEM folders or tiffs found.")
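+    # Build footprint polygons for all rasters and spatially join the points,
+    # so every point is matched to the tile whose footprint contains it.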
+    with rio.open(file_list[0]) as dataset:
+        raster_crs = dataset.crs
+    polygons, file_list_index = u4spatial._get_coords(file_list)
+    raster_gdf = gp.GeoDataFrame(
+        {"geometry": polygons, "file_list_index": file_list_index},
+        crs=raster_crs,
+    )
+    if points.crs != raster_gdf.crs:
+        points = points.to_crs(raster_gdf.crs)
+    subset = gp.sjoin(raster_gdf, points, how="inner", predicate="contains")
+    tiff_list = [
+        (idx, file_list[fii])
+        for fii, idx in zip(
+            subset.file_list_index.to_list(), subset.index_right.to_list()
+        )
+    ]
+    return tiff_list
+
+
 def ndarray_to_geotiff(
     Z: np.ndarray,
     bounds: tuple,
diff --git a/u4py/scripts/examples/correlation_external_data/Compare_BBD_21_23.py b/u4py/scripts/examples/correlation_external_data/Compare_BBD_21_23.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d3d113203bb82265cfee43f03acf6717728a45f
--- /dev/null
+++ b/u4py/scripts/examples/correlation_external_data/Compare_BBD_21_23.py
@@ -0,0 +1,160 @@
+"""Compares the detected ground motion with local GNSS stations"""
+
+import os
+from pathlib import Path
+
+import matplotlib.pyplot as plt
+import numpy as np
+import uncertainties as unc
+from scipy import signal as spsig
+
+import u4py.io.gpkg as u4gpkg
+import u4py.utils.convert as u4convert
+import u4py.utils.projects as u4projects
+
+
+def main():
+    # Paths
+    project = u4projects.get_project(
+        proj_path=Path(
+            r"~\Documents\ArcGIS\U4_projects\Examples\Compare_with_GPS_GPKG.u4project"
+        ).expanduser(),
+        required=["ext_path", "output_path"],
+        interactive=False,
+    )
+    psi_path_bbd1 = os.path.join(
+        project["paths"]["base_path"],
+        "Data_2021",
+        "INSAR_Data",
+        "BBD_2021_vertikal.gpkg",
+    )
+    psi_path_bbd2 = os.path.join(
+        project["paths"]["base_path"], "Data_2023", "hessen_l3_clipped.gpkg"
+    )
+
+    output_folder = os.path.join(
+        project["paths"]["output_path"], "Compare_2021_2023"
+    )
+    os.makedirs(output_folder, exist_ok=True)
+
+    data_1 = u4convert.reformat_gdf_to_dict(
+        u4gpkg.load_gpkg_data_where_ogr(
+            psi_path_bbd1, "BBD_2021_vertikal", "ID==1964510"
+        )
+    )
+    data_2 = u4convert.reformat_gdf_to_dict(
+        u4gpkg.load_gpkg_data_where_ogr(
+            psi_path_bbd2, "vertikal", "ID==1442325"
+        )
+    )
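+    # Restrict the 2023 series to the span of the 2021 series before taking
+    # the difference; this assumes both deliveries share the same acquisition
+    # dates over the overlapping period.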
+    overlapping_data = data_2["timeseries"][0][
+        data_2["time"] <= data_1["time"][-2]
+    ]
+    difference = data_1["timeseries"][0] - overlapping_data
+    fig, axes = plt.subplots(nrows=2, sharex=True, figsize=(6 * 1.5, 3 * 1.5))
+    axes[0].plot(
+        data_1["time"], data_1["timeseries"][0], label="BBD 2015-2021"
+    )
+    axes[0].plot(
+        data_2["time"], data_2["timeseries"][0], label="BBD 2015-2022"
+    )
+    axes[1].plot(
+        data_1["time"],
+        difference,
+        color="k",
+        label="Differenz",
+    )
+    avg_diff = unc.ufloat(np.nanmean(difference), 2 * np.nanstd(difference))
+    axes[1].annotate(
+        f"Durchschnitt: {avg_diff} mm".replace("+/-", "$\\pm$"),
+        (0.975, 0.05),
+        xycoords="axes fraction",
+        horizontalalignment="right",
+    )
+
+    axes[0].set_ylabel("Bodenbewegung (mm)")
+    axes[1].set_ylabel("Differenz (mm)")
+    axes[1].set_xlabel("Zeitraum")
+    fig.legend(loc="lower center", ncols=3)
+    fig.subplots_adjust(left=0.075, right=0.99, bottom=0.175, top=0.99)
+    fig.savefig(os.path.join(output_folder, "BBD_21_vs_23.png"))
+    fig.savefig(os.path.join(output_folder, "BBD_21_vs_23.pdf"))
+
+    fig2, ax2 = plot_xcor(
+        data_1["time"],
+        data_1["timeseries"][0],
+        data_2["time"],
+        data_2["timeseries"][0],
+    )
+    fig2.savefig(os.path.join(output_folder, "BBD_xcorr.png"))
+    fig2.savefig(os.path.join(output_folder, "BBD_xcorr.pdf"))
+
+
+def align_data(t1, y1, t2, y2):
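+    """Sorts both series in time, drops non-finite samples and interpolates
+    them onto a common time base using the coarser of the two sampling rates.
+    """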
+    t1sort = np.argsort(t1)
+    t1 = t1[t1sort]
+    y1 = y1[t1sort]
+    nany1 = np.isfinite(y1)
+    t1 = t1[nany1]
+    y1 = y1[nany1]
+    t2sort = np.argsort(t2)
+    t2 = t2[t2sort]
+    y2 = y2[t2sort]
+    nany2 = np.isfinite(y2)
+    t2 = t2[nany2]
+    y2 = y2[nany2]
+
+    t1 = u4convert.get_floatyear(t1)
+    t2 = u4convert.get_floatyear(t2)
+    dt = np.max([np.mean(np.diff(t1)), np.mean(np.diff(t2))])
+    tstart = np.max([t1[0], t2[0]])
+    tend = np.min([t1[-1], t2[-1]])
+    t = np.arange(tstart, tend, dt)
+    sig1 = np.interp(t, t1, y1)
+    sig2 = np.interp(t, t2, y2)
+    return t, sig1, sig2
+
+
+def plot_xcor(
+    t1,
+    y1,
+    t2,
+    y2,
+    y1_label="Signal 1",
+    y2_label="Signal 2",
+    col1="C0",
+    col2="C1",
+    col3="C4",
+):
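+    """Plots both resampled signals and their normalized cross correlation,
+    marking the lag of maximum correlation in days.
+    """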
+    t, sig1, sig2 = align_data(t1, y1, t2, y2)
+    xc12 = spsig.correlate(sig1, sig2)
+    xc12 /= np.max(xc12)
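+    # Convert lag counts to days: the shared time base from align_data is in
+    # fractional years, so one sample step is np.diff(t)[0] years.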
+    lags = (
+        spsig.correlation_lags(len(sig1), len(sig2)) * np.diff(t)[0] * 365.25
+    )
+    max_lag = lags[np.argmax(xc12)]
+
+    fig, axes = plt.subplots(nrows=3, figsize=(10, 7), dpi=300)
+    axes[0].plot(t, sig1, "s-", color=col1)
+    axes[0].set_xlabel("Year")
+    axes[0].set_ylabel(y1_label)
+    axes[1].plot(t, sig2, "s-", color=col2)
+    axes[1].set_xlabel("Year")
+    axes[1].set_ylabel(y2_label)
+    axes[2].plot(lags, xc12, "s-", color=col3)
+    axes[2].set_xlabel("Lag (days)")
+    axes[2].axvline(max_lag, color="k")
+    axes[2].annotate(
+        f"{int(max_lag)} days",
+        (max_lag, 0),
+        (0.5, 0.5),
+        textcoords="offset fontsize",
+    )
+    axes[2].set_ylabel("Cross Correlation ()")
+    fig.tight_layout()
+
+    return fig, axes
+
+
+if __name__ == "__main__":
+    main()
diff --git a/u4py/scripts/examples/correlation_external_data/Compare_With_GPS.py b/u4py/scripts/examples/correlation_external_data/Compare_With_GPS.py
index 22fc6af77b672422c48147c10c61c9c3e78106a1..3acba13d83542c1a0b82c459a808199b81e06eb4 100644
--- a/u4py/scripts/examples/correlation_external_data/Compare_With_GPS.py
+++ b/u4py/scripts/examples/correlation_external_data/Compare_With_GPS.py
@@ -1,17 +1,22 @@
-""" Compares the detected ground motion with local GNSS stations"""
+"""Compares the detected ground motion with local GNSS stations"""
 
 import datetime
 import os
 from pathlib import Path
 
+import geopandas as gp
+import matplotlib.axes as mplax
+import matplotlib.lines as mline
 import matplotlib.pyplot as plt
 import numpy as np
 import scipy.ndimage as spimage
 import scipy.stats as spstats
+import uncertainties as unc
 
 import u4py.addons.gnss as u4gnss
 import u4py.io.files as u4files
 import u4py.io.psi as u4psi
+import u4py.plotting.axes as u4ax
 import u4py.utils.convert as u4convert
 import u4py.utils.projects as u4projects
 
@@ -20,7 +25,7 @@ def main():
     # Options
 
     distance = 75  # Distance around gnss station to look for psis
-    filter_width = 3  # median filter size, 0=nofilter
+    filter_width = 0  # median filter size, 0=nofilter
     take_diff = False  # Take differences between points to correlate
     overwrite = False
     id_selection = ["BADH00DEU", "KLOP00DEU", "FFMJ00DEU"]
@@ -30,12 +35,40 @@ def main():
         proj_path=Path(
             r"~\Documents\ArcGIS\U4_projects\Examples\Compare_with_GPS_GPKG.u4project"
         ).expanduser(),
-        required=["ext_path", "psi_path", "output_path"],
+        required=["ext_path", "output_path"],
         interactive=False,
     )
-    psi_path = os.path.join(
-        project["paths"]["psi_path"], "hessen_l3_clipped.gpkg"
+    psi_path_bbd1 = os.path.join(
+        project["paths"]["base_path"],
+        "Data_2021",
+        "INSAR_Data",
+        "BBD_2021_vertikal.gpkg",
     )
+    psi_path_bbd2 = os.path.join(
+        project["paths"]["base_path"], "Data_2023", "hessen_l3_clipped.gpkg"
+    )
+    psi_path_egms1 = os.path.join(
+        project["paths"]["base_path"],
+        "Data_EGMS_2015-2021",
+        "EGMS_2015_2021_vertikal.gpkg",
+    )
+    psi_path_egms2 = os.path.join(
+        project["paths"]["base_path"],
+        "Data_EGMS_2018-2022",
+        "EGMS_2018_2022_vertikal.gpkg",
+    )
+    psi_path_list = [
+        psi_path_bbd1,
+        psi_path_bbd2,
+        psi_path_egms1,
+        psi_path_egms2,
+    ]
+    tables = [
+        "BBD_2021_vertikal",
+        "vertikal",
+        "EGMS_2015_2021_vertikal",
+        "EGMS_2018_2022_vertikal",
+    ]
     output_folder = os.path.join(
         project["paths"]["output_path"], "Compare_With_GPS"
     )
@@ -51,80 +84,137 @@ def main():
     sel = [stations[stations.ID == ii] for ii in id_selection]
 
     fig, axes = plt.subplots(
-        nrows=len(sel), sharex=True, figsize=(8, len(sel) * 3)
+        nrows=len(sel), sharex=True, sharey=True, figsize=(12, 6)
     )
-    min_t = datetime.datetime(3000, 1, 1)
-    max_t = datetime.datetime(1000, 1, 1)
     for ii, station in enumerate(sel):
-        data_vert = u4psi.get_point_data(
-            station.geometry,
-            distance,
-            f"{station.ID.values[0]}_GNSS",
-            psi_path,
-            overwrite=overwrite,
-        )
-
-        if data_vert:
-            gnss_data = u4convert.gnss_dat_to_dict(gnss_file_list[ii])
-
-            psi_t = data_vert["time"]
-            psi_v = np.nanmedian(data_vert["timeseries"], axis=0)
-            gnss_t = gnss_data["gps_datetime"]
-            gnss_v = gnss_data["res_up"]
-            if filter_width:
-                gnss_v = spimage.median_filter(gnss_v, filter_width)
-
-            if take_diff:
-                psi_t = psi_t[:-1]
-                psi_v = np.diff(psi_v)
-                gnss_t = gnss_t[:-1]
-                gnss_v = np.diff(gnss_v)
-
-            correlate_time_series(
-                psi_t,
-                psi_v,
-                gnss_t,
-                gnss_v,
-                station.name.values[0],
+        gnss_data = u4convert.gnss_dat_to_dict(gnss_file_list[ii])
+        gnss_t = gnss_data["gps_datetime"]
+        gnss_v = gnss_data["res_up"]
+        if filter_width:
+            gnss_v = spimage.median_filter(gnss_v, filter_width)
+        axes[ii].plot(gnss_t, gnss_v, "-", color="k")
+
+        for cc, (psi_path, table) in enumerate(zip(psi_path_list, tables)):
+            plot_psi_vs_gnss(
+                gnss_data,
+                psi_path,
+                table,
+                station,
+                distance,
+                axes[ii],
                 output_folder,
+                filter_width,
+                take_diff,
+                overwrite,
+                color=f"C{cc}",
             )
-            axes[ii].plot(
-                gnss_t,
-                gnss_v,
-                "-",
-            )
-            axes[ii].plot(
-                psi_t,
-                psi_v,
-                "-",
-            )
-            if psi_t[0] < min_t:
-                min_t = psi_t[0]
-            if psi_t[-1] > max_t:
-                max_t = psi_t[-1]
-            axes[ii].set_title(station.name.values[0])
-            axes[ii].annotate(
-                f"#PS: {data_vert['num_points']}",
-                (0.05, 0.05),
-                xycoords="axes fraction",
-            )
-    axes[0].set_xlim(
-        min_t - datetime.timedelta(31), max_t + datetime.timedelta(31)
-    )
-    axes[0].legend(
-        ["GNSS", f"PSI {distance}m Radius"],
+    fig.legend(
+        handles=[
+            mline.Line2D([], [], color="k", label="GNSS"),
+            mline.Line2D(
+                [],
+                [],
+                marker=".",
+                linestyle="None",
+                color="C0",
+                label="BBD 2015-2021",
+            ),
+            mline.Line2D(
+                [],
+                [],
+                marker=".",
+                linestyle="None",
+                color="C1",
+                label="BBD 2015-2022",
+            ),
+            mline.Line2D(
+                [],
+                [],
+                marker=".",
+                linestyle="None",
+                color="C2",
+                label="EGMS 2016-2021",
+            ),
+            mline.Line2D(
+                [],
+                [],
+                marker=".",
+                linestyle="None",
+                color="C3",
+                label="EGMS 2018-2023",
+            ),
+        ],
         fontsize="small",
-        loc="lower right",
+        loc="lower center",
+        ncols=5,
     )
-    for ax in axes:
-        ax.set_ylabel("Displacement (mm)")
-    axes[-1].set_xlabel("Time")
-    fig.set_constrained_layout(True)
+    axes[1].set_ylabel("Bodenbewegung (mm)")
+    axes[-1].set_xlabel("Zeitraum")
+    axes[0].set_ylim(-15, 15)
+    axes[0].set_xlim(
+        datetime.datetime(2015, 1, 1),
+        datetime.datetime(2022, 6, 1),
+    )
+    fig.subplots_adjust(top=0.95, left=0.075, right=0.99, bottom=0.125)
+
     fig.savefig(os.path.join(output_folder, "GNSS_PSI_timeseries"))
     fig.savefig(os.path.join(output_folder, "GNSS_PSI_timeseries.pdf"))
     # plot_gnss_and_psi(gnss_file_list)
 
 
+def plot_psi_vs_gnss(
+    gnss_data: dict,
+    psi_path: os.PathLike,
+    table: str,
+    station: gp.GeoDataFrame,
+    distance: float,
+    ax: mplax.Axes,
+    output_folder: os.PathLike,
+    filter_width: int,
+    take_diff: bool,
+    overwrite: bool,
+    color: str = "C0",
+):
+    psi_data, _ = u4psi.get_point_data(
+        station.geometry,
+        distance,
+        f"{station.ID.values[0]}_GNSS",
+        psi_path,
+        table=table,
+        overwrite=overwrite,
+    )
+
+    if psi_data:
+        psi_t = psi_data["time"]
+        psi_v = np.nanmedian(psi_data["timeseries"], axis=0)
+        gnss_t = gnss_data["gps_datetime"]
+        gnss_v = gnss_data["res_up"]
+        psi_val = unc.ufloat(np.nanmedian(psi_v), 2 * np.nanstd(psi_v))
+        gnss_val = unc.ufloat(np.nanmedian(gnss_v), 2 * np.nanstd(gnss_v))
+        print(
+            station.name.values[0],
+            f"psi: {psi_val},  gnss: {gnss_val}",
+        )
+        if filter_width:
+            gnss_v = spimage.median_filter(gnss_v, filter_width)
+        if take_diff:
+            # Correlate increments instead of absolute displacements.
+            psi_t = psi_t[:-1]
+            psi_v = np.diff(psi_v)
+            gnss_t = gnss_t[:-1]
+            gnss_v = np.diff(gnss_v)
+
+        correlate_time_series(
+            psi_t,
+            psi_v,
+            gnss_t,
+            gnss_v,
+            station.name.values[0],
+            output_folder,
+        )
+
+        u4ax.plot_timeseries(
+            psi_data["time"], psi_data["timeseries"], ax=ax, color=color
+        )
+
+        ax.set_title(station.name.values[0])
+
+
 def subsample_gnss(data_vert, gnss_data):
     """Gets only the gnss data closest to psi obervations"""
     pti = []
diff --git a/u4py/scripts/examples/correlation_external_data/Groundwater_height.py b/u4py/scripts/examples/correlation_external_data/Groundwater_height.py
new file mode 100644
index 0000000000000000000000000000000000000000..093f4281a6e56b84afacdc9c2784332337244c73
--- /dev/null
+++ b/u4py/scripts/examples/correlation_external_data/Groundwater_height.py
@@ -0,0 +1,70 @@
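+"""Extracts well elevations from DEM tiles and stores the average groundwater depth in the wells shapefile."""
+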
+import os
+import pickle as pkl
+from pathlib import Path
+
+import geopandas as gp
+import numpy as np
+import pandas as pd
+import rasterio as rio
+from tqdm import tqdm
+
+import u4py.addons.groundwater as u4gw
+import u4py.io.tiff as u4tiff
+import u4py.utils.projects as u4proj
+
+
+def main():
+    project = u4proj.get_project(
+        proj_path=Path(
+            r"C:\Users\Michael Rudolf\Documents\ArcGIS\U4_projects\Groundwater_height.u4project"
+        ).expanduser(),
+        required=["base_path", "places_path", "ext_path", "diff_plan_path"],
+        interactive=False,
+    )
+    wells_shp_path = r"C:\Users\Michael Rudolf\HESSENBOX-DA\Datenaustausch_Umwelt4.0\All_Correlations_GW_PSI.shp"
+    wells_gdf = gp.read_file(wells_shp_path)
+    wells_path = os.path.join(
+        project["paths"]["ext_path"], "GWStände_2015", "GWStände_2015.pkl"
+    )
+    wells_data = u4gw.get_groundwater_data(wells_path)
+    tiff_file_list = u4tiff.get_pointlist_tiff(
+        wells_gdf, project["paths"]["diff_plan_path"]
+    )
+    with rio.open(tiff_file_list[0][1]) as dataset:
+        raster_crs = dataset.crs
+    wells_gdf = wells_gdf.to_crs(raster_crs)
+
+    if (
+        "z"
+        not in wells_data[wells_gdf.iloc[tiff_file_list[0][0]]["name"]].keys()
+    ):
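+        # Elevations are extracted once from the DEM tiles and cached in the
+        # groundwater pickle, so subsequent runs can skip the raster reads.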
+        for ii, tfpath in tqdm(
+            tiff_file_list,
+            desc="Extracting well height from DEM1",
+            leave=False,
+        ):
+            point = wells_gdf.iloc[ii]
+            with rio.open(tfpath) as dataset:
+                dem = dataset.read(1)
+                height = dem[dataset.index(point.geometry.x, point.geometry.y)]
+            wells_data[point["name"]]["z"] = height
+
+        with open(wells_path, "wb") as well_file:
+            pkl.dump(wells_data, well_file)
+
+    avg_h = []
+    idx = []
+    zvals = []
+    for ii, name in enumerate(wells_gdf.name.to_list()):
+        z = wells_data[name]["z"]
+        zvals.append(z)
+        avg_h.append(z - np.median(wells_data[name]["height"]))
+        idx.append(ii)
+
+    wells_gdf = wells_gdf.assign(z=pd.Series(zvals, idx))
+    wells_gdf = wells_gdf.assign(avg_h=pd.Series(avg_h, idx))
+    wells_gdf.to_file(wells_shp_path)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/u4py/scripts/examples/regional_case_studies/Crumstadt_BBD_EGMS_comparison.py b/u4py/scripts/examples/regional_case_studies/Crumstadt_BBD_EGMS_comparison.py
new file mode 100644
index 0000000000000000000000000000000000000000..48c63e3c1d54ec8ba7cfd1d44e8a111d8e354dae
--- /dev/null
+++ b/u4py/scripts/examples/regional_case_studies/Crumstadt_BBD_EGMS_comparison.py
@@ -0,0 +1,132 @@
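+"""Compares vertical BBD and EGMS ground motion around Crumstadt with gas storage fill levels."""
+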
+import os
+from pathlib import Path
+
+import geopandas as gp
+import matplotlib.pyplot as plt
+
+import u4py.addons.gas_storage as u4gas
+import u4py.io.files as u4files
+import u4py.io.gpkg as u4gpkg
+import u4py.plotting.axes as u4ax
+import u4py.utils.convert as u4convert
+import u4py.utils.projects as u4proj
+
+
+def main():
+    project = u4proj.get_project(
+        proj_path=Path(
+            r"~\Documents\ArcGIS\U4_projects\Examples\Bericht_FFM_BBD_EGMS.u4project"
+        ).expanduser(),
+        required=[
+            "base_path",
+            "piloten_path",
+            "output_path",
+            "processing_path",
+            "ext_path",
+        ],
+        interactive=False,
+    )
+
+    # Setting up Paths
+    bbd_fpath = os.path.join(
+        project["paths"]["base_path"], "Data_2023", "hessen_l3_clipped.gpkg"
+    )
+    egms_fpath_vert1 = os.path.join(
+        project["paths"]["base_path"],
+        "Data_EGMS_2015-2021",
+        "EGMS_2015_2021_vertikal.gpkg",
+    )
+    egms_fpath_vert2 = os.path.join(
+        project["paths"]["base_path"],
+        "Data_EGMS_2018-2022",
+        "EGMS_2018_2022_vertikal.gpkg",
+    )
+    regions = u4files.get_rois(project["paths"]["piloten_path"])
+    crumstadt_roi = gp.GeoDataFrame(geometry=[regions[4][1]], crs="EPSG:32632")
+
+    # Load bbd dataset
+    bbd_v_path = os.path.join(
+        project["paths"]["processing_path"], "bbd_v_crumstadt.gpkg"
+    )
+    if not os.path.exists(bbd_v_path):
+        bbd_v_gdf = u4gpkg.load_gpkg_data_region_ogr(
+            crumstadt_roi,
+            bbd_fpath,
+            "vertikal",
+        )
+        bbd_v_gdf.to_file(bbd_v_path)
+    else:
+        bbd_v_gdf = gp.read_file(bbd_v_path)
+    bbd_v = u4convert.reformat_gdf_to_dict(bbd_v_gdf)
+
+    # Load egms1 dataset
+    egms1_v_path = os.path.join(
+        project["paths"]["processing_path"], "egms1_v_crumstadt.gpkg"
+    )
+    if not os.path.exists(egms1_v_path):
+        egms1_v_gdf = u4gpkg.load_gpkg_data_region_ogr(
+            crumstadt_roi,
+            egms_fpath_vert1,
+            "EGMS_2015_2021_vertikal",
+        )
+        egms1_v_gdf.to_file(egms1_v_path)
+    else:
+        egms1_v_gdf = gp.read_file(egms1_v_path)
+    egms1_v = u4convert.reformat_gdf_to_dict(egms1_v_gdf)
+
+    # Load egms2 dataset
+    egms2_v_path = os.path.join(
+        project["paths"]["processing_path"], "egms2_v_crumstadt.gpkg"
+    )
+    if not os.path.exists(egms2_v_path):
+        egms2_v_gdf = u4gpkg.load_gpkg_data_region_ogr(
+            crumstadt_roi,
+            egms_fpath_vert2,
+            "EGMS_2018_2022_vertikal",
+        )
+        egms2_v_gdf.to_file(egms2_v_path)
+    else:
+        egms2_v_gdf = gp.read_file(egms2_v_path)
+    egms2_v = u4convert.reformat_gdf_to_dict(egms2_v_gdf)
+    date_gas, _, level_gas = u4gas.load_gas_data(
+        os.path.join(
+            project["paths"]["ext_path"], "Inventory Turnover Data_23.txt"
+        )
+    )
+
+    # Plotting
+    fig, axes = plt.subplots(nrows=4, sharex=True)
+
+    u4ax.plot_timeseries(
+        bbd_v["time"], bbd_v["timeseries"], ax=axes[0], color="C0"
+    )
+    u4ax.plot_timeseries(
+        egms1_v["time"], egms1_v["timeseries"], ax=axes[1], color="C1"
+    )
+    u4ax.plot_timeseries(
+        egms2_v["time"], egms2_v["timeseries"], ax=axes[2], color="C2"
+    )
+    axes[3].plot(date_gas, level_gas, color="k")
+
+    axes[1].set_ylabel("Bodenbewegung (mm)")
+    axes[3].set_ylabel("Füllstand (%)")
+    axes[-1].set_xlabel("Zeitraum")
+    axes[1].sharey(axes[0])
+    axes[2].sharey(axes[0])
+    axes[0].set_ylim(-10, 10)
+
+    for ii, lbl in enumerate(
+        ["BBD", "EGMS 2015-2021", "EGMS 2018-2022", "Gasspeicher"]
+    ):
+        axes[ii].annotate(lbl, (0.01, 0.05), xycoords="axes fraction")
+
+    fig.tight_layout()
+    fig.savefig(
+        os.path.join(project["paths"]["output_path"], "PSI_vs_Gas.png")
+    )
+    fig.savefig(
+        os.path.join(project["paths"]["output_path"], "PSI_vs_Gas.pdf")
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py b/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py
index 1b2bf106698e2b9cdc19438340cd6c8d1c9b3245..619f00ac2289338dd7fd4b96a69edb4313d63de2 100644
--- a/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py
+++ b/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py
@@ -21,7 +21,7 @@ import u4py.utils.projects as u4proj
 def main():
     project = u4proj.get_project(
         proj_path=Path(
-            r"~/Documents/umwelt4/Bericht_FFM_BBD_EGMS.u4project"
+            r"~\Documents\ArcGIS\U4_projects\Examples\Bericht_FFM_BBD_EGMS.u4project"
         ).expanduser(),
         required=[
             "base_path",
diff --git a/u4py/scripts/playground/filtered_classified_stats.py b/u4py/scripts/playground/filtered_classified_stats.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed0ba8c4351ce4f22403e92fb87b386497e876ae
--- /dev/null
+++ b/u4py/scripts/playground/filtered_classified_stats.py
@@ -0,0 +1,47 @@
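+"""Plots bar charts of the manual classification results for the filtered shapes."""
+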
+import os
+
+import geopandas as gp
+import matplotlib.axes as mplax
+import matplotlib.pyplot as plt
+
+
+def main():
+    folder_path = (
+        r"C:\Users\Michael Rudolf\Documents\ArcGIS\SelectedSites_August24"
+    )
+    flist = [
+        "Filtered_Classified_Shapes_hazard.gpkg",
+        "Filtered_Classified_Shapes_onlyLarge.gpkg",
+    ]
+    for file in flist:
+        plot_bar_diagram(folder_path, file)
+    # fname_all = "Filtered_Classified_Shapes_all_merged_clean.gpkg"
+
+
+def plot_bar_diagram(folder_path: os.PathLike, fname: str):
+    gdf_large: gp.GeoDataFrame
+    ax: mplax.Axes
+    gdf_large = gp.read_file(os.path.join(folder_path, fname))
+
+    fig, ax = plt.subplots()
+    gdf_large.manual_class_1.value_counts(ascending=True).plot(
+        ax=ax, kind="barh"
+    )
+    n_research = gdf_large.manual_research.value_counts(ascending=True).iloc[0]
+    n_unclear = gdf_large.manual_unclear_1.value_counts().iloc[1]
+    ax.annotate(
+        f"Insgesamt {len(gdf_large)} Regionen,\n"
+        f"davon {n_research} genauer zu untersuchen\n"
+        f"und {n_unclear} unklar.",
+        (0.95, 0.05),
+        xycoords="axes fraction",
+        verticalalignment="bottom",
+        horizontalalignment="right",
+    )
+    ax.set_ylabel("")
+    ax.set_xlabel("Anzahl")
+
+    fig.tight_layout()
+    outname = os.path.splitext(fname)[0].split("_")[-1]
+    fig.savefig(os.path.join(folder_path, f"{outname}.pdf"))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/u4py/utils/convert.py b/u4py/utils/convert.py
index 56217b7f0f300ed579f045529e375e40717a8cf8..ab9d8026218962b8810884e337835af1627505dc 100644
--- a/u4py/utils/convert.py
+++ b/u4py/utils/convert.py
@@ -238,8 +238,12 @@ def reformat_gdf_to_dict(gdf: gp.GeoDataFrame) -> dict:
         "Z",
         "mean_velo_vert",
         "var_mean_velo_vert",
+        "mean_velo_",
+        "var_mean_v",
         "mean_velo_east",
         "var_mean_velo_east",
+        "mean_velo_",
+        "var_mean_v",
         "fid",
         "geom",
         "pid",
@@ -272,6 +276,9 @@ def reformat_gdf_to_dict(gdf: gp.GeoDataFrame) -> dict:
     if "mean_velo_vert" in gdf.keys():
         mv_key = "mean_velo_vert"
         mvs_key = "var_mean_velo_vert"
+    elif "mean_velo_" in gdf.keys():
+        mv_key = "mean_velo_"
+        mvs_key = "var_mean_v"
     if "mean_velo_east" in gdf.keys():
         mv_key = "mean_velo_east"
         mvs_key = "var_mean_velo_east"
@@ -312,7 +319,19 @@ def sql_key_to_time(key: str) -> datetime:
     :rtype: datetime
     """
     try:
+        # For BBD 2023 data
         dts = datetime.strptime(key, "date_%Y%m%d")
     except ValueError:
-        dts = datetime.strptime(key, "%Y%m%d")
+        try:
+            # For EGMS data
+            dts = datetime.strptime(key, "%Y%m%d")
+        except ValueError:
+            # For BBD 2021 data: keys look like "date_<counter>", where the
+            # counter 20150 corresponds to 2015-04-07 and increases by one
+            # per 6-day acquisition interval.
+            # Note: relies on timedelta from the datetime module.
+            start_time = datetime(2015, 4, 7)
+            dt = timedelta(days=6)
+            ts_0 = 20150
+            cur_ts = int(key[5:])
+            num_dt = cur_ts - ts_0
+            dts = start_time + num_dt * dt
+
     return dts