diff --git a/u4py/addons/adatools.py b/u4py/addons/adatools.py
index c4876ce9fcdaae0ddbaf95c2d336b02d40779199..3399d83c72d84bfcbe42d41957246148c6a7197c 100644
--- a/u4py/addons/adatools.py
+++ b/u4py/addons/adatools.py
@@ -17,6 +17,7 @@ import u4py.analysis.processing as u4proc
 import u4py.io.files as u4files
 import u4py.io.gpkg as u4gpkg
 import u4py.io.sql as u4sql
+from u4py.utils.types import U4Project
 
 
 def create_adafinder_read_map(
@@ -133,13 +134,13 @@ def load_region_as_gdf(
         return [], []
 
 
-def convert_gpkg_to_shp(fname: str, project: dict):
+def convert_gpkg_to_shp(fname: str, project: U4Project):
     """Converts the data from the gpkg to a shape file and readmap for table.
 
     :param fname: The filename of the gpkg file.
     :type fname: str
     :param project: The project config
-    :type project: dict
+    :type project: U4Project
 
     Subdivides the tables into smaller shape files depending on the number of
     entries in the table. The resulting subdivisions are stored in shapefiles
@@ -209,14 +210,14 @@ def conversion_worker(args: Iterable):
 
 
 def create_adafinder_config(
-    in_path: os.PathLike, project: dict, **kwargs
+    in_path: os.PathLike, project: U4Project, **kwargs
 ) -> os.PathLike:
     """Creates a config for use with ADAfinder CLI
 
     :param in_path: The path to the input points shapefile or csvfile.
     :type in_path: os.PathLike
     :param project: The u4py project config
-    :type project: dict
+    :type project: U4Project
     :param **kwargs: Additional arguments passed to ADAfinder config, with keys in UPPERCASE.
     :type **kwargs: dict
     :return: The path to the ADAfinder config
@@ -296,11 +297,11 @@ def create_adafinder_config(
     return adacfg_path
 
 
-def merge_shps(project: dict, ending: str, direction: str):
+def merge_shps(project: U4Project, ending: str, direction: str):
     """Merges output of several ADAfinder runs into a single shapefile.
 
     :param project: The project config
-    :type project: dict
+    :type project: U4Project
     :param ending: The ending to identify the type of results.
     :type ending: str
     :param direction: The direction (vertikal, Ost_West) of the results.
diff --git a/u4py/analysis/classify.py b/u4py/analysis/classify.py
index c8f96ad26b8e22ea483981cfbd6cb7b0cffd85ea..56b88308547192d9b8011331827ac7422629eb10 100644
--- a/u4py/analysis/classify.py
+++ b/u4py/analysis/classify.py
@@ -16,15 +16,15 @@ import u4py.analysis.spatial as u4spatial
 import u4py.io.gpkg as u4gpkg
 import u4py.io.tiff as u4tiff
 import u4py.plotting.plots as u4plots
-from u4py.utils.types import U4ResDict
+from u4py.utils.types import ShapeCfgDict, U4Project, U4ResDict
 
 
 def classify_shape(
     shp_gdf: gp.GeoDataFrame,
     group: int,
     buffer_size: float,
-    shp_cfg: dict,
-    project: dict,
+    shp_cfg: ShapeCfgDict,
+    project: U4Project,
     use_online: bool = False,
     use_internal: bool = True,
     save_shapes: bool = False,
@@ -42,9 +42,9 @@ def classify_shape(
     :param buffer_size: The buffer size for the hull around the shapes.
     :type buffer_size: float
     :param shp_cfg: The configuration for shapes, e.g. including the buffer sizes for roads etc.
-    :type shp_cfg: dict
+    :type shp_cfg: ShapeCfgDict
     :param project: The project config containing file paths.
-    :type project: dict
+    :type project: U4Project
     :param use_online: Query online webservices by the HLNUG for geology, hydrogeology and soil, defaults to False
     :type use_online: bool, optional
     :param use_internal: First query the internal geoserver, defaults to True
@@ -147,10 +147,10 @@ def classify_shape(
 
         # Roads and Railways
         res.update(roads(res, sub_set_hull, osm_path, shp_cfg))
-        res.update(railways(res, sub_set_hull, osm_path))
+        res.update(railways(res, sub_set_hull, osm_path, shp_cfg))
 
         # Buildings
-        res.update(buildings(res, sub_set_hull, osm_path))
+        res.update(buildings(res, sub_set_hull, shp_cfg, osm_path))
 
         # Water
         res.update(rivers_water(res, sub_set_hull, osm_path, shp_cfg))
@@ -240,6 +240,8 @@ def preallocate_results() -> U4ResDict:
         "aspect_polygons_std_21": [],
         "buildings_area": np.nan,
         "buildings": gp.GeoDataFrame(),
+        "buildings_num": 0,
+        "buildings_close": False,
         "geology_area": [],
         "geology_percent": [],
         "geology_units": [],
@@ -280,6 +282,8 @@ def preallocate_results() -> U4ResDict:
         "railways_has": False,
         "railways_length": np.nan,
         "railways_close": False,
+        "roads_has": False,
+        "roads_close": False,
         "roads_has_motorway": False,
         "roads_has_primary": False,
         "roads_has_secondary": False,
@@ -289,7 +293,6 @@ def preallocate_results() -> U4ResDict:
         "roads_motorway_length": [],
         "roads_primary_length": [],
         "roads_secondary_length": [],
-        "roads_main_area": np.nan,
         "roads_nearest_motorway_name": "",
         "roads_nearest_primary_name": "",
         "roads_nearest_secondary_name": "",
@@ -365,7 +368,7 @@ def roads(
     res: U4ResDict,
     sub_set_hull: gp.GeoDataFrame,
     osm_path: os.PathLike,
-    shp_cfg: dict,
+    shp_cfg: ShapeCfgDict,
 ) -> dict:
     """Loads the roads from the openstreetmap database and returns the area of minor and major roads. Also establishes if a motorway is present in the area.
 
@@ -376,7 +379,7 @@ def roads(
     :param osm_path: The path to the openstreetmap database.
     :type osm_path: os.PathLike
     :param shp_cfg: The shape config containing buffer sizes.
-    :type shp_cfg: dict
+    :type shp_cfg: ShapeCfgDict
     :return: An updated version of the results dictionary.
     :rtype: U4ResDict
     """
@@ -385,14 +388,9 @@ def roads(
     roads_data = u4gpkg.load_gpkg_data_region_ogr(
         sub_set_hull, osm_path, "gis_osm_roads_free_1"
     )
-    close_roads_data = u4gpkg.load_gpkg_data_region_ogr(
-        sub_set_hull.buffer(1000), osm_path, "gis_osm_roads_free_1"
-    )
-    if len(close_roads_data) > 0:
-        close_road_classes = close_roads_data.fclass.to_list()
     if len(roads_data) > 0:
         logging.info("Classifying Road Data")
-
+        res["roads_has"] = True
         # Extract area of main roads
         res["roads_main"] = u4spatial.get_clipped_road_area(
             roads_data, "mainroads", sub_set_hull, shp_cfg
@@ -422,31 +420,45 @@ def roads(
             res["roads_motorway_names"], res["roads_motorway_length"] = (
                 u4spatial.road_info(roads_data, "motorway")
             )
-        elif "motorway" in close_road_classes:
-            name, dist = u4spatial.get_nearest_road_segment(
-                sub_set_hull.centroid.iloc[0], close_roads_data, "motorway"
-            )
-            res["roads_nearest_motorway_name"] = name
-            res["roads_nearest_motorway_dist"] = dist
-
         if "primary" in road_classes:
             res["roads_has_primary"] = True
             res["roads_primary_names"], res["roads_primary_length"] = (
                 u4spatial.road_info(roads_data, "primary")
             )
-        elif "primary" in close_road_classes:
-            name, dist = u4spatial.get_nearest_road_segment(
-                sub_set_hull.centroid.iloc[0], close_roads_data, "primary"
-            )
-            res["roads_nearest_primary_name"] = name
-            res["roads_nearest_primary_dist"] = dist
-
         if "secondary" in road_classes:
             res["roads_has_secondary"] = True
             res["roads_secondary_names"], res["roads_secondary_length"] = (
                 u4spatial.road_info(roads_data, "secondary")
             )
-        elif "secondary" in close_road_classes:
+
+    logging.info(
+        f"Loading close roads in {shp_cfg['classify_buffers']['roads']}"
+        + " m buffer."
+    )
+    close_roads_data = u4gpkg.load_gpkg_data_region_ogr(
+        sub_set_hull.buffer(shp_cfg["classify_buffers"]["roads"]),
+        osm_path,
+        "gis_osm_roads_free_1",
+    )
+
+    if len(close_roads_data) > 0:
+        logging.info("Classifying Close Road Data")
+        res["roads_close"] = True
+        # Look for road classes
+        close_road_classes = close_roads_data.fclass.to_list()
+        if "motorway" in close_road_classes:
+            name, dist = u4spatial.get_nearest_road_segment(
+                sub_set_hull.centroid.iloc[0], close_roads_data, "motorway"
+            )
+            res["roads_nearest_motorway_name"] = name
+            res["roads_nearest_motorway_dist"] = dist
+        if "primary" in close_road_classes:
+            name, dist = u4spatial.get_nearest_road_segment(
+                sub_set_hull.centroid.iloc[0], close_roads_data, "primary"
+            )
+            res["roads_nearest_primary_name"] = name
+            res["roads_nearest_primary_dist"] = dist
+        if "secondary" in close_road_classes:
             name, dist = u4spatial.get_nearest_road_segment(
                 sub_set_hull.centroid.iloc[0], close_roads_data, "secondary"
             )
@@ -459,6 +471,7 @@ def railways(
     res: U4ResDict,
     sub_set_hull: gp.GeoDataFrame,
     osm_path: os.PathLike,
+    shp_cfg: ShapeCfgDict,
 ) -> dict:
     """Loads the railways from the openstreetmap database and calculates length or distance to closest points.
 
@@ -468,6 +481,8 @@ def railways(
     :type sub_set_hull: gp.GeoDataFrame
     :param osm_path: The path to the openstreetmap database.
     :type osm_path: os.PathLike
+    :param shp_cfg: The shape config containing buffer sizes.
+    :type shp_cfg: ShapeCfgDict
     :return: An updated version of the results dictionary.
     :rtype: U4ResDict
     """
@@ -477,7 +492,9 @@ def railways(
         sub_set_hull, osm_path, "gis_osm_railways_free_1"
     )
     close_rails_data = u4gpkg.load_gpkg_data_region_ogr(
-        sub_set_hull.buffer(1000), osm_path, "gis_osm_railways_free_1"
+        sub_set_hull.buffer(shp_cfg["classify_buffers"]["railways"]),
+        osm_path,
+        "gis_osm_railways_free_1",
     )
     logging.info("Classifying Railways Data")
     if len(railways_data) > 0:
@@ -486,8 +503,8 @@ def railways(
             res["railways_has"] = True
     elif len(close_rails_data) > 0:
         _, res["railways_length"] = u4spatial.get_nearest_road_segment(
-                sub_set_hull.centroid.iloc[0], close_rails_data, "rail"
-            )
+            sub_set_hull.centroid.iloc[0], close_rails_data, "rail"
+        )
         if res["railways_length"] > 0:
             res["railways_close"] = True
 
@@ -495,7 +512,11 @@ def railways(
 
 
 def buildings(
-    res: U4ResDict, sub_set_hull: gp.GeoDataFrame, osm_path: os.PathLike
+    res: U4ResDict,
+    sub_set_hull: gp.GeoDataFrame,
+    shp_cfg: ShapeCfgDict,
+    osm_path: os.PathLike = "",
+    out_folder: os.PathLike = "",
 ) -> U4ResDict:
     """Calculates the area that is covered by buildings
 
@@ -503,20 +524,54 @@ def buildings(
     :type res: U4ResDict
     :param sub_set_hull: The hull of the area.
     :type sub_set_hull: gp.GeoDataFrame
+    :param shp_cfg: The shape config containing buffer sizes.
+    :type shp_cfg: ShapeCfgDict
     :param osm_path: The path to the osm dataset.
     :type osm_path: os.PathLike
+    :param out_folder: The path to a gpkg file containing the data, defaults to ""
+    :type out_folder: os.PathLike, optional
     :return: The results dictionary with the classified data appended.
     :rtype: U4ResDict
     """
-    logging.info("Loading Building Data")
-    res["buildings"] = u4gpkg.load_gpkg_data_region_ogr(
-        sub_set_hull, osm_path, "gis_osm_buildings_a_free_1"
-    )
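+    # Use the local OSM geopackage when a path is given, otherwise fall back to the HLNUG web service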
+    if osm_path:
+        logging.info("Loading building data from shape")
+        res["buildings"] = u4gpkg.load_gpkg_data_region_ogr(
+            sub_set_hull, osm_path, "gis_osm_buildings_a_free_1"
+        )
+        buildings_close = u4gpkg.load_gpkg_data_region_ogr(
+            sub_set_hull.buffer(shp_cfg["classify_buffers"]["buildings"]),
+            osm_path,
+            "gis_osm_buildings_a_free_1",
+        )
+        if len(buildings_close) > 0:
+            res["buildings_close"] = True
+    else:
+        logging.info("Loading building data from HLNUG server")
+        building_data = u4web.query_hlnug(
+            "geologie/gk25/MapServer",
+            "Geologie (Kartiereinheiten)",
+            region=sub_set_hull,
+            out_folder=out_folder,
+            suffix=f"{res['group']:05}",
+        )
+        if building_data.empty:
+            logging.debug("Retrying to get geological data.")
+            building_data = u4web.query_hlnug(
+                "geologie/gk25/MapServer",
+                "Geologie (Kartiereinheiten)",
+                region=sub_set_hull,
+                out_folder=out_folder,
+                suffix=f"{res['group']:05}",
+            )
+
     if len(res["buildings"]) > 0:
         res["buildings_area"] = round(res["buildings"].area.sum(), 1)
         res["additional_areas"] += res["buildings_area"]
+        res["buildings_num"] = len(res["buildings"])
     else:
         res["buildings"] = []
+
     return res
 
 
@@ -524,7 +578,7 @@ def rivers_water(
     res: U4ResDict,
     sub_set_hull: gp.GeoDataFrame,
     osm_path: os.PathLike,
-    shp_cfg: dict,
+    shp_cfg: ShapeCfgDict,
 ) -> U4ResDict:
     """Calculates the area that is covered by water
 
@@ -535,7 +589,7 @@ def rivers_water(
     :param osm_path: The path to the osm dataset.
     :type osm_path: os.PathLike
     :param shp_cfg: The shape config to calculate the buffers for the rivers.
-    :type shp_cfg: dict
+    :type shp_cfg: ShapeCfgDict
     :return: The results dictionary with the classified data appended.
     :rtype: U4ResDict
     """
@@ -785,7 +839,7 @@ def geology(
     :type res: U4ResDict
     :param sub_set_hull: The hull of the area.
     :type sub_set_hull: gp.GeoDataFrame
-    :param out_folder: The path to a gpkg file containing the data (not implemented yet), defaults to ""
+    :param out_folder: The path to a gpkg file containing the data, defaults to ""
     :type out_folder: os.PathLike, optional
     :param use_internal: Try internal web server first, defaults to True.
     :type use_internal: os.PathLike, optional
@@ -935,7 +989,7 @@ def hydrogeology(
     :type res: U4ResDict
     :param sub_set_hull: The hull of the area.
     :type sub_set_hull: gp.GeoDataFrame
-    :param out_folder: The path to a gpkg file containing the data (not implemented yet), defaults to ""
+    :param out_folder: The path to a gpkg file containing the data, defaults to ""
     :type out_folder: os.PathLike, optional
     :return: The results dictionary with the classified data appended.
     :rtype: U4ResDict
@@ -1195,7 +1249,7 @@ def topsoil(
     :type res: U4ResDict
     :param sub_set_hull: The hull of the area.
     :type sub_set_hull: gp.GeoDataFrame
-    :param out_folder: The path to a gpkg file containing the data (not implemented yet), defaults to ""
+    :param out_folder: The path to a gpkg file containing the data, defaults to ""
     :type out_folder: os.PathLike, optional
     :return: The results dictionary with the classified data appended.
     :rtype: U4ResDict
@@ -1305,7 +1359,7 @@ def write_report(
     :param group: The name of the group.
     :type group: str
     :param project: The project containing paths.
-    :type project: dict
+    :type project: U4Project
     """
     logging.info("Writing report")
     report_folder = os.path.join(project["paths"]["sites_path"], "Reports")
@@ -1449,7 +1503,7 @@ def structural_area(
     :type res: U4ResDict
     :param sub_set_hull: The hull of the area.
     :type sub_set_hull: gp.GeoDataFrame
-    :param out_folder: The path to a gpkg file containing the data (not implemented yet), defaults to ""
+    :param out_folder: The path to a gpkg file containing the data, defaults to ""
     :type out_folder: os.PathLike, optional
     :return: The results dictionary with the classified data appended.
     :rtype: U4ResDict
diff --git a/u4py/analysis/spatial.py b/u4py/analysis/spatial.py
index ebf2520a04e40cb2985e89b37fdb4689092c9f87..45006cb112f32f07197e4e7f789095f105d06307 100644
--- a/u4py/analysis/spatial.py
+++ b/u4py/analysis/spatial.py
@@ -30,6 +30,8 @@ from shapely.errors import GEOSException
 from skimage import measure as skmeasure
 from tqdm import tqdm
 
+from u4py.utils.types import ShapeCfgDict
+
 
 def reproject_raster(
     in_path: os.PathLike,
@@ -1152,28 +1154,6 @@ def area_per_feature(
     return (fclasses, areas)
 
 
-def vol_added(im_data: np.ndarray) -> np.ndarray:
-    """Calculates the volume added inside the area of the numpy array.
-
-    :param im_data: The input raster dem data.
-    :type im_data: np.ndarray
-    :return: The volume added.
-    :rtype: np.ndarray
-    """
-    return np.nansum(im_data[im_data > 0])
-
-
-def vol_removed(im_data: np.ndarray) -> np.ndarray:
-    """Calculates the volume removed inside the area of the numpy array.
-
-    :param im_data: The input raster dem data.
-    :type im_data: np.ndarray
-    :return: The volume removed.
-    :rtype: np.ndarray
-    """
-    return np.nansum(im_data[im_data < 0])
-
-
 def vol_moved(im_data: np.ndarray) -> np.ndarray:
     """Calculates the volume moved inside the area of the numpy array.
 
@@ -1182,7 +1162,10 @@ def vol_moved(im_data: np.ndarray) -> np.ndarray:
     :return: The volume moved.
     :rtype: np.ndarray
     """
-    return np.nansum(np.abs(im_data))
+    if np.isfinite(im_data).any():
+        return np.nansum(np.abs(im_data))
+    else:
+        return 0
 
 
 def vol_error(im_data: np.ndarray) -> np.ndarray:
@@ -1193,12 +1176,17 @@ def vol_error(im_data: np.ndarray) -> np.ndarray:
     :return: The error of volume estimates.
     :rtype: np.ndarray
     """
-    return 0.3**2 * len(im_data[np.isfinite(im_data)])
+    if np.isfinite(im_data).any():
+        return 0.3**2 * len(im_data[np.isfinite(im_data)])
+    else:
+        return 0
 
 
 def roundness(shapes: gp.GeoDataFrame) -> list:
     """Calculates the roundness of all polygons in shapes.
 
+    :math:`Roundness=\\frac{4\\pi A}{p^2}`, where :math:`A` is the polygon area and :math:`p` its perimeter.
+
     :param shapes: A geodataframe with polygons.
     :type shapes: gp.GeoDataFrame
     :return: The roundness between 0=not round and 1=perfectly round.
@@ -1206,7 +1194,7 @@ def roundness(shapes: gp.GeoDataFrame) -> list:
     """
     peri = shapes.geometry.length
     area = shapes.area
-    roundness = round((4 * np.pi * area) / (area) ** 2, 3)
+    roundness = round((4 * np.pi * area) / (peri) ** 2, 3)
     return roundness.to_list()
 
 
@@ -1330,7 +1318,7 @@ def get_clipped_road_area(
     roads: gp.GeoDataFrame,
     road_type: str,
     clip_reg: gp.GeoDataFrame,
-    shp_cfg: dict,
+    shp_cfg: ShapeCfgDict,
 ) -> gp.GeoDataFrame:
     """Buffers and clips the roads to the area of interest, removing tunnels as well.
 
@@ -1341,7 +1329,7 @@ def get_clipped_road_area(
     :param clip_reg: The region used for clipping.
     :type clip_reg: gp.GeoDataFrame
     :param shp_cfg: The shape config containing buffer sizes.
-    :type shp_cfg: dict
+    :type shp_cfg: ShapeCfgDict
     :return: The clipped roads as polygons.
     :rtype: gp.GeoDataFrame
     """
@@ -1407,7 +1395,10 @@ def ckdnearest(
     :rtype: gp.GeoDataFrame
     """
     geom_a = (geom_a.x, geom_a.y)
-    geom_b = [np.array(geom.coords) for geom in gdf_b.geometry.to_list()]
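+    # Explode multi-part geometries first so every entry is a single geometry exposing .coords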
+    geom_b = [
+        np.array(geom.coords) for geom in gdf_b.geometry.explode().to_list()
+    ]
     idx_b = tuple(
         itertools.chain.from_iterable(
             [
diff --git a/u4py/io/docx_report.py b/u4py/io/docx_report.py
index 87d78e53a2987a357f905c8b01939a55ea82bf3e..ad1a17746181c0b2162316e0f979d2874b751834 100644
--- a/u4py/io/docx_report.py
+++ b/u4py/io/docx_report.py
@@ -6,7 +6,6 @@ requirements of the HLNUG.
 import os
 
 import docx
-import docx.parts
 import docx.shared
 import geopandas as gp
 import humanize
@@ -240,12 +239,20 @@ def hlnug_description(
         prgph.add_run(f" und {hld['RU_SCHIC_2'].values[0]}")
     prgph.add_run(". ")
 
+    # Add buildings
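+    # The 50 m wording below matches the default classify_buffers distances in get_shape_config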
+    if row["buildings_num"] > 0:
+        prgph.add_run("Auf der Rutschung ist Bebauung vorhanden. ")
+    elif row["buildings_close"]:
+        prgph.add_run(
+            "In in maximal 50\u00a0m Entfernung vom Rand der Rutschung ist "
+            + "Bebauung vorhanden. "
+        )
+    else:
+        prgph.add_run("In der näheren Umgebung ist keine Bebauung vorhanden. ")
+
     # Add roads
-    if (
-        row["roads_has_motorway"]
-        or row["roads_has_primary"]
-        or row["roads_has_secondary"]
-    ):
+    if row["roads_has"]:
         road_list = []
         if row["roads_has_motorway"]:
             if row["roads_motorway_names"].startswith("["):
@@ -292,36 +298,39 @@ def hlnug_description(
                 road_list.append(
                     f"der {secondary_names} auf einer Länge von {secondary_lengths:.1f}\u00a0m"
                 )
-        prgph.add_run(
-            f"Die Rutschung wird von {u4human.listed_strings(road_list)} gequert. "
-        )
+        if road_list:
+            prgph.add_run(
+                f"Die Rutschung wird von {u4human.listed_strings(road_list)} gequert. "
+            )
+        else:
+            prgph.add_run(
+                "Die Rutschung wird von mindestens einer Straße oder Weg gequert. "
+            )
 
     else:
         if row["roads_nearest_motorway_name"]:
             prgph.add_run(
-                f"In ca. {int(row['roads_nearest_motorway_dist'])}\u00a0m "
-                + "Entfernung vom Mittelpunkt der Rutschung befindet sich die "
+                "In in maximal 50\u00a0m Entfernung vom Rand der Rutschung "
+                + "befindet sich die "
                 + f"{row['roads_nearest_motorway_name']}. "
             )
-        elif row["roads_nearest_primary_name"]:
+        if row["roads_nearest_primary_name"]:
             prgph.add_run(
-                f"In ca. {int(row['roads_nearest_primary_dist'])}\u00a0m "
-                + "Entfernung vom Mittelpunkt der Rutschung befindet sich die "
+                "Inin maximal 50\u00a0m Entfernung vom Rand der Rutschung "
+                + "befindet sich die "
                 + f"{row['roads_nearest_primary_name']}. "
             )
-        elif row["roads_nearest_secondary_dist"] > 0:
+        if row["roads_nearest_secondary_dist"] > 0:
             prgph.add_run(
-                f"In ca. {int(row['roads_nearest_secondary_dist'])}\u00a0m "
-                + "Entfernung vom Mittelpunkt der Rutschung befindet sich "
+                "In in maximal 50\u00a0m Entfernung vom Rand der Rutschung "
+                + "befindet sich "
             )
             if row["roads_nearest_secondary_name"]:
                 prgph.add_run(f"die {row['roads_nearest_secondary_name']}. ")
             else:
                 prgph.add_run("eine nicht benannte Landstraße. ")
         else:
-            prgph.add_run(
-                "Im Umkreis von 1\u00a0km sind keine größeren Straßen vorhanden. "
-            )
+            prgph.add_run("Im näheren Umkreis sind keine Straßen vorhanden. ")
 
     if row["railways_has"]:
         prgph.add_run(
@@ -330,8 +339,7 @@ def hlnug_description(
         )
     elif row["railways_close"]:
         prgph.add_run(
-            f"Eine Bahnlinie liegt ca. {int(row['railways_length'])}\u00a0m "
-            + "entfernt vom Mittelpunkt der Rutschung. "
+            "In maximal 50\u00a0m Entfernung vom Rand der Rutschung befindet sich eine Bahntrasse. "
         )
 
     # Add drill sites
diff --git a/u4py/io/gpkg.py b/u4py/io/gpkg.py
index cbaa5ff649dff8d23ac386d9edd542b261627cda..4bf521ff5a165fb0af99ae8c2acec01c7eb268e0 100644
--- a/u4py/io/gpkg.py
+++ b/u4py/io/gpkg.py
@@ -16,6 +16,7 @@ import u4py.analysis.spatial as u4spatial
 import u4py.io.sql as u4sql
 import u4py.io.tiff as u4tiff
 import u4py.utils.convert as u4conv
+from u4py.utils.types import ShapeCfgDict
 
 ogr.UseExceptions()
 
@@ -24,7 +25,7 @@ def load_and_buffer_gpkg(
     gpkg_path: os.PathLike,
     tiff_path: os.PathLike,
     tables: list = [],
-    shp_cfg: dict = {},
+    shp_cfg: ShapeCfgDict = {},
     out_crs: str = "EPSG:32632",
     buffer_dist: bool = False,
 ) -> gp.GeoDataFrame:
diff --git a/u4py/io/shp.py b/u4py/io/shp.py
index 5043d4ef406406216f1b47491b1b15990638dfc1..964a6e960e5f64bb57c181b313b5e8b10e63bad1 100644
--- a/u4py/io/shp.py
+++ b/u4py/io/shp.py
@@ -20,6 +20,7 @@ from packaging.version import Version
 from tqdm import tqdm
 
 import u4py.utils.utils as u4utils
+from u4py.utils.types import U4Project
 
 
 def get_clipped_shapefile(
@@ -72,7 +73,7 @@ def get_clipped_shapefile(
 
 def get_osm_as_shp(
     tags: dict,
-    project: dict,
+    project: U4Project,
     query: str = "Hesse",
     shape_type: shapely.GeometryType = shapely.Point,
     overwrite: bool = False,
@@ -82,7 +83,7 @@ def get_osm_as_shp(
     :param tags: The tags for open street map (same as for the overpass API)
     :type tags: dict
     :param project: The loaded project file (for path management)
-    :type project: dict
+    :type project: U4Project
     :param query: The region where to get the data, defaults to "Hesse"
     :type query: str, optional
     :param shape_type: The type of the shape data, defaults to `shapely.Point`
diff --git a/u4py/scripts/gis_workflows/Classify_Shapes.py b/u4py/scripts/gis_workflows/Classify_Shapes.py
index f715f4641e839766270086da2c6a97668700d3eb..795a62a7e5d468dcf9bd25013ffca7cadaeddbc1 100644
--- a/u4py/scripts/gis_workflows/Classify_Shapes.py
+++ b/u4py/scripts/gis_workflows/Classify_Shapes.py
@@ -70,14 +70,14 @@ def main():
         {
             "shp_gdf": shp_gdf,
             "group": group,
-            "buffer_size": 5,
+            "buffer_size": 0.1,
             "shp_cfg": shp_cfg,
             "project": project,
             "use_online": project.getboolean("config", "use_online"),
             "use_internal": project.getboolean("config", "use_internal"),
             "save_report": True,
         }
-        for group in unique_groups
+        for group in unique_groups[:50]
     ]
     if project.getboolean("config", "use_parallel"):
         main_list = u4proc.batch_mapping(
diff --git a/u4py/utils/config.py b/u4py/utils/config.py
index 380a5dad5ee37551b5063aac87cc42aa6c659f9f..ac5caf52f5bf1021aa43ad8338aeee0e4afaa786 100644
--- a/u4py/utils/config.py
+++ b/u4py/utils/config.py
@@ -8,6 +8,8 @@ import logging
 import os
 import sys
 
+from u4py.utils.types import ShapeCfgDict
+
 # Setting the cpu count for parallel processing to a convenient amount.
 if os.cpu_count() < 2:
     ResourceWarning("Not enough processors for parallel processing.")
@@ -36,11 +38,11 @@ def start_logger():
     )
 
 
-def get_shape_config() -> dict:
+def get_shape_config() -> ShapeCfgDict:
     """Returns the parameters for buffering OSM shapes.
 
     :return: The configuration.
-    :rtype: dict
+    :rtype: ShapeCfgDict
     """
     # Names of the shapes for the legend
     name = {
@@ -56,8 +58,9 @@ def get_shape_config() -> dict:
         "lakes": "Lakes",
         "power": "Wind Turbines",
         "traffic": "Parking Lots,...",
-        "transportx": "Terminals, Airports,...",
+        "transport": "Terminals, Airports,...",
     }
+
     # Name of the shapefile containing the original data
     shp_file = {
         "build": "gis_osm_buildings_a_free_1.shp",
@@ -74,12 +77,14 @@ def get_shape_config() -> dict:
         "traffic": "gis_osm_traffic_a_free_1.shp",
         "transport": "gis_osm_transport_a_free_1.shp",
     }
+
     # List of feature classes to extract from the file (empty=all)
     fclass = {
         "build": [],
         "pois_area": [
             "sports_centre",
-            "track" "pitch",
+            "trackpitch",
             "swimming_pool",
             "wastewater_plant",
             "golf_course",
@@ -132,6 +136,7 @@ def get_shape_config() -> dict:
         "traffic": [],
         "transport": [],
     }
+
     # Default buffer size around each feature is 10 meters.
     buffer_dist = {
         "build": 15,
@@ -149,6 +154,13 @@ def get_shape_config() -> dict:
         "transport": 15,
     }
 
+    # Buffer distances in meters for finding nearby roads, railways and buildings during classification
+    classify_buffers = {
+        "roads": 50,
+        "railways": 50,
+        "buildings": 50,
+    }
+
     # Plotting Stuff
     colors = {
         "build": "dimgray",
@@ -165,6 +177,7 @@ def get_shape_config() -> dict:
         "traffic": "blue",
         "transport": "blue",
     }
+
     zorder = {
         "build": 2,
         "pois_area": 2,
@@ -183,6 +196,7 @@ def get_shape_config() -> dict:
 
     shp_cfg = {
         "buffer_dist": buffer_dist,
+        "classify_buffers": classify_buffers,
         "name": name,
         "shp_file": shp_file,
         "fclass": fclass,
diff --git a/u4py/utils/types.py b/u4py/utils/types.py
index 3c7e3c0be29462a0d32b70de94d95b5db661f4b9..44337e4b579a4499fa281cbe04e681257bd7f525 100644
--- a/u4py/utils/types.py
+++ b/u4py/utils/types.py
@@ -87,6 +87,8 @@ class U4ResDict(TypedDict):
     aspect_polygons_std_21: list
     buildings_area: float
     buildings: gp.GeoDataFrame
+    buildings_num: int
+    buildings_close: bool
     geology_area: list
     geology_percent: list
     geology_units: list
@@ -127,6 +129,7 @@ class U4ResDict(TypedDict):
     railways_has: bool
     railways_length: float
     railways_close: bool
+    roads_has: bool
     roads_has_motorway: bool
     roads_has_primary: bool
     roads_has_secondary: bool
@@ -136,6 +139,7 @@ class U4ResDict(TypedDict):
     roads_motorway_length: list
     roads_primary_length: list
     roads_secondary_length: list
+    roads_close: bool
     roads_nearest_motorway_name: str
     roads_nearest_primary_name: str
     roads_nearest_secondary_name: str
@@ -212,3 +216,131 @@ class U4Namespace(Namespace):
     input: os.PathLike
     overwrite: bool
     cpus: int
+
+
+class BufferDistDict(TypedDict):
+    """Default buffer distances for features"""
+
+    build: float
+    pois_area: float
+    landfill: float
+    landuse: float
+    construction: float
+    railway: float
+    mainroads: float
+    minor_roads: float
+    water: float
+    lakes: float
+    power: float
+    traffic: float
+    transport: float
+
+
+class ClassifyDict(TypedDict):
+    """Buffer distances for classification"""
+
+    roads: float
+    railways: float
+    buildings: float
+
+
+class NameDict(TypedDict):
+    """Names for shapes of the legend"""
+
+    build: str
+    pois_area: str
+    landfill: str
+    landuse: str
+    construction: str
+    railway: str
+    mainroads: str
+    minor_roads: str
+    water: str
+    lakes: str
+    power: str
+    traffic: str
+    transport: str
+
+
+class ShpFileDict(TypedDict):
+    """Names of the shapefile containing the original data"""
+
+    build: os.PathLike
+    pois_area: os.PathLike
+    landfill: os.PathLike
+    landuse: os.PathLike
+    construction: os.PathLike
+    railway: os.PathLike
+    mainroads: os.PathLike
+    minor_roads: os.PathLike
+    water: os.PathLike
+    lakes: os.PathLike
+    power: os.PathLike
+    traffic: os.PathLike
+    transport: os.PathLike
+
+
+class FclassDict(TypedDict):
+    """List of feature classes to extract from the file"""
+
+    build: list[str]
+    pois_area: list[str]
+    landfill: list[str]
+    landuse: list[str]
+    construction: list[str]
+    railway: list[str]
+    mainroads: list[str]
+    minor_roads: list[str]
+    water: list[str]
+    lakes: list[str]
+    power: list[str]
+    traffic: list[str]
+    transport: list[str]
+
+
+class ColorsDict(TypedDict):
+    """Colors for plotting features"""
+
+    build: str
+    pois_area: str
+    landfill: str
+    landuse: str
+    construction: str
+    railway: str
+    mainroads: str
+    minor_roads: str
+    water: str
+    lakes: str
+    power: str
+    traffic: str
+    transport: str
+
+
+class ZorderDict(TypedDict):
+    """Layer `zorder` for plotting"""
+
+    build: int
+    pois_area: int
+    landfill: int
+    landuse: int
+    construction: int
+    railway: int
+    mainroads: int
+    minor_roads: int
+    water: int
+    lakes: int
+    power: int
+    traffic: int
+    transport: int
+
+
+class ShapeCfgDict(TypedDict):
+    """A dictionary containing configs for shapes and buffers"""
+
+    buffer_dist: BufferDistDict
+    classify_buffers: ClassifyDict
+    name: NameDict
+    shp_file: ShpFileDict
+    fclass: FclassDict
+    colors: ColorsDict
+    zorder: ZorderDict