diff --git a/u4py/analysis/inversion.py b/u4py/analysis/inversion.py
index ac87443f27ff8882d9c536e55eb6dfcea65f23ae..c4e247575e0ed6c9000fc35f6c5c2ff7b881bdc1 100644
--- a/u4py/analysis/inversion.py
+++ b/u4py/analysis/inversion.py
@@ -877,8 +877,13 @@ def stack_data(
         for k in ["dataE", "dataN", "dataU"]:
             time_series[k] = dataset[data_mapping[k]]
         time_series["station"] = [str(nn) for nn in dataset["ps_id"]]
-        time_series["xmid"] = dataset["xmid"]
-        time_series["ymid"] = dataset["ymid"]
+        if "xmid" in data_keys:
+            time_series["xmid"] = dataset["xmid"]
+            time_series["ymid"] = dataset["ymid"]
+        else:
+            time_series["xmid"] = np.mean(dataset["x"])
+            time_series["ymid"] = np.mean(dataset["y"])
     return time_series
 
 
diff --git a/u4py/analysis/processing.py b/u4py/analysis/processing.py
index 5d46a07be597e8925ac5a1cc16becb98ac4a02dc..5353cc6a5479889b33ca0a8bdd325064adf23fa1 100644
--- a/u4py/analysis/processing.py
+++ b/u4py/analysis/processing.py
@@ -14,7 +14,6 @@ overwritten by setting the keyword argument `overwrite=True`.
 from __future__ import annotations
 
 import logging
-import multiprocessing.pool as mpp
 import os
 import pickle
 from multiprocessing import Pool
@@ -30,7 +29,7 @@ import u4py.utils.config as u4config
 import u4py.utils.convert as u4convert
 
 
-def invert_psi_dict(
+def get_psi_dict_inversion(
     data: dict = dict(),
     t_AT: list = [],
     t_EQ: list = [],
@@ -44,7 +43,7 @@ def invert_psi_dict(
         "dataU": "timeseries",
     },
 ) -> list:
-    """Inverts a dictionary loaded or merged from h5-files
+    """Loads inversion results or recreates the inversion results for the given dataset.
 
     :param data: Data dictionary according to u4py standard (e.g. read from h5).
     :type data: dict
@@ -63,56 +62,29 @@ def invert_psi_dict(
     :return: The matrix of components.
     :rtype: list
     """
-    if (overwrite or not os.path.exists(save_path)) and data:
-        prepared_data = u4invert.reformat_dict(data, data_mapping=data_mapping)
-        (
-            ori_matrix,
-            prepared_data,
-            t_1,
-            parameters_list,
-        ) = u4invert.invert_time_series(
-            prepared_data,
-            t_AT=t_AT,
-            t_EQ=t_EQ,
-            t_EX=t_EX,
-            num_coeffs=num_coeffs,
-        )
-        ind = u4invert.remove_outliers(
-            prepared_data["ori_dhat_data"], threshold=2
-        )
-        (
-            matrix,
-            prepared_data,
-            t_2,
-            parameters_list,
-        ) = u4invert.invert_time_series(
-            prepared_data,
-            t_AT=t_AT,
-            t_EQ=t_EQ,
-            t_EX=t_EX,
-            num_coeffs=num_coeffs,
-            ind=ind,
+    if save_path:
+        if (overwrite or not os.path.exists(save_path)) and data:
+            prepared_data = invert_psi_dict(
+                data, data_mapping, t_AT, t_EQ, t_EX, num_coeffs
+            )
+            try:
+                os.remove(save_path)
+            except FileNotFoundError:
+                logging.info("No inversion results found. Creating new file.")
+            with open(save_path, "wb") as pkl_file:
+                pickle.dump(prepared_data, pkl_file)
+
+        elif os.path.exists(save_path):
+            _, fname = os.path.split(save_path)
+            logging.info(f"Loading from save file: {fname}")
+            with open(save_path, "rb") as pkl_file:
+                prepared_data = pickle.load(pkl_file)
+        else:
+            raise FileNotFoundError("No inversion data found")
+    elif data:
+        prepared_data = invert_psi_dict(
+            data, data_mapping, t_AT, t_EQ, t_EX, num_coeffs
         )
-        prepared_data["ori_inversion_results"] = ori_matrix
-        prepared_data["inversion_results"] = matrix
-        prepared_data["ori_dhat_data"]["t"] = t_1
-        prepared_data["dhat_data"]["t"] = t_2
-        prepared_data["parameters_list"] = parameters_list
-
-        try:
-            os.remove(save_path)
-        except FileNotFoundError:
-            logging.info("No inversion results found. Creating new file.")
-        with open(save_path, "wb") as pkl_file:
-            pickle.dump(prepared_data, pkl_file)
-
-    elif os.path.exists(save_path):
-        _, fname = os.path.split(save_path)
-        logging.info(f"Loading from save file: {fname}")
-        with open(save_path, "rb") as pkl_file:
-            prepared_data = pickle.load(pkl_file)
-    else:
-        FileNotFoundError("No Inversion data found")
 
     # Add other components to results dictionary (in case EW or NS data was given.)
     directions = ["U"]
@@ -127,6 +99,68 @@
     return ref_data
 
 
+def invert_psi_dict(
+    data: dict,
+    data_mapping: dict,
+    t_AT: list,
+    t_EQ: list,
+    t_EX: list,
+    num_coeffs: int,
+) -> dict:
+    """_summary_
+
+    :param data: Data dictionary according to u4py standard (e.g. read from h5).
+    :type data: dict
+    :param data_mapping: Dictionary for mapping the data sets to the appropriate directions.
+    :type data_mapping: dict
+    :param t_AT: A list with times of known antenna offsets
+    :type t_AT: list
+    :param t_EQ: A list with times of known earthquakes
+    :type t_EQ: list
+    :param t_EX: A list with times of other known events
+    :type t_EX: list
+    :param num_coeffs: The number of parameters to use for inversion
+    :type num_coeffs: int
+    :return: The unformatted inversion results
+    :rtype: dict
+    """
+    prepared_data = u4invert.reformat_dict(data, data_mapping=data_mapping)
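+    # First inversion pass over the full time series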
+    (
+        ori_matrix,
+        prepared_data,
+        t_1,
+        parameters_list,
+    ) = u4invert.invert_time_series(
+        prepared_data,
+        t_AT=t_AT,
+        t_EQ=t_EQ,
+        t_EX=t_EX,
+        num_coeffs=num_coeffs,
+    )
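+    # Remove outliers found in the first fit and invert again without them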
+    ind = u4invert.remove_outliers(prepared_data["ori_dhat_data"], threshold=2)
+    (
+        matrix,
+        prepared_data,
+        t_2,
+        parameters_list,
+    ) = u4invert.invert_time_series(
+        prepared_data,
+        t_AT=t_AT,
+        t_EQ=t_EQ,
+        t_EX=t_EX,
+        num_coeffs=num_coeffs,
+        ind=ind,
+    )
+    prepared_data["ori_inversion_results"] = ori_matrix
+    prepared_data["inversion_results"] = matrix
+    prepared_data["ori_dhat_data"]["t"] = t_1
+    prepared_data["dhat_data"]["t"] = t_2
+    prepared_data["parameters_list"] = parameters_list
+    return prepared_data
+
+
 def inversion_map_worker(data: dict) -> Tuple[Tuple, Tuple]:
     """Worker function to map the inversion of a dictionary with parallel processing.
 
@@ -240,7 +270,7 @@ def extraction_worker(data: dict, ii: int) -> dict:
     return extract
 
 
-def get_results(
+def get_results_gpkg_in_roi(
     name: str,
     dataset: str,
     roi: gp.GeoDataFrame,
@@ -248,7 +278,7 @@ def get_results(
     direction_paths: list[Tuple[os.PathLike, str]],
     overwrite: bool,
     min_psi: int = 0,
-):
+) -> dict:
     """Loads the results of a inversion with two directions.
 
     :param name: The name of the region of interest (for saving the intermediate results.)
@@ -271,9 +301,6 @@ def get_results(
     else:
         gpkg_crs = ""
     psi_save_path = os.path.join(processing_path, f"{dataset}_{name}.pkl")
-    inv_save_path = os.path.join(
-        processing_path, f"{dataset}_{name}_invres.pkl"
-    )
     roi_gdf = gp.GeoDataFrame(geometry=[roi], crs="EPSG:32632")
     results = []
     if not os.path.exists(psi_save_path) or overwrite:
@@ -297,7 +324,7 @@ def get_results(
             ):
                 data_v["timeseries_ew"] = data_ew["timeseries"]
                 logging.info("Inverting both components")
-                results = invert_psi_dict(
+                results = get_psi_dict_inversion(
                     data_v,
                     save_path=psi_save_path,
                     data_mapping={
@@ -308,7 +335,7 @@ def get_results(
                     overwrite=overwrite,
                 )
     else:
-        results = invert_psi_dict(
+        results = get_psi_dict_inversion(
             save_path=psi_save_path,
             data_mapping={
                 "dataE": "timeseries_ew",
@@ -319,6 +346,11 @@
     return results
 
 
+def parallel_get_results_gpkg_in_roi(args: Iterable) -> dict:
+    """Parallel wrapper for `get_results_gpkg_in_roi`"""
+    return get_results_gpkg_in_roi(*args)
+
+
 def extract_profile(
     name: str,
     dataset: str,
@@ -326,7 +357,7 @@ def extract_profile(
     processing_path: os.PathLike,
     direction_paths: list[Tuple[os.PathLike, str]],
     overwrite: bool = False,
-):
+) -> dict:
     """Extracts values along a profile in the roi.
 
     :param name: The name of the region of interest (for saving the intermediate results.)
@@ -395,3 +426,57 @@
         with open(psi_save_path, "rb") as pklfile:
             results = pickle.load(pklfile)
     return results
+
+
+def get_results_gpkg_single(
+    dataset: str,
+    pid: str,
+    direction_paths: list[Tuple[os.PathLike, str]],
+) -> dict:
+    """Loads the results of a inversion with two directions.
+
+    :param dataset: The name of the dataset (e.g. "BBD", "EGMS_1", "EGMS_2")
+    :type dataset: str
+    :param pid: The unique identifier of the scatterer.
+    :type pid: str
+    :param direction_paths: The path to the psi data and the table name as a list of (path, table_name) Tuples
+    :type direction_paths: List[Tuple[os.PathLike, str]]
+    :return: The inversion results for the given scatterer.
+    :rtype: dict
+    """
+    if "EGMS" in dataset:
+        pid_name = "pid"
+    else:
+        pid_name = "ID"
+
+    logging.info("Getting vertical results")
+    data_v = u4gpkg.load_gpkg_data_where(
+        direction_paths[0][0],
+        direction_paths[0][1],
+        where=f"{pid_name}='{pid}'",
+    )
+    logging.info("Getting E-W results")
+    data_ew = u4gpkg.load_gpkg_data_where(
+        direction_paths[1][0],
+        direction_paths[1][1],
+        where=f"{pid_name}='{pid}'",
+    )
+    results = {}
+    if data_v and data_ew:
+        data_v["timeseries_ew"] = data_ew["timeseries"]
+        logging.info("Inverting both components")
+        results = get_psi_dict_inversion(
+            data_v,
+            data_mapping={
+                "dataE": "timeseries_ew",
+                "dataN": "timeseries",
+                "dataU": "timeseries",
+            },
+        )
+    return results
+
+
+def parallel_get_results_gpkg_single(args: Iterable) -> dict:
+    """Parallel wrapper for `get_results_gpkg_single`"""
+    return get_results_gpkg_single(*args)
diff --git a/u4py/io/gpkg.py b/u4py/io/gpkg.py
index 5f2a90ce02b420d24b7e40a3aabfede7bcd5201f..cbaa5ff649dff8d23ac386d9edd542b261627cda 100644
--- a/u4py/io/gpkg.py
+++ b/u4py/io/gpkg.py
@@ -244,7 +244,7 @@ def load_gpkg_data_region(
     :type gpkg_crs: str, optional
     :param region_crs: The CRS of the region used for clipping, defaults to "EPSG:32632". Only has an effect if the region is not a GeoDataFrame.
     :type region_crs: str, optional
-    :return: The points from `psi_file_path` in a `radius` round `point`.
+    :return: The points from `psi_file_path` in the `region`.
     :rtype: dict
     """
     if not table:
@@ -288,6 +288,35 @@ def load_gpkg_data_region(
     return clipped_data
 
 
+def load_gpkg_data_where(
+    gpkg_file_path: os.PathLike, table: str = "", where: str = ""
+) -> dict:
+    """Selects data from a gpkg file where the given SQL statement is true.
+
+    :param gpkg_file_path: The path to the gpkg file.
+    :type gpkg_file_path: os.PathLike
+    :param table: The table where to look for data, defaults to ""
+    :type table: str, optional
+    :param where: The SQL query for filtering, defaults to ""
+    :type where: str, optional
+    :return: The points from `gpkg_file_path` matching the `where` clause.
+    :rtype: dict
+    """
+    if not table:
+        tables = u4sql.get_table_names(gpkg_file_path)
+        if len(tables) < 1:
+            raise ValueError("File does not contain any tables")
+        elif len(tables) == 1:
+            table = tables[0]
+        else:
+            raise ValueError(
+                f"File contains several tables, please specify appropriate table from: {tables}"
+            )
+
+    data = u4sql.table_to_dict(gpkg_file_path, table, where=where)
+    return data
+
+
 def load_gpkg_data_region_ogr(
     region: gp.GeoDataFrame | gp.GeoSeries | shapely.Polygon,
     gpkg_file_path: os.PathLike,
diff --git a/u4py/io/sql.py b/u4py/io/sql.py
index f24a8901f80d7985e6e4b14f2678455d890a91b0..8fe914d2876f265bbf6b578c868876f03f634039 100644
--- a/u4py/io/sql.py
+++ b/u4py/io/sql.py
@@ -143,6 +143,7 @@ def table_to_dict(
     bounds: Tuple = (),
     get_timeseries: bool = True,
     recalculate_stats: bool = False,
+    where: str = "",
 ) -> dict:
     """Opens the given sql database and gets all content of the given table.
 
@@ -156,6 +157,8 @@ def table_to_dict(
     :type get_timeseries: bool
     :param recalculate_stats: Whether to recalculate the mean and variance for the timeseries.
     :type recalculate_stats: bool
+    :param where: An additional SQL WHERE clause used to filter the extraction
+    :type where: str
     :return: The content of the table.
     :rtype: dict
 
@@ -172,7 +175,6 @@ def table_to_dict(
     con = sqlite3.connect(file_path)
     cur = con.cursor()
     info = read_info(cur, table)
-    info["num_points"] = num_points
 
     # Getting column names for coordinates
     # (BBD: X,Y,Z; EGMS: easting, northing, height)
@@ -187,15 +189,16 @@ def table_to_dict(
         z_str = "height"
 
     if len(bounds) > 0:
+        where_bounds = ""
         if isinstance(bounds, tuple) or isinstance(bounds, list):
-            where = (
+            where_bounds = (
                 f"{x_str} > {bounds[0]} AND "
                 + f"{x_str} < {bounds[2]} AND "
                 + f"{y_str} > {bounds[1]} AND "
                 + f"{y_str} < {bounds[3]} "
             )
         elif isinstance(bounds, gp.pd.DataFrame):
-            where = (
+            where_bounds = (
                 f"{x_str} > {bounds.minx.values[0]} AND "
                 + f"{x_str} < {bounds.maxx.values[0]} AND "
                 + f"{y_str} > {bounds.miny.values[0]} AND "
@@ -203,8 +206,11 @@
             )
         else:
             TypeError("Bounds of invalid type.")
-    else:
-        where = ""
+        if where_bounds and where:
+            where += " AND " + where_bounds
+        elif where_bounds:
+            where = where_bounds
 
     logging.info("Querying coordinates and keys")
     xx, yy, zz, ps_id = multi_col_select(
@@ -225,7 +230,6 @@ def table_to_dict(
             cur, [mv_key, var_mv_key], table, where
         )
     con.close()
-
     if info["has_time"] and get_timeseries:
         output = {
             "x": np.array(xx),
@@ -236,7 +240,7 @@ def table_to_dict(
             "timeseries": timeseries,
             "mean_vel": mean_vel,
             "var_mean_vel": var_mean_vel,
-            "num_points": info["num_points"],
+            "num_points": len(ps_id),
         }
     else:
         output = {
@@ -246,7 +250,7 @@ def table_to_dict(
             "ps_id": np.array(ps_id),
             "mean_vel": np.array(mean_vel),
             "var_mean_vel": np.array(var_mean_vel),
-            "num_points": info["num_points"],
+            "num_points": len(ps_id),
         }
 
     return output
@@ -652,9 +656,19 @@
     cur = con.cursor()
     info = read_info(cur, direction)
     # Get Coordinates
-    xx = np.array(select(cur, "X", direction))
-    yy = np.array(select(cur, "Y", direction))
-    zz = np.array(select(cur, "Z", direction))
+    try:
+        xx = np.array(select(cur, "X", direction))
+    except sqlite3.OperationalError:
+        xx = np.array(select(cur, "easting", direction))
+    try:
+        yy = np.array(select(cur, "Y", direction))
+    except sqlite3.OperationalError:
+        yy = np.array(select(cur, "northing", direction))
+    try:
+        zz = np.array(select(cur, "Z", direction))
+    except sqlite3.OperationalError:
+        zz = np.array(select(cur, "height", direction))
 
     if info["has_time"]:
         time, queries = gen_timeseries_queries(file_path, direction, info)
diff --git a/u4py/plotting/plots.py b/u4py/plotting/plots.py
index 2c83a321cb2b7dfc953a9282081f02641c50b6ed..818e4d326e1f9565d74e913c88ac8e53c9d71c00 100644
--- a/u4py/plotting/plots.py
+++ b/u4py/plotting/plots.py
@@ -1556,7 +1556,7 @@ def timeseries_map(
         f"grp_{row[1].group:05}_regional",
         os.path.join(psi_path, "hessen_l3_clipped.gpkg"),
     )
-    results = u4proc.invert_psi_dict(
+    results = u4proc.get_psi_dict_inversion(
         psi_local,
         save_path=os.path.join(psi_data_path, f"grp_{row[1].group:05}.pkl"),
     )
diff --git a/u4py/scripts/data_analysis/Batch_Invert_GPKG.py b/u4py/scripts/data_analysis/Batch_Invert_GPKG.py
index 4124a30ce3df34985b710efc48e22a159480e835..ad98ca60a4552996439008454fc1c96c656ec476 100644
--- a/u4py/scripts/data_analysis/Batch_Invert_GPKG.py
+++ b/u4py/scripts/data_analysis/Batch_Invert_GPKG.py
@@ -2,55 +2,166 @@
 Inverts all timeseries in a GPKG file and outputs the results as a pkl file.
 """
 
+import configparser
+import logging
 import os
 import pickle
+from pathlib import Path
+
+import geopandas as gp
+import numpy as np
+from tqdm import tqdm
 
 import u4py.analysis.processing as u4proc
+import u4py.analysis.spatial as u4spatial
 import u4py.io.sql as u4sql
+import u4py.utils.cmd_args as u4args
 import u4py.utils.projects as u4proj
 
 
 def main():
-    overwrite_intermediate = False
-    overwrite_results = False
+    args = u4args.load()
+    if args.input:
+        proj_path = args.input
+    else:
+        proj_path = Path(
+            "~/Documents/umwelt4/BatchInvert_EGMS_2015-2021.u4project"
+        ).expanduser()
+    overwrite = args.overwrite
+
     project = u4proj.get_project(
-        required=["base_path", "psi_path", "output_path"]
+        required=[
+            "base_path",
+            "psi_path",
+            "psivert_path",
+            "psiew_path",
+            "output_path",
+        ],
+        proj_path=proj_path,
     )
+    merged_data = merge_data(project)
 
-    # Set Paths
-    input_file = os.path.join(
-        project["paths"]["psi_path"], "hessen_l3_clipped.gpkg"
-    )
-    intermediate_file = os.path.join(
-        project["paths"]["output_path"], "extracted_hessen_l3_data.pkl"
-    )
-    output_file = os.path.join(
-        project["paths"]["output_path"], "all_inversion_results.pkl"
+    # Gridded points
+    logging.info("Starting gridding analysis")
+    raster_sizes = [250, 1000, 2500]
+    bounds = u4sql.get_bounds(project["paths"]["psivert_path"])
+    crs = u4sql.get_crs(project["paths"]["psivert_path"])
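+    # Reproject the dataset bounds to a metric CRS for gridding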
+    bounds = (
+        gp.GeoDataFrame(
+            geometry=[u4spatial.bounds_to_polygon(bounds[0])], crs=crs[0]
+        )
+        .to_crs("EPSG:3035")
+        .bounds
     )
 
-    # Loads data from intermediate storage
-    if os.path.exists(intermediate_file) and not overwrite_intermediate:
-        with open(intermediate_file, "rb") as pkl_file:
-            data = pickle.load(pkl_file)
-    else:
-        data = u4sql.load_tables(input_file)
-        with open(intermediate_file, "wb") as pkl_file:
-            pickle.dump(data, pkl_file)
+    for raster_size in raster_sizes:
+        output_path = (
+            project["paths"]["psi_path"] + f"_{raster_size}m_inv_results.pkl"
+        )
+        if not os.path.exists(output_path) or overwrite:
+            logging.info(f"Subdivision size {raster_size}")
 
-    # Start processing
-    if not os.path.exists(output_file) or overwrite_results:
-        extracts = u4proc.get_extracts(data)
+            # Get x and y extent for rastering, including the upper edge
+            minx = np.floor(bounds.minx[0] / raster_size) * raster_size
+            maxx = np.ceil(bounds.maxx[0] / raster_size) * raster_size
+            x_rng = np.arange(minx, maxx + raster_size, raster_size)
+            miny = np.floor(bounds.miny[0] / raster_size) * raster_size
+            maxy = np.ceil(bounds.maxy[0] / raster_size) * raster_size
+            y_rng = np.arange(miny, maxy + raster_size, raster_size)
 
-        # Parallel processing
-        results = u4proc.batch_mapping(
-            extracts, u4proc.inversion_map_worker, "Inverting Extracts"
-        )
+            # Get indices where to put data for each point
+            ii_x = [
+                np.argwhere(x_rng >= x)[0, 0]
+                for x in tqdm(
+                    merged_data["vertikal"]["x"],
+                    desc="Getting x indices",
+                    leave=False,
+                )
+            ]
+            ii_y = [
+                np.argwhere(y_rng >= y)[0, 0]
+                for y in tqdm(
+                    merged_data["vertikal"]["y"],
+                    desc="Getting y indices",
+                    leave=False,
+                )
+            ]
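+            # Pair the cell indices per point and find the occupied grid cells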
+            indices = np.array([(ix, iy) for ix, iy in zip(ii_x, ii_y)])
+            uqidc = np.unique(indices, axis=0)
+            extracts = [
+                {
+                    "x": x_rng[uq_x],
+                    "y": y_rng[uq_y],
+                    "time": merged_data["vertikal"]["time"],
+                    "ps_id": [],
+                    "timeseries_v": [],
+                    "timeseries_ew": [],
+                }
+                for uq_x, uq_y in tqdm(
+                    uqidc,
+                    desc="Preallocating extracts",
+                    leave=False,
+                )
+            ]
+            for ii, (tsv, tsew, psid) in tqdm(
+                enumerate(
+                    zip(
+                        merged_data["vertikal"]["timeseries"],
+                        merged_data["Ost_West"]["timeseries"],
+                        merged_data["vertikal"]["ps_id"],
+                    )
+                ),
+                desc="Getting extracts",
+                leave=False,
+                total=len(merged_data["vertikal"]["x"]),
+            ):
+                uqiis = np.nonzero(np.all(uqidc == indices[ii], axis=1))[0]
+                if len(uqiis) > 1:
+                    logging.info("Error, to many indices found?")
+                else:
+                    extracts[uqiis[0]]["timeseries_v"].append(tsv)
+                    extracts[uqiis[0]]["timeseries_ew"].append(tsew)
+                    extracts[uqiis[0]]["ps_id"].append(psid)
+            invert_extracts(extracts, output_path)
+
+
+def merge_data(project: configparser.ConfigParser) -> dict:
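+    """Loads the vertical and east-west tables and sorts both by scatterer ID."""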
+    # Set Paths
+    table_v = os.path.splitext(
+        os.path.split(project["paths"]["psivert_path"])[-1]
+    )[0]
+    table_ew = os.path.splitext(
+        os.path.split(project["paths"]["psiew_path"])[-1]
+    )[0]
+    # Load Data
+    logging.info("Loading vertical data")
+    data_v = u4sql.table_to_dict(project["paths"]["psivert_path"], table_v)
+    logging.info("Loading east-west data")
+    data_ew = u4sql.table_to_dict(project["paths"]["psiew_path"], table_ew)
 
-        # Single processing
-        # results = [u4proc.inversion_map_worker(ext) for ext in tqdm(extracts)]
+    logging.info("Creating sort indices")
+    sidx_v = np.argsort(data_v["ps_id"])
+    sidx_ew = np.argsort(data_ew["ps_id"])
+    logging.info("Sorting data")
+    to_sort = ["x", "y", "z", "ps_id", "timeseries"]
+    for kk in to_sort:
+        data_v[kk] = data_v[kk][sidx_v]
+        data_ew[kk] = data_ew[kk][sidx_ew]
+
+    return {"vertikal": data_v, "Ost_West": data_ew}
+
+
+def invert_extracts(extracts: list[dict], output_path: os.PathLike):
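+    """Inverts the extracted time series in parallel and pickles the results."""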
+    results = u4proc.batch_mapping(
+        extracts, u4proc.inversion_map_worker, "Inverting Data"
+    )
 
-        with open(output_file, "wb") as pkl_file:
-            pickle.dump(results, pkl_file)
+    with open(output_path, "wb") as pkl_file:
+        pickle.dump(results, pkl_file)
 
 
 if __name__ == "__main__":
diff --git a/u4py/scripts/data_analysis/Convert_Inversion_Results_TIFF.py b/u4py/scripts/data_analysis/Convert_Inversion_Results_TIFF.py
index c718f28f89b22b1c1810736f9cc2a92a2eb5712c..d91bac255b5a957eef8e5c69d422562859002cad 100644
--- a/u4py/scripts/data_analysis/Convert_Inversion_Results_TIFF.py
+++ b/u4py/scripts/data_analysis/Convert_Inversion_Results_TIFF.py
@@ -36,7 +36,9 @@ def main():
             "output_path",
         ],
         interactive=False,
+        proj_path="/home/rudolf/Documents/umwelt4/Pkl_to_Tiff_BBD2023.u4project",
     )
+    chunk_size = 50
 
     logging.info("Setting up paths")
     is_normal = False
@@ -53,12 +55,12 @@ def main():
             project["paths"]["results_path"]
         )
         converted_data, chunk_size = u4plotprep.convert_results_for_grid(
-            data[0], chunk_size=50
+            data[0], chunk_size=chunk_size
         )
     else:
         data = u4files.get_all_pickle_data(project["paths"]["results_path"])
         converted_data, chunk_size = u4plotprep.convert_results_for_grid(
-            data[1], chunk_size=50
+            data[1], chunk_size=chunk_size
         )
 
     logging.info("Creating numpy arrays from data")
diff --git a/u4py/scripts/examples/correlation_external_data/EQ_Timeseries.py b/u4py/scripts/examples/correlation_external_data/EQ_Timeseries.py
index f510575caeae2005a23d1e08202b7bc79c0ee82d..dc11f419c1adb8ce6d28d27d02fccf2e09e859ab 100644
--- a/u4py/scripts/examples/correlation_external_data/EQ_Timeseries.py
+++ b/u4py/scripts/examples/correlation_external_data/EQ_Timeseries.py
@@ -65,7 +65,7 @@ def main():
         inversion_path = os.path.join(
             project["paths"]["processing_path"], f"{eqid}_{eqloc}_{eqtime}.pkl"
         )
-        results = u4proc.invert_psi_dict(data, save_path=inversion_path)
+        results = u4proc.get_psi_dict_inversion(data, save_path=inversion_path)
         u4ax.plot_timeseries_fit(ax=axes[ii + 1], results=results)
         axes[ii + 1].annotate(
             f"$M_L${eqmag}, {eqloc} ({eqdep} km)",
diff --git a/u4py/scripts/examples/regional_case_studies/Cross_Correlation_Crumstadt.py b/u4py/scripts/examples/regional_case_studies/Cross_Correlation_Crumstadt.py
index 4514d99a5563f45eed0528ba7326e6336e87abe2..6ce9423066d1e2eb72e8669d46be7e469dd86f09 100644
--- a/u4py/scripts/examples/regional_case_studies/Cross_Correlation_Crumstadt.py
+++ b/u4py/scripts/examples/regional_case_studies/Cross_Correlation_Crumstadt.py
@@ -80,7 +80,7 @@ def main():
         psi_path,
         overwrite=overwrite,
     )
-    res_well = u4proc.invert_psi_dict(
+    res_well = u4proc.get_psi_dict_inversion(
         data_well,
         save_path=os.path.join(
             project["paths"]["processing_path"], "GasStorage.pkl"
@@ -97,7 +97,7 @@ def main():
     data_crum, region_osm = u4psi.get_osm_data(
         query, psi_path, overwrite=overwrite
     )
-    res_crum = u4proc.invert_psi_dict(
+    res_crum = u4proc.get_psi_dict_inversion(
         data_crum,
         save_path=os.path.join(
             project["paths"]["processing_path"], "Crumstadt.pkl"
@@ -133,7 +133,7 @@ def main():
     temp_ref = u4invert.reformat_simple_timeseries(
         np.array(temp_data["time"]), y_mean
     )
-    res_temp = u4proc.invert_psi_dict(
+    res_temp = u4proc.get_psi_dict_inversion(
         temp_ref,
         save_path=os.path.join(
             project["paths"]["processing_path"], "Temperature.pkl"
@@ -142,7 +142,7 @@ def main():
     )
 
     gas_ref = u4invert.reformat_simple_timeseries(date_gas, level_gas)
-    res_gas = u4proc.invert_psi_dict(
+    res_gas = u4proc.get_psi_dict_inversion(
         gas_ref,
         save_path=os.path.join(
             project["paths"]["processing_path"], "Gas_Reform.pkl"
diff --git a/u4py/scripts/examples/regional_case_studies/Crumstadt_Seasonal.py b/u4py/scripts/examples/regional_case_studies/Crumstadt_Seasonal.py
index 48645dfd67774bdc51b33446406d8fd7d267de75..2ca9d04a253b39c6a6f02d81331bce0ddd448123 100644
--- a/u4py/scripts/examples/regional_case_studies/Crumstadt_Seasonal.py
+++ b/u4py/scripts/examples/regional_case_studies/Crumstadt_Seasonal.py
@@ -209,7 +209,7 @@ def add_timeseries(
     :param inversion_path: The path to the inversion data file for this region, defaults to ""
     :type inversion_path: os.PathLike, optional
     """
-    results = u4proc.invert_psi_dict(
+    results = u4proc.get_psi_dict_inversion(
         data, save_path=inversion_path, overwrite=overwrite
     )
     u4plotprep.print_inversion_results_for_publications(results)
diff --git a/u4py/scripts/examples/regional_case_studies/Gas_Storage_Analysis.py b/u4py/scripts/examples/regional_case_studies/Gas_Storage_Analysis.py
index ff97f6b176496521eef7849377dd2a8576ebf747..77d9570ba1ec8038d084cb53fad9eab9f3df4b8d 100644
--- a/u4py/scripts/examples/regional_case_studies/Gas_Storage_Analysis.py
+++ b/u4py/scripts/examples/regional_case_studies/Gas_Storage_Analysis.py
@@ -1,4 +1,4 @@
-""" Shows possible correlation of surface motion with gas storage activity """
+"""Shows possible correlation of surface motion with gas storage activity"""
 
 import os
 
@@ -38,7 +38,7 @@ def main():
     data_well = u4psi.get_point_data(
         (464350, 5516100), 500, "OilWell", gpkg_path
     )
-    results = u4proc.invert_psi_dict(
+    results = u4proc.get_psi_dict_inversion(
         data_well,
         save_path=os.path.join(
             project["paths"]["processing_path"], "GasStorage.pkl"
diff --git a/u4py/scripts/examples/regional_case_studies/JURSE_FFM.py b/u4py/scripts/examples/regional_case_studies/JURSE_FFM.py
index bbcd81f4a39991a1a55ec620057ab83c9287657d..14cece4083387bce21aefd96d4807a39f76e2a6d 100644
--- a/u4py/scripts/examples/regional_case_studies/JURSE_FFM.py
+++ b/u4py/scripts/examples/regional_case_studies/JURSE_FFM.py
@@ -1,4 +1,4 @@
-""" Impact of construction activity on PSI motion in Frankfurt a.M."""
+"""Impact of construction activity on PSI motion in Frankfurt a.M."""
 
 import os
 import warnings
@@ -91,7 +91,7 @@ def main():
 
         # Timeseries Fit and Residuals
         t_EX = ext_times[sel_name]
-        results = u4proc.invert_psi_dict(
+        results = u4proc.get_psi_dict_inversion(
             sel_data,
             save_path=os.path.join(
                 project["paths"]["processing_path"],
diff --git a/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py b/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py
index 619f00ac2289338dd7fd4b96a69edb4313d63de2..fdbb3de499f35acd5f9299d4017ff046fc1575da 100644
--- a/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py
+++ b/u4py/scripts/examples/regional_case_studies/NeoTect_URG_PSI.py
@@ -70,7 +70,7 @@ def main():
         fig, axes = plt.subplots(
             figsize=(9, 5), nrows=2, sharex=True, sharey=True
         )
-        results = u4proc.get_results(
+        results = u4proc.get_results_gpkg_in_roi(
             name,
             "BBD",
             roi,
@@ -81,7 +81,7 @@ def main():
         if results:
             make_plot(axes, results, color="C0")
             results_log.extend(fit_results("BBD", results))
-        results = u4proc.get_results(
+        results = u4proc.get_results_gpkg_in_roi(
             name,
             "EGMS_1",
             roi,
@@ -95,7 +95,7 @@ def main():
         if results:
             make_plot(axes, results, color="C1")
             results_log.extend(fit_results("EGMS_1", results))
-        results = u4proc.get_results(
+        results = u4proc.get_results_gpkg_in_roi(
             name,
             "EGMS_2",
             roi,
diff --git a/u4py/scripts/examples/regional_case_studies/U5_Frankfurt.py b/u4py/scripts/examples/regional_case_studies/U5_Frankfurt.py
index b3d60a30bbeb15821277224690ffe0025714875a..a8c150b8b71ce84aec2dc12090afd48980bcb65a 100644
--- a/u4py/scripts/examples/regional_case_studies/U5_Frankfurt.py
+++ b/u4py/scripts/examples/regional_case_studies/U5_Frankfurt.py
@@ -130,7 +130,7 @@ def main():
 
         # Timeseries Fit and Residuals
         t_EX = ext_times[sel_name]
-        results = u4proc.invert_psi_dict(
+        results = u4proc.get_psi_dict_inversion(
             sel_data,
             save_path=os.path.join(
                 project["paths"]["processing_path"],
diff --git a/u4py/scripts/gis_workflows/Plot_Inversion_Timeseries_ROIs.py b/u4py/scripts/gis_workflows/Plot_Inversion_Timeseries_ROIs.py
index 6e187b855a8f7b9bc43453b9a58a16c41c402202..6b7db7a9a37392bd90c58ccc7234dd0ccdff91d6 100644
--- a/u4py/scripts/gis_workflows/Plot_Inversion_Timeseries_ROIs.py
+++ b/u4py/scripts/gis_workflows/Plot_Inversion_Timeseries_ROIs.py
@@ -41,7 +41,9 @@ def main():
             project["paths"]["processing_path"], f"{name}.pkl"
         )
         if data:
-            results = u4proc.invert_psi_dict(data, save_path=inv_save_path)
+            results = u4proc.get_psi_dict_inversion(
+                data, save_path=inv_save_path
+            )
             fig, ax = plt.subplots(figsize=(9, 5))
             u4ax.plot_timeseries_fit(ax=ax, results=results)
             ax.set_title(name)
diff --git a/u4py/utils/cmd_args.py b/u4py/utils/cmd_args.py
index 4f0522b69ca4ce95603372d5fd6addadbf9eec06..933cbf05e5d115fae490c6aca3af8d787c1cd39e 100644
--- a/u4py/utils/cmd_args.py
+++ b/u4py/utils/cmd_args.py
@@ -38,6 +38,7 @@ def load(module_descript: str = "No description given."):
     logging.info(
         f"Set loglevel to {log_level_str(logging.getLogger().level)}."
     )
+    return args
 
 
 def setup_parser(module_descript: str) -> argparse.ArgumentParser:
@@ -56,6 +57,13 @@ def setup_parser(module_descript: str) -> argparse.ArgumentParser:
         metavar="PATH",
         default="",
     )
+    parser.add_argument(
+        "-ov",
+        "--overwrite",
+        help="Overwrite results and intermediate files.",
+        default=False,
+        action="store_true",
+    )
     parser.add_argument(
         "-c",
         "--cpus",