diff --git a/pixart_ros/CMakeLists.txt b/pixart_ros/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..9a7a50f65abee751abc4e82454e18a6614fcd214
--- /dev/null
+++ b/pixart_ros/CMakeLists.txt
@@ -0,0 +1,234 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(pixart_ros)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+  roscpp
+  rospy
+  std_msgs
+  message_generation
+)
+
+add_service_files(
+  DIRECTORY srv
+  FILES
+  PAA5102_service.srv
+)
+
+
+generate_messages(
+  DEPENDENCIES
+  std_msgs
+)
+
+catkin_package(CATKIN_DEPENDS message_runtime)
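+# Note: catkin_package() may only be called once per project, so the commented
+# template call further below is left disabled.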
+
+catkin_install_python(PROGRAMS
+  scripts/pixart_H_publisher.py
+  scripts/pixart_V_publisher.py
+  scripts/test/PAA5102_server.py
+  scripts/test/PAA5102_client.py
+  scripts/test/PAA5102_publisher.py
+  scripts/test/PAA5102_subscriber.py
+  DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
+
+
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+##   your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+##   * add a build_depend tag for "message_generation"
+##   * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+##   * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+##     but can be declared for certainty nonetheless:
+##     * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+##   * add "message_generation" and every package in MSG_DEP_SET to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * add "message_runtime" and every package in MSG_DEP_SET to
+##     catkin_package(CATKIN_DEPENDS ...)
+##   * uncomment the add_*_files sections below as needed
+##     and list every .msg/.srv/.action file to be processed
+##   * uncomment the generate_messages entry below
+##   * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+#   FILES
+#   Message1.msg
+#   Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+#   FILES
+#   Service1.srv
+#   Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+#   FILES
+#   Action1.action
+#   Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+#   DEPENDENCIES
+#   std_msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+##   * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+##   * add "dynamic_reconfigure" to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * uncomment the "generate_dynamic_reconfigure_options" section below
+##     and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+#   cfg/DynReconf1.cfg
+#   cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+# catkin_package(
+#  INCLUDE_DIRS include
+#  LIBRARIES orientation
+#  CATKIN_DEPENDS roscpp rospy std_msgs
+#  DEPENDS system_lib
+# )
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+  ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+#   src/${PROJECT_NAME}/orientation.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/orientation_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+#   ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+#   scripts/my_python_script
+#   DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+#   RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+#   ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+#   DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+#   FILES_MATCHING PATTERN "*.h"
+#   PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+#   # myfile1
+#   # myfile2
+#   DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_orientation.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+#   target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/pixart_ros/package.xml b/pixart_ros/package.xml
new file mode 100644
index 0000000000000000000000000000000000000000..d24dc3436eb83fac278cde6f6f1b2585b1eaeadf
--- /dev/null
+++ b/pixart_ros/package.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>pixart_ros</name>
+  <version>0.1.0</version>
+  <description>Package for reading Pixart optical sensors</description>
+
+
+  <maintainer email="jan.rissom@llt.rwth-aachen.de">Jan Rissom</maintainer>
+  <license>BSD</license>
+
+
+ 
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>roscpp</build_depend>
+  <build_depend>rospy</build_depend>
+  <build_depend>std_msgs</build_depend>
+  <build_depend>message_generation</build_depend>
+  <build_export_depend>roscpp</build_export_depend>
+  <build_export_depend>rospy</build_export_depend>
+  <build_export_depend>std_msgs</build_export_depend>
+  <exec_depend>roscpp</exec_depend>
+  <exec_depend>rospy</exec_depend>
+  <exec_depend>std_msgs</exec_depend>
+  <exec_depend>message_runtime</exec_depend>
+
+  <!-- The export tag contains other, unspecified, tags -->
+  <export>
+    <!-- Other tools can request additional information be placed here -->
+
+  </export>
+</package>
diff --git a/pixart_ros/scripts/pixart_H_publisher.py b/pixart_ros/scripts/pixart_H_publisher.py
new file mode 100644
index 0000000000000000000000000000000000000000..7cb6af9ce39fae3149bcafcc64a9ee1143e1b385
--- /dev/null
+++ b/pixart_ros/scripts/pixart_H_publisher.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python3
+import signal
+import sys
+import time
+import numpy
+import rospy
+import serial
+#from std_msgs.msg import Float64MultiArray
+from sensorfusion.msg import Position
+from typing import Type
+
+values_prior = [0,0]
+#values = Float64MultiArray()
+values_float = [0,0]
+fail = 0
+
+sensor_adr = "/dev/ttyACM0"
+baud_rate = 115200
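+# Note: the /dev/ttyACM* index depends on USB enumeration order; a udev rule with a
+# stable per-sensor symlink is more robust when both sensor bridges are plugged in.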
+
+
+def sensor_initialization() -> None:
+    """Initialize sensor."""
+    global sensor
+    sensor = serial.Serial(sensor_adr, baud_rate, timeout=1)  # open serial port
+    rospy.sleep(1)
+    #time.sleep(1)
+    print(sensor.readline())
+    sensor.write(b"pxi\n")
+    print(sensor.readline())
+    sensor.write(b"ver\n")
+    print(sensor.readline())
+
+    sensor.write(b"xy2uart_on\n")  # activate sensor output
+
+    # Read a couple of lines to clear output
+    #time.sleep(1)
+    rospy.sleep(1)
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print("Sensor initialization done!")
+
+
+def sensor_readout() -> None:
+    """Read sensor data and publish to topic 'pixart_H_values'"""
+    #global values
+    global values_prior
+    global values_float
+    global sensor
+    
+    #rate = rospy.Rate(120)  # 120 Hz
+    while not rospy.is_shutdown():
+        line = sensor.readline().strip().decode()
+        values_string = numpy.array(line.split(";"))
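+        # Assumed calibration: the divisors convert raw counts to the robot's length
+        # unit, and the sign flip aligns the sensor axes with the robot frame.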
+        msg.x = round(float(values_string[0]) / -11.803, 4)
+        msg.y = round(float(values_string[1]) / 11.966, 4)
+        msg.x_var = 0.0  # calculated later by the Kalman filter
+        msg.y_var = 0.0  # calculated later by the Kalman filter
+        msg.header.stamp = rospy.Time.now()
+        msg.header.frame_id = "/pixart_H_frame"
+        pub.publish(msg)
+        #rate.sleep()
+        
+
+
+def exit(*args) -> None:
+    """Close the serial port; used both as SIGINT handler and as rospy shutdown hook."""
+    global sensor
+
+    sensor.close()
+    #print("Sensor closed")
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    rospy.init_node("pixart_H_pub", anonymous=True)
+    pub = rospy.Publisher("pixart_H_pos_cov", Position, queue_size=1)
+    msg = Position()
+    
+    signal.signal(signal.SIGINT, exit)
+    rospy.on_shutdown(exit)
+    sensor_initialization()
+    try:
+        sensor_readout()
+    except rospy.ROSInterruptException:
+        pass
+    
diff --git a/pixart_ros/scripts/pixart_V_publisher.py b/pixart_ros/scripts/pixart_V_publisher.py
new file mode 100644
index 0000000000000000000000000000000000000000..85ffcf99400b0e176335abbce196401bdc765b87
--- /dev/null
+++ b/pixart_ros/scripts/pixart_V_publisher.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python3
+import signal
+import sys
+import time
+import numpy
+import rospy
+import serial
+#from std_msgs.msg import Float64MultiArray
+from sensorfusion.msg import Position
+from typing import Type
+
+values_prior = [0,0]
+#values = Float64MultiArray()
+values_float = [0,0]
+fail = 0
+
+sensor_adr = "/dev/ttyACM1"
+baud_rate = 115200
+
+
+def sensor_initialization() -> None:
+    """Initialize sensor."""
+    global sensor
+    sensor = serial.Serial(sensor_adr, baud_rate, timeout=1)  # open serial port
+    #time.sleep(1)
+    rospy.sleep(1)
+    print(sensor.readline())
+    sensor.write(b"pxi\n")
+    print(sensor.readline())
+    sensor.write(b"ver\n")
+    print(sensor.readline())
+
+    sensor.write(b"xy2uart_on\n")  # activate sensor output
+
+    # Read a couple of lines to clear output
+    #time.sleep(1)
+    rospy.sleep(1)
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print("Sensor initialization done!")
+
+
+def sensor_readout() -> None:
+    """Read sensor data and publish to topic 'pixart_V_values'"""
+    #global values
+    global values_prior
+    global values_float
+    global sensor
+    
+    #rate = rospy.Rate(120)  # 120 Hz
+    while not rospy.is_shutdown():
+        line = sensor.readline().strip().decode()
+        values_string = numpy.array(line.split(";"))
+        msg.x = round(float(values_string[0]) / 11.803, 4)
+        msg.y = round(float(values_string[1]) / -11.966, 4)
+        msg.x_var = 0.0  # calculated later by the Kalman filter
+        msg.y_var = 0.0  # calculated later by the Kalman filter
+        msg.header.stamp = rospy.Time.now()
+        msg.header.frame_id = "/pixart_V_frame"
+        pub.publish(msg)
+        #rate.sleep()
+        
+
+
+def exit(*args) -> None:
+    """Close the serial port; used both as SIGINT handler and as rospy shutdown hook."""
+    global sensor
+
+    sensor.close()
+    #print("Sensor closed")
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    rospy.init_node("pixart_V_pub", anonymous=True)
+    pub = rospy.Publisher("pixart_V_pos_cov", Position, queue_size=1)
+    msg = Position()
+    
+    signal.signal(signal.SIGINT, exit)
+    rospy.on_shutdown(exit)
+    sensor_initialization()
+    try:
+        sensor_readout()
+    except rospy.ROSInterruptException:
+        pass
+    
diff --git a/pixart_ros/scripts/test/PAA5102_client.py b/pixart_ros/scripts/test/PAA5102_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a0a33c43cb386430ef86c54e5f58558d2461c81
--- /dev/null
+++ b/pixart_ros/scripts/test/PAA5102_client.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+from typing import Type
+
+import rospy
+from pixart_ros.srv import PAA5102_service, PAA5102_serviceResponse
+
+
+def start_server() -> None:
+    """Start client and wait for service."""
+    rospy.wait_for_service("sensor_readout")
+
+
+def get_values(call: Type[PAA5102_service]) -> Type[PAA5102_serviceResponse]:
+    """Call service and get values.
+
+    Args:
+        call (Type[PAA5102_service]): Service call
+
+    Returns:
+        The 'values' field of the service response, or None if the call failed.
+    """
+    try:
+        srv = rospy.ServiceProxy("sensor_readout", PAA5102_service)
+        resp1 = srv(call)
+        return resp1.values
+    except rospy.ServiceException as e:
+        print("Service call failed: %s" % e)
+
+
+if __name__ == "__main__":
+    call = 1
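+    # See PAA5102_server.sensor_readout(): a call value of 1 requests one readout of
+    # the raw x/y counts.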
+    start_server()
+    while 1:
+        print(get_values(call))
diff --git a/pixart_ros/scripts/test/PAA5102_publisher.py b/pixart_ros/scripts/test/PAA5102_publisher.py
new file mode 100644
index 0000000000000000000000000000000000000000..016991790bd72f9cb424bb00c0d06abe52fcf76b
--- /dev/null
+++ b/pixart_ros/scripts/test/PAA5102_publisher.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3
+import signal
+import sys
+import time
+import numpy
+import rospy
+import serial
+from std_msgs.msg import Float64MultiArray
+from typing import Type
+
+
+values = Float64MultiArray()
+values_float = [0,0]
+fail = 0
+
+sensor_adr = "/dev/ttyACM0"
+baud_rate = 115200
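+# Standalone test node: publishes the raw counts as Float64MultiArray on the
+# 'PAA5102_values' topic, without the scaling applied by the pixart_*_publisher nodes.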
+
+
+def sensor_initialization() -> None:
+    """Initialize sensor."""
+    global sensor
+    sensor = serial.Serial(sensor_adr, baud_rate, timeout=1)  # open serial port
+    time.sleep(1)
+    print(sensor.readline())
+    sensor.write(b"pxi\n")
+    print(sensor.readline())
+    sensor.write(b"ver\n")
+    print(sensor.readline())
+
+    sensor.write(b"xy2uart_on\n")  # activate sensor output
+
+    # Read a couple of lines to clear output
+    time.sleep(1)
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print("Sensor initialization done!")
+
+
+def sensor_readout() -> None:
+    """Read sensor data and publish to topic 'PAA5102_values'"""
+    global values
+    global values_float
+    global sensor
+    pub = rospy.Publisher("PAA5102_values", Float64MultiArray, queue_size=1)
+    rospy.init_node("PAA5102_pub", anonymous=True)
+    #rate = rospy.Rate(100)  # 100hz
+    while not rospy.is_shutdown():
+        line = sensor.readline().strip().decode()
+        values_string = numpy.array(line.split(";"))
+        try:
+            values_float[0] = float(values_string[0])
+            values_float[1] = float(values_string[1])
+            values.data = values_float
+            print(values.data)
+            pub.publish(values)
+            #rate.sleep()
+
+        except (ValueError, IndexError):
+            print("Line reading error!")
+            values.data = [0, 0]
+            pub.publish(values)
+            #rate.sleep()
+
+
+def exit(signal, frame) -> None:
+    """Exit function that makes sure to close serial port.
+
+    Args:
+        signal (any): dummy
+        frame (any): dummy
+    """
+    sensor.close()
+    print("Sensor closed")
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    signal.signal(signal.SIGINT, exit)
+    sensor_initialization()
+    try:
+        sensor_readout()
+    except rospy.ROSInterruptException:
+        pass
diff --git a/pixart_ros/scripts/test/PAA5102_server.py b/pixart_ros/scripts/test/PAA5102_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa0af7aead690939c10d88bafe0cc034e74fdb61
--- /dev/null
+++ b/pixart_ros/scripts/test/PAA5102_server.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+import signal
+import sys
+import time
+from typing import Type
+
+import numpy
+import rospy
+import serial
+from pixart_ros.srv import PAA5102_service, PAA5102_serviceResponse
+
+vector = [0, 0]
+fail = [0, 0]
+
+
+sensor_adr = "/dev/ttyACM0"     #adjust adress to match serial port
+baud_rate = 115200
+
+
+def sensor_initialization() -> None:
+    """Initialize sensor."""
+    global sensor
+    sensor = serial.Serial(sensor_adr, baud_rate, timeout=1)  # open serial port
+    time.sleep(1)
+    print(sensor.readline())
+    sensor.write(b"pxi\n")
+    print(sensor.readline())
+    sensor.write(b"ver\n")
+    print(sensor.readline())
+
+    sensor.write(b"xy2uart_on\n")  # activate sensor output
+
+    # Read a couple of lines to clear output
+    time.sleep(1)
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print(sensor.readline())
+    print("Sensor initialization done!")
+
+
+def sensor_readout(req: Type[PAA5102_service]) -> Type[PAA5102_serviceResponse]:
+    """Read sensor values.
+
+    Args:
+        req (Type[PAA5102_service]): see PAA5102_service.srv; request object with property 'call'.
+
+    Returns:
+        Type[PAA5102_serviceResponse]: Values at call time.
+    """
+    global vector
+    global sensor
+    global fail
+    if req.call != 1:
+        return PAA5102_serviceResponse(fail)
+    line = sensor.readline().strip().decode()
+    vector_string = numpy.array(line.split(";"))
+    try:
+        vector[0] = float(vector_string[0])
+        vector[1] = float(vector_string[1])
+        return PAA5102_serviceResponse(vector)
+    except (ValueError, IndexError):
+        print("Line reading error!")
+        return PAA5102_serviceResponse(fail)
+
+
+def sensor_readout_server() -> None:
+    """Start up server"""
+    rospy.init_node("PAA5102_service")
+    s = rospy.Service("sensor_readout", PAA5102_service, sensor_readout)
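+    # Each request blocks on a serial readline(), so the service rate is bounded by
+    # the sensor's output rate (readline timeout is 1 s).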
+    rospy.spin()
+
+
+def exit(signal, frame) -> None:
+    """Exit function that makes sure to close serial port.
+
+    Args:
+        signal (any): dummy
+        frame (any): dummy
+    """
+    sensor.close()
+    print("Sensor closed")
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    signal.signal(signal.SIGINT, exit)
+    sensor_initialization()
+    sensor_readout_server()
diff --git a/pixart_ros/scripts/test/PAA5102_subscriber.py b/pixart_ros/scripts/test/PAA5102_subscriber.py
new file mode 100644
index 0000000000000000000000000000000000000000..25012dc2a3d23cad12e4a46674d319565efa12f3
--- /dev/null
+++ b/pixart_ros/scripts/test/PAA5102_subscriber.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+from typing import Type
+from std_msgs.msg import Float64MultiArray
+import rospy
+
+
+def callback(values: Type[Float64MultiArray]) -> None:
+    """Print sensor values.
+
+    Args:
+        values (Type[Float64MultiArray]): Message from the 'PAA5102_values' topic
+    """
+    print(values.data)
+
+
+def listener() -> None:
+    """Initialize node and subscribe to topic 'PAA5102_values'"""
+    rospy.init_node("PAA5102_sub", anonymous=True)
+
+    rospy.Subscriber("PAA5102_values", Float64MultiArray, callback)
+
+    rospy.spin()
+
+
+if __name__ == "__main__":
+    listener()
diff --git a/pixart_ros/srv/PAA5102_service.srv b/pixart_ros/srv/PAA5102_service.srv
new file mode 100644
index 0000000000000000000000000000000000000000..2ef9104745d806b0c1c3d9f6a39317e125b04e90
--- /dev/null
+++ b/pixart_ros/srv/PAA5102_service.srv
@@ -0,0 +1,3 @@
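+# Request: set call to 1 to trigger a single sensor readout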
+int8 call
+---
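+# Response: raw x/y counts read from the sensor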
+float32[2] values
diff --git a/sensorfusion/CMakeLists.txt b/sensorfusion/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7b778b6c780c72435497636f38a39092202daea9
--- /dev/null
+++ b/sensorfusion/CMakeLists.txt
@@ -0,0 +1,222 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(sensorfusion)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+  roscpp
+  rospy
+  std_msgs
+  geometry_msgs
+  sensor_msgs
+  message_generation
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+##   your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+##   * add a build_depend tag for "message_generation"
+##   * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+##   * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+##     but can be declared for certainty nonetheless:
+##     * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+##   * add "message_generation" and every package in MSG_DEP_SET to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * add "message_runtime" and every package in MSG_DEP_SET to
+##     catkin_package(CATKIN_DEPENDS ...)
+##   * uncomment the add_*_files sections below as needed
+##     and list every .msg/.srv/.action file to be processed
+##   * uncomment the generate_messages entry below
+##   * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+ add_message_files(
+   FILES
+   Position.msg
+   SyncPos.msg
+   Motion.msg
+ )
+
+## Generate services in the 'srv' folder
+ add_service_files(
+   FILES
+   Trajectory.srv
+   Measurements.srv
+ )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+#   FILES
+#   Action1.action
+#   Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+ generate_messages(
+   DEPENDENCIES
+   std_msgs
+   geometry_msgs
+   sensor_msgs
+ )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+##   * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+##   * add "dynamic_reconfigure" to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * uncomment the "generate_dynamic_reconfigure_options" section below
+##     and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+#   cfg/DynReconf1.cfg
+#   cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+#  INCLUDE_DIRS include
+#  LIBRARIES sensorfusion
+#  CATKIN_DEPENDS roscpp rospy std_msgs geometry_msgs
+#  DEPENDS system_lib
+ CATKIN_DEPENDS message_runtime
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+  ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+#   src/${PROJECT_NAME}/sensorfusion.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/sensorfusion_node.cpp)
+
+add_executable(lidar_pos src/lidar_pos.cpp)
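+# Build generated message headers before lidar_pos is compiled (only strictly
+# required if lidar_pos.cpp includes messages from this package).
+add_dependencies(lidar_pos ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})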
+
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+#   ${catkin_LIBRARIES}
+# )
+
+ target_link_libraries(lidar_pos
+   ${catkin_LIBRARIES} 
+ )
+
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+#   scripts/my_python_script
+#   DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+#   RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+#   ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+#   DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+#   FILES_MATCHING PATTERN "*.h"
+#   PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+#   # myfile1
+#   # myfile2
+#   DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_sensorfusion.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+#   target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/sensorfusion/csv_eval/evaluation.csv b/sensorfusion/csv_eval/evaluation.csv
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sensorfusion/csv_path/bogen.csv b/sensorfusion/csv_path/bogen.csv
new file mode 100644
index 0000000000000000000000000000000000000000..ff1a7e1e313fba2a11c30a25b7fd3409aa38503c
--- /dev/null
+++ b/sensorfusion/csv_path/bogen.csv
@@ -0,0 +1,4 @@
+0,0
+0,70
+49.497,119.497
+119.497,119.497
\ No newline at end of file
diff --git a/sensorfusion/csv_path/coordinate_system.csv b/sensorfusion/csv_path/coordinate_system.csv
new file mode 100644
index 0000000000000000000000000000000000000000..9c4fd7d0affd58f6061ab7a098ae337ff946aca6
--- /dev/null
+++ b/sensorfusion/csv_path/coordinate_system.csv
@@ -0,0 +1,3 @@
+0,0
+0,30
+30,30
\ No newline at end of file
diff --git a/sensorfusion/csv_path/kf_traj.csv b/sensorfusion/csv_path/kf_traj.csv
new file mode 100644
index 0000000000000000000000000000000000000000..76b76b64125b1e6cf218b3040125b5763fab624e
--- /dev/null
+++ b/sensorfusion/csv_path/kf_traj.csv
@@ -0,0 +1,9 @@
+0,0
+70,0
+0,70
+70,70
+35,130.62
+0,70
+0,0
+70,70
+70,0
\ No newline at end of file
diff --git a/sensorfusion/csv_path/octagon.csv b/sensorfusion/csv_path/octagon.csv
new file mode 100644
index 0000000000000000000000000000000000000000..21357280b9318a73030942c23bd793cac0f55b3f
--- /dev/null
+++ b/sensorfusion/csv_path/octagon.csv
@@ -0,0 +1,9 @@
+0,0
+49.497,49.497
+119.497,49.497
+168.994,0
+168.994,-70
+119.497,-119.497
+49.497,-119.497
+0,-70
+0,0
\ No newline at end of file
diff --git a/sensorfusion/csv_path/path.csv b/sensorfusion/csv_path/path.csv
new file mode 100644
index 0000000000000000000000000000000000000000..c68afccc426508bd9e6c7538e518dcad00fbdc0a
--- /dev/null
+++ b/sensorfusion/csv_path/path.csv
@@ -0,0 +1,101 @@
+0.0,0.0
+3.96,3.96
+7.92,7.92
+11.88,11.88
+15.84,15.84
+19.8,19.8
+23.76,23.76
+27.72,27.72
+31.68,31.68
+35.64,35.64
+39.6,39.6
+43.56,43.56
+47.52,47.52
+52.3,49.5
+57.9,49.5
+63.5,49.5
+69.1,49.5
+74.7,49.5
+80.3,49.5
+85.9,49.5
+91.5,49.5
+97.1,49.5
+102.7,49.5
+108.3,49.5
+113.9,49.5
+119.5,49.5
+123.46,45.54
+127.42,41.58
+131.38,37.62
+135.34,33.66
+139.3,29.7
+143.26,25.74
+147.22,21.78
+151.18,17.82
+155.14,13.86
+159.09,9.9
+163.05,5.94
+167.01,1.98
+168.99,-2.8
+168.99,-8.4
+168.99,-14.0
+168.99,-19.6
+168.99,-25.2
+168.99,-30.8
+168.99,-36.4
+168.99,-42.0
+168.99,-47.6
+168.99,-53.2
+168.99,-58.8
+168.99,-64.4
+168.99,-70.0
+165.03,-73.96
+161.07,-77.92
+157.11,-81.88
+153.15,-85.84
+149.2,-89.8
+145.24,-93.76
+141.28,-97.72
+137.32,-101.68
+133.36,-105.64
+129.4,-109.6
+125.44,-113.56
+121.48,-117.52
+116.7,-119.5
+111.1,-119.5
+105.5,-119.5
+99.9,-119.5
+94.3,-119.5
+88.7,-119.5
+83.1,-119.5
+77.5,-119.5
+71.9,-119.5
+66.3,-119.5
+60.7,-119.5
+55.1,-119.5
+49.5,-119.5
+45.54,-115.54
+41.58,-111.58
+37.62,-107.62
+33.66,-103.66
+29.7,-99.7
+25.74,-95.74
+21.78,-91.78
+17.82,-87.82
+13.86,-83.86
+9.9,-79.9
+5.94,-75.94
+1.98,-71.98
+0.0,-67.2
+0.0,-61.6
+0.0,-56.0
+0.0,-50.4
+0.0,-44.8
+0.0,-39.2
+0.0,-33.6
+0.0,-28.0
+0.0,-22.4
+0.0,-16.8
+0.0,-11.2
+0.0,-5.6
+0.0,0.0
diff --git a/sensorfusion/csv_path/pruefbahn_trajectory.csv b/sensorfusion/csv_path/pruefbahn_trajectory.csv
new file mode 100644
index 0000000000000000000000000000000000000000..3e2ec8656f7d3a408e7d6697404fb97d4241cf73
--- /dev/null
+++ b/sensorfusion/csv_path/pruefbahn_trajectory.csv
@@ -0,0 +1,27 @@
+0,0
+70,70
+140,0
+210,0
+210,70
+280,70
+360,-10
+280,-90
+140,-90
+120,-70 
+140,-50
+160,-70
+140,-90
+90,-90
+90,-110
+70,-110
+70,-90
+50,-90
+50,-110
+30,-110 
+30,-90
+0,-90
+0,-77.5
+75,-57.5
+-30,-25
+0,-12.5
+0,0
\ No newline at end of file
diff --git a/sensorfusion/csv_path/rectangle.csv b/sensorfusion/csv_path/rectangle.csv
new file mode 100644
index 0000000000000000000000000000000000000000..ca5380dbd67262fd887f1c9b48e9654799498516
--- /dev/null
+++ b/sensorfusion/csv_path/rectangle.csv
@@ -0,0 +1,5 @@
+0,0
+-25,25
+0,50
+25,25
+0,0
\ No newline at end of file
diff --git a/sensorfusion/csv_path/test.csv b/sensorfusion/csv_path/test.csv
new file mode 100644
index 0000000000000000000000000000000000000000..e1adfd40f1d8c6e3d3669b26a26b2380587b6b71
--- /dev/null
+++ b/sensorfusion/csv_path/test.csv
@@ -0,0 +1,7 @@
+0,0
+0,50
+0,0
+50,50
+0,0
+50,0
+0,0
\ No newline at end of file
diff --git a/sensorfusion/csv_pos/estimate_pos.csv b/sensorfusion/csv_pos/estimate_pos.csv
new file mode 100644
index 0000000000000000000000000000000000000000..7a138571d2ff16ae2b2c036245f3df49bbd40726
--- /dev/null
+++ b/sensorfusion/csv_pos/estimate_pos.csv
@@ -0,0 +1,100 @@
+0.0,5.6003
+0.944,7.0438
+3.7406,11.7089
+7.4946,16.0896
+14.1913,23.2844
+17.9907,27.8791
+21.5501,32.0003
+28.3328,39.3801
+32.1114,43.6351
+35.7264,48.1127
+39.5887,52.4733
+45.9312,59.7795
+49.2126,63.0181
+49.5882,63.2983
+49.427,63.4596
+58.9626,65.008
+65.5062,66.2249
+72.5818,66.8141
+86.0629,67.9824
+93.9408,68.8661
+101.0797,69.5221
+107.5365,70.1908
+121.0033,71.5144
+128.7473,72.3348
+135.386,73.1694
+142.1475,73.7883
+146.72,73.9753
+150.6526,70.6442
+153.9244,67.5225
+157.8214,63.7738
+164.8213,57.3432
+168.9593,53.7628
+172.8875,50.0548
+176.6051,46.5639
+183.8121,40.05
+188.2698,35.9243
+191.9451,32.544
+195.999,28.4088
+199.0818,26.0802
+199.0948,25.3871
+199.4974,18.0545
+199.663,10.9487
+200.3511,-2.8736
+200.7695,-10.6796
+201.297,-18.2706
+201.4784,-25.6529
+202.197,-39.4689
+202.6929,-47.5378
+202.9808,-55.0487
+203.5328,-68.7357
+204.0035,-76.0249
+204.1777,-76.1916
+201.4003,-80.2129
+194.9752,-87.4435
+191.0937,-91.7554
+187.1989,-96.0128
+183.4845,-99.9488
+176.7609,-107.342
+172.7962,-111.4213
+169.1889,-115.3604
+162.4051,-122.7984
+158.3267,-127.3841
+154.8478,-131.531
+154.326,-131.2512
+152.1892,-132.7297
+145.9404,-133.6856
+138.8231,-134.2465
+131.8907,-134.8988
+118.6534,-135.821
+110.6487,-136.4138
+103.1307,-136.9249
+95.6325,-137.458
+82.2818,-138.2237
+74.4523,-138.6112
+67.3933,-139.0235
+56.5268,-139.4791
+55.2183,-139.7145
+51.5205,-135.8517
+47.9474,-132.1902
+41.3153,-125.9142
+37.3247,-122.093
+33.4938,-118.2875
+26.498,-111.4195
+22.3511,-107.3783
+18.616,-103.763
+14.9543,-100.2014
+7.9533,-93.3964
+4.3577,-90.3348
+4.1188,-89.7913
+3.9789,-89.7379
+3.5932,-79.2363
+3.2134,-71.0503
+2.9079,-63.879
+2.6066,-56.477
+1.9113,-42.5815
+1.7132,-34.2987
+1.357,-26.9017
+0.7181,-12.9295
+0.3114,-4.8554
+-0.0343,2.7014
diff --git a/sensorfusion/csv_pos/hpix_pos.csv b/sensorfusion/csv_pos/hpix_pos.csv
new file mode 100644
index 0000000000000000000000000000000000000000..736934ac5c8b60337d498b4c04860510ff3c2aa6
--- /dev/null
+++ b/sensorfusion/csv_pos/hpix_pos.csv
@@ -0,0 +1,101 @@
+0,0,0.0
+1.1861,1.5878
+4.2362,5.9335
+8.2182,10.2791
+15.5045,18.0511
+18.8088,22.0625
+22.3672,26.0739
+29.8229,34.0966
+33.0424,37.6901
+36.7703,42.2029
+40.9218,46.7157
+47.615,54.4041
+50.4109,57.4127
+50.3262,56.6605
+50.072,56.9112
+61.0014,58.5827
+67.1863,59.7526
+74.3031,60.2541
+88.791,61.424
+96.162,62.2597
+103.1941,62.8447
+109.6331,63.5133
+124.2057,64.8504
+131.1531,65.6025
+137.338,66.3547
+144.5395,66.9397
+148.7757,67.0232
+152.8425,63.0119
+155.8078,60.0869
+159.7899,56.2427
+167.4151,49.2228
+170.9735,46.2143
+174.7013,42.6208
+178.5139,39.0272
+186.2238,32.0909
+190.2906,28.2467
+194.0185,24.8203
+197.9158,21.2268
+200.9659,18.469
+200.4575,18.2183
+200.8811,9.9448
+201.0506,2.5907
+201.7284,-12.2012
+202.0673,-18.8033
+202.5756,-26.3246
+202.6603,-33.6788
+203.4229,-48.8049
+203.8465,-55.992
+204.1007,-63.4297
+204.609,-77.6366
+204.9479,-84.1551
+205.1173,-83.403
+201.8978,-87.9993
+194.8657,-95.6878
+191.4767,-99.3649
+187.41,-103.7941
+183.6821,-107.6383
+176.65,-115.3268
+172.9221,-119.0874
+169.279,-123.0152
+162.1622,-130.7037
+158.2648,-135.0493
+154.8759,-138.6428
+154.7912,-138.3085
+152.5036,-139.8964
+145.5562,-140.7321
+137.931,-141.2335
+131.3225,-141.8185
+117.1736,-142.7378
+109.8873,-143.2392
+102.4316,-143.7406
+94.637,-144.242
+80.8269,-144.9942
+73.3712,-145.3284
+66.7627,-145.6627
+55.2402,-146.0806
+55.3249,-146.2477
+51.1734,-141.7349
+47.615,-138.0578
+40.7524,-131.5394
+37.0245,-127.9458
+33.3813,-124.2688
+25.7562,-116.7475
+22.0283,-113.1539
+18.5546,-109.7276
+14.9115,-106.134
+7.2863,-98.6963
+4.2362,-95.6878
+4.4904,-96.1056
+4.4057,-96.0221
+3.982,-83.988
+3.6431,-76.1324
+3.389,-69.1125
+3.1348,-61.6747
+2.457,-46.8828
+2.3723,-39.3615
+2.0334,-31.7566
+1.525,-17.2154
+1.1014,-9.9448
+0.8472,-2.5071
+0.7625,5.0142
diff --git a/sensorfusion/csv_pos/kf_pos.csv b/sensorfusion/csv_pos/kf_pos.csv
new file mode 100644
index 0000000000000000000000000000000000000000..24686629b7ba9d9a9c73833335f8c25836bab50d
--- /dev/null
+++ b/sensorfusion/csv_pos/kf_pos.csv
@@ -0,0 +1,101 @@
+0,0,0.0
+0.944,1.4436
+3.7406,6.1086
+7.4946,10.4893
+14.1913,17.6841
+17.9907,22.2788
+21.5501,26.4
+28.3328,33.7798
+32.1114,38.0348
+35.7264,42.5125
+39.5887,46.873
+45.9312,54.1792
+49.2126,57.8442
+49.5882,57.6983
+49.427,57.8596
+58.9626,59.408
+65.5062,60.6249
+72.5818,61.2141
+86.0629,62.3824
+93.9408,63.2661
+101.0797,63.9221
+107.5365,64.5908
+121.0033,65.9144
+128.7473,66.7348
+135.386,67.5694
+142.1475,68.1881
+146.72,68.375
+150.6526,65.044
+153.9244,61.9222
+157.8214,58.1735
+164.8213,51.743
+168.9593,48.1625
+172.8875,44.4545
+176.6051,40.9636
+183.8121,34.4567
+188.2698,30.324
+191.9451,26.9437
+195.999,23.235
+199.0818,20.4802
+199.0948,19.7871
+199.4974,12.4545
+199.663,5.3487
+200.3511,-8.4736
+200.7695,-16.2796
+201.297,-23.8706
+201.4784,-31.2529
+202.197,-45.0689
+202.6929,-53.1378
+202.9808,-60.6487
+203.5328,-74.3357
+204.0035,-81.6252
+204.1777,-81.7919
+201.4003,-85.8132
+194.9752,-93.0438
+191.0937,-97.3486
+187.1989,-101.6131
+183.4845,-105.5491
+176.7609,-112.9423
+172.7962,-117.0216
+169.1889,-120.9607
+162.4051,-128.3987
+158.3267,-132.9843
+154.8478,-136.7048
+154.326,-136.8512
+152.1892,-138.3297
+145.9404,-139.2856
+138.8231,-139.8465
+131.8907,-140.4988
+118.6534,-141.421
+110.6487,-142.0138
+103.1307,-142.5249
+95.6325,-143.058
+82.2818,-143.8237
+74.4523,-144.2112
+67.3933,-144.6235
+56.5268,-145.0794
+55.2183,-145.3148
+51.5205,-141.452
+47.9474,-137.7905
+41.3153,-131.5144
+37.3247,-127.6933
+33.4938,-123.8878
+26.498,-117.0198
+22.3511,-112.9786
+18.616,-109.3633
+14.9543,-105.8017
+7.9533,-98.9967
+4.3577,-95.5086
+4.1188,-95.3913
+3.9789,-95.3379
+3.5932,-84.8363
+3.2134,-76.6503
+2.9079,-69.479
+2.6066,-62.077
+1.9113,-48.1815
+1.7132,-39.8987
+1.357,-32.5017
+0.7181,-18.5295
+0.3114,-10.4554
+-0.0343,-2.8986
+-0.1566,4.588
diff --git a/sensorfusion/csv_pos/lidar_pos.csv b/sensorfusion/csv_pos/lidar_pos.csv
new file mode 100644
index 0000000000000000000000000000000000000000..82dbe10df87d04145cc76b18db8020c7f0fc3f57
--- /dev/null
+++ b/sensorfusion/csv_pos/lidar_pos.csv
@@ -0,0 +1,101 @@
+0,0,0.0
+0.0,0.9999
+2.0,5.9999
+6.0,8.9999
+14.0,16.0
+16.0,20.0
+21.0,24.0
+29.0,31.0
+32.0,35.0
+34.0,39.0
+38.0,43.0
+46.0,51.0
+49.0,54.9999
+50.0,54.9999
+51.0,56.0
+56.0,57.0
+64.0,58.0
+70.0,58.0
+85.0,59.0
+91.0,61.0
+98.0,61.0
+105.0,62.0
+118.0,63.0
+127.0,63.0
+133.0,64.0
+140.0,65.0
+148.0,65.0
+149.0,62.0
+152.0001,59.0
+157.0001,56.0
+163.0,48.0
+166.0,45.9999
+170.0,40.9999
+174.0,39.0
+182.0,31.0
+185.0001,29.0
+189.0,24.0
+193.0,21.0
+196.0,17.0
+198.0,17.0
+197.0,13.0
+198.0,3.9999
+199.0,-11.0
+199.0,-18.0
+198.0,-25.0
+199.0,-32.0001
+198.0,-46.0
+198.0,-55.0001
+199.0,-61.0
+199.0,-75.0
+198.0,-84.0
+199.0,-86.0001
+199.0,-87.0
+191.0,-96.0001
+188.0,-99.0
+184.0,-103.0
+180.0001,-107.0
+173.0,-114.0001
+168.0,-119.0001
+165.0,-121.0001
+158.0,-130.0
+154.0,-133.0
+150.0,-137.0001
+146.0,-140.0
+146.0,-140.0
+143.0,-141.0
+136.0,-142.0001
+129.0001,-142.0001
+115.0,-143.0
+107.0,-144.0001
+101.0001,-144.0001
+94.0,-143.0
+79.0,-145.0
+72.0,-145.0
+65.0,-145.0
+50.0,-146.0
+48.0,-145.0
+48.0,-144.0001
+45.0,-141.0
+38.0,-132.0001
+33.0,-129.0001
+30.0,-125.0
+23.0,-118.0
+18.0,-115.0
+16.0,-111.0001
+11.0,-107.0
+6.0,-99.0
+1.0,-96.0001
+-1.0,-96.0001
+0.0,-95.0
+0.0,-87.0
+0.0,-80.0
+0.0,-72.0
+-1.0,-65.0001
+0.0,-50.0001
+0.0,-44.0
+0.0,-37.0001
+-1.0,-21.0
+0.0,-15.0
+0.0,-7.0001
+-1.0,-0.0
diff --git a/sensorfusion/csv_pos/qual_pos.csv b/sensorfusion/csv_pos/qual_pos.csv
new file mode 100644
index 0000000000000000000000000000000000000000..aeeadfadbf32001b2f24b55cd8ec6bfb8f5899a9
--- /dev/null
+++ b/sensorfusion/csv_pos/qual_pos.csv
@@ -0,0 +1,66 @@
+0.0,0.0
+-0.0,0.001
+-0.0,0.001
+0.08,2.001
+0.08,3.722
+0.08,3.722
+0.017,5.197
+0.112,7.031
+0.112,7.031
+0.187,8.804
+0.264,10.733
+0.264,10.733
+0.278,12.581
+0.282,14.271
+0.282,14.271
+0.346,16.342
+0.377,18.272
+0.377,18.272
+0.388,19.825
+0.374,21.728
+0.374,21.728
+0.445,23.585
+0.445,23.585
+0.634,25.533
+0.746,27.428
+0.746,27.428
+0.851,29.167
+1.028,31.066
+1.028,31.066
+1.057,32.98
+1.101,34.662
+1.101,34.662
+1.176,36.648
+1.21,38.224
+1.21,38.224
+1.28,40.083
+1.28,40.083
+1.407,41.962
+1.569,43.8
+1.569,43.8
+1.684,45.663
+1.787,47.651
+1.787,47.651
+1.776,49.358
+1.727,51.204
+1.727,51.204
+1.687,52.824
+1.646,54.827
+1.646,54.827
+1.624,56.607
+1.583,58.401
+1.583,58.401
+1.581,60.151
+1.581,60.151
+1.771,61.319
+1.764,61.423
+1.764,61.423
+1.765,61.371
+1.761,61.173
+1.761,61.173
+1.763,61.102
+1.765,61.144
+1.765,61.144
+1.763,61.202
+1.764,61.294
+1.764,61.294
diff --git a/sensorfusion/csv_pos/timestamp.csv b/sensorfusion/csv_pos/timestamp.csv
new file mode 100644
index 0000000000000000000000000000000000000000..68138b16f57eb467af24404f807e32df771a86c1
--- /dev/null
+++ b/sensorfusion/csv_pos/timestamp.csv
@@ -0,0 +1,101 @@
+0.0
+0.0907
+0.1932
+0.2956
+0.4913
+0.5907
+0.693
+0.8902
+0.9879
+1.0904
+1.1925
+1.3852
+1.4877
+1.59
+1.6923
+1.8932
+1.9872
+2.0896
+2.2908
+2.3848
+2.4869
+2.5771
+2.7818
+2.8843
+2.986
+3.0764
+3.2811
+3.3713
+3.4738
+3.5761
+3.769
+3.871
+3.9816
+4.0758
+4.28
+4.3789
+4.4734
+4.5841
+4.6865
+4.879
+4.9817
+5.0747
+5.2761
+5.3786
+5.481
+5.5828
+5.7759
+5.8784
+5.9802
+6.1815
+6.2841
+6.3776
+6.4765
+6.6813
+6.7837
+6.8856
+6.9875
+7.1889
+7.2831
+7.3849
+7.5869
+7.6887
+7.7822
+7.8813
+8.0866
+8.1884
+8.2787
+8.381
+8.586
+8.6877
+8.7895
+8.8887
+9.0934
+9.1867
+9.2861
+9.491
+9.584
+9.695
+9.7939
+9.9904
+10.0806
+10.1913
+10.388
+10.4895
+10.5885
+10.6908
+10.8868
+10.9858
+11.0967
+11.1906
+11.395
+11.4935
+11.5878
+11.6902
+11.8909
+11.9933
+12.0873
+12.2881
+12.3906
+12.493
+12.5954
diff --git a/sensorfusion/csv_pos/vel_dt.csv b/sensorfusion/csv_pos/vel_dt.csv
new file mode 100644
index 0000000000000000000000000000000000000000..b82186e1dbb3c293e7be97d0fdc3281213f73533
--- /dev/null
+++ b/sensorfusion/csv_pos/vel_dt.csv
@@ -0,0 +1 @@
+50.0,0.11199999999999999
diff --git a/sensorfusion/csv_pos/vpix_pos.csv b/sensorfusion/csv_pos/vpix_pos.csv
new file mode 100644
index 0000000000000000000000000000000000000000..63b523ff3e2c7728a9a249f08e4a2c4f10da0840
--- /dev/null
+++ b/sensorfusion/csv_pos/vpix_pos.csv
@@ -0,0 +1,101 @@
+0,0,0.0
+0.7625,1.0864
+4.0668,6.017
+7.8794,10.3627
+14.8267,17.8004
+18.3004,22.2297
+21.7741,26.3246
+28.8062,34.013
+32.2799,38.0244
+35.7536,42.5372
+39.3968,46.7157
+46.09,54.4877
+48.9706,57.7469
+48.9706,56.9948
+48.7164,57.2455
+59.7306,59.0841
+65.746,60.2541
+72.9476,60.7555
+87.2659,62.0926
+94.0439,62.9283
+101.076,63.5969
+107.3456,64.2654
+121.7487,65.7697
+128.6114,66.5218
+135.3893,67.4411
+141.7436,68.0261
+145.9798,68.1932
+149.623,64.5161
+153.0119,61.2569
+156.994,57.4127
+164.2803,50.8106
+168.1776,47.4678
+172.2443,43.6236
+175.8028,40.2808
+183.5127,33.3445
+187.5794,29.5838
+190.9684,26.4917
+195.2893,22.5639
+198.2547,19.8897
+197.7463,19.5554
+198.2547,11.1984
+198.3394,4.4292
+199.1866,-10.3627
+199.6103,-17.6333
+200.2033,-25.2382
+200.3728,-32.5923
+201.22,-46.9664
+201.7284,-54.237
+201.9825,-61.6747
+202.6603,-76.6338
+203.2534,-82.818
+203.3381,-81.8151
+200.1186,-86.4115
+193.256,-94.0999
+189.6128,-98.1949
+185.8849,-102.2898
+182.2418,-106.2176
+174.9555,-114.3239
+171.5666,-117.7503
+168.0929,-121.6781
+160.7219,-129.868
+157.2482,-133.8793
+153.8592,-137.4728
+153.7745,-137.1386
+151.3175,-138.81
+144.5395,-139.7292
+137.6769,-140.2307
+130.4753,-140.9828
+116.3264,-141.9856
+109.1248,-142.5706
+101.6691,-143.072
+94.4675,-143.657
+79.8949,-144.4927
+73.2865,-144.827
+66.0002,-145.3284
+54.7318,-145.8299
+54.8166,-146.0806
+50.8345,-141.6513
+47.2761,-138.0578
+39.9898,-131.2887
+36.5161,-127.9458
+32.5341,-124.0181
+25.2478,-116.9146
+21.52,-113.2375
+17.6226,-109.5604
+13.9795,-106.0505
+6.6085,-98.947
+3.4737,-95.9385
+3.7279,-96.3563
+3.5584,-96.2728
+3.1348,-84.2387
+2.7112,-76.3831
+2.3723,-69.3632
+2.0334,-61.9255
+1.1861,-47.05
+1.0167,-39.6122
+0.5931,-32.6759
+-0.2542,-17.3826
+-0.5931,-10.1956
+-1.0167,-2.6742
+-1.1014,4.7635
diff --git a/sensorfusion/launch/SF.launch b/sensorfusion/launch/SF.launch
new file mode 100644
index 0000000000000000000000000000000000000000..41e55bc9a0369ec0658e714c8d412a5b89184bc0
--- /dev/null
+++ b/sensorfusion/launch/SF.launch
@@ -0,0 +1,11 @@
+<launch>
+
+    <!--Kalman Filter using Qualisys Cameras and other sensors-->
+
+    <include file="$(find sensorfusion)/launch/sensors.launch"></include>
+    <node pkg="sensorfusion" type="KF.py" name="kf_node" required="true"></node>
+    <node pkg="sensorfusion" type="path.py" name="path_node"></node>
+    <node pkg="sensorfusion" type="drive.py" name="drive_node"></node>
+    <node pkg="sensorfusion" type="sync.py" name="sync_node"></node>
+
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/SFQ.launch b/sensorfusion/launch/SFQ.launch
new file mode 100644
index 0000000000000000000000000000000000000000..c02b0367d1ba9ee28e7b801622aa6c86ca4f9d80
--- /dev/null
+++ b/sensorfusion/launch/SFQ.launch
@@ -0,0 +1,11 @@
+<launch>
+
+    <!--Kalman Filter using sensors, but without Qualisys Cameras-->
+
+    <include file="$(find sensorfusion)/launch/sensorsQ.launch"></include>
+    <node pkg="sensorfusion" type="KFQ.py" name="kfq_node" required="true"></node>
+    <node pkg="sensorfusion" type="path.py" name="path_node"></node>
+    <node pkg="sensorfusion" type="drive.py" name="drive_node"></node>
+    <node pkg="sensorfusion" type="syncQ.py" name="syncQ_node"></node>
+
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/backup/pixart_H_old.launch b/sensorfusion/launch/backup/pixart_H_old.launch
new file mode 100644
index 0000000000000000000000000000000000000000..15d8f2de518fcd0dd8410a4877463f55e2f82c4d
--- /dev/null
+++ b/sensorfusion/launch/backup/pixart_H_old.launch
@@ -0,0 +1,4 @@
+<launch>
+    <node pkg="pixart_ros" type="PAA5102_H_publisher.py" name="pixart_H_pub_node" respawn="true"></node>
+    <node pkg="sensorfusion" type="pixart_H_pos.py" name="pixart_H_pos_node" output="screen" required="true"></node>
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/backup/pixart_V_old.launch b/sensorfusion/launch/backup/pixart_V_old.launch
new file mode 100644
index 0000000000000000000000000000000000000000..b7caf42b3df6810ebdf66a4503c1e68e475c1dad
--- /dev/null
+++ b/sensorfusion/launch/backup/pixart_V_old.launch
@@ -0,0 +1,4 @@
+<launch>
+    <node pkg="pixart_ros" type="PAA5102_V_publisher.py" name="pixart_V_pub_node" respawn="true"></node>
+    <node pkg="sensorfusion" type="pixart_V_pos.py" name="pixart_V_pos_node" output="screen" required="true"></node>
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/backup/qualisys_old.launch b/sensorfusion/launch/backup/qualisys_old.launch
new file mode 100644
index 0000000000000000000000000000000000000000..3f164bcff79ad4ff69145486e7744ebe80c11fc1
--- /dev/null
+++ b/sensorfusion/launch/backup/qualisys_old.launch
@@ -0,0 +1,4 @@
+<launch>
+    <node pkg="qualisys" type="stream_3d_residual.py" name="qualisys_pub_node" respawn="true"></node>
+    <node pkg="sensorfusion" type="qualisys_pos.py" name="qualisys_pos_node" output="screen" required="true"></node>
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/imu.launch b/sensorfusion/launch/imu.launch
new file mode 100644
index 0000000000000000000000000000000000000000..ec091bd373e05a2242d01bf5fc148c38f45a7aad
--- /dev/null
+++ b/sensorfusion/launch/imu.launch
@@ -0,0 +1,10 @@
+<launch>
+    <!-- Use groups if you want to launch several nodes for multiple devices.
+    Make sure to use different param files for each sensor. -->
+    <!-- <group ns="sensor_1"> -->
+        <node  name="xsens_mti_node" pkg="xsens_mti_driver" type="xsens_mti_node" required="true">
+            <rosparam command="load" file="$(find sensorfusion)/param/imu.yaml" />
+        </node>
+        <node  pkg="sensorfusion" type="imu_pos.py" name="imu_pos_node" output="screen" required="true"></node>
+    <!-- </group> -->
+</launch>
diff --git a/sensorfusion/launch/lidar.launch b/sensorfusion/launch/lidar.launch
new file mode 100644
index 0000000000000000000000000000000000000000..686893a626a901aeb31c616777652170786da0a9
--- /dev/null
+++ b/sensorfusion/launch/lidar.launch
@@ -0,0 +1,4 @@
+<launch>
+    <node pkg="ydlidar_ros" type="ydlidar_node" name="lidar_pub_node" respawn="true"></node>
+    <node pkg="sensorfusion" type="lidar_pos" name="lidar_pos_node" output="screen" required="true"></node>
+</launch>
diff --git a/sensorfusion/launch/pixart_H.launch b/sensorfusion/launch/pixart_H.launch
new file mode 100644
index 0000000000000000000000000000000000000000..31a5fedcbbd5033f11874f1a3b545311d52c2963
--- /dev/null
+++ b/sensorfusion/launch/pixart_H.launch
@@ -0,0 +1,3 @@
+<launch>
+    <node pkg="pixart_ros" type="pixart_H_publisher.py" name="pixart_H_pub_node" respawn="true"></node>
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/pixart_V.launch b/sensorfusion/launch/pixart_V.launch
new file mode 100644
index 0000000000000000000000000000000000000000..f267937d6624e9a37f1aa1f84f596ab70310a716
--- /dev/null
+++ b/sensorfusion/launch/pixart_V.launch
@@ -0,0 +1,3 @@
+<launch>
+    <node pkg="pixart_ros" type="pixart_V_publisher.py" name="pixart_V_pub_node" respawn="true"></node>
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/qualisys.launch b/sensorfusion/launch/qualisys.launch
new file mode 100644
index 0000000000000000000000000000000000000000..7de8df7390641b821e05e7b9cac8fb51aeaec780
--- /dev/null
+++ b/sensorfusion/launch/qualisys.launch
@@ -0,0 +1,3 @@
+<launch>
+    <node pkg="qualisys" type="qualisys_publisher.py" name="qualisys_pub_node" respawn="true"></node>
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/sensors.launch b/sensorfusion/launch/sensors.launch
new file mode 100644
index 0000000000000000000000000000000000000000..9e96ba6980d6a09159a8d27cd0a90f22a7c973aa
--- /dev/null
+++ b/sensorfusion/launch/sensors.launch
@@ -0,0 +1,9 @@
+<launch>
+    
+    <!--Launch File with Lidar and Pixart Sensors (Without Qualisys Cameras)-->
+
+    <include file="$(find sensorfusion)/launch/lidar.launch"></include>
+    <include file="$(find sensorfusion)/launch/pixart_H.launch"></include>
+    <include file="$(find sensorfusion)/launch/pixart_V.launch"></include>
+    
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/launch/sensorsQ.launch b/sensorfusion/launch/sensorsQ.launch
new file mode 100644
index 0000000000000000000000000000000000000000..7d9b229486d2a923cfb4197bc32dd345d8afe273
--- /dev/null
+++ b/sensorfusion/launch/sensorsQ.launch
@@ -0,0 +1,9 @@
+<launch>
+
+    <!--Launch File with Qualisys Cameras and other sensors-->
+
+    <include file="$(find sensorfusion)/launch/lidar.launch"></include>
+    <include file="$(find sensorfusion)/launch/pixart_H.launch"></include>
+    <include file="$(find sensorfusion)/launch/pixart_V.launch"></include>
+    <include file="$(find sensorfusion)/launch/qualisys.launch"></include>
+</launch>
\ No newline at end of file
diff --git a/sensorfusion/msg/Motion.msg b/sensorfusion/msg/Motion.msg
new file mode 100644
index 0000000000000000000000000000000000000000..56564fa7aad641c8b8f07a51198efe784c98beca
--- /dev/null
+++ b/sensorfusion/msg/Motion.msg
@@ -0,0 +1,14 @@
+#Time to travel each segment (in s)
+float64 dt
+
+#Linear Velocity (0-100 mm/s)
+float64 velocity
+
+#Rotation velocity (?)
+float64 rot_vel
+
+#Direction robot needs to head (forward = 90, left = 0, right = 180, back = 270)
+float64 direction
+
+#Command to let robot start(1)/stop(0)
+float64 move
\ No newline at end of file
diff --git a/sensorfusion/msg/Position.msg b/sensorfusion/msg/Position.msg
new file mode 100644
index 0000000000000000000000000000000000000000..3e90302f72762aefa35d9d74a8550bb72ebc2cbe
--- /dev/null
+++ b/sensorfusion/msg/Position.msg
@@ -0,0 +1,17 @@
+#header for Timestamp
+std_msgs/Header header
+
+#X-Position
+float64 x
+
+#Y-Position
+float64 y
+
+#X-Variance
+float64 x_var
+
+#Y-Variance
+float64 y_var
+
+#Azimuth angle
+float64 azimuth
\ No newline at end of file
diff --git a/sensorfusion/msg/SyncPos.msg b/sensorfusion/msg/SyncPos.msg
new file mode 100644
index 0000000000000000000000000000000000000000..9d66775331eb4b24b7b8b62055ea643c04f787f4
--- /dev/null
+++ b/sensorfusion/msg/SyncPos.msg
@@ -0,0 +1,39 @@
+std_msgs/Header header
+
+#Lidar 
+float64 x_pos_lid
+float64 y_pos_lid
+float64 x_var_lid
+float64 y_var_lid
+#float64 azimuth_lid
+#float64 azimuth_var_lid
+
+
+#Pixart Front 
+float64 x_pos_vpix
+float64 y_pos_vpix
+float64 x_var_vpix
+float64 y_var_vpix
+
+
+#Pixart Back
+float64 x_pos_hpix
+float64 y_pos_hpix
+float64 x_var_hpix
+float64 y_var_hpix
+
+
+#Qualisys
+float64 x_pos_qual
+float64 y_pos_qual
+float64 x_var_qual
+float64 y_var_qual
+
+# #IMU
+# float64 azimuth_imu
+# float64 azimuth_var_imu
+
+
+
+
+
diff --git a/sensorfusion/package.xml b/sensorfusion/package.xml
new file mode 100644
index 0000000000000000000000000000000000000000..e7d28b94a1577b9cf719687440c9d359c1f47678
--- /dev/null
+++ b/sensorfusion/package.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>sensorfusion</name>
+  <version>0.0.0</version>
+  <description>The sensorfusion package</description>
+
+  <!-- One maintainer tag required, multiple allowed, one person per tag -->
+  <!-- Example:  -->
+  <!-- <maintainer email="jane.doe@example.com">Jane Doe</maintainer> -->
+  <maintainer email="pi@todo.todo">pi</maintainer>
+
+
+  <!-- One license tag required, multiple allowed, one license per tag -->
+  <!-- Commonly used license strings: -->
+  <!--   BSD, MIT, Boost Software License, GPLv2, GPLv3, LGPLv2.1, LGPLv3 -->
+  <license>TODO</license>
+
+
+  <!-- Url tags are optional, but multiple are allowed, one per tag -->
+  <!-- Optional attribute type can be: website, bugtracker, or repository -->
+  <!-- Example: -->
+  <!-- <url type="website">http://wiki.ros.org/sensorfusion</url> -->
+
+
+  <!-- Author tags are optional, multiple are allowed, one per tag -->
+  <!-- Authors do not have to be maintainers, but could be -->
+  <!-- Example: -->
+  <!-- <author email="jane.doe@example.com">Jane Doe</author> -->
+
+
+  <!-- The *depend tags are used to specify dependencies -->
+  <!-- Dependencies can be catkin packages or system dependencies -->
+  <!-- Examples: -->
+  <!-- Use depend as a shortcut for packages that are both build and exec dependencies -->
+  <!--   <depend>roscpp</depend> -->
+  <!--   Note that this is equivalent to the following: -->
+  <!--   <build_depend>roscpp</build_depend> -->
+  <!--   <exec_depend>roscpp</exec_depend> -->
+  <!-- Use build_depend for packages you need at compile time: -->
+  <!--   <build_depend>message_generation</build_depend> -->
+  <!-- Use build_export_depend for packages you need in order to build against this package: -->
+  <!--   <build_export_depend>message_generation</build_export_depend> -->
+  <!-- Use buildtool_depend for build tool packages: -->
+  <!--   <buildtool_depend>catkin</buildtool_depend> -->
+  <!-- Use exec_depend for packages you need at runtime: -->
+  <!--   <exec_depend>message_runtime</exec_depend> -->
+  <!-- Use test_depend for packages you need only for testing: -->
+  <!--   <test_depend>gtest</test_depend> -->
+  <!-- Use doc_depend for packages you need only for building documentation: -->
+  <!--   <doc_depend>doxygen</doc_depend> -->
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>roscpp</build_depend>
+  <build_depend>rospy</build_depend>
+  <build_depend>std_msgs</build_depend>
+  <build_export_depend>roscpp</build_export_depend>
+  <build_export_depend>rospy</build_export_depend>
+  <build_export_depend>std_msgs</build_export_depend>
+  <exec_depend>roscpp</exec_depend>
+  <exec_depend>rospy</exec_depend>
+  <exec_depend>std_msgs</exec_depend>
+  <build_depend>message_generation</build_depend>
+  <exec_depend>message_runtime</exec_depend>
+
+
+  <!-- The export tag contains other, unspecified, tags -->
+  <export>
+    <!-- Other tools can request additional information be placed here -->
+
+  </export>
+</package>
diff --git a/sensorfusion/param/imu.yaml b/sensorfusion/param/imu.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a7b437020a0516f09206380d95edc93ae37c41a9
--- /dev/null
+++ b/sensorfusion/param/imu.yaml
@@ -0,0 +1,42 @@
+## Device settings, provide one of the following:
+##  - device_id (most secure if you use multiple devices)
+##  - port + baudrate information
+##  - nothing: the driver scans for devices and selects the first found.
+# device_id: '077007EF' # uppercase hex string
+port: '/dev/ttyUSB1'
+#baudrate: 921600 # not necessary for some devices
+
+## Log file (optional), placed in ~/.ros/ otherwise use absolute path
+# log_file: log.mtb
+
+publisher_queue_size: 1
+
+# TF transform frame_id (default: imu_link), you may want to change it if you use multiple devices
+#frame_id: "imu_link"
+
+# Message publishers
+pub_imu: false
+pub_quaternion: true
+pub_mag: false
+pub_angular_velocity: false
+pub_acceleration: false
+pub_free_acceleration: true
+pub_dq: false
+pub_dv: false
+pub_sampletime: false
+pub_temperature: false
+pub_pressure: false
+pub_gnss: false
+pub_twist: false
+pub_transform: false
+pub_positionLLA: false
+pub_velocity: false
+
+
+## Sensor standard deviation [x,y,z] (optional)
+## This value is used to override the covariance matrix in sensor_msgs/Imu and
+## sensor_msgs/MagneticField messages.
+# linear_acceleration_stddev: [0, 0, 0] # [m/s^2]
+# angular_velocity_stddev: [0, 0, 0] # [rad/s]
+# orientation_stddev: [0, 0, 0] # [rad]
+# magnetic_field_stddev: [0, 0, 0] # [Tesla]
diff --git a/sensorfusion/scripts/KF.py b/sensorfusion/scripts/KF.py
new file mode 100644
index 0000000000000000000000000000000000000000..62fa07b606fa869ca7cb456aceaafadd97659746
--- /dev/null
+++ b/sensorfusion/scripts/KF.py
@@ -0,0 +1,442 @@
+#!/usr/bin/env python3
+from typing import Type
+import numpy as np
+from sensorfusion.msg import SyncPos, Position, Motion
+from sensorfusion.srv import Trajectory, Measurements
+import rospy
+import csv
+
+
+class kalman_filter(object):
+   
+    def __init__(self):        
+        
+        #*-------User-Parameters--------#
+
+        #Show matrices and states on console
+        self.show = False
+
+        #Robot velocity (from 0-100 mm/s)
+        self.velocity = 50.0
+
+        
+        #*-------Init_Node--------#
+        
+        #Before shutting down, run "save_data", which stops the robot and saves the position data
+        rospy.on_shutdown(self.save_data)
+
+        #Publishes commands to Drive node
+        self.pub_ = rospy.Publisher("/drive", Motion, queue_size=3)
+
+        #Synced Position and variances of all sensors with timestamp
+        self.Pos = SyncPos()
+
+        #Motion message for commanding a drive in a given direction, at a given velocity, for a given time
+        self.motion = Motion()
+
+        #Bool flag used to handle the first cycle differently
+        self.not_init = True
+
+        #Start time used to write correct timestamps to the csv files after data acquisition
+        self.starttime_sec = 0.0
+
+        #Delay between receiving measurements and finishing the calculation while the robot is still driving forward
+        self.start_diff_dt = 0.0
+        self.end_diff_dt = 0.0
+        self.delay_dt = 0.0
+        
+        #Save X-Y Position and Timestamp of Kalman Filter and sensors
+        self.kf_pos_list = []
+        
+        self.lid_pos_list = []
+        
+        self.vpix_pos_list = []
+
+        self.hpix_pos_list = []
+        
+        self.imu_pos_list = []
+
+        self.timestamp_list = []
+
+        self.estimate_pos_list = []
+
+        self.vel_dt_list = []
+        
+        #Robot direction (forward = 90, right = 180, back = 270, left = 0)
+        self.direction = 0.0
+        
+        #Segment number 
+        # (example: (0,0)->(30,0) = segment 1; (30,0)->(30,30) = segment 2)
+        self.seg_number = 1
+
+        #Path number: small sub-trajectories within one segment 
+        # (example: in segment 1, (0,0)->(30,0) splits into path 1 (0,0)->(15,0) and path 2 (15,0)->(30,0);
+        #           in segment 2, (30,0)->(30,30) splits into path 3 (30,0)->(30,15) and path 4 (30,15)->(30,30))
+        # Intervals are defined in the "trajectory" node
+        self.path_number = 1
+
+        #Angle at which robot is driving at current segment
+        self.seg_angle = 90.0
+
+        #Angle at which robot is driving at current path
+        self.angle = 90.0
+
+        #Time needed driving a segment with specific velocity
+        self.seg_dt = 0.0
+        
+        #Time needed driving a path with specific velocity
+        self.dt = 1.0
+
+        #Rate derived from self.dt: after the prediction the robot drives for one period, 
+        # then measurements are taken once it has reached the path position
+        self.drive_rate = rospy.Rate(1/self.dt)
+
+        #Driven Distance
+        self.driven_distance = (self.velocity*((self.path_number-1)*self.dt))
+        
+        #*-------Kalman Filter--------# n = 2, m = 2
+
+        #state variable (X-Position, Y-Position) init; velocities enter through the control input u
+        self.x = np.array([[0],
+                          [0]],
+                          dtype=float)
+        
+        #state covariance matrix init
+        self.P = np.array([[10,0],
+                           [0,10]], 
+                          dtype=float)
+                
+        #measurements
+        self.z = np.zeros((6,1), dtype=float)
+        
+        # # process noise covariance matrix
+        # self.Q = np.array([[(self.dt*np.cos(self.angle*np.pi/180))**2,((self.dt)**2) * np.sin(self.angle*np.pi/180)*np.cos(self.angle*np.pi/180)],
+        #                    [((self.dt)**2)*np.sin(self.angle*np.pi/180)*np.cos(self.angle*np.pi/180),(self.dt*np.sin(self.angle*np.pi/180))**2]],
+        #                   dtype=float)
+    
+        # # process noise 
+        # self.eps_ext = (0.00007017*self.velocity**2 + 0.002715*self.velocity + 0.38983)
+        # self.eps_vel = (0.0002957*self.velocity**2 + 0.22496*self.velocity + -0.79668)
+
+        self.Q = np.array([[0,0],
+                           [0,0]], 
+                          dtype=float)
+        
+        self.eps_ext = np.array([[0,0],
+                                 [0,0]], 
+                          dtype=float)
+        self.eps_vel = 1
+
+        #measurement covariance matrices
+        self.R = np.identity(6)
+
+        #state transition matrix
+        self.A = np.array([[1,0],
+                           [0,1]],
+                          dtype=float)
+        
+        self.B = np.identity(2)
+        
+        self.u = np.array([[round(self.velocity*np.cos(self.angle*np.pi/180),2)],
+                          [round(self.velocity*np.sin(self.angle*np.pi/180),2)]],
+                          dtype=float)
+        
+        self.w = np.array([[0],
+                           [0]],
+                          dtype=float)
+        
+        #state to measurement matrix
+        self.H = np.array([[1,0],
+                           [0,1],
+                           [1,0],
+                           [0,1],
+                           [1,0],
+                           [0,1]], 
+                          dtype=float)
+        
+        self.HT = np.transpose(self.H)
+
+        #Kalman Gain
+        self.K = np.zeros((2,2), dtype=float)
+
+    def save_data(self):
+        """let the robot stop and save data to csv files. After that shutdown the node
+        """
+    
+        #Let the robot stop
+        self.motion.direction = 0
+        self.motion.velocity = 0
+        self.motion.dt = 0
+        self.motion.move = 0
+
+        self.pub_.publish(self.motion)
+
+        #Write down saved data to csv files
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/kf_pos.csv', 'w', encoding='UTF8', newline='') as a:
+            writer = csv.writer(a)
+            writer.writerows(self.kf_pos_list)
+        
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/lidar_pos.csv', 'w', encoding='UTF8', newline='') as b:
+            writer = csv.writer(b)
+            writer.writerows(self.lid_pos_list)
+            
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vpix_pos.csv', 'w', encoding='UTF8', newline='') as c:
+            writer = csv.writer(c)
+            writer.writerows(self.vpix_pos_list)
+
+        # with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/imu_pos.csv', 'w', encoding='UTF8', newline='') as d:
+        #     writer = csv.writer(c)
+        #     writer.writerows(self.imu_pos_list)
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/hpix_pos.csv', 'w', encoding='UTF8', newline='') as d:
+            writer = csv.writer(d)
+            writer.writerows(self.hpix_pos_list)
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/timestamp.csv', 'w', encoding='UTF8', newline='') as e:
+            writer = csv.writer(e)
+            writer.writerows(self.timestamp_list)    
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/estimate_pos.csv', 'w', encoding='UTF8', newline='') as f:
+            writer = csv.writer(f)
+            writer.writerows(self.estimate_pos_list)      
+
+        self.vel_dt_list.append([self.velocity,self.dt])
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vel_dt.csv', 'w', encoding='UTF8', newline='') as h:
+            writer = csv.writer(h)
+            writer.writerows(self.vel_dt_list)     
+
+            
+    def getPath_client(self,vel,p_num,s_num):
+        """Client which request angle, dt and direction to forward the commands to driving server
+
+        Args:
+            vel (Type[Float64]): velocity to get the time needed to drive specific path
+            p_num (Type[Int64]): number of path of trajectory
+        """
+        rospy.wait_for_service('getPath_service')
+        
+        try:
+            getPath = rospy.ServiceProxy('getPath_service',Trajectory)
+            respond = getPath(p_num,s_num,vel)
+            
+            self.angle = respond.angle
+            if(self.angle > -0.00001 and self.angle < 0.00001): self.angle = 0.00001
+            if(self.angle > 179.99999 and self.angle < 180.00001): self.angle = 180.00001
+            self.direction = respond.direction
+            self.dt = respond.dt
+            self.seg_dt = respond.seg_dt
+            self.seg_angle = respond.seg_angle
+
+            if(self.angle == 999 and self.direction == 0):
+                rospy.signal_shutdown("Finished driving trajectory")
+                
+            #Setup next pathnumber for next cycle
+            self.path_number += 1
+        
+        except rospy.ServiceException as e:
+            print("Service call failed: %s" % e)
+
+    def getMeasurements_client(self):
+        """get synced measurements (position, variance) of sensors to proceed with update step of kalman filter
+        """
+        rospy.wait_for_service('getMeasurements_service')
+        
+        try:
+            getMeasurements = rospy.ServiceProxy('getMeasurements_service',Measurements)
+            self.Pos = getMeasurements()
+
+        except rospy.ServiceException as e:
+            print("Service call failed: %s" % e)
+
+    def filter(self):  
+        """Kalman Filter function with Prediction and Update Steps
+        """
+        
+        #Variable for execution time
+        start = rospy.Time.now().to_sec()
+
+        
+        
+        #*------Get_Path_Information------#
+        
+        #Receive necessary information (self.angle, self.dt, self.seg_dt, self.seg_angle)
+        self.getPath_client(self.velocity,self.path_number,self.seg_number)
+        
+        #Update time left to drive to next path position
+        if(self.not_init == False):  
+            self.end_diff_dt = rospy.Time.now().to_sec()
+            self.delay_dt = self.end_diff_dt - self.start_diff_dt
+            
+            # process noise 
+            self.eps_vel = (0.000637*self.velocity**2 + 0.2286*self.velocity - 0.2947)
+
+            self.eps_ext = (0.0000689*self.velocity**2 + 0.00567*self.velocity + 0.4114)
+
+            # process noise covariance matrix
+            self.Q = np.array([[(self.dt**2)*((self.eps_ext**2) + (self.eps_vel**2)),0],
+                               [0,(self.dt**2)*((self.eps_ext**2) + (self.eps_vel**2))]],
+                               dtype=float)
+    
+            
+
+           
+  
+        
+        
+        #*------Prediction------#
+        
+        #angle, vel x, vel y, dt, self.x, self.P, init state, init cov
+        if(self.show):
+            print("\n\n\n****************New Cycle*****************\n")
+            print("angle: ", self.angle)
+            print("dt: ", self.dt)
+            print("\nprior state: \n", self.x)
+            print("\nprior cov: \n", self.P)
+            print("\nvel_x: \n", self.u[0][0])
+            print("\nvel_y: \n", self.u[1][0])
+
+
+        # Predict State
+        x_p = self.A.dot(self.x) + self.dt*(self.B).dot(self.u) + self.w
+        # Predict Covariance 
+        P_p = self.A.dot(self.P).dot(np.transpose(self.A)) + self.Q
+
+        self.estimate_pos_list.append([round(x_p[0][0],4), round(x_p[1][0],4)])
+
+        #Predict State, predict covariance
+        if(self.show):
+            print("\npredicted state: \n", x_p)
+            print("\npredicted cov: \n", P_p)
+
+        #Do one measurement before starting to drive and save data to lists.
+        if(self.not_init):
+            
+            self.getMeasurements_client()
+            
+            self.starttime_sec = (rospy.Time(self.Pos.measure.header.stamp.secs, self.Pos.measure.header.stamp.nsecs)).to_sec()
+            self.timestamp_list.append([0.0])
+            self.kf_pos_list.append([0, 0, 0.0])
+            self.lid_pos_list.append([0, 0, 0.0])
+            self.vpix_pos_list.append([0, 0, 0.0])
+            self.hpix_pos_list.append([0, 0, 0.0])
+
+        
+        #*------Drive------#
+
+        #Start driving at first cycle without any more conditions
+        if(self.not_init):
+            
+            self.motion.dt = self.seg_dt
+            self.motion.direction = self.direction
+            self.motion.velocity = self.velocity
+            self.motion.move = 1
+
+            self.pub_.publish(self.motion)
+        
+        #If the path angle matches the current segment angle (angles are integers), command the robot to drive in the new direction
+        elif(abs(self.angle - self.seg_angle) < 1.0):
+            
+            self.motion.dt = self.seg_dt
+            self.motion.direction = self.direction
+            self.motion.velocity = self.velocity
+            self.motion.move = 1
+
+            self.pub_.publish(self.motion)
+            print("\n\n******Change Direction to ", round(self.angle,2), " degrees*****\n\n")
+            self.seg_number += 1
+                
+        #Wait until the robot has reached the path position
+        rospy.sleep(self.dt-self.delay_dt)
+
+        #*-------Measure--------#
+        
+        #Receive measurements and save them in self.Pos
+        self.getMeasurements_client()               
+        self.start_diff_dt = rospy.Time.now().to_sec()
+        
+        #*--------Update--------#
+
+        # Compute Kalman Gain
+
+        #Observations
+        self.z = np.array([[self.Pos.measure.x_pos_lid],
+                          [self.Pos.measure.y_pos_lid],
+                          [self.Pos.measure.x_pos_vpix],
+                          [self.Pos.measure.y_pos_vpix],
+                          [self.Pos.measure.x_pos_hpix],
+                          [self.Pos.measure.y_pos_hpix]],
+                          dtype = float)
+        
+        #Sensor variances
+        var = np.array([[self.Pos.measure.x_var_lid],
+                       [self.Pos.measure.y_var_lid],
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632], #self.Pos.measure.x_var_vpix
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632],#self.Pos.measure.y_var_vpix
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632],#self.Pos.measure.x_var_hpix
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632]],#self.Pos.measure.y_var_hpix]
+                       dtype = float)
+        
+        self.R = np.identity(len(self.z)) * (var)   
+
+        #z (measurements), self.R (variances of sensor)
+        if(self.show):
+            print("\nz: \n", self.z)
+            print("R: \n", self.R)
+
+        S = self.H.dot(P_p).dot(self.HT) + self.R 
+
+        try:
+            K = P_p.dot(self.HT).dot(np.linalg.inv(S))
+        except np.linalg.LinAlgError:
+            rospy.signal_shutdown("Singular matrix, error caused by the pixart publisher")
+            return
+
+        
+        #Kalman Gain
+        if(self.show):
+            print("\nK: \n", K)
+        
+        residual = self.z - (self.H.dot(x_p))
+    
+        #Estimate state
+        self.x = x_p + K.dot(residual)
+        
+        # Estimate Covariance
+        self.P = P_p - K.dot(self.H).dot(P_p)
+
+        
+        #*--------Save_values--------#
+        
+        #Save Data to list
+        timestamp_sec = round(((rospy.Time(self.Pos.measure.header.stamp.secs, self.Pos.measure.header.stamp.nsecs)).to_sec()) - self.starttime_sec,4)
+        self.timestamp_list.append([timestamp_sec])
+        self.kf_pos_list.append([round(self.x[0][0],4), round(self.x[1][0],4)])
+        self.lid_pos_list.append([round(self.Pos.measure.x_pos_lid,4), round(self.Pos.measure.y_pos_lid,4)])
+        self.vpix_pos_list.append([round(self.Pos.measure.x_pos_vpix,4), round(self.Pos.measure.y_pos_vpix,4)])
+        self.hpix_pos_list.append([round(self.Pos.measure.x_pos_hpix,4), round(self.Pos.measure.y_pos_hpix,4)])
+        
+        
+        #self.imu_pos_list.append([round(self.Pos.measure.x_pos_imu,4), round(self.Pos.measure.y_pos_imu,4)])
+        
+        #*---------Output Screen------#
+        print("\nKF-Position: [" ,round(self.x[0][0],4), ", ", round(self.x[1][0],4),"]\n")
+
+        if(self.not_init):
+            self.not_init = False
+
+        #Execution time
+        end = rospy.Time.now().to_sec()
+        #print("Execution time: ", round(end-start,4)," sec\n")
+        print("Execution time: ", round((end-start)-(self.dt-self.delay_dt),4)," sec\n")
+
+
+
+if __name__ == "__main__":
+    rospy.init_node("kalman_filter", anonymous=True, disable_signals=True)
+    kf_obj = kalman_filter()
+    
+    #Wait until all other nodes have launched and the Pixart publisher has restarted after any errors before starting the Kalman filter
+    rospy.sleep(9.5) 
+    
+    while not rospy.is_shutdown():
+        kf_obj.filter()
+
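For reference, a self-contained numeric sketch of one predict/update cycle of the 2-state filter above (state x = [x_pos, y_pos], six stacked position measurements); the noise values are illustrative and not the tuned polynomials used in KF.py:

    # One KF cycle with the same shapes as KF.py: A, B, P, Q are 2x2,
    # H is 6x2 (lidar, front pixart, back pixart each observe x and y).
    import numpy as np

    dt, vel, angle = 1.0, 50.0, 90.0
    A = np.eye(2); B = np.eye(2)
    u = vel * np.array([[np.cos(np.radians(angle))],
                        [np.sin(np.radians(angle))]])
    x = np.zeros((2, 1))
    P = np.diag([10.0, 10.0])
    Q = np.diag([1.0, 1.0])                      # illustrative process noise
    H = np.tile(np.eye(2), (3, 1))
    R = np.diag([0.5, 0.5, 6.25, 6.25, 6.25, 6.25])
    z = np.array([[0.3], [49.5], [-0.2], [50.4], [0.1], [50.1]])

    x_p = A @ x + dt * (B @ u)                   # predicted state
    P_p = A @ P @ A.T + Q                        # predicted covariance
    S = H @ P_p @ H.T + R
    K = P_p @ H.T @ np.linalg.inv(S)             # Kalman gain (2x6)
    x = x_p + K @ (z - H @ x_p)                  # fused position
    P = P_p - K @ H @ P_p
    print(np.round(x.ravel(), 3))                # close to [0, 50]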
diff --git a/sensorfusion/scripts/backup/KF_pos_vel_old.py b/sensorfusion/scripts/backup/KF_pos_vel_old.py
new file mode 100644
index 0000000000000000000000000000000000000000..32388584074fd1bfcaaa1698acb1093d9c9d4526
--- /dev/null
+++ b/sensorfusion/scripts/backup/KF_pos_vel_old.py
@@ -0,0 +1,384 @@
+#!/usr/bin/env python3
+from typing import Type
+import numpy as np
+from sensorfusion.msg import SyncPos, Position, Motion
+from sensorfusion.srv import Trajectory, Measurements
+import rospy
+import csv
+
+
+class kalman_filter(object):
+   
+    def __init__(self):        
+        
+        #*-------User-Parameters--------#
+
+        #Show matrices and states on console
+        self.show = False
+
+        #Robot velocity (from 0-100 mm/s)
+        self.velocity = 15.0
+
+        
+        #*-------Init_Node--------#
+        
+        #Before shutting down, run "save_data", which stops the robot and saves the position data
+        rospy.on_shutdown(self.save_data)
+
+        #Publishes commands to Drive node
+        self.pub_ = rospy.Publisher("/drive", Motion, queue_size=3)
+
+        #Synced Position and variances of all sensors with timestamp
+        self.Pos = SyncPos()
+
+        #Motion message for commanding a drive in a given direction, at a given velocity, for a given time
+        self.motion = Motion()
+
+        #Bool flag used to handle the first cycle differently
+        self.not_init = True
+
+        #Start time used to write correct timestamps to the csv files after data acquisition
+        self.starttime_sec = 0.0
+
+        
+        #Save X-Y Position and Timestamp of Kalman Filter and sensors
+        self.kf_pos_list = []
+        
+        self.lid_pos_list = []
+        
+        self.vpix_pos_list = []
+
+        self.hpix_pos_list = []
+        
+        self.imu_pos_list = []
+
+        #Velocity estimate that is more probable than the commanded value
+        self.estimated_velocity = self.velocity
+        
+        #Robot direction (forward = 90, right = 180, back = 270, left = 0)
+        self.direction = 0.0
+        
+        #Segment number 
+        # (example: (0,0)->(30,0) = segment 1; (30,0)->(30,30) = segment 2)
+        self.seg_number = 1
+
+        #Path number: small sub-trajectories within one segment 
+        # (example: in segment 1, (0,0)->(30,0) splits into path 1 (0,0)->(15,0) and path 2 (15,0)->(30,0);
+        #           in segment 2, (30,0)->(30,30) splits into path 3 (30,0)->(30,15) and path 4 (30,15)->(30,30))
+        # Intervals are defined in the "trajectory" node
+        self.path_number = 1
+
+        #Angle at which robot is driving at current segment
+        self.seg_angle = 0.0
+
+        #Angle at which robot is driving at current path
+        self.angle = 90.0
+
+        #Time needed driving a segment with specific velocity
+        self.seg_dt = 0.0
+        
+        #Time needed driving a path with specific velocity
+        self.dt = 1.0
+
+        #Rate derived from self.dt: after the prediction the robot drives for one period, 
+        # then measurements are taken once it has reached the path position
+        self.drive_rate = rospy.Rate(1/self.dt)
+
+        
+        
+        #*-------Kalman Filter--------# n = 4, m = 2
+
+        #state variable (X-Position, Y-Position, X-Velocity, Y-Velocity)
+        self.x = np.zeros((4,1), dtype=float)
+        
+        #state covariance matrix
+        self.P = np.array([[10,0,0,0],[0,10,0,0],[0,0,3,0],[0,0,0,3]], dtype=float)
+                
+        #measurements
+        self.z = np.zeros((2,1), dtype=float)
+        
+        #process noise covariance matrix
+        self.Q = np.zeros((4,4), dtype=float)
+        self.Q[0][0] = 10
+        self.Q[1][1] = 10
+        
+        #measurement covariance matrices
+        self.R = np.zeros((2,2), dtype=float)
+
+        #state transition matrix
+        self.A = np.array([[1,0,self.dt,0],[0,1,0,self.dt],[0,0,1,0],[0,0,0,1]], dtype=float)
+        
+        #state to measurement matrix
+        self.H = np.array([[1,0,0,0],[0,1,0,0]], dtype=float)
+        self.HT = np.transpose(self.H)
+
+        #Kalman Gain
+        self.K = np.zeros((4,2), dtype=float)
+
+
+
+    def save_data(self):
+        """let the robot stop and save data to csv files. After that shutdown the node
+        """
+    
+        #Let the robot stop
+        self.motion.direction = 0
+        self.motion.velocity = 0
+        self.motion.dt = 0
+        self.motion.move = 0
+
+        self.pub_.publish(self.motion)
+
+        #Write down saved data to csv files
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/kf_pos.csv', 'w', encoding='UTF8', newline='') as a:
+            writer = csv.writer(a)
+            writer.writerows(self.kf_pos_list)
+        
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/lidar_pos.csv', 'w', encoding='UTF8', newline='') as b:
+            writer = csv.writer(b)
+            writer.writerows(self.lid_pos_list)
+            
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vpix_pos.csv', 'w', encoding='UTF8', newline='') as c:
+            writer = csv.writer(c)
+            writer.writerows(self.vpix_pos_list)
+
+        # with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/imu_pos.csv', 'w', encoding='UTF8', newline='') as d:
+        #     writer = csv.writer(c)
+        #     writer.writerows(self.imu_pos_list)
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/hpix_pos.csv', 'w', encoding='UTF8', newline='') as d:
+            writer = csv.writer(d)
+            writer.writerows(self.hpix_pos_list)
+
+            
+    def getPath_client(self,vel,p_num,s_num):
+        """Client which request angle, dt and direction to forward the commands to driving server
+
+        Args:
+            vel (Type[Float64]): velocity to get the time needed to drive specific path
+            p_num (Type[Int64]): number of path of trajectory
+        """
+        rospy.wait_for_service('getPath_service')
+        
+        try:
+            getPath = rospy.ServiceProxy('getPath_service',Trajectory)
+            respond = getPath(p_num,s_num,vel)
+            
+            self.angle = respond.angle
+            if(self.angle > -0.00001 and self.angle < 0.00001): self.angle = 0.00001
+            if(self.angle > 179.99999 and self.angle < 180.00001): self.angle = 180.00001
+            self.direction = respond.direction
+            self.dt = respond.dt
+            self.seg_dt = respond.seg_dt
+            self.seg_angle = respond.seg_angle
+
+            if(self.angle == 999 and self.direction == 0):
+                rospy.signal_shutdown("Finished driving trajectory")
+        
+        except rospy.ServiceException as e:
+            print("Service call failed: %s" % e)
+
+    def getMeasurements_client(self):
+        """get synced measurements (position, variance) of sensors to proceed with update step of kalman filter
+        """
+        rospy.wait_for_service('getMeasurements_service')
+        
+        try:
+            getMeasurements = rospy.ServiceProxy('getMeasurements_service',Measurements)
+            self.Pos = getMeasurements()
+
+        except rospy.ServiceException as e:
+            print("Service call failed: %s" % e)
+
+    def filter(self):  
+        """Kalman Filter function with Prediction and Update Steps
+        """
+        
+        #Variable for execution time
+        start = rospy.Time.now().to_sec()
+
+        #*------Get_Path_Information------#
+        #Receive necessary information (self.angle, self.dt, self.seg_dt, self.seg_angle)
+        self.getPath_client(self.velocity,self.path_number,self.seg_number)
+        
+        #Setup next pathnumber for next cycle
+        self.path_number = self.path_number + 1
+        
+  
+        #*------Prediction------#
+        
+        #Setup State transition matrix
+        self.A[0][2] = self.dt
+        self.A[1][3] = self.dt
+        
+        #Set x and y velocity for predicting state
+        if(self.not_init == True):
+            self.x[2][0] = np.cos(self.angle*(np.pi/180)) * self.velocity
+            self.x[3][0] = np.sin(self.angle*(np.pi/180)) * self.velocity
+
+        #After KF cycle, estimate probable velocity and assign new x and y velocities
+        else:
+            self.estimated_velocity = (self.x[2][0]**2 + self.x[3][0]**2)**.5
+            self.x[2][0] = np.cos(self.angle*(np.pi/180)) * self.estimated_velocity
+            self.x[3][0] = np.sin(self.angle*(np.pi/180)) * self.estimated_velocity
+
+
+        #angle, vel x, vel y, dt, self.x, self.P, init state, init cov
+        if(self.show):
+            print("\n\n\n****************New Cycle*****************\n")
+            print("angle: ", self.angle)
+            print("vel_x: ", self.x[2][0])
+            print("vel_y: ", self.x[3][0])
+            print("dt: ", self.dt)
+            print("\ninit state: \n", self.x)
+            print("\ninit cov: \n", self.P)
+
+        # Predict State
+        x_p = self.A.dot(self.x)
+        # Predict Covariance 
+        P_p = self.A.dot(self.P).dot(np.transpose(self.A)) + self.Q
+
+        #Predict State, predict covariance
+        if(self.show):
+            print("\npredict state: \n", x_p)
+            print("\npredict cov: \n", P_p)
+
+        #Do one measurement before starting to drive and save data to lists.
+        if(self.not_init):
+            
+            self.getMeasurements_client()
+            
+            self.starttime_sec = (rospy.Time(self.Pos.measure.header.stamp.secs, self.Pos.measure.header.stamp.nsecs)).to_sec()
+            self.kf_pos_list.append([0, 0, 0.0])
+            self.lid_pos_list.append([0, 0, 0.0])
+            self.vpix_pos_list.append([0, 0, 0.0])
+            self.hpix_pos_list.append([0, 0, 0.0])
+
+        #*------Drive------#
+
+        #Start driving at first cycle without any more conditions
+        if(self.not_init):
+            
+            self.motion.dt = self.seg_dt
+            self.motion.direction = self.direction
+            self.motion.velocity = self.velocity
+            self.motion.move = 1
+
+            self.pub_.publish(self.motion)
+        
+        #If the path angle matches the current segment angle (angles are integers), command the robot to drive in the new direction
+        elif(abs(self.angle - self.seg_angle) < 1.0):
+            
+            self.motion.dt = self.seg_dt
+            self.motion.direction = self.direction
+            self.motion.velocity = self.velocity
+            self.motion.move = 1
+
+            self.pub_.publish(self.motion)
+            print("\n\n******Change Direction to ", round(self.angle,2), " degrees*****\n\n")
+            self.seg_number += 1
+                
+        #Wait until the robot has reached the path position
+        #self.drive_rate.sleep()
+        rospy.sleep(self.dt)
+
+        #*-------Measure--------#
+        
+        #Receive measurements and save them in self.Pos
+        self.getMeasurements_client()               
+        
+        #*--------Update--------#
+
+        # Compute Kalman Gain
+        for i in range(3):
+
+            if(i == 0):
+                #Lidar measurement mean and covariance matrix
+                if(self.show): print("\n\n**************Lidar Sensor**************")
+                self.z = np.array([[self.Pos.measure.x_pos_lid],[self.Pos.measure.y_pos_lid]])
+                self.R = np.array([[self.Pos.measure.x_var_lid,0],[0,self.Pos.measure.y_var_lid]])
+            elif(i == 1):
+                # Pixart_V measurement mean and covariance matrix
+                if(self.show): print("\n\n**************Pixart Sensor Front**************")
+                self.z = np.array([[self.Pos.measure.x_pos_vpix],[self.Pos.measure.y_pos_vpix]])
+                self.R = np.array([[self.Pos.measure.x_var_vpix,0],[0,self.Pos.measure.y_var_vpix]]) 
+
+            elif(i == 2):
+                # Pixart_H measurement mean  covariance matrix
+                if(self.show): print("\n\n**************Pixart Sensor Back**************")
+                self.z = np.array([[self.Pos.measure.x_pos_hpix],[self.Pos.measure.y_pos_hpix]])
+                self.R = np.array([[self.Pos.measure.x_var_hpix,0],[0,self.Pos.measure.y_var_hpix]])     
+
+            #z (measurements), self.R (variances of sensor)
+            if(self.show):
+                print("\nz: \n", self.z)
+                print("R: \n", self.R)
+
+            S = self.H.dot(P_p).dot(self.HT) + self.R
+
+            try:
+                K = P_p.dot(self.HT).dot(np.linalg.inv(S))
+            except np.linalg.LinAlgError:
+                rospy.signal_shutdown("Singular matrix, error caused by the pixart publisher")
+                return
+
+            
+            #Kalman Gain
+            if(self.show):
+                print("\nK: \n", K)
+            
+            residual = self.z - (self.H.dot(x_p))
+        
+            #Estimate state
+            x_p = x_p + K.dot(residual)
+            
+            # Estimate Covariance
+            P_p = P_p - K.dot(self.H).dot(P_p)
+
+            #Updated state and covariance
+            if(self.show):
+                print("\nnew update state: \n", x_p)
+                print("\nnew update cov: \n", P_p)
+
+        #New state, new cov
+        if(self.show):
+            print("\nnew state: \n", x_p)
+            print("\nnew cov: \n\n",P_p)
+
+        #Save after update the state and covariance and work in next cycle with these values
+        self.x = x_p
+        self.P = P_p
+
+        
+        #*--------Save_values--------#
+        
+        #Save Data to list
+        timestamp_sec = round(((rospy.Time(self.Pos.measure.header.stamp.secs, self.Pos.measure.header.stamp.nsecs)).to_sec()) - self.starttime_sec,6)
+        self.kf_pos_list.append([round(self.x[0][0],4), round(self.x[1][0],4), timestamp_sec])
+        self.lid_pos_list.append([round(self.Pos.measure.x_pos_lid,4), round(self.Pos.measure.y_pos_lid,4), timestamp_sec])
+        self.vpix_pos_list.append([round(self.Pos.measure.x_pos_vpix,4), round(self.Pos.measure.y_pos_vpix,4), timestamp_sec])
+        self.hpix_pos_list.append([round(self.Pos.measure.x_pos_hpix,4), round(self.Pos.measure.y_pos_hpix,4), timestamp_sec])
+        #self.imu_pos_list.append([round(self.Pos.measure.x_pos_imu,4), round(self.Pos.measure.y_pos_imu,4), timestamp_sec])
+        
+        #*---------Output Screen------#
+        print("\nKF-Position: [" ,round(self.x[0][0],4), ", ", round(self.x[1][0],4),"]\n")
+
+        if(self.not_init):
+            self.not_init = False
+
+        #Execution time
+        end = rospy.Time.now().to_sec()
+        print("Execution time: ", round(end-start,4)," sec\n")
+        
+
+
+
+if __name__ == "__main__":
+    rospy.init_node("kalman_filter", anonymous=True, disable_signals=True)
+    kf_obj = kalman_filter()
+    
+    #Wait until all other nodes have launched and the Pixart publisher has restarted after any errors before starting the Kalman filter
+    rospy.sleep(9.5) 
+    
+    while not rospy.is_shutdown():
+        kf_obj.filter()
+
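This backup version applies the update once per sensor instead of stacking all measurements as KF.py does. For sensors with uncorrelated noise the two orderings yield the same posterior; a small numeric check with illustrative values:

    # Sequential per-sensor updates vs. one batched update with stacked H
    # and block-diagonal R give the same estimate.
    import numpy as np

    x_p = np.array([[10.0], [5.0], [1.0], [0.5]])        # 4-state prediction
    P_p = np.diag([10.0, 10.0, 3.0, 3.0])
    H = np.array([[1, 0, 0, 0], [0, 1, 0, 0]], float)    # each sensor sees (x, y)
    zs = [np.array([[10.3], [5.1]]),
          np.array([[9.8],  [4.7]]),
          np.array([[10.1], [5.2]])]
    Rs = [np.diag([0.5, 0.5]), np.diag([6.25, 6.25]), np.diag([6.25, 6.25])]

    x_s, P_s = x_p.copy(), P_p.copy()                    # sequential (as in this script)
    for z, R in zip(zs, Rs):
        K = P_s @ H.T @ np.linalg.inv(H @ P_s @ H.T + R)
        x_s = x_s + K @ (z - H @ x_s)
        P_s = P_s - K @ H @ P_s

    H_b = np.vstack([H] * 3)                             # batched
    R_b = np.zeros((6, 6))
    for i, R in enumerate(Rs):
        R_b[2*i:2*i+2, 2*i:2*i+2] = R
    z_b = np.vstack(zs)
    K_b = P_p @ H_b.T @ np.linalg.inv(H_b @ P_p @ H_b.T + R_b)
    x_b = x_p + K_b @ (z_b - H_b @ x_p)

    print(np.allclose(x_s, x_b))                         # True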
diff --git a/sensorfusion/scripts/backup/pixart_H_pos_old.py b/sensorfusion/scripts/backup/pixart_H_pos_old.py
new file mode 100644
index 0000000000000000000000000000000000000000..b54fb5ea90972afae6cf35677e01d1a800f363a4
--- /dev/null
+++ b/sensorfusion/scripts/backup/pixart_H_pos_old.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+from typing import Type
+from std_msgs.msg import Float64MultiArray
+import rospy
+from sensorfusion.msg import Position
+import numpy as np
+
+
+def callback(values: Type[Float64MultiArray]) ->None:
+    """Read sensordata from topic PAA5102_H_values and publish position values, variances and timestamp to pixart_pos_cov
+
+    Args:
+        values (Type[Float64MultiArray]): Message from topic (PAA5102_H_values)
+    """
+    msg.x = np.round(values.data[0] / 11.803, 4) #conversion factor for the X position
+    msg.y = np.round(values.data[1] / -11.966, 4) #conversion factor for the Y position
+    msg.x_var = 6.25 #TODO estimate variance
+    msg.y_var = 6.25
+    msg.header.stamp = rospy.Time.now()
+    msg.header.frame_id = "/pixart_H_frame"
+    pub_.publish(msg)
+    
+
+if __name__ == "__main__":
+    rospy.init_node("pixart_H_pos", anonymous=True)
+    rospy.Subscriber("PAA5102_H_values", Float64MultiArray, callback)
+    pub_ = rospy.Publisher("/pixart_H_pos_cov", Position, queue_size=1)
+    msg = Position()
+
+    rospy.spin()
+
diff --git a/sensorfusion/scripts/backup/pixart_V_pos_old.py b/sensorfusion/scripts/backup/pixart_V_pos_old.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3c4eb6bf413e82d4ec8957f9aa6cf30f0381902
--- /dev/null
+++ b/sensorfusion/scripts/backup/pixart_V_pos_old.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+from typing import Type
+from std_msgs.msg import Float64MultiArray
+import rospy
+from sensorfusion.msg import Position
+import numpy as np
+
+
+def callback(values: Type[Float64MultiArray]) ->None:
+    """Read sensordata from topic PAA5102_V_values and publish position values, variances and timestamp to pixart_pos_cov
+
+    Args:
+        values (Type[Float64MultiArray]): Message from topic (PAA5102_V_values)
+    """
+    msg.x = np.round(values.data[0] / -11.803, 3) #conversion factor for the X position
+    msg.y = np.round(values.data[1] / 11.966, 3) #conversion factor for the Y position
+    msg.x_var = 6.25 #TODO estimate variance
+    msg.y_var = 6.25
+    msg.header.stamp = rospy.Time.now()
+    msg.header.frame_id = "/pixart_V_frame"
+    pub_.publish(msg)
+    
+
+if __name__ == "__main__":
+    rospy.init_node("pixart_V_pos", anonymous=True)
+    rospy.Subscriber("PAA5102_V_values", Float64MultiArray, callback)
+    pub_ = rospy.Publisher("/pixart_V_pos_cov", Position, queue_size=1)
+    msg = Position()
+    rospy.spin()
\ No newline at end of file
diff --git a/sensorfusion/scripts/backup/qualisys_pos_old.py b/sensorfusion/scripts/backup/qualisys_pos_old.py
new file mode 100644
index 0000000000000000000000000000000000000000..6db44c5536115d57c336d44cd2ecb84e0a815d40
--- /dev/null
+++ b/sensorfusion/scripts/backup/qualisys_pos_old.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+from typing import Type
+from std_msgs.msg import Float64MultiArray
+import rospy
+from sensorfusion.msg import Position
+import numpy as np
+
+
+def callback(values: Type[Float64MultiArray]) ->None:
+    """Read sensordata from topic qualisys_pos and publish position values, variances and timestamp to qualisys_pos_cov
+
+    Args:
+        values (Type[Float64MultiArray]): Message from topic (qualisys_pos)
+    """
+    pub_ = rospy.Publisher("/qualisys_pos_cov", Position, queue_size=1)
+    
+    msg = Position()
+    msg.x = np.round(values.data[0],3)
+    msg.y = np.round(values.data[1],3) 
+    msg.x_var = values.data[2]**2 
+    msg.y_var = values.data[2]**2
+    msg.header.stamp = rospy.Time.now()
+    msg.header.frame_id = "/qualisys_frame";       
+    pub_.publish(msg)
+
+def listener() -> None:
+    """Initialize node and subscribe to topic 'qualisys_pos'"""
+    rospy.init_node("qualisys_pos_node", anonymous=True)
+    rospy.Subscriber("qualisys_pub", Float64MultiArray, callback)
+    rospy.spin()
+
+if __name__ == "__main__":
+    listener()
diff --git a/sensorfusion/scripts/drive.py b/sensorfusion/scripts/drive.py
new file mode 100644
index 0000000000000000000000000000000000000000..50f2209c789af57012746e2d1f378e051a1ce41a
--- /dev/null
+++ b/sensorfusion/scripts/drive.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+from typing import Type
+import rospy
+import serial
+from sensorfusion.msg import Motion
+
+
+#Open the serial connection to the motor drivers
+ser = serial.Serial(
+		'/dev/ttyS0', 
+		baudrate = 115200,
+		parity = serial.PARITY_NONE,
+		stopbits=serial.STOPBITS_ONE,
+		bytesize =serial.EIGHTBITS,
+		timeout = 1
+		)
+
+ser.close()
+ser.open()
+
+#Stop the robot to make sure it is not still driving from the last run
+ser.write(bytes(f'<{0},{0},{0},{0}>\n', 'utf-8'))
+
+
+class drive(object):
+
+    def __init__(self):        
+
+        #Subscribe to topic /drive and listen to published messages of kalman filter node
+        self.sub_ = rospy.Subscriber("/drive", Motion, self.drive)
+
+        
+    def drive(self,values):
+        """callback after receiving message of topic /drive. Drive with commands given by kalman filter
+
+        Args:
+            values (direction, velocity, move and dt)
+        """
+        #Drive with the commands submitted by the Kalman filter node
+        #(robot_direction(0-359), rotation(??), robot_velocity (0-100), start=1||stop=0)
+
+        control_input = bytes(f'<{round(values.direction,2)},{0},{values.velocity},{values.move}>\n', 'utf-8')
+        print(control_input)
+        ser.write(control_input)
+        
+        #Drive dt amount of time
+        rospy.sleep(values.dt)
+
+        #Stop the robot after it has driven dt time
+        control_input = bytes(f'<{0},{0},{0},{0}>\n', 'utf-8')
+        ser.write(control_input)
+        
+
+if __name__=="__main__":
+    rospy.init_node("drive_node")
+    drive_obj = drive()
+    rospy.spin()
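drive.py writes frames of the form '<direction,rotation,velocity,move>\n' to the serial port. The motor-controller firmware that consumes them is not part of this diff; a hypothetical sketch of how such a frame could be parsed on the receiving side:

    # Hypothetical receiver-side parser for the '<d,r,v,m>' frames above.
    def parse_frame(line: bytes):
        """Return (direction, rotation, velocity, move) or None for a bad frame."""
        text = line.decode("utf-8", errors="ignore").strip()
        if not (text.startswith("<") and text.endswith(">")):
            return None
        fields = text[1:-1].split(",")
        if len(fields) != 4:
            return None
        return tuple(float(f) for f in fields)

    print(parse_frame(b"<90.0,0,50.0,1>\n"))   # (90.0, 0.0, 50.0, 1.0)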
diff --git a/sensorfusion/scripts/eval/KF_diagramme.py b/sensorfusion/scripts/eval/KF_diagramme.py
new file mode 100644
index 0000000000000000000000000000000000000000..07fdaa78ebcb47e962feedec1481425f7d9b7110
--- /dev/null
+++ b/sensorfusion/scripts/eval/KF_diagramme.py
@@ -0,0 +1,243 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+import statistics
+
+plot_on_windows = True
+leica_frequency = 250
+
+
+def rmse(predictions, targets):
+    #return np.sqrt(((predictions - targets) ** 2).mean())
+    return np.sqrt(np.mean(((predictions - targets) ** 2)))
+
+def get_leica_data(leica,freq):
+
+
+    #Get Position data 
+    # original: X,[X-Position],Y,[Y-Position],Z,[Z-Position],Time
+    # converted: [X_position][Y_Position][Z_Position]
+    leica_raw_data = []
+    
+    for i in range(len(leica)):
+        leica_raw_data.append([float(leica[1][i]),float(leica[3][i]),float(leica[7][i])])
+
+    #Check how many laser-tracker samples fall within one sample period of the Kalman filter
+    #Save the result in compare_dt
+    len_raw = len(leica_raw_data)
+    compare_dt = []
+
+    for i in range(len_raw):
+        if((leica_raw_data[i][2]) - (leica_raw_data[0][2]) < vel_dt[1][0]):
+            continue
+        else:
+            compare_dt.append(i)
+            break
+
+
+    #Determine when the robot starts to move; earlier laser-tracker measurements are not relevant.
+    #We compare measurements that lie compare_dt indices apart and treat a change of at least 0.05 mm in x or y 
+    #as the start of the robot's movement, i.e. the beginning of the relevant laser-tracker data
+    start_index = []
+
+    for j in range(compare_dt[0],len_raw):
+        if((leica_raw_data[j][0] - leica_raw_data[j-compare_dt[0]][0] >= 0.05) or (leica_raw_data[j][1] - leica_raw_data[j-compare_dt[0]][1] >= 0.05)):
+            start_index.append(j-3)
+            break
+        else: 
+            continue
+
+    #Shift the coordinate system to the robot's by subtracting the first measurement from all subsequent ones
+    leica_data = []
+
+    for i in range(start_index[0],len(leica)):
+        leica_data.append([((leica_raw_data[i][0])-(leica_raw_data[start_index[0]][0])),((leica_raw_data[i][1])-(leica_raw_data[start_index[0]][1])),((leica_raw_data[i][2])-(leica_raw_data[start_index[0]][2]))])
+
+    duration = []
+    duration.append(time[0][len(time)-1])
+
+    leica_data_temp = []
+    for i in range(len(leica_data)):
+        if(leica_data[i][2] <= duration[0]):
+            leica_data_temp.append([leica_data[i][0],leica_data[i][1],leica_data[i][2]])
+
+    leica_data = leica_data_temp
+
+    # #Filter lasertracker measurements by using only data which correspond to KF timestamps 
+    # useful_leica_data = []
+
+    # time_len = len(time)
+
+    # for i in range(time_len):
+    #     for j in range(len(leica_data)):
+    #         if(abs(leica_data[j][2] - time[0][i]) <= 1/leica_frequency):
+    #             useful_leica_data.append([leica_data[j][0],leica_data[j][1]])
+    #             break
+
+    #Filter lasertracker measurements by using only data which correspond to KF timestamps 
+
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(leica_data)
+    
+    leica_temp = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)        
+
+
+
+    distances = []
+    useful_leica_data = []
+    min_dist_index = []
+    amount_pos = len(kf)
+
+    for i in range(amount_pos):
+        for j in range(len(leica_data)):
+            distances.append([((leica_temp[0][j] - kf[0][i])**2 + (leica_temp[1][j] - kf[1][i])**2)**(1/2),j])
+        min_dist_index.append(min(distances))
+        distances = []
+
+
+    for i in range(len(min_dist_index)):
+        useful_leica_data.append(leica_data[min_dist_index[i][1]])
+        
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(useful_leica_data)
+    
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)  
+                
+    return leica
+
+#Read out data from CSV files
+if(plot_on_windows):
+    
+    traj = pd.read_csv('C:\\Users\\user\\Desktop\\csv_path\\path.csv', header=None)
+    kf = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\kf_pos.csv', header=None)
+    hpix = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\hpix_pos.csv', header=None)
+    vpix = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\vpix_pos.csv', header=None)
+    lid = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\lidar_pos.csv', header=None)
+    qual = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\qual_pos.csv', header=None)
+    time = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\timestamp.csv', header=None)
+    est = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\estimate_pos.csv', header=None)
+    vel_dt = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\vel_dt.csv', header=None)
+    #leica = pd.read_csv('D:\\Messungen\\UDPMonitorLog.csv',header=None)  
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\UDPMonitorLog.csv',header=None)  
+    
+else:
+    traj = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_path/path.csv', header=None)
+    kf = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/kf_pos.csv', header=None)
+    hpix = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/hpix_pos.csv', header=None)
+    vpix = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vpix_pos.csv', header=None)
+    lid = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/lidar_pos.csv', header=None)
+    qual = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/qual_pos.csv', header=None)
+    time = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/timestamp.csv', header=None)
+    est = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/estimate_pos.csv', header=None)
+    vel_dt = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vel_dt.csv', header=None)
+
+leica_pos = get_leica_data(leica,leica_frequency)
+
+print(leica_pos[0])
+
+    
+plt.rc('axes', axisbelow=True)
+    
+#Position Data On Each Measurement Update
+plot1 = plt.figure(1)
+plt.suptitle('Positionen Prüfbahn\n', fontweight="bold")
+plt.title('Geschwindigkeit: %i' %int(vel_dt[0][0]) + ' mm/s')
+plt.xlabel('X-Position [$mm$]')
+plt.ylabel('Y-Position [$mm$]')
+plt.grid(True)
+
+#ax = plt.gca()
+#ax.set_aspect('equal', adjustable='box')
+
+plt.plot(traj[0],traj[1],'k--',label='Ideale Trajektorie')
+plt.scatter(vpix[0],vpix[1],label='Pixart_front')
+plt.scatter(hpix[0],hpix[1],label='Pixart_back')
+plt.scatter(lid[0],lid[1],label='LiDAR')
+#plt.scatter(qual[0],qual[1],label='Qualisys',color='magenta')
+plt.scatter(est[0],est[1],label='Schätzung')
+plt.plot(kf[0],kf[1],color='red',label='Kalman Filter',ls='--')
+plt.scatter(kf[0],kf[1],color='red')
+plt.scatter(leica_pos[0],leica_pos[1],marker='X',color='blue')
+plt.plot(leica_pos[0],leica_pos[1],label='Leica', ls='--',color='blue')
+
+plt.legend(fontsize=8)
+
+plt.savefig(r"C:/Users/user/Desktop/Results/" + str(uuid.uuid4()) + r".png")
+
+
+#X-Position Errors On Each Measurement Update
+kf_x_diff = np.subtract(kf[0],leica_pos[0])
+vpix_x_diff = np.subtract(vpix[0],leica_pos[0])
+hpix_x_diff = np.subtract(hpix[0],leica_pos[0])
+lid_x_diff = np.subtract(lid[0],leica_pos[0])
+#qual_x_diff = np.subtract(qual[0],leica_pos[0])
+est_x_diff = np.subtract(est[0],leica_pos[0])
+
+# print(len(kf))
+# print(len(leica_pos))
+
+plot2 = plt.figure(2)
+plt.suptitle('X-Position Abweichung\n', fontweight="bold")
+plt.title('RMSE: %f' %rmse(kf[0],leica_pos[0]) + ' mm')
+
+plt.xlabel('Zeit [$s$]')
+plt.ylabel('X-Abweichung [$mm$]')
+plt.grid(True)
+
+plt.plot(time,np.zeros(len(time)),'--',label='Leica Trajektorie',color='blue')
+plt.scatter(time,vpix_x_diff,label='Pixart_front')
+plt.scatter(time,hpix_x_diff,label='Pixart_back')
+plt.scatter(time,lid_x_diff,label='LiDAR')
+plt.scatter(time,est_x_diff,label='Schätzung')
+
+#plt.scatter(time,qual_x_diff,label='Qualisys',color='magenta')
+plt.plot(time,kf_x_diff,color='red',label='Kalman Filter',ls='--')
+plt.scatter(time,kf_x_diff,color='red')
+
+plt.legend(fontsize=10)
+
+plt.savefig(r"C:/Users/user/Desktop/Results/" + str(uuid.uuid4()) + r".png")
+
+
+
+
+#Y-Position Errors On Each Measurement Update
+kf_y_diff = np.subtract(kf[1],leica_pos[1])
+vpix_y_diff = np.subtract(vpix[1],leica_pos[1])
+hpix_y_diff = np.subtract(hpix[1],leica_pos[1])
+lid_y_diff = np.subtract(lid[1],leica_pos[1])
+#qual_y_diff = np.subtract(qual[1],leica_pos[1])
+est_y_diff = np.subtract(est[1],leica_pos[1])
+
+
+plot3 = plt.figure(3)
+#plt.title('Y-Position Abweichung\n\nRMSE: %f\n' %rmse(kf[1],leica_pos[1]), fontweight="bold")
+
+plt.suptitle('Y-Position Abweichung\n', fontweight="bold")
+plt.title('RMSE: %f' %rmse(kf[1],leica_pos[1]) + ' mm')
+
+plt.xlabel('Zeit [$s$]')
+plt.ylabel('Y-Abweichung [$mm$]')
+plt.grid(True)
+
+plt.plot(time,np.zeros(len(time)),'--',label='Leica Trajektorie',color='blue')
+plt.scatter(time,vpix_y_diff,label='Pixart_front')
+plt.scatter(time,hpix_y_diff,label='Pixart_back')
+plt.scatter(time,lid_y_diff,label='LiDAR')
+plt.scatter(time,est_y_diff,label='Schätzung')
+#plt.scatter(time,qual_y_diff,label='Qualisys',color='magenta')
+plt.plot(time,kf_y_diff,color='red',label='Kalman Filter',ls='--')
+plt.scatter(time,kf_y_diff,color='red')
+
+plt.legend(fontsize=10)
+
+plt.savefig(r"C:/Users/user/Desktop/Results/" + str(uuid.uuid4()) + r".png")
+
+
+plt.show()
\ No newline at end of file
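The nearest-neighbour matching in get_leica_data() above is an O(N*M) Python loop over all KF and Leica positions. A vectorized numpy alternative (sketch, assuming the positions are available as (M, 2) and (N, 2) arrays):

    # For each KF position, pick the index of the closest Leica sample.
    import numpy as np

    def nearest_leica_indices(leica_xy, kf_xy):
        d = np.hypot(leica_xy[None, :, 0] - kf_xy[:, None, 0],
                     leica_xy[None, :, 1] - kf_xy[:, None, 1])
        return d.argmin(axis=1)

    leica_xy = np.array([[0.0, 0.0], [1.0, 0.1], [2.0, 0.2]])
    kf_xy = np.array([[0.9, 0.0], [2.1, 0.3]])
    print(nearest_leica_indices(leica_xy, kf_xy))   # [1 2]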
diff --git a/sensorfusion/scripts/eval/KF_v10.py b/sensorfusion/scripts/eval/KF_v10.py
new file mode 100644
index 0000000000000000000000000000000000000000..82af79cd6732a4e6e6d8ef25255eb61ced9261a5
--- /dev/null
+++ b/sensorfusion/scripts/eval/KF_v10.py
@@ -0,0 +1,260 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+
+plot_on_windows = True
+leica_frequency = 210
+
+vel_dt = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\1\\csv_pos\\vel_dt.csv', header=None)
+
+#Read out data from CSV files
+kf_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\1\\csv_pos\\kf_pos.csv', header=None)
+leica_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\1\\UDPMonitorLog.csv',header=None)
+time_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\1\\csv_pos\\timestamp.csv', header=None)  
+
+kf_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\2\\csv_pos\\kf_pos.csv', header=None)
+leica_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\2\\UDPMonitorLog.csv',header=None)
+time_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\2\\csv_pos\\timestamp.csv', header=None)   
+
+kf_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\3\\csv_pos\\kf_pos.csv', header=None)
+leica_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\3\\UDPMonitorLog.csv',header=None) 
+time_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\3\\csv_pos\\timestamp.csv', header=None)  
+
+kf_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\4\\csv_pos\\kf_pos.csv', header=None)
+leica_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\4\\UDPMonitorLog.csv',header=None) 
+time_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\4\\csv_pos\\timestamp.csv', header=None)  
+
+kf_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\5\\csv_pos\\kf_pos.csv', header=None)
+leica_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\5\\UDPMonitorLog.csv',header=None)  
+time_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v10\\5\\csv_pos\\timestamp.csv', header=None) 
+
+
+
+def rmse(predictions, targets):
+    return np.sqrt(np.mean(((predictions - targets) ** 2)))
+
+def get_leica_data(leica,time,kf):
+
+
+    #Get Position data 
+    # original: X,[X-Position],Y,[Y-Position],Z,[Z-Position],Time
+    # converted: [X_position][Y_Position][Z_Position]
+    leica_raw_data = []
+    
+    for i in range(len(leica)):
+        leica_raw_data.append([float(leica[1][i]),float(leica[3][i]),float(leica[7][i])])
+
+    #Check how many laser-tracker samples fall within one sample period of the Kalman filter
+    #Save the result in compare_dt
+    len_raw = len(leica_raw_data)
+    compare_dt = []
+
+    for i in range(len_raw):
+        if((leica_raw_data[i][2]) - (leica_raw_data[0][2]) < vel_dt[1][0]):
+            continue
+        else:
+            compare_dt.append(i)
+            break
+
+
+    #Determine when the robot starts to move; earlier laser-tracker measurements are not relevant.
+    #We compare measurements that lie compare_dt indices apart and treat a change of at least 0.05 mm in x or y 
+    #as the start of the robot's movement, i.e. the beginning of the relevant laser-tracker data
+    start_index = []
+
+    for j in range(compare_dt[0],len_raw):
+        if((leica_raw_data[j][0] - leica_raw_data[j-compare_dt[0]][0] >= 0.05) or (leica_raw_data[j][1] - leica_raw_data[j-compare_dt[0]][1] >= 0.05)):
+            start_index.append(j-3)
+            break
+        else: 
+            continue
+
+    #Shift the coordinate system to the robot's by subtracting the first measurement from all subsequent ones
+    leica_data = []
+
+    for i in range(start_index[0],len(leica)):
+        leica_data.append([((leica_raw_data[i][0])-(leica_raw_data[start_index[0]][0])),((leica_raw_data[i][1])-(leica_raw_data[start_index[0]][1])),((leica_raw_data[i][2])-(leica_raw_data[start_index[0]][2]))])
+
+    duration = []
+    duration.append(time[0][len(time)-1])
+
+    leica_data_temp = []
+    for i in range(len(leica_data)):
+        if(leica_data[i][2] <= duration[0]):
+            leica_data_temp.append([leica_data[i][0],leica_data[i][1],leica_data[i][2]])
+
+    leica_data = leica_data_temp
+
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(leica_data)
+    
+    leica_temp = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)        
+
+
+
+    distances = []
+    useful_leica_data = []
+    min_dist_index = []
+    amount_pos = len(kf)
+
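+    #For each KF position, pick the Leica sample with the smallest Euclidean distance in the xy-plane
+    #(nearest-neighbour matching); min() over the [distance, index] pairs returns the pair with the smallest distance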
+    for i in range(amount_pos):
+        for j in range(len(leica_data)):
+            distances.append([((leica_temp[0][j] - kf[0][i])**2 + (leica_temp[1][j] - kf[1][i])**2)**(1/2),j])
+        min_dist_index.append(min(distances))
+        distances = []
+
+
+    for i in range(len(min_dist_index)):
+        useful_leica_data.append(leica_data[min_dist_index[i][1]])
+        
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(useful_leica_data)
+    
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)  
+                
+    return leica
+
+
+leica_pos_1 = get_leica_data(leica_1,time_1,kf_1)
+kf_x_1 = []
+kf_y_1 = []
+leica_x_1 = []
+leica_y_1 = []
+
+leica_pos_2 = get_leica_data(leica_2,time_2,kf_2)
+kf_x_2 = []
+kf_y_2 = []
+leica_x_2 = []
+leica_y_2 = []
+
+leica_pos_3 = get_leica_data(leica_3,time_3,kf_3)
+kf_x_3 = []
+kf_y_3 = []
+leica_x_3 = []
+leica_y_3 = []
+
+leica_pos_4 = get_leica_data(leica_4,time_4,kf_4)
+kf_x_4 = []
+kf_y_4 = []
+leica_x_4 = []
+leica_y_4 = []
+
+
+leica_pos_5 = get_leica_data(leica_5,time_5,kf_5)
+kf_x_5 = []
+kf_y_5 = []
+leica_x_5 = []
+leica_y_5 = []
+
+
+
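+#Collect the x/y columns of all five cycles; this assumes that every cycle contains the same number of
+#KF samples and matched Leica samples as cycle 1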
+for i in range(len(kf_1)):
+    kf_x_1.append(kf_1[0][i])
+    kf_y_1.append(kf_1[1][i])
+    kf_x_2.append(kf_2[0][i])
+    kf_y_2.append(kf_2[1][i])
+    kf_x_3.append(kf_3[0][i])
+    kf_y_3.append(kf_3[1][i])
+    kf_x_4.append(kf_4[0][i])
+    kf_y_4.append(kf_4[1][i])
+    kf_x_5.append(kf_5[0][i])
+    kf_y_5.append(kf_5[1][i])
+    
+    leica_x_1.append(leica_pos_1[0][i])
+    leica_y_1.append(leica_pos_1[1][i])
+    leica_x_2.append(leica_pos_2[0][i])
+    leica_y_2.append(leica_pos_2[1][i])
+    leica_x_3.append(leica_pos_3[0][i])
+    leica_y_3.append(leica_pos_3[1][i])
+    leica_x_4.append(leica_pos_4[0][i])
+    leica_y_4.append(leica_pos_4[1][i])
+    leica_x_5.append(leica_pos_5[0][i])
+    leica_y_5.append(leica_pos_5[1][i])
+    
+    
+kf_x = np.transpose([kf_x_1,kf_x_2,kf_x_3,kf_x_4,kf_x_5])
+leica_x = np.transpose([leica_x_1,leica_x_2,leica_x_3,leica_x_4,leica_x_5])
+kf_y = np.transpose([kf_y_1,kf_y_2,kf_y_3,kf_y_4,kf_y_5])
+leica_y = np.transpose([leica_y_1,leica_y_2,leica_y_3,leica_y_4,leica_y_5])
+
+#RMSE per cycle between the Kalman-filter estimate and the Leica reference, separately for x and y
+
+rmse_x_1 = rmse((kf_1[0]),(leica_pos_1[0]))
+rmse_y_1 = rmse((kf_1[1]),(leica_pos_1[1]))
+rmse_x_2 = rmse((kf_2[0]),(leica_pos_2[0]))
+rmse_y_2 = rmse((kf_2[1]),(leica_pos_2[1]))
+rmse_x_3 = rmse((kf_3[0]),(leica_pos_3[0]))
+rmse_y_3 = rmse((kf_3[1]),(leica_pos_3[1]))
+rmse_x_4 = rmse((kf_4[0]),(leica_pos_4[0]))
+rmse_y_4 = rmse((kf_4[1]),(leica_pos_4[1]))
+rmse_x_5 = rmse((kf_5[0]),(leica_pos_5[0]))
+rmse_y_5 = rmse((kf_5[1]),(leica_pos_5[1]))
+
+rmse_x = [rmse_x_1,rmse_x_2,rmse_x_3,rmse_x_4,rmse_x_5]
+rmse_y = [rmse_y_1,rmse_y_2,rmse_y_3,rmse_y_4,rmse_y_5]
+
+
+print(rmse_x,rmse_y)
+
+
+kf_mean_x = []
+leica_mean_x = []
+kf_mean_y = []
+leica_mean_y = []
+
+
+for i in range(len(kf_x)):
+    kf_mean_x.append(round(np.mean(kf_x[i]),4))
+    leica_mean_x.append(round(np.mean(leica_x[i]),4))
+    kf_mean_y.append(round(np.mean(kf_y[i]),4))
+    leica_mean_y.append(round(np.mean(leica_y[i]),4))
+
+#Path accuracy (Bahn-Genauigkeit): for each sample, the Euclidean distance between the KF position and the
+#Leica position, each averaged over the five cycles
+Bahn_Genauigkeit = []
+
+for i in range(len(kf_mean_x)):
+    Bahn_Genauigkeit.append((((kf_mean_x[i] - leica_mean_x[i])**2) + ((kf_mean_y[i] - leica_mean_y[i])**2))**(1/2))
+
+print(np.mean(Bahn_Genauigkeit))
+
+Bahn_Genauigkeit = list(filter(lambda x : x != 0.0, Bahn_Genauigkeit))
+print(np.min(Bahn_Genauigkeit))
+print(np.mean(Bahn_Genauigkeit))   
+AT_bahn = np.max(Bahn_Genauigkeit)
+print(AT_bahn)
+
+
+plt.rc('axes', axisbelow=True)
+
+X = ['1','2','3','4', '5']
+X_axis = np.arange(len(X))
+
+plot1 = plt.figure(1)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, rmse_x, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, rmse_y, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("RMSE [$mm$]")
+plt.title("RMSE Achteck - v10",fontweight="bold")
+plt.legend()
+
+plt.show()
diff --git a/sensorfusion/scripts/eval/KF_v100.py b/sensorfusion/scripts/eval/KF_v100.py
new file mode 100644
index 0000000000000000000000000000000000000000..24d779cb77ba699d0397effd023bd20a226f74fc
--- /dev/null
+++ b/sensorfusion/scripts/eval/KF_v100.py
@@ -0,0 +1,259 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+
+plot_on_windows = True
+leica_frequency = 250
+
+vel_dt = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\1\\csv_pos\\vel_dt.csv', header=None)
+
+#Read out data from CSV files
+kf_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\1\\csv_pos\\kf_pos.csv', header=None)
+leica_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\1\\UDPMonitorLog.csv',header=None)
+time_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\1\\csv_pos\\timestamp.csv', header=None)  
+
+kf_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\2\\csv_pos\\kf_pos.csv', header=None)
+leica_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\2\\UDPMonitorLog.csv',header=None)
+time_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\2\\csv_pos\\timestamp.csv', header=None)   
+
+kf_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\3\\csv_pos\\kf_pos.csv', header=None)
+leica_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\3\\UDPMonitorLog.csv',header=None) 
+time_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\3\\csv_pos\\timestamp.csv', header=None)  
+
+kf_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\4\\csv_pos\\kf_pos.csv', header=None)
+leica_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\4\\UDPMonitorLog.csv',header=None) 
+time_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\4\\csv_pos\\timestamp.csv', header=None)  
+
+kf_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\5\\csv_pos\\kf_pos.csv', header=None)
+leica_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\5\\UDPMonitorLog.csv',header=None)  
+time_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\5\\csv_pos\\timestamp.csv', header=None) 
+
+
+
+def rmse(predictions, targets):
+    return np.sqrt(np.mean(((predictions - targets) ** 2)))
+
+def get_leica_data(leica,time,kf):
+
+
+    #Get Position data 
+    # original: X,[X-Position],Y,[Y-Position],Z,[Z-Position],Time
+    # converted: [X_position][Y_Position][Z_Position]
+    leica_raw_data = []
+    
+    for i in range(len(leica)):
+        leica_raw_data.append([float(leica[1][i]),float(leica[3][i]),float(leica[7][i])])
+
+    #Determine how many laser-tracker samples fall within one sample interval of the Kalman filter
+    #and save the resulting index offset in compare_dt
+    len_raw = len(leica_raw_data)
+    compare_dt = []
+
+    for i in range(len_raw):
+        if((leica_raw_data[i][2]) - (leica_raw_data[0][2]) < vel_dt[1][0]):
+            continue
+        else:
+            compare_dt.append(i)
+            break
+
+
+    #Determine when the robot starts to move; earlier laser-tracker measurements are not relevant.
+    #Samples separated by the index offset compare_dt are compared, and a change of at least 0.05 mm in the
+    #x or y direction is taken as the start of robot motion and hence of the relevant laser-tracker data.
+    start_index = []
+
+    for j in range(compare_dt[0],len_raw):
+        if((leica_raw_data[j][0] - leica_raw_data[j-compare_dt[0]][0] >= 0.05) or (leica_raw_data[j][1] - leica_raw_data[j-compare_dt[0]][1] >= 0.05)):
+            start_index.append(j-3)
+            break
+        else: 
+            continue
+
+    #Align the coordinate system with the robot's by subtracting the first relevant measurement from all subsequent ones
+    leica_data = []
+
+    for i in range(start_index[0],len(leica)):
+        leica_data.append([((leica_raw_data[i][0])-(leica_raw_data[start_index[0]][0])),((leica_raw_data[i][1])-(leica_raw_data[start_index[0]][1])),((leica_raw_data[i][2])-(leica_raw_data[start_index[0]][2]))])
+
+    duration = []
+    duration.append(time[0][len(time)-1])
+
+    leica_data_temp = []
+    for i in range(len(leica_data)):
+        if(leica_data[i][2] <= duration[0]):
+            leica_data_temp.append([leica_data[i][0],leica_data[i][1],leica_data[i][2]])
+
+    leica_data = leica_data_temp
+
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(leica_data)
+    
+    leica_temp = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)        
+
+
+
+    distances = []
+    useful_leica_data = []
+    min_dist_index = []
+    amount_pos = len(kf)
+
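+    #For each KF position, pick the Leica sample with the smallest Euclidean distance in the xy-plane
+    #(nearest-neighbour matching); min() over the [distance, index] pairs returns the pair with the smallest distance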
+    for i in range(amount_pos):
+        for j in range(len(leica_data)):
+            distances.append([((leica_temp[0][j] - kf[0][i])**2 + (leica_temp[1][j] - kf[1][i])**2)**(1/2),j])
+        min_dist_index.append(min(distances))
+        distances = []
+
+
+    for i in range(len(min_dist_index)):
+        useful_leica_data.append(leica_data[min_dist_index[i][1]])
+        
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(useful_leica_data)
+    
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)  
+                
+    return leica
+
+
+leica_pos_1 = get_leica_data(leica_1,time_1,kf_1)
+kf_x_1 = []
+kf_y_1 = []
+leica_x_1 = []
+leica_y_1 = []
+
+leica_pos_2 = get_leica_data(leica_2,time_2,kf_2)
+kf_x_2 = []
+kf_y_2 = []
+leica_x_2 = []
+leica_y_2 = []
+
+leica_pos_3 = get_leica_data(leica_3,time_3,kf_3)
+kf_x_3 = []
+kf_y_3 = []
+leica_x_3 = []
+leica_y_3 = []
+
+leica_pos_4 = get_leica_data(leica_4,time_4,kf_4)
+kf_x_4 = []
+kf_y_4 = []
+leica_x_4 = []
+leica_y_4 = []
+
+
+leica_pos_5 = get_leica_data(leica_5,time_5,kf_5)
+kf_x_5 = []
+kf_y_5 = []
+leica_x_5 = []
+leica_y_5 = []
+
+
+
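+#Collect the x/y columns of all five cycles; this assumes that every cycle contains the same number of
+#KF samples and matched Leica samples as cycle 1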
+for i in range(len(kf_1)):
+    kf_x_1.append(kf_1[0][i])
+    kf_y_1.append(kf_1[1][i])
+    kf_x_2.append(kf_2[0][i])
+    kf_y_2.append(kf_2[1][i])
+    kf_x_3.append(kf_3[0][i])
+    kf_y_3.append(kf_3[1][i])
+    kf_x_4.append(kf_4[0][i])
+    kf_y_4.append(kf_4[1][i])
+    kf_x_5.append(kf_5[0][i])
+    kf_y_5.append(kf_5[1][i])
+    
+    leica_x_1.append(leica_pos_1[0][i])
+    leica_y_1.append(leica_pos_1[1][i])
+    leica_x_2.append(leica_pos_2[0][i])
+    leica_y_2.append(leica_pos_2[1][i])
+    leica_x_3.append(leica_pos_3[0][i])
+    leica_y_3.append(leica_pos_3[1][i])
+    leica_x_4.append(leica_pos_4[0][i])
+    leica_y_4.append(leica_pos_4[1][i])
+    leica_x_5.append(leica_pos_5[0][i])
+    leica_y_5.append(leica_pos_5[1][i])
+    
+    
+kf_x = np.transpose([kf_x_1,kf_x_2,kf_x_3,kf_x_4,kf_x_5])
+leica_x = np.transpose([leica_x_1,leica_x_2,leica_x_3,leica_x_4,leica_x_5])
+kf_y = np.transpose([kf_y_1,kf_y_2,kf_y_3,kf_y_4,kf_y_5])
+leica_y = np.transpose([leica_y_1,leica_y_2,leica_y_3,leica_y_4,leica_y_5])
+
+#RMSE per cycle between the Kalman-filter estimate and the Leica reference, separately for x and y
+
+rmse_x_1 = rmse((kf_1[0]),(leica_pos_1[0]))
+rmse_y_1 = rmse((kf_1[1]),(leica_pos_1[1]))
+rmse_x_2 = rmse((kf_2[0]),(leica_pos_2[0]))
+rmse_y_2 = rmse((kf_2[1]),(leica_pos_2[1]))
+rmse_x_3 = rmse((kf_3[0]),(leica_pos_3[0]))
+rmse_y_3 = rmse((kf_3[1]),(leica_pos_3[1]))
+rmse_x_4 = rmse((kf_4[0]),(leica_pos_4[0]))
+rmse_y_4 = rmse((kf_4[1]),(leica_pos_4[1]))
+rmse_x_5 = rmse((kf_5[0]),(leica_pos_5[0]))
+rmse_y_5 = rmse((kf_5[1]),(leica_pos_5[1]))
+
+rmse_x = [rmse_x_1,rmse_x_2,rmse_x_3,rmse_x_4,rmse_x_5]
+rmse_y = [rmse_y_1,rmse_y_2,rmse_y_3,rmse_y_4,rmse_y_5]
+
+
+print(rmse_x,rmse_y)
+
+
+kf_mean_x = []
+leica_mean_x = []
+kf_mean_y = []
+leica_mean_y = []
+
+
+for i in range(len(kf_x)):
+    kf_mean_x.append(round(np.mean(kf_x[i]),4))
+    leica_mean_x.append(round(np.mean(leica_x[i]),4))
+    kf_mean_y.append(round(np.mean(kf_y[i]),4))
+    leica_mean_y.append(round(np.mean(leica_y[i]),4))
+
+#Path accuracy (Bahn-Genauigkeit): for each sample, the Euclidean distance between the KF position and the
+#Leica position, each averaged over the five cycles
+Bahn_Genauigkeit = []
+
+for i in range(len(kf_mean_x)):
+    Bahn_Genauigkeit.append((((kf_mean_x[i] - leica_mean_x[i])**2) + ((kf_mean_y[i] - leica_mean_y[i])**2))**(1/2))
+
+print(Bahn_Genauigkeit)
+
+Bahn_Genauigkeit = list(filter(lambda x : x != 0.0, Bahn_Genauigkeit))
+print(np.min(Bahn_Genauigkeit))
+print(np.mean(Bahn_Genauigkeit))   
+AT_bahn = np.max(Bahn_Genauigkeit)
+print(AT_bahn)
+
+
+plt.rc('axes', axisbelow=True)
+
+X = ['1','2','3','4', '5']
+X_axis = np.arange(len(X))
+
+plot1 = plt.figure(1)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, rmse_x, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, rmse_y, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("RMSE [$mm$]")
+plt.title("RMSE Achteck - v100",fontweight="bold")
+plt.legend()
+
+plt.show()
diff --git a/sensorfusion/scripts/eval/KF_v50.py b/sensorfusion/scripts/eval/KF_v50.py
new file mode 100644
index 0000000000000000000000000000000000000000..0aa7715e868923f9e069b99b3405999abccbca9c
--- /dev/null
+++ b/sensorfusion/scripts/eval/KF_v50.py
@@ -0,0 +1,258 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+
+plot_on_windows = True
+leica_frequency = 250
+
+vel_dt = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\1\\csv_pos\\vel_dt.csv', header=None)
+
+#Read out data from CSV files
+kf_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\1\\csv_pos\\kf_pos.csv', header=None)
+leica_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\1\\UDPMonitorLog.csv',header=None)
+time_1 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\1\\csv_pos\\timestamp.csv', header=None)  
+
+kf_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\2\\csv_pos\\kf_pos.csv', header=None)
+leica_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\2\\UDPMonitorLog.csv',header=None)
+time_2 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\2\\csv_pos\\timestamp.csv', header=None)   
+
+kf_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\3\\csv_pos\\kf_pos.csv', header=None)
+leica_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\3\\UDPMonitorLog.csv',header=None) 
+time_3 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\3\\csv_pos\\timestamp.csv', header=None)  
+
+kf_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\4\\csv_pos\\kf_pos.csv', header=None)
+leica_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\4\\UDPMonitorLog.csv',header=None) 
+time_4 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\4\\csv_pos\\timestamp.csv', header=None)  
+
+kf_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\5\\csv_pos\\kf_pos.csv', header=None)
+leica_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\5\\UDPMonitorLog.csv',header=None)  
+time_5 = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v50\\5\\csv_pos\\timestamp.csv', header=None) 
+
+
+
+def rmse(predictions, targets):
+    return np.sqrt(np.mean(((predictions - targets) ** 2)))
+
+def get_leica_data(leica,time,kf):
+
+
+    #Get Position data 
+    # original: X,[X-Position],Y,[Y-Position],Z,[Z-Position],Time
+    # converted: [X_position][Y_Position][Z_Position]
+    leica_raw_data = []
+    
+    for i in range(len(leica)):
+        leica_raw_data.append([float(leica[1][i]),float(leica[3][i]),float(leica[7][i])])
+
+    #Determine how many laser-tracker samples fall within one sample interval of the Kalman filter
+    #and save the resulting index offset in compare_dt
+    len_raw = len(leica_raw_data)
+    compare_dt = []
+
+    for i in range(len_raw):
+        if((leica_raw_data[i][2]) - (leica_raw_data[0][2]) < vel_dt[1][0]):
+            continue
+        else:
+            compare_dt.append(i)
+            break
+
+
+    #Determine when the robot starts to move; earlier laser-tracker measurements are not relevant.
+    #Samples separated by the index offset compare_dt are compared, and a change of at least 0.05 mm in the
+    #x or y direction is taken as the start of robot motion and hence of the relevant laser-tracker data.
+    start_index = []
+
+    for j in range(compare_dt[0],len_raw):
+        if((leica_raw_data[j][0] - leica_raw_data[j-compare_dt[0]][0] >= 0.05) or (leica_raw_data[j][1] - leica_raw_data[j-compare_dt[0]][1] >= 0.05)):
+            start_index.append(j-3)
+            break
+        else: 
+            continue
+
+    #Align the coordinate system with the robot's by subtracting the first relevant measurement from all subsequent ones
+    leica_data = []
+
+    for i in range(start_index[0],len(leica)):
+        leica_data.append([((leica_raw_data[i][0])-(leica_raw_data[start_index[0]][0])),((leica_raw_data[i][1])-(leica_raw_data[start_index[0]][1])),((leica_raw_data[i][2])-(leica_raw_data[start_index[0]][2]))])
+
+    duration = []
+    duration.append(time[0][len(time)-1])
+
+    leica_data_temp = []
+    for i in range(len(leica_data)):
+        if(leica_data[i][2] <= duration[0]):
+            leica_data_temp.append([leica_data[i][0],leica_data[i][1],leica_data[i][2]])
+
+    leica_data = leica_data_temp
+
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(leica_data)
+    
+    leica_temp = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)        
+
+
+
+    distances = []
+    useful_leica_data = []
+    min_dist_index = []
+    amount_pos = len(kf)
+
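+    #For each KF position, pick the Leica sample with the smallest Euclidean distance in the xy-plane
+    #(nearest-neighbour matching); min() over the [distance, index] pairs returns the pair with the smallest distance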
+    for i in range(amount_pos):
+        for j in range(len(leica_data)):
+            distances.append([((leica_temp[0][j] - kf[0][i])**2 + (leica_temp[1][j] - kf[1][i])**2)**(1/2),j])
+        min_dist_index.append(min(distances))
+        distances = []
+
+
+    for i in range(len(min_dist_index)):
+        useful_leica_data.append(leica_data[min_dist_index[i][1]])
+        
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(useful_leica_data)
+    
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)  
+                
+    return leica
+
+
+leica_pos_1 = get_leica_data(leica_1,time_1,kf_1)
+kf_x_1 = []
+kf_y_1 = []
+leica_x_1 = []
+leica_y_1 = []
+
+leica_pos_2 = get_leica_data(leica_2,time_2,kf_2)
+kf_x_2 = []
+kf_y_2 = []
+leica_x_2 = []
+leica_y_2 = []
+
+leica_pos_3 = get_leica_data(leica_3,time_3,kf_3)
+kf_x_3 = []
+kf_y_3 = []
+leica_x_3 = []
+leica_y_3 = []
+
+leica_pos_4 = get_leica_data(leica_4,time_4,kf_4)
+kf_x_4 = []
+kf_y_4 = []
+leica_x_4 = []
+leica_y_4 = []
+
+
+leica_pos_5 = get_leica_data(leica_5,time_5,kf_5)
+kf_x_5 = []
+kf_y_5 = []
+leica_x_5 = []
+leica_y_5 = []
+
+
+
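+#Collect the x/y columns of all five cycles; this assumes that every cycle contains the same number of
+#KF samples and matched Leica samples as cycle 1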
+for i in range(len(kf_1)):
+    kf_x_1.append(kf_1[0][i])
+    kf_y_1.append(kf_1[1][i])
+    kf_x_2.append(kf_2[0][i])
+    kf_y_2.append(kf_2[1][i])
+    kf_x_3.append(kf_3[0][i])
+    kf_y_3.append(kf_3[1][i])
+    kf_x_4.append(kf_4[0][i])
+    kf_y_4.append(kf_4[1][i])
+    kf_x_5.append(kf_5[0][i])
+    kf_y_5.append(kf_5[1][i])
+    
+    leica_x_1.append(leica_pos_1[0][i])
+    leica_y_1.append(leica_pos_1[1][i])
+    leica_x_2.append(leica_pos_2[0][i])
+    leica_y_2.append(leica_pos_2[1][i])
+    leica_x_3.append(leica_pos_3[0][i])
+    leica_y_3.append(leica_pos_3[1][i])
+    leica_x_4.append(leica_pos_4[0][i])
+    leica_y_4.append(leica_pos_4[1][i])
+    leica_x_5.append(leica_pos_5[0][i])
+    leica_y_5.append(leica_pos_5[1][i])
+    
+    
+kf_x = np.transpose([kf_x_1,kf_x_2,kf_x_3,kf_x_4,kf_x_5])
+leica_x = np.transpose([leica_x_1,leica_x_2,leica_x_3,leica_x_4,leica_x_5])
+kf_y = np.transpose([kf_y_1,kf_y_2,kf_y_3,kf_y_4,kf_y_5])
+leica_y = np.transpose([leica_y_1,leica_y_2,leica_y_3,leica_y_4,leica_y_5])
+
+#RMSE per cycle between the Kalman-filter estimate and the Leica reference, separately for x and y
+
+rmse_x_1 = rmse((kf_1[0]),(leica_pos_1[0]))
+rmse_y_1 = rmse((kf_1[1]),(leica_pos_1[1]))
+rmse_x_2 = rmse((kf_2[0]),(leica_pos_2[0]))
+rmse_y_2 = rmse((kf_2[1]),(leica_pos_2[1]))
+rmse_x_3 = rmse((kf_3[0]),(leica_pos_3[0]))
+rmse_y_3 = rmse((kf_3[1]),(leica_pos_3[1]))
+rmse_x_4 = rmse((kf_4[0]),(leica_pos_4[0]))
+rmse_y_4 = rmse((kf_4[1]),(leica_pos_4[1]))
+rmse_x_5 = rmse((kf_5[0]),(leica_pos_5[0]))
+rmse_y_5 = rmse((kf_5[1]),(leica_pos_5[1]))
+
+rmse_x = [rmse_x_1,rmse_x_2,rmse_x_3,rmse_x_4,rmse_x_5]
+rmse_y = [rmse_y_1,rmse_y_2,rmse_y_3,rmse_y_4,rmse_y_5]
+
+
+print(rmse_x,rmse_y)
+
+
+kf_mean_x = []
+leica_mean_x = []
+kf_mean_y = []
+leica_mean_y = []
+
+
+for i in range(len(kf_x)):
+    kf_mean_x.append(round(np.mean(kf_x[i]),4))
+    leica_mean_x.append(round(np.mean(leica_x[i]),4))
+    kf_mean_y.append(round(np.mean(kf_y[i]),4))
+    leica_mean_y.append(round(np.mean(leica_y[i]),4))
+
+#Path accuracy (Bahn-Genauigkeit): for each sample, the Euclidean distance between the KF position and the
+#Leica position, each averaged over the five cycles
+Bahn_Genauigkeit = []
+
+for i in range(len(kf_mean_x)):
+    Bahn_Genauigkeit.append((((kf_mean_x[i] - leica_mean_x[i])**2) + ((kf_mean_y[i] - leica_mean_y[i])**2))**(1/2))
+
+Bahn_Genauigkeit = list(filter(lambda x : x != 0.0, Bahn_Genauigkeit))
+print(np.min(Bahn_Genauigkeit))
+print(np.mean(Bahn_Genauigkeit))   
+AT_bahn = np.max(Bahn_Genauigkeit)
+
+print(AT_bahn)
+
+
+plt.rc('axes', axisbelow=True)
+
+X = ['1','2','3','4', '5']
+X_axis = np.arange(len(X))
+
+plot1 = plt.figure(1)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, rmse_x, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, rmse_y, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("RMSE [$mm$]")
+plt.title("RMSE Achteck - v50",fontweight="bold")
+plt.legend()
+
+plt.show()
diff --git a/sensorfusion/scripts/eval/RMSE_calc.py b/sensorfusion/scripts/eval/RMSE_calc.py
new file mode 100644
index 0000000000000000000000000000000000000000..65ccac2f52993349c3a9bc1bdbd466d978158fa6
--- /dev/null
+++ b/sensorfusion/scripts/eval/RMSE_calc.py
@@ -0,0 +1,192 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+import statistics
+
+plot_on_windows = True
+leica_frequency = 250
+
+
+def rmse(predictions, targets):
+    #return np.sqrt(((predictions - targets) ** 2).mean())
+    return np.sqrt(np.mean(((predictions - targets) ** 2)))
+
+def get_leica_data(leica,freq):
+
+
+    #Get Position data
+    # original: X,[X-Position],Y,[Y-Position],Z,[Z-Position],Time
+    # converted: [X_position][Y_Position][Z_Position]
+    leica_raw_data = []
+    
+    for i in range(len(leica)):
+        leica_raw_data.append([float(leica[1][i]),float(leica[3][i]),float(leica[7][i])])
+
+    #Determine how many laser-tracker samples fall within one sample interval of the Kalman filter
+    #and save the resulting index offset in compare_dt
+    len_raw = len(leica_raw_data)
+    compare_dt = []
+
+    for i in range(len_raw):
+        if((leica_raw_data[i][2]) - (leica_raw_data[0][2]) < vel_dt[1][0]):
+            continue
+        else:
+            compare_dt.append(i)
+            break
+
+
+    #Determine when the robot starts to move; earlier laser-tracker measurements are not relevant.
+    #Samples separated by the index offset compare_dt are compared, and a change of at least 0.05 mm in the
+    #x or y direction is taken as the start of robot motion and hence of the relevant laser-tracker data.
+    start_index = []
+
+    for j in range(compare_dt[0],len_raw):
+        if((leica_raw_data[j][0] - leica_raw_data[j-compare_dt[0]][0] >= 0.05) or (leica_raw_data[j][1] - leica_raw_data[j-compare_dt[0]][1] >= 0.05)):
+            start_index.append(j-3)
+            break
+        else: 
+            continue
+
+    #Align the coordinate system with the robot's by subtracting the first relevant measurement from all subsequent ones
+    leica_data = []
+
+    for i in range(start_index[0],len(leica)):
+        leica_data.append([((leica_raw_data[i][0])-(leica_raw_data[start_index[0]][0])),((leica_raw_data[i][1])-(leica_raw_data[start_index[0]][1])),((leica_raw_data[i][2])-(leica_raw_data[start_index[0]][2]))])
+
+    duration = []
+    duration.append(time[0][len(time)-1])
+
+    leica_data_temp = []
+    for i in range(len(leica_data)):
+        if(leica_data[i][2] <= duration[0]):
+            leica_data_temp.append([leica_data[i][0],leica_data[i][1],leica_data[i][2]])
+
+    leica_data = leica_data_temp
+
+    # #Filter lasertracker measurements by using only data which correspond to KF timestamps 
+    # useful_leica_data = []
+
+    # time_len = len(time)
+
+    # for i in range(time_len):
+    #     for j in range(len(leica_data)):
+    #         if(abs(leica_data[j][2] - time[0][i]) <= 1/leica_frequency):
+    #             useful_leica_data.append([leica_data[j][0],leica_data[j][1]])
+    #             break
+
+
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(leica_data)
+    
+    leica_temp = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)        
+
+
+
+    distances = []
+    useful_leica_data = []
+    min_dist_index = []
+    amount_pos = len(kf)
+
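+    #For each KF position, pick the Leica sample with the smallest Euclidean distance in the xy-plane
+    #(nearest-neighbour matching); min() over the [distance, index] pairs returns the pair with the smallest distance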
+    for i in range(amount_pos):
+        for j in range(len(leica_data)):
+            distances.append([((leica_temp[0][j] - kf[0][i])**2 + (leica_temp[1][j] - kf[1][i])**2)**(1/2),j])
+        min_dist_index.append(min(distances))
+        distances = []
+
+
+    for i in range(len(min_dist_index)):
+        useful_leica_data.append(leica_data[min_dist_index[i][1]])
+        
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(useful_leica_data)
+    
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)  
+                
+    return leica
+
+#Read out data from CSV files
+if(plot_on_windows):
+    # traj = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_path\\path.csv', header=None)
+    # kf = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\kf_pos.csv', header=None)
+    # hpix = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\hpix_pos.csv', header=None)
+    # vpix = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\vpix_pos.csv', header=None)
+    # lid = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\lidar_pos.csv', header=None)
+    # qual = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\qual_pos.csv', header=None)
+    # time = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\timestamp.csv', header=None)
+    # est = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\estimate_pos.csv', header=None)
+    # vel = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\ROS\\RobotROS_ws\\src\\sensorfusion\\csv_pos\\vel_dt.csv', header=None)
+    
+
+    traj = pd.read_csv('C:\\Users\\user\\Desktop\\csv_path\\path.csv', header=None)
+    kf = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\kf_pos.csv', header=None)
+    hpix = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\hpix_pos.csv', header=None)
+    vpix = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\vpix_pos.csv', header=None)
+    lid = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\lidar_pos.csv', header=None)
+    qual = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\qual_pos.csv', header=None)
+    time = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\timestamp.csv', header=None)
+    est = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\estimate_pos.csv', header=None)
+    vel_dt = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\vel_dt.csv', header=None)
+    #leica = pd.read_csv('D:\\Messungen\\UDPMonitorLog.csv',header=None)  
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\UDPMonitorLog.csv',header=None)  
+    
+else:
+    traj = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_path/path.csv', header=None)
+    kf = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/kf_pos.csv', header=None)
+    hpix = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/hpix_pos.csv', header=None)
+    vpix = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vpix_pos.csv', header=None)
+    lid = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/lidar_pos.csv', header=None)
+    qual = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/qual_pos.csv', header=None)
+    time = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/timestamp.csv', header=None)
+    est = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/estimate_pos.csv', header=None)
+    vel_dt = pd.read_csv('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vel_dt.csv', header=None)
+
+leica_pos = get_leica_data(leica,leica_frequency)
+
+
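+#RMSE of each position source (Kalman filter, both PixArt sensors, LiDAR and the estimate) against the
+#Leica reference, evaluated separately for the x and y coordinates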
+rmse_x_all = [rmse(kf[0],leica_pos[0]),rmse(hpix[0],leica_pos[0]),rmse(vpix[0],leica_pos[0]),rmse(lid[0],leica_pos[0]),rmse(est[0],leica_pos[0])]
+rmse_y_all = [rmse(kf[1],leica_pos[1]),rmse(hpix[1],leica_pos[1]),rmse(vpix[1],leica_pos[1]),rmse(lid[1],leica_pos[1]),rmse(est[1],leica_pos[1])]
+
+plt.rc('axes', axisbelow=True)
+
+X = ['Kalman Filter','Pixart_back','Pixart_front','LiDAR', 'Schätzung']
+X_axis = np.arange(len(X))
+
+plot1 = plt.figure(1)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis-0.15, rmse_x_all, 0.3, label = 'x',color='steelblue')
+plt.bar(X_axis+0.15, rmse_y_all, 0.3, label = 'y',color='lightblue')
+#plt.bar(X_axis + 0.2, v10_y_diff, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Positionen")
+plt.ylabel("RMSE [$mm$]")
+plt.title("RMSE - v100",fontweight="bold")
+plt.legend()
+
+
+# plt.suptitle('Y-Position Abweichung\n', fontweight="bold")
+# plt.title('RMSE: %f' %rmse(kf[1],leica_pos[1]) + ' mm')
+
+# plt.xlabel('Zeit [$s$]')
+# plt.ylabel('Y-Abweichung [$mm$]')
+# plt.grid(True)
+
+# plt.plot(time,np.zeros(len(time)),'--',label='Leica Trajektorie',color='blue')
+# plt.scatter(time,vpix_y_diff,label='Pixart_front')
+# plt.scatter(time,hpix_y_diff,label='Pixart_back')
+# plt.scatter(time,lid_y_diff,label='LiDAR')
+# plt.scatter(time,est_y_diff,label='Schätzung')
+# #plt.scatter(time,qual_y_diff,label='Qualisys',color='magenta')
+# plt.plot(time,kf_y_diff,color='red',label='Kalman Filter',ls='--')
+# plt.scatter(time,kf_y_diff,color='red')
+
+# plt.legend(fontsize=10)
+
+plt.show()
\ No newline at end of file
diff --git a/sensorfusion/scripts/eval/RMSE_diagramme.py b/sensorfusion/scripts/eval/RMSE_diagramme.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0001edbe9561505876968e89c7b5a9c22a8ddc7
--- /dev/null
+++ b/sensorfusion/scripts/eval/RMSE_diagramme.py
@@ -0,0 +1,64 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+
+
+
+rmse_x_v100 = [1.25,4.9,4.93,4.35,3.96]
+rmse_x_v50 = [9.6,9.7,9.02,1.71,2.82]
+rmse_x_v10 = [7.24,3.39,5.21,9.12,4.26]
+
+rmse_y_v100 = [1.40,3.9,6.3,5.57,5.41]
+rmse_y_v50 = [12.26,11.10,11.23,1.91,2.75]
+rmse_y_v10 = [6.83,4.83,4.51,9.11,4.13]
+
+bahn_genauigkeit = [15.25,18.03,10.3]
+mean_bg = [6.75,8.87,5.09]
+min_bg = [0.03,0.26,0.41]
+
+plt.rc('axes', axisbelow=True)
+
+X = ['1','2','3','4', '5']
+X_axis = np.arange(len(X))
+
+plot1 = plt.figure(1)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis -0.2, rmse_x_v10, 0.15, label = 'v10',color='midnightblue')
+plt.bar(X_axis, rmse_x_v50, 0.15, label = 'v50',color='steelblue')
+plt.bar(X_axis + 0.2, rmse_x_v100, 0.15, label = 'v100',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("RMSE [$mm$]")
+plt.title("RMSE x-Richtung",fontweight="bold")
+plt.legend()
+
+plot2 = plt.figure(2)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis -0.2, rmse_y_v10, 0.15, label = 'v10',color='midnightblue')
+plt.bar(X_axis, rmse_y_v50, 0.15, label = 'v50',color='steelblue')
+plt.bar(X_axis + 0.2, rmse_y_v100, 0.15, label = 'v100',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("RMSE [$mm$]")
+plt.title("RMSE y-Richtung",fontweight="bold")
+plt.legend()
+
+X = ['10','50','100']
+X_axis = np.arange(len(X))
+
+plot3 = plt.figure(3)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis-0.2, bahn_genauigkeit,color='dimgray',width = 0.3,label='Maximum')
+plt.bar(X_axis+0.2, mean_bg,color='darkgray',width = 0.3,label='Durchschnitt')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Geschwindigkeit [$mm/s$]")
+plt.ylabel("Bahn-Genauigkeit [$mm$]")
+plt.title("Bahn-Genauigkeit",fontweight="bold")
+plt.legend()
+
+plt.show()
diff --git a/sensorfusion/scripts/eval/geschwindigkeitsfehler.py b/sensorfusion/scripts/eval/geschwindigkeitsfehler.py
new file mode 100644
index 0000000000000000000000000000000000000000..31d3d10a512c37ed74a69587bc64cdeaf19fe217
--- /dev/null
+++ b/sensorfusion/scripts/eval/geschwindigkeitsfehler.py
@@ -0,0 +1,187 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+from itertools import chain
+
+plot_on_windows = True
+
+#Read out data from CSV files
+if(plot_on_windows):
+    
+    v10_1_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Bogen\\v10_bogen_1.csv', header=None)
+    v10_2_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Bogen\\v10_bogen_2.csv', header=None)
+    v50_1_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Bogen\\v50_bogen_1.csv', header=None)
+    v50_2_raw= pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Bogen\\v50_bogen_2.csv', header=None)
+    v100_1_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Bogen\\v100_bogen_1.csv', header=None)
+    v100_2_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Bogen\\v100_bogen_2.csv', header=None)
+    
+       
+#Create Arrays for each position
+v10_1_x = []
+v10_1_y = []
+v50_1_x = []
+v50_1_y = []
+v100_1_x = []
+v100_1_y = []
+
+v10_2_x = []
+v10_2_y = []
+v50_2_x = []
+v50_2_y = []
+v100_2_x = []
+v100_2_y = []
+
+
+for i in range(len(v10_1_raw)):
+    v10_1_x.append([float(v10_1_raw[1][i])])
+    v50_1_x.append([float(v50_1_raw[1][i])])
+    v100_1_x.append([float(v100_1_raw[1][i])])
+    
+    v10_1_y.append([float(v10_1_raw[3][i])])
+    v50_1_y.append([float(v50_1_raw[3][i])])
+    v100_1_y.append([float(v100_1_raw[3][i])])
+    
+    v10_2_x.append([float(v10_2_raw[1][i])])
+    v50_2_x.append([float(v50_2_raw[1][i])])
+    v100_2_x.append([float(v100_2_raw[1][i])])
+    
+    v10_2_y.append([float(v10_2_raw[3][i])])
+    v50_2_y.append([float(v50_2_raw[3][i])])
+    v100_2_y.append([float(v100_2_raw[3][i])])
+      
+
+v10_1_x_diff = np.diff(np.array(v10_1_x).reshape(-1))
+v10_1_y_diff = np.diff(np.array(v10_1_y).reshape(-1))
+v10_2_x_diff = np.diff(np.array(v10_2_x).reshape(-1))
+v10_2_y_diff = np.diff(np.array(v10_2_y).reshape(-1))
+v10_x_diff = list(chain(v10_1_x_diff,v10_2_x_diff))
+v10_y_diff = list(chain(v10_1_y_diff,v10_2_y_diff))
+
+v50_1_x_diff = np.diff(np.array(v50_1_x).reshape(-1))
+v50_1_y_diff = np.diff(np.array(v50_1_y).reshape(-1))
+v50_2_x_diff = np.diff(np.array(v50_2_x).reshape(-1))
+v50_2_y_diff = np.diff(np.array(v50_2_y).reshape(-1))
+v50_x_diff = list(chain(v50_1_x_diff,v50_2_x_diff))
+v50_y_diff = list(chain(v50_1_y_diff,v50_2_y_diff))
+
+v100_1_x_diff = np.diff(np.array(v100_1_x).reshape(-1))
+v100_1_y_diff = np.diff(np.array(v100_1_y).reshape(-1))
+v100_2_x_diff = np.diff(np.array(v100_2_x).reshape(-1))
+v100_2_y_diff = np.diff(np.array(v100_2_y).reshape(-1))
+v100_x_diff = list(chain(v100_1_x_diff,v100_2_x_diff))
+v100_y_diff = list(chain(v100_1_y_diff,v100_2_y_diff))
+
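+#Subtract the nominal offset between consecutive measurement points (119.497 mm, assumed to be the commanded
+#step of the measurement setup) so that only the deviation remains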
+for i in range(len(v10_x_diff)):
+    v10_x_diff[i] -= 119.497
+    v50_x_diff[i] -= 119.497
+    v100_x_diff[i] -= 119.497
+    v10_y_diff[i] -= 119.497
+    v50_y_diff[i] -= 119.497
+    v100_y_diff[i] -= 119.497
+
+
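+#Pose accuracy per velocity: Euclidean norm of the mean x and y deviations
+#(np.max over the single-element list is effectively a no-op)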
+APv10 = np.max([((np.mean(v10_x_diff))**2 + ((np.mean(v10_y_diff)))**2)**(1/2)])
+APv50 = np.max([((np.mean(v50_x_diff))**2 + ((np.mean(v50_y_diff)))**2)**(1/2)])
+APv100 = np.max([((np.mean(v100_x_diff))**2 + ((np.mean(v100_y_diff)))**2)**(1/2)])
+
+
+
+print("\nX-Position Differenz Durchschnitt")
+print("******************************")
+print("v10_x_diff = ",np.mean(v10_x_diff))
+print("v50_x_diff = ",np.mean(v50_x_diff))
+print("v100_x_diff = ",np.mean(v100_x_diff))
+
+print("\nY-Position Differenz Durchschnitt")
+print("******************************")
+print("v10_y_diff = ",np.mean(v10_y_diff))
+print("v50_y_diff = ",np.mean(v50_y_diff))
+print("v100_y_diff = ",np.mean(v100_y_diff))
+
+print("\nPose-Genauigkeit")
+print("*******************")
+print("APv10 = ",APv10)
+print("APv50 = ",APv50)
+print("APv100 = ",APv100)
+
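+#Velocity error estimate: assuming a nominal path length of 210 mm per cycle, the nominal travel time is 210/v;
+#covering the measured length (210 + AP) in that time gives the measured velocity, and subtracting v yields the error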
+e10 = (210+APv10)/(210/10) - 10
+e50 = (210+APv50)/(210/50) - 50
+e100 = (210+APv100)/(210/100) - 100
+
+print("\nDifferenz gemessene und angegebene Geschwindigkeit")
+print("*************************")
+print("e10 = ",e10)
+print("e50 = ",e50)
+print("e100 = ",e100)
+
+err10 = e10/10
+err50 = e50/50
+err100 = e100/100
+
+print("\nProzentualer Fehler")
+print("*******************")
+print("err10 = ",round(err10*100,3),"%")
+print("err50 = ",round(err50*100,3),"%")
+print("err100 = ",round(err100*100,3),"%")
+
+
+plt.rc('axes', axisbelow=True)
+
+X = ['1','2','3','4', '5',
+     '6','7','8', '9','10']
+X_axis = np.arange(len(X))
+
+plot1 = plt.figure(1)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, v10_x_diff, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, v10_y_diff, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("Abweichung [$mm$]")
+plt.title("Abweichungen Kreisbahn - v10",fontweight="bold")
+plt.legend()
+
+
+
+
+plot2 = plt.figure(2)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, v50_x_diff, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, v50_y_diff, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("Abweichung [$mm$]")
+plt.title("Abweichungen Kreisbahn - v50",fontweight="bold")
+plt.legend()
+
+
+
+plot3 = plt.figure(3)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, v100_x_diff, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, v100_y_diff, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("Abweichung [$mm$]")
+plt.title("Abweichungen Kreisbahn - v100",fontweight="bold")
+plt.legend()
+
+
+
+plot4 = plt.figure(4)
+X = ['10','50','100']
+X_axis = np.arange(len(X))
+plt.xticks(X_axis, X)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis,[APv10,APv50,APv100], label = 'Pose-Genauigkeit',color='slategrey',width = 0.5)
+plt.xlabel("Geschwindigkeit [$mm/s$]")
+plt.ylabel("Pose-Genauigkeit [$mm$]")
+plt.title("Pose-Genauigkeit Kreisbahn",fontweight="bold")
+plt.legend()
+
+plt.show()
\ No newline at end of file
diff --git "a/sensorfusion/scripts/eval/st\303\266rfehler.py" "b/sensorfusion/scripts/eval/st\303\266rfehler.py"
new file mode 100644
index 0000000000000000000000000000000000000000..d1d550bcac88c79d8772034002dfcc5bcadac4ee
--- /dev/null
+++ "b/sensorfusion/scripts/eval/st\303\266rfehler.py"
@@ -0,0 +1,175 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+from itertools import chain
+
+plot_on_windows = True
+
+def rmse(predictions, targets):
+    return np.sqrt(((predictions - targets) ** 2).mean())
+
+#Read out data from CSV files
+if(plot_on_windows):
+    
+    v10_1_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Achteck\\v10_1_achteck.csv', header=None)
+    v10_2_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Achteck\\v10_2_achteck.csv', header=None)
+    v10_3_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Achteck\\v10_3_achteck.csv', header=None)
+    v10_4_raw= pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Achteck\\v10_4_achteck.csv', header=None)
+    v50_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Achteck\\v50_achteck.csv', header=None)
+    v100_raw = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\Achteck\\v100_achteck.csv', header=None)
+    
+#Create Arrays for each position
+v10_1_x = []
+v10_1_y = []
+v10_2_x = []
+v10_2_y = []
+v10_3_x = []
+v10_3_y = []
+v10_4_x = []
+v10_4_y = []
+v50_x = []
+v50_y = []
+v100_x = []
+v100_y = []
+
+
+for i in range(len(v50_raw)):
+    v50_x.append([float(v50_raw[1][i])])
+    v50_y.append([float(v50_raw[3][i])])
+      
+for i in range(len(v100_raw)):
+    v100_x.append([float(v100_raw[1][i])])
+    v100_y.append([float(v100_raw[3][i])])
+           
+for i in range(len(v10_1_raw)):
+    v10_1_x.append([float(v10_1_raw[1][i])])
+    v10_1_y.append([float(v10_1_raw[3][i])])
+    v10_2_x.append([float(v10_2_raw[1][i])])
+    v10_2_y.append([float(v10_2_raw[3][i])])
+
+for i in range(len(v10_3_raw)):
+    v10_3_x.append([float(v10_3_raw[1][i])])
+    v10_3_y.append([float(v10_3_raw[3][i])])
+    v10_4_x.append([float(v10_4_raw[1][i])])
+    v10_4_y.append([float(v10_4_raw[3][i])])
+
+v10_1_x_diff = np.diff(np.array(v10_1_x).reshape(-1))
+v10_1_y_diff = np.diff(np.array(v10_1_y).reshape(-1))
+v10_2_x_diff = np.diff(np.array(v10_2_x).reshape(-1))
+v10_2_y_diff = np.diff(np.array(v10_2_y).reshape(-1))
+v10_3_x_diff = np.diff(np.array(v10_3_x).reshape(-1))
+v10_3_y_diff = np.diff(np.array(v10_3_y).reshape(-1))
+v10_4_x_diff = np.diff(np.array(v10_4_x).reshape(-1))
+v10_4_y_diff = np.diff(np.array(v10_4_y).reshape(-1))
+v10_x_diff = list(chain(v10_1_x_diff,v10_2_x_diff,v10_3_x_diff,v10_4_x_diff))
+v10_y_diff = list(chain(v10_1_y_diff,v10_2_y_diff,v10_3_y_diff,v10_4_y_diff))
+v50_x_diff = np.diff(np.array(v50_x).reshape(-1))
+v50_y_diff = np.diff(np.array(v50_y).reshape(-1))
+v100_x_diff = np.diff(np.array(v100_x).reshape(-1))
+v100_y_diff = np.diff(np.array(v100_y).reshape(-1))
+
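+#Pose accuracy per velocity: Euclidean norm of the mean x and y deviations
+#(np.max over the single-element list is effectively a no-op)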
+APv10 = np.max([((np.mean(v10_x_diff))**2 + ((np.mean(v10_y_diff)))**2)**(1/2)])
+APv50 = np.max([((np.mean(v50_x_diff))**2 + ((np.mean(v50_y_diff)))**2)**(1/2)])
+APv100 = np.max([((np.mean(v100_x_diff))**2 + ((np.mean(v100_y_diff)))**2)**(1/2)])
+
+
+print("\nX-Position Differenz Durchschnitt")
+print("******************************")
+print("v10_x_diff = ",np.mean(v10_x_diff))
+print("v50_x_diff = ",np.mean(v50_x_diff))
+print("v100_x_diff = ",np.mean(v100_x_diff))
+
+print("\nY-Position Differenz Durchschnitt")
+print("******************************")
+print("v10_y_diff = ",np.mean(v10_y_diff))
+print("v50_y_diff = ",np.mean(v50_y_diff))
+print("v100_y_diff = ",np.mean(v100_y_diff))
+
+
+print("\nPose-Genauigkeit")
+print("*******************")
+print("APv10 = ",APv10)
+print("APv50 = ",APv50)
+print("APv100 = ",APv100)
+
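+#Velocity error estimate: assuming a nominal path length of 560 mm per octagon cycle, the nominal travel time is 560/v;
+#covering the measured length (560 + AP) in that time gives the measured velocity, and subtracting v yields the error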
+e10 = (560+APv10)/(560/10) - 10
+e50 = (560+APv50)/(560/50) - 50
+e100 = (560+APv100)/(560/100) - 100
+
+print("\nDifferenz gemessene und angegebene Geschwindigkeit")
+print("*************************")
+print("e10 = ",e10)
+print("e50 = ",e50)
+print("e100 = ",e100)
+
+err10 = e10/10
+err50 = e50/50
+err100 = e100/100
+
+print("\nProzentualer Fehler")
+print("*******************")
+print("err10 = ",round(err10*100,3),"%")
+print("err50 = ",round(err50*100,3),"%")
+print("err100 = ",round(err100*100,3),"%")
+
+
+plt.rc('axes', axisbelow=True)
+
+X = ['1','2','3','4', '5',
+     '6','7','8', '9','10']
+X_axis = np.arange(len(X))
+
+plot1 = plt.figure(1)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, v10_x_diff, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, v10_y_diff, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("Abweichung [$mm$]")
+plt.title("Abweichungen Achteck - v10",fontweight="bold")
+plt.legend()
+
+
+
+
+plot2 = plt.figure(2)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, v50_x_diff, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, v50_y_diff, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("Abweichung [$mm$]")
+plt.title("Abweichungen Achteck - v50",fontweight="bold")
+plt.legend()
+
+
+
+plot3 = plt.figure(3)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis - 0.2, v100_x_diff, 0.3, label = 'X',color='steelblue')
+plt.bar(X_axis + 0.2, v100_y_diff, 0.3, label = 'Y',color='lightsteelblue')
+
+plt.xticks(X_axis, X)
+plt.xlabel("Zyklus")
+plt.ylabel("Abweichung [$mm$]")
+plt.title("Abweichungen Achteck - v100",fontweight="bold")
+plt.legend()
+
+
+
+plot4 = plt.figure(4)
+X = ['10','50','100']
+X_axis = np.arange(len(X))
+plt.xticks(X_axis, X)
+plt.grid(True,axis = 'y')
+plt.bar(X_axis,[APv10,APv50,APv100], label = 'Pose-Genauigkeit',color='slategrey',width = 0.5)
+plt.xlabel("Geschwindigkeit [$mm/s$]")
+plt.ylabel("Pose-Genauigkeit [$mm$]")
+plt.title("Pose-Genauigkeit Achteck",fontweight="bold")
+plt.legend()
+
+plt.show()
\ No newline at end of file
diff --git a/sensorfusion/scripts/eval/test_leica.py b/sensorfusion/scripts/eval/test_leica.py
new file mode 100644
index 0000000000000000000000000000000000000000..521eb19540114ce3c920d40a7adbcbe21da5fb26
--- /dev/null
+++ b/sensorfusion/scripts/eval/test_leica.py
@@ -0,0 +1,127 @@
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+import uuid
+import csv
+
+
+
+def rmse(predictions, targets):
+    return np.sqrt(((predictions - targets) ** 2).mean())
+
+#Read out data from CSV files
+traj = pd.read_csv('C:\\Users\\user\\Desktop\\csv_path\\path.csv', header=None)
+kf = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\kf_pos.csv', header=None)
+hpix = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\hpix_pos.csv', header=None)
+vpix = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\vpix_pos.csv', header=None)
+lid = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\lidar_pos.csv', header=None)
+qual = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\qual_pos.csv', header=None)
+time = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\timestamp.csv', header=None)
+est = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\estimate_pos.csv', header=None)
+vel_dt = pd.read_csv('C:\\Users\\user\\Desktop\\csv_pos\\vel_dt.csv', header=None)
+#leica = pd.read_csv('C:\\Users\\user\\Desktop\\UDPMonitorLog.csv', header=None)
+leica = pd.read_csv('C:\\Users\\user\\Documents\\MEGA\\Studium\\Thesis\\Results\\KF\\v100\\3\\UDPMonitorLog.csv',header=None)  
+
+    
+
+leica_frequency = 250
+
+def get_leica_data(leica,freq):
+
+    #Get Position data
+    # original: X,[X-Position],Y,[Y-Position],Z,[Z-Position],Time
+    # converted: [X_position][Y_Position][Z_Position]
+    leica_raw_data = []
+    
+    for i in range(len(leica)):
+        leica_raw_data.append([float(leica[1][i]),float(leica[3][i]),float(leica[7][i])])
+
+    #Determine how many laser-tracker samples fall within one sample interval of the Kalman filter
+    #and save the resulting index offset in compare_dt
+    len_raw = len(leica_raw_data)
+    compare_dt = []
+
+    for i in range(len_raw):
+        if((leica_raw_data[i][2]) - (leica_raw_data[0][2]) < vel_dt[1][0]):
+            continue
+        else:
+            compare_dt.append(i)
+            break
+
+
+    #Determine when the robot starts to move; earlier laser-tracker measurements are not relevant.
+    #Samples separated by the index offset compare_dt are compared, and a change of at least 0.05 mm in the
+    #x or y direction is taken as the start of robot motion and hence of the relevant laser-tracker data.
+    start_index = []
+
+    for j in range(compare_dt[0],len_raw):
+        if((leica_raw_data[j][0] - leica_raw_data[j-compare_dt[0]][0] >= 0.05) or (leica_raw_data[j][1] - leica_raw_data[j-compare_dt[0]][1] >= 0.05)):
+            start_index.append(j-3)
+            break
+        else: 
+            continue
+
+    #Align the coordinate system with the robot's by subtracting the first relevant measurement from all subsequent ones
+    leica_data = []
+
+    for i in range(start_index[0],len(leica)):
+        leica_data.append([((leica_raw_data[i][0])-(leica_raw_data[start_index[0]][0])),((leica_raw_data[i][1])-(leica_raw_data[start_index[0]][1])),((leica_raw_data[i][2])-(leica_raw_data[start_index[0]][2]))])
+
+    duration = []
+    duration.append(time[0][len(time)-1])
+
+    leica_data_temp = []
+    for i in range(len(leica_data)):
+        if(leica_data[i][2] <= duration[0]):
+            leica_data_temp.append([leica_data[i][0],leica_data[i][1],leica_data[i][2]])
+
+    leica_data = leica_data_temp
+
+    # #Filter lasertracker measurements by using only data which correspond to KF timestamps 
+    # useful_leica_data = []
+
+    # time_len = len(time)
+
+    # for i in range(time_len):
+    #     for j in range(len(leica_data)):
+    #         if(abs(leica_data[j][2] - time[0][i]) <= 1/leica_frequency):
+    #             useful_leica_data.append([leica_data[j][0],leica_data[j][1]])
+    #             break
+
+
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(leica_data)
+    
+    leica_temp = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)        
+
+
+
+    distances = []
+    useful_leica_data = []
+    min_dist_index = []
+    amount_pos = len(kf)
+
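+    #For each KF position, pick the Leica sample with the smallest Euclidean distance in the xy-plane
+    #(nearest-neighbour matching); min() over the [distance, index] pairs returns the pair with the smallest distance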
+    for i in range(amount_pos):
+        for j in range(len(leica_data)):
+            distances.append([((leica_temp[0][j] - kf[0][i])**2 + (leica_temp[1][j] - kf[1][i])**2)**(1/2),j])
+        min_dist_index.append(min(distances))
+        distances = []
+
+
+    for i in range(len(min_dist_index)):
+        useful_leica_data.append(leica_data[min_dist_index[i][1]])
+        
+    #Write results to csv file and save data to array
+    with open('C:\\Users\\user\\Desktop\\leica_pos.csv', 'w+', encoding='UTF8', newline='') as f:
+        writer = csv.writer(f)
+        writer.writerows(useful_leica_data)
+    
+    leica = pd.read_csv('C:\\Users\\user\\Desktop\\leica_pos.csv',header=None)  
+                
+    return leica
+
+array = get_leica_data(leica,leica_frequency)
+print(array[0])
\ No newline at end of file
diff --git a/sensorfusion/scripts/imu_pos.py b/sensorfusion/scripts/imu_pos.py
new file mode 100644
index 0000000000000000000000000000000000000000..0814103d2bee90cfa25d964025768d24668797d0
--- /dev/null
+++ b/sensorfusion/scripts/imu_pos.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+from typing import Type
+from geometry_msgs.msg import QuaternionStamped
+import rospy
+from sensorfusion.msg import Position
+import numpy as np
+import math
+
+
+
+class imu(object):
+
+    def __init__(self):
+
+        self.pub_ = rospy.Publisher("/imu_pos_cov", Position, queue_size=1)
+
+        self.pub_angle = rospy.Publisher("/imu_azimuth", Position, queue_size=1)
+
+        rospy.Subscriber("/filter/quaternion", QuaternionStamped, self.callback)
+
+        self.x_vel = 0.0
+        self.y_vel = 0.0
+
+ 
+    def euler_from_quaternion(self,x, y, z, w):
+        """
+        Convert a quaternion into euler angles (roll, pitch, yaw)
+        roll is rotation around x in radians (counterclockwise)
+        pitch is rotation around y in radians (counterclockwise)
+        yaw is rotation around z in radians (counterclockwise)
+        """
+        t0 = +2.0 * (w * x + y * z)
+        t1 = +1.0 - 2.0 * (x * x + y * y)
+        roll_x = math.atan2(t0, t1)
+     
+        t2 = +2.0 * (w * y - z * x)
+        t2 = +1.0 if t2 > +1.0 else t2
+        t2 = -1.0 if t2 < -1.0 else t2
+        pitch_y = math.asin(t2)
+     
+        t3 = +2.0 * (w * z + x * y)
+        t4 = +1.0 - 2.0 * (y * y + z * z)
+        yaw_z = math.atan2(t3, t4)
+     
+        return roll_x, pitch_y, yaw_z # in radians    
+
+
+    def callback(self,values: Type[QuaternionStamped]) ->None:
+        """Read sensordata from topic /imu/dv and publish position values, variances and timestamp to imu_pos_cov
+
+        Args:
+            values (type): Message from topic (/imu/dv)
+        """
+        msg = Position()
+        roll_x, pitch_y, yaw_z = self.euler_from_quaternion(values.quaternion.x,values.quaternion.y,values.quaternion.z,values.quaternion.w)
+        yaw_z_deg = round(yaw_z * (180.0/np.pi),1)
+
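+        #Map the yaw angle (range [-180, 180) deg) to an azimuth in [0, 360) deg: add 90 deg and wrap values
+        #below -90 deg by a full turn; the 90 deg offset is assumed to align the IMU frame with the robot frame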
+        if(yaw_z_deg >= -90.0 and yaw_z_deg < 180):
+             yaw_z_deg += 90.0
+        else:
+            yaw_z_deg += 450.0     
+
+        msg.header.stamp = rospy.Time.now()
+        msg.header.frame_id = "/imu_frame"
+        msg.azimuth = round(yaw_z_deg,2)    
+
+        
+        self.pub_angle.publish(msg)
+       
+
+        
+
+if __name__ == "__main__":
+    rospy.init_node("imu_pos_node", anonymous=True)
+    imu_obj = imu()
+    rospy.spin()
diff --git a/sensorfusion/scripts/path.py b/sensorfusion/scripts/path.py
new file mode 100644
index 0000000000000000000000000000000000000000..cbe6acd234f2fd10e5c16c19cb5df3cf49164b83
--- /dev/null
+++ b/sensorfusion/scripts/path.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+from typing import Type
+from std_msgs.msg import Float64MultiArray, Float64
+from sensorfusion.srv import Trajectory, TrajectoryResponse
+import numpy as np
+import rospy
+import math
+import csv
+
+#Load the X,Y waypoints of the path from octagon.csv into the trajectory array
+with open('/home/pi/catkin_ws/src/sensorfusion/csv_path/octagon.csv', newline='') as csvfile:
+        trajectory = np.loadtxt(csvfile, delimiter=',')
+
+
+class traj(object):
+
+    def __init__(self):
+
+        #The path list is built only on the first service call
+        self.not_init = True
+
+        self.path_list = []
+
+        self.segments_amount = len(trajectory)
+
+        self.kf_calc_time = 0.11
+
+        self.travel_time = 0.0
+
+        s = rospy.Service('getPath_service',Trajectory, self.getPath_server)
+
+
+    def calc_robot_direction_angle(self,current_pos:Type[Float64MultiArray], target_pos:Type[Float64MultiArray])->Float64:
+        """Calculate direction of robot which it should use to reach target position from current position
+
+        Args:
+            current_pos (Type[Float64MultiArray]): current position where the robot is located (approximately)
+            target_pos (Type[Float64MultiArray]): target position which should be reached
+
+        Returns:
+            Float64: return direction of robot
+        """
+
+        #X values identical, but difference in Y values. Determine driving forward or backward
+        if current_pos[0] == target_pos[0]:
+            if target_pos[1] > current_pos[1]:
+                robot_direction = 90.0 #Robot drives forwards
+                robot_angle = 90.0
+            else:
+                robot_direction = 270.0 #Robot drives backwards
+                robot_angle = 270.0
+
+        #Y values identical, but difference in X values. Determine driving left or right
+        elif current_pos[1] == target_pos[1]:
+            if target_pos[0] > current_pos[0]:
+                robot_direction = 180.0 #Robot drives to the right
+                robot_angle = 0.0
+            else:
+                robot_direction = 0 #Robot drives to the left
+                robot_angle = 180.0
+
+        #target point above
+        elif target_pos[1] > current_pos[1]:
+
+            #target point on left upper
+            if current_pos[0] > target_pos[0]:
+
+                #arctan(Perpendicular / Base) = angle
+                robot_direction = np.arctan(abs(target_pos[1]-current_pos[1])/abs(target_pos[0]-current_pos[0]))*(180.0/np.pi) 
+                robot_angle = np.arctan(abs(target_pos[1]-current_pos[1])/abs(target_pos[0]-current_pos[0]))*(180.0/np.pi) + 180.0
+            
+            #target point on right upper
+            else:
+                robot_direction = 90.0 + np.arctan((target_pos[0]-current_pos[0])/(target_pos[1]-current_pos[1]))*(180.0/np.pi)
+                robot_angle = np.arctan((target_pos[1]-current_pos[1])/(target_pos[0]-current_pos[0]))*(180.0/np.pi)
+
+        #target point below
+        else:#target_pos[1] < current_pos[1]:
+
+            #target point on left side
+            if current_pos[0] > target_pos[0]:
+                robot_direction = 270.0 + (np.arctan(abs(target_pos[0] - current_pos[0])/abs(target_pos[1] - current_pos[1])))*(180.0/np.pi)
+                robot_angle = np.arctan((target_pos[1]-current_pos[1])/(target_pos[0]-current_pos[0]))*(180.0/np.pi) - 180.0
+
+            #target point on right side
+            else:
+                robot_direction = 180.0 + np.arctan(abs(target_pos[1] - current_pos[1])/(target_pos[0] - current_pos[0]))*(180.0/np.pi)
+                robot_angle = np.arctan(abs(target_pos[1] - current_pos[1])/(target_pos[0] - current_pos[0]))*(180.0/np.pi)
+
+        return robot_direction, robot_angle
+
+    def calc_travel_time(self,current_pos:Type[Float64MultiArray], target_pos:Type[Float64MultiArray], robot_velocity:Type[Float64])->Float64:
+        """Calculate time which is needed to travel from current position to target position
+
+        Args:
+            current_pos (Type[Float64MultiArray]): current position where the robot is located (approximately)
+            target_pos (Type[Float64MultiArray]): target position which should be reached
+            robot_velocity (Type[Float64]): linear velocity of the robot
+
+        Returns:
+            Float64: calculated time
+        """
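+        #Euclidean distance between the two points divided by the velocity gives the travel time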
+        route_length = (abs(current_pos[0] - target_pos[0])**2 + abs(current_pos[1] - target_pos[1])**2)**(1/2)
+        time = route_length / robot_velocity
+        return time
+
+    def getPath_server(self,request):
+        """Calculate direction, angle and travel times for the robot and return them to the client
+
+        Args:
+            request (_type_): request of client, inputs velocity and path_number
+
+        Returns:
+            TrajectoryResponse: response with angle, dt, direction, seg_dt and seg_angle
+        """
+
+        #Save paths in path_list at specific intervals
+        if(self.not_init):
+            x_pos = []
+            y_pos = []
+            path_list = []
+
+            #Save each X-Position in one list and each Y-Position in one list
+            for i in range(len(trajectory)):
+                x_pos.append(trajectory[i][0])
+                y_pos.append(trajectory[i][1])
+            
+            #Calculate lengths of segments and save them in an array
+            segment_lengths = (np.diff(x_pos)**2 + np.diff(y_pos)**2)**.5
+            
+            #Create cumsum_length variable to store the cumulative sum of lengths, see: https://www.geeksforgeeks.org/numpy-cumsum-in-python/
+            cumsum_length = np.zeros_like(x_pos)
+            
+            #If only two position points are given in the csv, set the single value so that the array looks like [0, segment_lengths[0]]
+            if(len(segment_lengths) == 1):
+                cumsum_length[1] = segment_lengths[0]
+
+            #If only three position points are given in the csv, set two values so that the array looks like [0, segment_lengths[0], segment_lengths[0] + segment_lengths[1]]
+            elif(len(cumsum_length) == 3):
+                cumsum_length[1] = segment_lengths[0]
+                cumsum_length[2] = segment_lengths[0] + segment_lengths[1]
+
+            #If more Position points given, use cumsum function and add every value
+            else:    
+                cumsum_length[1:] = np.cumsum(segment_lengths)
+            
+            #Interpolate to determine points of specific length intervals
+            #From length 0 to the overall length, divide it into number of paths
+            #The number of paths is the trajectory length divided by the velocity (s/v),
+            # which gives the time needed to drive the whole trajectory (t_traj);
+            # t_traj is then divided by the approximate Kalman filter cycle time from prior measurements (self.kf_calc_time)
+            #After optimization, the value can be altered
+            cumsum_length_int = np.linspace(0,cumsum_length.max(),math.floor((cumsum_length.max()/request.velocity)/self.kf_calc_time))
+            x_pos_int = np.interp(cumsum_length_int, cumsum_length, x_pos)
+            y_pos_int = np.interp(cumsum_length_int, cumsum_length, y_pos)
+            
+            #Save the values in a list and write them down in csv file
+            for i in range(len(x_pos_int)):
+                self.path_list.append([round(x_pos_int[i],2),round(y_pos_int[i],2)])
+
+            with open('/home/pi/catkin_ws/src/sensorfusion/csv_path/path.csv', 'w', encoding='UTF8', newline='') as f:
+                writer = csv.writer(f)
+                writer.writerows(self.path_list)
+
+            #End this save again by changing bool value to False
+            self.not_init = False
+        
+        #No path information anymore? Return angle 999 to signal kalman filter that it has to stop and save data
+        if(request.path_number >= len(self.path_list)):
+            return TrajectoryResponse(angle=999, dt=self.travel_time, direction=0)
+
+        #Work with indices to make it easier to understand
+        i = request.path_number
+        j = request.seg_number
+
+        robot_direction, robot_angle = self.calc_robot_direction_angle(self.path_list[i-1],self.path_list[i])
+
+        if (j < len(trajectory)-1):
+            r_dir, seg_angle = self.calc_robot_direction_angle(trajectory[j],trajectory[j+1])
+        else:
+            seg_angle = 999        
+
+        self.travel_time = self.calc_travel_time(self.path_list[i-1],self.path_list[i],request.velocity)
+        segment_time = self.calc_travel_time(trajectory[j-1],trajectory[j],request.velocity)
+
+        return TrajectoryResponse(angle=(round(robot_angle,2)), dt=(self.travel_time), direction=(robot_direction), seg_dt=(segment_time), seg_angle=(seg_angle))
+
+
+if __name__=="__main__":
+    rospy.init_node("trajectory")
+    traj_obj = traj()
+    rospy.spin()
+
diff --git a/sensorfusion/scripts/qualisys/KFQ.py b/sensorfusion/scripts/qualisys/KFQ.py
new file mode 100644
index 0000000000000000000000000000000000000000..56915bec0bf2f8d1a471060d1bbf4168e2887296
--- /dev/null
+++ b/sensorfusion/scripts/qualisys/KFQ.py
@@ -0,0 +1,443 @@
+#!/usr/bin/env python3
+from typing import Type
+import numpy as np
+from sensorfusion.msg import SyncPos, Motion
+from sensorfusion.srv import Trajectory, Measurements
+import rospy
+import csv
+
+
+class kalman_filter(object):
+   
+    def __init__(self):        
+        
+        #*-------User-Parameters--------#
+
+        #Show matrices and states on console
+        self.show = False
+
+        #Robot velocity (from 0-100 mm/s)
+        self.velocity = 15.0
+
+        #Motion Qualisys on/off
+        self.qualisys = True
+        
+        #*-------Init_Node--------#
+        
+        #Before shutting down, run "save_data", which stops the robot and saves the position data
+        rospy.on_shutdown(self.save_data)
+
+        #Publishes commands to Drive node
+        self.pub_ = rospy.Publisher("/drive", Motion, queue_size=3)
+
+        #Synced Position and variances of all sensors with timestamp
+        self.Pos = SyncPos()
+
+        #Motion message for giving command to drive to certain direction, velocity and time
+        self.motion = Motion()
+
+        #Bool variable for executing functions in first run in specific way
+        self.not_init = True
+
+        #Start time used to compute relative timestamps for the csv files after data acquisition
+        self.starttime_sec = 0.0
+
+        #Time spent receiving measurements and computing while the robot keeps driving; subtracted from the next drive time
+        self.start_diff_dt = 0.0
+        self.end_diff_dt = 0.0
+        self.delay_dt = 0.0
+        
+        #Save X-Y Position and Timestamp of Kalman Filter and sensors
+        self.kf_pos_list = []
+        
+        self.lid_pos_list = []
+        
+        self.vpix_pos_list = []
+
+        self.hpix_pos_list = []
+        
+        self.imu_pos_list = []
+
+        self.qual_pos_list = []
+
+        self.timestamp_list = []
+
+        self.estimate_pos_list = []
+
+        self.vel_dt_list = []
+        
+        #Robot direction (forward = 90, right = 180, back = 270, left = 0)
+        self.direction = 0.0
+        
+        #Segment number 
+        # (example: (0,0)->(30,0) = 1.segment; (30,0)->(30,30) = 2.segment)
+        self.seg_number = 1
+
+        #Path number: Small trajectories of one segment 
+        # (example: in 1.segment (0,0)->(30,0) are (0,0)->(15,0) 1.path and (15,0)->(30,0) 2. path
+        #           in 2.segment (30,0)->(30,30) are (30,0)->(30,15) 3.path and (30,15)->(30,30) 4.path)
+        # Intervals are defined in the "trajectory" node
+        self.path_number = 1
+
+        #Angle at which robot is driving at current segment
+        self.seg_angle = 90.0
+
+        #Angle at which robot is driving at current path
+        self.angle = 90.0
+
+        #Time needed driving a segment with specific velocity
+        self.seg_dt = 0.0
+        
+        #Time needed driving a path with specific velocity
+        self.dt = 1.0
+
+        #Rate corresponding to self.dt: after the prediction the robot drives for one period
+        # before the next measurements are taken
+        self.drive_rate = rospy.Rate(1/self.dt)
+
+        #Driven Distance
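+        # (velocity multiplied by the time spent on completed paths; used for the Pixart variance model in the update step)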
+        self.driven_distance = (self.velocity*((self.path_number-1)*self.dt))
+        
+        #*-------Kalman Filter--------# n = 2, m = 8, l = 2
+
+        #state variable (X-Position, Y-Position) init
+        # n x 1 column vector
+        self.x = np.array([[0],
+                          [0]],
+                          dtype=float)
+        
+        #state covariance matrix init
+        # n x n matrix
+        self.P = np.array([[1,0],
+                           [0,1]], 
+                          dtype=float)
+                
+        #measurements
+        # m x 1 column vector
+        self.z = np.zeros((8,1), dtype=float)
+        
+        #process noise covariance matrix
+        # n x n matrix
+        self.Q = np.array([[10,0],
+                           [0,10]],
+                          dtype=float)
+        
+        #measurement covariance matrices
+        # m x m matrix
+        self.R = np.identity(8)
+
+        #state transition matrix
+        # n x n matrix
+        self.A = np.array([[1,0],
+                           [0,1]],
+                          dtype=float)
+        
+        #control-input model
+        # n x l matrix
+        self.B = np.identity(2)
+        
+        #control vector
+        # l x 1 column vector
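+        # u decomposes the commanded speed into x and y velocity components via the driving angle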
+        self.u = np.array([[round(self.velocity*np.cos(self.angle*np.pi/180),2)],
+                          [round(self.velocity*np.sin(self.angle*np.pi/180),2)]],
+                          dtype=float)
+        
+        #process noise
+        # n x 1 column vector
+        self.w = np.array([[0],
+                           [0]],
+                          dtype=float)
+        
+        #state to measurement matrix
+        # m x n matrix
+        self.H = np.array([[1,0],
+                           [0,1],
+                           [1,0],
+                           [0,1],
+                           [1,0],
+                           [0,1],
+                           [1,0],
+                           [0,1]], 
+                          dtype=float)
+        
+        self.HT = np.transpose(self.H)
+
+        #Kalman Gain
+        # n x m matrix
+        self.K = np.zeros((2,8), dtype=float)
+
+    def save_data(self):
+        """let the robot stop and save data to csv files. After that shutdown the node
+        """Stop the robot and save the collected data to csv files (called when the node shuts down)
+    
+        #Let the robot stop
+        self.motion.direction = 0
+        self.motion.velocity = 0
+        self.motion.dt = 0
+        self.motion.move = 0
+
+        self.pub_.publish(self.motion)
+
+        #Write down saved data to csv files
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/kf_pos.csv', 'w', encoding='UTF8', newline='') as a:
+            writer = csv.writer(a)
+            writer.writerows(self.kf_pos_list)
+        
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/lidar_pos.csv', 'w', encoding='UTF8', newline='') as b:
+            writer = csv.writer(b)
+            writer.writerows(self.lid_pos_list)
+            
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vpix_pos.csv', 'w', encoding='UTF8', newline='') as c:
+            writer = csv.writer(c)
+            writer.writerows(self.vpix_pos_list)
+
+        # with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/imu_pos.csv', 'w', encoding='UTF8', newline='') as d:
+        #     writer = csv.writer(c)
+        #     writer.writerows(self.imu_pos_list)
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/hpix_pos.csv', 'w', encoding='UTF8', newline='') as d:
+            writer = csv.writer(d)
+            writer.writerows(self.hpix_pos_list)
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/timestamp.csv', 'w', encoding='UTF8', newline='') as e:
+            writer = csv.writer(e)
+            writer.writerows(self.timestamp_list)    
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/estimate_pos.csv', 'w', encoding='UTF8', newline='') as f:
+            writer = csv.writer(f)
+            writer.writerows(self.estimate_pos_list)
+
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/qual_pos.csv', 'w', encoding='UTF8', newline='') as g:
+            writer = csv.writer(g)
+            writer.writerows(self.qual_pos_list)            
+
+        self.vel_dt_list.append([self.velocity,self.dt])
+        with open('/home/pi/catkin_ws/src/sensorfusion/csv_pos/vel_dt.csv', 'w', encoding='UTF8', newline='') as h:
+            writer = csv.writer(h)
+            writer.writerows(self.vel_dt_list)   
+            
+    def getPath_client(self,vel,p_num,s_num):
+        """Client which requests angle, dt and direction and forwards the commands to the driving server
+
+        Args:
+            vel (Type[Float64]): velocity used to compute the time needed to drive a specific path
+            p_num (Type[Int64]): number of the current path of the trajectory
+            s_num (Type[Int64]): number of the current segment of the trajectory
+        """
+        rospy.wait_for_service('getPath_service')
+        
+        try:
+            getPath = rospy.ServiceProxy('getPath_service',Trajectory)
+            respond = getPath(p_num,s_num,vel)
+            
+            self.angle = respond.angle
+            if(self.angle > -0.00001 and self.angle < 0.00001): self.angle = 0.00001
+            if(self.angle > 179.99999 and self.angle < 180.00001): self.angle = 180.00001
+            self.direction = respond.direction
+            self.dt = respond.dt
+            self.seg_dt = respond.seg_dt
+            self.seg_angle = respond.seg_angle
+
+            if(self.angle == 999 and self.direction == 0):
+                rospy.signal_shutdown("Finished driving trajectory")
+                
+            #Setup next pathnumber for next cycle
+            self.path_number += 1
+        
+        except rospy.ServiceException as e:
+            print("Service call failed: %s" % e)
+
+    def getMeasurements_client(self):
+        """get synced measurements (position, variance) of sensors to proceed with update step of kalman filter
+        """
+        rospy.wait_for_service('getMeasurements_service')
+        
+        try:
+            getMeasurements = rospy.ServiceProxy('getMeasurements_service',Measurements)
+            self.Pos = getMeasurements()
+
+        except rospy.ServiceException as e:
+            print("Service call failed: %s" % e)
+
+    def filter(self):  
+        """Kalman Filter function with Prediction and Update Steps
+        """
+        
+        #Variable for execution time
+        start = rospy.Time.now().to_sec()
+
+        
+        
+        #*------Get_Path_Information------#
+        
+        #Receive necessary information (self.angle, self.dt, self.seg_dt, self.seg_angle)
+        self.getPath_client(self.velocity,self.path_number,self.seg_number)
+        
+        #Update time left to drive to next path position
+        if(self.not_init == False):  
+            self.end_diff_dt = rospy.Time.now().to_sec()
+            self.delay_dt = self.end_diff_dt - self.start_diff_dt
+           
+  
+        
+        
+        #*------Prediction------#
+        
+        #angle, vel x, vel y, dt, self.x, self.P, init state, init cov
+        if(self.show):
+            print("\n\n\n****************New Cycle*****************\n")
+            print("angle: ", self.angle)
+            print("dt: ", self.dt)
+            print("\ninit state: \n", self.x)
+            print("\ninit cov: \n", self.P)
+            print("\nvel_x: \n", self.u[0][0])
+            print("\nvel_y: \n", self.u[1][0])
+
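+        #Prediction equations: x_p = A*x + dt*B*u + w  and  P_p = A*P*A^T + Q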
+        # Predict State
+        x_p = self.A.dot(self.x) + self.dt*(self.B).dot(self.u) + self.w
+        # Predict Covariance 
+        P_p = self.A.dot(self.P).dot(np.transpose(self.A)) + self.Q
+
+        self.estimate_pos_list.append([round(x_p[0][0],4), round(x_p[1][0],4)])
+
+        #Predict State, predict covariance
+        if(self.show):
+            print("\npredict state: \n", x_p)
+            print("\npredict cov: \n", P_p)
+
+        #Do one measurement before starting to drive and save data to lists.
+        if(self.not_init):
+            
+            self.getMeasurements_client()
+            
+            self.starttime_sec = (rospy.Time(self.Pos.measure.header.stamp.secs, self.Pos.measure.header.stamp.nsecs)).to_sec()
+            self.timestamp_list.append([0.0])
+            self.kf_pos_list.append([0.0, 0.0])
+            self.lid_pos_list.append([0.0, 0.0])
+            self.vpix_pos_list.append([0.0, 0.0])
+            self.hpix_pos_list.append([0.0, 0.0])
+            self.qual_pos_list.append([0.0, 0.0])
+
+        
+        #*------Drive------#
+
+        #Start driving at first cycle without any more conditions
+        if(self.not_init):
+            
+            self.motion.dt = self.seg_dt
+            self.motion.direction = self.direction
+            self.motion.velocity = self.velocity
+            self.motion.move = 1
+
+            self.pub_.publish(self.motion)
+        
+        #If the path angle and the segment angle are nearly equal (within 1 degree), command the robot to drive in the new direction
+        elif(abs(self.angle - self.seg_angle) < 1.0):
+            
+            self.motion.dt = self.seg_dt
+            self.motion.direction = self.direction
+            self.motion.velocity = self.velocity
+            self.motion.move = 1
+
+            self.pub_.publish(self.motion)
+            print("\n\n******Change Direction to ", round(self.angle,2), " degrees*****\n\n")
+            self.seg_number += 1
+
+
+        #Wait till robot reached position
+        rospy.sleep(self.dt-self.delay_dt)
+
+        #*-------Measure--------#
+        
+        #Receive measurements and save them in self.Pos
+        self.getMeasurements_client()               
+        self.start_diff_dt = rospy.Time.now().to_sec()
+        
+        #*--------Update--------#
+
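+        #Update equations: S = H*P_p*H^T + R,  K = P_p*H^T*S^-1,
+        # x = x_p + K*(z - H*x_p),  P = P_p - K*H*P_p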
+        # Compute Kalman Gain
+
+        #Observations
+        self.z = np.array([[self.Pos.measure.x_pos_lid],
+                          [self.Pos.measure.y_pos_lid],
+                          [self.Pos.measure.x_pos_vpix],
+                          [self.Pos.measure.y_pos_vpix],
+                          [self.Pos.measure.x_pos_hpix],
+                          [self.Pos.measure.y_pos_hpix],
+                          [self.Pos.measure.x_pos_qual],
+                          [self.Pos.measure.y_pos_qual]],
+                          dtype = float)
+        
+        #Sensor variances
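+        #The Pixart variances are replaced by a quadratic model in the driven distance
+        # (presumably an empirical fit; the reported sensor variances are kept in the comments below)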
+        var = np.array([[self.Pos.measure.x_var_lid],
+                       [self.Pos.measure.y_var_lid],
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632],#self.Pos.measure.x_var_vpix
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632],#self.Pos.measure.y_var_vpix
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632],#self.Pos.measure.x_var_hpix
+                       [0.000002*self.driven_distance**2+0.000122*self.driven_distance+0.524632],#self.Pos.measure.y_var_hpix
+                       [self.Pos.measure.x_var_qual],
+                       [self.Pos.measure.y_var_qual]],
+                       dtype = float)
+        
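+        #Build a diagonal R from the per-sensor variances (identity * column vector places var[i] on the diagonal)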
+        self.R = np.identity(len(self.z)) * (var)   
+
+        #z (measurements), self.R (variances of sensor)
+        if(self.show):
+            print("\nz: \n", self.z)
+            print("R: \n", self.R)
+
+        S = self.H.dot(P_p).dot(self.HT) + self.R 
+
+        try:
+            K = P_p.dot(self.HT).dot(np.linalg.inv(S))
+        except np.linalg.LinAlgError:
+            rospy.signal_shutdown("Singular Matrix, Error due to pixart publisher")
+            return
+
+        
+        #Kalman Gain
+        if(self.show):
+            print("\nK: \n", K)
+        
+        residual = self.z - (self.H.dot(x_p))
+
+        #Estimate state
+        self.x = x_p + K.dot(residual)
+        
+        # Estimate Covariance
+        self.P = P_p - K.dot(self.H).dot(P_p)
+
+        
+        #*--------Save_values--------#
+        
+        #Save Data to list
+        timestamp_sec = round(((rospy.Time(self.Pos.measure.header.stamp.secs, self.Pos.measure.header.stamp.nsecs)).to_sec()) - self.starttime_sec,4)
+        self.timestamp_list.append([timestamp_sec])
+        self.kf_pos_list.append([round(self.x[0][0],4), round(self.x[1][0],4)])
+        self.lid_pos_list.append([round(self.Pos.measure.x_pos_lid,4), round(self.Pos.measure.y_pos_lid,4)])
+        self.vpix_pos_list.append([round(self.Pos.measure.x_pos_vpix,4), round(self.Pos.measure.y_pos_vpix,4)])
+        self.hpix_pos_list.append([round(self.Pos.measure.x_pos_hpix,4), round(self.Pos.measure.y_pos_hpix,4)])
+        self.qual_pos_list.append([round(self.Pos.measure.x_pos_qual,4), round(self.Pos.measure.y_pos_qual,4)])
+        #self.imu_pos_list.append([round(self.Pos.measure.x_pos_imu,4), round(self.Pos.measure.y_pos_imu,4)])
+        
+        #*---------Output Screen------#
+        #print("\nKF-Position: [" ,round(self.x[0][0],4), ", ", round(self.x[1][0],4),"]\n")
+
+        if(self.not_init):
+            self.not_init = False
+
+        #Execution time
+        end = rospy.Time.now().to_sec()
+        #print("Execution time: ", round((end-start)-(self.dt-self.delay_dt),4)," sec\n")
+        
+
+
+
+if __name__ == "__main__":
+    rospy.init_node("kalman_filter", anonymous=True, disable_signals=True)
+    kf_obj = kalman_filter()
+    
+    #Wait until all other nodes have launched (and the Pixart publisher has restarted if errors occurred) before starting the Kalman filter
+    rospy.sleep(9.5) 
+    
+    while not rospy.is_shutdown():
+        kf_obj.filter()
+
diff --git a/sensorfusion/scripts/qualisys/syncQ.py b/sensorfusion/scripts/qualisys/syncQ.py
new file mode 100644
index 0000000000000000000000000000000000000000..4768725b75437b1b5c676502d729cc5dcb804977
--- /dev/null
+++ b/sensorfusion/scripts/qualisys/syncQ.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+import message_filters
+import rospy
+from sensorfusion.msg import Position, SyncPos
+from typing import Type
+from sensorfusion.srv import Measurements, MeasurementsResponse
+
+
+class sync(object):
+   
+    def __init__(self):        
+    
+        #Parameter to change:
+        self.qualisys_on = True
+    
+        self.pub_ = rospy.Publisher("/sync_test", SyncPos, queue_size=1)
+
+        #self.imu_sub = message_filters.Subscriber("/imu_pos_cov",Position)
+        self.lidar_sub = message_filters.Subscriber("/lidar_pos_cov", Position)
+        self.vpix_sub = message_filters.Subscriber("/pixart_V_pos_cov", Position)
+        self.hpix_sub = message_filters.Subscriber("/pixart_H_pos_cov", Position)
+        #self.imu_sub = message_filters.Subscriber("/imu_azimuth", Position)
+        self.qual_sub = message_filters.Subscriber("/qualisys_pos_cov", Position)
+
+
+        #Synchronize the sensors (add qual_sub or imu_sub if built in)
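+        # slop is the maximum timestamp difference (in seconds) tolerated between the grouped messages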
+        self.ats = message_filters.ApproximateTimeSynchronizer([self.lidar_sub, self.vpix_sub, self.hpix_sub, self.qual_sub], queue_size = 5, slop = 0.1) #adjust slop if needed
+        self.ats.registerCallback(self.syncedCallbackQual)
+        self.service = rospy.Service('getMeasurements_service',Measurements, self.getMeasurements_server)
+        self.msg = SyncPos()
+        self.data_process = False
+
+
+    def syncedCallbackQual(self, lidar:Type[Position], pixart_v:Type[Position], pixart_h:Type[Position], qual:Type[Position])->None:
+            """Synchronize the sensor measurements and save them to a SyncPos msg which contains the data of all sensors at once
+
+            Args:
+                lidar (Type[Position]): lidar position data
+                pixart_v (Type[Position]): front Pixart position data
+                pixart_h (Type[Position]): back Pixart position data
+                qual (Type[Position]): Qualisys position data
+            """
+            self.data_process = True
+
+            #Initialize values to self.msg
+            self.msg.header.stamp = pixart_v.header.stamp
+
+            #Lidar
+            self.msg.x_pos_lid = lidar.x
+            self.msg.y_pos_lid = lidar.y
+            #self.msg.azimuth_lid = lidar.azimuth
+            self.msg.x_var_lid = lidar.x_var
+            self.msg.y_var_lid = lidar.y_var
+
+            #Pixart front
+            self.msg.x_pos_vpix = pixart_v.x
+            self.msg.y_pos_vpix = pixart_v.y
+            self.msg.x_var_vpix = pixart_v.x_var
+            self.msg.y_var_vpix = pixart_v.y_var
+
+            #Pixart back
+            self.msg.x_pos_hpix = pixart_h.x
+            self.msg.y_pos_hpix = pixart_h.y
+            self.msg.x_var_hpix = pixart_h.x_var
+            self.msg.y_var_hpix = pixart_h.y_var
+
+            #Qualisys
+            self.msg.x_pos_qual = qual.x
+            self.msg.y_pos_qual = qual.y
+            self.msg.x_var_qual = qual.x_var
+            self.msg.y_var_qual = qual.y_var
+
+            self.data_process = False
+
+            self.pub_.publish(self.msg)
+
+
+    def getMeasurements_server(self,request):
+        """forward SyncPos data to SF_kalman_filter node
+
+        Args:
+            request (_type_): empty service request (required by the service signature)
+
+        Returns:
+            _type_: return SyncPos data
+        """
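+        #Busy-wait until the synced callback has finished writing self.msg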
+        while self.data_process:
+            pass
+        
+        return MeasurementsResponse(self.msg)
+        
+if __name__ == "__main__":
+    rospy.init_node("synced_measurements")
+    sync_obj = sync()
+    rospy.spin()
+
+
+
diff --git a/sensorfusion/scripts/sync.py b/sensorfusion/scripts/sync.py
new file mode 100644
index 0000000000000000000000000000000000000000..07bd01e934db2add07c72ca20cc664c43280905e
--- /dev/null
+++ b/sensorfusion/scripts/sync.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+import message_filters
+import rospy
+from sensorfusion.msg import Position, SyncPos
+from typing import Type
+from sensorfusion.srv import Measurements, MeasurementsResponse
+
+
+class sync(object):
+   
+    def __init__(self):        
+    
+        #self.imu_sub = message_filters.Subscriber("/imu_pos_cov",Position)
+        self.lidar_sub = message_filters.Subscriber("/lidar_pos_cov", Position)
+        self.vpix_sub = message_filters.Subscriber("/pixart_V_pos_cov", Position)
+        self.hpix_sub = message_filters.Subscriber("/pixart_H_pos_cov", Position)
+
+        #Synchronize the sensors
+        self.ats = message_filters.ApproximateTimeSynchronizer([self.lidar_sub, self.vpix_sub, self.hpix_sub], queue_size = 5, slop = 0.01)
+        self.ats.registerCallback(self.syncedCallback)
+        self.service = rospy.Service('getMeasurements_service',Measurements, self.getMeasurements_server)
+        self.msg = SyncPos()
+        self.data_process = False
+
+
+    def syncedCallback(self, lidar:Type[Position], pixart_v:Type[Position], pixart_h:Type[Position])->None:
+        """Synchronize the sensor measurements and save them to a SyncPos msg which contains the data of all sensors at once
+
+        Args:
+            lidar (Type[Position]): lidar position data
+            pixart_v (Type[Position]): front Pixart position data
+            pixart_h (Type[Position]): back Pixart position data
+        """
+        self.data_process = True
+
+        #Initialize values to self.msg
+        self.msg.header.stamp = pixart_v.header.stamp
+
+        self.msg.x_pos_lid = lidar.x
+        self.msg.y_pos_lid = lidar.y
+        self.msg.x_var_lid = lidar.x_var
+        self.msg.y_var_lid = lidar.y_var
+
+        self.msg.x_pos_vpix = pixart_v.x
+        self.msg.y_pos_vpix = pixart_v.y
+        self.msg.x_var_vpix = pixart_v.x_var
+        self.msg.y_var_vpix = pixart_v.y_var
+
+        self.msg.x_pos_hpix = pixart_h.x
+        self.msg.y_pos_hpix = pixart_h.y
+        self.msg.x_var_hpix = pixart_h.x_var
+        self.msg.y_var_hpix = pixart_h.y_var
+
+        # self.msg.x_pos_imu = imu.x
+        # self.msg.y_pos_imu = imu.y
+        # self.msg.x_var_imu = imu.x_var
+        # self.msg.y_var_imu = imu.y_var
+
+        self.data_process = False
+
+    def getMeasurements_server(self,request):
+        """forward SyncPos data to SF_kalman_filter node
+
+        Args:
+            request (_type_): empty service request (required by the service signature)
+
+        Returns:
+            _type_: return SyncPos data
+        """
+        while self.data_process:
+            pass
+        
+        return MeasurementsResponse(self.msg)
+        
+if __name__ == "__main__":
+    rospy.init_node("synced_measurements")
+    sync_obj = sync()
+    rospy.spin()
+
+
+
diff --git a/sensorfusion/src/lidar_pos.cpp b/sensorfusion/src/lidar_pos.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..ae3690895768bd7ed9bffb16dd225b9a5aac97e4
--- /dev/null
+++ b/sensorfusion/src/lidar_pos.cpp
@@ -0,0 +1,172 @@
+#include "ros/ros.h"
+#include "sensor_msgs/LaserScan.h"
+#include <vector>
+#include <unistd.h>
+#include "sensorfusion/Position.h"
+#include <cmath>
+#include <algorithm>
+
+#define RAD2DEG(x) ((x)*180./M_PI)
+
+using namespace std;
+
+static vector<double> ypos_dist_vec, xpos_dist_vec, min_dist_degree = {};
+double x_pos = 0.0, y_pos = 0.0, init_min_y_dist = 0.0, init_min_x_dist = 0.0, min_x_dist = 0.0, min_y_dist = 0.0, init_degree = 0.0, current_degree = 0.0;
+static bool init_not_done = true;
+
+class Lidar
+{
+public:
+  Lidar()
+  {
+    pub_ = nh.advertise<sensorfusion::Position>("/lidar_pos_cov", 1);
+
+    sub_ = nh.subscribe("/scan", 1000, &Lidar::scanCallback, this);
+  }
+
+
+void scanCallback(const sensor_msgs::LaserScan::ConstPtr& scan)
+{    
+    //Amount of measurements = "time between scans" divided by "time between measurements"
+    static int count = scan->scan_time / scan->time_increment; 
+
+    /*Determine Initial Distance to walls with Lidar Scanner
+    ------------------------------------------*/
+
+    //Do this only if no initial measurement has been determined yet (checked via the init_not_done flag)
+    if (init_not_done){
+        
+        for(int i = 0; i < count; i++) {
+            
+            double degree = RAD2DEG(scan->angle_min + scan->angle_increment * i);
+
+           // Save distance measurements of Lidar (front wall)
+	        if(degree > -10.0 && degree < 10.0){
+                
+                if(scan->ranges[i] < scan->range_min) continue; //filter values with distance less than range_min (default = 0.1 [m])
+                ypos_dist_vec.push_back(scan->ranges[i]);
+                min_dist_degree.push_back(degree);
+
+            } 
+        
+            // Save distance measurements of Lidar (left wall)
+            if(degree > 80.0 && degree < 100.0) {
+                
+                if(scan->ranges[i] < scan->range_min) continue; //filter values with distance less than range_min (default = 0.1 [m])
+                xpos_dist_vec.push_back(scan->ranges[i]);
+
+            }
+
+            //Save distance measurements of Lidar (right wall)
+            //if(degree > -92.5 && degree < -87.5) {
+                
+              // if(scan->ranges[i] < scan->range_min) continue; //filter values with distance less than range_min (default = 0.1 [m])
+              //  xpos_dist_vec.push_back(scan->ranges[i]); 
+              //  min_dist_degree.push_back(degree);
+              //   
+            //}
+        
+        }
+
+    //Set initial measurements
+    init_min_x_dist = *min_element(xpos_dist_vec.begin(), xpos_dist_vec.end()) * 1000.0; //convert m in mm
+    
+    init_min_y_dist = *min_element(ypos_dist_vec.begin(), ypos_dist_vec.end()) * 1000.0; //convert m in mm
+
+    init_degree = min_dist_degree[min_element(ypos_dist_vec.begin(), ypos_dist_vec.end()) - ypos_dist_vec.begin()]; //Get index of the smallest value
+    
+    //Clear Vectors with values
+    xpos_dist_vec.clear(); ypos_dist_vec.clear();
+
+    //After initialization no further necessary
+    init_not_done = false;
+    }
+
+    
+    /*Determine X and Y Position
+    ------------------------------------------*/
+ 
+    for(int i = 0; i < count; i++) {
+        
+        double degree = RAD2DEG(scan->angle_min + scan->angle_increment * i);
+
+        //Save distance measurements of Lidar (front wall)
+	     if(degree > -10.0 && degree < 10.0){
+                
+                if(scan->ranges[i] < scan->range_min) continue; //filter values with distance less than range_min (default = 0.1 [m])
+                ypos_dist_vec.push_back(scan->ranges[i]);
+                min_dist_degree.push_back(degree);
+
+
+            } 
+        
+        //Save distance measurements of Lidar (left wall)
+        if(degree > 80.0 && degree < 100.0) {
+                
+                if(scan->ranges[i] < scan->range_min) continue; //filter values with distance less than range_min (default = 0.1 [m])
+                xpos_dist_vec.push_back(scan->ranges[i]); 
+
+            }
+
+      //   //Save distance measurements of Lidar (right wall)
+      //  if(degree > -92.5 && degree < -87.5) {
+                
+      //           if(scan->ranges[i] < scan->range_min) continue; //filter values with distance less than range_min (default = 0.1 [m])
+      //           xpos_dist_vec.push_back(scan->ranges[i]);
+      //           min_dist_degree.push_back(degree);
+
+      //       }
+
+    }
+        
+        sensorfusion::Position msg;
+        
+        //Determine x and y Position with Difference of current min distance measurement and init measurements
+        
+        //left and front wall
+        min_x_dist = *min_element(xpos_dist_vec.begin(), xpos_dist_vec.end()) * 1000.0; //convert m in mm
+        min_y_dist = *min_element(ypos_dist_vec.begin(), ypos_dist_vec.end()) * 1000.0; //convert m in mm
+        current_degree = min_dist_degree[min_element(ypos_dist_vec.begin(), ypos_dist_vec.end()) - ypos_dist_vec.begin()];
+
+
+        //left and front wall
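+        //x is the change in distance to the left wall since the initial scan,
+        //y is the negative change in distance to the front wall (moving towards the front wall increases y)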
+        msg.x = (min_x_dist - init_min_x_dist);
+        msg.y = -(min_y_dist - init_min_y_dist);
+        msg.azimuth = init_degree - current_degree + 90.0;
+
+        // //right and front wall
+        //msg.x = -(min_x_dist - init_min_x_dist);
+        //msg.y = -(min_y_dist - init_min_y_dist);
+        
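+        //Variance model: presumably the 2% relative error is treated as a 3-sigma bound, i.e. var = ((0.02*d)/3)^2;
+        //below 1000 mm the value at 1 m, 44.44 mm^2, is used as a fixed floor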
+        if(min_x_dist <= 1000){msg.x_var = 44.44;} 
+        else{msg.x_var = pow(((min_x_dist*0.02)/3.0),2);} //relative error of 2%
+
+        if(min_y_dist <= 1000){msg.y_var = 44.44;} 
+        else{msg.y_var = pow(((min_y_dist*0.02)/3.0),2);} //relative error of 2%
+
+        msg.header.stamp = ros::Time::now();
+        msg.header.frame_id = "/lidar_frame";
+              
+        pub_.publish(msg);
+
+        //Clear Vectors with values
+        xpos_dist_vec.clear(); ypos_dist_vec.clear();
+} 
+
+private:
+  ros::NodeHandle nh; 
+  ros::Publisher pub_;
+  ros::Subscriber sub_;
+};
+
+int main(int argc, char **argv)
+{
+  ros::init(argc, argv, "lidar_pos");
+  Lidar lidar_obj;
+  ros::spin();
+
+  return 0;
+}
+
+
+
diff --git a/sensorfusion/srv/Measurements.srv b/sensorfusion/srv/Measurements.srv
new file mode 100644
index 0000000000000000000000000000000000000000..a8fd6664ba99cb0c273dabace3e01101f6daba15
--- /dev/null
+++ b/sensorfusion/srv/Measurements.srv
@@ -0,0 +1,4 @@
+#Ask only for measurement data
+---
+#Measurement data
+SyncPos measure
\ No newline at end of file
diff --git a/sensorfusion/srv/Trajectory.srv b/sensorfusion/srv/Trajectory.srv
new file mode 100644
index 0000000000000000000000000000000000000000..cbfd36f1686a9d865d72564a831eba6fb33987fa
--- /dev/null
+++ b/sensorfusion/srv/Trajectory.srv
@@ -0,0 +1,23 @@
+#Index of path (mini segments)
+int64 path_number
+
+#Index of segment (parts of trajectory)
+int64 seg_number
+
+#Linear velocity
+float64 velocity
+---
+#Angle at which robot is heading
+float64 angle
+
+#Time to drive path
+float64 dt
+
+#Direction at which robot needs to drive
+float64 direction
+
+#Segment time to drive
+float64 seg_dt
+
+#Segment angle
+float64 seg_angle
\ No newline at end of file
diff --git a/sensorfusion/test/drive_test.py b/sensorfusion/test/drive_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..81dcb4a6c3770a16213269473d7fa9fd2b610be5
--- /dev/null
+++ b/sensorfusion/test/drive_test.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+from typing import Type
+from std_msgs.msg import Float64MultiArray, Float64
+import serial
+import rospy
+import numpy as np
+from sensorfusion.srv import Trajectory, TrajectoryResponse
+
+#Open Serial to create connection to motor drivers
+ser = serial.Serial(
+		'/dev/ttyS0', 
+		baudrate = 115200,
+		parity = serial.PARITY_NONE,
+		stopbits=serial.STOPBITS_ONE,
+		bytesize =serial.EIGHTBITS,
+		timeout = 1
+		)
+
+ser.close()
+ser.open()
+
+#Save X,Y Position in trajectory
+with open('/home/pi/catkin_ws/src/sensorfusion/csv_path/coordinate_system.csv', newline='') as csvfile:
+        trajectory = np.loadtxt(csvfile, delimiter=',')
+
+#TODO Check whether angles are correct
+def calc_robot_direction_angle(current_pos:Type[Float64MultiArray], target_pos:Type[Float64MultiArray])->Float64:
+    """Calculate direction of robot which it should use to reach target position from current position
+
+    Args:
+        current_pos (Type[Float64MultiArray]): current position where the robot is located (approximately)
+        target_pos (Type[Float64MultiArray]): target position which should be reached
+
+    Returns:
+        (Float64, Float64): driving direction and robot angle in degrees
+    """
+
+    #X values identical, but difference in Y values. Determine driving forward or backward
+    if current_pos[0] == target_pos[0]:
+        if target_pos[1] > current_pos[1]:
+            robot_direction = 90.0 #Robot drives forwards
+            robot_angle = 90.0
+        else:
+            robot_direction = 270.0 #Robot drives backwards
+            robot_angle = 270.0
+
+    #Y values identical, but difference in X values. Determine driving left or right
+    elif current_pos[1] == target_pos[1]:
+        if target_pos[0] > current_pos[0]:
+            robot_direction = 180.0 #Robot drives to the right
+            robot_angle = 0.0
+        else:
+            robot_direction = 0 #Robot drives to the left
+            robot_angle = 180.0
+
+    #target point above
+    elif target_pos[1] > current_pos[1]:
+
+        #target point on left upper
+        if current_pos[0] > target_pos[0]:
+
+            #arctan(Perpendicular / Base) = angle
+            robot_direction = np.arctan(abs(target_pos[1]-current_pos[1])/abs(target_pos[0]-current_pos[0]))*(180.0/np.pi) 
+            robot_angle = np.arctan(abs(target_pos[1]-current_pos[1])/abs(target_pos[0]-current_pos[0]))*(180.0/np.pi) + 180.0
+        
+        #target point on right upper
+        else:
+            robot_direction = 90.0 + np.arctan((target_pos[1]-current_pos[1])/(target_pos[0]-current_pos[0]))*(180.0/np.pi)
+            robot_angle = np.arctan((target_pos[1]-current_pos[1])/(target_pos[0]-current_pos[0]))*(180.0/np.pi)
+
+    #target point below
+    else:#target_pos[1] < current_pos[1]:
+
+        #target point on left side
+        if current_pos[0] > target_pos[0]:
+            robot_direction = 270.0 + (np.arctan(abs(target_pos[1] - current_pos[1])/abs(target_pos[0] - current_pos[0])))*(180.0/np.pi)
+            robot_angle = np.arctan((target_pos[1]-current_pos[1])/(target_pos[0]-current_pos[0]))*(180.0/np.pi) - 180.0
+
+        #target point on right side
+        else:
+            robot_direction = 180.0 + np.arctan(abs(target_pos[1] - current_pos[1])/(target_pos[0] - current_pos[0]))*(180.0/np.pi)
+            robot_angle = np.arctan(abs(target_pos[1] - current_pos[1])/(target_pos[0] - current_pos[0]))*(180.0/np.pi)
+
+    return robot_direction, robot_angle
+
+def calc_travel_time(current_pos:Type[Float64MultiArray], target_pos:Type[Float64MultiArray], robot_velocity:Type[Float64])->Float64:
+    """Calculate time which is needed to travel from current position to target position
+
+    Args:
+        current_pos (Type[Float64MultiArray]): current position where the robot is located (approximately)
+        target_pos (Type[Float64MultiArray]): target position which should be reached
+        robot_velocity (Type[Float64]): linear velocity of the robot
+
+    Returns:
+        Float64: calculated time
+    """
+    route_length = (abs(current_pos[0] - target_pos[0])**2 + abs(current_pos[1] - target_pos[1])**2)**(1/2)
+    time = route_length / robot_velocity
+    return time
+
+def callback():
+
+    #Set parameters robot
+    amount_rounds = 1
+    robot_velocity = 20
+    
+    #Driving
+    for j in range(amount_rounds):
+
+        for i in range(1,len(trajectory)):
+            robot_direction, robot_angle = calc_robot_direction_angle(trajectory[i-1],trajectory[i])
+            travel_time = calc_travel_time(trajectory[i],trajectory[i-1],robot_velocity)
+
+            if(i > 1):
+                print("\nDriving from position ", trajectory[i-1], " to ", trajectory[i])
+
+            #rospy.sleep(0.5)
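+            #Serial control frame '<a,b,c,d>': the fields are assumed to be direction, an unused value,
+            # velocity and a move flag (mirroring the Motion message used by the drive node)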
+            control_input = bytes(f'<{robot_direction},{0},{robot_velocity},{1}>\n', 'utf-8')
+            ser.write(control_input)
+
+            #Let the robot drive certain amount of time in direction
+            rospy.sleep(travel_time)
+            
+            #Let the robot stop for a short time
+            control_input = bytes(f'<{0},{0},{0},{0}>\n', 'utf-8')
+            ser.write(control_input)
+            rospy.sleep(3.0)
+
+        control_input = bytes(f'<{0},{0},{0},{0}>\n', 'utf-8')
+        ser.write(control_input)
+        #rospy.sleep(3.0)
+
+
+if __name__=="__main__":
+    rospy.init_node("trajectory")
+    callback()
\ No newline at end of file
diff --git a/sensorfusion/test/test_trajectory_init.py b/sensorfusion/test/test_trajectory_init.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a044feadf37d8aa37b25c129abac5c5d06056b1
--- /dev/null
+++ b/sensorfusion/test/test_trajectory_init.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+import numpy as np
+import math
+import csv
+
+x_pos = []
+y_pos = []
+path_list = []
+trajectory = [[1,0],[2,0],[2,2]]
+
+for i in range(len(trajectory)):
+    x_pos.append(trajectory[i][0])
+    y_pos.append(trajectory[i][1])
+
+print(x_pos)
+print(y_pos)    
+
+segment_lengths = (np.diff(x_pos)**2 + np.diff(y_pos)**2)**.5
+print(segment_lengths)
+
+cumsum_length = np.zeros_like(x_pos)
+print(cumsum_length)
+
+if(len(segment_lengths) == 1):
+    np.delete(cumsum_length,0)
+    cumsum_length[1] = segment_lengths[0]
+
+elif(len(cumsum_length) < 3):
+    cumsum_length[0] = segment_lengths[0]
+    cumsum_length[1] = segment_lengths[0] + segment_lengths[1]
+    print(cumsum_length)
+
+else:    
+    cumsum_length[1:] = np.cumsum(segment_lengths)
+    print(cumsum_length)
+
+cumsum_length_int = np.linspace(0,cumsum_length.max(),10)
+print(cumsum_length_int)
+
+
+x_pos_int = np.interp(cumsum_length_int, cumsum_length, x_pos)
+y_pos_int = np.interp(cumsum_length_int, cumsum_length, y_pos)
+
+# for i in range(len(x_pos_int)):
+#     path_list.append([round(x_pos_int[i],4),round(y_pos_int[i],4)])
+
+# with open('/home/pi/catkin_ws/src/sensorfusion/csv/path.csv', 'w', encoding='UTF8', newline='') as f:
+#     writer = csv.writer(f)
+#     writer.writerows(path_list)
\ No newline at end of file
diff --git a/ydlidar_ros b/ydlidar_ros
new file mode 160000
index 0000000000000000000000000000000000000000..39f4307059e83e08c462fbe08addac178cab4210
--- /dev/null
+++ b/ydlidar_ros
@@ -0,0 +1 @@
+Subproject commit 39f4307059e83e08c462fbe08addac178cab4210