diff --git a/README.md b/README.md
index 980c67be3ffd7c890174ea5e26b039cfc59f3b5f..f293c15c6ee137fd1ec58da6dbd705f5901a7297 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 [![Build](https://git-ce.rwth-aachen.de/wzl-mq-ms/forschung-lehre/lava/unified-device-interface/python/badges/master/pipeline.svg)](https://git-ce.rwth-aachen.de/wzl-mq-ms/forschung-lehre/lava/unified-device-interface/python/commits/master)
 
 # Python Unified Device Interface
-Current stable version: 10.0.3
+Current stable version: 10.0.4
 
 ## Installation
 1. Install the WZL-UDI package via pip
@@ -69,6 +69,10 @@ Funded by the Deutsche Forschungsgemeinschaft (DFG, German Research Foundation)
 
 ## Recent changes
 
+**10.0.4** - 2024-03-21
+  - increased logging verbosity of the streaming class
+  - fixed streaming of semantic measurements
+
 **10.0.3** - 2024-03-21
   - added license field to semantic provision of measurement range
 
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 1e3781b3a3ed69d6e1f522b840ae26070cba2409..13536756b76852228131e779b9f9a3c93bc9b44f 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,8 +1,9 @@
-aiohttp==3.8.4
+aiohttp==3.9.1
 Deprecated==1.2.13
 nest-asyncio==1.5.6
 pytest==7.1.1
+pytz==2024.1
 rdflib==7.0.0
 sphinx==3.5.2
 sphinx-rtd-theme==1.0.0
 wzl-mqtt~=2.6.1
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 759acd45c7751889903fca75fd0a0c903515f79a..a1029db6c4e2cf3956bf042c3b09d3e1767eaa39 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
     long_description = fh.read()
 
 setup(name='wzl-udi',
-      version='10.0.3',
+      version='10.0.4',
       url='https://git-ce.rwth-aachen.de/wzl-mq-public/soil/python',
       project_urls={
           "Bug Tracker": "https://git-ce.rwth-aachen.de/wzl-mq-public/soil/python/-/issues",
diff --git a/src/soil/measurement.py b/src/soil/measurement.py
index 134369a70a9af5ac99cf95ec4cec1d6e749577c3..9160e3d5baf0e8c072824cd6fc9421f0c30ab315 100644
--- a/src/soil/measurement.py
+++ b/src/soil/measurement.py
@@ -259,7 +259,6 @@ class Measurement(Variable):
             data_graph.bind(Semantics.prefix, Semantics.namespace)
             uncertainty_subject = Semantics.namespace[f'{self._semantic_name}MeasurementUncertainty']
             covariance = self.__getitem__('covariance', 0)
-            print(covariance)
             if covariance is not None:
                 rdf_covariance = self.serialize_value(data_graph, covariance)
                 data_graph.add((uncertainty_subject, Namespaces.rdf.type, Namespaces.si.CoverageInterval))
diff --git a/src/soil/stream.py b/src/soil/stream.py
index 3b6ed19bef93abf8368ca507f91b91727d0706a3..12ab43e83af9b5c209bed7068086a52f18901502 100644
--- a/src/soil/stream.py
+++ b/src/soil/stream.py
@@ -1,5 +1,6 @@
 import datetime
 import json
+import traceback
 from abc import ABC, abstractmethod
 from typing import List, Callable, Any, Union, Dict, Tuple
 
@@ -20,8 +21,15 @@ logger = root_logger.get(__name__)
 
 class JobError(Exception):
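+    """Raised when a streaming job fails; optionally wraps the causing exception."""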
 
-    def __init__(self):
-        pass
+    def __init__(self, message: str, predecessor: Exception = None):
+        super().__init__(message)
+        self._predecessor = predecessor
+        self._message = message
+
+    @property
+    def message(self) -> str:
+        return self._message
 
 
 class Job(ABC):
@@ -73,7 +81,7 @@ class Job(ABC):
             time = time if time is not None else datetime.datetime.now()
             return self._next is not None and self._next <= time and self._is_triggered()
         except Exception as e:
-            raise JobError()
+            raise JobError('is_triggered failed.', predecessor=e)
 
     @abstractmethod
     def _is_triggered(self) -> bool:
@@ -97,7 +105,7 @@ class Job(ABC):
 
     def data(self, model: Component = None) -> Dict:
         if model is None:
-            raise JobError()
+            raise JobError('Cannot retrieve data: model is missing.')
         try:
             uuids = self.topic.split('/')
             data = model.__getitem__(uuids).serialize([], False)
@@ -109,11 +117,11 @@ class Job(ABC):
             data['timestamp'] = variable.serialize_time(datetime.datetime.now())
             return data
         except Exception as e:
-            raise JobError()
+            raise JobError('Cannot retrieve data due to another error.', predecessor=e)
 
     def semantic_data(self, model: Component = None) -> (str, rdflib.Graph):
         if model is None:
-            raise JobError()
+            raise JobError('Cannot retrieve semantic data: model is missing.')
         try:
             uuids = self.topic.split('/')
             element = model.__getitem__(uuids)
@@ -122,7 +130,7 @@ class Job(ABC):
             data += element.serialize_semantics(ResourceType.observation)
 
             measurement_subject = \
-                list((data.subjects(predicate=Namespaces.rdf.type, object=Namespaces.soil.Measurement)))[0]
+                list((data.subjects(predicate=Namespaces.rdf.type, object=Namespaces.sosa.Observation)))[0]
 
             # replace value
             data.remove((None, Namespaces.qudt.value, None))
@@ -135,7 +143,7 @@ class Job(ABC):
 
             return element.semantic_name, data
         except Exception as e:
-            raise JobError()
+            raise JobError('Cannot retrieve semantic data due to another error.', predecessor=e)
 
 
 class FixedJob(Job):
@@ -291,6 +299,8 @@ class StreamScheduler(object):
                         try:
                             self._publisher.get('tier1').publish(job.topic, message, 1)
                         except ClientNotFoundError:
+                            logger.warn("MQTT client 'tier1' not found, publishing via the default publisher instead.")
+                            logger.warn(traceback.format_exc())
                             self._publisher.publish(job.topic, message, 1)
 
                         # try to send semantic data package
@@ -305,16 +315,19 @@ class StreamScheduler(object):
                             try:
                                 self._publisher.get('tier2').publish(url, message, 1)
                             except ClientNotFoundError:
+                                logger.warn("MQTT client 'tier2' not found, publishing via the default publisher instead.")
+                                logger.warn(traceback.format_exc())
                                 self._publisher.publish(url, message, 1)
 
-                        except JobError:
-                            pass
+                        except JobError as e:
+                            logger.error(e.message)
+                            logger.error(traceback.format_exc())
 
                     job.schedule()
                     next = job.determine_next(next)
-                except JobError:
-                    # logger.error(traceback.format_exc())
-                    # job.stop()
+                except JobError as e:
+                    logger.error(e.message)
+                    logger.error(traceback.format_exc())
                     pass
 
             if next is None: