diff --git a/Bachelorarbeit/Bachelorarbeit.zip b/Bachelorarbeit/Bachelorarbeit.zip
index c0852177a32a2f2274f3175b8df15c02ee90830e..e29a85484b90f602fd1ed42a3068baa1706a09da 100644
Binary files a/Bachelorarbeit/Bachelorarbeit.zip and b/Bachelorarbeit/Bachelorarbeit.zip differ
diff --git a/Bachelorarbeit/ToF_Preasentation.pptx b/Bachelorarbeit/ToF_Preasentation.pptx
new file mode 100644
index 0000000000000000000000000000000000000000..26a62a04f863fcbd99b9cbbb1855d775ba307055
Binary files /dev/null and b/Bachelorarbeit/ToF_Preasentation.pptx differ
diff --git a/Bachelorarbeit/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813.ino b/Bachelorarbeit/V1/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813.ino
similarity index 100%
rename from Bachelorarbeit/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813.ino
rename to Bachelorarbeit/V1/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813/18_vl53l7cx_clean_with_STlibrary_Shift_register_20250127154813.ino
diff --git a/Bachelorarbeit/4_vl53l7cx_clean_with_STlibrary/4_vl53l7cx_clean_with_STlibrary.ino b/Bachelorarbeit/V1/4_vl53l7cx_clean_with_STlibrary/4_vl53l7cx_clean_with_STlibrary.ino
similarity index 100%
rename from Bachelorarbeit/4_vl53l7cx_clean_with_STlibrary/4_vl53l7cx_clean_with_STlibrary.ino
rename to Bachelorarbeit/V1/4_vl53l7cx_clean_with_STlibrary/4_vl53l7cx_clean_with_STlibrary.ino
diff --git "a/Bachelorarbeit/Abk\303\274rzungen.tex" "b/Bachelorarbeit/V1/Abk\303\274rzungen.tex"
similarity index 100%
rename from "Bachelorarbeit/Abk\303\274rzungen.tex"
rename to "Bachelorarbeit/V1/Abk\303\274rzungen.tex"
diff --git a/Bachelorarbeit/Anforderungen.tex b/Bachelorarbeit/V1/Anforderungen.tex
similarity index 100%
rename from Bachelorarbeit/Anforderungen.tex
rename to Bachelorarbeit/V1/Anforderungen.tex
diff --git "a/Bachelorarbeit/Assets/Kapitel/K1 - Einf\303\274hrung.tex" "b/Bachelorarbeit/V1/Assets/Kapitel/K1 - Einf\303\274hrung.tex"
similarity index 100%
rename from "Bachelorarbeit/Assets/Kapitel/K1 - Einf\303\274hrung.tex"
rename to "Bachelorarbeit/V1/Assets/Kapitel/K1 - Einf\303\274hrung.tex"
diff --git a/Bachelorarbeit/Assets/Kapitel/Kapitel/K2 - Stand der Technik.tex b/Bachelorarbeit/V1/Assets/Kapitel/Kapitel/K2 - Stand der Technik.tex
similarity index 100%
rename from Bachelorarbeit/Assets/Kapitel/Kapitel/K2 - Stand der Technik.tex
rename to Bachelorarbeit/V1/Assets/Kapitel/Kapitel/K2 - Stand der Technik.tex
diff --git a/Bachelorarbeit/Assets/Kapitel/Kapitel/Kapitel/K3 - Grundlagen und Methodik.tex b/Bachelorarbeit/V1/Assets/Kapitel/Kapitel/Kapitel/K3 - Grundlagen und Methodik.tex
similarity index 100%
rename from Bachelorarbeit/Assets/Kapitel/Kapitel/Kapitel/K3 - Grundlagen und Methodik.tex
rename to Bachelorarbeit/V1/Assets/Kapitel/Kapitel/Kapitel/K3 - Grundlagen und Methodik.tex
diff --git a/Bachelorarbeit/Assets/Kapitel/Kapitel/Kapitel/Kapitel/K4 - Umsetzung.tex b/Bachelorarbeit/V1/Assets/Kapitel/Kapitel/Kapitel/Kapitel/K4 - Umsetzung.tex
similarity index 100%
rename from Bachelorarbeit/Assets/Kapitel/Kapitel/Kapitel/Kapitel/K4 - Umsetzung.tex
rename to Bachelorarbeit/V1/Assets/Kapitel/Kapitel/Kapitel/Kapitel/K4 - Umsetzung.tex
diff --git a/Bachelorarbeit/Assets/Kapitel/Kapitel/Kapitel/Kapitel/Kapitel/K5 - Bewertung und Ausblick.tex b/Bachelorarbeit/V1/Assets/Kapitel/Kapitel/Kapitel/Kapitel/Kapitel/K5 - Bewertung und Ausblick.tex
similarity index 100%
rename from Bachelorarbeit/Assets/Kapitel/Kapitel/Kapitel/Kapitel/Kapitel/K5 - Bewertung und Ausblick.tex
rename to Bachelorarbeit/V1/Assets/Kapitel/Kapitel/Kapitel/Kapitel/Kapitel/K5 - Bewertung und Ausblick.tex
diff --git a/Bachelorarbeit/Assets/titelangaben.tex b/Bachelorarbeit/V1/Assets/titelangaben.tex
similarity index 100%
rename from Bachelorarbeit/Assets/titelangaben.tex
rename to Bachelorarbeit/V1/Assets/titelangaben.tex
diff --git a/Bachelorarbeit/BA_old.bib b/Bachelorarbeit/V1/BA_old.bib
similarity index 100%
rename from Bachelorarbeit/BA_old.bib
rename to Bachelorarbeit/V1/BA_old.bib
diff --git a/Bachelorarbeit/V1/Bachelorarbeit.zip b/Bachelorarbeit/V1/Bachelorarbeit.zip
new file mode 100644
index 0000000000000000000000000000000000000000..c0852177a32a2f2274f3175b8df15c02ee90830e
Binary files /dev/null and b/Bachelorarbeit/V1/Bachelorarbeit.zip differ
diff --git a/Bachelorarbeit/Code_settings.tex b/Bachelorarbeit/V1/Code_settings.tex
similarity index 100%
rename from Bachelorarbeit/Code_settings.tex
rename to Bachelorarbeit/V1/Code_settings.tex
diff --git a/Bachelorarbeit/Einleitung.tex b/Bachelorarbeit/V1/Einleitung.tex
similarity index 100%
rename from Bachelorarbeit/Einleitung.tex
rename to Bachelorarbeit/V1/Einleitung.tex
diff --git a/Bachelorarbeit/Exported Items.bib b/Bachelorarbeit/V1/Exported Items.bib
similarity index 100%
rename from Bachelorarbeit/Exported Items.bib
rename to Bachelorarbeit/V1/Exported Items.bib
diff --git a/Bachelorarbeit/Fazit und Ausblick.tex b/Bachelorarbeit/V1/Fazit und Ausblick.tex
similarity index 100%
rename from Bachelorarbeit/Fazit und Ausblick.tex
rename to Bachelorarbeit/V1/Fazit und Ausblick.tex
diff --git a/Bachelorarbeit/Flowchart_Ba.drawio b/Bachelorarbeit/V1/Flowchart_Ba.drawio
similarity index 100%
rename from Bachelorarbeit/Flowchart_Ba.drawio
rename to Bachelorarbeit/V1/Flowchart_Ba.drawio
diff --git a/Bachelorarbeit/Flowchart_Ba.drawio.png b/Bachelorarbeit/V1/Flowchart_Ba.drawio.png
similarity index 100%
rename from Bachelorarbeit/Flowchart_Ba.drawio.png
rename to Bachelorarbeit/V1/Flowchart_Ba.drawio.png
diff --git a/Bachelorarbeit/Forschungsfragen.txt b/Bachelorarbeit/V1/Forschungsfragen.txt
similarity index 100%
rename from Bachelorarbeit/Forschungsfragen.txt
rename to Bachelorarbeit/V1/Forschungsfragen.txt
diff --git a/Bachelorarbeit/Fragen_an_Sophie b/Bachelorarbeit/V1/Fragen_an_Sophie
similarity index 100%
rename from Bachelorarbeit/Fragen_an_Sophie
rename to Bachelorarbeit/V1/Fragen_an_Sophie
diff --git a/Bachelorarbeit/Kapitel/4 Acronyme.tex b/Bachelorarbeit/V1/Kapitel/4 Acronyme.tex
similarity index 100%
rename from Bachelorarbeit/Kapitel/4 Acronyme.tex
rename to Bachelorarbeit/V1/Kapitel/4 Acronyme.tex
diff --git a/Bachelorarbeit/Overleaf_Edit_Link.txt b/Bachelorarbeit/V1/Overleaf_Edit_Link.txt
similarity index 100%
rename from Bachelorarbeit/Overleaf_Edit_Link.txt
rename to Bachelorarbeit/V1/Overleaf_Edit_Link.txt
diff --git a/Bachelorarbeit/Pic.bib b/Bachelorarbeit/V1/Pic.bib
similarity index 100%
rename from Bachelorarbeit/Pic.bib
rename to Bachelorarbeit/V1/Pic.bib
diff --git a/Bachelorarbeit/Praxisprojekt_Bericht_Rene_Ebeling.pdf b/Bachelorarbeit/V1/Praxisprojekt_Bericht_Rene_Ebeling.pdf
similarity index 100%
rename from Bachelorarbeit/Praxisprojekt_Bericht_Rene_Ebeling.pdf
rename to Bachelorarbeit/V1/Praxisprojekt_Bericht_Rene_Ebeling.pdf
diff --git a/Bachelorarbeit/Primary Sources.bib b/Bachelorarbeit/V1/Primary Sources.bib
similarity index 100%
rename from Bachelorarbeit/Primary Sources.bib
rename to Bachelorarbeit/V1/Primary Sources.bib
diff --git a/Bachelorarbeit/Primary_Sources.bib b/Bachelorarbeit/V1/Primary_Sources.bib
similarity index 100%
rename from Bachelorarbeit/Primary_Sources.bib
rename to Bachelorarbeit/V1/Primary_Sources.bib
diff --git a/Bachelorarbeit/Quellen/1-s2.0-S0921889003001556-main.pdf b/Bachelorarbeit/V1/Quellen/1-s2.0-S0921889003001556-main.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/1-s2.0-S0921889003001556-main.pdf
rename to Bachelorarbeit/V1/Quellen/1-s2.0-S0921889003001556-main.pdf
diff --git a/Bachelorarbeit/Quellen/Articel.bib b/Bachelorarbeit/V1/Quellen/Articel.bib
similarity index 100%
rename from Bachelorarbeit/Quellen/Articel.bib
rename to Bachelorarbeit/V1/Quellen/Articel.bib
diff --git a/Bachelorarbeit/Quellen/Collision_Detection_with_joint_torque_Sensors.pdf b/Bachelorarbeit/V1/Quellen/Collision_Detection_with_joint_torque_Sensors.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/Collision_Detection_with_joint_torque_Sensors.pdf
rename to Bachelorarbeit/V1/Quellen/Collision_Detection_with_joint_torque_Sensors.pdf
diff --git a/Bachelorarbeit/V1/Quellen/DIN_EN_12464-12021-11.pdf b/Bachelorarbeit/V1/Quellen/DIN_EN_12464-12021-11.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e721ffff2be166d1797340c0b53cf2353b63609e
Binary files /dev/null and b/Bachelorarbeit/V1/Quellen/DIN_EN_12464-12021-11.pdf differ
diff --git a/Bachelorarbeit/Quellen/DIN_EN_ISO_10218-1.pdf b/Bachelorarbeit/V1/Quellen/DIN_EN_ISO_10218-1.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/DIN_EN_ISO_10218-1.pdf
rename to Bachelorarbeit/V1/Quellen/DIN_EN_ISO_10218-1.pdf
diff --git a/Bachelorarbeit/Quellen/DIN_EN_ISO_10218-2.pdf b/Bachelorarbeit/V1/Quellen/DIN_EN_ISO_10218-2.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/DIN_EN_ISO_10218-2.pdf
rename to Bachelorarbeit/V1/Quellen/DIN_EN_ISO_10218-2.pdf
diff --git a/Bachelorarbeit/Quellen/DIN_EN_ISO_13855.pdf b/Bachelorarbeit/V1/Quellen/DIN_EN_ISO_13855.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/DIN_EN_ISO_13855.pdf
rename to Bachelorarbeit/V1/Quellen/DIN_EN_ISO_13855.pdf
diff --git a/Bachelorarbeit/Quellen/DIN_EN_ISO_8373.pdf b/Bachelorarbeit/V1/Quellen/DIN_EN_ISO_8373.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/DIN_EN_ISO_8373.pdf
rename to Bachelorarbeit/V1/Quellen/DIN_EN_ISO_8373.pdf
diff --git a/Bachelorarbeit/Quellen/Laser_Range_finding.pdf b/Bachelorarbeit/V1/Quellen/Laser_Range_finding.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/Laser_Range_finding.pdf
rename to Bachelorarbeit/V1/Quellen/Laser_Range_finding.pdf
diff --git a/Bachelorarbeit/Quellen/Li_2019_J._Phys.__Conf._Ser._1267_012036.pdf b/Bachelorarbeit/V1/Quellen/Li_2019_J._Phys.__Conf._Ser._1267_012036.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/Li_2019_J._Phys.__Conf._Ser._1267_012036.pdf
rename to Bachelorarbeit/V1/Quellen/Li_2019_J._Phys.__Conf._Ser._1267_012036.pdf
diff --git "a/Bachelorarbeit/Quellen/Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D\342\200\220Bildgebung.pdf" "b/Bachelorarbeit/V1/Quellen/Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D\342\200\220Bildgebung.pdf"
similarity index 100%
rename from "Bachelorarbeit/Quellen/Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D\342\200\220Bildgebung.pdf"
rename to "Bachelorarbeit/V1/Quellen/Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D\342\200\220Bildgebung.pdf"
diff --git a/Bachelorarbeit/Quellen/Robotics_Control_Sensing_Vision_and_Inte.pdf b/Bachelorarbeit/V1/Quellen/Robotics_Control_Sensing_Vision_and_Inte.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/Robotics_Control_Sensing_Vision_and_Inte.pdf
rename to Bachelorarbeit/V1/Quellen/Robotics_Control_Sensing_Vision_and_Inte.pdf
diff --git a/Bachelorarbeit/Quellen/Sensoren_Buch.pdf b/Bachelorarbeit/V1/Quellen/Sensoren_Buch.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/Sensoren_Buch.pdf
rename to Bachelorarbeit/V1/Quellen/Sensoren_Buch.pdf
diff --git a/Bachelorarbeit/Quellen/VL53L7CX_Datasheet.pdf b/Bachelorarbeit/V1/Quellen/VL53L7CX_Datasheet.pdf
similarity index 100%
rename from Bachelorarbeit/Quellen/VL53L7CX_Datasheet.pdf
rename to Bachelorarbeit/V1/Quellen/VL53L7CX_Datasheet.pdf
diff --git a/Bachelorarbeit/SCH-19.bib b/Bachelorarbeit/V1/SCH-19.bib
similarity index 100%
rename from Bachelorarbeit/SCH-19.bib
rename to Bachelorarbeit/V1/SCH-19.bib
diff --git a/Bachelorarbeit/Stand der Technik.tex b/Bachelorarbeit/V1/Stand der Technik.tex
similarity index 100%
rename from Bachelorarbeit/Stand der Technik.tex
rename to Bachelorarbeit/V1/Stand der Technik.tex
diff --git a/Bachelorarbeit/Umsetzung.tex b/Bachelorarbeit/V1/Umsetzung.tex
similarity index 100%
rename from Bachelorarbeit/Umsetzung.tex
rename to Bachelorarbeit/V1/Umsetzung.tex
diff --git a/Bachelorarbeit/anhang.tex b/Bachelorarbeit/V1/anhang.tex
similarity index 100%
rename from Bachelorarbeit/anhang.tex
rename to Bachelorarbeit/V1/anhang.tex
diff --git a/Bachelorarbeit/ba.acn b/Bachelorarbeit/V1/ba.acn
similarity index 100%
rename from Bachelorarbeit/ba.acn
rename to Bachelorarbeit/V1/ba.acn
diff --git a/Bachelorarbeit/ba.aux b/Bachelorarbeit/V1/ba.aux
similarity index 100%
rename from Bachelorarbeit/ba.aux
rename to Bachelorarbeit/V1/ba.aux
diff --git a/Bachelorarbeit/ba.bbl b/Bachelorarbeit/V1/ba.bbl
similarity index 100%
rename from Bachelorarbeit/ba.bbl
rename to Bachelorarbeit/V1/ba.bbl
diff --git a/Bachelorarbeit/ba.bcf b/Bachelorarbeit/V1/ba.bcf
similarity index 100%
rename from Bachelorarbeit/ba.bcf
rename to Bachelorarbeit/V1/ba.bcf
diff --git a/Bachelorarbeit/ba.blg b/Bachelorarbeit/V1/ba.blg
similarity index 100%
rename from Bachelorarbeit/ba.blg
rename to Bachelorarbeit/V1/ba.blg
diff --git a/Bachelorarbeit/ba.glo b/Bachelorarbeit/V1/ba.glo
similarity index 100%
rename from Bachelorarbeit/ba.glo
rename to Bachelorarbeit/V1/ba.glo
diff --git a/Bachelorarbeit/ba.glsdefs b/Bachelorarbeit/V1/ba.glsdefs
similarity index 100%
rename from Bachelorarbeit/ba.glsdefs
rename to Bachelorarbeit/V1/ba.glsdefs
diff --git a/Bachelorarbeit/ba.ist b/Bachelorarbeit/V1/ba.ist
similarity index 100%
rename from Bachelorarbeit/ba.ist
rename to Bachelorarbeit/V1/ba.ist
diff --git a/Bachelorarbeit/ba.lof b/Bachelorarbeit/V1/ba.lof
similarity index 100%
rename from Bachelorarbeit/ba.lof
rename to Bachelorarbeit/V1/ba.lof
diff --git a/Bachelorarbeit/ba.log b/Bachelorarbeit/V1/ba.log
similarity index 100%
rename from Bachelorarbeit/ba.log
rename to Bachelorarbeit/V1/ba.log
diff --git a/Bachelorarbeit/ba.lot b/Bachelorarbeit/V1/ba.lot
similarity index 100%
rename from Bachelorarbeit/ba.lot
rename to Bachelorarbeit/V1/ba.lot
diff --git a/Bachelorarbeit/ba.nlo b/Bachelorarbeit/V1/ba.nlo
similarity index 100%
rename from Bachelorarbeit/ba.nlo
rename to Bachelorarbeit/V1/ba.nlo
diff --git a/Bachelorarbeit/ba.out b/Bachelorarbeit/V1/ba.out
similarity index 100%
rename from Bachelorarbeit/ba.out
rename to Bachelorarbeit/V1/ba.out
diff --git a/Bachelorarbeit/ba.pdf b/Bachelorarbeit/V1/ba.pdf
similarity index 100%
rename from Bachelorarbeit/ba.pdf
rename to Bachelorarbeit/V1/ba.pdf
diff --git a/Bachelorarbeit/ba.run.xml b/Bachelorarbeit/V1/ba.run.xml
similarity index 100%
rename from Bachelorarbeit/ba.run.xml
rename to Bachelorarbeit/V1/ba.run.xml
diff --git a/Bachelorarbeit/ba.synctex.gz b/Bachelorarbeit/V1/ba.synctex.gz
similarity index 100%
rename from Bachelorarbeit/ba.synctex.gz
rename to Bachelorarbeit/V1/ba.synctex.gz
diff --git a/Bachelorarbeit/ba.tex b/Bachelorarbeit/V1/ba.tex
similarity index 100%
rename from Bachelorarbeit/ba.tex
rename to Bachelorarbeit/V1/ba.tex
diff --git a/Bachelorarbeit/ba.toc b/Bachelorarbeit/V1/ba.toc
similarity index 100%
rename from Bachelorarbeit/ba.toc
rename to Bachelorarbeit/V1/ba.toc
diff --git a/Bachelorarbeit/ba.xdy b/Bachelorarbeit/V1/ba.xdy
similarity index 100%
rename from Bachelorarbeit/ba.xdy
rename to Bachelorarbeit/V1/ba.xdy
diff --git a/Bachelorarbeit/chat_text.tex b/Bachelorarbeit/V1/chat_text.tex
similarity index 100%
rename from Bachelorarbeit/chat_text.tex
rename to Bachelorarbeit/V1/chat_text.tex
diff --git a/Bachelorarbeit/fh_logo.png b/Bachelorarbeit/V1/fh_logo.png
similarity index 100%
rename from Bachelorarbeit/fh_logo.png
rename to Bachelorarbeit/V1/fh_logo.png
diff --git a/Bachelorarbeit/fhacmb.sty b/Bachelorarbeit/V1/fhacmb.sty
similarity index 100%
rename from Bachelorarbeit/fhacmb.sty
rename to Bachelorarbeit/V1/fhacmb.sty
diff --git a/Bachelorarbeit/images/20200501_Time_of_flight.svg.png b/Bachelorarbeit/V1/images/20200501_Time_of_flight.svg.png
similarity index 100%
rename from Bachelorarbeit/images/20200501_Time_of_flight.svg.png
rename to Bachelorarbeit/V1/images/20200501_Time_of_flight.svg.png
diff --git a/Bachelorarbeit/images/AMA-22_dynamic.jpg b/Bachelorarbeit/V1/images/AMA-22_dynamic.jpg
similarity index 100%
rename from Bachelorarbeit/images/AMA-22_dynamic.jpg
rename to Bachelorarbeit/V1/images/AMA-22_dynamic.jpg
diff --git a/Bachelorarbeit/images/AMA-22_static.jpg b/Bachelorarbeit/V1/images/AMA-22_static.jpg
similarity index 100%
rename from Bachelorarbeit/images/AMA-22_static.jpg
rename to Bachelorarbeit/V1/images/AMA-22_static.jpg
diff --git a/Bachelorarbeit/images/Cobots-Forecast-Global-Market-1024x576.jpg b/Bachelorarbeit/V1/images/Cobots-Forecast-Global-Market-1024x576.jpg
similarity index 100%
rename from Bachelorarbeit/images/Cobots-Forecast-Global-Market-1024x576.jpg
rename to Bachelorarbeit/V1/images/Cobots-Forecast-Global-Market-1024x576.jpg
diff --git a/Bachelorarbeit/images/HER-18_freq.png b/Bachelorarbeit/V1/images/HER-18_freq.png
similarity index 100%
rename from Bachelorarbeit/images/HER-18_freq.png
rename to Bachelorarbeit/V1/images/HER-18_freq.png
diff --git a/Bachelorarbeit/images/HER-18_tria.png b/Bachelorarbeit/V1/images/HER-18_tria.png
similarity index 100%
rename from Bachelorarbeit/images/HER-18_tria.png
rename to Bachelorarbeit/V1/images/HER-18_tria.png
diff --git a/Bachelorarbeit/images/HER-18_wave.png b/Bachelorarbeit/V1/images/HER-18_wave.png
similarity index 100%
rename from Bachelorarbeit/images/HER-18_wave.png
rename to Bachelorarbeit/V1/images/HER-18_wave.png
diff --git a/Bachelorarbeit/images/Halterung_Seite.jpg b/Bachelorarbeit/V1/images/Halterung_Seite.jpg
similarity index 100%
rename from Bachelorarbeit/images/Halterung_Seite.jpg
rename to Bachelorarbeit/V1/images/Halterung_Seite.jpg
diff --git a/Bachelorarbeit/images/Halterung_Seite_Oben.jpg b/Bachelorarbeit/V1/images/Halterung_Seite_Oben.jpg
similarity index 100%
rename from Bachelorarbeit/images/Halterung_Seite_Oben.jpg
rename to Bachelorarbeit/V1/images/Halterung_Seite_Oben.jpg
diff --git "a/Bachelorarbeit/images/Halterung_Seite_Oben_schr\303\244g.jpg" "b/Bachelorarbeit/V1/images/Halterung_Seite_Oben_schr\303\244g.jpg"
similarity index 100%
rename from "Bachelorarbeit/images/Halterung_Seite_Oben_schr\303\244g.jpg"
rename to "Bachelorarbeit/V1/images/Halterung_Seite_Oben_schr\303\244g.jpg"
diff --git a/Bachelorarbeit/images/Halterung_Sete_1.jpg b/Bachelorarbeit/V1/images/Halterung_Sete_1.jpg
similarity index 100%
rename from Bachelorarbeit/images/Halterung_Sete_1.jpg
rename to Bachelorarbeit/V1/images/Halterung_Sete_1.jpg
diff --git a/Bachelorarbeit/images/Halterung_Top.jpg b/Bachelorarbeit/V1/images/Halterung_Top.jpg
similarity index 100%
rename from Bachelorarbeit/images/Halterung_Top.jpg
rename to Bachelorarbeit/V1/images/Halterung_Top.jpg
diff --git a/Bachelorarbeit/images/Optomechanical LiDAR.png b/Bachelorarbeit/V1/images/Optomechanical LiDAR.png
similarity index 100%
rename from Bachelorarbeit/images/Optomechanical LiDAR.png
rename to Bachelorarbeit/V1/images/Optomechanical LiDAR.png
diff --git a/Bachelorarbeit/images/Screenshot 2025-02-20 213420.jpg b/Bachelorarbeit/V1/images/Screenshot 2025-02-20 213420.jpg
similarity index 100%
rename from Bachelorarbeit/images/Screenshot 2025-02-20 213420.jpg
rename to Bachelorarbeit/V1/images/Screenshot 2025-02-20 213420.jpg
diff --git a/Bachelorarbeit/images/Screenshot 2025-04-30 165111.png b/Bachelorarbeit/V1/images/Screenshot 2025-04-30 165111.png
similarity index 100%
rename from Bachelorarbeit/images/Screenshot 2025-04-30 165111.png
rename to Bachelorarbeit/V1/images/Screenshot 2025-04-30 165111.png
diff --git a/Bachelorarbeit/images/Sensor_holder_on_UR10e.jpg b/Bachelorarbeit/V1/images/Sensor_holder_on_UR10e.jpg
similarity index 100%
rename from Bachelorarbeit/images/Sensor_holder_on_UR10e.jpg
rename to Bachelorarbeit/V1/images/Sensor_holder_on_UR10e.jpg
diff --git a/Bachelorarbeit/images/Topic_explained.png b/Bachelorarbeit/V1/images/Topic_explained.png
similarity index 100%
rename from Bachelorarbeit/images/Topic_explained.png
rename to Bachelorarbeit/V1/images/Topic_explained.png
diff --git a/Bachelorarbeit/images/UR10e v4.aux b/Bachelorarbeit/V1/images/UR10e v4.aux
similarity index 100%
rename from Bachelorarbeit/images/UR10e v4.aux
rename to Bachelorarbeit/V1/images/UR10e v4.aux
diff --git a/Bachelorarbeit/images/UR10e v4.log b/Bachelorarbeit/V1/images/UR10e v4.log
similarity index 100%
rename from Bachelorarbeit/images/UR10e v4.log
rename to Bachelorarbeit/V1/images/UR10e v4.log
diff --git a/Bachelorarbeit/images/UR10e v4.out b/Bachelorarbeit/V1/images/UR10e v4.out
similarity index 100%
rename from Bachelorarbeit/images/UR10e v4.out
rename to Bachelorarbeit/V1/images/UR10e v4.out
diff --git a/Bachelorarbeit/images/UR10e v4.pdf b/Bachelorarbeit/V1/images/UR10e v4.pdf
similarity index 100%
rename from Bachelorarbeit/images/UR10e v4.pdf
rename to Bachelorarbeit/V1/images/UR10e v4.pdf
diff --git a/Bachelorarbeit/images/UR10e v4.synctex.gz b/Bachelorarbeit/V1/images/UR10e v4.synctex.gz
similarity index 100%
rename from Bachelorarbeit/images/UR10e v4.synctex.gz
rename to Bachelorarbeit/V1/images/UR10e v4.synctex.gz
diff --git a/Bachelorarbeit/images/UR10e v4.tex b/Bachelorarbeit/V1/images/UR10e v4.tex
similarity index 100%
rename from Bachelorarbeit/images/UR10e v4.tex
rename to Bachelorarbeit/V1/images/UR10e v4.tex
diff --git a/Bachelorarbeit/images/UR10e_v4.u3d b/Bachelorarbeit/V1/images/UR10e_v4.u3d
similarity index 100%
rename from Bachelorarbeit/images/UR10e_v4.u3d
rename to Bachelorarbeit/V1/images/UR10e_v4.u3d
diff --git a/Bachelorarbeit/images/VL53L7CX_Package.jpg b/Bachelorarbeit/V1/images/VL53L7CX_Package.jpg
similarity index 100%
rename from Bachelorarbeit/images/VL53L7CX_Package.jpg
rename to Bachelorarbeit/V1/images/VL53L7CX_Package.jpg
diff --git a/Bachelorarbeit/images/Versuchsaufbau_mit_VL53L5CX.jpg b/Bachelorarbeit/V1/images/Versuchsaufbau_mit_VL53L5CX.jpg
similarity index 100%
rename from Bachelorarbeit/images/Versuchsaufbau_mit_VL53L5CX.jpg
rename to Bachelorarbeit/V1/images/Versuchsaufbau_mit_VL53L5CX.jpg
diff --git a/Bachelorarbeit/images/Wiring_Schematic.png b/Bachelorarbeit/V1/images/Wiring_Schematic.png
similarity index 100%
rename from Bachelorarbeit/images/Wiring_Schematic.png
rename to Bachelorarbeit/V1/images/Wiring_Schematic.png
diff --git a/Bachelorarbeit/images/sensormodul.jpg b/Bachelorarbeit/V1/images/sensormodul.jpg
similarity index 100%
rename from Bachelorarbeit/images/sensormodul.jpg
rename to Bachelorarbeit/V1/images/sensormodul.jpg
diff --git a/Bachelorarbeit/images/two_pcd.jpg b/Bachelorarbeit/V1/images/two_pcd.jpg
similarity index 100%
rename from Bachelorarbeit/images/two_pcd.jpg
rename to Bachelorarbeit/V1/images/two_pcd.jpg
diff --git a/Bachelorarbeit/main.pdf b/Bachelorarbeit/V1/main.pdf
similarity index 100%
rename from Bachelorarbeit/main.pdf
rename to Bachelorarbeit/V1/main.pdf
diff --git a/Bachelorarbeit/main.synctex.gz b/Bachelorarbeit/V1/main.synctex.gz
similarity index 100%
rename from Bachelorarbeit/main.synctex.gz
rename to Bachelorarbeit/V1/main.synctex.gz
diff --git a/Bachelorarbeit/pcl_rob_node.py b/Bachelorarbeit/V1/pcl_rob_node.py
similarity index 100%
rename from Bachelorarbeit/pcl_rob_node.py
rename to Bachelorarbeit/V1/pcl_rob_node.py
diff --git a/Bachelorarbeit/ser_test_node.py b/Bachelorarbeit/V1/ser_test_node.py
similarity index 100%
rename from Bachelorarbeit/ser_test_node.py
rename to Bachelorarbeit/V1/ser_test_node.py
diff --git a/Bachelorarbeit/text.tex b/Bachelorarbeit/V1/text.tex
similarity index 100%
rename from Bachelorarbeit/text.tex
rename to Bachelorarbeit/V1/text.tex
diff --git a/Bachelorarbeit/titelangaben.tex b/Bachelorarbeit/V1/titelangaben.tex
similarity index 100%
rename from Bachelorarbeit/titelangaben.tex
rename to Bachelorarbeit/V1/titelangaben.tex
diff --git a/Bachelorarbeit/vl53l7cxhalterung.pdf b/Bachelorarbeit/V1/vl53l7cxhalterung.pdf
similarity index 100%
rename from Bachelorarbeit/vl53l7cxhalterung.pdf
rename to Bachelorarbeit/V1/vl53l7cxhalterung.pdf
diff --git "a/Bachelorarbeit/V2/Abk\303\274rzungen.tex" "b/Bachelorarbeit/V2/Abk\303\274rzungen.tex"
new file mode 100644
index 0000000000000000000000000000000000000000..f421fad7272a605d922855bc66bb4f0d94f10a48
--- /dev/null
+++ "b/Bachelorarbeit/V2/Abk\303\274rzungen.tex"
@@ -0,0 +1,51 @@
+\newacronym{UR}{UR}{Universal Robots}
+\newacronym{HRC}{HRC}{Human-Robot Collaboration}
+\newacronym{Cobot}{Cobot}{kollaborationsfähigen Roboter}
+\newacronym{PC}{PC}{persönlicher Computer}
+\newacronym{ToF}{ToF-Sensor}{Time-of-Flight-Sensor}
+\newacronym{ToFs}{ToF-Sensoren}{Time-of-Flight-Sensoren}
+\newacronym{Cobots}{Cobots}{kollaborationsfähige Roboter}
+\newacronym{RGB-D}{RGB-D}{Rot-Grün-Blau-Tiefen Kamera}
+\newacronym{KI}{KI}{Künstliche Intelligenz}
+\newacronym{LIDAR}{LiDAR}{Light Detection and Ranging}
+\newacronym{RGB}{RGB-Kamera}{Rot-Grün-Blau Kamera}
+\newacronym{LRF}{LRF}{Laser Range Finder}
+\newacronym{MEMS}{MEMS}{Mikro-Elektro-Mechanische Systeme}
+\newacronym{FOV}{FoV}{Field of View}
+\newacronym{FOI}{FoI}{Field of Illumination}
+\newacronym{I2C}{I$^2$C}{Inter-Integrated Circuit}
+\newacronym{IC}{IC}{Integrated Circuit}
+\newacronym{MCU}{MCU}{Microcontroller}
+\newacronym{ROS}{ROS2}{Robot Operating System 2}
+\newacronym{RVIZ}{RVIZ2}{Robot Visualization 2}
+\newacronym{NUC}{NUC}{Next Unit of Computing}
+\newacronym{LPN}{LPn}{Low-Power-Mode-Communication-Enable}
+\newacronym{JSON}{JSON}{JavaScript Object Notation}
+\newacronym{LED}{LED}{Light Emitting Diode}
+\newacronym{USB}{USB}{Universal Serial Bus}
+\newacronym{IMU}{IMU}{Inertial Measurement Unit}
+\newacronym{CNN}{CNN}{Convolutional Neural Network}
+\newacronym{VCSEL}{VCSEL}{Vertical Cavity Surface Emitting Laser}
+\newacronym{DOE}{DOE}{Diffractive Optical Elements}
+\newacronym{SPAD}{SPAD}{single photon avalanche diode}
+\newacronym{HGC}{HGC}{Handgeführte Steuerelemente}
+\newacronym{SSM}{SSM}{Geschwindigkeits- und Trennungsüberwachung}
+\newacronym{PFL}{PFL}{Energie und Kraftbegrenzung}
+\newacronym{BWS}{BWS}{berührungslos wirkende Schutzeinrichtungen}
+\newacronym{SPE}{SPE}{sensitive Schutzeinrichtungen}
+\newacronym{PSD}{PSD}{Einrichtung zur Anwesenheitsmeldung}
+\newacronym{SDA}{SDA}{Serial Data Line}
+\newacronym{SCL}{SCL}{Serial Clock Line}
+\newacronym{PLA}{PLA}{Polylactid}
+\newacronym{GPIO}{GPIO}{General Purpose Input/Output}
+\newacronym{RAM}{RAM}{Random Access Memory}
+\newacronym{TF}{TF}{Transformations-Framework}
+\newacronym{URDF}{URDF}{Unified Robot Description Format}
+\newacronym{PCD}{PCD}{Pointcloud Data}
+\newacronym{UART}{UART}{Universal Asynchronous Receiver/Transmitter}
+\newacronym{PWM}{PWM}{Pulse Width Modulation}
+\newacronym{ADC}{ADC}{Analog-to-Digital Converter}
+\newacronym{SPI}{SPI}{Serial Peripheral Interface}
+\newacronym{VSYS}{VSYS}{System Supply Voltage}
+\newacronym{SRAM}{SRAM}{Static Random Access Memory}
+\newacronym{ARM}{ARM}{Advanced RISC Machine}
diff --git "a/Bachelorarbeit/V2/Assets/Kapitel/K1 - Einf\303\274hrung.tex" "b/Bachelorarbeit/V2/Assets/Kapitel/K1 - Einf\303\274hrung.tex"
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/K2 - Stand der Technik.tex b/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/K2 - Stand der Technik.tex
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/Kapitel/K3 - Grundlagen und Methodik.tex b/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/Kapitel/K3 - Grundlagen und Methodik.tex
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/Kapitel/Kapitel/K4 - Umsetzung.tex b/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/Kapitel/Kapitel/K4 - Umsetzung.tex
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/Kapitel/Kapitel/Kapitel/K5 - Bewertung und Ausblick.tex b/Bachelorarbeit/V2/Assets/Kapitel/Kapitel/Kapitel/Kapitel/Kapitel/K5 - Bewertung und Ausblick.tex
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Bachelorarbeit/V2/Assets/titelangaben.tex b/Bachelorarbeit/V2/Assets/titelangaben.tex
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Bachelorarbeit/V2/BA.bib b/Bachelorarbeit/V2/BA.bib
new file mode 100644
index 0000000000000000000000000000000000000000..a4920be558772f9575c76daa27dd27a81722cc5d
--- /dev/null
+++ b/Bachelorarbeit/V2/BA.bib
@@ -0,0 +1,1308 @@
+@misc{UNR-15,
+	title = {About {Universal} {Robots} - robotics company},
+	url = {https://www.universal-robots.com/media/50889/ur10_de.pdf},
+	urldate = {2025-05-23},
+	file = {About Universal Robots - robotics company:/home/sochi/Zotero/storage/F9GWBPCE/about-us.html:text/html},
+    year = {2015},
+}
+@misc{DIN-13855,
+	address = {Berlin},
+	title = {{DIN} {EN} {ISO} 13855:2022-04, {Sicherheit} von {Maschinen}\_- {Anordnung} von {Schutzeinrichtungen} im {Hinblick} auf {Annäherung} des menschlichen {Körpers} ({ISO}/{DIS}\_13855:2022); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_13855:2022},
+	shorttitle = {{DIN} {EN} {ISO} 13855},
+	url = {https://www.dinmedia.de/de/-/-/349862066},
+	doi = {10.31030/3329034},
+	urldate = {2025-05-23},
+	publisher = {DIN Media GmbH},
+}
+
+@misc{AIC-25,
+	title = {{AiCoBot} – {Intelligente} {Automatisierung} \& kollaborative {Robotik}},
+	url = {https://www.aicobot.de/},
+	abstract = {Jetzt mit AiCoBot automatisieren! Alles rund um kollaborative Robotersysteme, fahrerlose Transportsysteme, Greifer, Schleifkits und Schrauber},
+	language = {de},
+	urldate = {2025-05-23},
+	month = mar,
+	year = {2025},
+	file = {Snapshot:/home/sochi/Zotero/storage/5DVQJFJG/www.aicobot.de.html:text/html},
+}
+
+@misc{JOH-24,
+	title = {{VL53L5}/{L7CX} target status validity},
+	url = {https://community.st.com/t5/imaging-sensors/vl53l5-l7cx-target-status-validity/td-p/700260},
+	abstract = {Hello, the VL53L5CX and L7CX ULD guides mention that a target status of 6 and 9 should be considered with a confidence of 50 \%, while a comment in the api.h file says that  "5 \& 9 means ranging OK", which gives the impression of a higher confidence ration than 50 \%. Therefore, my question is: how to...},
+	language = {en},
+	urldate = {2025-05-18},
+	month = jul,
+    author = {Kvam, John E.},
+	year = {2024},
+	note = {Section: Imaging (sensors)},
+	file = {Snapshot:/home/sochi/Zotero/storage/VUZW5NUW/700260.html:text/html},
+}
+
+@misc{GIT-25,
+	title = {stm32duino/{VL53L7CX}},
+	copyright = {BSD-3-Clause},
+	url = {https://github.com/stm32duino/VL53L7CX},
+	abstract = {Arduino library to support the VL53L7CX Time-of-Flight 8x8 multizone ranging sensor with 90 degrees FoV},
+	urldate = {2025-05-18},
+	publisher = {STM32duino},
+	month = may,
+	year = {2025},
+	note = {original-date: 2023-01-27T14:01:21Z},
+}
+
+@misc{DIN-12464-1,
+	title = {{DIN} {EN} 12464-1:2021-11, {Licht} und {Beleuchtung}\_- {Beleuchtung} von {Arbeitsstätten}\_- {Teil}\_1: {Arbeitsstätten} in {Innenräumen}; {Deutsche} {Fassung} {EN}\_12464-1:2021},
+	shorttitle = {{DIN} {EN} 12464-1},
+	url = {https://www.dinmedia.de/de/-/-/334047306},
+	doi = {10.31030/3233193},
+	urldate = {2025-05-16},
+	publisher = {DIN Media GmbH},
+	file = {PDF:/home/sochi/Zotero/storage/C3AW49XA/DIN EN 12464-12021-11, Licht und Beleuchtung_- Beleuchtung von Arbeitsstätten_- Teil_1 Arbeitsstät.pdf:application/pdf},
+}
+
+@misc{STM-25,
+	title = {{VL53L7CX} {\textbar} {Product} - {STMicroelectronics}},
+	url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
+	abstract = {VL53L7CX - Time-of-Flight (ToF) 8x8 multizone ranging sensor with 90 degrees FoV, VL53L7CXV0GC/1, STMicroelectronics},
+	language = {en},
+	urldate = {2025-05-16},
+	file = {Snapshot:/home/sochi/Zotero/storage/GJSAS6YA/vl53l7cx.html:text/html},
+}
+
+@article{SCH-19,
+	title = {Master of {Science} {Thesis} {TPRMM} 2019 {KTH} {Industrial} {Engineering} and {Management} {Production} {Engineering} {SE}-100 44 {Stockholm}},
+	abstract = {A close collaboration between humans and robots is one approach to achieve flexible production flows and a high degree of automation at the same time. In human-robot collaboration, both entities work alongside each other in a fenceless, shared environment. These workstations combine human flexibility, tactile sense and intelligence with robotic speed, endurance, and accuracy. This leads to improved ergonomic working conditions for the operator, better quality and higher efficiency. However, the widespread adoption of human-robot collaboration is limited by the current safety legislation. Robots are powerful machines and without spatial separation to the operator the risks drastically increase. The technical specification ISO/TS 15066 serves as a guideline for collaborative operations and supplements the international standard ISO 10218 for industrial robots. Because ISO/TS 15066 represents the first draft for a coming standard, companies have to gain knowledge in applying ISO/TS 15066. Currently, the guideline prohibits a collision with the head in transient contact. In this thesis work, a safety system is designed which is in compliance with ISO/TS 15066 and where certified safety technologies are used. Four theoretical safety system designs with a laser scanner as a presence sensing device and a collaborative robot, the KUKA lbr iiwa, are proposed. The system either stops the robot motion, reduces the robot’s speed and then triggers a stop or only activates a stop after a collision between the robot and the human occurred. In system 3 the size of the stop zone is decreased by combining the speed and separation monitoring principle with the power- and force-limiting safeguarding mode. The safety zones are static and are calculated according to the protective separation distance in ISO/TS 15066. A risk assessment is performed to reduce all risks to an acceptable level and lead to the final safety system design after three iterations. 
+As a proof of concept the final safety system design is implemented for a demonstrator in a laboratory environment at Scania. With a feasibility study, the implementation differences between theory and praxis for the four proposed designs are identified and a feasible safety system behavior is developed. The robot reaction is realized through the safety configuration of the robot. There three ESM states are defined to use the internal safety functions of the robot and to integrate the laser scanner signal. The laser scanner is connected as a digital input to the discrete safety interface of the robot controller. To sum up, this thesis work describes the safety system design with all implementation details.},
+	language = {en},
+	author = {Schaffert, Carolin},
+	file = {PDF:/home/sochi/Zotero/storage/9IJFXE6P/Schaffert - Master of Science Thesis TPRMM 2019 KTH Industrial Engineering and Management Production Engineering.pdf:application/pdf},
+}
+
+@article{FIS-23,
+	title = {Sichere roboterbasierte {Produktion}: {Trends} und {Revisionen} in {Europäischen} {Normen} und {Richtlinien}},
+	volume = {140},
+	issn = {1613-7620},
+	shorttitle = {Sichere roboterbasierte {Produktion}},
+	url = {https://doi.org/10.1007/s00502-023-01155-z},
+	doi = {10.1007/s00502-023-01155-z},
+	abstract = {Sicherheit, also Personen- sowie Informationssicherheit, ist eine der wichtigsten Anforderungen an jede Maschine und Anlage. Die Maschinenrichtlinie 2006/42/EG regelt das Inverkehrbringen von sicheren vollständigen und unvollständigen Maschinen. Ende Juni 2023 wurde die EU-weite Maschinenverordnung veröffentlicht, die die Maschinenrichtlinie in Zukunft ersetzen wird. Diese Neuerung ist für alle Maschinenbereiche von Relevanz. Speziell für die Robotik herrscht aktuell außerdem eine starke Revision im Bereich der Normierung. Zum einen wurde die ISO 13849, die meist verbreitete ISO-Norm für funktionale Sicherheit, erneuert. Zum anderen wird die ISO 10218, jene Norm, die die Anforderungen für den Entwurf von Industrierobotern und die Integration von deren Applikationen regelt, überarbeitet. Dieser Beitrag gibt einen Einblick in die aktuellen Trends und Revisionen der europäischen Normen und Richtlinien, die für stationäre Industrieroboter und deren Applikationen relevant sind.},
+	language = {de},
+	urldate = {2025-05-13},
+	journal = {e \& i Elektrotechnik und Informationstechnik},
+	author = {Fischer, Clara and Haspl, Thomas and Rathmair, Michael and Schlund, Sebastian},
+	month = oct,
+	year = {2023},
+	keywords = {Guidelines, Leitfäden, Machinery directive, Machinery regulation, Maschinenrichtlinie, Maschinenverordnung, Robot safety, Robot safety standards, Robotersicherheit, Robotersicherheitsnormen},
+	file = {Full Text PDF:/home/sochi/Zotero/storage/5XCTNVNM/Fischer et al. - 2023 - Sichere roboterbasierte Produktion Trends und Revisionen in Europäischen Normen und Richtlinien.pdf:application/pdf},
+}
+
+@article{haddadin_robot_2017,
+	title = {Robot Collisions: A Survey on Detection, Isolation, and Identification},
+	volume = {33},
+	issn = {1941-0468},
+	url = {https://ieeexplore.ieee.org/abstract/document/8059840},
+	doi = {10.1109/TRO.2017.2723903},
+	shorttitle = {Robot Collisions},
+	abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.},
+	number = {6},
+	journaltitle = {{IEEE} Transactions on Robotics},
+	author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin},
+	urldate = {2025-02-12},
+	date = {2017-12},
+	note = {Conference Name: {IEEE} Transactions on Robotics},
+	keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots},
+	file = {Accepted Version:C\:\\Users\\Rene\\Zotero\\storage\\IEXJFAMF\\Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\LDB3Q92K\\8059840.html:text/html},
+}
+
+@book{hertzberg_mobile_2012,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	rights = {https://www.springernature.com/gp/researchers/text-and-data-mining},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	langid = {german},
+	doi = {10.1007/978-3-642-01726-1},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\RLTU9P46\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf},
+}
+
+@misc{CHA-25,
+	title = {Selection and comparison of sensors for collision avoidance in collaborative robots},
+	author = {Chao Xu},
+	date = {2025},
+	langid = {english},
+}
+
+@inproceedings{HEL-16,
+	title = {Robot operating system: A modular software framework for automated driving},
+	url = {https://ieeexplore.ieee.org/abstract/document/7795766},
+	doi = {10.1109/ITSC.2016.7795766},
+	shorttitle = {Robot operating system},
+	abstract = {Automated vehicles are complex systems with a high degree of interdependencies between its components. This complexity sets increasing demands for the underlying software framework. This paper firstly analyzes the requirements for software frameworks. Afterwards an overview on existing software frameworks, that have been used for automated driving projects, is provided with an in-depth introduction into an emerging open-source software framework, the Robot Operating System ({ROS}). After discussing the main features, advantages and disadvantages of {ROS}, the communication overhead of {ROS} is analyzed quantitatively in various configurations showing its applicability for systems with a high data load.},
+	eventtitle = {2016 {IEEE} 19th International Conference on Intelligent Transportation Systems ({ITSC})},
+	booktitle = {2016 {IEEE} 19th International Conference on Intelligent Transportation Systems ({ITSC})},
+	author = {Hellmund, André-Marcel and Wirges, Sascha and Taş, Ömer Şahin and Bandera, Claudio and Salscheider, Niels Ole},
+	urldate = {2025-04-29},
+	date = {2016-11},
+	note = {{ISSN}: 2153-0017},
+	keywords = {C++ languages, Operating systems, Pipeline processing, Real-time systems, Systems architecture, Vehicles},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\8L63BFMT\\Hellmund et al. - 2016 - Robot operating system A modular software framework for automated driving.pdf:application/pdf},
+}
+
+@misc{GPA-16,
+ author = {Pr{\"u}fungssekretariat},
+ year = {2016},
+ title = {Gestaltungsrichtlinie f{\"u}r Projekt- und Abschlussarbeiten},
+ file = {layout-richtlinie-2016-04-15:Attachments/layout-richtlinie-2016-04-15.pdf:application/pdf}
+}
+
+@collection{HER-18,
+	location = {Wiesbaden},
+	title = {Sensoren in Wissenschaft und Technik},
+	rights = {http://www.springer.com/tdm},
+	isbn = {978-3-658-12562-2},
+	url = {http://link.springer.com/10.1007/978-3-658-12562-2},
+	publisher = {Springer Fachmedien Wiesbaden},
+	editor = {Hering, Ekbert and Schönfelder, Gert},
+	urldate = {2025-02-12},
+	date = {2018},
+	langid = {german},
+	doi = {10.1007/978-3-658-12562-2},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\9TI57WXD\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf},
+}
+
+@article{saudabayev_sensors_2015,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	note = {Conference Name: {IEEE} Access},
+	keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HR7ZUF8W\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\484D4R7H\\7283549.html:text/html},
+}
+
+@article{paya_state---art_2017,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	rights = {Copyright © 2017 L. Payá et al.},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+	langid = {english},
+	note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\EZ473NGD\\Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\86LDAQ62\\3497650.html:text/html},
+}
+
+@online{STM-24,
+	title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}},
+	url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
+    publisher = {STMicroelectronics},
+	abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}},
+	urldate = {2025-02-12},
+    date = {2024-09-16},
+	langid = {english},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\VEYLCCLA\\vl53l7cx.html:text/html},
+}
+
+@online{RAS-24,
+	title = {Pico-series Microcontrollers - Raspberry Pi Documentation},
+	url = {https://github.com/raspberrypi/documentation/blob/develop/documentation/asciidoc/microcontrollers/pico-series/about_pico.adoc},
+    publisher = {Raspberry},
+	abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
+	urldate = {2025-02-12},
+    date = {2024-12},
+	langid = {english},
+}
+
+@online{noauthor_chatgpt_nodate,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational {AI} system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\ZT8MG8Y4\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
+}
+
+@software{iii_earlephilhowerarduino-pico_2025,
+	title = {earlephilhower/arduino-pico},
+	rights = {{LGPL}-2.1},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
+	author = {Philhower, III, Earle F.},
+	urldate = {2025-02-12},
+	date = {2025-02-11},
+	note = {original-date: 2021-02-25T04:20:27Z},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@online{ORO-25,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+    publisher = {Open Robotics},
+    date = {2025},
+	file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\Rene\\Zotero\\storage\\28S5GUZ5\\Tutorials.html:text/html},
+}
+
+@online{noauthor_examples_nodate,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+	file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\Rene\\Zotero\\storage\\82WA6KM7\\examples.html:text/html},
+}
+
+@software{grans_sebastiangransros2-point-cloud-demo_2024,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	rights = {{MIT}},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12-08},
+	note = {original-date: 2020-06-30T16:55:21Z},
+}
+
+@article{wunderlich_rasante_2013,
+	title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
+	volume = {8},
+	rights = {http://onlinelibrary.wiley.com/{termsAndConditions}\#vor},
+	issn = {1863-1460, 2191-1975},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
+	shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
+	abstract = {Abstract
+            Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	number = {3},
+	journaltitle = {Optik \& Photonik},
+	shortjournal = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	urldate = {2025-02-18},
+	date = {2013-09},
+	langid = {german},
+	file = {Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\Rene\\Zotero\\storage\\H7CSUHLW\\Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf},
+}
+
+@article{LI-19,
+	title = {Common Sensors in Industrial Robots: A Review},
+	volume = {1267},
+	issn = {1742-6588, 1742-6596},
+	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
+	doi = {10.1088/1742-6596/1267/1/012036},
+	shorttitle = {Common Sensors in Industrial Robots},
+	abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	journaltitle = {Journal of Physics: Conference Series},
+	shortjournal = {J. Phys.: Conf. Ser.},
+	author = {Li, Peng and Liu, Xiangpeng},
+	urldate = {2025-02-18},
+	date = {2019-07-01},
+	langid = {english},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\UVXS2R7J\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
+}
+
+@misc{DIN-10218-1,
+	title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
+	shorttitle = {{DIN} {EN} {ISO} 10218-1},
+}
+
+@misc{DIN-10218-2,
+	title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
+	shorttitle = {{DIN} {EN} {ISO} 10218-2},
+    doi = {10.31030/1626163},
+}
+
+@book{hertzberg_mobile_2012-1,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	doi = {10.1007/978-3-642-01726-1},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\4LFEHVEK\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf},
+}
+
+@collection{hering_sensoren_2018-1,
+	location = {Wiesbaden},
+	title = {Sensoren in Wissenschaft und Technik},
+	isbn = {978-3-658-12561-5 978-3-658-12562-2},
+	url = {http://link.springer.com/10.1007/978-3-658-12562-2},
+	publisher = {Springer Fachmedien Wiesbaden},
+	editor = {Hering, Ekbert and Schönfelder, Gert},
+	urldate = {2025-02-12},
+	date = {2018},
+	doi = {10.1007/978-3-658-12562-2},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\BG7FCKRW\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf},
+}
+
+@article{saudabayev_sensors_2015-1,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\CDE3NZ3S\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\BEAGLR7C\\7283549.html:text/html},
+}
+
+@article{paya_state---art_2017-1,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\G2QJUK53\\Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\PSAFIKFD\\3497650.html:text/html},
+}
+
+@online{noauthor_chatgpt_nodate-1,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational {AI} system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\R25VDLY2\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
+}
+
+@software{iii_earlephilhowerarduino-pico_2025-1,
+	title = {earlephilhower/arduino-pico},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
+	author = {Philhower, III, Earle F.},
+	urldate = {2025-02-12},
+	date = {2025-02-11},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@online{noauthor_tutorials_nodate-1,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+	file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\Rene\\Zotero\\storage\\HQ4G28QE\\Tutorials.html:text/html},
+}
+
+@online{noauthor_examples_nodate-1,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+	file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\Rene\\Zotero\\storage\\SURMD6VT\\examples.html:text/html},
+}
+
+@software{grans_sebastiangransros2-point-cloud-demo_2024-1,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12-08},
+}
+
+@article{wunderlich_rasante_2013-1,
+	title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
+	volume = {8},
+	issn = {1863-1460, 2191-1975},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
+	shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
+	abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	number = {3},
+	journaltitle = {Optik \& Photonik},
+	shortjournal = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	urldate = {2025-02-18},
+	date = {2013-09},
+	file = {Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\Rene\\Zotero\\storage\\JZJDENIL\\Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf},
+}
+
+@misc{ISO-8373,
+	title = {Robotics - Vocabulary},
+	url = {https://www.dinmedia.de/de/norm/iso-8373/348036781},
+	shorttitle = {{ISO} 8373:2021-11},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	date = {2021-11},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\6SUCZU6R\\DIN_EN_ISO_8373.pdf:application/pdf},
+}
+
+@inproceedings{POP-17,
+	location = {Lisbon},
+	title = {Collision detection, localization \& classification for industrial robots with joint torque sensors},
+	isbn = {978-1-5386-3518-6},
+	url = {http://ieeexplore.ieee.org/document/8172400/},
+	doi = {10.1109/ROMAN.2017.8172400},
+	abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.},
+	eventtitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	publisher = {{IEEE}},
+	author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos},
+	urldate = {2025-02-19},
+	date = {2017-08},
+	langid = {english},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\LVC2B7U6\\Popov et al. - 2017 - Collision detection, localization & classification for industrial robots with joint torque sensors.pdf:application/pdf},
+}
+
+@misc{DIN-15066,
+	title = {{DIN} {ISO}/{TS} 15066:2017-04, Roboter und Robotikgeräte - Kollaborierende Roboter ({ISO}/{TS} 15066:2016)},
+	date = {2017},
+}
+
+@article{LIU-24,
+	title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review},
+	volume = {40},
+	issn = {1044-7318},
+	url = {https://doi.org/10.1080/10447318.2022.2041907},
+	doi = {10.1080/10447318.2022.2041907},
+	shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing},
+	abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.},
+	journaltitle = {International Journal of Human–Computer Interaction},
+	author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.},
+	urldate = {2025-02-19},
+	date = {2024-02-16},
+	note = {Publisher: Taylor \& Francis
+\_eprint: https://doi.org/10.1080/10447318.2022.2041907},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\G9ECNMWG\\Liu et al. - 2024 - Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing.pdf:application/pdf},
+}
+
+@article{maheepala_low_2021,
+	title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review},
+	volume = {21},
+	issn = {1558-1748},
+	url = {https://ieeexplore.ieee.org/abstract/document/9165781},
+	doi = {10.1109/JSEN.2020.3015932},
+	shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices},
+	abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.},
+	number = {2},
+	journaltitle = {{IEEE} Sensors Journal},
+	author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.},
+	urldate = {2025-02-19},
+	date = {2021-01},
+	note = {Conference Name: {IEEE} Sensors Journal},
+	keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\XSY3V6PK\\Maheepala et al. - 2021 - Low Power Processors and Image Sensors for Vision-Based IoT Devices A Review.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\Y7EV2L8T\\9165781.html:text/html},
+}
+
+@article{nath_review_2022,
+	title = {A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector},
+	volume = {7},
+	rights = {Copyright (c) 2022 Aditya S.  Nath},
+	issn = {2736-576X},
+	url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624},
+	doi = {10.24018/ejeng.2022.7.1.2624},
+	abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling ({BIM}) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D {BIM} and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate {BIM} and sensor technology with tools such as {GIS}. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.},
+	number = {1},
+	journaltitle = {European Journal of Engineering and Technology Research},
+	author = {Nath, Aditya S.},
+	urldate = {2025-02-19},
+	date = {2022-02-28},
+	langid = {english},
+	keywords = {Sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\AA6ZJJBN\\Nath - 2022 - A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector.pdf:application/pdf},
+}
+
+@online{XIA-23,
+	title = {Can the collaborative robot market experience a second growth surge in the post-pandemic era?},
+	url = {https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/},
+	abstract = {The market for collaborative robots is forecast to show strong growth over the coming years, with rising sales in industrial and non-industrial sectors.},
+	titleaddon = {Interact Analysis},
+	urldate = {2025-02-19},
+	date = {2023-03},
+	author = {Xiao, Maya},
+	langid = {british},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\25UG57J5\\can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era.html:text/html},
+}
+
+@article{LI-24,
+	title = {Safe human–robot collaboration for industrial settings: a survey},
+	issn = {1572-8145},
+	url = {https://doi.org/10.1007/s10845-023-02159-4},
+	doi = {10.1007/s10845-023-02159-4},
+	shorttitle = {Safe human–robot collaboration for industrial settings},
+	abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.},
+	journaltitle = {Journal of Intelligent Manufacturing},
+	shortjournal = {J Intell Manuf},
+	author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong},
+	urldate = {2025-02-19},
+	date = {2024-06-01},
+	langid = {english},
+	keywords = {Collision detection, Collaborative robots, Human–robot collaboration ({HRC}), Obstacle avoidance, Safety},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\4JS4CSVA\\Li et al. - 2024 - Safe human–robot collaboration for industrial settings a survey.pdf:application/pdf},
+}
+
+@inproceedings{amaya-mejia_vision-based_2022,
+	title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9981689},
+	doi = {10.1109/IROS47612.2022.9981689},
+	abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.},
+	eventtitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol},
+	urldate = {2025-02-19},
+	date = {2022-10},
+	note = {{ISSN}: 2153-0866},
+	keywords = {Collision avoidance, Service robots, Safety, Collaboration, Robot control, Solid modeling, Three-dimensional displays},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\XX9FL2U5\\Amaya-Mejía et al. - 2022 - Vision-Based Safety System for Barrierless Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\29LFGV4B\\9981689.html:text/html},
+}
+
+@inproceedings{choi_xr-based_2022,
+	title = {An {XR}-based Approach to Safe Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9757621},
+	doi = {10.1109/VRW55335.2022.00106},
+	abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.},
+	eventtitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin},
+	urldate = {2025-02-19},
+	date = {2022-03},
+	keywords = {Real-time systems, Robot sensing systems, Service robots, Safety, Collaboration, Three-dimensional displays, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), safety distance},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\VYUDN5LQ\\Choi et al. - 2022 - An XR-based Approach to Safe Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\GDI6TZQ2\\9757621.html:text/html},
+}
+
+@inproceedings{ALN-22,
+	title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment},
+	url = {https://ieeexplore.ieee.org/document/9900548},
+	doi = {10.1109/RO-MAN53752.2022.9900548},
+	abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.},
+	eventtitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen},
+	urldate = {2025-02-19},
+	date = {2022-08},
+	note = {{ISSN}: 1944-9437},
+	keywords = {Service robots, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Stability criteria, Thermal sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\Q933FYY2\\Al Naser et al. - 2022 - Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a r.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\942BAXF5\\9900548.html:text/html},
+}
+
+@inproceedings{RAS-20,
+	title = {Local and Global Sensors for Collision Avoidance},
+	url = {https://ieeexplore.ieee.org/document/9235223},
+	doi = {10.1109/MFI49285.2020.9235223},
+	abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° {LiDAR} and a small range {RGB} camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. Results suggest higher efficiency and safety when using local and global setup.},
+	eventtitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})},
+	booktitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})},
+	author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias},
+	urldate = {2025-02-19},
+	date = {2020-09},
+	keywords = {Service robots, Safety, Sensor fusion, Cameras, Laser radar, Production, Robot vision systems},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HAXPN6EL\\Rashid et al. - 2020 - Local and Global Sensors for Collision Avoidance.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\4X42Y6TK\\9235223.html:text/html},
+}
+
+@article{JAI-03,
+	title = {A survey of Laser Range Finding},
+	url = {http://www.siddjain.com/ee236a.pdf},
+	abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.},
+	author = {Jain, Siddharth},
+	urldate = {2025-02-19},
+	date = {2003-12-02},
+	langid = {english},
+}
+
+@online{noauthor_file20200501_2020,
+	title = {File:20200501 Time of flight.svg - Wikipedia},
+	url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg},
+	shorttitle = {File},
+	urldate = {2025-02-20},
+	date = {2020-05-01},
+	langid = {english},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\H7EUEBHT\\File20200501_Time_of_flight.html:text/html},
+}
+
+@article{RAJ-20,
+	title = {A Survey on {LiDAR} Scanning Mechanisms},
+	volume = {9},
+	rights = {http://creativecommons.org/licenses/by/3.0/},
+	issn = {2079-9292},
+	url = {https://www.mdpi.com/2079-9292/9/5/741},
+	doi = {10.3390/electronics9050741},
+	abstract = {In recent years, light detection and ranging ({LiDAR}) technology has gained huge popularity in various applications such as navigation, robotics, remote sensing, and advanced driving assistance systems ({ADAS}). This popularity is mainly due to the improvements in {LiDAR} performance in terms of range detection, accuracy, power consumption, as well as physical features such as dimension and weight. Although a number of literatures on {LiDAR} technology have been published earlier, not many has been reported on the state-of-the-art {LiDAR} scanning mechanisms. The aim of this article is to review the scanning mechanisms employed in {LiDAR} technology from past research works to the current commercial products. The review highlights four commonly used mechanisms in {LiDAR} systems: Opto-mechanical, electromechanical, micro-electromechanical systems ({MEMS}), and solid-state scanning. The study reveals that electro-mechanical scanning is the most prominent technology in use today. The commercially available 1D time of flight ({TOF}) {LiDAR} instrument is currently the most attractive option for conversion from 1D to 3D {LiDAR} system, provided that low scanning rate is not an issue. As for applications with low size, weight, and power ({SWaP}) requirements, {MEMS} scanning is found to be the better alternative. {MEMS} scanning is by far the more matured technology compared to solid-state scanning and is currently given great emphasis to increase its robustness for fulfilling the requirements of {ADAS} applications. Finally, solid-state {LiDAR} systems are expected to fill in the gap in {ADAS} applications despite the low technology readiness in comparison to {MEMS} scanners. However, since solid-state scanning is believed to have superior robustness, field of view ({FOV}), and scanning rate potential, great efforts are given by both academics and industries to further develop this technology.},
+	journaltitle = {Electronics},
+	author = {Raj, Thinal and Hashim, Fazida Hanim and Huddin, Aqilah Baseri and Ibrahim, Mohd Faisal and Hussain, Aini},
+	urldate = {2025-02-20},
+	date = {2020-05},
+	langid = {english},
+}
+
+@article{SUR-03,
+	title = {An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of indoor environments},
+	volume = {45},
+	issn = {0921-8890},
+	url = {https://www.sciencedirect.com/science/article/pii/S0921889003001556},
+	doi = {10.1016/j.robot.2003.09.004},
+	abstract = {Digital 3D models of the environment are needed in rescue and inspection robotics, facility managements and architecture. This paper presents an automatic system for gaging and digitalization of 3D indoor environments. It consists of an autonomous mobile robot, a reliable 3D laser range finder and three elaborated software modules. The first module, a fast variant of the Iterative Closest Points algorithm, registers the 3D scans in a common coordinate system and relocalizes the robot. The second module, a next best view planner, computes the next nominal pose based on the acquired 3D data while avoiding complicated obstacles. The third module, a closed-loop and globally stable motor controller, navigates the mobile robot to a nominal pose on the base of odometry and avoids collisions with dynamical obstacles. The 3D laser range finder acquires a 3D scan at this pose. The proposed method allows one to digitalize large indoor environments fast and reliably without any intervention and solves the {SLAM} problem. The results of two 3D digitalization experiments are presented using a fast octree-based visualization method.},
+	journaltitle = {Robotics and Autonomous Systems},
+	shortjournal = {Robotics and Autonomous Systems},
+	author = {Surmann, Hartmut and Nüchter, Andreas and Hertzberg, Joachim},
+	urldate = {2025-02-20},
+	date = {2003-12-31},
+	keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, {SLAM}},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\BKNJW2B7\\Surmann et al. - 2003 - An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of ind.pdf:application/pdf;ScienceDirect Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\H82LXSD3\\S0921889003001556.html:text/html},
+}
+
+@article{NIC-12,
+	title = {Design and characterization of a 256x64-pixel single-photon imager in {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor},
+	volume = {20},
+	rights = {© 2012 {OSA}},
+	issn = {1094-4087},
+	url = {https://opg.optica.org/oe/abstract.cfm?uri=oe-20-11-11863},
+	doi = {10.1364/OE.20.011863},
+	abstract = {We introduce an optical time-of-flight image sensor taking advantage of a {MEMS}-based laser scanning device. Unlike previous approaches, our concept benefits from the high timing resolution and the digital signal flexibility of single-photon pixels in {CMOS} to allow for a nearly ideal cooperation between the image sensor and the scanning device. This technique enables a high signal-to-background light ratio to be obtained, while simultaneously relaxing the constraint on size of the {MEMS} mirror. These conditions are critical for devising practical and low-cost depth sensors intended to operate in uncontrolled environments, such as outdoors. A proof-of-concept prototype capable of operating in real-time was implemented. This paper focuses on the design and characterization of a 256x64-pixel image sensor, which also comprises an event-driven readout circuit, an array of 64 row-level high-throughput time-to-digital converters, and a 16Gbit/s global readout circuit. Quantitative evaluation of the sensor under 2klux of background light revealed a repeatability error of 13.5cm throughout the distance range of 20 meters.},
+	journaltitle = {Optics Express},
+	shortjournal = {Opt. Express, {OE}},
+	author = {Niclass, Cristiano and Ito, Kota and Soga, Mineki and Matsubara, Hiroyuki and Aoyagi, Isao and Kato, Satoru and Kagami, Manabu},
+	urldate = {2025-02-20},
+	date = {2012-05-21},
+	note = {Publisher: Optica Publishing Group},
+	keywords = {Image sensors, Deformable mirrors, Diode lasers, Light emitting diodes, Optical systems, Systems design},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\BZWW7BVY\\Niclass et al. - 2012 - Design and characterization of a 256x64-pixel single-photon imager in CMOS for a MEMS-based laser sc.pdf:application/pdf},
+}
+
+@software{SPA-25,
+	title = {sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library},
+	url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library},
+	publisher = {{SparkFun} Electronics},
+	urldate = {2025-02-21},
+	date = {2025-01-28},
+	note = {original-date: 2021-10-22T21:06:36Z},
+}
+
+@online{ADI-14,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher},
+	urldate = {2025-02-21},
+	author = {Kamath, Aditya},
+	date = {2023-12-14},
+	file = {tof_imager_micro_ros/teensy_pcl_publisher at humble · adityakamath/tof_imager_micro_ros · GitHub:C\:\\Users\\Rene\\Zotero\\storage\\TEVM2A5B\\teensy_pcl_publisher.html:text/html},
+}
+
+@online{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177},
+	urldate = {2025-02-21},
+	file = {tof_imager_micro_ros/teensy_pcl_publisher/teensy_pcl_publisher.ino at humble · adityakamath/tof_imager_micro_ros · GitHub:C\:\\Users\\Rene\\Zotero\\storage\\PYV8KTSC\\teensy_pcl_publisher.html:text/html},
+}
+
+@article{VOG-23,
+	title = {Von Industrie 4.0 zu Industrie 5.0 – Idee, Konzept und Wahrnehmung},
+	volume = {60},
+	issn = {2198-2775},
+	url = {https://doi.org/10.1365/s40702-023-01002-x},
+	doi = {10.1365/s40702-023-01002-x},
+	abstract = {In der sich rasant entwickelnden Landschaft der industriellen Automatisierung läutet das Aufkommen von Industrie 5.0 (I5.0) einen Paradigmenwechsel hin zu einem stärker kollaborativen und menschzentrierten Ansatz ein. In diesem Beitrag wird die Rolle der Mensch-Maschine-Kollaboration und menschzentrierter Werkzeuge bei der Förderung einer symbiotischen Beziehung zwischen fortschrittlichen Technologien und menschlichen Benutzern untersucht, um so das volle Potenzial von I5.0 zu erschließen. Als nächste Stufe in der Entwicklung des Produktionssektors zielt I5.0 darauf ab, ein Gleichgewicht zwischen Automatisierung und menschlichen Fähigkeiten herzustellen und die sich ergänzenden Stärken beider zu nutzen. Es werden Technologien vorgestellt, welche menschzentrierte Lösungen zur Steigerung von Produktivität, Flexibilität und Nachhaltigkeit in der Fabrik der Zukunft fokussieren.},
+	journaltitle = {{HMD} Praxis der Wirtschaftsinformatik},
+	shortjournal = {{HMD}},
+	author = {Vogel-Heuser, Birgit and Bengler, Klaus},
+	urldate = {2025-04-16},
+	date = {2023-12-01},
+	langid = {german},
+	keywords = {Cyber-Physical Production Systems, Cyber-physische Produktionssysteme, Fabrik der Zukunft, Factory of the Future, Human-Centered Automation, Human-Machine Collaboration, Industrie 4.0, Industrie 5.0, Industry 4.0, Industry 5.0, Mensch-Maschine-Kollaboration, Menschzentrierte Automatisierung},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\U6RG6RSY\\Vogel-Heuser and Bengler - 2023 - Von Industrie 4.0 zu Industrie 5.0 – Idee, Konzept und Wahrnehmung.pdf:application/pdf},
+}
+
+@software{UR-25,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-24},
+	date = {2025-02-17},
+}
+
+@online{noauthor_tutorials_nodate-2,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+}
+
+@software{grans_sebastiangransros2-point-cloud-demo_2024-2,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12-08},
+}
+
+@online{noauthor_examples_nodate-2,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+}
+
+@software{iii_earlephilhowerarduino-pico_2025-2,
+	title = {earlephilhower/arduino-pico},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
	author = {Philhower, III, Earle F.},
+	urldate = {2025-02-12},
+	date = {2025-02-11},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@online{noauthor_chatgpt_nodate-2,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational {AI} system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+}
+
+@article{paya_state---art_2017-2,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+}
+
+@article{saudabayev_sensors_2015-2,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors},
+}
+
+@collection{hering_sensoren_2018-2,
+	location = {Wiesbaden},
+	title = {Sensoren in Wissenschaft und Technik},
+	isbn = {978-3-658-12561-5 978-3-658-12562-2},
+	url = {http://link.springer.com/10.1007/978-3-658-12562-2},
+	publisher = {Springer Fachmedien Wiesbaden},
+	editor = {Hering, Ekbert and Schönfelder, Gert},
+	urldate = {2025-02-12},
+	date = {2018},
+	doi = {10.1007/978-3-658-12562-2},
+}
+
+@article{wunderlich_rasante_2013-2,
	title = {Rasante Entwicklung in der 3D-Bildgebung: Weiterentwickelte Time-of-Flight-Technologie verbessert miniaturisierte 3D-Kameras und Sensoren},
+	volume = {8},
+	issn = {1863-1460, 2191-1975},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
	shorttitle = {Rasante Entwicklung in der 3D-Bildgebung},
	abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch-Maschinen-Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	number = {3},
+	journaltitle = {Optik \& Photonik},
+	shortjournal = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	urldate = {2025-02-18},
+	date = {2013-09},
+}
+
+@book{hertzberg_mobile_2012-2,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	doi = {10.1007/978-3-642-01726-1},
+}
+
+@article{liu_application_2024-1,
+	title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review},
+	volume = {40},
+	issn = {1044-7318},
+	url = {https://doi.org/10.1080/10447318.2022.2041907},
+	doi = {10.1080/10447318.2022.2041907},
+	shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing},
+	abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.},
+	number = {4},
+	journaltitle = {International Journal of Human–Computer Interaction},
+	author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.},
+	urldate = {2025-02-19},
+	date = {2024-02-16},
+}
+
+@article{maheepala_low_2021-1,
+	title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review},
+	volume = {21},
+	issn = {1558-1748},
+	url = {https://ieeexplore.ieee.org/abstract/document/9165781},
+	doi = {10.1109/JSEN.2020.3015932},
+	shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices},
+	abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.},
+	number = {2},
+	journaltitle = {{IEEE} Sensors Journal},
+	author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.},
+	urldate = {2025-02-19},
+	date = {2021-01},
+	keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers},
+}
+
+@inproceedings{popov_collision_2017-1,
+	location = {Lisbon},
+	title = {Collision detection, localization \& classification for industrial robots with joint torque sensors},
+	isbn = {978-1-5386-3518-6},
+	url = {http://ieeexplore.ieee.org/document/8172400/},
+	doi = {10.1109/ROMAN.2017.8172400},
+	abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.},
+	eventtitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	publisher = {{IEEE}},
+	author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos},
+	urldate = {2025-02-19},
+	date = {2017-08},
+}
+
@online{jain_survey_nodate-1,
+	title = {A survey of Laser Range Finding},
+	url = {http://www.siddjain.com/ee236a.pdf},
+	abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.},
+	author = {Jain, Siddharth},
+	urldate = {2025-02-19},
+}
+
+@inproceedings{AMA-22,
+	title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9981689},
+	doi = {10.1109/IROS47612.2022.9981689},
+	abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.},
+	eventtitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol},
+	urldate = {2025-02-19},
+	date = {2022-10},
+	keywords = {Collision avoidance, Service robots, Safety, Collaboration, Robot control, Solid modeling, Three-dimensional displays},
+}
+
+@inproceedings{choi_xr-based_2022-1,
+	title = {An {XR}-based Approach to Safe Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9757621},
+	doi = {10.1109/VRW55335.2022.00106},
+	abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.},
+	eventtitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin},
+	urldate = {2025-02-19},
+	date = {2022-03},
+	keywords = {Real-time systems, Robot sensing systems, Service robots, Safety, Collaboration, Three-dimensional displays, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), safety distance},
+}
+
+@inproceedings{al_naser_fusion_2022-1,
+	title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment},
+	url = {https://ieeexplore.ieee.org/document/9900548},
+	doi = {10.1109/RO-MAN53752.2022.9900548},
+	abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.},
+	eventtitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen},
+	urldate = {2025-02-19},
+	date = {2022-08},
+	keywords = {Service robots, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Stability criteria, Thermal sensors},
+}
+
+@online{RCR-20,
	title = {File:20200501 Time of flight.svg - Wikimedia Commons},
	url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg},
	shorttitle = {20200501 Time of flight.svg},
+	urldate = {2025-02-20},
	author = {RCraig09},
+	date = {2020-05-01},
+}
+
+@online{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate-1,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177},
+	urldate = {2025-02-21},
+}
+
+@online{IID-25,
+	title = {{IIDEA} - Inklusion und Integration durch Cobots auf dem ersten Arbeitsmarkt - {RWTH} {AACHEN} {UNIVERSITY} {IGMR} - Deutsch},
+	url = {https://www.igmr.rwth-aachen.de/cms/igmr/forschung/projekte/aktuelle-projekte/~baxrrf/iidea/},
+	urldate = {2025-04-16},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\MHVTD38V\\undefined:text/html},
+}
+
+@incollection{mcgrath_sensing_2013,
+	location = {Berkeley, {CA}},
+	title = {Sensing and Sensor Fundamentals},
+	isbn = {978-1-4302-6014-1},
+	url = {https://doi.org/10.1007/978-1-4302-6014-1_2},
+	abstract = {Sensors utilize a wide spectrum of transducer and signal transformation approaches with corresponding variations in technical complexity. These range from relatively simple temperature measurement based on a bimetallic thermocouple, to the detection of specific bacteria species using sophisticated optical systems. Within the healthcare, wellness, and environmental domains, there are a variety of sensing approaches, including microelectromechanical systems ({MEMS}), optical, mechanical, electrochemical, semiconductor, and biosensing. As outlined in Chapter 1, the proliferation of sensor-based applications is growing across a range of sensing targets such as air, water, bacteria, movement, and physiology. As with any form of technology, sensors have both strengths and weaknesses. Operational performance may be a function of the transduction method, the deployment environment, or the system components. In this chapter, we review the common sensing mechanisms that are used in the application domains of interest within the scope of this book, along with their respective strengths and weaknesses. Finally, we describe the process of selecting and specifying sensors for an application.},
+	booktitle = {Sensor Technologies: Healthcare, Wellness, and Environmental Applications},
+	publisher = {Apress},
+	author = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	editor = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	urldate = {2025-02-26},
+	date = {2013},
+	langid = {english},
+	doi = {10.1007/978-1-4302-6014-1_2},
+	keywords = {Bulk Acoustic Wave, Electrochemical Sensor, Indium Antimonide, Linear Transfer Function, Smoke Detector},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\5CKH2AHE\\McGrath et al. - 2013 - Sensing and Sensor Fundamentals.pdf:application/pdf},
+}
+
+@article{zyl_sensor_2009,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03-01},
+	keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\2EJXBMW8\\Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf},
+}
+
+@article{zyl_sensor_2009-1,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03-01},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\6QZEMCEX\\Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf},
+}
+
+@book{hertzberg_mobile_2012-3,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	rights = {https://www.springernature.com/gp/researchers/text-and-data-mining},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	langid = {german},
+	doi = {10.1007/978-3-642-01726-1},
+}
+
+@article{zyl_sensor_2009-2,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03-01},
+	keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\FUEW7ZSG\\Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf},
+}
+
+@article{saudabayev_sensors_2015-3,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	keywords = {Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, Robot sensing systems, robotic hands, Robots, sensors, Sensors},
+}
+
+@software{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025-1,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation},
+	rights = {{BSD}-3-Clause},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-24},
+	date = {2025-02-17},
+	note = {original-date: 2021-12-15T12:15:45Z},
+}
+
+@article{paya_state---art_2017-3,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	rights = {Copyright © 2017 L. Payá et al.},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+	langid = {english},
+}
+
+@misc{EAR-25,
+	title = {earlephilhower/arduino-pico},
+	rights = {{LGPL}-2.1},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
	author = {Philhower, III, Earle F.},
+	urldate = {2025-02-12},
+	date = {2025-02},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@misc{noauthor_tutorials_nodate-3,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+}
+
+@misc{noauthor_examples_nodate-3,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+}
+
+@misc{grans_sebastiangransros2-point-cloud-demo_2024-3,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	rights = {{MIT}},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12},
+}
+
+@incollection{mcgrath_sensing_2013-1,
+	location = {Berkeley, {CA}},
+	title = {Sensing and Sensor Fundamentals},
+	isbn = {978-1-4302-6014-1},
+	url = {https://doi.org/10.1007/978-1-4302-6014-1_2},
+	abstract = {Sensors utilize a wide spectrum of transducer and signal transformation approaches with corresponding variations in technical complexity. These range from relatively simple temperature measurement based on a bimetallic thermocouple, to the detection of specific bacteria species using sophisticated optical systems. Within the healthcare, wellness, and environmental domains, there are a variety of sensing approaches, including microelectromechanical systems ({MEMS}), optical, mechanical, electrochemical, semiconductor, and biosensing. As outlined in Chapter 1, the proliferation of sensor-based applications is growing across a range of sensing targets such as air, water, bacteria, movement, and physiology. As with any form of technology, sensors have both strengths and weaknesses. Operational performance may be a function of the transduction method, the deployment environment, or the system components. In this chapter, we review the common sensing mechanisms that are used in the application domains of interest within the scope of this book, along with their respective strengths and weaknesses. Finally, we describe the process of selecting and specifying sensors for an application.},
+	booktitle = {Sensor Technologies: Healthcare, Wellness, and Environmental Applications},
+	publisher = {Apress},
+	author = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	editor = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	urldate = {2025-02-26},
+	date = {2013},
+	langid = {english},
+	doi = {10.1007/978-1-4302-6014-1_2},
+	keywords = {Bulk Acoustic Wave, Electrochemical Sensor, Indium Antimonide, Linear Transfer Function, Smoke Detector},
+}
+
+@article{zyl_sensor_2009-3,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03},
+	keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems},
+}
+
+@misc{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025-2,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation},
+	rights = {{BSD}-3-Clause},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-24},
+	date = {2025-02},
+}
+
+@misc{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate-2,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177},
+	urldate = {2025-02-21},
+}
+
+@inproceedings{al_naser_fusion_2022-2,
+	title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment},
+	url = {https://ieeexplore.ieee.org/document/9900548},
+	doi = {10.1109/RO-MAN53752.2022.9900548},
+	abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.},
+	booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen},
+	urldate = {2025-02-19},
+	date = {2022-08},
+	keywords = {Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Service robots, Stability criteria, Thermal sensors},
+}
+
+@inproceedings{choi_xr-based_2022-2,
+	title = {An {XR}-based Approach to Safe Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9757621},
+	doi = {10.1109/VRW55335.2022.00106},
+	abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.},
+	booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin},
+	urldate = {2025-02-19},
+	date = {2022-03},
+	keywords = {Collaboration, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), Real-time systems, Robot sensing systems, Safety, safety distance, Service robots, Three-dimensional displays},
+}
+
+@inproceedings{amaya-mejia_vision-based_2022-2,
+	title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9981689},
+	doi = {10.1109/IROS47612.2022.9981689},
+	abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.},
+	booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol},
+	urldate = {2025-02-19},
+	date = {2022-10},
+	keywords = {Collaboration, Collision avoidance, Robot control, Safety, Service robots, Solid modeling, Three-dimensional displays},
+}
+
+@article{li_safe_2024-2,
+	title = {Safe human–robot collaboration for industrial settings: a survey},
+	volume = {35},
+	issn = {1572-8145},
+	url = {https://doi.org/10.1007/s10845-023-02159-4},
+	doi = {10.1007/s10845-023-02159-4},
+	shorttitle = {Safe human–robot collaboration for industrial settings},
+	abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.},
+	number = {5},
+	journaltitle = {Journal of Intelligent Manufacturing},
+	author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong},
+	urldate = {2025-02-19},
+	date = {2024-06},
+	langid = {english},
+	keywords = {Collaborative robots, Collision detection, Human–robot collaboration ({HRC}), Obstacle avoidance, Safety},
+}
+
+@article{nath_review_2022-2,
+	title = {A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector},
+	volume = {7},
+	rights = {Copyright (c) 2022 Aditya S. Nath},
+	issn = {2736-576X},
+	url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624},
+	doi = {10.24018/ejeng.2022.7.1.2624},
+	abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling ({BIM}) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D {BIM} and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate {BIM} and sensor technology with tools such as {GIS}. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.},
+	number = {1},
+	journaltitle = {European Journal of Engineering and Technology Research},
+	author = {Nath, Aditya S.},
+	urldate = {2025-02-19},
+	date = {2022-02},
+	langid = {english},
+	keywords = {Sensors},
+}
+
+@article{maheepala_low_2021-2,
+	title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review},
+	volume = {21},
+	issn = {1558-1748},
+	url = {https://ieeexplore.ieee.org/abstract/document/9165781},
+	doi = {10.1109/JSEN.2020.3015932},
+	shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices},
+	abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.},
+	number = {2},
+	journaltitle = {{IEEE} Sensors Journal},
+	author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.},
+	urldate = {2025-02-19},
+	date = {2021-01},
+	keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers},
+}
+
+@article{liu_application_2024-2,
+	title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review},
+	volume = {40},
+	issn = {1044-7318},
+	url = {https://doi.org/10.1080/10447318.2022.2041907},
+	doi = {10.1080/10447318.2022.2041907},
+	shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing},
+	abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.},
+	number = {4},
+	journaltitle = {International Journal of Human–Computer Interaction},
+	author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.},
+	urldate = {2025-02-19},
+	date = {2024-02},
+}
+
+@inproceedings{popov_collision_2017-2,
+	location = {Lisbon},
+	title = {Collision detection, localization \& classification for industrial robots with joint torque sensors},
+	isbn = {978-1-5386-3518-6},
+	url = {http://ieeexplore.ieee.org/document/8172400/},
+	doi = {10.1109/ROMAN.2017.8172400},
+	abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.},
+	booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	publisher = {{IEEE}},
+	author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos},
+	urldate = {2025-02-19},
+	date = {2017-08},
+	langid = {english},
+}
+
+@misc{DIN-8373,
+	title = {Robotics - Vocabulary},
+	url = {https://www.dinmedia.de/de/norm/iso-8373/348036781},
+	shorttitle = {{ISO} 8373:2021-11},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	date = {2021},
+}
+
+@incollection{HAD-16,
+	location = {Cham},
+	title = {Physical {Human}--{Robot} Interaction},
+	isbn = {978-3-319-32552-1},
+	url = {https://doi.org/10.1007/978-3-319-32552-1_69},
+	abstract = {Over the last two decades, the foundations for physical human–robot interaction ({pHRI}) have evolved from successful developments in mechatronics, control, and planning, leading toward safer lightweight robot designs and interaction control schemes that advance beyond the current capacities of existing high-payload and high-precision position-controlled industrial robots. Based on their ability to sense physical interaction, render compliant behavior along the robot structure, plan motions that respect human preferences, and generate interaction plans for collaboration and coaction with humans, these novel robots have opened up novel and unforeseen application domains, and have advanced the field of human safety in robotics.},
+	booktitle = {Springer Handbook of Robotics},
+	publisher = {Springer International Publishing},
+	author = {Haddadin, Sami and Croft, Elizabeth},
+	editor = {Siciliano, Bruno and Khatib, Oussama},
+	urldate = {2025-04-17},
+	date = {2016},
+	langid = {english},
+	doi = {10.1007/978-3-319-32552-1_69},
+	keywords = {Collision Detection, Contact Force, Impedance Control, Industrial Robot, Joint Torque},
+}
diff --git a/Bachelorarbeit/V2/BA_old.bib b/Bachelorarbeit/V2/BA_old.bib
new file mode 100644
index 0000000000000000000000000000000000000000..7fedd0a440cba71db0344612639c7ce9b710dde6
--- /dev/null
+++ b/Bachelorarbeit/V2/BA_old.bib
@@ -0,0 +1,1852 @@
+
+@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation},
+	rights = {{BSD}-3-Clause},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-17},
+	date = {2025-02-13},
+	note = {original-date: 2021-12-15T12:18:45Z},
+}
+
+@article{haddadin_robot_2017,
+	title = {Robot Collisions: A Survey on Detection, Isolation, and Identification},
+	volume = {33},
+	issn = {1941-0468},
+	url = {https://ieeexplore.ieee.org/abstract/document/8059840},
+	doi = {10.1109/TRO.2017.2723903},
+	shorttitle = {Robot Collisions},
+	abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.},
+	pages = {1292--1312},
+	number = {6},
+	journaltitle = {{IEEE} Transactions on Robotics},
+	author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin},
+	urldate = {2025-02-12},
+	date = {2017-12},
+	note = {Conference Name: {IEEE} Transactions on Robotics},
+	keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots},
+	file = {Accepted Version:C\:\\Users\\Rene\\Zotero\\storage\\IEXJFAMF\\Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\LDB3Q92K\\8059840.html:text/html},
+}
+
+@book{hertzberg_mobile_2012,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	rights = {https://www.springernature.com/gp/researchers/text-and-data-mining},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	langid = {german},
+	doi = {10.1007/978-3-642-01726-1},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\RLTU9P46\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf},
+}
+
+@collection{hering_sensoren_2018,
+	location = {Wiesbaden},
+	title = {Sensoren in Wissenschaft und Technik},
+	rights = {http://www.springer.com/tdm},
+	isbn = {978-3-658-12561-5 978-3-658-12562-2},
+	url = {http://link.springer.com/10.1007/978-3-658-12562-2},
+	publisher = {Springer Fachmedien Wiesbaden},
+	editor = {Hering, Ekbert and Schönfelder, Gert},
+	urldate = {2025-02-12},
+	date = {2018},
+	langid = {german},
+	doi = {10.1007/978-3-658-12562-2},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\9TI57WXD\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf},
+}
+
+@article{saudabayev_sensors_2015,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	pages = {1765--1782},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	note = {Conference Name: {IEEE} Access},
+	keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HR7ZUF8W\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\484D4R7H\\7283549.html:text/html},
+}
+
+@article{paya_state---art_2017,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	rights = {Copyright © 2017 L. Payá et al.},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	pages = {3497650},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+	langid = {english},
+	note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\EZ473NGD\\Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\86LDAQ62\\3497650.html:text/html},
+}
+
+@online{noauthor_vl53l7cx_nodate,
+	title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
+	url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
+	abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}},
+	urldate = {2025-02-12},
+	langid = {english},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\VEYLCCLA\\vl53l7cx.html:text/html},
+}
+
+@online{noauthor_pico-series_nodate,
+	title = {Pico-series Microcontrollers - Raspberry Pi Documentation},
+	url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
+	abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
+	urldate = {2025-02-12},
+	langid = {english},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\KUCB8PVI\\pico-series.html:text/html},
+}
+
+@online{noauthor_chatgpt_nodate,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational {AI} system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\ZT8MG8Y4\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
+}
+
+@software{iii_earlephilhowerarduino-pico_2025,
+	title = {earlephilhower/arduino-pico},
+	rights = {{LGPL}-2.1},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
+	author = {{III}, Earle F. Philhower},
+	urldate = {2025-02-12},
+	date = {2025-02-11},
+	note = {original-date: 2021-02-25T04:20:27Z},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@online{noauthor_tutorials_nodate,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+	file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\Rene\\Zotero\\storage\\28S5GUZ5\\Tutorials.html:text/html},
+}
+
+@online{noauthor_examples_nodate,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+	file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\Rene\\Zotero\\storage\\82WA6KM7\\examples.html:text/html},
+}
+
+@software{grans_sebastiangransros2-point-cloud-demo_2024,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	rights = {{MIT}},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12-08},
+	note = {original-date: 2020-06-30T16:55:21Z},
+}
+
+@article{wunderlich_rasante_2013,
+	title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
+	volume = {8},
+	rights = {http://onlinelibrary.wiley.com/{termsAndConditions}\#vor},
+	issn = {1863-1460, 2191-1975},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
+	shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
+	abstract = {Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	pages = {38--40},
+	number = {3},
+	journaltitle = {Optik \& Photonik},
+	shortjournal = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	urldate = {2025-02-18},
+	date = {2013-09},
+	langid = {german},
+	file = {Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\Rene\\Zotero\\storage\\H7CSUHLW\\Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf},
+}
+
+@article{li_common_2019,
+	title = {Common Sensors in Industrial Robots: A Review},
+	volume = {1267},
+	issn = {1742-6588, 1742-6596},
+	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
+	doi = {10.1088/1742-6596/1267/1/012036},
+	shorttitle = {Common Sensors in Industrial Robots},
+	abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	pages = {012036},
+	number = {1},
+	journaltitle = {Journal of Physics: Conference Series},
+	shortjournal = {J. Phys.: Conf. Ser.},
+	author = {Li, Peng and Liu, Xiangpeng},
+	urldate = {2025-02-18},
+	date = {2019-07-01},
+	langid = {english},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\UVXS2R7J\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
+}
+
+@misc{noauthor_din_nodate,
+	title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
+	url = {https://www.dinmedia.de/de/-/-/341406648},
+	doi = {10.31030/3272912},
+	shorttitle = {{DIN} {EN} {ISO} 10218-1},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	langid = {german},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\XCP5RDRY\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf},
+}
+
+@misc{noauthor_din_nodate-1,
+	title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
+	url = {https://www.dinmedia.de/de/-/-/331246964},
+	doi = {10.31030/3215258},
+	shorttitle = {{DIN} {EN} {ISO} 10218-2},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	langid = {german},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\M7E9L4CP\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf},
+}
+
+@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025-1,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-17},
+	date = {2025-02-13},
+}
+
+@article{haddadin_robot_2017-1,
+	title = {Robot Collisions: A Survey on Detection, Isolation, and Identification},
+	volume = {33},
+	issn = {1941-0468},
+	url = {https://ieeexplore.ieee.org/abstract/document/8059840},
+	doi = {10.1109/TRO.2017.2723903},
+	shorttitle = {Robot Collisions},
+	abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.},
+	pages = {1292--1312},
+	number = {6},
+	journaltitle = {{IEEE} Transactions on Robotics},
+	author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin},
+	urldate = {2025-02-12},
+	date = {2017-12},
+	keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots},
+	file = {Accepted Version:C\:\\Users\\Rene\\Zotero\\storage\\BGZ6TUWR\\Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\UJAHJUBE\\8059840.html:text/html},
+}
+
+@book{hertzberg_mobile_2012-1,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	doi = {10.1007/978-3-642-01726-1},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\4LFEHVEK\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf},
+}
+
+@collection{hering_sensoren_2018-1,
+	location = {Wiesbaden},
+	title = {Sensoren in Wissenschaft und Technik},
+	isbn = {978-3-658-12561-5 978-3-658-12562-2},
+	url = {http://link.springer.com/10.1007/978-3-658-12562-2},
+	publisher = {Springer Fachmedien Wiesbaden},
+	editor = {Hering, Ekbert and Schönfelder, Gert},
+	urldate = {2025-02-12},
+	date = {2018},
+	doi = {10.1007/978-3-658-12562-2},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\BG7FCKRW\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf},
+}
+
+@article{saudabayev_sensors_2015-1,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	pages = {1765--1782},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\CDE3NZ3S\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\BEAGLR7C\\7283549.html:text/html},
+}
+
+@article{paya_state---art_2017-1,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	pages = {3497650},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\G2QJUK53\\Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\PSAFIKFD\\3497650.html:text/html},
+}
+
+@online{noauthor_vl53l7cx_nodate-1,
+	title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
+	url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
+	abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}},
+	urldate = {2025-02-12},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\BKZM82KQ\\vl53l7cx.html:text/html},
+}
+
+@online{noauthor_pico-series_nodate-1,
+	title = {Pico-series Microcontrollers - Raspberry Pi Documentation},
+	url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
+	abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
+	urldate = {2025-02-12},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\VKDULUUX\\pico-series.html:text/html},
+}
+
+@online{noauthor_chatgpt_nodate-1,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational {AI} system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\R25VDLY2\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
+}
+
+@software{iii_earlephilhowerarduino-pico_2025-1,
+	title = {earlephilhower/arduino-pico},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
+	author = {Philhower, Earle F., {III}},
+	urldate = {2025-02-12},
+	date = {2025-02-11},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@online{noauthor_tutorials_nodate-1,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+	file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\Rene\\Zotero\\storage\\HQ4G28QE\\Tutorials.html:text/html},
+}
+
+@online{noauthor_examples_nodate-1,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+	file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\Rene\\Zotero\\storage\\SURMD6VT\\examples.html:text/html},
+}
+
+@software{grans_sebastiangransros2-point-cloud-demo_2024-1,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12-08},
+}
+
+@article{wunderlich_rasante_2013-1,
+	title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
+	volume = {8},
+	issn = {1863-1460, 2191-1975},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
+	shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
+	abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	pages = {38--40},
+	number = {3},
+	journaltitle = {Optik \& Photonik},
+	shortjournal = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	urldate = {2025-02-18},
+	date = {2013-09},
+	file = {Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\Rene\\Zotero\\storage\\JZJDENIL\\Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf},
+}
+
+@article{li_common_2019-1,
+	title = {Common Sensors in Industrial Robots: A Review},
+	volume = {1267},
+	issn = {1742-6588, 1742-6596},
+	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
+	doi = {10.1088/1742-6596/1267/1/012036},
+	shorttitle = {Common Sensors in Industrial Robots},
+	abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	pages = {012036},
+	number = {1},
+	journaltitle = {Journal of Physics: Conference Series},
+	shortjournal = {J. Phys.: Conf. Ser.},
+	author = {Li, Peng and Liu, Xiangpeng},
+	urldate = {2025-02-18},
+	date = {2019-07-01},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\WQ5C229K\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
+}
+
+@misc{noauthor_din_nodate-2,
+	title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
+	url = {https://www.dinmedia.de/de/-/-/331246964},
+	shorttitle = {{DIN} {EN} {ISO} 10218-2},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	doi = {10.31030/3215258},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\HB28M28Z\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf},
+}
+
+@misc{noauthor_din_nodate-3,
+	title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
+	url = {https://www.dinmedia.de/de/-/-/341406648},
+	doi = {10.31030/3272912},
+	shorttitle = {{DIN} {EN} {ISO} 10218-1},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	langid = {german},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\FFMUVR22\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf},
+}
+
+@misc{noauthor_robotics_2021,
+	title = {Robotics - Vocabulary},
+	url = {https://www.dinmedia.de/de/norm/iso-8373/348036781},
+	shorttitle = {{ISO} 8373:2021-11},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	date = {2021-11},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\6SUCZU6R\\DIN_EN_ISO_8373.pdf:application/pdf},
+}
+
+@inproceedings{popov_collision_2017,
+	location = {Lisbon},
+	title = {Collision detection, localization \& classification for industrial robots with joint torque sensors},
+	isbn = {978-1-5386-3518-6},
+	url = {http://ieeexplore.ieee.org/document/8172400/},
+	doi = {10.1109/ROMAN.2017.8172400},
+	abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.},
+	eventtitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	pages = {838--843},
+	booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	publisher = {{IEEE}},
+	author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos},
+	urldate = {2025-02-19},
+	date = {2017-08},
+	langid = {english},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\LVC2B7U6\\Popov et al. - 2017 - Collision detection, localization & classification for industrial robots with joint torque sensors.pdf:application/pdf},
+}
+
+@article{liu_application_2024,
+	title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review},
+	volume = {40},
+	issn = {1044-7318},
+	url = {https://doi.org/10.1080/10447318.2022.2041907},
+	doi = {10.1080/10447318.2022.2041907},
+	shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing},
+	abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.},
+	pages = {915--932},
+	number = {4},
+	journaltitle = {International Journal of Human–Computer Interaction},
+	author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.},
+	urldate = {2025-02-19},
+	date = {2024-02-16},
+	note = {Publisher: Taylor \& Francis
+\_eprint: https://doi.org/10.1080/10447318.2022.2041907},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\G9ECNMWG\\Liu et al. - 2024 - Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing.pdf:application/pdf},
+}
+
+@article{maheepala_low_2021,
+	title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review},
+	volume = {21},
+	issn = {1558-1748},
+	url = {https://ieeexplore.ieee.org/abstract/document/9165781},
+	doi = {10.1109/JSEN.2020.3015932},
+	shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices},
+	abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.},
+	pages = {1172--1186},
+	number = {2},
+	journaltitle = {{IEEE} Sensors Journal},
+	author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.},
+	urldate = {2025-02-19},
+	date = {2021-01},
+	note = {Conference Name: {IEEE} Sensors Journal},
+	keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\XSY3V6PK\\Maheepala et al. - 2021 - Low Power Processors and Image Sensors for Vision-Based IoT Devices A Review.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\Y7EV2L8T\\9165781.html:text/html},
+}
+
+@article{nath_review_2022,
+	title = {A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector},
+	volume = {7},
+	rights = {Copyright (c) 2022 Aditya S.  Nath},
+	issn = {2736-576X},
+	url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624},
+	doi = {10.24018/ejeng.2022.7.1.2624},
+	abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling ({BIM}) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D {BIM} and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate {BIM} and sensor technology with tools such as {GIS}. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.},
+	pages = {85--89},
+	number = {1},
+	journaltitle = {European Journal of Engineering and Technology Research},
+	author = {Nath, Aditya S.},
+	urldate = {2025-02-19},
+	date = {2022-02-28},
+	langid = {english},
+	note = {Number: 1},
+	keywords = {Sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\AA6ZJJBN\\Nath - 2022 - A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector.pdf:application/pdf},
+}
+
+@online{noauthor_can_nodate,
+	title = {Can the collaborative robot market experience a second growth surge in the post-pandemic era?},
+	url = {https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/},
+	abstract = {The market for collaborative robots is forecast to show strong growth over the coming years, with rising sales in industrial and non-industrial sectors.},
+	titleaddon = {Interact Analysis},
+	urldate = {2025-02-19},
+	langid = {british},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\25UG57J5\\can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era.html:text/html},
+}
+
+@article{li_safe_2024,
+	title = {Safe human–robot collaboration for industrial settings: a survey},
+	volume = {35},
+	issn = {1572-8145},
+	url = {https://doi.org/10.1007/s10845-023-02159-4},
+	doi = {10.1007/s10845-023-02159-4},
+	shorttitle = {Safe human–robot collaboration for industrial settings},
+	abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.},
+	pages = {2235--2261},
+	number = {5},
+	journaltitle = {Journal of Intelligent Manufacturing},
+	shortjournal = {J Intell Manuf},
+	author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong},
+	urldate = {2025-02-19},
+	date = {2024-06-01},
+	langid = {english},
+	keywords = {Collision detection, Collaborative robots, Human–robot collaboration ({HRC}), Obstacle avoidance, Safety},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\4JS4CSVA\\Li et al. - 2024 - Safe human–robot collaboration for industrial settings a survey.pdf:application/pdf},
+}
+
+@inproceedings{amaya-mejia_vision-based_2022,
+	title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9981689},
+	doi = {10.1109/IROS47612.2022.9981689},
+	abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.},
+	eventtitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	pages = {7331--7336},
+	booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol},
+	urldate = {2025-02-19},
+	date = {2022-10},
+	note = {{ISSN}: 2153-0866},
+	keywords = {Collision avoidance, Service robots, Safety, Collaboration, Robot control, Solid modeling, Three-dimensional displays},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\XX9FL2U5\\Amaya-Mejía et al. - 2022 - Vision-Based Safety System for Barrierless Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\29LFGV4B\\9981689.html:text/html},
+}
+
+@inproceedings{choi_xr-based_2022,
+	title = {An {XR}-based Approach to Safe Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9757621},
+	doi = {10.1109/VRW55335.2022.00106},
+	abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.},
+	eventtitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	pages = {481--482},
+	booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin},
+	urldate = {2025-02-19},
+	date = {2022-03},
+	keywords = {Real-time systems, Robot sensing systems, Service robots, Safety, Collaboration, Three-dimensional displays, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), safety distance},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\VYUDN5LQ\\Choi et al. - 2022 - An XR-based Approach to Safe Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\GDI6TZQ2\\9757621.html:text/html},
+}
+
+@inproceedings{al_naser_fusion_2022,
+	title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment},
+	url = {https://ieeexplore.ieee.org/document/9900548},
+	doi = {10.1109/RO-MAN53752.2022.9900548},
+	abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.},
+	eventtitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	pages = {532--537},
+	booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen},
+	urldate = {2025-02-19},
+	date = {2022-08},
+	note = {{ISSN}: 1944-9437},
+	keywords = {Service robots, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Stability criteria, Thermal sensors},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\Q933FYY2\\Al Naser et al. - 2022 - Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a r.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\942BAXF5\\9900548.html:text/html},
+}
+
+@inproceedings{rashid_local_2020,
+	title = {Local and Global Sensors for Collision Avoidance},
+	url = {https://ieeexplore.ieee.org/document/9235223},
+	doi = {10.1109/MFI49285.2020.9235223},
+	abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° {LiDAR} and a small range {RGB} camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. Results suggest higher efficiency and safety when using local and global setup.},
+	eventtitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})},
+	pages = {354--359},
+	booktitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})},
+	author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias},
+	urldate = {2025-02-19},
+	date = {2020-09},
+	keywords = {Service robots, Safety, Sensor fusion, Cameras, Laser radar, Production, Robot vision systems},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HAXPN6EL\\Rashid et al. - 2020 - Local and Global Sensors for Collision Avoidance.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\4X42Y6TK\\9235223.html:text/html},
+}
+
+@online{jain_survey_nodate,
+	title = {A survey of Laser Range Finding},
+	url = {http://www.siddjain.com/ee236a.pdf},
+	abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.},
+	author = {Jain, Siddharth},
+	urldate = {2025-02-19},
+	langid = {english},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\X2WNAHZB\\Jain - A survey of Laser Range Finding.pdf:application/pdf},
+}
+
+@online{noauthor_file20200501_2020,
+	title = {File:20200501 Time of flight.svg - Wikimedia Commons},
+	url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg},
+	shorttitle = {File},
+	urldate = {2025-02-20},
+	date = {2020-05-01},
+	langid = {english},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\H7EUEBHT\\File20200501_Time_of_flight.html:text/html},
+}
+
+@article{raj_survey_2020,
+	title = {A Survey on {LiDAR} Scanning Mechanisms},
+	volume = {9},
+	rights = {http://creativecommons.org/licenses/by/3.0/},
+	issn = {2079-9292},
+	url = {https://www.mdpi.com/2079-9292/9/5/741},
+	doi = {10.3390/electronics9050741},
+	abstract = {In recent years, light detection and ranging ({LiDAR}) technology has gained huge popularity in various applications such as navigation, robotics, remote sensing, and advanced driving assistance systems ({ADAS}). This popularity is mainly due to the improvements in {LiDAR} performance in terms of range detection, accuracy, power consumption, as well as physical features such as dimension and weight. Although a number of literatures on {LiDAR} technology have been published earlier, not many has been reported on the state-of-the-art {LiDAR} scanning mechanisms. The aim of this article is to review the scanning mechanisms employed in {LiDAR} technology from past research works to the current commercial products. The review highlights four commonly used mechanisms in {LiDAR} systems: Opto-mechanical, electromechanical, micro-electromechanical systems ({MEMS}), and solid-state scanning. The study reveals that electro-mechanical scanning is the most prominent technology in use today. The commercially available 1D time of flight ({TOF}) {LiDAR} instrument is currently the most attractive option for conversion from 1D to 3D {LiDAR} system, provided that low scanning rate is not an issue. As for applications with low size, weight, and power ({SWaP}) requirements, {MEMS} scanning is found to be the better alternative. {MEMS} scanning is by far the more matured technology compared to solid-state scanning and is currently given great emphasis to increase its robustness for fulfilling the requirements of {ADAS} applications. Finally, solid-state {LiDAR} systems are expected to fill in the gap in {ADAS} applications despite the low technology readiness in comparison to {MEMS} scanners. However, since solid-state scanning is believed to have superior robustness, field of view ({FOV}), and scanning rate potential, great efforts are given by both academics and industries to further develop this technology.},
+	pages = {741},
+	number = {5},
+	journaltitle = {Electronics},
+	author = {Raj, Thinal and Hashim, Fazida Hanim and Huddin, Aqilah Baseri and Ibrahim, Mohd Faisal and Hussain, Aini},
+	urldate = {2025-02-20},
+	date = {2020-05},
+	langid = {english},
+	note = {Number: 5
+Publisher: Multidisciplinary Digital Publishing Institute},
+	keywords = {electro-mechanical scanning, {LiDAR}, {MEMS} scanning, opto-mechanical scanning, solid-state {LiDAR}},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\2PBQYF7P\\Raj et al. - 2020 - A Survey on LiDAR Scanning Mechanisms.pdf:application/pdf},
+}
+
+@article{surmann_autonomous_2003,
+	title = {An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of indoor environments},
+	volume = {45},
+	issn = {0921-8890},
+	url = {https://www.sciencedirect.com/science/article/pii/S0921889003001556},
+	doi = {10.1016/j.robot.2003.09.004},
+	abstract = {Digital 3D models of the environment are needed in rescue and inspection robotics, facility managements and architecture. This paper presents an automatic system for gaging and digitalization of 3D indoor environments. It consists of an autonomous mobile robot, a reliable 3D laser range finder and three elaborated software modules. The first module, a fast variant of the Iterative Closest Points algorithm, registers the 3D scans in a common coordinate system and relocalizes the robot. The second module, a next best view planner, computes the next nominal pose based on the acquired 3D data while avoiding complicated obstacles. The third module, a closed-loop and globally stable motor controller, navigates the mobile robot to a nominal pose on the base of odometry and avoids collisions with dynamical obstacles. The 3D laser range finder acquires a 3D scan at this pose. The proposed method allows one to digitalize large indoor environments fast and reliably without any intervention and solves the {SLAM} problem. The results of two 3D digitalization experiments are presented using a fast octree-based visualization method.},
+	pages = {181--198},
+	number = {3},
+	journaltitle = {Robotics and Autonomous Systems},
+	shortjournal = {Robotics and Autonomous Systems},
+	author = {Surmann, Hartmut and Nüchter, Andreas and Hertzberg, Joachim},
+	urldate = {2025-02-20},
+	date = {2003-12-31},
+	keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, {SLAM}},
+	file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\BKNJW2B7\\Surmann et al. - 2003 - An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of ind.pdf:application/pdf;ScienceDirect Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\H82LXSD3\\S0921889003001556.html:text/html},
+}
+
+@article{niclass_design_2012,
+	title = {Design and characterization of a 256x64-pixel single-photon imager in {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor},
+	volume = {20},
+	rights = {© 2012 {OSA}},
+	issn = {1094-4087},
+	url = {https://opg.optica.org/oe/abstract.cfm?uri=oe-20-11-11863},
+	doi = {10.1364/OE.20.011863},
+	abstract = {We introduce an optical time-of-flight image sensor taking advantage of a {MEMS}-based laser scanning device. Unlike previous approaches, our concept benefits from the high timing resolution and the digital signal flexibility of single-photon pixels in {CMOS} to allow for a nearly ideal cooperation between the image sensor and the scanning device. This technique enables a high signal-to-background light ratio to be obtained, while simultaneously relaxing the constraint on size of the {MEMS} mirror. These conditions are critical for devising practical and low-cost depth sensors intended to operate in uncontrolled environments, such as outdoors. A proof-of-concept prototype capable of operating in real-time was implemented. This paper focuses on the design and characterization of a 256x64-pixel image sensor, which also comprises an event-driven readout circuit, an array of 64 row-level high-throughput time-to-digital converters, and a 16Gbit/s global readout circuit. Quantitative evaluation of the sensor under 2klux of background light revealed a repeatability error of 13.5cm throughout the distance range of 20 meters.},
+	pages = {11863--11881},
+	number = {11},
+	journaltitle = {Optics Express},
+	shortjournal = {Opt. Express, {OE}},
+	author = {Niclass, Cristiano and Ito, Kota and Soga, Mineki and Matsubara, Hiroyuki and Aoyagi, Isao and Kato, Satoru and Kagami, Manabu},
+	urldate = {2025-02-20},
+	date = {2012-05-21},
+	note = {Publisher: Optica Publishing Group},
+	keywords = {Image sensors, Deformable mirrors, Diode lasers, Light emitting diodes, Optical systems, Systems design},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\BZWW7BVY\\Niclass et al. - 2012 - Design and characterization of a 256x64-pixel single-photon imager in CMOS for a MEMS-based laser sc.pdf:application/pdf},
+}
+
+@online{noauthor_vlp_nodate,
+	title = {{VLP} 16 {\textbar} Ouster},
+	url = {https://ouster.com/products/hardware/vlp-16},
+	abstract = {Mid-range lidar sensor},
+	urldate = {2025-02-20},
+	langid = {english},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\AR82YJRS\\vlp-16.html:text/html},
+}
+
+@software{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025,
+	title = {sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library},
+	url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library},
+	publisher = {{SparkFun} Electronics},
+	urldate = {2025-02-21},
+	date = {2025-01-28},
+	note = {original-date: 2021-10-22T21:06:36Z},
+}
+
+@online{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher},
+	urldate = {2025-02-21},
+	file = {tof_imager_micro_ros/teensy_pcl_publisher at humble · adityakamath/tof_imager_micro_ros · GitHub:C\:\\Users\\Rene\\Zotero\\storage\\TEVM2A5B\\teensy_pcl_publisher.html:text/html},
+}
+
+@online{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177},
+	urldate = {2025-02-21},
+	file = {tof_imager_micro_ros/teensy_pcl_publisher/teensy_pcl_publisher.ino at humble · adityakamath/tof_imager_micro_ros · GitHub:C\:\\Users\\Rene\\Zotero\\storage\\PYV8KTSC\\teensy_pcl_publisher.html:text/html},
+}
+
+@article{vogel-heuser_von_2023,
+	title = {Von Industrie 4.0 zu Industrie 5.0 – Idee, Konzept und Wahrnehmung},
+	volume = {60},
+	issn = {2198-2775},
+	url = {https://doi.org/10.1365/s40702-023-01002-x},
+	doi = {10.1365/s40702-023-01002-x},
+	abstract = {In der sich rasant entwickelnden Landschaft der industriellen Automatisierung läutet das Aufkommen von Industrie 5.0 (I5.0) einen Paradigmenwechsel hin zu einem stärker kollaborativen und menschzentrierten Ansatz ein. In diesem Beitrag wird die Rolle der Mensch-Maschine-Kollaboration und menschzentrierter Werkzeuge bei der Förderung einer symbiotischen Beziehung zwischen fortschrittlichen Technologien und menschlichen Benutzern untersucht, um so das volle Potenzial von I5.0 zu erschließen. Als nächste Stufe in der Entwicklung des Produktionssektors zielt I5.0 darauf ab, ein Gleichgewicht zwischen Automatisierung und menschlichen Fähigkeiten herzustellen und die sich ergänzenden Stärken beider zu nutzen. Es werden Technologien vorgestellt, welche menschzentrierte Lösungen zur Steigerung von Produktivität, Flexibilität und Nachhaltigkeit in der Fabrik der Zukunft fokussieren.},
+	pages = {1124--1142},
+	number = {6},
+	journaltitle = {{HMD} Praxis der Wirtschaftsinformatik},
+	shortjournal = {{HMD}},
+	author = {Vogel-Heuser, Birgit and Bengler, Klaus},
+	urldate = {2025-04-16},
+	date = {2023-12-01},
+	langid = {german},
+	keywords = {Cyber-Physical Production Systems, Cyber-physische Produktionssysteme, Fabrik der Zukunft, Factory of the Future, Human-Centered Automation, Human-Machine Collaboration, Industrie 4.0, Industrie 5.0, Industry 4.0, Industry 5.0, Mensch-Maschine-Kollaboration, Menschzentrierte Automatisierung},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\U6RG6RSY\\Vogel-Heuser and Bengler - 2023 - Von Industrie 4.0 zu Industrie 5.0 – Idee, Konzept und Wahrnehmung.pdf:application/pdf},
+}
+
+@software{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-24},
+	date = {2025-02-17},
+}
+
+@online{noauthor_tutorials_nodate-2,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+}
+
+@software{grans_sebastiangransros2-point-cloud-demo_2024-2,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12-08},
+}
+
+@online{noauthor_examples_nodate-2,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+}
+
+@software{iii_earlephilhowerarduino-pico_2025-2,
+	title = {earlephilhower/arduino-pico},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
+	author = {Philhower, III, Earle F.},
+	urldate = {2025-02-12},
+	date = {2025-02-11},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@online{noauthor_chatgpt_nodate-2,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational {AI} system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+}
+
+@online{noauthor_pico-series_nodate-2,
+	title = {Pico-series Microcontrollers - Raspberry Pi Documentation},
+	url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
+	abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
+	urldate = {2025-02-12},
+}
+
+@online{noauthor_vl53l7cx_nodate-2,
+	title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
+	url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
+	abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}},
+	urldate = {2025-02-12},
+}
+
+@article{paya_state---art_2017-2,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	pages = {3497650},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+}
+
+@article{saudabayev_sensors_2015-2,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	pages = {1765--1782},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors},
+}
+
+@collection{hering_sensoren_2018-2,
+	location = {Wiesbaden},
+	title = {Sensoren in Wissenschaft und Technik},
+	isbn = {978-3-658-12561-5 978-3-658-12562-2},
+	url = {http://link.springer.com/10.1007/978-3-658-12562-2},
+	publisher = {Springer Fachmedien Wiesbaden},
+	editor = {Hering, Ekbert and Schönfelder, Gert},
+	urldate = {2025-02-12},
+	date = {2018},
+	doi = {10.1007/978-3-658-12562-2},
+}
+
+@article{haddadin_robot_2017-2,
+	title = {Robot Collisions: A Survey on Detection, Isolation, and Identification},
+	volume = {33},
+	issn = {1941-0468},
+	url = {https://ieeexplore.ieee.org/abstract/document/8059840},
+	doi = {10.1109/TRO.2017.2723903},
+	shorttitle = {Robot Collisions},
+	abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.},
+	pages = {1292--1312},
+	number = {6},
+	journaltitle = {{IEEE} Transactions on Robotics},
+	author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin},
+	urldate = {2025-02-12},
+	date = {2017-12},
+	keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots},
+}
+
+@article{wunderlich_rasante_2013-2,
+	title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
+	volume = {8},
+	issn = {1863-1460, 2191-1975},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
+	shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
+	abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	pages = {38--40},
+	number = {3},
+	journaltitle = {Optik \& Photonik},
+	shortjournal = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	urldate = {2025-02-18},
+	date = {2013-09},
+}
+
+@book{hertzberg_mobile_2012-2,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	doi = {10.1007/978-3-642-01726-1},
+}
+
+@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025-2,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-17},
+	date = {2025-02-13},
+}
+
+@article{li_common_2019-2,
+	title = {Common Sensors in Industrial Robots: A Review},
+	volume = {1267},
+	issn = {1742-6588, 1742-6596},
+	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
+	doi = {10.1088/1742-6596/1267/1/012036},
+	shorttitle = {Common Sensors in Industrial Robots},
+	abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	pages = {012036},
+	number = {1},
+	journaltitle = {Journal of Physics: Conference Series},
+	shortjournal = {J. Phys.: Conf. Ser.},
+	author = {Li, Peng and Liu, Xiangpeng},
+	urldate = {2025-02-18},
+	date = {2019-07-01},
+}
+
+@misc{noauthor_din_nodate-4,
+	title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
+	url = {https://www.dinmedia.de/de/-/-/341406648},
+	shorttitle = {{DIN} {EN} {ISO} 10218-1},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	doi = {10.31030/3272912},
+}
+
+@article{li_common_2019-3,
+	title = {Common Sensors in Industrial Robots: A Review},
+	volume = {1267},
+	issn = {1742-6588, 1742-6596},
+	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
+	doi = {10.1088/1742-6596/1267/1/012036},
+	shorttitle = {Common Sensors in Industrial Robots},
+	abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	pages = {012036},
+	number = {1},
+	journaltitle = {Journal of Physics: Conference Series},
+	shortjournal = {J. Phys.: Conf. Ser.},
+	author = {Li, Peng and Liu, Xiangpeng},
+	urldate = {2025-02-18},
+	date = {2019-07-01},
+}
+
+@misc{noauthor_din_nodate-5,
+	title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
+	url = {https://www.dinmedia.de/de/-/-/331246964},
+	shorttitle = {{DIN} {EN} {ISO} 10218-2},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	doi = {10.31030/3215258},
+}
+
+@misc{noauthor_din_nodate-6,
+	title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
+	url = {https://www.dinmedia.de/de/-/-/331246964},
+	shorttitle = {{DIN} {EN} {ISO} 10218-2},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	doi = {10.31030/3215258},
+}
+
+@misc{noauthor_robotics_2021-1,
+	title = {Robotics - Vocabulary},
+	url = {https://www.dinmedia.de/de/norm/iso-8373/348036781},
+	shorttitle = {{ISO} 8373:2021-11},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	date = {2021-11},
+}
+
+@article{liu_application_2024-1,
+	title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review},
+	volume = {40},
+	issn = {1044-7318},
+	url = {https://doi.org/10.1080/10447318.2022.2041907},
+	doi = {10.1080/10447318.2022.2041907},
+	shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing},
+	abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.},
+	pages = {915--932},
+	number = {4},
+	journaltitle = {International Journal of Human–Computer Interaction},
+	author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.},
+	urldate = {2025-02-19},
+	date = {2024-02-16},
+}
+
+@article{maheepala_low_2021-1,
+	title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review},
+	volume = {21},
+	issn = {1558-1748},
+	url = {https://ieeexplore.ieee.org/abstract/document/9165781},
+	doi = {10.1109/JSEN.2020.3015932},
+	shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices},
+	abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.},
+	pages = {1172--1186},
+	number = {2},
+	journaltitle = {{IEEE} Sensors Journal},
+	author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.},
+	urldate = {2025-02-19},
+	date = {2021-01},
+	keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers},
+}
+
+@misc{noauthor_din_nodate-7,
+	title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
+	url = {https://www.dinmedia.de/de/-/-/341406648},
+	shorttitle = {{DIN} {EN} {ISO} 10218-1},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	langid = {german},
+	doi = {10.31030/3272912},
+}
+
+@inproceedings{popov_collision_2017-1,
+	location = {Lisbon},
+	title = {Collision detection, localization \& classification for industrial robots with joint torque sensors},
+	isbn = {978-1-5386-3518-6},
+	url = {http://ieeexplore.ieee.org/document/8172400/},
+	doi = {10.1109/ROMAN.2017.8172400},
+	abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.},
+	eventtitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	pages = {838--843},
+	booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	publisher = {{IEEE}},
+	author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos},
+	urldate = {2025-02-19},
+	date = {2017-08},
+}
+
+@article{nath_review_2022-1,
+	title = {A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector},
+	volume = {7},
+	issn = {2736-576X},
+	url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624},
+	doi = {10.24018/ejeng.2022.7.1.2624},
+	abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling ({BIM}) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D {BIM} and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate {BIM} and sensor technology with tools such as {GIS}. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.},
+	pages = {85--89},
+	number = {1},
+	journaltitle = {European Journal of Engineering and Technology Research},
+	author = {Nath, Aditya S.},
+	urldate = {2025-02-19},
+	date = {2022-02-28},
+	keywords = {Sensors},
+}
+
+@inproceedings{rashid_local_2020-1,
+	title = {Local and Global Sensors for Collision Avoidance},
+	url = {https://ieeexplore.ieee.org/document/9235223},
+	doi = {10.1109/MFI49285.2020.9235223},
+	abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° {LiDAR} and a small range {RGB} camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. Results suggest higher efficiency and safety when using local and global setup.},
+	eventtitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})},
+	pages = {354--359},
+	booktitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})},
+	author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias},
+	urldate = {2025-02-19},
+	date = {2020-09},
+	keywords = {Service robots, Safety, Sensor fusion, Cameras, Laser radar, Production, Robot vision systems},
+}
+
+@article{jain_survey_nodate-1,
+	title = {A survey of Laser Range Finding},
+	url = {http://www.siddjain.com/ee236a.pdf},
+	abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.},
+	author = {Jain, Siddharth},
+	urldate = {2025-02-19},
+}
+
+@online{noauthor_can_nodate-1,
+	title = {Can the collaborative robot market experience a second growth surge in the post-pandemic era?},
+	url = {https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/},
+	abstract = {The market for collaborative robots is forecast to show strong growth over the coming years, with rising sales in industrial and non-industrial sectors.},
+	urldate = {2025-02-19},
+}
+
+@article{li_safe_2024-1,
+	title = {Safe human–robot collaboration for industrial settings: a survey},
+	volume = {35},
+	issn = {1572-8145},
+	url = {https://doi.org/10.1007/s10845-023-02159-4},
+	doi = {10.1007/s10845-023-02159-4},
+	shorttitle = {Safe human–robot collaboration for industrial settings},
+	abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.},
+	pages = {2235--2261},
+	number = {5},
+	journaltitle = {Journal of Intelligent Manufacturing},
+	shortjournal = {J Intell Manuf},
+	author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong},
+	urldate = {2025-02-19},
+	date = {2024-06-01},
+	keywords = {Collision detection, Collaborative robots, Human–robot collaboration ({HRC}), Obstacle avoidance, Safety},
+}
+
+@inproceedings{amaya-mejia_vision-based_2022-1,
+	title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9981689},
+	doi = {10.1109/IROS47612.2022.9981689},
+	abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.},
+	eventtitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	pages = {7331--7336},
+	booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol},
+	urldate = {2025-02-19},
+	date = {2022-10},
+	keywords = {Collision avoidance, Service robots, Safety, Collaboration, Robot control, Solid modeling, Three-dimensional displays},
+}
+
+@inproceedings{choi_xr-based_2022-1,
+	title = {An {XR}-based Approach to Safe Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9757621},
+	doi = {10.1109/VRW55335.2022.00106},
+	abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.},
+	eventtitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	pages = {481--482},
+	booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin},
+	urldate = {2025-02-19},
+	date = {2022-03},
+	keywords = {Real-time systems, Robot sensing systems, Service robots, Safety, Collaboration, Three-dimensional displays, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), safety distance},
+}
+
+@inproceedings{al_naser_fusion_2022-1,
+	title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment},
+	url = {https://ieeexplore.ieee.org/document/9900548},
+	doi = {10.1109/RO-MAN53752.2022.9900548},
+	abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.},
+	eventtitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	pages = {532--537},
+	booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen},
+	urldate = {2025-02-19},
+	date = {2022-08},
+	keywords = {Service robots, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Stability criteria, Thermal sensors},
+}
+
+@online{noauthor_file20200501_2020-1,
+	title = {File:20200501 Time of flight.svg - Wikipedia},
+	url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg},
+	shorttitle = {File},
+	urldate = {2025-02-20},
+	date = {2020-05-01},
+}
+
+@article{raj_survey_2020-1,
+	title = {A Survey on {LiDAR} Scanning Mechanisms},
+	volume = {9},
+	issn = {2079-9292},
+	url = {https://www.mdpi.com/2079-9292/9/5/741},
+	doi = {10.3390/electronics9050741},
+	abstract = {In recent years, light detection and ranging ({LiDAR}) technology has gained huge popularity in various applications such as navigation, robotics, remote sensing, and advanced driving assistance systems ({ADAS}). This popularity is mainly due to the improvements in {LiDAR} performance in terms of range detection, accuracy, power consumption, as well as physical features such as dimension and weight. Although a number of literatures on {LiDAR} technology have been published earlier, not many has been reported on the state-of-the-art {LiDAR} scanning mechanisms. The aim of this article is to review the scanning mechanisms employed in {LiDAR} technology from past research works to the current commercial products. The review highlights four commonly used mechanisms in {LiDAR} systems: Opto-mechanical, electromechanical, micro-electromechanical systems ({MEMS}), and solid-state scanning. The study reveals that electro-mechanical scanning is the most prominent technology in use today. The commercially available 1D time of flight ({TOF}) {LiDAR} instrument is currently the most attractive option for conversion from 1D to 3D {LiDAR} system, provided that low scanning rate is not an issue. As for applications with low size, weight, and power ({SWaP}) requirements, {MEMS} scanning is found to be the better alternative. {MEMS} scanning is by far the more matured technology compared to solid-state scanning and is currently given great emphasis to increase its robustness for fulfilling the requirements of {ADAS} applications. Finally, solid-state {LiDAR} systems are expected to fill in the gap in {ADAS} applications despite the low technology readiness in comparison to {MEMS} scanners. However, since solid-state scanning is believed to have superior robustness, field of view ({FOV}), and scanning rate potential, great efforts are given by both academics and industries to further develop this technology.},
+	pages = {741},
+	number = {5},
+	journaltitle = {Electronics},
+	author = {Raj, Thinal and Hashim, Fazida Hanim and Huddin, Aqilah Baseri and Ibrahim, Mohd Faisal and Hussain, Aini},
+	urldate = {2025-02-20},
+	date = {2020-05},
+	keywords = {electro-mechanical scanning, {LiDAR}, {MEMS} scanning, opto-mechanical scanning, solid-state {LiDAR}},
+}
+
+@article{surmann_autonomous_2003-1,
+	title = {An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of indoor environments},
+	volume = {45},
+	issn = {0921-8890},
+	url = {https://www.sciencedirect.com/science/article/pii/S0921889003001556},
+	doi = {10.1016/j.robot.2003.09.004},
+	abstract = {Digital 3D models of the environment are needed in rescue and inspection robotics, facility managements and architecture. This paper presents an automatic system for gaging and digitalization of 3D indoor environments. It consists of an autonomous mobile robot, a reliable 3D laser range finder and three elaborated software modules. The first module, a fast variant of the Iterative Closest Points algorithm, registers the 3D scans in a common coordinate system and relocalizes the robot. The second module, a next best view planner, computes the next nominal pose based on the acquired 3D data while avoiding complicated obstacles. The third module, a closed-loop and globally stable motor controller, navigates the mobile robot to a nominal pose on the base of odometry and avoids collisions with dynamical obstacles. The 3D laser range finder acquires a 3D scan at this pose. The proposed method allows one to digitalize large indoor environments fast and reliably without any intervention and solves the {SLAM} problem. The results of two 3D digitalization experiments are presented using a fast octree-based visualization method.},
+	pages = {181--198},
+	number = {3},
+	journaltitle = {Robotics and Autonomous Systems},
+	shortjournal = {Robotics and Autonomous Systems},
+	author = {Surmann, Hartmut and Nüchter, Andreas and Hertzberg, Joachim},
+	urldate = {2025-02-20},
+	date = {2003-12-31},
+	keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, {SLAM}},
+}
+
+@article{niclass_design_2012-1,
+	title = {Design and characterization of a 256x64-pixel single-photon imager in {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor},
+	volume = {20},
+	issn = {1094-4087},
+	url = {https://opg.optica.org/oe/abstract.cfm?uri=oe-20-11-11863},
+	doi = {10.1364/OE.20.011863},
+	abstract = {We introduce an optical time-of-flight image sensor taking advantage of a {MEMS}-based laser scanning device. Unlike previous approaches, our concept benefits from the high timing resolution and the digital signal flexibility of single-photon pixels in {CMOS} to allow for a nearly ideal cooperation between the image sensor and the scanning device. This technique enables a high signal-to-background light ratio to be obtained, while simultaneously relaxing the constraint on size of the {MEMS} mirror. These conditions are critical for devising practical and low-cost depth sensors intended to operate in uncontrolled environments, such as outdoors. A proof-of-concept prototype capable of operating in real-time was implemented. This paper focuses on the design and characterization of a 256x64-pixel image sensor, which also comprises an event-driven readout circuit, an array of 64 row-level high-throughput time-to-digital converters, and a 16Gbit/s global readout circuit. Quantitative evaluation of the sensor under 2klux of background light revealed a repeatability error of 13.5cm throughout the distance range of 20 meters.},
+	pages = {11863--11881},
+	number = {11},
+	journaltitle = {Optics Express},
+	shortjournal = {Opt. Express, {OE}},
+	author = {Niclass, Cristiano and Ito, Kota and Soga, Mineki and Matsubara, Hiroyuki and Aoyagi, Isao and Kato, Satoru and Kagami, Manabu},
+	urldate = {2025-02-20},
+	date = {2012-05-21},
+	keywords = {Image sensors, Deformable mirrors, Diode lasers, Light emitting diodes, Optical systems, Systems design},
+}
+
+@online{noauthor_vlp_nodate-1,
+	title = {{VLP} 16 {\textbar} Ouster},
+	url = {https://ouster.com/products/hardware/vlp-16},
+	abstract = {Mid-range lidar sensor},
+	urldate = {2025-02-20},
+}
+
+@software{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025-1,
+	title = {sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library},
+	url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library},
+	publisher = {{SparkFun} Electronics},
+	urldate = {2025-02-21},
+	date = {2025-01-28},
+}
+
+@online{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate-1,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher},
+	urldate = {2025-02-21},
+}
+
+@online{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate-1,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177},
+	urldate = {2025-02-21},
+}
+
+@online{noauthor_iidea_nodate,
+	title = {{IIDEA} - Inklusion und Integration durch Cobots auf dem ersten Arbeitsmarkt - {RWTH} {AACHEN} {UNIVERSITY} {IGMR} - Deutsch},
+	url = {https://www.igmr.rwth-aachen.de/cms/igmr/forschung/projekte/aktuelle-projekte/~baxrrf/iidea/},
+	urldate = {2025-04-16},
+	file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\MHVTD38V\\undefined:text/html},
+}
+
+@misc{noauthor_din_nodate-8,
+	title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
+	url = {https://www.dinmedia.de/de/-/-/331246964},
+	shorttitle = {{DIN} {EN} {ISO} 10218-2},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	langid = {german},
+	doi = {10.31030/3215258},
+}
+
+@misc{noauthor_din_nodate-9,
+	title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
+	url = {https://www.dinmedia.de/de/-/-/341406648},
+	shorttitle = {{DIN} {EN} {ISO} 10218-1},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	langid = {german},
+	doi = {10.31030/3272912},
+}
+
+@article{li_common_2019-4,
+	title = {Common Sensors in Industrial Robots: A Review},
+	volume = {1267},
+	issn = {1742-6588, 1742-6596},
+	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
+	doi = {10.1088/1742-6596/1267/1/012036},
+	shorttitle = {Common Sensors in Industrial Robots},
+	abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	pages = {012036},
+	number = {1},
+	journaltitle = {Journal of Physics: Conference Series},
+	author = {Li, Peng and Liu, Xiangpeng},
+	urldate = {2025-02-18},
+	date = {2019-07},
+	langid = {english},
+}
+
+@article{wunderlich_rasante_2013-3,
+	title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
+	volume = {8},
+	rights = {http://onlinelibrary.wiley.com/{termsAndConditions}\#vor},
+	issn = {1863-1460, 2191-1975},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
+	shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
+	abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	pages = {38--40},
+	number = {3},
+	journaltitle = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	urldate = {2025-02-18},
+	date = {2013-09},
+	langid = {german},
+}
+
+@incollection{mcgrath_sensing_2013,
+	location = {Berkeley, {CA}},
+	title = {Sensing and Sensor Fundamentals},
+	isbn = {978-1-4302-6014-1},
+	url = {https://doi.org/10.1007/978-1-4302-6014-1_2},
+	abstract = {Sensors utilize a wide spectrum of transducer and signal transformation approaches with corresponding variations in technical complexity. These range from relatively simple temperature measurement based on a bimetallic thermocouple, to the detection of specific bacteria species using sophisticated optical systems. Within the healthcare, wellness, and environmental domains, there are a variety of sensing approaches, including microelectromechanical systems ({MEMS}), optical, mechanical, electrochemical, semiconductor, and biosensing. As outlined in Chapter 1, the proliferation of sensor-based applications is growing across a range of sensing targets such as air, water, bacteria, movement, and physiology. As with any form of technology, sensors have both strengths and weaknesses. Operational performance may be a function of the transduction method, the deployment environment, or the system components. In this chapter, we review the common sensing mechanisms that are used in the application domains of interest within the scope of this book, along with their respective strengths and weaknesses. Finally, we describe the process of selecting and specifying sensors for an application.},
+	pages = {15--50},
+	booktitle = {Sensor Technologies: Healthcare, Wellness, and Environmental Applications},
+	publisher = {Apress},
+	author = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	editor = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	urldate = {2025-02-26},
+	date = {2013},
+	langid = {english},
+	doi = {10.1007/978-1-4302-6014-1_2},
+	keywords = {Bulk Acoustic Wave, Electrochemical Sensor, Indium Antimonide, Linear Transfer Function, Smoke Detector},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\5CKH2AHE\\McGrath et al. - 2013 - Sensing and Sensor Fundamentals.pdf:application/pdf},
+}
+
+@misc{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025-3,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation},
+	rights = {{BSD}-3-Clause},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-17},
+	date = {2025-02},
+}
+
+@article{zyl_sensor_2009,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	pages = {16--30},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03-01},
+	keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\2EJXBMW8\\Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf},
+}
+
+@article{haddadin_robot_2017-3,
+	title = {Robot Collisions: A Survey on Detection, Isolation, and Identification},
+	volume = {33},
+	issn = {1941-0468},
+	url = {https://ieeexplore.ieee.org/abstract/document/8059840},
+	doi = {10.1109/TRO.2017.2723903},
+	shorttitle = {Robot Collisions},
+	abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.},
+	pages = {1292--1312},
+	number = {6},
+	journaltitle = {{IEEE} Transactions on Robotics},
+	author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin},
+	urldate = {2025-02-12},
+	date = {2017-12},
+	keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots},
+}
+
+@article{zyl_sensor_2009-1,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	pages = {16--30},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03-01},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\6QZEMCEX\\Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf},
+}
+
+@book{hertzberg_mobile_2012-3,
+	location = {Berlin, Heidelberg},
+	title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
+	rights = {https://www.springernature.com/gp/researchers/text-and-data-mining},
+	isbn = {978-3-642-01725-4 978-3-642-01726-1},
+	url = {https://link.springer.com/10.1007/978-3-642-01726-1},
+	series = {{eXamen}.press},
+	shorttitle = {Mobile Roboter},
+	publisher = {Springer Berlin Heidelberg},
+	author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
+	urldate = {2025-02-12},
+	date = {2012},
+	langid = {german},
+	doi = {10.1007/978-3-642-01726-1},
+}
+
+@article{zyl_sensor_2009-2,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	pages = {16--30},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03-01},
+	keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems},
+	file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\FUEW7ZSG\\Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf},
+}
+
+@collection{hering_sensoren_2018-3,
+	location = {Wiesbaden},
+	title = {Sensoren in Wissenschaft und Technik},
+	rights = {http://www.springer.com/tdm},
+	isbn = {978-3-658-12561-5 978-3-658-12562-2},
+	url = {http://link.springer.com/10.1007/978-3-658-12562-2},
+	publisher = {Springer Fachmedien Wiesbaden},
+	editor = {Hering, Ekbert and Schönfelder, Gert},
+	urldate = {2025-02-12},
+	date = {2018},
+	langid = {german},
+	doi = {10.1007/978-3-658-12562-2},
+}
+
+@article{saudabayev_sensors_2015-3,
+	title = {Sensors for Robotic Hands: A Survey of State of the Art},
+	volume = {3},
+	issn = {2169-3536},
+	url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
+	doi = {10.1109/ACCESS.2015.2482543},
+	shorttitle = {Sensors for Robotic Hands},
+	abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
+	pages = {1765--1782},
+	journaltitle = {{IEEE} Access},
+	author = {Saudabayev, Artur and Varol, Huseyin Atakan},
+	urldate = {2025-02-12},
+	date = {2015},
+	keywords = {Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, Robot sensing systems, robotic hands, Robots, sensors, Sensors},
+}
+
+@software{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025-1,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation},
+	rights = {{BSD}-3-Clause},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-24},
+	date = {2025-02-17},
+	note = {original-date: 2021-12-15T12:15:45Z},
+}
+
+@article{paya_state---art_2017-3,
+	title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
+	volume = {2017},
+	rights = {Copyright © 2017 L. Payá et al.},
+	issn = {1687-7268},
+	url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
+	doi = {10.1155/2017/3497650},
+	abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
+	pages = {3497650},
+	number = {1},
+	journaltitle = {Journal of Sensors},
+	author = {Payá, L. and Gil, A. and Reinoso, O.},
+	urldate = {2025-02-12},
+	date = {2017},
+	langid = {english},
+}
+
+@misc{noauthor_vl53l7cx_nodate-3,
+	title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
+	url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
+	abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}},
+	urldate = {2025-02-12},
+	langid = {english},
+}
+
+@misc{noauthor_pico-series_nodate-3,
+	title = {Pico-series Microcontrollers - Raspberry Pi Documentation},
+	url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
+	abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
+	urldate = {2025-02-12},
+	langid = {english},
+}
+
+@misc{noauthor_chatgpt_nodate-3,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational {AI} system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+}
+
+@misc{iii_earlephilhowerarduino-pico_2025-3,
+	title = {earlephilhower/arduino-pico},
+	rights = {{LGPL}-2.1},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
+	author = {Philhower, III, Earle F.},
+	urldate = {2025-02-12},
+	date = {2025-02},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@misc{noauthor_tutorials_nodate-3,
+	title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+}
+
+@software{noauthor_sparkfun_nodate,
+	title = {{SparkFun} {VL}53L5CX},
+	url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library},
+}
+
+@misc{noauthor_examples_nodate-3,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+}
+
+@misc{grans_sebastiangransros2-point-cloud-demo_2024-3,
+	title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
+	rights = {{MIT}},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
+	author = {Grans, Sebastian},
+	urldate = {2025-02-12},
+	date = {2024-12},
+}
+
+@incollection{mcgrath_sensing_2013-1,
+	location = {Berkeley, {CA}},
+	title = {Sensing and Sensor Fundamentals},
+	isbn = {978-1-4302-6014-1},
+	url = {https://doi.org/10.1007/978-1-4302-6014-1_2},
+	abstract = {Sensors utilize a wide spectrum of transducer and signal transformation approaches with corresponding variations in technical complexity. These range from relatively simple temperature measurement based on a bimetallic thermocouple, to the detection of specific bacteria species using sophisticated optical systems. Within the healthcare, wellness, and environmental domains, there are a variety of sensing approaches, including microelectromechanical systems ({MEMS}), optical, mechanical, electrochemical, semiconductor, and biosensing. As outlined in Chapter 1, the proliferation of sensor-based applications is growing across a range of sensing targets such as air, water, bacteria, movement, and physiology. As with any form of technology, sensors have both strengths and weaknesses. Operational performance may be a function of the transduction method, the deployment environment, or the system components. In this chapter, we review the common sensing mechanisms that are used in the application domains of interest within the scope of this book, along with their respective strengths and weaknesses. Finally, we describe the process of selecting and specifying sensors for an application.},
+	pages = {15--50},
+	booktitle = {Sensor Technologies: Healthcare, Wellness, and Environmental Applications},
+	publisher = {Apress},
+	author = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	editor = {{McGrath}, Michael J. and Scanaill, Cliodhna Ní},
+	urldate = {2025-02-26},
+	date = {2013},
+	langid = {english},
+	doi = {10.1007/978-1-4302-6014-1_2},
+	keywords = {Bulk Acoustic Wave, Electrochemical Sensor, Indium Antimonide, Linear Transfer Function, Smoke Detector},
+}
+
+@article{zyl_sensor_2009-3,
+	title = {The Sensor Web: systems of sensor systems},
+	volume = {2},
+	issn = {1753-8947},
+	url = {https://doi.org/10.1080/17538940802439549},
+	doi = {10.1080/17538940802439549},
+	shorttitle = {The Sensor Web},
+	abstract = {Global Earth Observing System of Systems ({GEOSS}) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the {GEOSS} Sensor Web community, towards achieving {GEOSS} goals.},
+	pages = {16--30},
+	number = {1},
+	journaltitle = {International Journal of Digital Earth},
+	author = {Zyl, T. L. van and Simonis, I. and {McFerren}, G.},
+	urldate = {2025-02-26},
+	date = {2009-03},
+	keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems},
+}
+
+@software{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025-2,
+	title = {{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation},
+	rights = {{BSD}-3-Clause},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation},
+	publisher = {Universal Robots A/S},
+	urldate = {2025-02-24},
+	date = {2025-02},
+}
+
+@misc{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate-2,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177},
+	urldate = {2025-02-21},
+}
+
+@misc{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate-2,
+	title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
+	url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher},
+	urldate = {2025-02-21},
+}
+
+@misc{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025-2,
+	title = {sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library},
+	url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library},
+	publisher = {{SparkFun} Electronics},
+	urldate = {2025-02-21},
+	date = {2025-01},
+}
+
+@misc{noauthor_vlp_nodate-2,
+	title = {{VLP} 16 {\textbar} Ouster},
+	url = {https://ouster.com/products/hardware/vlp-16},
+	abstract = {Mid-range lidar sensor},
+	urldate = {2025-02-20},
+	langid = {english},
+}
+
+@article{niclass_design_2012-2,
+	title = {Design and characterization of a 256x64-pixel single-photon imager in {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor},
+	volume = {20},
+	rights = {© 2012 {OSA}},
+	issn = {1094-4087},
+	url = {https://opg.optica.org/oe/abstract.cfm?uri=oe-20-11-11863},
+	doi = {10.1364/OE.20.011863},
+	abstract = {We introduce an optical time-of-flight image sensor taking advantage of a {MEMS}-based laser scanning device. Unlike previous approaches, our concept benefits from the high timing resolution and the digital signal flexibility of single-photon pixels in {CMOS} to allow for a nearly ideal cooperation between the image sensor and the scanning device. This technique enables a high signal-to-background light ratio to be obtained, while simultaneously relaxing the constraint on size of the {MEMS} mirror. These conditions are critical for devising practical and low-cost depth sensors intended to operate in uncontrolled environments, such as outdoors. A proof-of-concept prototype capable of operating in real-time was implemented. This paper focuses on the design and characterization of a 256x64-pixel image sensor, which also comprises an event-driven readout circuit, an array of 64 row-level high-throughput time-to-digital converters, and a 16Gbit/s global readout circuit. Quantitative evaluation of the sensor under 2klux of background light revealed a repeatability error of 13.5cm throughout the distance range of 20 meters.},
+	pages = {11863--11881},
+	number = {11},
+	journaltitle = {Optics Express},
+	author = {Niclass, Cristiano and Ito, Kota and Soga, Mineki and Matsubara, Hiroyuki and Aoyagi, Isao and Kato, Satoru and Kagami, Manabu},
+	urldate = {2025-02-20},
+	date = {2012-05},
+	keywords = {Deformable mirrors, Diode lasers, Image sensors, Light emitting diodes, Optical systems, Systems design},
+}
+
+@article{surmann_autonomous_2003-2,
+	title = {An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of indoor environments},
+	volume = {45},
+	issn = {0921-8890},
+	url = {https://www.sciencedirect.com/science/article/pii/S0921889003001556},
+	doi = {10.1016/j.robot.2003.09.004},
+	abstract = {Digital 3D models of the environment are needed in rescue and inspection robotics, facility managements and architecture. This paper presents an automatic system for gaging and digitalization of 3D indoor environments. It consists of an autonomous mobile robot, a reliable 3D laser range finder and three elaborated software modules. The first module, a fast variant of the Iterative Closest Points algorithm, registers the 3D scans in a common coordinate system and relocalizes the robot. The second module, a next best view planner, computes the next nominal pose based on the acquired 3D data while avoiding complicated obstacles. The third module, a closed-loop and globally stable motor controller, navigates the mobile robot to a nominal pose on the base of odometry and avoids collisions with dynamical obstacles. The 3D laser range finder acquires a 3D scan at this pose. The proposed method allows one to digitalize large indoor environments fast and reliably without any intervention and solves the {SLAM} problem. The results of two 3D digitalization experiments are presented using a fast octree-based visualization method.},
+	pages = {181--198},
+	number = {3},
+	journaltitle = {Robotics and Autonomous Systems},
+	author = {Surmann, Hartmut and Nüchter, Andreas and Hertzberg, Joachim},
+	urldate = {2025-02-20},
+	date = {2003-12},
+	keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, {SLAM}},
+}
+
+@article{raj_survey_2020-2,
+	title = {A Survey on {LiDAR} Scanning Mechanisms},
+	volume = {9},
+	rights = {http://creativecommons.org/licenses/by/3.0/},
+	issn = {2079-9292},
+	url = {https://www.mdpi.com/2079-9292/9/5/741},
+	doi = {10.3390/electronics9050741},
+	abstract = {In recent years, light detection and ranging ({LiDAR}) technology has gained huge popularity in various applications such as navigation, robotics, remote sensing, and advanced driving assistance systems ({ADAS}). This popularity is mainly due to the improvements in {LiDAR} performance in terms of range detection, accuracy, power consumption, as well as physical features such as dimension and weight. Although a number of literatures on {LiDAR} technology have been published earlier, not many has been reported on the state-of-the-art {LiDAR} scanning mechanisms. The aim of this article is to review the scanning mechanisms employed in {LiDAR} technology from past research works to the current commercial products. The review highlights four commonly used mechanisms in {LiDAR} systems: Opto-mechanical, electromechanical, micro-electromechanical systems ({MEMS}), and solid-state scanning. The study reveals that electro-mechanical scanning is the most prominent technology in use today. The commercially available 1D time of flight ({TOF}) {LiDAR} instrument is currently the most attractive option for conversion from 1D to 3D {LiDAR} system, provided that low scanning rate is not an issue. As for applications with low size, weight, and power ({SWaP}) requirements, {MEMS} scanning is found to be the better alternative. {MEMS} scanning is by far the more matured technology compared to solid-state scanning and is currently given great emphasis to increase its robustness for fulfilling the requirements of {ADAS} applications. Finally, solid-state {LiDAR} systems are expected to fill in the gap in {ADAS} applications despite the low technology readiness in comparison to {MEMS} scanners. However, since solid-state scanning is believed to have superior robustness, field of view ({FOV}), and scanning rate potential, great efforts are given by both academics and industries to further develop this technology.},
+	pages = {741},
+	number = {5},
+	journaltitle = {Electronics},
+	author = {Raj, Thinal and Hashim, Fazida Hanim and Huddin, Aqilah Baseri and Ibrahim, Mohd Faisal and Hussain, Aini},
+	urldate = {2025-02-20},
+	date = {2020-05},
+	langid = {english},
+	keywords = {electro-mechanical scanning, {LiDAR}, {MEMS} scanning, opto-mechanical scanning, solid-state {LiDAR}},
+}
+
+@misc{noauthor_file20200501_2020-2,
+	title = {File:20200501 Time of flight.svg - Wikipedia},
+	url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg},
+	shorttitle = {File},
+	urldate = {2025-02-20},
+	date = {2020-05},
+	langid = {english},
+}
+
+@article{jain_survey_nodate-2,
+	title = {A survey of Laser Range Finding},
+	url = {http://www.siddjain.com/ee236a.pdf},
+	abstract = {This report provides an informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.},
+	author = {Jain, Siddharth},
+	urldate = {2025-02-19},
+	langid = {english},
+}
+
+@inproceedings{rashid_local_2020-2,
+	title = {Local and Global Sensors for Collision Avoidance},
+	url = {https://ieeexplore.ieee.org/document/9235223},
+	doi = {10.1109/MFI49285.2020.9235223},
+	abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° {LiDAR} and a small range {RGB} camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. Results suggest higher efficiency and safety when using local and global setup.},
+	pages = {354--359},
+	booktitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})},
+	author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias},
+	urldate = {2025-02-19},
+	date = {2020-09},
+	keywords = {Cameras, Laser radar, Production, Robot vision systems, Safety, Sensor fusion, Service robots},
+}
+
+@inproceedings{al_naser_fusion_2022-2,
+	title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment},
+	url = {https://ieeexplore.ieee.org/document/9900548},
+	doi = {10.1109/RO-MAN53752.2022.9900548},
+	abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.},
+	pages = {532--537},
+	booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})},
+	author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen},
+	urldate = {2025-02-19},
+	date = {2022-08},
+	keywords = {Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Service robots, Stability criteria, Thermal sensors},
+}
+
+@inproceedings{choi_xr-based_2022-2,
+	title = {An {XR}-based Approach to Safe Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9757621},
+	doi = {10.1109/VRW55335.2022.00106},
+	abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.},
+	pages = {481--482},
+	booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})},
+	author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin},
+	urldate = {2025-02-19},
+	date = {2022-03},
+	keywords = {Collaboration, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), Real-time systems, Robot sensing systems, Safety, safety distance, Service robots, Three-dimensional displays},
+}
+
+@inproceedings{amaya-mejia_vision-based_2022-2,
+	title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration},
+	url = {https://ieeexplore.ieee.org/document/9981689},
+	doi = {10.1109/IROS47612.2022.9981689},
+	abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.},
+	pages = {7331--7336},
+	booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
+	author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol},
+	urldate = {2025-02-19},
+	date = {2022-10},
+	keywords = {Collaboration, Collision avoidance, Robot control, Safety, Service robots, Solid modeling, Three-dimensional displays},
+}
+
+@article{li_safe_2024-2,
+	title = {Safe human–robot collaboration for industrial settings: a survey},
+	volume = {35},
+	issn = {1572-8145},
+	url = {https://doi.org/10.1007/s10845-023-02159-4},
+	doi = {10.1007/s10845-023-02159-4},
+	shorttitle = {Safe human–robot collaboration for industrial settings},
+	abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.},
+	pages = {2235--2261},
+	number = {5},
+	journaltitle = {Journal of Intelligent Manufacturing},
+	author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong},
+	urldate = {2025-02-19},
+	date = {2024-06},
+	langid = {english},
+	keywords = {Collaborative robots, Collision detection, Human–robot collaboration ({HRC}), Obstacle avoidance, Safety},
+}
+
+@misc{noauthor_can_nodate-2,
+	title = {Can the collaborative robot market experience a second growth surge in the post-pandemic era?},
+	url = {https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/},
+	abstract = {The market for collaborative robots is forecast to show strong growth over the coming years, with rising sales in industrial and non-industrial sectors.},
+	urldate = {2025-02-19},
+	langid = {british},
+	note = {Publication Title: Interact Analysis},
+}
+
+@article{nath_review_2022-2,
+	title = {A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector},
+	volume = {7},
+	rights = {Copyright (c) 2022 Aditya S. Nath},
+	issn = {2736-576X},
+	url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624},
+	doi = {10.24018/ejeng.2022.7.1.2624},
+	abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling ({BIM}) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D {BIM} and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate {BIM} and sensor technology with tools such as {GIS}. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.},
+	pages = {85--89},
+	number = {1},
+	journaltitle = {European Journal of Engineering and Technology Research},
+	author = {Nath, Aditya S.},
+	urldate = {2025-02-19},
+	date = {2022-02},
+	langid = {english},
+	keywords = {Sensors},
+}
+
+@article{maheepala_low_2021-2,
+	title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review},
+	volume = {21},
+	issn = {1558-1748},
+	url = {https://ieeexplore.ieee.org/abstract/document/9165781},
+	doi = {10.1109/JSEN.2020.3015932},
+	shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices},
+	abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.},
+	pages = {1172--1186},
+	number = {2},
+	journaltitle = {{IEEE} Sensors Journal},
+	author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.},
+	urldate = {2025-02-19},
+	date = {2021-01},
+	keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers},
+}
+
+@article{liu_application_2024-2,
+	title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review},
+	volume = {40},
+	issn = {1044-7318},
+	url = {https://doi.org/10.1080/10447318.2022.2041907},
+	doi = {10.1080/10447318.2022.2041907},
+	shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing},
+	abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.},
+	pages = {915--932},
+	number = {4},
+	journaltitle = {International Journal of Human–Computer Interaction},
+	author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.},
+	urldate = {2025-02-19},
+	date = {2024-02},
+}
+
+@inproceedings{popov_collision_2017-2,
+	location = {Lisbon},
+	title = {Collision detection, localization \& classification for industrial robots with joint torque sensors},
+	isbn = {978-1-5386-3518-6},
+	url = {http://ieeexplore.ieee.org/document/8172400/},
+	doi = {10.1109/ROMAN.2017.8172400},
+	abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.},
+	pages = {838--843},
+	booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
+	publisher = {{IEEE}},
+	author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos},
+	urldate = {2025-02-19},
+	date = {2017-08},
+	langid = {english},
+}
+
+@misc{noauthor_robotics_2021-2,
+	title = {Robotics - Vocabulary},
+	url = {https://www.dinmedia.de/de/norm/iso-8373/348036781},
+	shorttitle = {{ISO} 8373:2021-11},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	date = {2021-11},
+}
+
+@misc{noauthor_din_nodate-10,
+	title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
+	url = {https://www.dinmedia.de/de/-/-/341406648},
+	shorttitle = {{DIN} {EN} {ISO} 10218-1},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	langid = {german},
+	doi = {10.31030/3272912},
+}
+
+@misc{noauthor_din_nodate-11,
+	title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
+	url = {https://www.dinmedia.de/de/-/-/331246964},
+	shorttitle = {{DIN} {EN} {ISO} 10218-2},
+	publisher = {{DIN} Media {GmbH}},
+	urldate = {2025-02-19},
+	doi = {10.31030/3215258},
+}
+
+@article{li_common_2019-5,
+	title = {Common Sensors in Industrial Robots: A Review},
+	volume = {1267},
+	issn = {1742-6588, 1742-6596},
+	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
+	doi = {10.1088/1742-6596/1267/1/012036},
+	shorttitle = {Common Sensors in Industrial Robots},
+	abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	pages = {012036},
+	number = {1},
+	journaltitle = {Journal of Physics: Conference Series},
+	author = {Li, Peng and Liu, Xiangpeng},
+	urldate = {2025-02-18},
+	date = {2019-07},
+}
+
+@incollection{haddadin_physical_2016,
+	location = {Cham},
+	title = {Physical {Human}–{Robot} Interaction},
+	isbn = {978-3-319-32552-1},
+	url = {https://doi.org/10.1007/978-3-319-32552-1_69},
+	abstract = {Over the last two decades, the foundations for physical human–robot interaction ({pHRI}) have evolved from successful developments in mechatronics, control, and planning, leading toward safer lightweight robot designs and interaction control schemes that advance beyond the current capacities of existing high-payload and high-precision position-controlled industrial robots. Based on their ability to sense physical interaction, render compliant behavior along the robot structure, plan motions that respect human preferences, and generate interaction plans for collaboration and coaction with humans, these novel robots have opened up novel and unforeseen application domains, and have advanced the field of human safety in robotics.},
+	pages = {1835--1874},
+	booktitle = {Springer Handbook of Robotics},
+	publisher = {Springer International Publishing},
+	author = {Haddadin, Sami and Croft, Elizabeth},
+	editor = {Siciliano, Bruno and Khatib, Oussama},
+	urldate = {2025-04-17},
+	date = {2016},
+	langid = {english},
+	doi = {10.1007/978-3-319-32552-1_69},
+	keywords = {Collision Detection, Contact Force, Impedance Control, Industrial Robot, Joint Torque},
+}
diff --git a/Bachelorarbeit/V2/Code/Final_18_vl53l7cx_clean_both_I2C_with_STMlibrary_PCF8575.ino b/Bachelorarbeit/V2/Code/Final_18_vl53l7cx_clean_both_I2C_with_STMlibrary_PCF8575.ino
new file mode 100644
index 0000000000000000000000000000000000000000..3c5c72edc1e194b1582594c81aadbb4d793e39ef
--- /dev/null
+++ b/Bachelorarbeit/V2/Code/Final_18_vl53l7cx_clean_both_I2C_with_STMlibrary_PCF8575.ino
@@ -0,0 +1,342 @@
+/**
+ ******************************************************************************
+ * @file    VL53L7CX_Sat_HelloWorld.ino
+ * @author  STMicroelectronics
+ * @version V1.0.0
+ * @date    11 November 2021
+ * @brief   Arduino test application for the STMicrolectronics VL53L7CX
+ *          proximity sensor satellite based on FlightSense.
+ *          This application makes use of C++ classes obtained from the C
+ *          components' drivers.
+ ******************************************************************************
+ * @attention
+ *
+ * <h2><center>&copy; COPYRIGHT(c) 2021 STMicroelectronics</center></h2>
+ *
+ * Redistribution and use in source and binary forms, with or without modification,
+ * are permitted provided that the following conditions are met:
+ *   1. Redistributions of source code must retain the above copyright notice,
+ *      this list of conditions and the following disclaimer.
+ *   2. Redistributions in binary form must reproduce the above copyright notice,
+ *      this list of conditions and the following disclaimer in the documentation
+ *      and/or other materials provided with the distribution.
+ *   3. Neither the name of STMicroelectronics nor the names of its contributors
+ *      may be used to endorse or promote products derived from this software
+ *      without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ ******************************************************************************
+ */
+#include <Arduino.h>
+#include <Wire.h>
+#include <ArduinoJson.h>
+#include <vl53l7cx_class.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <stdint.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <PCF8575.h>
+#include <cmath>
+
+// I2C bus aliases: nine sensors hang off each of the two RP2040 I2C buses.
+#define DEV_I2C1 Wire1
+#define DEV_I2C0 Wire
+
+#define LedPin LED_BUILTIN
+
+// The VL53L7CX driver requires an I2C-RST pin number; no reset line is
+// wired, so a dummy (unused) pin id is passed instead.
+#define FAKE_I2C_RST 99
+
+// PCF8575 port numbers that drive the LPn (enable) line of each sensor.
+#define LPN_PIN0 0
+#define LPN_PIN1 1
+#define LPN_PIN2 2
+#define LPN_PIN3 3
+#define LPN_PIN4 4
+#define LPN_PIN5 5
+#define LPN_PIN6 6
+#define LPN_PIN7 7
+#define LPN_PIN8 8
+
+// Unique I2C addresses assigned to the sensors during initialization
+// (the same nine addresses are reused on each of the two buses).
+uint16_t sensoraddress0 = 0x30;
+uint16_t sensoraddress1 = 0x31;
+uint16_t sensoraddress2 = 0x32;
+uint16_t sensoraddress3 = 0x33;
+uint16_t sensoraddress4 = 0x34;
+uint16_t sensoraddress5 = 0x35;
+uint16_t sensoraddress6 = 0x36;
+uint16_t sensoraddress7 = 0x37;
+uint16_t sensoraddress8 = 0x38;
+
+uint16_t wait_for_i2c = 50;       // generic settle delay in ms
+uint16_t imageWidth = 8;          // sensor zone grid is 8x8
+uint16_t ranging_frequency = 15;  // ranging rate in Hz
+uint32_t i2c_freq_hz = 1000000;
+uint32_t baudrate = 1000000;
+pin_size_t SDA_PIN0 = 4;
+pin_size_t SCL_PIN0 = 5;
+pin_size_t SDA_PIN1 = 26;
+pin_size_t SCL_PIN1 = 27;
+int sensor_count = 0;
+// Per-LPn-pin init status (global/static). LPn pins run 0..8, i.e. NINE
+// slots are needed: a size of 8 would make index LPN_PIN8 out of bounds.
+bool sensorInitializedI2C1[9] = { false };  // sensors on Wire1
+bool sensorInitializedI2C0[9] = { false };  // sensors on Wire
+
+//  adjust addresses if needed
+PCF8575 PCF1(0x21, &DEV_I2C1);  //  expander for the Wire1 sensor bank
+PCF8575 PCF0(0x20, &DEV_I2C0);  //  expander for the Wire sensor bank
+
+JsonDocument doc;  // global JSON document refilled on every loop() pass
+
+// Components: sensors 0-8 sit on Wire1, sensors 9-17 on Wire.
+VL53L7CX sensor0(&DEV_I2C1, LPN_PIN0, FAKE_I2C_RST);
+VL53L7CX sensor1(&DEV_I2C1, LPN_PIN1, FAKE_I2C_RST);
+VL53L7CX sensor2(&DEV_I2C1, LPN_PIN2, FAKE_I2C_RST);
+VL53L7CX sensor3(&DEV_I2C1, LPN_PIN3, FAKE_I2C_RST);
+VL53L7CX sensor4(&DEV_I2C1, LPN_PIN4, FAKE_I2C_RST);
+VL53L7CX sensor5(&DEV_I2C1, LPN_PIN5, FAKE_I2C_RST);
+VL53L7CX sensor6(&DEV_I2C1, LPN_PIN6, FAKE_I2C_RST);
+VL53L7CX sensor7(&DEV_I2C1, LPN_PIN7, FAKE_I2C_RST);
+VL53L7CX sensor8(&DEV_I2C1, LPN_PIN8, FAKE_I2C_RST);
+VL53L7CX sensor9(&DEV_I2C0, LPN_PIN0, FAKE_I2C_RST);
+VL53L7CX sensor10(&DEV_I2C0, LPN_PIN1, FAKE_I2C_RST);
+VL53L7CX sensor11(&DEV_I2C0, LPN_PIN2, FAKE_I2C_RST);
+VL53L7CX sensor12(&DEV_I2C0, LPN_PIN3, FAKE_I2C_RST);
+VL53L7CX sensor13(&DEV_I2C0, LPN_PIN4, FAKE_I2C_RST);
+VL53L7CX sensor14(&DEV_I2C0, LPN_PIN5, FAKE_I2C_RST);
+VL53L7CX sensor15(&DEV_I2C0, LPN_PIN6, FAKE_I2C_RST);
+VL53L7CX sensor16(&DEV_I2C0, LPN_PIN7, FAKE_I2C_RST);
+VL53L7CX sensor17(&DEV_I2C0, LPN_PIN8, FAKE_I2C_RST);
+
+void blink_led_loop(void);
+
+// Blink the on-board LED forever to signal an unrecoverable error.
+// Fix: the original only delayed after driving the pin HIGH, so the LED was
+// switched back on immediately after LOW and appeared permanently lit
+// instead of blinking.
+void blink_led_loop(void) {
+  do {
+    digitalWrite(LedPin, HIGH);
+    delay(wait_for_i2c);
+    digitalWrite(LedPin, LOW);
+    delay(wait_for_i2c);  // keep the LED off equally long so it visibly blinks
+  } while (1);
+}
+
+/* Setup ---------------------------------------------------------------------*/
+
+// One-time board bring-up: serial, both I2C buses, both PCF8575 expanders,
+// then sequential initialization of the nine sensors on each bus.
+void setup() {
+  delay(3000);  // give sensors and the USB serial host time to come up
+
+  // Led.
+  pinMode(LedPin, OUTPUT);
+  digitalWrite(LedPin, HIGH);
+
+  // Initialize serial for output.
+  Serial.begin(baudrate);
+  Serial.println();
+  Serial.println("Please wait, it may take few seconds...");
+
+  // Initialize I2C bus 0. The pins are now set explicitly (SDA_PIN0/SCL_PIN0
+  // were previously declared but unused, silently relying on the core's
+  // default pin mapping for Wire), mirroring the bus-1 setup below.
+  DEV_I2C0.setSDA(SDA_PIN0);
+  DEV_I2C0.setSCL(SCL_PIN0);
+  DEV_I2C0.begin();
+  DEV_I2C0.setClock(i2c_freq_hz);
+
+  // Initialize I2C bus 1.
+  DEV_I2C1.setSCL(SCL_PIN1);
+  DEV_I2C1.setSDA(SDA_PIN1);
+  DEV_I2C1.begin();
+  DEV_I2C1.setClock(i2c_freq_hz);
+
+  setupPCF(PCF0, DEV_I2C0, 0);
+  setupPCF(PCF1, DEV_I2C1, 1);
+
+  // Initialize and configure the sensors on bus 0 (sensors 9-17).
+  initializeSensor(sensor9, sensoraddress0, LPN_PIN0, PCF0, "PCF0");
+  initializeSensor(sensor10, sensoraddress1, LPN_PIN1, PCF0, "PCF0");
+  initializeSensor(sensor11, sensoraddress2, LPN_PIN2, PCF0, "PCF0");
+  initializeSensor(sensor12, sensoraddress3, LPN_PIN3, PCF0, "PCF0");
+  initializeSensor(sensor13, sensoraddress4, LPN_PIN4, PCF0, "PCF0");
+  initializeSensor(sensor14, sensoraddress5, LPN_PIN5, PCF0, "PCF0");
+  initializeSensor(sensor15, sensoraddress6, LPN_PIN6, PCF0, "PCF0");
+  initializeSensor(sensor16, sensoraddress7, LPN_PIN7, PCF0, "PCF0");
+  initializeSensor(sensor17, sensoraddress8, LPN_PIN8, PCF0, "PCF0");
+
+  Serial.println("Initialized " + String(sensor_count) + " Sensors");
+
+  // Initialize and configure the sensors on bus 1 (sensors 0-8).
+  initializeSensor(sensor0, sensoraddress0, LPN_PIN0, PCF1, "PCF1");
+  initializeSensor(sensor1, sensoraddress1, LPN_PIN1, PCF1, "PCF1");
+  initializeSensor(sensor2, sensoraddress2, LPN_PIN2, PCF1, "PCF1");
+  initializeSensor(sensor3, sensoraddress3, LPN_PIN3, PCF1, "PCF1");
+  initializeSensor(sensor4, sensoraddress4, LPN_PIN4, PCF1, "PCF1");
+  initializeSensor(sensor5, sensoraddress5, LPN_PIN5, PCF1, "PCF1");
+  initializeSensor(sensor6, sensoraddress6, LPN_PIN6, PCF1, "PCF1");
+  initializeSensor(sensor7, sensoraddress7, LPN_PIN7, PCF1, "PCF1");
+  initializeSensor(sensor8, sensoraddress8, LPN_PIN8, PCF1, "PCF1");
+
+  Serial.println("Initialized " + String(sensor_count) + " Sensors");
+}
+
+// Main loop: read one ranging frame from every initialized sensor, collect
+// all frames into the global JSON document, then stream it as one line over
+// Serial.
+void loop() {
+  // Declare the result data variables for each sensor
+  VL53L7CX_ResultsData Results0;
+  VL53L7CX_ResultsData Results1;
+  VL53L7CX_ResultsData Results2;
+  VL53L7CX_ResultsData Results3;
+  VL53L7CX_ResultsData Results4;
+  VL53L7CX_ResultsData Results5;
+  VL53L7CX_ResultsData Results6;
+  VL53L7CX_ResultsData Results7;
+  VL53L7CX_ResultsData Results8;
+  VL53L7CX_ResultsData Results9;
+  VL53L7CX_ResultsData Results10;
+  VL53L7CX_ResultsData Results11;
+  VL53L7CX_ResultsData Results12;
+  VL53L7CX_ResultsData Results13;
+  VL53L7CX_ResultsData Results14;
+  VL53L7CX_ResultsData Results15;
+  VL53L7CX_ResultsData Results16;
+  VL53L7CX_ResultsData Results17;
+
+  // Process each sensor data and save to respective JSON arrays
+  processSensorData(sensor0, Results0, "sensor0", LPN_PIN0);
+  processSensorData(sensor1, Results1, "sensor1", LPN_PIN1);
+  processSensorData(sensor2, Results2, "sensor2", LPN_PIN2);
+  processSensorData(sensor3, Results3, "sensor3", LPN_PIN3);
+  processSensorData(sensor4, Results4, "sensor4", LPN_PIN4);
+  processSensorData(sensor5, Results5, "sensor5", LPN_PIN5);
+  processSensorData(sensor6, Results6, "sensor6", LPN_PIN6);
+  processSensorData(sensor7, Results7, "sensor7", LPN_PIN7);
+  processSensorData(sensor8, Results8, "sensor8", LPN_PIN8);
+  processSensorData(sensor9, Results9, "sensor9", LPN_PIN0);
+  processSensorData(sensor10, Results10, "sensor10", LPN_PIN1);
+  processSensorData(sensor11, Results11, "sensor11", LPN_PIN2);
+  processSensorData(sensor12, Results12, "sensor12", LPN_PIN3);
+  processSensorData(sensor13, Results13, "sensor13", LPN_PIN4);
+  processSensorData(sensor14, Results14, "sensor14", LPN_PIN5);
+  processSensorData(sensor15, Results15, "sensor15", LPN_PIN6);
+  // Fix: sensor16 was previously reported under the key "sensor17" with the
+  // wrong LPn pin, and sensor17 was never read at all.
+  processSensorData(sensor16, Results16, "sensor16", LPN_PIN7);
+  processSensorData(sensor17, Results17, "sensor17", LPN_PIN8);
+
+  // Serialize the JSON document and print to Serial
+  serializeJson(doc, Serial);
+  Serial.println();
+}
+
+// Probe all 7-bit addresses on the given I2C bus and print every device
+// that acknowledges. Fix: the bus is now taken by reference — TwoWire wraps
+// a hardware peripheral and must not be passed by value, since the probe has
+// to act on the real bus object rather than a copy.
+void i2cScanner(TwoWire &DEV_I2C) {
+  for (byte address = 1; address < 127; address++) {
+    DEV_I2C.beginTransmission(address);
+    if (DEV_I2C.endTransmission() == 0) {  // 0 == ACK received
+      Serial.print("Device found at address 0x");
+      if (address < 0x10)
+        Serial.print("0");  // zero-pad single-digit hex values
+      Serial.println(address, HEX);
+    }
+  }
+}
+
+// Bring one VL53L7CX online: raise its LPn line via the PCF8575 expander so
+// it answers on the default address, move it to its unique I2C address,
+// verify it is alive, then configure 8x8 resolution / 15 Hz ranging and
+// start measuring. On any failure the error is logged, all LPn lines are
+// driven low again and the function returns without marking the sensor.
+// NOTE(review): the success path always sets sensorInitializedI2C1[], even
+// for sensors on the I2C0 bus; sensorInitializedI2C0[] is never written, so
+// a failed I2C0 sensor can be masked by its I2C1 counterpart — verify.
+// NOTE(review): lpnPin can be LPN_PIN8 (= 8); ensure the bookkeeping arrays
+// hold at least nine entries, otherwise this indexing is out of bounds.
+void initializeSensor(VL53L7CX &sensor, uint16_t sensorAddress, int lpnPin, PCF8575 &PCF, String PCF_Name) {
+  uint8_t status = VL53L7CX_STATUS_OK;
+  uint8_t isAlive = 0;
+
+  // Enable only this sensor's LPn line; with a single bit set on the port,
+  // log2 of the read-back value recovers the pin index for logging.
+  PCF.write(lpnPin, HIGH);
+  double x = PCF.read16();
+  x = log2(x);
+  Serial.println("Starting to initialize Sensor " + String((int)round(x)) + " on the " + PCF_Name + " half.");
+
+  // Move the sensor off the default address. The address is shifted left
+  // once — NOTE(review): presumably the driver expects the 8-bit address
+  // form; confirm against the VL53L7CX ULD documentation.
+  status = sensor.vl53l7cx_set_i2c_address(sensorAddress << 1);
+  if (status != VL53L7CX_STATUS_OK) {
+    Serial.print("Failed to initialize Sensor " + String((int)round(x)) + " and received I2C Error:");
+    Serial.println(VL53L7CX_STATUS_ERROR);
+    PCF.write16(0x0000);
+    return;
+  }
+
+  // Confirm the sensor responds on its new address.
+  status = sensor.vl53l7cx_is_alive(&isAlive);
+  if (!isAlive || status != VL53L7CX_STATUS_OK) {
+    Serial.print("Failed to initialize Sensor " + String((int)round(x)) + " and received is_alive Error:");
+    Serial.println(VL53L7CX_STATUS_ERROR);
+      PCF.write16(0x0000);
+    return;
+
+  }
+
+  // Init VL53L7CX sensor
+  status = sensor.vl53l7cx_init();
+  if (status != VL53L7CX_STATUS_OK) {
+    Serial.print("Failed to initialize Sensor " + String((int)round(x)) + " and received init Error:");
+    Serial.println(VL53L7CX_STATUS_ERROR);
+    PCF.write16(0x0000);
+    return;
+  }
+
+  delay(wait_for_i2c);
+
+  // Set resolution and frequency
+  sensor.vl53l7cx_set_resolution(VL53L7CX_RESOLUTION_8X8);
+  delay(wait_for_i2c);
+
+  sensor.vl53l7cx_set_ranging_frequency_hz(ranging_frequency);
+  delay(wait_for_i2c);
+
+  // Start ranging
+  sensor.vl53l7cx_start_ranging();
+  delay(wait_for_i2c);
+
+  // Record success and count the sensor.
+  sensorInitializedI2C1[lpnPin] = true;
+  sensor_count++;
+
+  // NOTE(review): this drives every LPn line low again (the old comment said
+  // "Activate sensor power"); sensors keep answering on their new addresses
+  // afterwards — confirm LPn only gates the default-address phase.
+  PCF.write16(0x0000);
+}
+
+// Read one ranging frame from a sensor and append it to the global JSON
+// document under sensorKey as an 8x8 list of [distance_mm, target_status]
+// pairs. Sensors that failed initialization are skipped.
+// NOTE(review): only the I2C1 bookkeeping array is consulted, for sensors on
+// either bus — consistent with initializeSensor, but verify the intent.
+void processSensorData(VL53L7CX &sensor, VL53L7CX_ResultsData &results, const char *sensorKey, int lpn_pin) {
+  if (!sensorInitializedI2C1[lpn_pin]) return;
+  uint8_t NewDataReady = 0;
+  uint8_t status;
+
+  // Wait for data to be ready, but bail out on an I2C error or after 1 s:
+  // the original loop ignored the status and could spin forever if a sensor
+  // stopped responding (e.g. a cable fault) after initialization.
+  uint32_t wait_start_ms = millis();
+  do {
+    status = sensor.vl53l7cx_check_data_ready(&NewDataReady);
+  } while (!NewDataReady && (status == VL53L7CX_STATUS_OK) && (millis() - wait_start_ms < 1000));
+
+  // Turn LED on to indicate data processing
+  digitalWrite(LedPin, HIGH);
+
+  if ((!status) && (NewDataReady != 0)) {
+    status = sensor.vl53l7cx_get_ranging_data(&results);
+
+    // (Re)create this sensor's array in the shared document.
+    JsonArray sensorData = doc[sensorKey].to<JsonArray>();
+
+    // Walk the zone grid row by row from the last row to the first.
+    // NOTE(review): assumes this ordering matches the physical mounting
+    // orientation of the sensors — confirm against the hardware layout.
+    for (int y = imageWidth * (imageWidth - 1); y >= 0; y -= imageWidth) {
+      for (int x = 0; x <= imageWidth - 1; x++) {
+        int index = VL53L7CX_NB_TARGET_PER_ZONE * (x + y);
+
+        // Each zone is emitted as a two-element array via add<JsonArray>().
+        JsonArray measurement = sensorData.add<JsonArray>();
+        measurement.add(results.distance_mm[index]);
+        measurement.add(results.target_status[index]);
+      }
+    }
+  }
+
+  digitalWrite(LedPin, LOW);
+}
+
+// Initialize one PCF8575 port expander and pulse all 16 outputs high/low to
+// put every attached LPn line into a known state. Retries until the expander
+// answers, dumping the bus contents on each failure to aid diagnosis.
+// Fix: the bus is now taken by reference — TwoWire wraps a hardware
+// peripheral and must not be passed by value, otherwise the scanner probes
+// a copy instead of the real bus object.
+void setupPCF(PCF8575 &PCF, TwoWire &DEV_I2C, int num) {
+  if (!PCF.begin(0x0000)) {
+    Serial.println("could not initialize...");
+  }
+  // Block until the expander acknowledges on the bus.
+  while (!PCF.isConnected()) {
+    i2cScanner(DEV_I2C);
+    delay(500);
+    PCF.begin(0x0000);
+    int x = PCF.lastError();
+    Serial.println("Error from I2C " + String(num) + " and Fehlercode: " + String(x));
+  }
+
+  // Pulse all outputs high, then low, so every LPn line starts deasserted.
+  PCF.write16(0xFFFF);
+  delay(wait_for_i2c);
+  PCF.write16(0x0000);
+}
diff --git a/Bachelorarbeit/V2/Code_settings.tex b/Bachelorarbeit/V2/Code_settings.tex
new file mode 100644
index 0000000000000000000000000000000000000000..c503665637edd5eb39785dd2ccc622997475ca36
--- /dev/null
+++ b/Bachelorarbeit/V2/Code_settings.tex
@@ -0,0 +1,109 @@
+\definecolor{arduinoGreen}    {rgb} {0.17, 0.43, 0.01}
+\definecolor{arduinoGrey}     {rgb} {0.47, 0.47, 0.33}
+\definecolor{arduinoOrange}   {rgb} {0.8 , 0.4 , 0   }
+\definecolor{arduinoBlue}     {rgb} {0.01, 0.61, 0.98}
+\definecolor{arduinoDarkBlue} {rgb} {0.0 , 0.2 , 0.5 }
+\lstset{
+	language=C++,                   % Programmiersprache
+	basicstyle=\ttfamily\small,     % Schriftart und -größe
+	keywordstyle=\color{arduinoGreen},      % Schlüsselwörter in hellgrün
+    commentstyle=\color{gray},	
+    stringstyle=\color{arduinoDarkBlue},
+	numbers=left,                   % Zeilennummern links anzeigen
+	numberstyle=\small,              % Zeilennummern in kleiner Schrift
+	stepnumber=1,                   % Jede Zeile nummerieren
+	numbersep=10pt,                 % Abstand der Nummerierung vom Code
+	backgroundcolor=\color{gray!10},% Hintergrundfarbe
+	frame=single,                   % Rahmen um den Code
+	breaklines=true,                % Automatischer Zeilenumbruch
+	captionpos=b,                   % Beschriftung unterhalb des Codes
+	tabsize=2,                      % Breite von Tabs
+	showstringspaces=false,         % Leerzeichen in Strings nicht anzeigen
+    morekeywords={
+        uint16_t,
+        uint32_t,
+        pin_size_t,
+    }
+}
+\lstdefinestyle{customcpp}{
+	keywordstyle=[2]\color{arduinoOrange}\bfseries,        % Style for custom keywords
+    keywordstyle=[3]\color{arduinoBlue},
+    keywordstyle=[4]\color{arduinoDarkBlue},
+    deletekeywords = {
+        double,
+        int,
+        bool,
+        void,
+    },
+	morekeywords=[2]{
+		digitalWrite,
+		LPn,
+		delay,
+		sensor,
+		begin,
+		init_sensor,
+		vl53l7cx_set_i2c_address,
+		vl53l7cx_set_resolution,
+		vl53l7cx_set_ranging_frequency_hz,
+		vl53l7cx_start_ranging,
+        vl53l7cx_init,
+        sensorInitializedI2C1,
+        sensorInitializedI2C0,
+        PCF1,
+        PCF0,
+        sensor0, sensor1, sensor2, sensor3, sensor4, sensor5, sensor6, sensor7, sensor8, sensor9, sensor10, sensor11 ,sensor12, sensor13, sensor14, sensor15, sensor16, sensor17,
+        blink_led_loop,
+        pinMode,
+        DEV_I2C0,
+        DEV_I2C1,
+        setupPCF,
+        initializeSensor,
+        println,
+        processSensorData,
+        serializeJson,
+        i2cScanner,
+        DEV_I2C.beginTransmission,
+        DEV_I2C.endTransmission,
+        Serial.print,
+        write,
+        write16,
+        PCF,
+        Serial,
+        print,
+        vl53l7cx_is_alive,
+        read16,
+        read,
+        lastError,
+        isConnected,
+        setClock,
+        setSCL,
+        setSDA,
+        String,
+        beginTransmission,
+        endTransmission,
+        DEV_I2C,
+	},
+    morekeywords=[3]{
+        uint8_t,
+        uint16_t,
+        uint32_t,
+        pin_size_t,
+        void,
+        int,
+        double,
+        string,
+        char,
+    },
+    morekeywords=[4]{
+        1,
+        2,
+        3,
+        4,
+        5,
+        6,
+        7,
+        8,
+        9,
+        0,
+    },
+}
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/Exported Items.bib b/Bachelorarbeit/V2/Exported Items.bib
new file mode 100644
index 0000000000000000000000000000000000000000..db08d82142f6ecd27bcfb6bba602aea29b287a91
--- /dev/null
+++ b/Bachelorarbeit/V2/Exported Items.bib	
@@ -0,0 +1,8 @@
+
+@misc{UNR-15,
+	title = {About {Universal} {Robots} - robotics company},
+	url = {https://www.universal-robots.com/media/50889/ur10_de.pdf},
+	urldate = {2025-05-23},
+	file = {About Universal Robots - robotics company:/home/sochi/Zotero/storage/F9GWBPCE/about-us.html:text/html},
+    year = {2015},
+}
diff --git a/Bachelorarbeit/V2/Kapitel/Abstract.tex b/Bachelorarbeit/V2/Kapitel/Abstract.tex
new file mode 100644
index 0000000000000000000000000000000000000000..46c2b09c4cdd20d303ce5c13bd2433ea3bf9a42c
--- /dev/null
+++ b/Bachelorarbeit/V2/Kapitel/Abstract.tex
@@ -0,0 +1,10 @@
+\chapter{Abstract}
+The transition to Industry 5.0 emphasizes direct \gls{HRC}, necessitating advanced collision avoidance systems for collaborative robots (cobots). This bachelor's thesis presents the development of an exteroceptive \acrlong{ToF} system designed for collision avoidance in robotics, specifically for a Universal Robots UR10 industrial robot.
+
+The primary objective was to generate a complete, digital, three-dimensional representation of the robot's environment from an ego-perspective using \gls{ToFs}. This comprehensive environmental mapping aims to provide sufficient information to prevent collisions with objects within the robot's dynamic restricted space. A key challenge addressed is the differentiation between the robot's own structure and external objects within the sensor data, as well as the processing of data from non-stationary sensors. The system is designed to initiate a monitored stop of the UR10 upon detecting an imminent collision.
+
+The developed system utilizes 18 VL53L7CX \gls{ToF} arranged in three concentric rings on the robot's forearm, connected to a Raspberry Pi Pico microcontroller. Sensor data is processed and transmitted via UART to a Linux-based \acrshort{PC}, where it is further handled by a \acrfull{ROS}-package consisting of three nodes: serial\_to\_pcl\_node, pcl\_filter\_node, and moveit\_stop\_node. These nodes are responsible for converting raw sensor data into \gls{PCD}, filtering out the robot's own geometry, and detecting potential collisions within a defined safety zone around the robot's links.
+
+However, in its current form, the developed system is not sufficient as a standalone safety measure. Limitations such as reduced detection range $d_e$ for low-reflective materials, blind spots between sensors, and system latencies due to the robot’s braking time $T_s$ and reaction time $T_r$ restrict its effectiveness. Therefore, it can currently only be used as a supplementary measure alongside existing safeguards such as power and force limiting (PFL). Future work should aim to optimize sensor placement, reduce latency, and improve detection robustness through further validation.
+
+\newpage
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/Kapitel/Danksagung.tex b/Bachelorarbeit/V2/Kapitel/Danksagung.tex
new file mode 100644
index 0000000000000000000000000000000000000000..a90695c76f810f0f1f5090787d2d1148f42ee39d
--- /dev/null
+++ b/Bachelorarbeit/V2/Kapitel/Danksagung.tex
@@ -0,0 +1,9 @@
+\chapter{Danksagung}
+An dieser Stelle möchte ich mich bei all den Menschen bedanken, die mich während der Entstehung dieser Bachelorarbeit unterstützt haben.
+
+Mein besonderer Dank gilt meiner externen Betreuerin Sophie Charlotte Keunecke, die mir nicht nur mit großem Fachwissen zur Seite stand, sondern auch weit über die offiziellen Betreuungszeiten hinaus engagierte Unterstützung geleistet hat. Mit Rat, Tat und Robotern war sie stets erreichbar und hat maßgeblich dazu beigetragen, dass diese Arbeit in der vorliegenden Form entstehen konnte. Ihre Begeisterung für die Robotik und ihr unermüdlicher Einsatz haben mich immer wieder motiviert und inspiriert.
+
+Darüber hinaus danke ich allen Beteiligten, die zur erfolgreichen Durchführung meines Projekts beigetragen haben, sei es durch fachliche Beratung, technische Unterstützung oder ermutigende Worte.
+
+Vielen Dank!
+\newpage
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/Kapitel/Einleitung.tex b/Bachelorarbeit/V2/Kapitel/Einleitung.tex
new file mode 100644
index 0000000000000000000000000000000000000000..05f74d872c7e522e5883ca5d941095dc41e49f82
--- /dev/null
+++ b/Bachelorarbeit/V2/Kapitel/Einleitung.tex
@@ -0,0 +1,100 @@
+\chapter{Einleitung}
+% Warum wurde das Thema gewählt?
+Mit dem Übergang zur Industrie 5.0 rückt die direkte Zusammenarbeit von Mensch und Maschine zunehmend in den Fokus industrieller Entwicklungen. Einen wesentlichen Beitrag leisten \gls{Cobots}, da sie speziell für den sicheren und effizienten gemeinsamen Einsatz mit dem Menschen konzipiert sind \cite{VOG-23}. Der Bedarf an solchen Systemen nimmt seit Jahren kontinuierlich zu, wie Abbildung \ref{Cobot Growth} zeigt.
+\begin{figure}[h]
+	\centering
+	\includegraphics[scale=0.5]{images/Cobots-Forecast-Global-Market-1024x576.jpg}
+	\caption{Market Outlook for Cobots \cite{XIA-23}.}
+	\label{Cobot Growth}
+\end{figure}
+\\Dies ist aber nur gewährleistet, wenn von Robotern keine Gefahr für den Menschen ausgeht.
+Vor allem in der Zusammenarbeit mit körperlich beeinträchtigten Menschen erweisen sich \gls{Cobots} \cite{DIN-15066} als hilfreich, da sie die Handhabung von Arbeitsmaterialien erleichtern können \cite{IID-25}.
+\\Auch aufgrund einer alternden Gesellschaft, in der das Durchschnittsalter der arbeitenden Bevölkerung immer weiter zunimmt, können \gls{Cobots} für eine große Entlastung sorgen, indem sie repetitive Aufgaben, die einen Menschen physisch belasten würden, übernehmen \cite{HAD-16}.
+\\Aus diesen genannten Gründen benötigt man Robotersysteme, die die Zusammenarbeit zwischen Mensch und Maschine effizient und sicher gestaltbar machen.
+\\Aktuell werden Kollisionen von \gls{Cobots} mit Menschen oder Objekten häufig mithilfe intrinsischer Sensoren detektiert \cite{POP-17} oder durch eine ``Begrenzungseinrichtung'' \cite{DIN-10218-2} zwischen Mensch und Maschine verhindert. 
+\\In kollaborativen Arbeitsumgebungen, in denen physische Begrenzungseinrichtungen nicht realisierbar sind, stellt die frühzeitige Bewegungserkennung und das vorausschauende Stoppen eine wirksame Maßnahme zur Gefährdungsminimierung dar. Die Integration exterozeptiver Sensorik ermöglicht es, potenzielle Kollisionen zu erkennen, bevor sie eintreten, und so aktiv zu vermeiden. 
+\\Dies entspricht der \cite{DIN-10218-2}, Abschnitt 13.1, die Anforderungen für die Mensch-Roboter-Kollaboration definiert, sowie der \cite{DIN-15066}, Abschnitt 5.2, die spezifischen Anforderungen für die Kollisionsvermeidung und Risikominderung festlegt. 
+\\Durch die Vermeidung physischer Kontakte kann die Sicherheit erhöht und gleichzeitig ein unterbrechungsfreier, effizienter Arbeitsablauf gewährleistet werden.
+
+\section{Zielsetzung}
+In vorangegangenen Arbeiten wurden bereits unterschiedliche Sensoren, Sensorpositionierungen und Kommunikationsschnittstellen bewertet. Basierend auf diesen Bewertungen wurde ein Rahmen für diese Arbeit entworfen, der in einem späteren Kapitel noch näher erläutert wird.  
+\\Das Ziel dieser Arbeit ist es, mithilfe von exterozeptiven Abstandssensoren aus der Ego-Perspektive des Roboters eine vollständige, digitale, dreidimensionale Abbildung der Umgebung eines ``Industrieroboter[s]'' \cite{DIN-10218-2} zu generieren.
+Unter ``vollständig'' wird in diesem Fall verstanden, dass die Abbildung genügend Informationen enthält, um eine Kollision mit einem Objekt im ``eingeschränkten Raum'' \cite{DIN-10218-2} zu verhindern.
+\\Der eingeschränkte Raum wird in der Norm als der Raum definiert, der durch Begrenzungseinrichtungen vom maximal erreichbaren Raum des Roboters getrennt ist \cite{DIN-10218-2}.
+\\Da Abstandssensoren selbst nicht differenzieren, ob die gemessenen Punkte zum Robotersystem selbst oder zu der Umgebung des Systems gehören, muss das Robotersystem aus der Abbildung gefiltert werden.
+\\Im Fall einer drohenden Kollision soll es zu einem ``überwachten Stillstand'' kommen. 
+In der Norm wird das als ``Sicherheitsfunktion, die einen Zustand überwacht, in dem der Roboter bei aktiver Antriebsenergie angehalten wurde'' definiert \cite{DIN-10218-2}. 
+\\Zum aktuellen Zeitpunkt muss nicht zwischen dem Ziel einer Manipulation und einem anderen Objekt unterschieden werden – mit Ausnahme des Roboters selbst.
+
+\section{Forschungsfragen}
+\label{Forschungsfragen}
+Aus den vorgelegten Zielen lassen sich folgende Forschungsfragen definieren:
+\begin{itemize}
+	\item Lässt sich mit exterozeptiven Sensoren eine Kollision mit einem Menschen verhindern?
+	\item Wie erreicht man eine vollständige Überschneidung des eingeschränkten Raums eines Industrieroboters mit dem Detektionsbereich durch exterozeptive \gls{ToFs} aus der Ego-Perspektive?
+	\item Wie wertet man die Sensordaten aus, wenn die Sensoren nicht ortsfest sind?
+	\item Wie unterscheidet man Objekte im eingeschränkten Raum von dem Industrieroboter selbst?
+\end{itemize}
+Der Detektionsbereich ist der Bereich, welcher durch sensitive Schutzeinrichtungen erfasst wird, wie in Abbildung \ref{fig:Räume} zu sehen ist \cite{DIN-10218-2}.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=\linewidth]{images/DIN_EN_ISO-10218-2.png}
+    \caption{Übersicht der verschiedenen „Räume“ einer Roboteranwendung \cite{DIN-10218-2}}
+    \label{fig:Räume}
+\end{figure}
+
+\begin{table}[H]
+\renewcommand{\arraystretch}{1.2}
+\caption*{\textbf{Legende}} % unnumbered caption (with bold text)
+\begin{tabularx}{\textwidth}{@{} lX lX @{}}
+A & Manipulator & 1 & maximaler Raum \\
+B & Endeffektor & 2 & eingeschränkter Raum \\
+C & Werkstück & 3 & Betriebsraum \\
+D & nichttrennende Schutzeinrichtung & 4 & geschützter Bereich \\
+  & \textit{(Sicherheitslaserscanner abgebildet)} & &
+\end{tabularx}
+\end{table}
+
+\begin{center}
+\textit{maximaler Raum} $\geq$ \textit{eingeschränkter Raum} $\geq$ \textit{Betriebsraum}
+\end{center}
+
+
+Eine sensitive Schutzeinrichtung ist eine Einrichtung für den Nachweis von Personen oder Körperteilen, die ein entsprechendes Signal an das 
+Steuerungssystem übermittelt, um so das Risiko für die erkannten Personen zu vermindern \cite{DIN-10218-2}.
+Unter exterozeptiven Sensoren versteht man einen Robotersensor, der dazu bestimmt ist, Zustände der Umgebung des Roboters oder die Wechselwirkungen des Roboters mit seiner Umgebung zu erfassen \cite{DIN-8373}.
+
+\section{Rahmen und Anforderungen}
+\label{Rahmen_und_Anforderungen}
+Für diese Arbeit ist ein Rahmen vorgegeben, der aus vorangegangenen Arbeiten resultiert  und hier nun näher erläutert wird.
+Die Kollisionsvermeidung soll mit \gls{ToFs} aus der Ego-Perspektive des Roboters realisiert werden. 
+\\In dieser Arbeit \cite{CHA-25} wurden Abstandssensoren miteinander verglichen. In der genannten Arbeit kam man zu dem Ergebnis, dass sich der \gls{ToF} besonders gut für die Kollisionsvermeidung eignet. Unter der Bedingung, dass die Objekte, mit denen eine Kollision vermieden werden soll, nicht aus Glas bestehen.
+\\Die Ego-Perspektive ergibt sich aus der Voraussetzung, dass die Sensorik ``on-board'' sein soll. 
+Die Software, die verwendet werden soll, ist \gls{ROS}, basierend auf einem Linux-Betriebssystem.
+\\Der in dieser Arbeit eingesetzte Industrieroboter ist der UR10 der Firma \gls{UR}.
+\\Da der Schwerpunkt dieser Arbeit auf der Kollisionsvermeidung mithilfe von \gls{ToF} liegt, wurde der UR10 gewählt, trotz der bereits vorhandenen \gls{PFL} die den Roboter bereits zur \gls{HRC} befähigt. Der \gls{UR}10 bietet aufgrund seiner \gls{ROS}-Kompatibilität sowie der umfangreichen Dokumentation geeignete Voraussetzungen für die Umsetzung des Vorhabens.
+
+Zunächst werden hier die Anforderungen im Allgemeinen definiert, um sie dann im Anschluss näher zu erläutern.	
+\begin{itemize}
+	\item Der Detailgrad von den Informationen über den eingeschränkten Raum des Roboters reicht aus, um einen Menschen wahrzunehmen.
+	\item Die Informationen sind so aktuell, dass man damit eine Kollision verhindern kann.
+    \item Die Informationen können genutzt werden, um einen überwachten Stillstand des \gls{UR}10 einzuleiten, wenn eine Kollision bevorsteht.
+\end{itemize}
+
+\subsection{Detailgrad}
+Die Sensoren sind sensibel genug, um einen Arm, der als Papierkreis mit unterschiedlichen Durchmessern und Reflexionsvermögen  vereinfacht wird, in einem Abstand von \SI{50}{\centi\meter} von einem \gls{ToF} wahrzunehmen. Die Lücken zwischen der \gls{FOV} der einzelnen Sensoren sind klein genug, um den oben beschriebenen Arm nicht zu ``übersehen''. 
+
+\subsection{Aktualität der Informationen}
+Da die Sensoren eine limitierte Anzahl an Messungen pro Sekunde durchführen können und die Auswertung der Sensordaten mit einer Latenz einhergeht, muss ein adäquater Schutztrennabstand zur kollaborierenden Roboteranwendung nach \cite{DIN-10218-2} bestimmt werden, der ausreicht, um den mit dem Robotersystem zusammenarbeitenden Menschen nicht zu gefährden, wenn keine \gls{PFL} aktiv ist.
+
+\subsection{Nutzbarkeit der Informationen}
+Es soll untersucht werden, inwiefern eine bereits ausgeführte Trajektorie auf Grundlage aktueller Sensordaten unterbrochen werden kann, sobald eine drohende Kollision detektiert wird. Dabei darf die Bewegung des Robotersystems ausschließlich dann gestoppt werden, wenn ein Objekt den definierten Schutztrennabstand unterschreitet. Veraltete Messwerte oder Objekte, die sich nicht mehr im kritischen Gefahrenbereich befinden, dürfen bei der Entscheidungsfindung nicht berücksichtigt werden.
+
+\section{Aufbau der Arbeit}
+Diese Arbeit gliedert sich in mehrere aufeinander aufbauende Kapitel, die zur Beantwortung der Forschungsfragen beitragen.
+
+Im zweiten Kapitel wird zunächst der Stand der Technik beleuchtet. Dabei werden Normen sowie alternative Ansätze zur Kollisionsvermeidung durch exterozeptive Sensorik vorgestellt. Auch unterschiedliche physikalische Prinzipien optischer Abstandssensoren werden erläutert, um das gewählte Sensorprinzip einzuordnen.
+
+Das dritte Kapitel beschreibt die praktische Umsetzung des entwickelten Sensorsystems. Es werden die mechanische Anordnung der Sensoren, die \gls{MCU}-Programmierung sowie die softwareseitige Verarbeitung der Sensordaten im \gls{ROS}-Framework vorgestellt. Am Ende dieses Kapitels wird geprüft, inwieweit die zuvor formulierten Anforderungen erfüllt wurden, unter Rückgriff auf erhobene Messdaten.
+
+Im vierten und letzten Kapitel werden die Ergebnisse der Arbeit zusammengefasst und bewertet. Zudem erfolgt ein Ausblick auf mögliche Weiterentwicklungen des Systems sowie offene Fragestellungen für zukünftige Arbeiten.
\ No newline at end of file
diff --git "a/Bachelorarbeit/V2/Kapitel/Erkl\303\244rung.tex" "b/Bachelorarbeit/V2/Kapitel/Erkl\303\244rung.tex"
new file mode 100644
index 0000000000000000000000000000000000000000..38e0fe1b134f9c695d1f1b37302a8b595a3bb8e3
--- /dev/null
+++ "b/Bachelorarbeit/V2/Kapitel/Erkl\303\244rung.tex"
@@ -0,0 +1,10 @@
+\chapter*{Erklärung}
+\vspace{5mm}
+Ich versichere hiermit, dass ich die vorliegende Arbeit selbständig verfasst und keine anderen als die im Quellenverzeichnis angegebenen Quellen benutzt habe. Stellen, die wörtlich oder sinngemäß aus veröffentlichten oder noch nicht veröffentlichten Quellen entnommen sind, sind als solche kenntlich gemacht. Die Zeichnungen oder Abbildungen in dieser Arbeit sind von mir selbst erstellt worden oder mit einem entsprechenden Quellennachweis versehen. Diese Arbeit ist in gleicher oder ähnlicher Form noch bei keiner anderen Prüfungsbehörde eingereicht worden. \cite{GPA-16}
+
+Für diese Arbeit wurde Künstliche Intelligenz zur Rechtschreibüberprüfung und zum Paraphrasieren verwendet.
+\vspace{4mm}
+
+Aachen, {\today}
+
+\newpage
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/Kapitel/Fazit und Ausblick.tex b/Bachelorarbeit/V2/Kapitel/Fazit und Ausblick.tex
new file mode 100644
index 0000000000000000000000000000000000000000..280477bdaa0a2e92d3f8d4898290e4c7d51881aa
--- /dev/null
+++ b/Bachelorarbeit/V2/Kapitel/Fazit und Ausblick.tex	
@@ -0,0 +1,36 @@
+\chapter{Fazit und Ausblick}
+
+\section{Fazit}
+
+Im Rahmen dieser Arbeit wurde ein exterozeptives Time-of-Flight-Sensorsystem zur Kollisionsvermeidung am \gls{UR}10-Industrieroboter prototypisch umgesetzt. 18 \texttt{VL53L7CX}-Sensoren in drei konzentrischen Ringen liefern über zwei \gls{I2C}-Busse auf einem Raspberry Pi Pico erfasste Distanzdaten, die per \gls{JSON}-Serialisierung und \gls{UART} an einen \gls{ROS}-fähigen Linux-\acrshort{PC} übertragen werden. Dort werden sie zu einer PointCloud2 verbunden, gefiltert und in die Überwachung der Roboterbewegung eingebunden.
+
+Die Evaluierung zeigte jedoch, dass das System in seiner aktuellen Version als alleinige Schutzeinrichtung gemäß \cite{DIN-10218-2} nicht ausreichend sicher ist. Dies liegt im Wesentlichen an drei Faktoren:
+
+\begin{itemize}
+\item \textbf{Beeinflussung des effektiven Detektionsvermögens $d_e$:} Materialien mit niedrigem Reflexionsvermögen können nur wahrgenommen werden, wenn sie nicht zu weit vom \gls{ToF} entfernt sind oder groß genug sind.
+\item \textbf{Tote Winkel:} Trotz dreier Sensorringe bleiben nicht unerhebliche Bereiche unbedeckt, in denen Kollisionen unbemerkt bleiben können, weshalb ebenfalls ein größerer Wert für das effektive Detektionsvermögen $d_e$ angenommen werden muss.
+\item \textbf{Lange Systemlatenzen:} Die Bremsdauer $T_s$ des \gls{UR}10 und die Reaktionszeit $T_r$ zwischen Unterschreitung des Schutztrennabstands und Einleitung eines überwachten Stillstands sind derzeit zu lang, um eine vollständige Kollisionsvermeidung zu gewährleisten.
+\end{itemize}
+
+Daher eignet sich das entwickelte \gls{ToF}-System zum gegenwärtigen Stand nur als ergänzende Schutzmaßnahme, beispielsweise in Kombination mit der werkseitig integrierten \gls{PFL} des \gls{UR}10.
+
+\section{Ausblick}
+
+Für eine industrielle Einsatzreife und höhere Robustheit werden im Folgenden Optimierungspotenziale skizziert:
+\begin{itemize}
+\item \textbf{Optimierte Sensoranordnung:} Durch adaptive oder zusätzliche Montagewinkel lassen sich tote Winkel weiter verkleinern und ein besseres effektives Detektionsvermögen erreichen.
+\item \textbf{Dual-Core-Verarbeitung auf dem Pico:} Die parallele Nutzung beider Cortex-M0+-Kerne beschleunigt die \gls{JSON}-Serialisierung und Vorverarbeitung, wodurch die Reaktionszeit $T_r$ verkürzt wird.
+\item \textbf{Höherfrequenter I²C-Bus:} Ein \gls{GPIO}-Erweiterungsboard mit 1 MHz-Unterstützung ermöglicht schnellere Lesezyklen und eine Erhöhung der effektiven Messrate, wodurch die Gesamtreaktionszeit $T_r$ reduziert werden kann.
+\item \textbf{Verteilte Mikrocontroller-Architektur:} Durch mehrere Raspberry Pi Pico mit jeweils weniger \texttt{VL53L7CX}-Sensoren pro Bus könnten die \gls{ToF} mit 15 Messungen pro Sekunde verwendet werden, um die Gesamtreaktionszeit $T_r$ zu reduzieren.
+\item \textbf{Wechsel zu Compilersprachen:} Eine Portierung der \gls{ROS}-Nodes von Python auf C++ reduziert die Laufzeit der Datenverarbeitung und senkt somit $T_r$.
+\item \textbf{Schutzabstände abhängig vom Betriebszustand:} Wenn man den \gls{UR}10 mit einer geringeren Nutzlast als der maximalen Last (\SI{10}{\kilogram}) verwendet, dann sollten sich Nachlaufzeit und Nachlaufweg des Roboters verkürzen. In dem Fall, dass ein überwachter Stillstand eingeleitet werden muss, kann eine kleinere Gesamtmasse schneller gebremst werden.
+\item \textbf{Reflektierende Schutzkleidung:} Arbeitskleidung mit hohem Reflexionsvermögen erhöht das effektive Detektionsvermögen $d_e$ und erlaubt nach DIN 13855 eine Reduzierung der Eindringtiefe $C$, wodurch geringere Schutztrennabstände genügen.
+\end{itemize}
+
+Überlegungen, die auf dem Sensorsystem aufbauen können:
+\begin{itemize}
+    \item \textbf{Wiederaufnahme der Trajektorie:} Neue Softwarekomponenten, die erlauben die Trajektorie wieder aufzunehmen, nachdem der Bediener oder ein anderes Objekt den Schutztrennabstand wieder verlassen hat.
+    \item \textbf{Anpassen der Trajektorie:} Auf Basis der Umgebungsdaten von den \gls{ToFs} kann man der Bahnplanungs-Software MoveIt2 Kollisionsobjekte (Wände oder Boden) übergeben, die bei der Berechnung der Trajektorie berücksichtigt werden.
+\end{itemize}
+
+Durch die Kombination dieser Maßnahmen kann das entwickelte Sensorsystem zu einer verlässlichen Schutzfunktion in kollaborativen Robotikanwendungen weiterentwickelt werden. Zudem eröffnen sich weiterführende Forschungsperspektiven in der sensordatenbasierten, adaptiven Trajektorienplanung und der Entwicklung sicherheitsrelevanter Softwarearchitekturen im Industrie 5.0-Kontext.
diff --git a/Bachelorarbeit/V2/Kapitel/Stand der Technik.tex b/Bachelorarbeit/V2/Kapitel/Stand der Technik.tex
new file mode 100644
index 0000000000000000000000000000000000000000..60282e8b0c50e6213425c01fe133ee9924612bcf
--- /dev/null
+++ b/Bachelorarbeit/V2/Kapitel/Stand der Technik.tex	
@@ -0,0 +1,218 @@
+\chapter{Stand der Technik}
+Dieses Kapitel ist in vier Unterkapitel unterteilt. 
+Im ersten Teil wird darauf eingegangen, wie die \gls{HRC} in der Norm definiert ist. 
+\\Im zweiten Teil geht es um die Kollisionsvermeidung an sich, wie sie in anderen Arbeiten erreicht wurde und wie sie in dieser Arbeit definiert wird. 
+\\Im dritten Teil geht es um die Sensorik und um unterschiedliche Distanzmessverfahren mit exterozeptiven optischen Sensoren.
+\\Im vierten Teil geht es um Grundlagen zu Programmen, die in der Arbeit verwendet wurden. Diese werden an dieser Stelle erläutert, um dem Leser das Nachvollziehen im darauf folgenden Kapitel zu erleichtern.
+\section{Kollaborierende Anwendungen}
+In der \cite{DIN-10218-2} werden 3 Arten der \gls{HRC} beschrieben und in Abbildung \ref{FIS-23_HRC} dargestellt:
+\begin{itemize}[]
+    \item \gls{HGC} 
+    \item \gls{SSM}
+    \item \acrfull{PFL}
+\end{itemize}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.8\linewidth]{images/HRC_FIS-23.png}
+    \caption{\gls{HRC}-Arten \cite{FIS-23}.}
+    \label{FIS-23_HRC}
+\end{figure}
+\newpage
+Bei der \gls{HGC} steuert die Bedienperson den Roboter manuell über eine Handführungseinrichtung. Vor dem Betreten des eingeschränkten Raums muss der Roboter in einen überwachten Stillstand versetzt werden. Die Aufgabe wird dann durch direkte manuelle Steuerung am oder nahe dem Endeffektor ausgeführt \cite{DIN-10218-2}.
+Ein überwachter Stillstand ist ein Halt der  Kategorie 2 nach IEC 60204-1.
+\\Bei der \gls{SSM} dürfen sich Roboter und Bedienperson gleichzeitig im eingeschränkten Raum aufhalten. Die Sicherheit wird durch die kontinuierliche Einhaltung eines definierten Schutztrennabstands gewährleistet. Nähert sich die Bedienperson dem Roboter unter diesen Abstand, stoppt der Roboter automatisch. Entfernt sich die Person wieder, kann der Roboter seine Bewegung selbstständig fortsetzen. Reduziert der Roboter seine Geschwindigkeit, darf auch der Sicherheitsabstand entsprechend verringert werden \cite{DIN-10218-2}.
+\\Der Sicherheitsabstand wird nach der folgenden Formel aus \cite{DIN-10218-2} berechnet:
+\begin{equation}
+S_p = S_h + S_r + S_s + C + Z_d + Z_r
+\label{eq:Schutztrennabstand}
+\end{equation}
+
+\begin{table}[h]
+\centering
+\caption{Beschreibung der Variablen in Gleichung~\ref{eq:Schutztrennabstand}}
+\begin{tabular}{>{\raggedright\arraybackslash}p{2cm} p{11cm}}
+$S_p$ & Schutztrennabstand zwischen Mensch und Robotersystem \\
+$S_h$ & Anteil des Schutztrennabstands durch Bewegung des Bedieners, bis die Anwendung gestoppt hat \\
+$S_r$ & Anteil des Schutztrennabstands, der durch die Reaktionszeit der Anwendung verursacht wird \\
+$S_s$ & Anteil des Schutztrennabstands, der beim Stoppen der Anwendung entsteht \\
+$C$ & Eindringtiefe (intrusion distance), gemäß ISO 13855 \\
+$Z_d$ & Positionsunsicherheit des Bedieners, bedingt durch die Messgenauigkeit des Erkennungssystems \\
+$Z_r$ & Positionsunsicherheit der Anwendung, bedingt durch die Messgenauigkeit des Systems \\
+\label{Tabelle_Schutztrennabstand}
+\end{tabular}
+\end{table}
+Bei der \gls{PFL} kann es absichtlich oder unbeabsichtigt zu physischem Kontakt zwischen Roboter und Bedienperson kommen. Dafür sind speziell ausgelegte Robotersysteme mit leistungs- und kraftbegrenzten Eigenschaften erforderlich. Die Risikominderung erfolgt entweder durch inhärent sichere Bauweise oder durch sicherheitsbezogene Steuerungssysteme, sodass alle Gefährdungen unter den in der Risikobeurteilung festgelegten Belastungsgrenzwerten bleiben \cite{DIN-10218-2}.
+
+Das Sensorsystem, das in dieser Arbeit beschrieben wird, ist eine \gls{SPE}. Das ist eine ``Einrichtung  für  den  Nachweis  von  Personen  oder  Körperteilen,  die  ein  entsprechendes  Signal  an  das  Steuerungssystem übermittelt, um so das Risiko für die erkannten Personen zu vermindern'' \cite{DIN-10218-2}. 
+\\Inkrementalgeber und Torsionssensoren zählen zu den propriozeptiven Sensoren und erfassen ausschließlich Zustandsgrößen, die den internen Zustand eines Industrieroboters betreffen \cite{DIN-8373}.
+Aus diesem Grund sind diese Sensoren ungeeignet, um Kollisionen zu vermeiden, und können ausschließlich Kollisionen detektieren. Jedoch sind das die Art von Sensoren, die in verschiedenen \gls{Cobots} verbaut sind. Um Kollisionen zu vermeiden, muss man aus diesem Grund auf exterozeptive Sensoren (\gls{SPE}), die an oder um den Industrieroboter herum montiert sind, zurückgreifen.
+
+\section{Kollisionsvermeidung}
+Der Begriff ``Kollisionsvermeidung'' wird in dieser Arbeit wie folgt definiert:
+Maßnahmen und Strategien, die darauf abzielen, physische Zusammenstöße eines Roboters mit seiner Umgebung — insbesondere mit Menschen, Maschinen oder baulichen Strukturen — durch sensorbasierte Überwachung und situationsabhängige Reaktion zu verhindern.
+
+%Unterschiedliche Paper zum Stand dere Technik in der Kollisionsvermeidung
+\indent Bei der Kollisionsvermeidung kann man unterschiedliche Ansätze verfolgen. 
+Eine in \cite{ALN-22}, aus dem Jahr 2022, beschriebene Methode besteht darin, mittels einer Wärmebildkamera eine Person zu detektieren und das resultierende Wärmebild mit den Daten einer \gls{RGB-D}-Kamera zu fusionieren, um die räumliche Position der Person zu bestimmen und eine \gls{SSM} zu ermöglichen.
+\\Jedoch wird zum Auswerten von Kamerainput vergleichsweise viel Rechenleistung benötigt, und in manchen Anwendungen ist das Aufnehmen und Auswerten von Kamerabildern aus Gründen des Datenschutzes unerwünscht. 
+Dies sind Beweggründe für den Einsatz von \gls{ToFs} anstelle herkömmlicher Kameras, da sie einerseits mit einem deutlich geringeren Rechenaufwand auskommen und andererseits datenschutzrechtlich unbedenklicher sind.
+
+In dieser Masterarbeit \cite{SCH-19} von Carolin Schaffert aus dem Jahr 2019 wurde eine kollaborierende Anwendung mit \gls{SSM} und mit einem 2D \gls{LIDAR}-Scanner realisiert, wie in der graphischen Darstellung in Abbildung \ref{SCH-19_Graphic} zu sehen ist. Aus Abbildung \ref{SCH-19_Graphic} geht ebenfalls hervor, dass ein wesentlicher Anteil des Schutztrennabstands durch die Eindringtiefe des Operators bestimmt wird. Diese Eindringtiefe ist so bemessen, dass der Roboter selbst dann nicht erreicht werden kann, wenn der Operator den Arm vollständig ausstreckt.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=\linewidth]{images/SCH-19_Graphical representation of protective distance.png}
+    \caption{Grafische Darstellung des Schutztrennabstands in \cite{SCH-19}}
+    \label{SCH-19_Graphic}
+\end{figure}
+ Wird jedoch der gesamte Körper des Operators einschließlich aller Extremitäten in die sicherheitstechnische Betrachtung einbezogen, kann die erforderliche Eindringtiefe reduziert werden, ohne die Sicherheit des Operators zu gefährden. 
+
+In dieser Arbeit \cite{AMA-22}, aus dem Jahr 2022, wird mit einer \gls{RGB-D} Kamera der Raum um einen \gls{UR}-3 aus der Vogelperspektive überwacht. Es werden zwei unterschiedliche Strategien angewandt, um eine \gls{SSM} zu realisieren.
+In der Arbeit wird mit einem \gls{CNN} die relative Position eines oder mehrerer Menschen zum \gls{UR}-3 aus dem Video-Input, der \gls{RGB-D}-Kamera bestimmt. 
+Die Umgebung des Roboters ist in unterschiedliche Zonen unterteilt. Je näher ein Mensch an den Industrieroboter herangeht, desto langsamer wird er.
+In der Hochrisikozone kommt der Roboter zum Stillstand.
+\\Die beiden Strategien unterscheiden sich darin, dass die Hochrisikozonen, welche in Abbildung \ref{AMA-22_dynamic} und \ref{AMA-22_static} rot dargestellt werden, beim einen Ansatz statisch sind und beim anderen Ansatz dynamisch.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.7\linewidth]{images/AMA-22_static.jpg}
+    \caption{Abmessungen der Schutzzonen: Hochrisikozone (rot), Niedrigrisikozone (gelb), sichere Zone (grün), Quelle:\cite{AMA-22}}
+    \label{AMA-22_static}
+\end{figure}
+Statisch bedeutet, dass die Zone unabhängig von der Bewegung des Roboters ist. In der beschriebenen Arbeit liegen die Zonen um den \gls{UR}-3, wie in Abbildung \ref{AMA-22_static} gezeigt.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.5\linewidth]{images/AMA-22_dynamic.jpg}
+    \caption{Sicherheitszonen um Handgelenk- und Ellbogengelenke: Durch die Überwachung dieser Zonen stoppt der Roboter nur dann, wenn es erforderlich ist – und gewährleistet so sowohl Sicherheit als auch Effizienz \cite{AMA-22}.}
+    \label{AMA-22_dynamic}
+\end{figure}
+Dynamisch bedeutet, dass die Hochrisikozone sich mit der Bewegung der Gelenke mitbewegt und, wie in Abbildung \ref{AMA-22_dynamic} gezeigt, immer die Gelenke im Zentrum der Zonen liegen. 
+\\ Der dynamische Ansatz in der Arbeit \cite{AMA-22} ähnelt dem Vorgehen in dieser Arbeit, wie sich in späteren Kapiteln noch zeigen wird. Jedoch ist das Auswerten von Video-Input aus den schon bei \cite{ALN-22} erwähnten Gründen ausgeschlossen worden.
+
+Eine weitere Methode zur Kollisionsvermeidung, die dem in diesem Projekt verfolgten Ansatz ähnelt, bestimmt den Abstand zwischen Mensch und Roboter durch die Fusion von Sensordaten eines \gls{LIDAR}-Sensors, einer \gls{RGB}-Kamera sowie der propriozeptiven Sensorik des Roboters. Im Gegensatz zur hier verwendeten Lösung sind der \gls{LIDAR}-Scanner und die Kamera in dem dargestellten System (siehe Abbildung~\ref{RAS-20_Graphik}) nicht direkt am Roboter montiert. Dies kann den mechanischen Aufbau unter Umständen aufwändiger gestalten. Wie in der Originalarbeit beschrieben, wurden zur Kompensation von Abschattungen im eingeschränkten Raum zwei \gls{SPE} eingesetzt, die sich gegenseitig ergänzen \cite{RAS-20}. 
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=\linewidth]{images/RAS-20_Graphik.png}
+    \caption{Sensoranordnung in Roboterzelle \cite{RAS-20}.}
+    \label{RAS-20_Graphik}
+\end{figure}
+
+\section{Exterozeptive optische Sensoren}
+%Wie funktionieren optische Sensoren?
+Optische Sensoren machen sich unterschiedliche physikalische Prinzipien zu Nutze, um Abstände zu messen. Im Folgenden wird auf drei Prinzipien näher eingegangen \cite{LI-19}. 
+
+\subsection{Phasen- oder Frequenzlaufzeitverfahren}
+
+Phasen- und Frequenzlaufzeitverfahren sind Varianten des Laufzeitverfahrens zur Distanzmessung. Dabei wird das Licht moduliert – entweder in Amplitude, wie in Abbildung \ref{HER-18_wave} zu sehen ist, oder in Frequenz, wie in \ref{HER-18_freq} zu sehen ist. Die Distanz zum Objekt wird aus der Phasen- bzw. Frequenzdifferenz zwischen gesendetem und empfangenem Signal berechnet \cite{HER-18}.
+\\Beim Phasenlaufzeitverfahren ist die Messung periodisch, mit einer Eindeutigkeitsgrenze von $\lambda_m / 2$, was durch die Verwendung mehrerer Modulationswellenlängen kompensiert wird.
+\\Das Frequenzlaufzeitverfahren nutzt eine kontinuierliche Frequenzänderung, wobei aus der Differenz von Sende- und Empfangsfrequenz die Entfernung bestimmt wird. Auch hier wird die Mehrdeutigkeit durch die Verwendung mehrerer Frequenzen reduziert. Beide Verfahren erreichen bei diffus reflektierenden Oberflächen Reichweiten bis zu \SI{10}{\meter}.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.57\linewidth]{images/HER-18_wave.png}
+    \caption{Prinzip des Phasenlaufzeitverfahrens \cite{HER-18}}
+    \label{HER-18_wave}
+\end{figure}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.57\linewidth]{images/HER-18_freq.png}
+    \caption{Prinzip des Frequenzlaufzeitverfahrens \cite{HER-18}}
+    \label{HER-18_freq}
+\end{figure}
+
+
+\subsection{Triangulation}
+Bei der zweiten Methode zur Abstandsbestimmung mit Lasern wird der Laserstrahl geneigt und basierend darauf, wo der Laser auf einem ``großflächigen'' Detektor auftrifft, kann der Abstand zwischen Sensor und Messobjekt trianguliert werden \cite{HER-18}.
+
+Das Prinzip der optischen Triangulation beruht auf der geometrischen Bestimmung des Objektabstands durch die Auswertung der Position eines reflektierten Lichtstrahls auf einer Detektionseinheit. Abbildung \ref{fig:triangulationsprinzip} zeigt schematisch den Aufbau eines solchen Systems. Ein Sender emittiert einen Lichtstrahl, der vom zu detektierenden Objekt reflektiert und anschließend über eine Empfangsoptik auf die Detektionsebene projiziert wird. Verschiebt sich das Objekt entlang der Tiefenachse, so verändert sich die Auftreffposition $x$ auf der Detektionseinheit. Der Zusammenhang zwischen der Objektposition und der Messgröße ergibt sich gemäß Gleichung~\eqref{eq:triangulation} zu:
+
+\begin{equation}
+x = \frac{B \cdot F}{d}
+\label{eq:triangulation}
+\end{equation}
+
+Dabei bezeichnen:
+\begin{itemize}
+    \item $x$ die gemessene Position des Lichtflecks in der Detektionsebene,
+    \item $B$ den Basisabstand zwischen Sende- und Empfangsoptik,
+    \item $F$ den Abstand zwischen Empfangsoptik und Detektionsebene,
+    \item $d$ den Objektabstand (gesuchter Wert).
+\end{itemize}
+
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.7\textwidth]{images/HER-18_tria.png}
+    \caption{Schematische Darstellung des Triangulationsmessprinzips (modifiziert) \cite{HER-18} 
+    \label{fig:triangulationsprinzip}}
+\end{figure}
+\newpage
+Ein Vorteil dieser Methode ist, dass äußerst präzise Abstände gemessen werden können. Aus diesem Grund werden diese Abstandssensoren auch für die Bestimmung von Oberflächenrauigkeit genutzt. Jedoch sind die Sensormodule zu groß, um sie auf einem Industrieroboter in ausreichender Anzahl zu montieren.
+
+\subsection{Pulslaufzeitverfahren (Time of Flight)}
+Die \gls{ToFs} messen die Zeit, die ein emittierter Lichtimpuls braucht, um zu einer Oberfläche zu ``fliegen'', von der Oberfläche reflektiert zu werden und vom Receiver (in Abbildung \ref{ToF Explained}: unten) des Scanners wieder wahrgenommen zu werden.
+Mit Lichtgeschwindigkeit, der Zeit und dem Brechungsindex lässt sich dann der gesamte Weg des Lichtes errechnen und durch Halbieren des Weges lässt sich dann die Distanz zum Ziel bestimmen.
+\begin{figure}[h]
+	\centering
+	\includegraphics[width=0.7\linewidth]{images/20200501_Time_of_flight.svg.png}
+	\caption{Funktionsweise eines \gls{ToF} \cite{RCR-20}}
+	\label{ToF Explained}
+\end{figure}
+\\
+Im Weiteren wird sich in dieser Arbeit auf die Fortschritte der optischen Sensoren, die sich das Pulslaufzeitverfahren zu eigen machen, fokussiert. \cite{RAJ-20}
+\\
+%Welche Technischen Möglichkeiten gibt es um aus 1-Dimensionalen ToF-Sensoren  eine räumliche Wahrnehmung zu generiren?
+Um aus einem 1-Dimensionalen \gls{ToF} eine räumliche Wahrnehmung zu schaffen, muss man mehrere Punkte messen und in einem Referenz-Koordinatensystem beschreiben. Als Koordinatenursprung bietet sich zur Simplifizierung die Position des Sensors selbst an.
+Wenn man nun die Orientierung des Sensormoduls ändert und die Absolutposition des \gls{LRF} beibehält, dann kann man mit genügend Punkten  und mit den richtigen Werkzeugen zur Visualisierung ein 3-Dimensionales Abbild der Umgebung schaffen.
+Beim optomechanischen Scannen wird die Orientierung des Emitters und des Empfängers selbst nicht verändert, sondern mit Hilfe von Spiegeln und Prismen wird der ausgehende Laserstrahl und die eingehende Reflexion abgelenkt, um die Distanz von unterschiedlichen Punkten zum Sensor zu messen. In Abbildung \ref{Opto LiDAR Example} kann man zwei Beispiele für einen optomechanischen 2D-\gls{LIDAR} sehen. Der grundsätzliche Aufbau von den beiden ist ähnlich. Man verwendet in beiden \gls{LIDAR} einen um 45° zu seiner Drehachse geneigten Spiegel, um den Laserstrahl und seine Reflexion abzulenken. In Abbildung \ref{Opto LiDAR Example}a hat man den Spiegel unterhalb des Emitters und Empfängers positioniert und man hat im Gegensatz zu dem Aufbau in Abbildung \ref{Opto LiDAR Example}b den Emitter und Empfänger 90° versetzt zueinander angeordnet.\cite{RAJ-20}  
+\begin{figure}[h]
+	\centering
+	\includegraphics[scale=0.1]{images/Optomechanical LiDAR.png}
+	\caption{Beispiel für einen Optomechanischen 2D-\acrshort{LIDAR} \cite{RAJ-20}}
+	\label{Opto LiDAR Example}
+\end{figure}
+\\
+Beim Elektromechanischen Scannen verwendet man Servomotoren, um mit einem 2-dimensionalen optomechanischen Scanner eine 3-dimensionale Aufnahme der Umgebung zu schaffen. Das haben \cite{SUR-03} erreicht, indem sie den gesamten 2D-Scanner um eine ihrer Achsen rotieren lassen haben.
+\\
+Beim Scannen mit \gls{MEMS} verwendet man ein Spiegel-Array zum Ablenken des Laserstrahls und seiner Reflexion. Jedoch konnte bei \cite{NIC-12} nur eine geringe \gls{FOV} von \SI{15}{\degree} in der Vertikalen und \SI{11}{\degree} in der Horizontalen erreicht werden. Zudem wird in \cite{RAJ-20} erwähnt, dass eine \gls{FOV} von \SI{40}{\degree} in der Diagonalen nicht übertroffen werden konnte und dass ein \gls{MEMS} basierter \gls{LIDAR} anfällig gegenüber Vibrationen ist, was diese Art Sensor unbrauchbar macht für die Montage auf einem beweglichen Teil eines Industrieroboters.
+
+Der in dieser Arbeit eingesetzte Sensor, der \texttt{VL53L7CX}, gehört zur Klasse der Solid-State-Scanner und arbeitet nach dem sogenannten Flash-Prinzip. Dabei sendet eine \SI{940}{\nano\meter} \gls{VCSEL} einen breit gestreuten Lichtimpuls aus, dessen Reflexionen anschließend durch ein Array aus \gls{SPAD} detektiert werden. Die räumliche Auflösung wird durch die Integration von \gls{DOE} auf Sender- und Empfängerseite sowie durch die spezielle Anordnung der $64$ Empfangskanäle erreicht. Jeder dieser Kanäle ist auf eine leicht unterschiedliche Richtung innerhalb eines $60^\circ\times60^\circ$ großen Sichtfeldes (\gls{FOV}) ausgerichtet, wodurch der Sensor eine multizonale Tiefenwahrnehmung ermöglicht.
+
+Zu den Merkmalen des \texttt{VL53L7CX} zählen:
+\begin{itemize}
+    \item Multizonen-Distanzmessung mit wählbaren Auflösungen von $4 \times 4$ oder $8 \times 8$ Zonen innerhalb eines diagonalen \SI{90}{\degree} Sichtfelds,
+    \item Multitarget-Erkennung zur simultanen Erfassung mehrerer Objekte in einzelnen Zonen,
+    \item Reichweite von bis zu \SI{350}{\centi\meter} bei gleichzeitig hoher Messgeschwindigkeit von bis zu \SI{60}{\hertz} bei einer $4 \times 4$ Auflösung und \SI{15}{\hertz} bei einer $8 \times 8$ Auflösung, 
+    \item Integrierter Bewegungsindikator pro Zone zur Erkennung von Objektbewegungen,
+    \item Histogrammverarbeitung und algorithmische Kompensation zur Minimierung von Störeinflüssen durch Abdeckmaterialien.
+\end{itemize}
+
+Abbildung~\ref{VL53L7CX Package} zeigt das Sensormodul des \texttt{VL53L7CX} im Gehäuse.
+\\
+\begin{figure}[h]
+	\centering
+	\includegraphics[scale=0.2]{images/VL53L7CX_Package.jpg}
+	\caption{Package eines \texttt{VL53L7CX} \cite{STM-24}}
+	\label{VL53L7CX Package}
+\end{figure}
+
+
+\section{Programme}
+
+\subsection{Robot Operating System 2}
+\gls{ROS} ist ein Open Source Framework, das beim Erstellen von Anwendungen für die Robotik helfen soll.
+Innerhalb des Frameworks erstellt man Packages. Packages sind Ansammlungen von Nodes, die in unterschiedlichen Programmiersprachen geschrieben sein können.
+Innerhalb eines Packages verwendet man für Nodes nur eine Programmiersprache. Nodes können in Dauerschleifen ausgeführt werden und, während die Nodes laufen, durch \gls{ROS}-Subscriber neue Daten erhalten. Dieser Datenaustausch geschieht unter anderem über Topics, die von \gls{ROS}-Publishern den Subscribern zur Verfügung gestellt werden. In Abbildung \ref{Topic_Viz} wird der Datenaustausch dargestellt.
+\\
+\begin{figure}[H]
+	\centering
+	\includegraphics[scale=0.65]{images/Topic_explained.png}
+	\caption{Visualisierung von einem Topic, Source:\cite{ORO-25}}
+	\label{Topic_Viz}
+\end{figure}
+
+\subsection{RVIZ2 und Gazebo Classic}
+\gls{RVIZ} und Gazebo Classic sind zwei Werkzeuge in der Robotik, insbesondere in der \gls{ROS}-Umgebung.
+\\
+\gls{RVIZ} ist ein Visualisierungstool, das Sensordaten, Roboterbewegungen und Umgebungskarten in einer grafischen Oberfläche darstellt. Es ermöglicht das Debuggen und die Interaktion mit Sensordaten sowie die Visualisierung von Transformationsbeziehungen zwischen verschiedenen Robotergelenken und Sensoren.
+\\
+Gazebo Classic hingegen ist eine Simulationsumgebung, die realistische physikalische Modelle von Robotern und ihrer Umgebung erstellt. In Gazebo können Kollisionen, Gravitation, Reibung und andere physikalische Effekte simuliert werden, wodurch sich das Verhalten eines Roboters vor dem Einsatz in der realen Welt testen lässt.
+
+\subsection*{Zusammenfassung}
+Zusammenfassend lässt sich festhalten, dass bereits eine Vielzahl bewährter Ansätze zur Abstandsmessung und Kollisionsvermeidung existiert. Der in dieser Arbeit verfolgte Ansatz zeichnet sich dadurch aus, dass er zentrale Vorteile bestehender Methoden integriert: die dynamische Gestaltung einer \gls{SSM} nach \cite{AMA-22}, der Verzicht auf kamerabasierte Systeme aus Gründen des Datenschutzes und des Rechenaufwands wie in \cite{SCH-19}, sowie die sensorische Datenfusion unterschiedlicher Quellen, ähnlich wie sie in \cite{RAS-20} beschrieben ist. Damit bildet der vorgestellte Ansatz eine vielversprechende Grundlage, auf der im folgenden Kapitel die konkrete Umsetzung des Sensorsystems erläutert wird.
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/Kapitel/Umsetzung.tex b/Bachelorarbeit/V2/Kapitel/Umsetzung.tex
new file mode 100644
index 0000000000000000000000000000000000000000..60e9b0ba73d6196fe0e6f74d28487e6e5a64ebdd
--- /dev/null
+++ b/Bachelorarbeit/V2/Kapitel/Umsetzung.tex
@@ -0,0 +1,783 @@
+\chapter{Umsetzung}
+%Struktur des Kapitels in zukunft nach dem Flowchart orientieren!!!
+\begin{figure}[h]
+	\centering
+	\includegraphics[width=0.85\linewidth]{images/FlowChart_BA.png}
+	\caption{Ablaufdiagramm von der Sensordaten Verarbeitung}
+	\label{Flowchart}
+\end{figure}
+
+\section*{Einleitung}
+Abbildung~\ref{Flowchart} veranschaulicht den Datenfluss von der Sensormodul-Erfassung bis zur Visualisierung in \gls{RVIZ} und zur Kollisionsvermeidung. Entsprechend dieser Struktur gliedert sich das vorliegende Kapitel in fünf Abschnitte:
+\begin{enumerate}
+  \item \textbf{Sensorik und Verdrahtung:} Beschreibung der mechanischen Anordnung der 18 VL53L7CX-\glspl{ToF} in konzentrischen Ringen, der internen I²C-Verdrahtung (Wire \& Wire1) sowie der Anbindung an den Raspberry Pi Pico (Kapitel~\ref{Sensoranordnung_und_Sensormontage}).
+  \item \textbf{Firmware auf dem Mikrocontroller:} Erläuterung des auf dem Pico ausgeführten Programms, inklusive der \acrshort{JSON}-Serialisierung und \acrshort{UART}-Übertragung der Sensordaten (Kapitel~\ref{MicroController}).
+  \item \textbf{Datenverarbeitung auf dem Host–PC:} Darstellung der \gls{ROS}–Nodes \\(\texttt{serial\_to\_pcl\_node}, \texttt{pcl\_filter\_node}, \texttt{moveit\_stop\_node}) zur Umwandlung der Rohdaten in Punktwolken, Filterung der Robotergeometrie und Auslösung eines überwachten Stopps (Kapitel~\ref{ROS2_Programm}).
+  \item \textbf{Messdaten und Analyse:} Präsentation und Auswertung der experimentellen Ergebnisse zum effektiven Detektionsvermögen $d_e$ (Tabellen~\ref{tab:Detektionsvermögen_weiß} und \ref{tab:Detektionsvermögen_schwarz}) sowie zur Systemreaktionszeit $T_r$ (Tabellen~\ref{tab:reaktionszeiten}, \ref{tab:serielle_sensorfrequenz}, \ref{tab:reaktionszeit_terme})  (Kapitel~\ref{Messdaten}).
+  \item \textbf{Überprüfung der Anforderungen:} Bewertung, inwieweit das entwickelte System die in Kapitel~\ref{Rahmen_und_Anforderungen} definierten Kriterien erfüllt (Kapitel~\ref{Überprüfung_der_Anforderungen}).
+\end{enumerate}
+\section{Sensoranordnung und Sensormontage}
+\label{Sensoranordnung_und_Sensormontage}
+In diesem Projekt werden insgesamt 18 \gls{ToF}en eingesetzt, die in drei konzentrischen Ringen mit jeweils sechs Sensoren angeordnet sind. In Abbildung \ref{fig:3d_modell_2} ist der obere Ring vollständig und der mittlere Ring teilweise sichtbar. Ziel dieser Anordnung ist es, ausgehend von einem zentralen Punkt am Unterarm des \gls{UR}10 einen möglichst großen Bereich rund um das Robotersystem zu überwachen. Die Entscheidung für eine zentralisierte Sensoranordnung wird getroffen, um die Leitungslängen der \gls{SCL} und \gls{SDA} der \gls{I2C}-Kommunikation möglichst kurz zu halten. Längere Leitungen können parasitäre Kapazitäten verursachen, welche die Signalqualität bei hohen Frequenzen und somit die Zuverlässigkeit der Datenübertragung negativ beeinflussen. Die einzelnen Sensoren sind auf den Ringen mit einem Winkelabstand von jeweils \SI{60}{\degree} zueinander kreisförmig angeordnet, dadurch lässt sich die horizontale \gls{FOV} von ebenfalls \SI{60}{\degree} der \texttt{VL53L7CX} ohne Überschneidungen verwenden. In Abbildung \ref{fig:Angular} ist diese Anordnung noch einmal grafisch dargestellt. Die Ringe der Sensorhalterung sind ebenfalls um \SI{60}{\degree} zueinander versetzt, wie man der Zeichnung \ref{fig:Techikcal_Drawing} entnehmen kann.
+
+Zur Montage am Unterarm des Roboters lässt sich die Halterung in zwei Hälften aufteilen. In einer Hälfte, die im Folgenden die erste Hälfte genannt wird, ist der Raspberry Pi Pico montiert, wie sich Abbildung \ref{fig:Innere_der_Halterung} entnehmen lässt. Die Halterung wird durch drei orthogonal zur Oberfläche des Roboterarms eingebrachte Schrauben fixiert, die durch Anpressen an den Unterarm ein Verrutschen verhindern. In der ersten Hälfte ist ein Gewinde für eine dieser Schrauben platziert und in der zweiten Hälfte sind die anderen beiden Schraubengewinde platziert. Um ein Zerkratzen der metallischen Oberfläche des Unterarms vom \gls{UR}10 zu verhindern, werden mehrere Lagen Isolierband auf die Enden der Schrauben geklebt, wie man ebenfalls in Abbildung \ref{fig:Innere_der_Halterung} erkennen kann.
+\\Um die beiden Hälften am Roboter zu verbinden, sind vier Gewinde mittig an den Kanten der Hälften in das \gls{PLA} eingeschmolzen. In der zweiten Hälfte werden Schrauben in die Gewinde gedreht. Danach werden die Hälften so an den Roboterarm gehalten, dass die Schraubenenden, die aus der zweiten Hälfte herausragen, an den Gewinden in der ersten Hälfte anliegen.\begin{figure}
+    \centering
+    \includegraphics[width=0.8\linewidth]{images/Cropped_Bracket.png}
+    \caption{Eine Aufnahme des Inneren der Sensorhalterung mit allen montierten Komponenten.}
+    \label{fig:Innere_der_Halterung}
+\end{figure}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.5\linewidth]{images/Angualar_distance.png}
+    \caption{Technische Abbildung des oberen Rings der Sensorhalterung mit Winkelangaben}
+    \label{fig:Angular}
+\end{figure}
+
+Die Spannungsversorgung sowie der \gls{I2C}-Datenbus sind symmetrisch auf die zwei Systemhälften aufgeteilt, wobei jede Hälfte über ein eigenes \gls{I2C}-Interface verfügt. Innerhalb jeder Hälfte werden Spannungsversorgung und \gls{I2C}-Signale durch jeweils neun \texttt{VL53L7CX}-Verbinder hindurchgeführt und münden schließlich in einen \gls{I2C}-\acrshort{GPIO}-Expander des Typs PCF8575. Die entsprechende Verschaltung ist im Schaltplan (siehe Abbildung \ref{fig:Schematic}) grafisch dargestellt.
+
+An dieser Stelle lässt sich bereits die zweite Forschungsfrage aus dem Unterkapitel \ref{Forschungsfragen} beantworten. Diese Frage bezieht sich auf die Überschneidung vom eingeschränkten Raum mit dem Detektionsbereich der \gls{ToFs}. Durch die Positionierung und Ausrichtung der einzelnen Sensoren auf der Halterung und der Positionierung der Halterung auf dem Unterarm des \gls{UR}10 lässt sich der eingeschränkte Raum des Industrieroboters überwachen. In den Abbildungen \ref{fig:FOV_Pyramids_above} und \ref{fig:FOV_Pyramids_angeled_side} ist mit teilweise durchsichtigen Pyramiden die \gls{FOV} der \texttt{VL53L7CX} dargestellt. Ebenfalls zu sehen sind Lücken zwischen den Pyramiden mit den Maßen, die sich aus den Zeichnungen \ref{fig:fov_drawing} und \ref{fig:fov_top_view} entnehmen lassen.
+
+\section{MicroController}
+\label{MicroController}
+In diesem Unterkapitel werden die Funktionen vom Raspberry Pi Pico und das Programm, das auf dem \gls{MCU} läuft, beschrieben. 
+\subsection{Raspberry Pi Pico}
+Der Raspberry Pi Pico ist ein kostengünstiger \gls{MCU}, der sich besonders für eingebettete Systeme eignet. Ein zentrales Merkmal ist die Unterstützung mehrerer Kommunikationsprotokolle, insbesondere von \gls{I2C}, das eine effiziente Anbindung zahlreicher externer Sensoren und Aktoren ermöglicht. Die wichtigsten technischen Merkmale sind:
+
+\begin{itemize}
+  \item Mikrocontroller: Raspberry Pi RP2040 (Dual-Core \acrshort{ARM} Cortex-M0+ mit 133\,MHz)
+  \item Arbeitsspeicher: 264\,kB \acrshort{SRAM}
+  \item Flash-Speicher: 2\,MB On-Board-Flash
+  \item \gls{GPIO}: 26 multifunktionale Pins (u.\,a.\ \gls{I2C}, \acrshort{SPI}, \acrshort{UART}, \acrshort{PWM}, \acrshort{ADC})
+  \item Betriebsspannung: 1{,}8–5{,}5\,V (über \acrshort{VSYS} oder micro\acrshort{USB})
+  \item Programmierung: In C/C++ oder MicroPython über die USB-Schnittstelle
+\end{itemize}
+
+Zur Integration multipler Sensoren wird aufgrund seiner zwei \gls{I2C}-Schnittstellen von der externen Betreuerin Sophie Charlotte Keunecke der Raspberry Pi Pico \gls{MCU} vorgeschlagen \cite{RAS-24}.
+
+\subsection{Programm}
+Das entwickelte Programm basiert auf einem Beispiel aus dem Github-Repository \cite{GIT-25}. Das Programm steuert mehrere \texttt{VL53L7CX}-\gls{ToFs} über einen \gls{MCU}. 
+Die Datenübertragung erfolgt über zwei unabhängige \gls{I2C}-Datenbusse (DEV\_I2C0, DEV\_I2C1).
+Die ermittelten Messwerte werden in \gls{JSON} serialisiert und via \gls{UART} an den Host (Linux-\acrshort{PC}) übertragen.
+Ein Arduino-Programm ist in zwei Hauptteile unterteilt. Der erste Teil ist die \textcolor{orange}{setup}()-Funktion. Im \textcolor{orange}{setup}() werden Funktionen einmal nach Neustart des \gls{MCU} ausgeführt.
+\\Der zweite Teil ist die \textcolor{orange}{loop}()-Funktion. Die Funktionen innerhalb der \textcolor{orange}{loop}() werden in einer Schleife ausgeführt.
+
+Zu Beginn werden die Bibliotheken in ein Programm inkludiert. Die wichtigsten sind die folgenden:   
+\begin{itemize}
+	\setlength{\itemsep}{0pt}  % Reduces space between items
+	\item Arduino.h: Grundlegende Arduino-Funktionalitäten
+	\item Wire.h: I2C-Kommunikation
+	\item ArduinoJson.h: Serialisierung der Sensordaten in \gls{JSON}-Format
+	\item vl53l7cx\_class.h: Ansteuerung der \texttt{VL53L7CX}-Sensoren
+	\item PCF8575.h: Steuerung der PCF8575 \gls{I2C}-\gls{GPIO}-Expander
+\end{itemize}
+Zuerst wird an dieser Stelle auf die Funktionen innerhalb der \textcolor{orange}{setup}() eingegangen.
+\begin{lstlisting}[caption={Der Code, der zu Beginn einmal ausgeführt wird.},label=Snippet_Setup,style=customcpp]
+void setup() {
+  // Led.
+  pinMode(LedPin, OUTPUT);
+  digitalWrite(LedPin, HIGH);
+
+  // Initialize serial for output.
+  Serial.begin(baudrate);
+  Serial.println();
+  Serial.println("Please wait, it may take few seconds...");
+
+  // Initialize I2C bus.
+  DEV_I2C0.begin();
+  DEV_I2C0.setClock(i2c_freq_hz);
+
+  DEV_I2C1.setSCL(SCL_PIN1);
+  DEV_I2C1.setSDA(SDA_PIN1);
+  DEV_I2C1.begin();
+  DEV_I2C1.setClock(i2c_freq_hz);
+
+  //setup PCF8575 Boards on I2C0 and I2C1
+  setupPCF(PCF0, DEV_I2C0, 0);
+  setupPCF(PCF1, DEV_I2C1, 1);
+  
+  // Initialize and configure sensors
+  initializeSensor(sensor9,sensoraddress0, LPN_PIN0, PCF0,"PCF0");
+  initializeSensor(sensor10,sensoraddress1, LPN_PIN1, PCF0,"PCF0");
+  initializeSensor(sensor11,sensoraddress2, LPN_PIN2, PCF0,"PCF0");
+  initializeSensor(sensor12,sensoraddress3, LPN_PIN3, PCF0,"PCF0");
+  initializeSensor(sensor13,sensoraddress4, LPN_PIN4, PCF0,"PCF0");
+  initializeSensor(sensor14,sensoraddress5, LPN_PIN5, PCF0,"PCF0");
+  initializeSensor(sensor15,sensoraddress6, LPN_PIN6, PCF0,"PCF0");
+  initializeSensor(sensor16,sensoraddress7, LPN_PIN7, PCF0,"PCF0");
+  initializeSensor(sensor17,sensoraddress8, LPN_PIN8, PCF0,"PCF0");
+
+  Serial.println("Initialized "+String(sensor_count)+" Sensors");
+
+  initializeSensor(sensor0,sensoraddress0, LPN_PIN0, PCF1,"PCF1");
+  initializeSensor(sensor1,sensoraddress1, LPN_PIN1, PCF1,"PCF1");
+  initializeSensor(sensor2,sensoraddress2, LPN_PIN2, PCF1,"PCF1");
+  initializeSensor(sensor3,sensoraddress3, LPN_PIN3, PCF1,"PCF1");
+  initializeSensor(sensor4,sensoraddress4, LPN_PIN4, PCF1,"PCF1");
+  initializeSensor(sensor5,sensoraddress5, LPN_PIN5, PCF1,"PCF1");
+  initializeSensor(sensor6,sensoraddress6, LPN_PIN6, PCF1,"PCF1");
+  initializeSensor(sensor7,sensoraddress7, LPN_PIN7, PCF1,"PCF1");
+  initializeSensor(sensor8,sensoraddress8, LPN_PIN8, PCF1,"PCF1");
+
+  Serial.println("Initialized "+String(sensor_count)+" Sensors");
+}
+\end{lstlisting}
+Zu Beginn der \textcolor{orange}{setup}()-Funktion, die in Listing \ref{Snippet_Setup} dargestellt ist, wird die auf dem Raspberry Pi Pico integrierte \gls{LED} aktiviert, um anzuzeigen, dass sich der Mikrocontroller (\gls{MCU}) derzeit in der Initialisierungsphase befindet.
+
+\textbf{Zeilen 6–18}: Anschließend erfolgt die Initialisierung der seriellen Schnittstelle sowie der beiden \gls{I2C}-Schnittstellen des Raspberry Pi Pico. Für \gls{I2C}0 werden die Standard-Pins verwendet, während für die zweite \gls{I2C}-Schnittstelle benutzerdefinierte Pins konfiguriert werden, da diese im physischen Aufbau besser zugänglich sind.
+
+\textbf{Zeilen 21–22}: In diesem Abschnitt werden zwei benutzerdefinierte Funktionen zur Initialisierung der \gls{I2C}-\gls{GPIO}-Expander aufgerufen, auf deren Funktionsweise im weiteren Verlauf noch eingegangen wird.
+
+\textbf{Zeilen 24–48}: Im letzten Abschnitt der \textcolor{orange}{setup}()-Funktion werden die \texttt{VL53L7CX}-Sensoren sequentiell initialisiert. Nach jeweils neun Sensoren erfolgt eine Rückmeldung über die serielle Schnittstelle. Sollte ein Sensor nicht korrekt initialisiert werden können, gibt die Funktion \textcolor{orange}{initializeSensor}() eine entsprechende Fehlermeldung über die serielle Schnittstelle aus.
+
+
+Nun wird die im \textcolor{orange}{setup}() verwendete Funktion \textcolor{orange}{initializeSensor} betrachtet.
+Der Code in \ref{Snippet Init} initialisiert den Sensor, weist einem \texttt{VL53L7CX} eine neue \gls{I2C}-Adresse zu, konfiguriert die Auflösung und die Abtastrate des Sensors. Anschließend wird die Entfernungserfassung (Ranging) aktiviert. Die Initialisierungsfunktion erwartet ein \texttt{VL53L7CX}-Objekt, die Zieladresse des zugehörigen \gls{LPN}-Pins, ein Objekt der PCF8575-Klasse und den Namen des PCF, um rückmelden zu können, auf welcher Hälfte ein Sensor initialisiert wird. 
+\begin{lstlisting}[caption={Funktion zum Initialisieren der Sensoren im Setup},label=Snippet Init,style=customcpp]
+void initializeSensor(VL53L7CX &sensor, uint16_t sensorAddress, int lpnPin, PCF8575 &PCF, String PCF_Name) {
+  uint8_t status = VL53L7CX_STATUS_OK;
+  uint8_t isAlive = 0;
+  //Checks if only the LPn from the target sensor is set high
+  PCF.write(lpnPin, HIGH);
+  double x = PCF.read16();
+  x = log2(x);
+  Serial.println(
+  "Starting to initialize Sensor " + String((int)round(x)) + " on the " + PCF_Name + " half."
+  );
+  // Changes I2C address from default to custom
+  status = sensor.vl53l7cx_set_i2c_address(sensorAddress << 1);
+  if (status != VL53L7CX_STATUS_OK) {
+    Serial.print(
+    "Failed to initialize Sensor " + String((int)round(x)) 
+    +" and received I2C Error:"
+    );
+    Serial.println(VL53L7CX_STATUS_ERROR);
+    PCF.write16(0x0000);
+    return;
+  }
+  //Checks if Sensor is alive
+  status = sensor.vl53l7cx_is_alive(&isAlive);
+  if (!isAlive || status != VL53L7CX_STATUS_OK) {
+    Serial.print(
+    "Failed to initialize Sensor " + String((int)round(x)) 
+    +" and received is_alive Error:"
+    );
+    Serial.println(VL53L7CX_STATUS_ERROR);
+    PCF.write16(0x0000);
+    return;
+  }
+  // Init VL53L7CX sensor
+  status = sensor.vl53l7cx_init();
+  if (status != VL53L7CX_STATUS_OK) {
+    Serial.print("Failed to initialize Sensor " + String((int)round(x)) 
+    +" and received init Error:");
+    Serial.println(VL53L7CX_STATUS_ERROR);
+    PCF.write16(0x0000);
+    return;
+  }
+  // Set resolution and frequency
+  sensor.vl53l7cx_set_resolution(VL53L7CX_RESOLUTION_8X8);
+  sensor.vl53l7cx_set_ranging_frequency_hz(ranging_frequency);
+  // Start ranging
+  sensor.vl53l7cx_start_ranging();
+  sensor_count++;
+  // Deactivates sensor LPn
+  PCF.write16(0x0000);
+}
+\end{lstlisting}
+
+\textbf{Zeilen 2–3:} Die Variablen \texttt{status} und \texttt{isAlive} werden auf ihren Ausgangszustand zurückgesetzt.
+
+\textbf{Zeilen 5–10:} Der PCF8575-\gls{GPIO}-Expander wird über die \gls{I2C}-Schnittstelle angesteuert, um den \gls{LPN}-Pin des aktuell zu initialisierenden Sensors auf \texttt{high} zu setzen. Dies ist erforderlich, um dessen \gls{I2C}-Adresse ändern zu können. Die \gls{LPN}-Pins aller übrigen \texttt{VL53L7CX}-Sensoren verbleiben auf LOW, um unbeabsichtigte Adresskonflikte zu vermeiden. Anschließend wird mithilfe der Funktion \textcolor{orange}{PCF.read16}() verifiziert, ob ausschließlich der gewünschte \gls{LPN} aktiviert wird. Das Ergebnis dieser Überprüfung wird über die serielle Schnittstelle zurückgemeldet.
+
+\textbf{Zeilen 11–41:} In diesem Abschnitt wird die \gls{I2C}-Adresse des Sensors geändert, um ihn im weiteren Verlauf individuell adressieren zu können. Zudem wird überprüft, ob der Sensor auf die neue Adresse reagiert. Anschließend wird mittels der Funktion \textcolor{orange}{vl53l7cx\_init} die Firmware über den \gls{I2C}-Bus in den \gls{RAM} des Sensors geladen, da dieser über keinen persistenten Speicher verfügt. Sollte einer dieser Schritte fehlschlagen, wird ein Fehlercode über die serielle Schnittstelle ausgegeben, alle \gls{LPN}-Pins werden auf \texttt{low} zurückgesetzt, und die Initialisierung des betreffenden Sensors wird abgebrochen.
+
+\textbf{Zeilen 42–46:} Mit den Funktionen \textcolor{orange}{vl53l7cx\_set\_resolution}() und 
+\\\textcolor{orange}{vl53l7cx\_set\_ranging\_frequency\_hz}() aus der \texttt{VL53L7CX}-Bibliothek werden die gewünschte Auflösung sowie die Messfrequenz des Sensors konfiguriert. Nach erfolgreicher Konfiguration wird der Sensor aktiviert.
+
+\textbf{Zeilen 47–49:} Der Zähler \texttt{sensor\_count} wird nach erfolgreicher Initialisierung um eins erhöht und alle Pins vom PCF8575-Board werden auf den Wert \texttt{low} gesetzt.
+
+Nun wird die \textcolor{orange}{setupPCF}()-Funktion \ref{Snippet_setupPCF} betrachtet. Diese Funktion wird im \textcolor{orange}{setup}() genutzt, um die PCF8575 \gls{I2C} zu \gls{GPIO}-Erweiterungsboards zu initialisieren und, falls nötig, über die serielle Schnittstelle zurück zu melden, dass die Initialisierung nicht erfolgreich war.
+\begin{lstlisting}[caption={Funktion zum Initialisieren der PCF8575 Boards im Setup},label=Snippet_setupPCF,style=customcpp]
+void setupPCF(PCF8575 &PCF, TwoWire DEV_I2C, int num) {
+  if (!PCF.begin(0x0000)) {
+    Serial.println("could not initialize...");
+  }
+  while (!PCF.isConnected()) {
+    i2cScanner(DEV_I2C);
+    delay(500);
+    PCF.begin(0x0000);
+    int x = PCF.lastError();
+    Serial.println("Error from I2C " + String(num) + " and Fehlercode: " + String(x));
+  }
+}
+\end{lstlisting}
+\textbf{Zeilen 2–4:} Die Initialisierung des PCF8575-\gls{GPIO}-Erweiterungsboards erfolgt mittels der Funktion \textcolor{orange}{PCF.begin}() unter Angabe des Argumentes \texttt{0x0000}. Dieses Argument bewirkt, dass sämtliche Pins des Boards beim Start standardmäßig auf \texttt{LOW} gesetzt werden. Sollte die Initialisierung fehlschlagen, liefert die Funktion einen \texttt{false}-Wert zurück. In diesem Fall wird über die serielle Schnittstelle eine Fehlermeldung ausgegeben, die darauf hinweist, dass das Board nicht erfolgreich initialisiert werden konnte.
+
+\textbf{Zeilen 5–12:} In diesem Abschnitt wird eine blockierende \textcolor{arduinoGreen}{while}-Schleife verwendet. Innerhalb dieser Schleife durchsucht die Funktion \textcolor{orange}{i2cScanner}() das \gls{I2C}-Interface nach angeschlossenen Geräten. Anschließend erfolgt eine Wartezeit von \SI{500}{\milli\second}, bevor ein erneuter Versuch zur Initialisierung des PCF8575-Boards unternommen wird. Im Anschluss daran wird über die serielle Schnittstelle ausgegeben, auf welchem \gls{I2C}-Bus die Initialisierung fehlgeschlagen ist und welcher Fehlercode dabei zurückgegeben wird. Wird das \gls{I2C}-\gls{GPIO}-Erweiterungsmodul weiterhin nicht erkannt, so wird die Schleife erneut durchlaufen.
+
+Als nächstes wird die \textcolor{orange}{loop}-Funktion \ref{Snippet_Loop} des Programms betrachtet. Innerhalb der Schleife werden die Sensordaten abgefragt und über die serielle Schnittstelle als \gls{JSON}-String an die nächste Instanz weitergegeben.
+\begin{lstlisting}[label=Snippet_Loop, style=customcpp, caption={Loop-Funktion des Programms}]
+void loop() {
+  // Declare the result data variables for each sensor
+  VL53L7CX_ResultsData Results0;
+  VL53L7CX_ResultsData Results1;
+  VL53L7CX_ResultsData Results2;
+  VL53L7CX_ResultsData Results3;
+  VL53L7CX_ResultsData Results4;
+  VL53L7CX_ResultsData Results5;
+  VL53L7CX_ResultsData Results6;
+  VL53L7CX_ResultsData Results7;
+  VL53L7CX_ResultsData Results8;
+  VL53L7CX_ResultsData Results9;
+  VL53L7CX_ResultsData Results10;
+  VL53L7CX_ResultsData Results11;
+  VL53L7CX_ResultsData Results12;
+  VL53L7CX_ResultsData Results13;
+  VL53L7CX_ResultsData Results14;
+  VL53L7CX_ResultsData Results15;
+  VL53L7CX_ResultsData Results16;
+  VL53L7CX_ResultsData Results17;
+
+  // Process each sensor data and save to respective JSON arrays
+  processSensorData(sensor0, Results0, "sensor0", LPN_PIN0);
+  processSensorData(sensor1, Results1, "sensor1", LPN_PIN1);
+  processSensorData(sensor2, Results2, "sensor2", LPN_PIN2);
+  processSensorData(sensor3, Results3, "sensor3", LPN_PIN3);
+  processSensorData(sensor4, Results4, "sensor4", LPN_PIN4);
+  processSensorData(sensor5, Results5, "sensor5", LPN_PIN5);
+  processSensorData(sensor6, Results6, "sensor6", LPN_PIN6);
+  processSensorData(sensor7, Results7, "sensor7", LPN_PIN7);
+  processSensorData(sensor8, Results8, "sensor8", LPN_PIN8);
+  processSensorData(sensor9, Results9, "sensor9", LPN_PIN0);
+  processSensorData(sensor10, Results10, "sensor10", LPN_PIN1);
+  processSensorData(sensor11, Results11, "sensor11", LPN_PIN2);
+  processSensorData(sensor12, Results12, "sensor12", LPN_PIN3);
+  processSensorData(sensor13, Results13, "sensor13", LPN_PIN4);
+  processSensorData(sensor14, Results14, "sensor14", LPN_PIN5);
+  processSensorData(sensor15, Results15, "sensor15", LPN_PIN6);
+  processSensorData(sensor16, Results16, "sensor16", LPN_PIN7);
+  processSensorData(sensor17, Results17, "sensor17", LPN_PIN8);
+
+  // Serialize the JSON document and print to Serial
+  serializeJson(doc, Serial);
+  Serial.println();
+}
+\end{lstlisting}
+\textbf{Zeilen 2–20:} In diesem Abschnitt der \textcolor{orange}{loop}()-Funktion werden 18 Instanzen der Struktur \texttt{VL53L7CX\_ResultsData} deklariert. Diese dienen als temporäre Container für die aktuellen Messdaten der jeweiligen Sensoren. Bei jedem neuen Durchlauf der Schleife werden diese Container überschrieben, sodass stets nur aktuelle Messwerte verarbeitet werden.
+
+\textbf{Zeilen 22–40:} Im Anschluss wird die Funktion \textcolor{orange}{processSensorData}() insgesamt 18-mal aufgerufen – einmal für jeden angeschlossenen \gls{ToF}. Der Funktion werden jeweils vier Parameter übergeben: das entsprechende Sensorobjekt der Klasse \texttt{VL53L7CX}, die zugehörige Datenstruktur \texttt{ResultsX}, ein eindeutiger Sensornamensstring zur Identifikation im \gls{JSON}-Objekt sowie der zugehörige \gls{LPN}-Pin des Sensors. Über diese Aufrufe werden die erfassten Sensordaten verarbeitet, in die zugehörigen \gls{JSON}-Arrays geschrieben und für die serielle Ausgabe vorbereitet. Die ersten neun Sensoren gehören zu einer Hälfte und die letzten neun Sensoren zur anderen Hälfte des Aufbaus. Deshalb wiederholen sich ab Sensor neun die \gls{LPN}-Pinbezeichnungen. 
+
+
+
+Abschließend beschreibt der Code in \ref{Snippet Prozess} die Datenverarbeitung.
+Diese Funktion ruft die Messwerte des \gls{ToF}s ab und speichert die Daten in einem \gls{JSON}-Array.
+Zur Visualisierung des Verarbeitungsvorgangs wird eine \gls{LED} ein- und wieder ausgeschaltet.
+Abschließend übermittelt die Hauptschleife bei vollständiger Datensammlung die \gls{JSON}-Payload mit einer Baudrate von 1000000 an den \acrshort{PC} mit dem \gls{ROS}-Programm. Die Funktion benötigt als Argumente: eine \texttt{VL53L7CX}-Instanz, einen leeren Daten-Container, den Extraktionsschlüssel sowie die Pin-Nummer von dem \gls{LPN}-Pin des Sensors.	
+\begin{lstlisting}[label=Snippet Prozess, style=customcpp, caption={processSensorData - Verarbeitung der Sensordaten}]
+void processSensorData(VL53L7CX &sensor, VL53L7CX_ResultsData &results, const char *sensorKey, int lpn_pin) {
+  uint8_t NewDataReady = 0;
+  uint8_t status;
+
+  // Wait for data to be ready
+  do {
+    status = sensor.vl53l7cx_check_data_ready(&NewDataReady);
+  } while (!NewDataReady);
+
+  // Turn LED on to indicate data processing
+  digitalWrite(LedPin, HIGH);
+
+  if ((!status) && (NewDataReady != 0)) {
+    status = sensor.vl53l7cx_get_ranging_data(&results);
+    JsonArray sensorData = doc[sensorKey].to<JsonArray>();
+    for (int y = imageWidth * (imageWidth - 1); y >= 0; y -= imageWidth) {
+      for (int x = 0; x <= imageWidth - 1; x++) {
+        int index = VL53L7CX_NB_TARGET_PER_ZONE * (x + y);
+
+        // Neu: JsonArray via add<JsonArray>()
+        JsonArray measurement = sensorData.add<JsonArray>();
+        measurement.add(results.distance_mm[index]);
+        measurement.add(results.target_status[index]);
+      }
+    }
+  }
+  digitalWrite(LedPin, LOW);
+}
+\end{lstlisting}
+
+\textbf{Zeile 3-9:} Zwei Hilfsvariablen (\texttt{NewDataReady} und \texttt{status}) werden deklariert. In einer \texttt{do-while}-Schleife wird so lange gewartet, bis der Sensor neue Daten zur Verfügung stellt. Dies geschieht über die Methode \texttt{vl53l7cx\_check\_data\_ready}.
+
+\textbf{Zeile 11-12:} Eine Status-LED wird eingeschaltet, um anzuzeigen, dass eine Messung verarbeitet wird.
+
+\textbf{Zeile 14-26:} Sobald neue Daten verfügbar sind und kein Fehlerstatus vorliegt, werden die Messwerte mittels \texttt{vl53l7cx\_get\_ranging\_data} ausgelesen. Die Messdaten werden anschließend in ein JSON-Array geschrieben, das im Dokument unter dem gegebenen \texttt{sensorKey} gespeichert wird. Dabei wird ein verschachteltes zweidimensionales Raster (entsprechend der Sensorauflösung) durchlaufen. Für jede Zone wird ein JSON-Array mit der gemessenen Distanz in Millimetern und dem zugehörigen Statuswert erstellt und dem übergeordneten Sensor-Array hinzugefügt.
+
+\textbf{Zeile 28:} Nach Abschluss der Verarbeitung wird die Status-LED wieder ausgeschaltet.
+
+
+
+
+\section{ROS2-Programm}
+\label{ROS2_Programm}
+Für das Projekt wird ein \gls{ROS}-Package entwickelt, das die Daten, die seriell als \gls{JSON} über eine \gls{USB} Verbindung vom Raspberry Pi Pico übermittelt werden, aufbereitet und als Topic veröffentlicht. Das Package besteht aktuell aus 3 Nodes.
+\\
+Die erste \gls{ROS} Node liest Sensordaten über eine serielle Verbindung aus, verarbeitet sie und veröffentlicht sie als \texttt{Pointcloud2}-Nachricht. Die Daten stammen von mehreren \texttt{VL53L7CX}-\gls{ToFs} und werden als \gls{JSON}-String empfangen. Die Node interpretiert die \gls{JSON}-Daten, konvertiert die Distanzwerte in 3D-Koordinaten und führt eine Rotationskorrektur durch, um die Sensordaten in ein einheitliches Koordinatensystem zu überführen. Anschließend werden die Punkte zu einer Punktwolke zusammengeführt und im \gls{ROS}-Frame ``vl53l7cx\_link'' veröffentlicht. Die Hauptfunktionen umfassen das Einlesen und Verarbeiten der seriellen Daten, die Berechnung der Punktkoordinaten aus den Sensordaten sowie die Generierung und Veröffentlichung der \gls{PCD} (\texttt{Pointcloud2}).
+
+\subsection{serial\_to\_pcl\_node}
+
+Die \texttt{serial\_to\_pcl\_node} \ref{serial_to_pcl_node} dient als Brücke zwischen dem seriell angebundenen Raspberry Pi Pico und dem \gls{ROS}-Ökosystem. Sie ist in Python implementiert und ermöglicht die Umwandlung und Veröffentlichung von Entfernungsdaten als Punktwolken im \gls{ROS}-Format. Ihre Hauptaufgabe ist es, die seriell übertragenen \gls{JSON}-Daten der \gls{ToFs} zu empfangen, in kartesische Koordinaten umzuwandeln, geometrisch zu transformieren und als \texttt{sensor\_msgs/PointCloud2} zu publizieren.
+
+\subsection*{Initialisierung und Setup}
+Beim Starten der Node (\texttt{\_\_init\_\_}) erfolgt:
+
+\begin{itemize}
+\item Das Deklarieren und Auslesen der Parameter \texttt{parent\_frame} (Standard: \texttt{vl53l7c\_link}) und \texttt{usb\_port} (Standard: \texttt{/dev/ttyACM0}).
+\item Die Initialisierung eines \texttt{PointCloud2}-Publishers auf dem Topic \texttt{/pcl}.
+\item Die Öffnung der seriellen Verbindung zur Sensorplattform mit einer Baudrate von $1\,\text{Mbit/s}$.
+\item Der Start des permanenten Lese- und Veröffentlichungsprozesses über die Methode \texttt{read\_serial\_and\_publish}.
+\end{itemize}
+
+\subsection*{Datenverarbeitung}
+Die Node erwartet zeilenweise \gls{JSON}-Daten mit Entfernungswerten von bis zu 18 Sensoren. Diese werden wie folgt verarbeitet:
+
+\begin{itemize}
+    \item Vorverarbeitung: Entfernungspaare mit ungültigen Statuswerten (nicht 5,6,9 oder 12) werden durch den Platzhalterwert \texttt{99999} ersetzt \cite{JOH-24}.
+    \item Umformung: Die gültigen Distanzwerte jedes Sensors werden in ein $8 \times 8$ Raster umgewandelt, wie bei dem Raster bei der Aufnahme der Daten durch den Sensor (siehe Abbildung \ref{VL53L7CX_Raster}).
+    \item Koordinatentransformation:
+    \begin{itemize}
+        \item Berechnung lokaler 3D-Koordinaten pro Sensor unter Berücksichtigung der vertikalen und horizontalen Blickwinkel, definiert durch die Sensorhalterung, die in Unterkapitel \ref{Sensoranordnung_und_Sensormontage} beschrieben wird.
+        \item Übersetzung der Punkte gemäß der physischen Anordnung der Sensorhalterung auf dem \gls{UR}10, durch veröffentlichen der \gls{PCD} mit dem \texttt{``vl53l7cx\_link''} als Referenzkoordinatensystem im Header vom Topic.
+    \end{itemize}
+    \item Kombination und Filterung: Alle Punktwolken werden zu einer globalen Punktwolke zusammengeführt und anschließend durch einen Distanzfilter (Standard: $< 3\text{m}$) reduziert, um Punkte, die mit dem Platzhalterwert versehen werden oder für die Kollisionsvermeidung nicht relevant sind, zu filtern.
+\end{itemize}
+
+\subsection*{Daten Ausgang}
+Die resultierende Punktwolke wird im \gls{ROS}-Format \texttt{PointCloud2} mit dem Namen \texttt{/pcl} und einer benutzerdefinierbaren \texttt{frame\_id} veröffentlicht. Das Koordinatensystem (Frame), das bei der Anwendung am \gls{UR}10 verwendet wird, heißt \texttt{vl53l7cx\_link} und wird standardmäßig verwendet, wenn kein anderer Frame vorgegeben wird.
+
+
+\subsection{pcl\_filter\_node}
+Die \texttt{pcl\_filter\_node} \ref{pcl_filter_node} dient der Verarbeitung und Klassifizierung von Punktwolkendaten zur Trennung von dem Robotersystem und Punkten von der Umgebung im Sichtfeld eines \gls{ToF}. Die Hauptaufgabe der Node ist es, zu bestimmen, welche Punkte einer empfangenen Punktwolke sich innerhalb oder auf einem approximierten Roboterkörper befinden und welche nicht. Letztere werden verwendet, um die Umgebung auf sich nähernde Objekte zu überwachen.
+
+\subsection*{Initialisierung und Setup}
+Beim Start der Node (\texttt{\_\_init\_\_}) werden folgende Komponenten initialisiert:
+\begin{itemize}
+\item Eine Subscription auf das Topic \texttt{/pcl}, über welches Punktwolkendaten im Format \texttt{sensor\_msgs/PointCloud2} empfangen werden.
+\item Zwei Publisher:
+\begin{itemize}
+\item \texttt{/valid\_from\_perspective} für Punkte außerhalb des Robotervolumens (enthält Punkte von der Umgebung und potentiellen Kollisionsobjekten),
+\item \texttt{/invalid\_from\_perspective} für Punkte innerhalb des Robotervolumens (durch den Roboter selbst verursacht).
+\end{itemize}
+\item Ein \texttt{TransformListener} mit zugehörigem \texttt{Buffer}, um Transformationen zwischen dem Frame von der Sensorhalterung (\texttt{vl53l7cx\_link}) und den beiden relevanten Robotergliedern (\texttt{upper\_arm\_link}, \texttt{forearm\_link}) des \gls{UR}10 abzurufen.
+\item Zwei Meshes in Zylinderform, welche als statische Approximationen der Robotersegmente dienen. Diese werden zunächst erzeugt, um $90^\circ$ um die $y$-Achse rotiert und anschließend entlang der $x$- und $z$-Achse translatiert, sodass sie die realen Gliederlagen umfassen.
+\end{itemize}
+
+\subsection*{Callback-Funktion \texttt{pointcloud\_callback}}
+Bei Empfang einer neuen Punktwolke läuft folgender Verarbeitungsprozess ab:
+\begin{enumerate}
+\item Mittels \texttt{\gls{TF}2} werden aktuelle Transformationen zwischen dem Frame von der Sensorhalterung und den Robotergliedern abgerufen.
+\item Die beiden vorbereiteten Zylinder-Meshes werden entsprechend der aktuellen Transformationsdaten transformiert, um ihre reale Lage im Koordinatensystem von der Sensorhalterung widerzuspiegeln.
+\item Die empfangene Punktwolke wird in ein \texttt{NumPy}-Array umgewandelt und die 3D-Koordinaten extrahiert.
+\item Mittels der Methode \texttt{contains} von \texttt{trimesh} wird für jeden Punkt überprüft, ob er innerhalb eines der transformierten Meshes liegt.
+\item Die Punktwolke wird aufgeteilt in:
+\begin{itemize}
+\item \textbf{Gültige Punkte} außerhalb der Robotergeometrie,
+\item \textbf{Ungültige Punkte} innerhalb der Robotergeometrie und damit der Roboter selbst.
+\end{itemize}
+\item Beide Punktgruppen werden als neue \texttt{PointCloud2}-Nachrichten auf ihren jeweiligen Topics veröffentlicht.
+\end{enumerate}
+
+\subsection*{Hilfsfunktionen}
+\begin{itemize}
+\item \texttt{transform\_mesh}: Wendet die kombinierte Transformation (Rotation + Translation) einer \texttt{\gls{TF}2}-Nachricht auf ein Mesh an.
+\item \texttt{point\_cloud}: Wandelt ein \texttt{NumPy}-Array von XYZ-Koordinaten in eine gültige \texttt{sensor\_msgs/PointCloud2}-Nachricht um.
+\item \texttt{visualize\_meshes} (optional): Zeigt die transformierten Meshes in einer \texttt{trimesh.Scene} an (nicht standardmäßig aktiv).
+\end{itemize}
+
+\subsection*{Funktion im Gesamtsystem}
+Diese Node übernimmt eine zentrale Vorverarbeitungsaufgabe. Durch die Klassifikation der Punktwolkendaten in ``roboterintern'' und ``roboterextern'' kann sie der nachgelagerten Node gefilterte Informationen über die aktuelle Umgebungssituation aus der Sensorperspektive zur Verfügung stellen. Die Kombination aus dynamischer \gls{TF}-Anbindung und Verwendung von Zylindern, die Roboterglieder umfassen, erlaubt eine Integration in unterschiedliche Robotersysteme durch Anpassen der Bezeichnungen von den Links des Ober- und Unterarms innerhalb der Node.
+
+
+\subsection{moveit\_stop\_node}
+Die \texttt{moveit\_stop\_node} \ref{moveit_stop_node} übernimmt die sicherheitskritische Verarbeitung von Punktwolkendaten zur Kollisionsvermeidung. Die Hauptfunktionalität der Node liegt in der Detektion von Objekten innerhalb einer als kritisch definierten Zone um bestimmte Roboterglieder und der Einleitung eines überwachten Stillstands.
+
+Beim Starten der dritten Node (\texttt{\_\_init\_\_}) werden folgende Komponenten initialisiert:
+\begin{itemize}
+  \item Eine Subscription auf das Topic \texttt{/valid\_from\_perspective}, welches Punktwolkendaten im Format \texttt{sensor\_msgs/PointCloud2} empfängt.
+  \item Drei Publisher:
+  \begin{itemize}
+    \item \texttt{/trajectory\_execution\_event} zur Veröffentlichung eines Stop-Kommandos bei Kollisionserkennung.
+    \item \texttt{/out\_safe\_zone} zur Publikation aller Punkte außerhalb der definierten Gefahrenzone.
+    \item \texttt{/in\_safe\_zone} zur Publikation der als kritisch eingestuften Punkte.
+  \end{itemize}
+  \item Ein \texttt{TransformListener} zur Abfrage von Transformationen zwischen der Sensorperspektive (\texttt{vl53l7cx\_link}) und den Robotergliedern (\texttt{upper\_arm\_link}, \texttt{forearm\_link}).
+  \item Zwei kollisionskritische Volumen (Meshes) in Form von Zylindern, welche die jeweiligen Robotersegmente geometrisch repräsentieren. Diese werden zur besseren Kollisionsabschätzung, im Vergleich zu den Zylindern in der \texttt{pcl\_filter\_node}, um $5\,\text{cm}$ im Radius und in der Länge vergrößert und entlang der $x$- und $z$-Achse entsprechend der realen Gliederpositionen transformiert.
+\end{itemize}
+
+\subsection*{Callback-Funktion \texttt{pointcloud\_callback}}
+Wird eine neue Punktwolke empfangen, erfolgt folgende Verarbeitung:
+\begin{enumerate}
+  \item Transformation der beiden Zylinder-Meshes in den aktuellen Koordinatenrahmen der Sensorperspektive basierend auf den \gls{TF}2-Daten.
+  \item Extraktion der Punktkoordinaten $(x, y, z)$ aus der empfangenen Punktwolke.
+  \item Prüfung, welche Punkte innerhalb der transformierten Meshes liegen (mittels \texttt{trimesh.contains}).
+  \item Aufteilung der Punkte in zwei Gruppen: innerhalb und außerhalb der Gefahrenzone.
+  \item Publikation beider Gruppen auf den jeweiligen Topics (\texttt{/in\_safe\_zone} und \texttt{/out\_safe\_zone}).
+  \item Falls die Anzahl der Punkte innerhalb der Gefahrenzone einen Schwellwert (z.\,B. \texttt{point\_threshold} = 2) übersteigt, wird über \texttt{/trajectory\_execution\_event} ein Stoppbefehl gesendet.
+\end{enumerate}
+
+\subsection*{Hilfsfunktionen}
+\begin{itemize}
+  \item \texttt{transform\_mesh}: Wendet eine Kombination aus Translation und Rotation auf ein gegebenes Mesh an basierend auf einer \gls{TF}2-Transformation.
+  \item \texttt{create\_point\_cloud}: Wandelt ein \texttt{NumPy}-Array von Punkten in eine \texttt{PointCloud2}-Nachricht um, welche anschließend publiziert werden kann.
+  \item \texttt{stop\_trajectory}: Erstellt und versendet eine \texttt{std\_msgs/String}-Nachricht mit dem Inhalt \texttt{``stop''}.
+\end{itemize}
+
+\subsection*{Funktion im Gesamtsystem}
+Diese Node erfüllt eine sicherheitsrelevante Überwachungsfunktion zur Erkennung potenzieller Kollisionen zwischen dem Robotersystem und Objekten im eingeschränkten Raum. Durch die dynamische Einbindung von \gls{TF}2-Informationen und die geometrische Approximation der Robotersegmente über Meshes ermöglicht sie eine dynamische \gls{SSM}.
+
+\section{UR10}
+Ein Bestandteil dieser Arbeit ist die Integration des Sensormoduls am Roboterarm des \gls{UR}10, um den eingeschränkten Raum zu überwachen. Der \gls{UR}10 ist ein sechsachsiger Industrieroboter, der häufig in kollaborativen Anwendungen eingesetzt wird. Der \gls{UR}10 verfügt über integrierte Drehmomentsensorik in seinen Gelenken sowie über eine präzise Steuerung seiner Bewegungen, was die Umsetzung von \gls{PFL} gemäß ISO/TS 15066 ermöglicht. Diese Fähigkeit erlaubt es dem Roboter, Kräfte und Geschwindigkeiten so zu regulieren, dass potenzielle Kollisionen mit Menschen in der \gls{HRC} frühzeitig erkannt und die Reaktionskräfte begrenzt werden \cite{UR-25}. Die Integration externer Sensorik in das Robotermodell (\gls{URDF}) ist eine Voraussetzung für die Erfassung von Umgebungsinformationen, da die mit den Abstandsmessungen der \gls{ToFs} generierte Punktwolke in dem Referenzkoordinatensystem der Sensorhalterung publiziert wird.
+\\Zu diesem Zweck wird ein eigenes \gls{URDF}-Package mit der Bezeichnung \texttt{ur\_description} erstellt. Es basiert auf dem offiziellen \texttt{ur\_description}-Package und wird um einen zusätzlichen statischen Link (\texttt{vl53l7cx\_link}) erweitert. Dieser Link stellt das Sensormodul dar, das mechanisch mit dem Unterarm des \gls{UR}10, wie in Kapitel \ref{Sensoranordnung_und_Sensormontage} erläutert, verbunden ist.
+\\Der \texttt{vl53l7cx\_link} wird als statischer \gls{URDF}-Link in die bestehende Modellstruktur integriert. Auf dem neuen Link wird ein 3D-Mesh zur Visualisierung in \gls{RVIZ} dargestellt. Die statische Definition innerhalb des Robotermodells – relativ zum sich bewegenden \texttt{forearm\_link} – erlaubt die Transformation der Sensordaten in das Roboterkoordinatensystem. Auf diese Weise kann die Sensorpose berechnet und in Beziehung zu den Gelenkstellungen des Roboters gesetzt werden. Die Einbindung eines geometrisch ausgerichteten Mesh-Modells unterstützt dabei die grafische Darstellung in der Visualisierungsumgebung, wie in Abbildung \ref{fig:Sensor_holder_on_UR10e} zu sehen ist.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.3\linewidth]{images/Sensor_holder_on_UR10e.jpg}
+    \caption{In diesem Bild kann man die Sensorhalterung auf dem \gls{UR}10 sehen.}
+    \label{fig:Sensor_holder_on_UR10e}
+\end{figure}
+
+Das \texttt{ur\_description}-Package bildet somit die Grundlage für die zeitliche und räumliche Verarbeitung von Sensordaten innerhalb eines bewegten Bezugsrahmens. Die modellbasierte Sensorintegration mit definierter Geometrie und kinematischer Transformation ermöglicht das Auswerten von Sensordaten von nicht ortsfesten \gls{ToFs}.
+
+\section{Messdaten}
+\label{Messdaten}
+An dieser Stelle wird beschrieben, wie die Daten zum Detailgrad ($d_e$) – im folgenden Unterkapitel auch als „effektives Detektionsvermögen“ bezeichnet – sowie zur Reaktionszeit ($T_r$) des Sensorsystems ermittelt werden.
+
+\subsection*{Detektionsvermögen}
+Zur Bestimmung der unteren Nachweisgrenze eines \gls{ToF}s werden Papierkreise mit variierendem Durchmesser und unterschiedlichem Reflexionsvermögen in einem definierten Abstand von \SI{50}{\centi\meter} so ausgerichtet, dass ihre Kreisfläche senkrecht auf das Sensormodul zeigt. Anschließend wird mithilfe von \gls{RVIZ} überprüft, ob die jeweiligen Objekte vom \gls{ToF} detektiert werden können. Die Ergebnisse dieser Messungen lassen sich den Tabellen \ref{tab:Detektionsvermögen_weiß} und \ref{tab:Detektionsvermögen_schwarz} entnehmen.
+
+\begin{table}[H]
+    \centering
+    \begin{tabular}{|c|c|c|}
+    \hline
+        {\small Größe des Papierkreises in \SI{}{\milli\meter}}&{\small Beleuchtungsstärke in \SI{}{\lux}}& {\small Wird der Kreis detektiert?}\\
+     \hline
+        100 & 187,4 & ja\\
+     \hline
+        80 & 169,4 & ja\\
+     \hline
+        60 & 156,8 & ja\\
+     \hline
+        40 & 158,1 & ja\\
+     \hline
+        20 & 192,7 & ja\\
+     \hline
+        10 & 188,2 & nein\\
+     \hline
+    \end{tabular}
+    \caption{Effektives Detektionsvermögen eines weißen Papierkreises bei \SI{50}{\centi\meter} Abstand zum Sensor.}
+    \label{tab:Detektionsvermögen_weiß}
+\end{table}
+
+\begin{table}[H]
+    \centering
+    \begin{tabular}{|c|c|c|}
+    \hline
+        {\small Größe des Papierkreises in \SI{}{\milli\meter}}&{\small Beleuchtungsstärke in \SI{}{\lux}}& {\small Wird der Kreis detektiert?}\\
+     \hline
+        100 & 183,7 & ja \\
+     \hline
+        80 & 162,6 & ja \\
+     \hline
+        60 & 162,4 & ja \\
+     \hline
+        40 & 172,3 & nein \\
+     \hline
+        20 & 179,9 & nein \\
+    \hline
+        10 & 188,3 & nein \\
+    \hline
+    \end{tabular}
+    \caption{Effektives Detektionsvermögen eines schwarzen Papierkreises bei \SI{50}{\centi\meter} Abstand zum Sensor.}
+    \label{tab:Detektionsvermögen_schwarz}
+\end{table}
+
+\subsection*{Reaktionszeit}
+Um zu ermitteln, wie viel Zeit ($T_n$) vergeht, bis das Sensorsystem eine Unterschreitung des Sicherheitsabstands erkennt und dem \gls{UR}10 die Anweisung zum Einleiten eines überwachten Stillstands gibt, wird im Moment der erfolgreichen Dekodierung einer \gls{JSON}-Nachricht ein Zeitstempel erfasst.
+\\Dieser Zeitstempel wird im Header der \texttt{Pointcloud2}-Nachrichten von der ersten (serial\_to\_pcl\_node) bis zur letzten Node (moveit\_stop\_node) weitergereicht. Wird eine Unterschreitung in der letzten Node detektiert, so wird der Zeitstempel von der aktuellen Zeit subtrahiert und die Differenz über das Terminal des \acrshort{PC} mit Linux-Betriebssystem ausgegeben.
+
+Um herauszufinden, wie lange der Raspberry Pi Pico und der \texttt{VL53L7CX} benötigen, um die Sensordaten zu verarbeiten ($T_p$) und an den \acrshort{PC} weiterzugeben, wird die \textcolor{arduinoOrange}{micros}() Funktion am Anfang und Ende der \textcolor{arduinoOrange}{loop}() des Programms  verwendet und die Differenz aus den beiden Werten wird gebildet. 
+Die Gesamtreaktionszeit des Systems $T_r$ ergibt sich nun aus der Gleichung \ref{eq:Zeit_T_r} und das Ergebnis der Gleichung sowie die Summanden sind in Tabelle \ref{tab:reaktionszeit_terme}.
+\begin{equation}
+    \label{eq:Zeit_T_r}
+    T_r=T_n+T_p
+\end{equation}
+
+\begin{table}[h!]
+\centering
+\begin{tabular}{|l|r|}
+\hline
+\textbf{Metrik} & \textbf{Wert} \\
+\hline
+Anzahl Messwerte & 155\\
+Gemittelte Reaktionszeit & 231{,}3 \SI{}{\milli\second} \\
+Kürzeste Reaktionszeit & 207{,}3 \SI{}{\milli\second} \\
+Längste Reaktionszeit & 271{,}1 \SI{}{\milli\second} \\
+Feedback-Frequenz & ca.~8{,}62\,Hz \\
+\hline
+\end{tabular}
+\caption{Statistische Auswertung der Reaktionszeiten und Feedbackfrequenz des \texttt{moveit\_stop\_node}.}
+\label{tab:reaktionszeiten}
+\end{table}
+Der Tabelle \ref{tab:reaktionszeiten} kann die längste Reaktionszeit von den 155 Messwerten entnommen werden und diese Zeit entspricht $T_n$ aus der Gleichung  \ref{eq:Zeit_T_r}. 
+\begin{table}[h!]
+\centering
+\begin{tabular}{|l|r|}
+\hline
+\textbf{Metrik} & \textbf{Wert} \\
+\hline
+Anzahl Messwerte & 509 \\
+Gemittelte Schleifendauer & 166{,}669 \SI{}{\milli\second} \\
+Sensorfrequenz (aus Schleifendauer) & ca.~6{,}00\,Hz \\
+\hline
+\end{tabular}
+\caption{Analyse der Sensordaten: Frequenz basierend auf Zeitstempeln und Schleifendauer.}
+\label{tab:serielle_sensorfrequenz}
+\end{table}
+\\Die Tabelle~\ref{tab:serielle_sensorfrequenz} zeigt die gemittelte Dauer einer Verarbeitungsschleife, welche dem Summanden $T_p$ in Gleichung~\ref{eq:Zeit_T_r} entspricht. Ein Anteil der erhöhten Schleifendauer ist auf die Serialisierung des \gls{JSON}-Strings zurückzuführen. Die hierfür benötigte Zeit wurde in Abschnitt~\ref{übertragungsdauer_usb} berechnet.
+\begin{table}[H]
+    \centering
+    \renewcommand{\arraystretch}{1.3}
+    \begin{tabular}{|c|c|p{10cm}|}
+        \hline
+        \textbf{Symbol} & \textbf{Zeit in \SI{}{\milli\second}} & \textbf{Bedeutung} \\
+        \hline
+        $T_n$ & 271{,}1&Zeit, die für die Verarbeitung der Daten in den \gls{ROS}-Nodes nötig ist\\
+        \hline
+        $T_p$ & 166{,}7 &Zeit, die für die Verarbeitung der Daten auf dem \gls{MCU} nötig ist \\
+        \hline
+        $T_r$ & 437{,}8 &Gesamtreaktionszeit des Sensorsystems \\
+        \hline
+    \end{tabular}
+    \caption{Erklärung der Terme in Gleichung~\ref{eq:Zeit_T_r} und Messwerte}
+    \label{tab:reaktionszeit_terme}
+\end{table}
+Die Rohdaten kann man hier einsehen: 
+\url{https://git-ce.rwth-aachen.de/rene.ebeling/robot-sensor/-/tree/main/Progress_Documentation/Meassurements?ref_type=heads}
+
+\section{Überprüfung der Anforderungen}
+\label{Überprüfung_der_Anforderungen}
+In diesem Unterkapitel wird nun basierend auf den Messdaten aus dem vorigen Unterkapitel nachgewiesen, ob die Anforderungen aus Kapitel \ref{Rahmen_und_Anforderungen} erfüllt werden.
+\subsection{Detailgrad}
+Ob ein Objekt von einem \gls{ToF} wahrgenommen wird, hängt davon ab, wie stark ein Objekt reflektiert und wie groß die Überschneidung eines Objekts mit der \gls{FOV} eines \gls{SPAD} ist (siehe Abbildung \ref{Detection_Volume}).
+\begin{figure}[h]
+    \centering
+    \includegraphics[width=0.5\linewidth]{images/STM-25_Detection_Volume.png}
+    \caption{Einfluss der Reflektivität auf das Detektionsvolumen \cite{STM-25}.}
+    \label{Detection_Volume}
+\end{figure}
+Bei dem \texttt{VL53L7CX} ist die gesamte \gls{FOV} bei einer $8 \times 8$ Auflösung in \texttt{64} Einzelbereiche unterteilt, wie in Abbildung \ref{VL53L7CX_FOV} zu sehen ist. Wenn ein ganzer Einzelbereich mit einem Objekt gefüllt ist, dann kann bei einem grauen Objekt mit einer Reflexion von \texttt{17\%} und bei einer Umgebungsbeleuchtung von \SI{5000}{\lux} der Abstand nur gemessen werden, wenn das Objekt nicht weiter als \SI{40}{\centi\meter} entfernt ist.
+\\Zum Vergleich: In der Logistik werden in der Norm \cite{DIN-12464-1} maximal \SI{1000}{\lux} im Konfigurations- und Auslieferungsbereich empfohlen und in den meisten anderen Bereichen \SI{500}{\lux} oder weniger.
+\\Wenn das Objekt weiß ist und im Dunkeln gemessen wird, dann können Abstände von bis zu \SI{2}{\meter} gemessen werden. Aus der Tabelle, die aus dem Datenblatt des Sensors stammt, \ref{VL53L7CX_Performance_Table} im Anhang können weitere Werte entnommen werden \cite{STM-24}.
+
+In Kapitel \ref{Messdaten} wird das effektive Detektionsvermögen $d_e$ der verwendeten Sensoren unter Laborbedingungen ermittelt. 
+Die in den Tabellen \ref{tab:Detektionsvermögen_weiß} und \ref{tab:Detektionsvermögen_schwarz} dargestellten Ergebnisse zeigen, dass ein dunkles Objekt in einer Entfernung von \SI{50}{\centi\meter} nur dann detektiert werden kann, wenn es mindestens die Fläche eines Kreises mit einem Durchmesser von \SI{60}{\milli\meter} aufweist. 
+Somit ergibt sich für das effektive Detektionsvermögen $d_e$ auf Basis der Messwerte ein Wert von \SI{60}{\milli\meter}. 
+\\Die Wahl des Messabstands von \SI{50}{\centi\meter} orientiert sich an der Annahme, dass Objekte, welche den geforderten Schutztrennabstand unterschreiten, sich nicht weiter von der Sensorhalterung entfernt befinden.
+Da zwischen den Messfeldern \acrshort{FOV} benachbarter Sensoren ein unbeaufsichtigter Bereich von mehr als \SI{60}{\milli\meter} verbleibt, ist für das effektive Detektionsvermögen $d_e$ nicht der Sensorbereich, sondern die Ausdehnung dieses nicht erfassten Intervalls maßgeblich. Aus Abbildung \ref{fig:fov_top_view} ergibt sich daher für $d_e$ ein Wert von \SI{77,25}{\milli\meter}.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.7\linewidth]{images/STM-25_FOV.jpg}
+    \caption{\gls{FOV} eines \texttt{VL53L7CX} bei einer $8 \times 8$ Auflösung \cite{STM-25}}
+    \label{VL53L7CX_FOV}
+\end{figure}
+
+
+\subsection{Aktualität der Abstandsmessungen}
+Wie im vorhergehenden Kapitel beschrieben, existiert für \gls{HRC}-Anwendungen eine Formel zur Berechnung des erforderlichen Schutztrennabstandes (vgl. Gleichung~\ref{eq:Schutztrennabstand}). Die einzelnen Komponenten dieser Gleichung sind in Tabelle~\ref{Tabelle_Schutztrennabstand} aufgeführt.  
+\begin{equation}
+S_p = S_h + S_r + S_s + C + Z_d + Z_r \tag{\ref{eq:Schutztrennabstand}}
+\end{equation}
+
+\begin{table}[h]
+\centering
+\caption*{\textbf{Tabelle~\ref{Tabelle_Schutztrennabstand}:} Beschreibung der Variablen in Gleichung~\ref{eq:Schutztrennabstand}}
+\begin{tabular}{>{\raggedright\arraybackslash}p{2cm} p{11cm}}
+$S_p$ & Schutztrennabstand zwischen Mensch und Robotersystem \\
+$S_h$ & Anteil des Schutztrennabstands durch Bewegung des Bedieners, bis die Anwendung gestoppt hat \\
+$S_r$ & Anteil des Schutztrennabstands, der durch die Reaktionszeit der Anwendung verursacht wird \\
+$S_s$ & Anteil des Schutztrennabstands, der beim Stoppen der Anwendung entsteht \\
+$C$ & Eindringtiefe (intrusion distance), gemäß ISO 13855 \\
+$Z_d$ & Positionsunsicherheit des Bedieners, bedingt durch die Messgenauigkeit des Erkennungssystems \\
+$Z_r$ & Positionsunsicherheit der Anwendung, bedingt durch die Messgenauigkeit des Systems \\
+\end{tabular}
+\end{table}
+Zur Effizienzsteigerung ist es aus anwendungstechnischer Sicht wünschenswert, den Schutztrennabstand der Roboteranwendung möglichst gering zu halten. Eine reduzierte Überwachungsfrequenz der Roboterumgebung in Bezug auf potenzielle Kollisionsobjekte führt jedoch zu einer verlängerten Reaktionszeit gegenüber plötzlich auftretenden Objekten im Gefahrenbereich. Die erforderliche Größe der Terme \( S_h \) und \( S_r \) in Gleichung~\ref{eq:Schutztrennabstand} ist dabei maßgeblich von der Reaktionszeit des Systems sowie von den relativen Bewegungsgeschwindigkeiten zwischen Roboter und sich nähernden Objekten abhängig. In dieser Arbeit wird ein Zylinder mit einem Radius von \SI{30}{\centi\meter} um den Ober- und Unterarm des Roboters als maximal tolerierbarer Schutztrennabstand definiert. Es ist daher zu untersuchen, ob und in welchem Maße die Bewegungsgeschwindigkeit der Roboteranwendung reduziert werden muss, um diesen Zielwert für den Schutztrennabstand zu erreichen, und ob eine alleinige Anpassung der Geschwindigkeit hierfür ausreichend ist.
+
+Nun werden die Summanden der Gleichung \ref{eq:Schutztrennabstand} bestimmt:
+
+Die Geschwindigkeit des Bedieners $v_h(t_0)$ wird in \cite{DIN-10218-2} mit \SI{1,6}{\meter/\second} angenommen. Die Reaktionszeit des Systems $T_r$ wird in Kapitel \ref{Messdaten} bestimmt. Die Zeit, die der \gls{UR}10 maximal zum Entschleunigen benötigt ($T_s$) lässt sich auf Basis der Werte in Tabelle \ref{Nachlaufzeit_UR} linear interpolieren (siehe Berechnung \ref{eq:nachlaufzeit_interpolation}). Auf Grundlage dieser Berechnung kann für die Nachlaufzeit $T_s$ ein Wert von \SI{375}{\milli\second} angenommen werden, bezogen auf den Betrieb bei Volllast (\SI{10}{\kilogram}) und vollständig horizontal ausgestrecktem Arm.
+\\Mit diesen Werten lässt sich nun $S_h$ mit der Gleichung \ref{eq:Formular_S_h_simplified} bestimmen. Die Variablen der Gleichung \ref{eq:Formular_S_h_simplified} sind in Tabelle \ref{tab:Schutztrennabstand_S_h_Terme} mit Bedeutung und Wert aufgelistet.
+\begin{equation}
+    \centering
+    \label{eq:Formular_S_h_simplified}
+    S_h = v_h(t_0) \times (T_r + T_s)
+\end{equation}
+\begin{table}[H]
+    \centering
+    \renewcommand{\arraystretch}{1.3}
+    \begin{tabular}{|c|c|p{10cm}|}
+        \hline
+        \textbf{Symbol} & \textbf{Wert / Einheit} & \textbf{Bedeutung} \\
+        \hline
+        $v_h(t_0)$ & \SI{1600}{\milli\meter\per\second} & Geschwindigkeit des Bedieners zum Zeitpunkt $t_0$ an dem der Schutztrennabstand unterschritten wird (gemäß ISO 13855) \\
+        \hline
+        $T_r$ & \SI{437{,}8}{\milli\second} & Reaktionszeit des Sensorsystems (vgl. Tabelle~\ref{tab:reaktionszeit_terme}) \\
+        \hline
+        $T_s$ & \SI{375}{\milli\second} & Interpolierte Zeit bis zum vollständigen Stillstand des Roboters nach Erhalt des Stoppsignals (vgl. Gleichung \ref{eq:nachlaufzeit_interpolation}) \\
+        \hline
+        $S_h$ & \SI{1300{,}48}{\milli\meter} & Anteil des Schutztrennabstandes zur Anwendung aufgrund der Bewegung des Bedieners\\
+        \hline
+    \end{tabular}
+    \caption{Erklärung der Terme in Gleichung~\ref{eq:Formular_S_h_simplified} und angenommene Werte}
+    \label{tab:Schutztrennabstand_S_h_Terme}
+\end{table}
+Die Gleichung \ref{eq:Formular_S_r_simplified} beschreibt, wie weit sich der Endeffektor in Richtung des Bedieners bewegt, bevor das Robotersystem mit dem Bremsvorgang beginnt. Dieser Abstand \(S_r\) hängt ausschließlich von der Geschwindigkeit des Endeffektors \(v_r(t_0)\) zum Zeitpunkt \(t_0\) sowie von der Reaktionszeit des Systems \(T_r\) ab. 
+\\Mit diesen Werten lässt sich nun $S_r$ mit der Gleichung \ref{eq:Formular_S_r_simplified} bestimmen. Die Variablen der Gleichung \ref{eq:Formular_S_r_simplified} sind in Tabelle \ref{tab:Schutztrennabstand_S_r_Terme} mit Bedeutung und Wert aufgelistet.
+\begin{equation}
+    \centering
+    \label{eq:Formular_S_r_simplified}
+    S_r=v_r(t_0)\times T_r
+\end{equation}
+\begin{table}[H]
+    \centering
+    \renewcommand{\arraystretch}{1.3}
+    \begin{tabular}{|c|c|p{10cm}|}
+        \hline
+        \textbf{Symbol} & \textbf{Wert / Einheit} & \textbf{Bedeutung} \\
+        \hline
+        $v_r(t_0)$ & \SI{250}{\milli\meter\per\second} & Geschwindigkeit des Roboters zum Zeitpunkt $t_0$, also zu Beginn der Reaktionszeit \\
+        \hline
+        $T_r$ & \SI{437{,}8}{\milli\second} & Reaktionszeit des Sensorsystems (vgl. Tabelle~\ref{tab:reaktionszeit_terme}) \\
+        \hline
+        $S_r$ & \SI{108{,}45}{\milli\meter} & Anteil des Schutztrennabstandes aufgrund des verzögerten Einsetzens des Bremsens\\
+        \hline
+    \end{tabular}
+    \caption{Erklärung der Terme in Gleichung~\ref{eq:Formular_S_r_simplified} und verwendete Werte}
+    \label{tab:Schutztrennabstand_S_r_Terme}
+\end{table}
+
+Da im Datenblatt des \gls{UR}10 die Nachlaufzeit bei unterschiedlichen \\Endeffektor-Geschwindigkeiten angegeben ist, wird Gleichung \ref{eq:Formular_S_s} verwendet. Gemäß \cite{DIN-10218-2} lässt sich damit der Nachlaufweg des Roboterarms in Abhängigkeit von der Geschwindigkeit und der Nachlaufzeit $T_s$ (vgl. Tabelle
+\ref{tab:Schutztrennabstand_S_h_Terme}) bestimmen.
+\begin{equation}
+    \centering
+    S_s = \int_{t_0 + T_r}^{t_0 + T_r + T_s} v_s(t) \, dt
+    \label{eq:Formular_S_s}
+\end{equation}
+Die Formel \ref{eq:Formular_S_s} wird vereinfacht, da die Geschwindigkeit $v_s$ des \gls{UR}10 in Abhängigkeit von der Zeit während eines Bremsvorgangs nicht bekannt ist. Jedoch  wird davon ausgegangen, dass die Geschwindigkeit während des Bremsens die Geschwindigkeit zu Beginn des Bremsvorgangs nicht überschreitet. Deshalb ist das Ergebnis der Gleichung \ref{eq:Formular_S_s_simplified} immer kleiner als das Ergebnis der Gleichung \ref{eq:Formular_S_s} und kann deshalb stattdessen verwendet werden.
+\begin{equation}
+\label{eq:Formular_S_s_simplified}
+    S_s=T_s\times v_s(t_0)
+\end{equation}
+\begin{table}[H]
+    \centering
+    \renewcommand{\arraystretch}{1.3}
+    \begin{tabular}{|c|c|p{10cm}|}
+        \hline
+        \textbf{Symbol} & \textbf{Wert / Einheit} & \textbf{Bedeutung} \\
+        \hline
+        $T_s$ & \SI{375}{\milli\second} & Interpolierte Zeit bis zum vollständigen Stillstand des Roboters nach Erhalt des Stoppsignals (vgl. Gleichung \ref{eq:nachlaufzeit_interpolation}) \\
+        \hline
+        $v_s(t_0)$ & \SI{250}{\milli\meter\per\second} & Geschwindigkeit des Roboters zum Zeitpunkt $t_0$, also zu Beginn des Bremsvorgangs \\
+        \hline
+        $S_s$ & \SI{93{,}75}{\milli\meter} & Anteil des Schutztrennabstandes aufgrund des Nachlaufweges des \gls{UR}10 nach Gleichung \ref{eq:Formular_S_s_simplified}\\
+        \hline
+    \end{tabular}
+    \caption{Erklärung der Terme in Gleichung~\ref{eq:Formular_S_s_simplified} und angenommene Werte}
+    \label{tab:Schutztrennabstand_S_s_Terme}
+\end{table}
+Da im Unterkapitel Detailgrad nachgewiesen werden konnte, dass das Sensorsystem ein effektives Detektionsvermögen $d_e$ von \SI{77{,}25}{\milli\meter} hat, wird die Eindringtiefe $C$ als so groß wie der Wert $D_{DT}$ aus der \cite{DIN-13855} (Kapitel 8.3.4) angenommen, da $\SI{55}{\milli\meter} < d_e < \SI{120}{\milli\meter}$ ist. Für das effektive Detektionsvermögen $d_e$ wird in diesem Fall von einem Wert von \SI{77{,}25}{\milli\meter} ausgegangen, da in Kapitel~\ref{Messdaten} gezeigt wird, dass dunkle Objekte, die kleiner sind als die toten Winkel des Sensoraufbaus, von den Sensoren detektiert werden können.
+
+Der Wert $Z_r$ aus Gleichung \ref{eq:Schutztrennabstand} kann im Vergleich zu den anderen Werten vernachlässigt werden, da laut \cite{UNR-15} die Wiederholgenauigkeit des \gls{UR}10 bei $\pm$\,\SI{0{,}1}{\milli\meter} liegt.
+
+Der Wert $Z_d$ aus Gleichung \ref{eq:Schutztrennabstand} entspricht der Messungenauigkeit des \texttt{VL53L7CX} und lässt sich der Tabelle \ref{VL53L7CX_accuracy_Table} aus dem Datenblatt \cite{STM-25} entnehmen.
+
+Da jetzt alle Summanden der Gleichung \ref{eq:Schutztrennabstand} bestimmt wurden, lässt sich der Schutztrennabstand $S_p$ bestimmen. In Tabelle \ref{tab:Schutztrennabstand} werden die Werte nochmal zusammengefasst und aufgelistet, sowie das Ergebnis der Rechnung aufgezeigt.
+\begin{table}[H]
+    \centering
+    \renewcommand{\arraystretch}{1.3}
+    \begin{tabular}{|c|c|p{10cm}|}
+        \hline
+        \textbf{Symbol} & \textbf{Wert / Einheit} & \textbf{Bedeutung} \\
+        \hline
+        $S_h$ & \SI{1300{,}48}{\milli\meter} & Anteil des Schutztrennabstandes zur Anwendung aufgrund der Bewegung des Bedieners (vgl. Tabelle \ref{tab:Schutztrennabstand_S_h_Terme})\\
+        \hline
+        $S_r$ & \SI{108{,}45}{\milli\meter} & Anteil des Schutztrennabstands, der durch die Reaktionszeit der Anwendung verursacht wird (vgl. Tabelle \ref{tab:Schutztrennabstand_S_r_Terme})\\
+        \hline
+        $S_s$ & \SI{93{,}75}{\milli\meter} &  Anteil des Schutztrennabstandes aufgrund des Nachlaufweges des \gls{UR}10 (vgl. Tabelle \ref{tab:Schutztrennabstand_S_s_Terme}) \\
+        \hline
+        $C$ & \SI{850}{\milli\meter} & Eindringtiefe, gemäß \cite{DIN-13855} \\
+        \hline
+        $Z_d$ & \SI{70}{\milli\meter} & Messungenauigkeit des \texttt{VL53L7CX} \\
+        \hline
+        $S_p$ & \SI{2422{,}68}{\milli\meter} & Gesamt-Schutztrennabstand zwischen Mensch und Robotersystem \\
+        \hline
+    \end{tabular}
+    \caption{Erklärung der Terme in Gleichung~\ref{eq:Schutztrennabstand} und angenommene Werte}
+    \label{tab:Schutztrennabstand}
+\end{table}
+Wie sich der Tabelle \ref{tab:Schutztrennabstand} entnehmen lässt, ist der notwendige Schutztrennabstand größer als \SI{30}{\centi\meter}, sodass das Sensorsystem in seiner hier dargelegten Form ohne die kombinierte Anwendung mit der \gls{PFL}-Funktion des \gls{UR}10 nicht den Anforderungen der \cite{DIN-10218-2} entspricht. 
+\\Die erste Forschungsfrage aus Kapitel~\ref{Forschungsfragen} kann nun beantwortet werden. Die Berechnung des erforderlichen Schutztrennabstands gemäß Gleichung~\ref{eq:Schutztrennabstand} zeigt, dass insbesondere die Anteile \( S_h \) und \( C \) jeweils den zulässigen Maximalwert von \SI{300}{\milli\meter}, wie er beispielhaft als Zielgröße angenommen wird, deutlich überschreiten. Entsprechend erfüllt das vorgestellte Sensorsystem die Anforderungen an eine alleinige technische Schutzmaßnahme im Sinne der DIN EN ISO 10218-2 in Verbindung mit DIN EN ISO 13855 nicht. Eine sicherheitstechnische Verwendung ist daher ausschließlich im Rahmen eines kombinierten Schutzkonzepts möglich, bei dem ergänzende Maßnahmen – wie etwa \gls{PFL} – berücksichtigt werden.
+
+\subsection{Nutzbarkeit der Informationen}
+In diesem Kapitel werden die Forschungsfragen 3 und 4 aus dem Kapitel \ref{Forschungsfragen} beantwortet. Es wird aufgezeigt, wie man den Roboter von anderen Objekten im Detektionsbereich unterscheidet, wie man die Daten von nicht ortsfesten Sensoren verwendet und wie die Abstandsmessungen dadurch für die Kollisionsvermeidung nutzbar gemacht werden. 
+
+\subsubsection*{Filterung durch die Sensorfirmware}
+Die \texttt{VL53L7CX}-Firmware filtert die zurückgegebenen Distanzdaten basierend auf dem ``Target Status''. Dieser Status gibt die Gültigkeit der Messung an. Um konsistente Daten zu erhalten, müssen ungültige Zielstatuswerte herausgefiltert werden. Laut einem Mitarbeiter von STMicroelectronics sind die Statuswerte \texttt{5,6,9} und \texttt{12} verwendbar \cite{JOH-24}. Alle anderen Statuswerte liegen unter dem 50\%-Konfidenzniveau \cite{STM-25}. Durch diese Vorsortierung werden fehlerhafte Abstandsmessungen mit einem entsprechenden Statuswert versehen und können im Anschluss in der \texttt{serial\_to\_pcl\_node} aussortiert werden.
+
+\begin{table}[H]
+    \centering
+    \caption{Liste der verfügbaren Statuswerte der \texttt{VL53L7CX} Abstandsmessungen \cite{STM-25}}
+    \label{tab:target_status}
+    \begin{tabularx}{\linewidth}{cX}
+        \toprule
+        \textbf{Target Status} & \textbf{Beschreibung} \\
+        \midrule
+        0   & Ranging-Daten werden nicht aktualisiert \\
+        1   & Signalrate auf dem SPAD-Array zu niedrig \\
+        2   & Zielphase \\
+        3   & Sigma-Schätzer zu hoch \\
+        4   & Zielkonsistenz fehlgeschlagen \\
+        5   & Bereich gültig \\
+        6   & Wrap-around nicht durchgeführt (typischerweise die erste Reichweite) \\
+        7   & Ratenkonsistenz fehlgeschlagen \\
+        8   & Signalrate zu niedrig für das aktuelle Ziel \\
+        9   & Bereich gültig mit großem Puls (kann auf ein zusammengeführtes Ziel zurückzuführen sein) \\
+        10  & Bereich gültig, aber kein Ziel bei der vorherigen Reichweite erkannt \\
+        11  & Messkonsistenz fehlgeschlagen \\
+        12  & Ziel durch ein anderes unscharf, aufgrund des Sharpener \\
+        13  & Ziel erkannt, aber inkonsistente Daten. Tritt häufig bei sekundären Zielen auf \\
+        255 & Kein Ziel erkannt (nur wenn die Anzahl der erkannten Ziele aktiviert ist) \\
+        \bottomrule
+    \end{tabularx}
+\end{table}
+
+\subsubsection*{Filterung durch die \texttt{pcl\_filter\_node}}
+
+Die \texttt{pcl\_filter\_node} trennt anhand statischer Zylinder-Approximationen der \gls{UR}10-Gelenksegmente die eingehende \gls{PCD} in „roboterinterne“ und „roboterexterne“ Punkte. Sie abonniert das Topic \texttt{/pcl} und publiziert auf \texttt{/invalid\_from\_perspective} alle Punkte innerhalb der Zylinder sowie auf \texttt{/valid\_from\_perspective} diejenigen außerhalb.  
+
+\paragraph{Arbeitsablauf}
+\begin{enumerate}
+  \item Abruf aktueller Transformationen zwischen Sensormodul-Frame und Roboterlinks via \texttt{tf2\_ros}.  
+  \item Anwendung der Transformationsmatrizen auf vordefinierte Zylinder-Meshes zur Bestimmung ihrer Lage im Sensormodul-Frame.  
+  \item Punkt-in-Mesh-Test: Jeder Punkt der empfangenen \texttt{Pointcloud2} wird geprüft, ob er innerhalb eines der transformierten Zylinder liegt.  
+  \item Klassifikation und Publikation:  
+    \begin{itemize}
+      \item \emph{Invalid Points}: Punkte im Roboterkörper → \texttt{/invalid\_from\_perspective}  
+      \item \emph{Valid Points}: Umgebungsdaten → \texttt{/valid\_from\_perspective}  
+    \end{itemize}
+\end{enumerate}
+
+Diese Vorfilterung ermöglicht nachfolgenden Nodes eine Kollisionsdetektion, indem nur objektrelevante Punkte weiterverarbeitet werden.
+
+
+\subsubsection*{Problematik nicht-ortsfester Sensoren}
+Die Verwendung von nicht-ortsfesten Sensoren führt zu Herausforderungen, da für die Kollisionsvermeidung Punkte in einem statischen und globalen Referenzkoordinatensystem benötigt werden.
+\\Wenn ein Sensor nicht ortsfest ist, ändert sich seine Position und Orientierung relativ zu einem globalen Referenzkoordinatensystem ständig. Dies erfordert die kontinuierliche und präzise Verfolgung der Sensorpose. Ohne diese Informationen wären die von dem Sensor kommenden \gls{PCD} bedeutungslos, da ihre räumliche Beziehung zur Umgebung unbekannt wäre. Das \texttt{tf2\_ros}-System ist genau für diesen Zweck konzipiert. Es ermöglicht das Beziehen von Transformationsmatrizen basierend auf der aktuellen Position der Roboterkomponenten. Durch Verwenden dieser Matrizen lassen sich Meshes in ihrem Referenzkoordinatensystem bewegen und mit der Bewegung von Ober- oder Unterarm synchronisieren.
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/Kapitel/anhang.tex b/Bachelorarbeit/V2/Kapitel/anhang.tex
new file mode 100644
index 0000000000000000000000000000000000000000..b63c457d3077ad1d26fc9edf91bb18854338ffa5
--- /dev/null
+++ b/Bachelorarbeit/V2/Kapitel/anhang.tex
@@ -0,0 +1,208 @@
+\chapter{Quellcode}
+\section{Arduino-Quellcode}
+
+Der vollständige Arduino-Code ist im Git-Repository unter folgendem Link verfügbar:  
+\href{https://git-ce.rwth-aachen.de/rene.ebeling/robot-sensor/-/blob/main/Arduino/Final_18_vl53l7cx_clean_both_I2C_with_STMlibrary_PCF8575/Final_18_vl53l7cx_clean_both_I2C_with_STMlibrary_PCF8575.ino?ref_type=heads}{\texttt{Final\_18\_vl53l7cx\_clean\_both\_I2C\_with\_STMlibrary\_PCF8575.ino}}
+
+
+\section{ROS2-Nodes Quellcode}
+\subsection*{\texttt{serial\_to\_pcl\_node} (Python)}
+\label{serial_to_pcl_node}
+Der vollständige Quellcode der Node ist im Git-Repository unter folgendem Link verfügbar:  
+\href{https://git-ce.rwth-aachen.de/rene.ebeling/robot-sensor/-/blob/main/workspaces/COLCON_WS/src/serial_to_pcl/serial_to_pcl/serial_to_pcl_node.py?ref_type=heads}{\texttt{serial\_to\_pcl\_node.py}}
+\\\url{https://git-ce.rwth-aachen.de/rene.ebeling/robot-sensor}
+\\In dem Unterordner:
+\\\texttt{workspaces} \(\rightarrow\) \texttt{COLCON\_WS} \(\rightarrow\) \texttt{src} \(\rightarrow\) \texttt{serial\_to\_pcl} \(\rightarrow\) \texttt{serial\_to\_pcl} \(\rightarrow\) \\\texttt{serial\_to\_pcl\_node.py}
+
+\subsection*{\texttt{pcl\_filter\_node} (Python)}
+\label{pcl_filter_node}
+
+Der vollständige Quellcode der Node ist im Git-Repository unter folgendem Link verfügbar:  
+\href{https://git-ce.rwth-aachen.de/rene.ebeling/robot-sensor/-/blob/main/workspaces/COLCON_WS/src/serial_to_pcl/serial_to_pcl/pcl_filter_node.py?ref_type=heads}{\texttt{pcl\_filter\_node.py}}
+\\\url{https://git-ce.rwth-aachen.de/rene.ebeling/robot-sensor}
+\\In dem Unterordner:
+\\\texttt{workspaces} \(\rightarrow\) \texttt{COLCON\_WS} \(\rightarrow\) \texttt{src} \(\rightarrow\) \texttt{serial\_to\_pcl} \(\rightarrow\) \texttt{serial\_to\_pcl} \(\rightarrow\) \\\texttt{pcl\_filter\_node.py}
+
+\subsection*{\texttt{moveit\_stop\_node} (Python)}
+\label{moveit_stop_node}
+
+Der vollständige Quellcode der Node ist im Git-Repository unter folgendem Link verfügbar:  
+\href{https://git-ce.rwth-aachen.de/rene.ebeling/robot-sensor/-/blob/main/workspaces/COLCON_WS/src/serial_to_pcl/serial_to_pcl/moveit_stop_node.py?ref_type=heads}{\texttt{moveit\_stop\_node.py}}
+\\In dem Unterordner:
+\\\texttt{workspaces} \(\rightarrow\) \texttt{COLCON\_WS} \(\rightarrow\) \texttt{src} \(\rightarrow\) \texttt{serial\_to\_pcl} \(\rightarrow\) \texttt{serial\_to\_pcl} \(\rightarrow\) \\\texttt{moveit\_stop\_node.py}
+
+\chapter{3D-Modelle des Sensormoduls und Technische Zeichnungen}
+\begin{figure}[h]
+	\centering
+	\includegraphics[width=\textwidth]{images/Halterung_Seite_Oben.jpg} % Ersetzen Sie dies durch Ihre 3D-Modelle
+	\caption{3D-Modell des Sensormoduls (Perspektive 2)}
+	\label{fig:3d_modell_2}
+\end{figure}
+\begin{figure}
+    \centering
+    \includegraphics[width=\linewidth]{Technische_Zeichnungen/kleine_vl53l5cx_halterung Drawing v2.pdf}
+    \caption{Technische Zeichnung nur mit Maßen notwendig zur Beschreibung des Grundkörpers}
+    \label{fig:Techikcal_Drawing}
+\end{figure}
+\begin{figure}[h]
+	\centering
+	\includegraphics[width=\textwidth]{images/Halterung_Seite.jpg} % Ersetzen Sie dies durch Ihre 3D-Modelle
+	\caption{3D-Modell des Sensormoduls (Perspektive 1)}
+	\label{fig:3d_modell_1}
+\end{figure}
+\begin{figure}[h]
+	\centering
+	\includegraphics[width=\textwidth]{images/Halterung_Seite_Oben_schräg.jpg} % Ersetzen Sie dies durch Ihre 3D-Modelle
+	\caption{3D-Modell des Sensormoduls (Perspektive 3)}
+	\label{fig:3d_modell_3}
+\end{figure}
+\begin{figure}[h]
+	\centering
+	\includegraphics[width=\textwidth]{images/Halterung_Top.jpg} % Ersetzen Sie dies durch Ihre 3D-Modelle
+	\caption{3D-Modell des Sensormoduls (Perspektive 4)}
+	\label{fig:3d_modell_4}
+\end{figure}
+\chapter{VL53L7CX}
+
+\subsection*{Datenblatt Informationen}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.9\linewidth]{images/VL53L7CX_Raster.png}
+    \caption{Raster mit Nummerierung der Zonen. Die Eckzonen werden hervorgehoben, da sie eine weniger hohe ``Ranging Performance'' haben \cite{STM-25}.}
+    \label{VL53L7CX_Raster}
+\end{figure}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.9\linewidth]{images/VL53L7CX_Performance_Table.png}
+    \caption{Maximale Reichweite bei ``autonomous mode'' im 1-Hz-Modus mit 8×8-Auflösung und einer Integrationszeit von \SI{20}{\milli\second} \cite{STM-25}.}
+    \label{VL53L7CX_Performance_Table}
+\end{figure}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=\linewidth]{images/VL53L7CX_range_accuracy.png}
+    \caption{Tabelle zur Messgenauigkeit des VL53L7CX aus dem Datenblatt \cite{STM-25}.}
+    \label{VL53L7CX_accuracy_Table}
+\end{figure}
+
+%\label{VL53L7CX_DS}
+%\includepdf[pages=-]{Quellen/DataSheet_vl53l7cx.pdf}
+
+\subsection*{Übertragungsdauer USB}
+Zur Berechnung der Übertragungsdauer einer Datenmenge von \SI{10{,}76}{\kilo\byte} bei einer Baudrate von \SI{2e6}{\bit\per\second} wird wie folgt vorgegangen:
+\label{übertragungsdauer_usb}
+\begin{itemize}
+  \item Datenmenge:
+  \[
+    \SI{10{,}76}{\kilo\byte} = 10{,}76 \times 1024 \approx \SI{11018}{\byte}
+  \]
+  
+  \item Bei einer UART-Übertragung werden pro Byte typischerweise \textbf{10 Bit} übertragen (1 Startbit, 8 Datenbits, 1 Stoppbit), also:
+  \[
+    \SI{11018}{\byte} \times 10 = \SI{110180}{\bit}
+  \]
+  
+  \item Die Übertragungszeit ergibt sich zu:
+  \[
+    \frac{\SI{110180}{\bit}}{\SI{2e6}{\bit\per\second}} \approx \SI{0.05509}{\second} = \SI{55.09}{\milli\second}
+  \]
+\end{itemize}
+
+Die Übertragung dauert somit etwa \textbf{\SI{55}{\milli\second}}.
+
+\chapter{Versuchsaufbauten}
+\begin{figure}[h]
+	\centering
+	\includegraphics[scale=0.5]{images/Versuchsaufbau_mit_VL53L5CX.jpg}
+	\caption{Einer der ersten Versuchsaufbauten mit zwei VL53L5CX}
+	\label{fig:Sersuchsaufbau}
+\end{figure}
+
+\begin{figure}[h]
+    \centering
+    \includegraphics[width=\linewidth]{images/Sensorhalterung_im_Institut.jpeg}
+    \caption{Sensorhalterung mit Sensoren im Institut am \gls{UR}10}
+    \label{fig:Sensorhalterung_im_Institut}
+\end{figure}
+
+\begin{figure}[h]
+    \centering
+    \includegraphics[width=\linewidth]{images/Gesamtaufbau_im_Institut.jpeg}
+    \caption{Gesamtaufbau mit Sensorsystem und \gls{UR}10 im Institut}
+    \label{fig:Gesamtaufbau}
+\end{figure}
+\chapter{Schaltplan}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=1.1\linewidth, angle=90]{images/Wire_Schematic.png}
+    \caption{Vereinfachter Schaltplan}
+    \label{fig:Schematic}
+\end{figure}
+
+
+\chapter{FOV auf UR10}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.7\linewidth]{images/UR10_with_FOV_Pyramids_Home.png}
+    \caption{\gls{UR}10 mit \gls{FOV} Pyramids von schräg oben}
+    \label{fig:FOV_Pyramids_above}
+\end{figure}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.7\linewidth]{images/UR10_Angeled_Side_with_FOV_Pyramids.png}
+    \caption{\gls{UR}10 mit \gls{FOV} Pyramids von der Seite}
+    \label{fig:FOV_Pyramids_angeled_side}
+\end{figure}
+
+\begin{figure}[H]
+  \centering
+  \includegraphics[width=1\linewidth]{Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Top+View+Drawing.pdf}
+  \caption{Draufsicht der Sensorhalterung mit Sichtfeldern (\gls{FOV}).}
+  \label{fig:fov_top_view}
+\end{figure}
+
+\begin{figure}[H]
+  \centering
+  \includegraphics[width=1\linewidth]{Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Drawing.pdf}
+  \caption{Draufsicht und Seitenansicht der Sensorhalterung mit Sichtfeldern.}
+  \label{fig:fov_drawing}
+\end{figure}
+
+
+\chapter{UR10}
+\begin{figure}[h]
+    \centering
+    \includegraphics[width=1\linewidth]{images/Nachlaufzeit_UR10.png}
+    \caption{Nachlaufzeit UR10 \cite{AIC-25}}
+    \label{Nachlaufzeit_UR}
+\end{figure}
+
+\paragraph{Lineare Interpolation der Nachlaufzeit für \SI{0{,}25}{\meter\per\second}:}
+
+Zur Bestimmung der Nachlaufzeit bei einer Endeffektor-Geschwindigkeit von \SI{0{,}25}{\meter\per\second} wird eine lineare Interpolation zwischen zwei bekannten Werten aus Abbildung~\ref{Nachlaufzeit_UR} durchgeführt:
+
+\begin{align*}
+v_1 &= \SI{1{,}0}{\meter\per\second}, & t_1 &= \SI{450}{\milli\second} \\
+v_2 &= \SI{1{,}5}{\meter\per\second}, & t_2 &= \SI{500}{\milli\second} \\
+v   &= \SI{0{,}25}{\meter\per\second}
+\end{align*}
+
+Die Interpolationsformel lautet:
+
+\begin{equation}
+t(v) = t_1 + \frac{v - v_1}{v_2 - v_1} \cdot (t_2 - t_1)
+\label{eq:nachlaufzeit_interpolation}
+\end{equation}
+
+Einsetzen der Werte ergibt:
+
+\begin{align*}
+t(0{,}25) &= 450 + \frac{0{,}25 - 1{,}0}{1{,}5 - 1{,}0} \cdot (500 - 450) \\
+&= 450 + \frac{-0{,}75}{0{,}5} \cdot 50 \\
+&= 450 - 75 \\
+&= \SI{375}{\milli\second}
+\end{align*}
+
+Somit beträgt die geschätzte maximale Nachlaufzeit bei \SI{0{,}25}{\meter\per\second}:
+\[
+\boxed{t = \SI{375}{\milli\second}}
+\]
diff --git a/Bachelorarbeit/V2/Quellen/1-s2.0-S0921889003001556-main.pdf b/Bachelorarbeit/V2/Quellen/1-s2.0-S0921889003001556-main.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..03df54a1f41b933f62471bbd3e05bcf5ae727a8e
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/1-s2.0-S0921889003001556-main.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/Collision_Detection_with_joint_torque_Sensors.pdf b/Bachelorarbeit/V2/Quellen/Collision_Detection_with_joint_torque_Sensors.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..ff4caac1c7c7020a09e01c1348cb96334b99af02
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/Collision_Detection_with_joint_torque_Sensors.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_10218-1.pdf b/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_10218-1.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..7ba2ea0e7082c061a969b5077ee0e0e29c2c9bd1
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_10218-1.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_10218-2.pdf b/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_10218-2.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..3221cb4f0ffc22889a13a15ec340c16df15b297a
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_10218-2.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_8373.pdf b/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_8373.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..cc322415c92ac6a488f3c00fac7d956aa9220b34
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/DIN_EN_ISO_8373.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/DataSheet_vl53l7cx.pdf b/Bachelorarbeit/V2/Quellen/DataSheet_vl53l7cx.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..a1ee86147277c290561096a2b8f29c3eb24511cf
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/DataSheet_vl53l7cx.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/Laser_Range_finding.pdf b/Bachelorarbeit/V2/Quellen/Laser_Range_finding.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..2ae1600fb70b233e6f8979f856822fd81fca4270
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/Laser_Range_finding.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/Li_2019_J._Phys.__Conf._Ser._1267_012036.pdf b/Bachelorarbeit/V2/Quellen/Li_2019_J._Phys.__Conf._Ser._1267_012036.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..958d8475571f9589d3a3a1515ecdb3dad05e13fa
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/Li_2019_J._Phys.__Conf._Ser._1267_012036.pdf differ
diff --git "a/Bachelorarbeit/V2/Quellen/Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D\342\200\220Bildgebung.pdf" "b/Bachelorarbeit/V2/Quellen/Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D\342\200\220Bildgebung.pdf"
new file mode 100644
index 0000000000000000000000000000000000000000..432bd5283eabc46f8668669e28497fbe0a18a3dd
--- /dev/null
+++ "b/Bachelorarbeit/V2/Quellen/Optik   Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D\342\200\220Bildgebung.pdf"	
@@ -0,0 +1,2708 @@
+%PDF-1.3
+%����
+1 0 obj
+<</Subtype/Form/Filter/FlateDecode/Type/XObject/Matrix [1 0 0 1 0 0]/FormType 1/Resources<</ProcSet [/PDF /Text /ImageB /ImageC /ImageI]/Font<</F1 2 0 R>>>>/BBox[0 0 595.28 841.89]/Length 270>>stream
+x���Ok�@���蘒�'6h�Skۓࡁjk2�[6;a�*~�fz*�a�7��<U�|�(D�@�f
+s

�bY��Vy!T���&�u�u�,fȕ^�0�3��c�P�6p�C��p/%{g=9�&\��:���;ٰ�Z	�T.��"mRJ���i
+�+��
{|��s��oD�BE�`|�5��F�~�n򟁘�����_l7M2ʷ��p��@�>„h��3���'
+~���2}�lm���d�=�w]���5����w�^�p��s_��+�{�
+endstream
+endobj
+3 0 obj
+<</Subtype/Form/Filter/FlateDecode/Type/XObject/Matrix [1 0 0 1 0 0]/FormType 1/Resources<</ProcSet [/PDF /Text /ImageB /ImageC /ImageI]/Font<</F1 2 0 R>>>>/BBox[0 0 595.28 841.89]/Length 270>>stream
+x���Ok�@���蘒�'6h�Skۓࡁjk2�[6;a�*~�fz*�a�7��<U�|�(D�@�f
+s

�bY��Vy!T���&�u�u�,fȕ^�0�3��c�P�6p�C��p/%{g=9�&\��:���;ٰ�Z	�T.��"mRJ���i
+�+��
{|��s��oD�BE�`|�5��F�~�n򟁘�����_l7M2ʷ��p��@�>„h��3���'
+~���2}�lm���d�=�w]���5����w�^�p��s_��+�{�
+endstream
+endobj
+5 0 obj
+<</Subtype/XML/Type/Metadata/Length 58504>>stream
+<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>
+<x:xmpmeta x:xmptk="Adobe XMP Core 5.2-c003 61.141987, 2011/02/22-12:03:51        " xmlns:x="adobe:ns:meta/">
+   <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+      <rdf:Description rdf:about="" xmlns:xmp="http://ns.adobe.com/xap/1.0/" xmlns:xmpGImg="http://ns.adobe.com/xap/1.0/g/img/" xmlns:xmpTPg="http://ns.adobe.com/xap/1.0/t/pg/">
+         <xmp:CreateDate>2013-09-04T15:42:53+02:00</xmp:CreateDate>
+         <xmp:MetadataDate>2025-02-18T07:38:05-08:00</xmp:MetadataDate>
+         <xmp:ModifyDate>2025-02-18T07:38:05-08:00</xmp:ModifyDate>
+         <xmp:CreatorTool>Adobe InDesign CS5.5 (7.5.3)</xmp:CreatorTool>
+         <xmp:PageInfo>
+            <rdf:Seq>
+               <rdf:li rdf:parseType="Resource">
+                  <xmpTPg:PageNumber>1</xmpTPg:PageNumber>
+                  <xmpGImg:format>JPEG</xmpGImg:format>
+                  <xmpGImg:width>256</xmpGImg:width>
+                  <xmpGImg:height>256</xmpGImg:height>
+                  <xmpGImg:image>/9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA
+AQBIAAAAAQAB/+4AE0Fkb2JlAGSAAAAAAQUAAgAg/9sAhAAMCAgICAgMCAgMEAsLCxAUDg0NDhQY
+EhMTExIYFBIUFBQUEhQUGx4eHhsUJCcnJyckMjU1NTI7Ozs7Ozs7Ozs7AQ0LCxAOECIYGCIyKCEo
+MjsyMjIyOzs7Ozs7Ozs7Ozs7Ozs7OztAQEBAQDtAQEBAQEBAQEBAQEBAQEBAQEBAQED/wAARCAEA
+ALUDAREAAhEBAxEB/8QBQgAAAQUBAQEBAQEAAAAAAAAAAwABAgQFBgcICQoLAQABBQEBAQEBAQAA
+AAAAAAABAAIDBAUGBwgJCgsQAAEEAQMCBAIFBwYIBQMMMwEAAhEDBCESMQVBUWETInGBMgYUkaGx
+QiMkFVLBYjM0coLRQwclklPw4fFjczUWorKDJkSTVGRFwqN0NhfSVeJl8rOEw9N14/NGJ5SkhbSV
+xNTk9KW1xdXl9VZmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9xEAAgIBAgQEAwQFBgcHBgI7AQACEQMh
+MRIEQVFhcSITBTKBkRShsUIjwVLR8DMkYuFygpJDUxVjczTxJQYWorKDByY1wtJEk1SjF2RFVTZ0
+ZeLys4TD03Xj80aUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtbm9ic3R1dnd4eXp7fH1+f3/9oADAMB
+AAIRAxEAPwD0nDw8Q4lBNFf82z8xv7o8klJvseJ/oK/8xv8AckpX2PE/0Ff+Y3+5JTVtyOh0WOqu
+djMe36TXBoISUw+2/V7/AEmL/wBBJSvtv1e/0mL/ANBJTZor6bk1+rjspsZMbmtaRI+SSkn2PE/0
+Ff8AmN/uSUr7Hif6Cv8AzG/3JKV9jxP9BX/mN/uSUr7Hif6Cv/Mb/ckpX2PE/wBBX/mN/uSUr7Hi
+f6Cv/Mb/AHJKV9jxP9BX/mN/uSUr7Hif6Cv/ADG/3JKV9jxP9BX/AJjf7klK+x4n+gr/AMxv9ySl
+fY8T/QV/5jf7klK+x4n+gr/zG/3JKV9jxP8AQV/5jf7klK+x4n+gr/zG/wBySlfY8T/QV/5jf7kl
+IXYeJ9rrHoV/zdn5jf3qvJJTLDqf9ko/TPH6NnZn7o/kJvCe6kvpP/0z/uZ/5BLhPdSvSf8A6Z/3
+M/8AIJcJ7qQWdKw7nmy5jLHu5c6uok/EmtLhPdTD9i9P/wBDX/21V/6SS4T3Ur9i9P8A9DX/ANtV
+f+kkuE91J6cNmOz06HGpnO1ja2iT5CtLhPdTP0n/AOmf9zP/ACCXCe6lek//AEz/ALmf+QS4T3Ur
+0n/6Z/3M/wDIJcJ7qV6T/wDTP+5n/kEuE91K9J/+mf8Acz/yCXCe6lek/wD0z/uZ/wCQS4T3Ur0n
+/wCmf9zP/IJcJ7qV6T/9M/7mf+QS4T3Ur0n/AOmf9zP/ACCXCe6lek//AEz/ALmf+QS4T3Ur0n/6
+Z/3M/wDIJcJ7qV6T/wDTP+5n/kEuE91K9J/+mf8Acz/yCXCe6lek/wD0z/uZ/wCQS4T3Ur0n/wCm
+f9zP/IJcJ7qROqf9rr/TP/m7OzP3qv5CXCe6kmH/AESj/i2f9SE5SZJSklKSUpJSklKSUpJSklKS
+UpJSklKSUpJSklKSUpJSklKSUpJSklKSUhd/S6v+Ls/6qpJSsP8AolH/ABbP+pCSkySlJKeE6t9Z
+es4X1utxW5NjOn0ZeFS5hZR6AZfTZZY17nN9be4s9hB2j85JSj/jPsGMbj0ohwIcWG9w/RGluRv1
+x93D4nbs779sFJTYZ/jDffluxcfpwd6l9VGLY68hlgtvsxQ9zm0PbG5k+wvHaZBCSnPx/wDGN1L1
+L8+zCFuLe3Dbi4oscXVWXV5Fj5dViPe4H0HdjEDTUwlOr1Tr3Us131es6V9rpr6vRbkWU4oxTfAq
+qtYN2cPT9u7XxSU53Vvr51YsyMPp2Kyi6i4sbkvuDnObRm04Vm6v7MWgv9UccAnwEpSS7/GXY25/
+T3dNdXlAvoOy+Q2+q01WtDnY5+hXFkluvgkpFmfX/Ps6Fdbg1CmyvGr25dt9LrvXNFOS79WNTGvG
+2zlo5/NASU2KPr3l3dZxD6EYWa2qlmMXjfW+zMfi+rafS3NeA3WudPFJSzf8ZrjjtzHdMAp2VOsj
+Il4N1WTc0Nb6ADv6KQdRyElN0fXDIyvq/wBZ6i/G+yXdLYDFFzbC7fTXeC19uNtBAsjVjklNjoX1
+us6v1izpL8QUtY3JLLhbvLvsl7cd25npVhu7dOhKSnEd/jCz68/7QcXdi5FLGYmI1znk2HJyKPVs
+dXjPsaSKD7Gtf27lJTv/AFX+tT/rO+99WIMfHoZQ7e60ueX31V37dnpAQN5E7u3GuiU9AkpSSlJK
+UkpSSkLv6XV/xdn/AFVSSlYf9Eo/4tn/AFISUmSUpJTx/wBZOu4nR+pW15fRcbJD635bLnPrN1zs
+PHfe2x1fpPcGs2+mHuMg8CElJvq9R0Dq32jDv6DgYluIaLzVXXXbWRkV+pU9rjRV7gJB9uiSnJ6N
+9ZugZuY17ejdNxrb7aLDdU4WuFtuQ6trbTXhy2/cze0H47gkplR9Y/qZlY/Tjb0fDa7NY1uXW6hr
+q8WkV2ZLQ637PsdArkM0PdJTq5X1g+oXUaqa89lORXSdlTMnCscKhtqJO23H/Rs22s92jdRqkpD0
+vrH1M6ljtry8DGxbr7TjnFfjbw/1cl1TIf6DWvbZbSC4jQO+lqElJGfWD/F45wsaMYTuv9U4b2tD
+n0ue5xsNAaHOqYeTJAhJTf6ZV9U+uMtycDCxrRUBhWmzE9Jwa1lbm0lt1THbQwtgcJKb1nQuh3Os
+fd07Esded1pdRW4vM75fLdTu117pKY2/V/otlD8cYVFTXtDZqrYxzYa9jXMc1oLXNFjtpHEmElNb
+on1T6R0Pp9/TaWHJoynbrxkNrcHgMbWGlldddcbW/u690lOhT03p2Nd9ox8Wmm075srra136RwfZ
+7mgH3OEnxKSkB+r/AEEi4HpuIRkmbwaK/wBIZ3y/2e73a690lNrHwsPELji0VUGzbv8ASY1m7Y0M
+ZO0CdrQAPJJSZJSklKSUpJSklIXf0ur/AIuz/qqklKw/6JR/xbP+pCSkySlJKec639Vuk9aybX9S
+z8nY8PjF9Znp1vfS7HL6w9jnNO18xu2zqQkpngfVv6u4ZNuS6vqV5dW4ZGcKbHs9Fja6gzZWxrdo
+b2EpKaWN9SPq/j249zs++52Ga/s/qWUgMbVd9pFfspZINh1mT5pKQO+oHQK8Y0Yufb7atrK7bKnV
+usFFmK19m2tr/oWQdrgkpp9P/wAX+Fk07/rLmsuyBaHtFFldjSz08eotc63HZz9nGrWtIGk8pKdR
+n1L6DWWvZ1DIFlT2WY7/AFKJpNd78uGD0YINthJ3BySmtkfUDoH7Otw8TMfvc39F61jCze2i3FZv
+2MY7bttMwZSU6X1Q6bb0PCyWdSy6bsnLyTkPLLA8D9HVUBu9Oif5ufoBJTvfasX/AE1f+cP70lK+
+1Yv+mr/zh/ekpX2rF/01f+cP70lK+1Yv+mr/AM4f3pKV9qxf9NX/AJw/vSUuy+iw7a7GOPg1wJ/B
+JSRJSklKSUpJSklIXf0ur/i7P+qqSUrD/olH/Fs/6kJKTJKUkp5LrDfq+epXnMflC6RvFezbO0cS
+J4SU0tn1W/fzf/A//IpKVs+q37+b/wCB/wDkUlK2fVb9/N/8D/8AIpKVs+q37+b/AOB/+RSUrZ9V
+v383/wAD/wDIpKVs+q37+b/4H/5FJStn1W/fzf8AwP8A8ikpWz6rfv5v/gf/AJFJStn1W/fzf/A/
+/IpKdmr6o9JuqZcy3I22NDxLmTDhP+jSUz/5m9L/ANLkf5zP/SaSlf8AM3pf+lyP85n/AKTSU2en
+fVzC6ZkjKofa54BbDy0iD8GBJTqpKUkpSSlJKUkpC7+l1f8AF2f9VUkpWH/RKP8Ai2f9SElJklKS
+U5eXf9XWZD25gxvXB9/qMaXTHclp7JKRfafqp4Yn/bbf/IpKUMj6qEgBuJJ0H6Nv/kUlN/8AZXS/
++4eP/wBtM/8AIpKV+yul/wDcPH/7aZ/5FJSv2V0v/uHj/wDbTP8AyKSlfsrpf/cPH/7aZ/5FJSv2
+V0v/ALh4/wD20z/yKSlfsrpf/cPH/wC2mf8AkUlK/ZXS/wDuHj/9tM/8ikpsta1jQxgDWtEADQAD
+sElLpKUkpSSlJKUkpSSlJKUkpC7+l1f8XZ/1VSSlYf8ARKP+LZ/1ISUmSUpJTXs6fgXPNl2NTY93
+LnVtJPxJCSmP7K6X/wBw8f8A7aZ/5FJSh0vpgMjEoBH/AATP/IpKbSSlJKUkpiXtBjUx4An8iFqV
+6jfB3+af7krUr1G+Dv8ANP8Aclaleo3wd/mn+5K1K9Rvg7/NP9yVqV6jfB3+af7krUr1G+Dv80/3
+JWpXqN8Hf5p/uStSvUb4O/zT/claleo3wd/mn+5K1K9Rvg7/ADT/AHJWpXqN8Hf5p/uStS4IIkIq
+RO/pdX/F2f8AVVJKVh/0Sj/i2f8AUhJSZJSklPL9Uyclmfc1mTnMaHaNqqDmDQfRPqt/Ikpq/bMv
+/uX1L/tgf+lklK+2Zf8A3L6l/wBsD/0skpX2zL/7l9S/7YH/AKWSUr7Zl/8AcvqX/bA/9LJKZV5e
+UbGg5XUT7hoaRHPf9Mkpu9b6pnYee6rGt2M2tdENOpHmChHqpzx9YOquLgMg+0wfazwB/d805S1n
+1h6pW3e/IIEgfRZy47R+b4lJT1FGS7qOIzIwrBUdzmn1GbxLCWOBAc3uPFBTH0OsAQMugnxdju/h
+kBJTJlPVht35VJgndtocJBiIm90Ef6hJShR1Nvpxl1uLQBbvpkOIOpbttbtkfFJShR1MOaTlVkSd
+49GJHYN/S6fikpuJKUkpSSlJKY1/RP8AWd+UoBSN39Lq/wCLs/6qpFSsP+iUf8Wz/qQkpMkpSSnm
+Op4+Q/Puc3FzbAXaOqu2sOg+iPTKSmt9lyf+4XUf+3//AFEkpX2XJ/7hdR/7f/8AUSSlfZcn/uF1
+H/t//wBRJKV9lyf+4XUf+3//AFEkplXjZIsaThdQHuGpv057/oklMfrTVXfn202tDmWVta5p7giC
+lHqp4n6v9C+x9RuyL6bAK5GObDX7ZA+lssf7od8EVOhm0vZc14rcxpsqG5rgWGHjlvblMA9TYnMH
+CBd69tfte3+qRvZh5Pqia3Zl3pbRJAG3n+1uTmCna9XZHrQ2QIjXX90eJ/Kkqr2ZCzn1BsMbomdE
+lUoWN92727RLt2kDxSVTJrg7UdueySKU1zXjcwhwPcGQkoil0lKSU851Dr9mJ9bMPpv2mpuHZURf
+WYltpDyzc7tPtgSkp6Gv6J/rO/KUApG7+l1f8XZ/1VSKlYf9Eo/4tn/UhJSZJSklPKdVood1C9zq
+MN5LtXW5JY86D6TfVbH3JKan2fG/7jYH/sYf/S6SlfZ8b/uNgf8AsYf/AEukpX2fG/7jYH/sYf8A
+0ukpX2fG/wC42B/7GH/0ukpnVj43qMjGwR7hxlknn/jklL/Wmp9nVHFlr6oY36G09v5TXJR6qchu
+M9rw832OgyR7Ru+O1oRQmextjdrxIkH5gyPyJKe8w8WnDx20UCGAl2pkkvJe4/MlBKdJSxa10FwB
+jUT2SVazq2PIc4SW8amPuSSDSz6mvPu4IAcOxA7FJQNKNcuJmGu+kPE/FJVsfSI9jCGsJkgaEeIb
+4Skq2BtvqyGUei6yqyT6wc2GH91wc4OM+QKHVdQMbvV8x61k4+T9bsi64ltTckMcf+KiufhLEVj6
+nX9E/wBZ35SgFMHf0ur/AIuz/qqkVKw/6JR/xbP+pCSkySlJKeS6scf9o377cJp3ai2uxz+B9ItY
+QkpqTi/6bp//AG1b/wCk0UKnF/03T/8Atq3/ANJpKVOL/pun/wDbVv8A6TSUqcX/AE3T/wDtq3/0
+mkplUcb1WRdgfSHFVs8/8WklX1uzGYvVQwtLnWNaGgacBHFDiJCzJPgFuMc68/Rra34uJ/IFYHLH
+uwHmvBb7e5tRLyzfvYxob/KcAe54UWWAgd2XFOUxZD3vQupHPwbH2u3WY91lDyNNWmWj47XNUTKN
+W+bQJH54IG348fJJNKNh1ZoLDwORH73ZJVLbrD+jIh/dwHtjxH9ySqC/6QkMMwNS/TUeHxSVooNs
+MNcdAZkGCR2BSVYUat0NshzB2Os+E/BJV0u2sgy524N+jPInxPdJRLyuH0bFH13znZlbLN1Iysdj
+mgt9zmtc/wCIcCkh6qv6J/rO/KUIqYO/pdX/ABdn/VVIqVh/0Sj/AItn/UhJSZJSklORl9FzMnJs
+vrzvSa8yGei10fMuCSkP/N7P/wDLL/wBv/k0lM6Og5lVzLLc71WNcC6s0tG4A6idySnV+zY3+iZ/
+mj+5JSvs2N/omf5o/uSUr7Njj/BM/wA0f3JKeC+vjSetUgGJZE8QS1Scv85YeY+VwqccVvFhcHET
+xucdRHLnFWwYxOpatSkNAmqw7fQe4afpa37QIBDXCeddAqmQAHQugMpnECjoOpv/AHnt/q70jHx6
+bOqZTXer9pvvqJc8BrXAVH9GDBkM8FEQLXxySETHoXWd1XAYw3WOcxrQZc+uxsDudWDTRFYyf1LD
+qZ6lj3Mbt37nMeAGjkn2pKW/aeHzuf8A9tWf+QSUkObjNc5pcQWgk+10aDcYO3XTskpEOq4O9rDY
+Wue4MbvY9suO2I3NH7wSU3ElKSU0XYVz+ss6gSxlVWO6kRq95sc1x3aaBuwR8UlNyv6J/rO/KUAp
+G7+l1f8AF2f9VUipWH/RKP8Ai2f9SElJklKSU831L7X9uu9OvqZbu0NFpbXwPoj0ykprfr3+i6x/
+28f/AEkkpX69/ousf9vH/wBJJKV+vf6LrH/bx/8ASSSlfr3+i6x/28f/AEkkpev7d6jZq6vEj6Vx
+jnv+iSU3Ot43Q7c3d1K8stLQQ2e3A/wT/wAqGoU0RjfVdugynD5/+oEbKF/R+rP/AHMf/nH/ANII
+WVOu7r/SH1mo5DQ0jadpsaY8i1gI+SWqWq7O+rzgQ7Ic4OYKzN2QdA7ePnP53KWqlNzvq+xjqxkv
+h3M5GST8iTIS1Upmd9XmEkZDjLdhDr8hwImdWukTpzylqpVed9Xqg5rMlwDyCZvyDx4Twlqpdud9
+XmWeq3JeCH+pAvydszu+h9GJ7RCWqm3/AM4+lf8Aciv/AKf/AKTS1Ur/AJx9K/7kV/8AT/8ASaWq
+lf8AOPpX/civ/p/+k0tVOkwFrYPMk/eZSAUjd/S6v+Ls/wCqqRUrD/olH/Fs/wCpCSkySlJKeS6t
+QHdRvd9nrfLvpOy2Vk6D8wvEJKan2Yf9xqv/AGNr/wDSiKFfZh/3Gq/9ja//AEokpX2Yf9xqv/Y2
+v/0okpX2Yf8Acar/ANja/wD0okplVjj1Wfq1Q9w/7WsPfw9RJLrdcuyWZobVZcxuwaV311CZP5r9
+UFOf9pzf9Nk/+xdKSlfac3/TZP8A7F0pKV9pzf8ATZP/ALF0pKV9pzf9Nk/+xdKSlfac3/TZP/sX
+SkpX2nN/02T/AOxdKSlfac3/AE2T/wCxdKSlfac3/TZP/sXSkpX2nN/02T/7F0pKV9pzf9Nk/wDs
+XSkptdLyMw59Ic++0EkFr8mp7YIIJLW6mOUlPTJKQu/pdX/F2f8AVVJKVh/0Sj/i2f8AUhJSZJSk
+lPKdV+zftC/f+z927X1vX38D6Wz2/ckpqfqn/mr/APZlJSv1T/zV/wDsykpX6p/5q/8A2ZSUr9U/
+81f/ALMpKZ1fZfUZH7M+kOPtE89pSU6HX6y7OBDC79G3UYov7n88n8ElOb6Tv9E7/wBgG/3pKV6T
+v9E7/wBgG/3pKV6Tv9E7/wBgG/3pKV6Tv9E7/wBgG/3pKdCjoGRkUsua+hgeJDX4rWuHxCSkn/Nr
+J/0uN/7DMSUr/m1k/wClxv8A2GYkpX/NrJ/0uN/7DMSUr/m1k/6XG/8AYZiSlf8ANrJ/0uN/7DMS
+Uzp6BnY9guoyMeuxsw5uM0ESISU7dLbWVMbc8WWAAPeBtBPjCSmDv6XV/wAXZ/1VSSlYf9Eo/wCL
+Z/1ISUmSUpJTyXVrw3qN7ftFbId9F2IywjQfnlhlJTU+0j/uTV/7BV/+k0UK+0j/ALk1f+wVf/pN
+JSvtI/7k1f8AsFX/AOk0lOpT0TqN9LL68jG2WND2zi1Aw4SP8GglIzoPU2va45GMQCCYxqh/6LSU
+6WX1nBwrjRkOeHgA+1jnCD5gJKQ/85Olfv2f9tv/APIpKV/zk6V+/Z/22/8A8ikpX/OTpX79n/bb
+/wDyKSlf85Olfv2f9tv/APIpKV/zk6V+/Z/22/8A8ikpsZnVsPBLBkOcDYNzdrHO0/sgpKa//OTp
+X79n/bb/APyKSlf85Olfv2f9tv8A/IpKV/zk6V+/Z/22/wD8ikpX/OTpX79n/bb/APyKSlf85Olf
+v2f9tv8A/IpKdGq1l1TLq/o2ND2yIMOEjRJTB39Lq/4uz/qqklKw/wCiUf8AFs/6kJKTJKUkpwus
+52ZYXYePj5tZreD6+OwkOAHAI7apKcrd1fx6r/mO/vSUrd1fx6r/AJjv70lK3dX8eq/5jv70lK3d
+X8eq/wCY7+9JTKt3VvUbJ6pG4TLHRz31SU6PWXZgzSKTnhu1v9FZur+/xSU0d/UfHq//AG3/ALUl
+K39R8er/APbf+1JSt/UfHq//AG3/ALUlK39R8er/APbf+1JSt/UfHq//AG3/ALUlOl1p2UH0egc0
+ez3fZW7hP8vzSU5u/qPj1f8A7b/2pKVv6j49X/7b/wBqSlb+o+PV/wDtv/akpW/qPj1f/tv/AGpK
+Vv6j49X/AO2/9qSnp8PccSgv37vTZu9TR87R9P8AleKSlO/pdX/F2f8AVVJKVh/0Sj/i2f8AUhJS
+ZJSklPK9Vq3dQvP2S6yXfTbkhgOg4bsMJKavo/8AdHI/9i2/+k0VK9H/ALo5H/sW3/0mkpXo/wDd
+HI/9i2/+k0lK9H/ujkf+xbf/AEmkplVT+kZ+pXj3DX7W09/+LSU6nWMbItzS+vCuvbtHvryPSb8N
+sFBSsDozMljnZdORiOaYa05BfuHjoAkptf8AN3B/0mR/265JSv8Am7g/6TI/7dckpX/N3B/0mR/2
+65JSv+buD/pMj/t1ySkPXKXuspFeNZkBrCJZf6Ma9xBlJTmfZ7//ACvyP/Y0f+RSUr7Pf/3Av/8A
+Y0f+RSUr7Pf/AOV+R/7Gj/yKSlfZ7/8AyvyP/Y0f+RSUr7Pf/wCV+R/7Gj/yKSnp8MEYlALSwitg
+LS7cR7RoXd/ikpTv6XV/xdn/AFVSSlYf9Eo/4tn/AFISUmSUpJTyXVhj/tG/fVhOO7U22WNfwPpB
+rwElNSMX/Q9P/wC3bf8A0oihUYv+h6f/ANu2/wDpRJSoxf8AQ9P/AO3bf/SiSlRi/wCh6f8A9u2/
++lElMqhjeqyKcD6Q4ttnn/jEkvSZ3U8zFvNNOBbktAB9RnGvb6JQUg/bnUf/ACpyPv8A/MElKb1v
+qBcAelXiTzPH/QSU7KSlJKUkpw/rCyl11XqtxHHaY+02OYefzdpCSnJ9LE/0fTP+3rP/ACSSlvRx
+Ij0+m/8Ab9v/AJJJS/pYn+j6Z/29Z/5JJSvSxP8AR9M/7es/8kkpXpYn+j6Z/wBvWf8AkklPV4QA
+w6A0NAFTIFZJZG0fRJ7eCSl3f0ur/i7P+qqSUrD/AKJR/wAWz/qQkpMkpSSnlOq30N6he11+Gwh2
+rbcYveNB9J3pOn70lNT7Rjf9ycD/ANgz/wCkElK+0Y3/AHJwP/YM/wDpBJSvtGN/3JwP/YM/+kEl
+K+0Y3/cnA/8AYM/+kElM6sjG9RkZOCfcOMQg8/8AEpKdfq3VbMTMNLc6rHAaD6b6nvOvfc1pCSmn
++3rv/LTH/wDYe3/yKSlft67/AMtMf/2Ht/8AIpKV+3rv/LTH/wDYe3/yKSlft67/AMtMf/2Ht/8A
+IpKV+3rv/LTH/wDYe3/yKSm1167HZZR61uMwuZI9eg3E68tIa6ElOX9qwf8AuRgf+wTv/SaSlfas
+H/uRgf8AsE7/ANJpKV9qwf8AuRgf+wTv/SaSlfasH/uRgf8AsE7/ANJpKV9qwf8AuRgf+wTv/SaS
+nq8Itdh0OaWuaamEFjdrSNo+i3SB5JKXd/S6v+Ls/wCqqSUrD/olH/Fs/wCpCSkySlJKeY6nkZDM
++5rcrNrAdo2qncwaD6J9QJKa32rJ/wC5vUf+2P8A1KkpX2rJ/wC5vUf+2P8A1KkpX2rJ/wC5vUf+
+2P8A1KkpX2rJ/wC5vUf+2P8A1KkplXk5JsaDm9QPuGho057/AKVJTodYtyGZpbXkZlbdo9tFPqM/
+ztwSU0vtGZ/3L6j/AOw3/maSlfaMz/uX1H/2G/8AM0lK+0Zn/cvqP/sN/wCZpKV9ozP+5fUf/Yb/
+AMzSUr7Rmf8AcvqP/sN/5mkp0es23sfT6V2VVLNfs9XqA/1vcIKSnO+0Zn/cvqP/ALDf+ZpKV9oz
+P+5fUf8A2G/8zSUr7Rmf9y+o/wDsN/5mkpX2jM/7l9R/9hv/ADNJSvtGZ/3L6j/7Df8AmaSnpcQk
+4tJcXOJrYS542uJgauHY+KSlO/pdX/F2f9VUkpWH/RKP+LZ/1ISUmSUpJTWsz8au70HWMFkgbC8B
+0njTnumHJAGrFotgeq4TSWuuqBGhBsb/AHp6V29Tw3uDGXVucdAA9pJSUm9fy/FJSvX8vxSUr1/L
+8UlK9fySUr1/JJSvX8klK9fySUr1/JJSvX8klOV12nIttqNNeW8Bpk41orA1/OlrklOZ9kzv9B1P
+/wBiG/8ApNJSvsmd/oOp/wDsQ3/0mkpX2TO/0HU//Yhv/pNJTawOlXZT3NyHdQxQ0SHPvBny0rCS
+m5/zcq/7nZv/AG6P/IJKdSmsU0spDnPFbQ3c8y47REuPikpg7+l1f8XZ/wBVUkpWH/RKP+LZ/wBS
+ElJklKSU4uV0b1uqftD1oh7H7Ns/QDdJ3fyfBVJ8nxZuO+y0x1atnTehOsc576NxcS6bHAyTr/hg
+rVS7pXpweh49rbqrcdr2Hc0+o4wR8bkql3U6rfVcA5tjCCJBDSQQf7aVS7qVF/77P80/+TSqXdSo
+v/fZ/mn/AMmlUu6lRf8Avs/zT/5NKpd1Ki/99n+af/JpVLupUX/vs/zT/wCTSqXdSov/AH2f5p/8
+mlUu6lRf++z/ADT/AOTSqXdS4Fs6uaR3hpH/AH5ICSmt1vMtpaMUMofVcwh4vuFRI40lzSilwPQw
+/wDuHhf+xv8A6mSUr0MP/uHhf+xv/qZJSvQw/wDuHhf+xv8A6mSUydi4zRLsLDA88wj/ANHJKY+h
+h/8AcPC/9jf/AFMkp67BAGFjhoa0CpkNY7e0DaNGukyPNJS7v6XV/wAXZ/1VSSlYf9Eo/wCLZ/1I
+SUmSUpJTXs+mUlPM3dFsfdY+cP3OJ9znzqe+qKE+F0XCbv8At4x3zGz0nuEczMu+CSnYZZjVsbWx
+7A1gDWjcNANB3SUv69H+kZ/nBJSvXo/0jP8AOCSlevR/pGf5wSUr16P9Iz/OCSlevR/pGf5wSUr1
+6P8ASM/zgkpXr0f6Rn+cElJElOZ9YH0ttp9V2I07DH2mtzzz+btBQS5Xq4n+k6Z/2zZ/5FJSvVxP
+9J0z/tmz/wAikpXq4n+k6Z/2zZ/5FJSvVxP9J0z/ALZs/wDIpKV6uJ/pOmf9s2f+RSU9XhEHDoLS
+0g1Mg1ghkbR9EHt4JKXd/S6v+Ls/6qpJSsP+iUf8Wz/qQkpMkpSSmvZ9MpKeGyfs/wBptn0J3u59
+aeT4IoRfq3/df/wdJSv1b/uv/wCDpKV+rf8Adf8A8HSUr9W/7r/+DpKV+rf91/8AwdJSv1b/ALr/
+APg6Slfq3/df/wAHSUr9W/7r/wDg6SlwccGR9n0/49JT1fRcvJzcZ1+Q+uz3bWmsEcDWdwCSluvO
+Itpi99PtOjMcXzr4k6IJcre7/ubd/wCwLf70lK3u/wC5t3/sC3+9JSt7v+5t3/sC3+9JSt7v+5t3
+/sC3+9JSt7v+5t3/ALAt/vSU9Vh64dB3F36NnuLdhPtGpb2+CSlO/pdX/F2f9VUkpWH/AESj/i2f
+9SElJklKSU17PplJTyl914vsAtuA3u0HUKmjn90t0+CKGHrX/wCmv/8AclT/AORSUr1r/wDTX/8A
+uSp/8ikpXrX/AOmv/wDclT/5FJSvWv8A9Nf/AO5Kn/yKSletf/pr/wD3JU/+RSUr1r/9Nf8A+5Kn
+/wAikpXrX/6a/wD9yVP/AJFJSvWv/wBNf/7kqf8AyKSmdGTkMuY9r7bCHAhjuoUuDjPBG3VJT0GH
+fmXb/teL9m2xt/SNs3TM/R4hJSHrbb3W1el9ujaZ+xmG8/neaCXN9PM8esf53+1JSvTzPHrH+d/t
+SUr08zx6x/nf7UlK9PM8esf53+1JSvTzPHrH+d/tSU9LiAjFpDt8+myfV+nMD6f8rxSUp39Lq/4u
+z/qqklKw/wCiUf8AFs/6kJKTJKUkpr2fTKSnBt6Hmvse8XY4DnEgHGrJ1PcliKGP7Azv9Njf+w1X
+/kElK/YGd/psb/2Gq/8AIJKV+wM7/TY3/sNV/wCQSUr9gZ3+mxv/AGGq/wDIJKV+wM7/AE2N/wCw
+1X/kElK/YGd/psb/ANhqv/IJKV+wM7/TY3/sNV/5BJTD9j3/APcrD/8AYen/AMgkplV0q5ljHnKx
+CGuBIFFQOh7ENSU7rbK3mGOa4+AIKSmh16k220kYwyIadTkCiNfAuEoJcr7I7/yvb/7HN/8AJpKV
+9kd/5Xt/9jm/+TSUr7I7/wAr2/8Asc3/AMmkpX2R3/le3/2Ob/5NJSvsjv8Ayvb/AOxzf/JpKeqw
+xtw6G7dkVsG3dv2+0abu/wAUlKd/S6v+Ls/6qpJSsP8AolH/ABbP+pCSkySlJKQvqcXEjukpb0X+
+SSlei/ySUr0X+SSlei/ySUr0X+SSlei/ySUr0X+SSlei/wAklOefq30smTjt1/lO/wDJJKV/za6X
+/wBx2/5zv/JJKTYvRsTCebMWoVucNpILjpz3J8ElMOrdOyc2yt1FOJaGNIJyd8jX83YkpofsHqH/
+AHF6Z91396SlfsHqH/cXpn3Xf3pKV+weof8AcXpn3Xf3pKV+weof9xemfdd/ekpX7B6h/wBxemfd
+d/ekp3sat1WNVU8Na5jGtLa52AgAQ2dY8ElLO/pdX/F2f9VUkpWH/RKP+LZ/1ISUvkuvZRY7GaLL
+g0mthMAu7DkJKcpmd9ZhPqdNrd4bbWt/K9ySmf2/6w/+Vbf+32f3pKV9v+sP/lW3/t9n96SlnZ/1
+ijTpjR5m5jvwBCbkMhE8O6ipvUPrCR/yY09p9ZrZ84JSgZGIvdSfDyus3ZIbl4TcejaZeLGvM9vo
+u/gnKdFJSklKSUpJSklKSU1c3IzKNn2TFOVundFja9sRH0+ZSU1vt/Wf/Ko/+xFaSlfb+s/+VR/9
+iK0lK+39Z/8AKo/+xFaSlfb+s/8AlUf/AGIrSUr7f1n/AMqj/wCxFaSlfb+s/wDlUf8A2IrSU6FT
+nvqY6xnpvc0FzJB2kjVsjmElMHf0ur/i7P8AqqklIcPMxBiUA31/zbPz2/ujzSUm+2Yn+nr/AM9v
+96SlfbMT/T1/57f70lK+2Yn+nr/z2/3pKV9sxP8AT1/57f70lK+2Yn+nr/z2/wB6SlfbMT/T1/57
+f70lK+2Yn+nr/wA9v96SlfbMT/T1/wCe3+9JSvtmJ/p6/wDPb/ekpX2zE/09f+e3+9JSvtmJ/p6/
+89v96SlfbMT/AE9f+e3+9JSvtmJ/p6/89v8AekpX2zE/09f+e3+9JSvtmJ/p6/8APb/ekpX2zE/0
+9f8Ant/vSUr7Zif6ev8Az2/3pKV9sxP9PX/nt/vSUr7Zif6ev/Pb/ekpX2zE/wBPX/nt/vSUr7Zi
+f6ev/Pb/AHpKQuzMT7XWfXr/AJuz89v71Xmkp//Z</xmpGImg:image>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <xmpTPg:PageNumber>2</xmpTPg:PageNumber>
+                  <xmpGImg:format>JPEG</xmpGImg:format>
+                  <xmpGImg:width>256</xmpGImg:width>
+                  <xmpGImg:height>256</xmpGImg:height>
+                  <xmpGImg:image>/9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA
+AQBIAAAAAQAB/+4AE0Fkb2JlAGSAAAAAAQUAAgAg/9sAhAAKBwcHBwcKBwcKDgkJCQ4RDAsLDBEU
+EBAQEBAUEQ8RERERDxERFxoaGhcRHyEhISEfKy0tLSsyMjIyMjIyMjIyAQsJCQ4MDh8XFx8rIh0i
+KzIrKysrMjIyMjIyMjIyMjIyMjIyMjI+Pj4+PjJAQEBAQEBAQEBAQEBAQEBAQEBAQED/wAARCAEA
+ALUDAREAAhEBAxEB/8QBogAAAAcBAQEBAQAAAAAAAAAABAUDAgYBAAcICQoLAQACAgMBAQEBAQAA
+AAAAAAABAAIDBAUGBwgJCgsQAAIBAwMCBAIGBwMEAgYCcwECAxEEAAUhEjFBUQYTYSJxgRQykaEH
+FbFCI8FS0eEzFmLwJHKC8SVDNFOSorJjc8I1RCeTo7M2F1RkdMPS4ggmgwkKGBmElEVGpLRW01Uo
+GvLj88TU5PRldYWVpbXF1eX1ZnaGlqa2xtbm9jdHV2d3h5ent8fX5/c4SFhoeIiYqLjI2Oj4KTlJ
+WWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+hEAAgIBAgMFBQQFBgQIAwNtAQACEQMEIRIxQQVRE2Ei
+BnGBkTKhsfAUwdHhI0IVUmJy8TMkNEOCFpJTJaJjssIHc9I14kSDF1STCAkKGBkmNkUaJ2R0VTfy
+o7PDKCnT4/OElKS0xNTk9GV1hZWltcXV5fVGVmZ2hpamtsbW5vZHV2d3h5ent8fX5/c4SFhoeIiY
+qLjI2Oj4OUlZaXmJmam5ydnp+So6SlpqeoqaqrrK2ur6/9oADAMBAAIRAxEAPwCbeU/KflW58q6L
+cXGi6fNNNp9rJJJJaws7u0MbMzM0ZJJJ3OKpt/gzyf8A9WHTf+kOD/qnirv8GeT/APqw6b/0hwf9
+U8Vd/gzyf/1YdN/6Q4P+qeKu/wAGeT/+rDpv/SHB/wBU8Vd/gzyf/wBWHTf+kOD/AKp4q7/Bnk//
+AKsOm/8ASHB/1TxV3+DPJ/8A1YdN/wCkOD/qnirv8GeT/wDqw6b/ANIcH/VPFXf4M8n/APVh03/p
+Dg/6p4q7/Bnk/wD6sOm/9IcH/VPFXf4M8n/9WHTf+kOD/qnirv8ABnk//qw6b/0hwf8AVPFWK/mV
+5a8u6d5RurrT9GsYLhXhCvDbRI4DSKDRkQHpgPJhl+l5DFoV9KA31FVVhUH0q7H/AGOQtoImO9GW
+3luENyvoSiAVJESqPvYAZEyk4+fJngLA271GW10mCVvTgjdF2XmFav3AYLkWniz5B9VIdls2Pw2s
+I/2C/wBMkAe9tjCY5yl81W306Cfkwt4uKbueC0A99sBJDDLnljHMo+00qwlDgWkRCKXLMi1Cjv0+
+4ZHiLhT1GY8pH5oq5sNKiv8A0hZQCOI+kw9JNyvwufs+NaYOM2uXUZYkjilt5qZ0rTg8lk9rAHVy
+El9NRv4HboceI8rY/mMw/iPzKl+iYPU+q/UovU8PSXl4/wAuPFLvX8xl58UvmXuHk6KQ+UdCIncA
+6babAJQfuI/FDlxge8/Z+p6Sk59GT/lok+6P/qnjwHvP2fqWnejJ/wAtEn3R/wDVPHgPefs/UtO9
+GT/lok+6P/qnjwHvP2fqWnejJ/y0SfdH/wBU8eA95+z9S070ZP8Alok+6P8A6p48B7z9n6lp3oyf
+8tEn3R/9U8eA95+z9S070ZP+WiT7o/8AqnjwHvP2fqWnejJ/y0SfdH/1Tx4D3n7P1LTvRk/5aJPu
+j/6p48B7z9n6lp3oyf8ALRJ90f8A1Tx4D3n7P1LTvRk/5aJPuj/6p48B7z9n6lp3oyf8tEn3R/8A
+VPHgPefs/UtILW9Ct9f0uTSb2aZYpWVmkiKLIODBhSsbL2/lw8O26JQEhRY8Pyx0pVCLqepqAKCk
+0Xb/AKN8PCvCe9qX8rtImiMMmp6myN9oGWHf/p3x4USx8XMoT/lTPlitfreoH/npD/2T48LHwIrh
++Tnlgf8AH1fn/npD/wBk+DhQdPEq4/Kjy8FCC6vgnUqJIqE+J/cYPDDV+Qhvud1aH8sdBgNUuLw/
+ErEF4qHjuAf3PTB4Qax2Xi7z+Pgsb8rtAYgm5vSetecW5/5EY+CEfyTi7z+Pgvm/LHQZ39R7i85U
+VTR49+IpX+59sTiC/wAlYu8/j4Kv/KutE9L0/XuuQX0/V5x8+PhX0vDb5YfCCf5Kw95TLyZ/yh+g
+/wDbNs/+TEeTc9OGdVIBrU9KAnp8sBkArXqr4N/wLf0wcQW3eqvg3/At/THiC271V8G/4Fv6Y8QW
+3eqvg3/At/THiC271V8G/wCBb+mPEFt3qr4N/wAC39MeILbvVXwb/gW/pjxBbd6q+Df8C39MeILb
+vVXwb/gW/pjxBbd6q+Df8C39MeILbvVXwb/gW/pjxBbaa4hjQySOI0U0LP8ACAT/AK1PHCDaqX6S
+07/lrh/5GL/XCrv0lp3/AC1w/wDIxf64q79Jad/y1w/8jF/rirv0lp3/AC1w/wDIxf64q79Jad/y
+1w/8jF/riqrFPBcKWgkSVQaEowYA/RiqpirsVdiqS+TP+UP0H/tm2f8AyYjxVH3lv9YuLQ+o8Xoy
+GX92QOVFI4PUGqmuR/iVFAUAA7ZJW8VdirsVdirsVdirsVdirsVdiqV61w/R9xz9DjzT/erl6X7H
+X09/lgjzKsX/ANG/7U3/AE8YVd/o3/am/wCnjFXf6N/2pv8Ap4xV3+jf9qb/AKeMVd/o3/am/wCn
+jFWR+W/T+qy+n9Vp6m/1Lnw+yPtepvXFU5xV2KuxVJfJn/KH6D/2zbP/AJMR4qmkn9/D/sv1ZE/U
+rri4FuocxySgmhES8yNiakDftklQy6rG2xt7lG+KitEwJ4/hv2xVWtb5LskJFNHQVJmiaMdStPjA
+qdsVROKuxV2KuxV2KtdNzirgQRUGo9sVbxVLNZcx2Fw4mFvR0/eGMTU+x+wQa4I8yrGfrjf9XRf+
+4ev/AFTwq7643/V0X/uHr/1TxV31xv8Aq6L/ANw9f+qeKu+uN/1dF/7h6/8AVPFXfXG/6ui/9w9f
++qeKsi8uSmW1lY3AuaSU5CEQU2G3EKtfniqb4q7FXYqkvkz/AJQ/Qf8Atm2f/JiPFU0k/v4f9l+r
+In6lVCTWgHTJKo3l3badaT3t0wit7aNppX8FQcmO3sMVWWN/bahaQX9kTJa3MayxSU4gqw5A0ahH
+vtitIgSLz9M7OQWCmlSBStPvGKaXV9sUOr7Yq6vtirq7VIxVoVO7DvsPDFXFgOvXsMVpsEntTFUv
+1YyLZTmL6xy5rT6oKy/sfZ/jgjzKsd9S+pWmt/8AA/2YVRen2t3fSNG1xqtqFXlynooO9KD4euKp
+h+gbj/q63v8AwY/5pxV36BuP+rre/wDBj/mnFXfoG4/6ut7/AMGP+acVR1hZvZRtG9zLdFm5cpjy
+I2pQdMVRWKuxV2KpL5M/5Q/Qf+2bZ/8AJiPFUxupordknmdY44w7O7sFVQB1Zm2AyJ+pCo88EJRZ
+ZURpTxQMwBdqVotepoMklDax6Y0y5lmWV1giab07dykjemC9FKsm+3jTFUPpVpBLaWt8JL1DIizi
+K5ncuOaqaSqHZeg3HTFUS9nZtqUV66A3SQvHHJU1EbMhZRvTrTFQtvIdUkkBsb2K3XiBxlg9apBO
+4KzRdsUtCDWdwb6Ahg9KWxBFQeFP9II+E9dt8UK1ol8hJvLiKYEAIIojGa1NSS0sldqeGKq9Vc7E
+EA/eR/TFWyw6DqfuGKqGnrdLaqt9NHc3AZxJLFGYkNHalIzJKRQbfaOKonFUHe3UtnbzTwwi4dXU
+CMuI61Cj7TVGRjzKsVmMk80kz6fKGkYuQuoKBVjXYcckqz02/wCWCb/uIL/zTirvTb/lgm/7iC/8
+04q702/5YJv+4gv/ADTirvTb/lgm/wC4gv8AzTirIvLgK2stYXg/efZecTk7DfkOnyxVN8VdirsV
+SXyZ/wAofoP/AGzbP/kxHiqK1e3t7q3aC7iS4hdH5xyRGdWoOQrEN23HQb5H+JUqleG7ZDeWayNp
+7ie3Mmn3JVXUUDpUEchXanTJKn9zHDLayw3QV4pEKSK32WVhxIIPY1xVdBDDbQx29uixQwqEjjQA
+KqqKKqgdAB0xVzCsi068Wp964qv6jFUHBqSTajd6d6E6GzWJjPJGVhk9UM1I3OzcePxYppGE+HU4
+oWq6mqxkNxNG3rQ+B98U0uAA+nrihbH9k/6zf8SOKr8VS/U7V7yzngjijnZnUhJiyoaBDuUocEeZ
+VisltbRO0UkOlo6EqymeUEEGhB+LCqJsLfy+xf8ASS2CCg9P0JnNT3ryfFUb9V8k/wA1v/yNP/Ne
+Ku+q+Sf5rf8A5Gn/AJrxV31XyT/Nb/8AI0/814qmukx6THC40goYi1X9Niw5UHiT2xVH4q7FXYqk
+vkz/AJQ/Qf8Atm2f/JiPFUxu44pSsU4BikV0cE0BVhQiu2RP1KqRG3hjSKNwEjUKtWqaAUG7Ek5J
+UPqb2TWFwbpRcRJG7vEtGZgqklQtd6janfpiq3ShpsVjE+nxrbQXAE4jI4H4wD8S12NMVVZ7y3gl
+i5tX1KovEFtyV68a0HviqsZIwah19xUb4qhzf263gttzJIgYUBKgAt9pxVR9+Kq3+jE8iUJPU7Yp
+4igNOh0nTbu8gswUkvZmvZyWd1aRwgZuUjMoOw+Edu2KEy9WL+dfvGKqFve20rywo/xQsQxIIU1L
+fZY7Hp2xVdcXtrbR+pLNGlSFTm6oGc/ZUFiBUnpiqF1W5a1sZ547hbUh0pMyGQCoT9lQ3XBHmVYm
+93ayO0kmoWLu5LMzWBJJO5JJgwq19Ysv+W6w/wCkD/rxirvrFl/y3WH/AEgf9eMVd9Ysv+W6w/6Q
+P+vGKu+sWX/LdYf9IH/XjFWS+WXie0lMM0M4Em7QQ+goPEbFeCVPviqdYq7FXYqkvkz/AJQ/Qf8A
+tm2f/JiPFUZqN+tgYpDHLLz5LSGMyU+yd6MtMBBtUF/iNP8Allu/+kZv+qmNSV3+I0/5Zbv/AKRm
+/wCa8akrv8Rp/wAst3/0jN/zXjUld/iNP+WW7/6Rm/5rxqSu/wARp/yy3f8A0jN/zXjUld/iNP8A
+llu/+kZv+a8akrv8Rp/yy3f/AEjN/wA141JXf4jT/llu/wDpGb/mvGpK7/Eaf8st3/0jN/zXjUld
+/iNP+WW7/wCkZv8AmvGpKsl123nUJNY3MighgHtSwDKag7v1GNSVGauZodPleN5VcspDWqc3G6jZ
+Sw8PHGIpWN/XNQ/5adV/6RV/6q4Vd9c1D/lp1X/pFX/qrirvrmof8tOq/wDSKv8A1VxV31zUP+Wn
+Vf8ApFX/AKq4q765qH/LTqv/AEir/wBVcVZB5elmltpWnkuZCJKA3cYjalB9kBm2xVNsVdirsVSX
+yZ/yh+g/9s2z/wCTEeKqHmwxCO29UxD4np6zyp2Xp6P8cVY3ys/5rT/kddYVdys/5rT/AJHXWKu5
+Wf8ANaf8jrrFXcrP+a0/5HXWKu5Wf81p/wAjrrFXcrP+a0/5HXWKu5Wf81p/yOusVdys/wCa0/5H
+XWKu5Wf81p/yOusVdys/5rT/AJHXWKp35b1KCGc2XqW4SbdBE0zuZPhAH70dKDAqd6ykj6fIsUcs
+rErRLd/TkO46NQ4qxv6tff8ALFqf/SV/zZirvq19/wAsWp/9JX/NmKu+rX3/ACxan/0lf82Yq76t
+ff8ALFqf/SV/zZirvq19/wAsWp/9JX/NmKp9oEcsdtIJoriEl6gXUnqsRQbg0G2KprirsVdiqS+T
+P+UP0H/tm2f/ACYjxVrzI86Jb+g0q1LV9IoP5evqYqkPrX/+/Lv74MVd61//AL8u/vgxV3rX/wDv
+y7++DFXetf8A+/Lv74MVd61//vy7++DFXetf/wC/Lv74MVd61/8A78u/vgxV3rX/APvy7++DFXet
+f/78u/vgxV3rX/8Avy7++DFVSI6tJ8UP11yvdPRNPuxVM2SaTQJV1JDIxlHw38giFKrSroVp7Yqk
+v1TT/wDll07/AKT2/wCquKu+qaf/AMsunf8ASe3/AFVxV31TT/8All07/pPb/qrirvqmn/8ALLp3
+/Se3/VXFXfVNP/5ZdO/6T2/6q4qyPy3HDFayiCOCIGSpFvMZ1J4jqxZqH2xVOcVdirsVSXyZ/wAo
+foP/AGzbP/kxHiqtrely6msKxej+7LE+uhfrTpQjwxVKf8KXfjZf8iW/5qxV3+FLvxsv+RLf81Yq
+iLLyusc3K/jtZouJ+GOMqeXY1riqYf4d0T/ljj+4/wBcVd/h3RP+WOP7j/XFXf4d0T/ljj+4/wBc
+Vd/h3RP+WOP7j/XFXf4d0T/ljj+4/wBcVd/h3RP+WOP7j/XFXf4d0T/ljj+4/wBcVRNpp9lYBhZw
+rCJKFuPenT9eKofXWRdNkMhhVarU3Kl4/tDqqgnFWKetZ/790r/kRL/zTirvWs/9+6V/yIl/5pxV
+3rWf+/dK/wCREv8AzTirvWs/9+6V/wAiJf8AmnFXetZ/790r/kRL/wA04qyPy20TWspia2YepubR
+GRa8R9oOBviqc4q7FXYqkvkz/lD9B/7Ztn/yYjxV3mG4ng+r+jdy2nLnX0YPX5U4ddxSmKpN+kL3
+/q7Xf/SD/wA3Yq79IXv/AFdrv/pB/wCbsVd+kL3/AKu13/0g/wDN2Ku/SF7/ANXa7/6Qf+bsVd+k
+L3/q7Xf/AEg/83Yqml7c3CaRaSrezRu5+KZbfm77N9qOvw4qlf6Qvf8Aq7Xf/SD/AM3Yq79IXv8A
+1drv/pB/5uxV36Qvf+rtd/8ASD/zdirv0he/9Xa7/wCkH/m7FXfpC9/6u13/ANIP/N2Kpp9YmbQG
+la8mZ+dPrBtvj+0NvRr+OKpR9auP+rnc/wDSAP8AmrFXfWrj/q53P/SAP+asVd9auP8Aq53P/SAP
++asVd9auP+rnc/8ASAP+asVd9auP+rnc/wDSAP8AmrFWQeXpHktpS9w9yRJTlJD6BGw24gmvzxVN
+sVdirsVSXyZ/yh+g/wDbNs/+TEeKteY1um+rfVhen7fL6k3H+SnP+GKpJ6ep/wAutf8AIw/0xV3p
+6n/LrX/Iw/0xV3p6n/LrX/Iw/wBMVd6ep/y61/yMP9MVd6ep/wAutf8AIw/0xVNr5bs6NZhBf+qD
+8fpPSfo396cVSn09T/l1r/kYf6Yq709T/l1r/kYf6Yq709T/AJda/wCRh/pirvT1P+XWv+Rh/pir
+vT1P+XWv+Rh/piqbKt3/AIfZaX/repsC/wDpNOQ6N4YqlPp6n/LrX/Iw/wBMVd6ep/y61/yMP9MV
+d6ep/wAutf8AIw/0xV3p6n/LrX/Iw/0xV3p6n/LrX/Iw/wBMVZB5eW4W2l+sC6Dept9dbk9KD7Pt
+iqbYq7FXYqkvkz/lD9B/7Ztn/wAmI8VU/M6K/wBW5W63FPU+1ci34/Y/mZeVfwxVIfRT/q3x/wDc
+RT/qpirvRT/q3x/9xFP+qmKu9FP+rfH/ANxFP+qmKu9FP+rfH/3EU/6qYq70U/6t8f8A3EU/6qYq
+nF/Gp0SyU2qOAR+6N0qBdm6TFwG+/FUn9FP+rfH/ANxFP+qmKu9FP+rfH/3EU/6qYq70U/6t8f8A
+3EU/6qYq70U/6t8f/cRT/qpirvRT/q3x/wDcRT/qpiqcLGv+HHT6qlPU/ufrS8ftDf1+dPoriqT+
+in/Vvj/7iKf9VMVd6Kf9W+P/ALiKf9VMVd6Kf9W+P/uIp/1UxV3op/1b4/8AuIp/1UxV3op/1b4/
++4in/VTFWR+W0CWsoEC2/wC8+ys4uK/CN+Ss1Pliqc4q7FXYqkvkz/lD9B/7Ztn/AMmI8VU/M/pf
+6N6n1L/dlPrvqf5H2PS/GuKpD/o3/am/6eMVd/o3/am/6eMVd/o3/am/6eMVd/o3/am/6eMVd/o3
+/am/6eMVTi/9H9CWXL6hxqKer6vodG/uuPxffiqT/wCjf9qb/p4xV3+jf9qb/p4xV3+jf9qb/p4x
+V3+jf9qb/p4xV3+jf9qb/p4xVOuCJ5cJYWAR3DD+9+rEchv/AD12xVJf9G/7U3/Txirv9G/7U3/T
+xirv9G/7U3/Txirv9G/7U3/Txirv9G/7U3/TxirI/Lfp/VZfT+q09Tf6lz4fZH2vU3riqc4q7FXY
+qkvkz/lD9B/7Ztn/AMmI8VU/M7qn1blcLb19T7VsLjl9j+ZW40/HFUh9ZP8Aq4R/9w5P+qeKu9ZP
++rhH/wBw5P8AqnirvWT/AKuEf/cOT/qnirvWT/q4R/8AcOT/AKp4q71k/wCrhH/3Dk/6p4qyeCbT
+k0q1fUZIXjI+B5Y1jUtv0jIAXFVn1vyt/PZfdH/TFXfW/K389l90f9MVd9b8rfz2X3R/0xV31vyt
+/PZfdH/TFVW2/wAP3jmK1W1mcDkVRUJABArsPfFXa0sUGlOsbJbIpWh9ESqPiH+6uJH4Yqxb1k/6
+uEf/AHDk/wCqeKu9ZP8Aq4R/9w5P+qeKu9ZP+rhH/wBw5P8AqnirvWT/AKuEf/cOT/qnirvWT/q4
+R/8AcOT/AKp4qyPy24e1lInW4/efaWAW9PhG3FVWvzxVOcVdirsVSXyZ/wAofoP/AGzbP/kxHirv
+MJnH1f0Wvl+3X6gnP+T+8+Jae304qk3O9/35rn/Ij/r5irud7/vzXP8AkR/18xV3O9/35rn/ACI/
+6+Yq7ne/781z/kR/18xV3O9/35rn/Ij/AK+Yqm13p15qekWkUD8ZEPJjfRguRRh8SsslG3xVLP8A
+Cmsf7+sv+RCf9UcVd/hTWP8Af1l/yIT/AKo4qmel+XIoYXXVYba5lLVRkiVQFoNtkTviqO/QWjf8
+sUP/AAAxVVttNsLNzLa28cLkcSyKASCQabfLFVLWTINPkMRuA1VobReUvUfZFR9OKsb53v8AvzXP
++RH/AF8xV3O9/wB+a5/yI/6+Yq7ne/781z/kR/18xV3O9/35rn/Ij/r5irud7/vzXP8AkR/18xVP
+tAMptpPWa7Y89vrycHpQfZFW2xVNcVdirsVSXyZ/yh+g/wDbNs/+TEeKteY4pJPq3p20lzTnX07g
+Qcfsdag8q4qkn1W4/wCrZc/9J4/5pxV31W4/6tlz/wBJ4/5pxV31W4/6tlz/ANJ4/wCacVd9VuP+
+rZc/9J4/5pxV31W4/wCrZc/9J4/5pxVNr6GVtGs0FpLIyneJbkIybN9qWnxYqlP1W4/6tlz/ANJ4
+/wCacVd9VuP+rZc/9J4/5pxV31W4/wCrZc/9J4/5pxV31W4/6tlz/wBJ4/5pxV31W4/6tlz/ANJ4
+/wCacVTZYZf8PtH9UlDepX0PrI5n4hv61PwxVKfqtx/1bLn/AKTx/wA04q76rcf9Wy5/6Tx/zTir
+vqtx/wBWy5/6Tx/zTirvqtx/1bLn/pPH/NOKu+q3H/Vsuf8ApPH/ADTirIPL0bx20oe3e2Jkrxkm
+9cnYb8gBT5Yqm2KuxV2KpL5M/wCUP0H/ALZtn/yYjxVfrunTX/oejbQXPp86/WGdePLj9n0yOtN8
+VSn/AA7ef9W2w/5GTf8ANWKu/wAO3n/VtsP+Rk3/ADVirv8ADt5/1bbD/kZN/wA1Yq7/AA7ef9W2
+w/5GTf8ANWKu/wAO3n/VtsP+Rk3/ADViqZXmlzzaXa2i2ltI8Jq0TvII12P2Cp5d++Kpb/h28/6t
+th/yMm/5qxV3+Hbz/q22H/Iyb/mrFXf4dvP+rbYf8jJv+asVd/h28/6tth/yMm/5qxV3+Hbz/q22
+H/Iyb/mrFUyXS5xorWP1S29Qvy9DnJ6X2ga8q8q4qlv+Hbz/AKtth/yMm/5qxVH2PlqxaEnUbGBJ
+eR4iF5CvGgp9puta4qif8MaF/wAsa/8ABP8A81Yq7/DGhf8ALGv/AAT/APNWKu/wxoX/ACxr/wAE
+/wDzViqMstPs9OjaKziESOeTAEmppT9onFUTirsVdiqS+TP+UP0H/tm2f/JiPFU3L0biFLEAE0p3
+r4keGAy3V3Nv99t/wv8AzVgs9yu5t/vtv+F/5qxs9yu5t/vtv+F/5qxs9yu5t/vtv+F/5qxs9yu5
+t/vtv+F/5qxs9yu5t/vtv+F/5qxs9yu5t/vtvvX/AJqxs9yu5t/vtv8Ahf8AmrGz3K7m3++2/wCF
+/wCasbPcrubf77b/AIX/AJqxs9yu5t/vtv8Ahf8AmrGz3Kh77UY7CzlvJUYrCVDKtOVWKgd6ftYJ
+ZKFs8WM5JUGPP+YOmJJ6X1adiSAGX0yu/uJMMZiTkfkZVdqUn5k6TG/pta3FagCnpkGvgRJlnAXH
+lER5lSb80tGSYwG0uiwNKgRkfeJMjIGLi/m8d03/AMrQ0b1DF9UuiQaVAjI+8SZRLUAdG/GRPku/
+5Wbo/q+j9VuuVeNQEI+/1KYPzI7m2OHi6sk0bV4NatDeW6NGgdo6PStVp/KWHfLcc+MWjLiOM0j8
+m1uxV2KpL5M/5Q/Qf+2bZ/8AJiPFU3H963+qv62yI+pVO+vINOsrjULkkQWkTzykCpCRqXag+QyS
+oDy75k0vzRZPqGks7wRSmBjIpQ81VHOx9nGKptirFPPfnGDyjaWxudPfUIr9miejBEUKByDEq1SQ
+2wpvQ74qno0zSriFWWBGjkHNStRUMK8gQe9cVVG02wYcWt0I4GKhG3A1qv44qxnzxrWh+ULOG+uN
+PjvZb2ZYDDyCMyKjBn3Vq8V2/wBliqfWENm9nb6lDaPA7xC4WA/DIpdK8GHLjy+KhBPXFUp8l+eL
+Xzl9fENs9m9hIq8JGDMyPy4saKKGqGq708cVZRirGPM/nmx8sapp2lTW8l3NqNfgtypkjqyxxn02
+K15sSBuOmKovzUpfQb9F6sYwPmWiynN9BcjRf3o+P3MHhultbeOFuJZQACCCCSx71pksVcNO0hhk
+QZdEi1Wwuby6luIQpjdiVPIdPoqMmNTDF6Sd3Qa3PGy1a2tjNqDzXqidBbxosbVoHARamlP5TmPn
+1FRNJ0+THq88qAPDEDfvT/TNS023iSNEMYhJJpEwFFqa1K/y++YEpRI83aYp4ccK2BTt7q3e2+uK
+axcGflsdgRvluONORAgi068oTRz6dPLEQVNzJQingnhmdg+l1+uN5B7k+y1xXYq7FUl8mf8AKH6D
+/wBs2z/5MR4qm4/vW/1V/W2RH1Kp31nBqNlcafcgmC7ieCUA0JSRSjUPyOSVJPJGj6bo2imLSlZI
+Li4mnId/U35ekpDeHCNcANht1GLw58LIsLUkfm7yva+btIOlXMhgKyxzRTAFjGyGjELyUHkjMu/j
+XtiqJ8vCOPRrS2SVpTaRrbOXpzDRAIVbiqeHhuN8ANtmbEccqTPC1sT8xeWLDzZrlnHeys8GkL6s
+1rRTG5lPwqxXjICVTfenTbrgvdsOOsYkep2ZZha0s03QNH0e6uLjTrSO3lvC0k0iirszMXertU0q
+2y9BinomeKGL33ky21PzjbeabuYSrYRJHFasmwkQtIknMMPsl67g/hinojfNKl9CvkXqzRqPmWiG
+U5fpLfov70fH7nnot7j0l9NQ+wFeQoT7HkPHK4kja3bCeQRBihLyzuJlkjMQAbruATvU7q+WDLwk
+G3S60Z5YzHaioLBGQrNCrGnxfBWp+7MfNniCU4tP6RYHyRNrzjtSpCqHPMVLCgO4FAlO2arPGM8l
+25GHCROMug6LkcJaSwGnJ42VSDt8W4yAiZZQeluYLMdizf8ALeN49AlR6Ei6k3BqPsx9DnQafaDr
+tZ9fwZblzjuxV2KpL5M/5Q/Qf+2bZ/8AJiPFU3H963+qv62yI+pVK+nNtZXFwKkwxO4A6/CpO3TC
+eTPFHjmB3lKPKMldPkhNP3UvwgdlKrT8QcEOTldpR/eA94T/ACThOxVidrfnSNfuraZq29xMedf2
+C55o33OAfbftkAaLs8mHxtPEjmAyHU7+PTbOS5ehYDjEpP2nP2V/r7ZImg4ODCcswEt8qrI9tc3s
+1WluZjVz1YKAf+JM2CDkdoECQiOQCe5JwlOX4Qr/AMpBPbY7GvyrXFMV5IAJPQbnFC2MMEHP7R3b
+5nfFMjuk/mhS+h3qL1Z4gPmWiynL9JbtH/ej4/cwqzWlnHUg79Qag7DvlVVL4O6ifQpz0q304kOs
+1I9KvaaxaRRRhpacVAOzdh8s0+u00pSNOTizYTGPqHIJfPPHJp5jjYM1Idu/wrJX9eIhK+Tj+MB1
+Sy4ZpIwIga8VVidv1nwyzCKlv3s8GUVVvRfy5Vl8vurEEi5k3BDD7Kdxm504qDja36/gyvLnGdir
+sVSXyZ/yh+g/9s2z/wCTEeKpuP71v9Vf1tkR9SoDzBN6OkXLDqwVP+DZU9vHDLk5GjjxZgknlCYJ
+eXEHeWMP7fu2p/zMyMObmdpRuAPcfx9zLcm6t2KsM81QiPVOYXaaJWJ8WFU/UoyuXN2/Z8rxV3FK
+ptWutRSCG5bktuhELEmsgJpzbxOwH498F25MNNHESR15s30CH0dItRUnmnq1P/FhMg/BssjydNrJ
+cWaSY4WhpgGBVhUEUI9jioNKXLmEStSxPKm32D8X47YsqrdWxYpL5nVn0S9RftM8QHbctFlOX6S3
+6P8Avfn9zBrLWYrOI28xBYlTGRVw3IDfku2QOEk2HMzjjiHT3r3EdYQjKw2beh+kVwkcBous1OGZ
+hYISdYQy7SueO1Qen/C5h6ux/C4mlzxybcTVwZLW3/d0ciiivf55hYZnJOuTteACNtuim3JVlJYU
+Ugjr8xhjGQyVTLSxNvQ/y+DLoHFiCRO4qCCDsncdc3Om2gw1v958GUZe4zsVdiqS+TP+UP0H/tm2
+f/JiPFU25BZHZiAAq1J6dWyI+pWLefNajtdCd7KWKaUNzKcuVUUH+Vv5iuThESkAW/BM44ymP4Qw
+7yR5puZdXRrxAIwjO3og1KsOAWhJ/aYHr2zI1OkjiiJAtWn7SnqhOEwNhY+YerWt7bXqc7dw1PtL
+0YfMZjMVckAEk0A3JOKsI8/30YgR7OWOWSGC4kkUHkQECla8fE1HUe3TBw8UwO92GimceHJMDkNv
+fRYJpOv3upTelLGnIqzxGMEFSorxoSa16ZlarRRx4+KLR2Z2zPNn4JgUXs+nX9neQL9VNOKj90w4
+uooNiuYzjy+oozFCDudRgiWsU0DOpAZHk47d91Dmo+WKvKD+afmuW+eGxs7Ri8piiQh6E14rX979
+J6ZlT0tQ4g48dXxSEXqWk6nHeW8Mc0im8Ea+sApQM4HxlFJbavQVOYrkIfzGpbR7tV+0ZIgPnyiy
+rJ9Jb9J/efP7nmN1bSNcRyoQ9QtPjFCaUrscljFBy8u4tC/WNQiZQ7iYsQFTmNw3hx2yyeKNOnzT
+lVcwrx3VhHJ6cbAs3QUJFfuzXauOcjk06bSaETuMjXxUdQuYbmIwwFRIaUCCgPetemYGDDkjkuQd
+xWIQqJJS1UmWUIzfED2av41zMIbdOC9c8gU/w+oBBKyuDTfei98zNKCIOPrf7z4Mmy9xnYq7FUl8
+mf8AKH6D/wBs2z/5MR4qmc0EVyJYJhyjkRVYVIqKt3GRH1KxxtN0SCSaOXUErGSDHPT1FoBVaVUt
+1HbJJBIV9M0jyv6zRad6PrLyDLCqI1EYI1aIDStPwyUskpcywjCMeQTuCytrZuUKcWI4k1J2/wAx
+kWSrLEk0TwyjkkilHHiGFCNsVY7daHpWnE3F1dLDBIwRDOAeJPJuPOo2+eLITkBV7KNtZ+SrZ/Uh
+ubNX3YtG0KmgNG3XfqfHJnJIiiWsY4g2AyCztrFUS5s6OkiBkkVuSsrbgg1pQ5Bki8VY9d6HpdgT
+cyXQtLam6zsOCmoA4szLT6TiqGi03yek3rG4tPXeknqr6Ss1dwwY1J+dcn4kqq9mPhxu63TyytdO
+ZI7uzKyow5Ryo3JSD3BU0OQZKWs8P0fcB0SReaVWSUQrtwI/eGlOmRABtlCZhKwxZo7FjVrOzJ8T
+qCf81YeENx1uQ93yCm1npbmrafZE+Jv46/8AEsNNM8nFzA+Sz9G6OW5fo6xqO/1+Ov8AxLITxRkN
+2AABug79G6PWv6Osq+P1+P8A5qyo6PGe/wCZbhqJDu+S5NN0pnVU02yZ2ICgX0ZJJ6U3wfkcfn8y
+2DXZByr5BmWj2EWnWKQxQC1LEu8StzCsdj8X0ZdjxjGKDVlyyyyso/JtbsVdiqS+TP8AlD9B/wC2
+bZ/8mI8VTV3EchZujAAbgdCf5iPHI7gq19Zj/wA2T/mrGz3K19Yj8Ov+Un/NWNnuVv6yn+bJ/wA1
+Y2e5XfWU/wA2T/mrGz3K76yn+bJ/zVjZ7ld9Zj8P+GT/AJqxs9yu+sp/myf81Y2e5XfWU/zZP+as
+bPcrvrMf+bJ/zVjZ7ld9Zj/zZP8AmrGz3K4XCE0AqT0AZP8AmrGz3KgdaSR9Nn4o5ZnQhUhS5ag4
+j+6c8T0wxVi/1e5/5Z7n/uEW/wDzVhV31e5/5Z7n/uEW/wDzVirvq9z/AMs9z/3CLf8A5qxV31e5
+/wCWe5/7hFv/AM1Yq2sN2jB0guVZSCrDSIAQR3HxYqzeBJY4USeT1pFUB5OIXke54jYYqqYq7FXY
+qkvkz/lD9B/7Ztn/AMmI8VX67pM2qeh6KQP6XOv1jnty4/Z9Mj+XfFUtt/KsgnQ3UNm0IPxiP1gx
+HtV8VTT/AAxoX/LGv/BP/wA1Yq7/AAxoX/LGv/BP/wA1Yq7/AAxoX/LGv/BP/wA1Yq7/AAxoX/LG
+v/BP/wA1Yq7/AAxoX/LGv/BP/wA1Yq7/AAxoX/LGv/BP/wA1Yq7/AAxoX/LGv/BP/wA1Yq7/AAxo
+X/LGv/BP/wA1Yq7/AAxoX/LGv/BP/wA1Yqvh8vaNbypPDaqkkZDI3JtiPm2Kqmr2cl/YPbRpFIzF
+SFnLqmxB3MRDYqx3/Cd7/wAsmnf8jLv/AKqYq7/Cd7/yyad/yMu/+qmKu/wne/8ALJp3/Iy7/wCq
+mKu/wne/8smnf8jLv/qpiqYWXlPT/R/3JWkPrcjT6vJPw49vtyVriqexxpDGkUY4pGoVR1oAKDri
+q/FXYq7FUl8mf8ofoP8A2zbP/kxHiqPvrm+t+H1KzN5yrzpIkfGlKfb61xVCfpHXP+rOf+kmLFXf
+pHXP+rOf+kmLFXfpHXP+rOf+kmLFXfpHXP8Aqzn/AKSYsVd+kdc/6s5/6SYsVd+kdc/6s5/6SYsV
+RtlNdTxF7y2NpIGIEZdZKrQfFVNsVROKuxV2KuxV2KtMSFJUVNNh4nFUl/SPmX/q0Kf+jiP+uKu/
+SPmX/q0L/wBJEf8AXFWm1HzNxNNJRT4mdG/AMDleaUowJiLKCTWzl1LzKVB/RCnxProtT8icljMj
+EE81F079IeZ+QI0lONN19dK18a8v4ZJKpZ3fmGW9RLywjt7Q15uJFdlopp9l+7e2KpxirsVYf5T8
+2eVbbyrotvca1p8M0On2sckcl1Cro6wxqysrSAggjcYqm3+M/J//AFftN/6TIP8Aqpirv8Z+T/8A
+q/ab/wBJkH/VTFXf4z8n/wDV+03/AKTIP+qmKu/xn5P/AOr9pv8A0mQf9VMVd/jPyf8A9X7Tf+ky
+D/qpirv8Z+T/APq/ab/0mQf9VMVd/jPyf/1ftN/6TIP+qmKu/wAZ+T/+r9pv/SZB/wBVMVd/jPyf
+/wBX7Tf+kyD/AKqYq7/Gfk//AKv2m/8ASZB/1UxV3+M/J/8A1ftN/wCkyD/qpirv8Z+T/wDq/ab/
+ANJkH/VTFXf4z8n/APV+03/pMg/6qYq7/Gfk/wD6v2m/9JkH/VTFXf4z8n/9X7Tf+kyD/qpirv8A
+Gfk//q/ab/0mQf8AVTFXf4z8n/8AV+03/pMg/wCqmKu/xn5P/wCr9pv/AEmQf9VMVd/jPyf/ANX7
+Tf8ApMg/6qYq7/Gfk/8A6v2m/wDSZB/1UxV3+M/J/wD1ftN/6TIP+qmKu/xn5P8A+r9pv/SZB/1U
+xV//2Q==</xmpGImg:image>
+               </rdf:li>
+            </rdf:Seq>
+         </xmp:PageInfo>
+      </rdf:Description>
+      <rdf:Description rdf:about="" xmlns:stEvt="http://ns.adobe.com/xap/1.0/sType/ResourceEvent#" xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#" xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/">
+         <xmpMM:InstanceID>uuid:b975a92e-e779-4e66-b941-8d6829f37dd7</xmpMM:InstanceID>
+         <xmpMM:DocumentID>xmp.did:CB8BE22208F9E21199F6F1A6839C123F</xmpMM:DocumentID>
+         <xmpMM:OriginalDocumentID>xmp.did:8704653A63A0E211A02FE78F5A96C50F</xmpMM:OriginalDocumentID>
+         <xmpMM:RenditionClass>proof:pdf</xmpMM:RenditionClass>
+         <xmpMM:History>
+            <rdf:Seq>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>created</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:8704653A63A0E211A02FE78F5A96C50F</stEvt:instanceID>
+                  <stEvt:when>2013-04-08T17:44:41+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:8804653A63A0E211A02FE78F5A96C50F</stEvt:instanceID>
+                  <stEvt:when>2013-04-08T17:48:17+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:8904653A63A0E211A02FE78F5A96C50F</stEvt:instanceID>
+                  <stEvt:when>2013-04-08T17:48:17+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:8B04653A63A0E211A02FE78F5A96C50F</stEvt:instanceID>
+                  <stEvt:when>2013-04-08T17:48:39+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:8E04653A63A0E211A02FE78F5A96C50F</stEvt:instanceID>
+                  <stEvt:when>2013-04-08T17:52:37+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:1EAFFF7664A0E211A02FE78F5A96C50F</stEvt:instanceID>
+                  <stEvt:when>2013-04-08T17:53:32+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:D57B09C38AA2E211B824F907B8988419</stEvt:instanceID>
+                  <stEvt:when>2013-04-11T11:32:43+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:3091F8D377A6E211A19E8401FF10C637</stEvt:instanceID>
+                  <stEvt:when>2013-04-16T11:27:16+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:3191F8D377A6E211A19E8401FF10C637</stEvt:instanceID>
+                  <stEvt:when>2013-04-16T11:28:19+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:3291F8D377A6E211A19E8401FF10C637</stEvt:instanceID>
+                  <stEvt:when>2013-04-16T11:28:19+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:3391F8D377A6E211A19E8401FF10C637</stEvt:instanceID>
+                  <stEvt:when>2013-04-16T11:31:38+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:3691F8D377A6E211A19E8401FF10C637</stEvt:instanceID>
+                  <stEvt:when>2013-04-16T11:39:07+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:6293CAB279A6E211A19E8401FF10C637</stEvt:instanceID>
+                  <stEvt:when>2013-04-16T11:43:40+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:1B56DE7693ADE2118B9890C4E6C809D7</stEvt:instanceID>
+                  <stEvt:when>2013-04-25T12:32:44+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:BE980E431AB6E211A3E0C2898E6598F5</stEvt:instanceID>
+                  <stEvt:when>2013-05-06T09:00:18+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:BF980E431AB6E211A3E0C2898E6598F5</stEvt:instanceID>
+                  <stEvt:when>2013-05-06T09:00:41+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:926D8BEFEFB6E2119526A83C34FCECDB</stEvt:instanceID>
+                  <stEvt:when>2013-05-07T10:27:20+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:F19DCB319DBBE211871BC090AD078DDF</stEvt:instanceID>
+                  <stEvt:when>2013-05-13T09:17:39+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:4163605CEED7E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T10:13:50+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:4B62E001EFD7E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T10:13:50+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:4E62E001EFD7E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T10:21:16+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:5562E001EFD7E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T11:49:30+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:6B953E55FDD7E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T11:56:23+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:CC9F06D9FDD7E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T12:00:04+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:CD9F06D9FDD7E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T12:00:35+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:CEF5D6C402D8E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T12:35:17+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:2D62CDDE02D8E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T12:36:01+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:2E62CDDE02D8E2119A70CD5FD695EBCD</stEvt:instanceID>
+                  <stEvt:when>2013-06-18T12:37:36+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:6F5DE84B72DAE211AD5CB350602E4F7C</stEvt:instanceID>
+                  <stEvt:when>2013-06-21T14:58:41+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:E7870E4C72DAE211AD5CB350602E4F7C</stEvt:instanceID>
+                  <stEvt:when>2013-06-21T14:58:41+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:B72DCA467CDDE211804CB34FA9B54E56</stEvt:instanceID>
+                  <stEvt:when>2013-06-25T11:47:40+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:B82DCA467CDDE211804CB34FA9B54E56</stEvt:instanceID>
+                  <stEvt:when>2013-06-25T11:47:40+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:300C5AC87CDDE211804CB34FA9B54E56</stEvt:instanceID>
+                  <stEvt:when>2013-06-25T11:51:18+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:310C5AC87CDDE211804CB34FA9B54E56</stEvt:instanceID>
+                  <stEvt:when>2013-06-25T11:51:18+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:168B298289DDE211928CF09CC6637949</stEvt:instanceID>
+                  <stEvt:when>2013-06-25T13:22:23+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:2C64ECD289DDE211928CF09CC6637949</stEvt:instanceID>
+                  <stEvt:when>2013-06-25T13:24:39+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:30BFAFEE89DDE211928CF09CC6637949</stEvt:instanceID>
+                  <stEvt:when>2013-06-25T13:25:25+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:516AC6411DF8E2119575DED76A74B9D6</stEvt:instanceID>
+                  <stEvt:when>2013-07-29T09:05:31+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:C298D8411DF8E2119575DED76A74B9D6</stEvt:instanceID>
+                  <stEvt:when>2013-07-29T09:05:31+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:C1C2A2E420F8E211BC0B8F76F6AFB319</stEvt:instanceID>
+                  <stEvt:when>2013-07-29T09:31:33+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:AC7FB00121F8E211BC0B8F76F6AFB319</stEvt:instanceID>
+                  <stEvt:when>2013-07-29T09:32:22+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:06A4E7A7F7F8E211BFFECE6097C47D07</stEvt:instanceID>
+                  <stEvt:when>2013-07-30T11:14:49+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:EB0B288302F9E21199F6F1A6839C123F</stEvt:instanceID>
+                  <stEvt:when>2013-07-30T13:06:42+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:CB8BE22208F9E21199F6F1A6839C123F</stEvt:instanceID>
+                  <stEvt:when>2013-07-30T13:06:52+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:1D9B0E30AAF9E21198A6D9C6F64B61EE</stEvt:instanceID>
+                  <stEvt:when>2013-07-31T08:26:52+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:D2D9DF27D9F9E21182C1DB02F9AFF021</stEvt:instanceID>
+                  <stEvt:when>2013-07-31T14:03:04+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:D3D9DF27D9F9E21182C1DB02F9AFF021</stEvt:instanceID>
+                  <stEvt:when>2013-07-31T14:03:04+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:22752170D9F9E21198A6D9C6F64B61EE</stEvt:instanceID>
+                  <stEvt:when>2013-07-31T14:05:05+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:B40F1A1FEB0EE311B0B5A9E007286365</stEvt:instanceID>
+                  <stEvt:when>2013-08-27T09:34:35+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:C7C42C27D813E3118B4D820E19E5743A</stEvt:instanceID>
+                  <stEvt:when>2013-09-02T16:01:24+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:C8C42C27D813E3118B4D820E19E5743A</stEvt:instanceID>
+                  <stEvt:when>2013-09-02T16:01:24+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:7286B0B7DE13E311A3269133EA1818A7</stEvt:instanceID>
+                  <stEvt:when>2013-09-02T16:48:23+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <stEvt:action>saved</stEvt:action>
+                  <stEvt:instanceID>xmp.iid:91493AD2DE13E311A3269133EA1818A7</stEvt:instanceID>
+                  <stEvt:when>2013-09-02T16:49:08+02:00</stEvt:when>
+                  <stEvt:softwareAgent>Adobe InDesign 7.5</stEvt:softwareAgent>
+                  <stEvt:changed>/;/metadata</stEvt:changed>
+               </rdf:li>
+            </rdf:Seq>
+         </xmpMM:History>
+         <xmpMM:DerivedFrom rdf:parseType="Resource">
+            <stRef:instanceID>xmp.iid:EB0B288302F9E21199F6F1A6839C123F</stRef:instanceID>
+            <stRef:documentID>xmp.did:4B62E001EFD7E2119A70CD5FD695EBCD</stRef:documentID>
+            <stRef:originalDocumentID>xmp.did:8704653A63A0E211A02FE78F5A96C50F</stRef:originalDocumentID>
+            <stRef:renditionClass>default</stRef:renditionClass>
+         </xmpMM:DerivedFrom>
+      </rdf:Description>
+      <rdf:Description rdf:about="" xmlns:idPriv="http://ns.adobe.com/xmp/InDesign/private">
+         <idPriv:DocChangeCount>1115</idPriv:DocChangeCount>
+      </rdf:Description>
+      <rdf:Description rdf:about="" xmlns:dc="http://purl.org/dc/elements/1.1/">
+         <dc:format>application/pdf</dc:format>
+      </rdf:Description>
+      <rdf:Description rdf:about="" xmlns:pdf="http://ns.adobe.com/pdf/1.3/">
+         <pdf:Producer>PDFlib PLOP 2.0.0p6 (SunOS)/Adobe PDF Library 9.9; modified using iText 4.2.0 by 1T3XT</pdf:Producer>
+         <pdf:Trapped>False</pdf:Trapped>
+      </rdf:Description>
+   </rdf:RDF>
+</x:xmpmeta>                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+                                                                                                   
+<?xpacket end="w"?>
+endstream
+endobj
+6 0 obj
+<</Filter/FlateDecode/Length 10>>stream
+x�+�����|
+endstream
+endobj
+7 0 obj
+<</Filter/FlateDecode/Length 36>>stream
+x�S�*�*T0T0�B�����i�������y8#
+endstream
+endobj
+8 0 obj
+<</Filter/FlateDecode/Length 10>>stream
+x�+�����|
+endstream
+endobj
+9 0 obj
+<</Filter/FlateDecode/Length 36>>stream
+x�S�*�*T0T0�B�����i�������yA$
+endstream
+endobj
+37 0 obj
+<</Filter/FlateDecode/Length 15>>stream
+H�j` 
+��8��
+endstream
+endobj
+36 0 obj
+<</Length1 24320/Filter/FlateDecode/Length 12050>>stream
+H�tW{l�W?���;��Il'���'q�8�c�Y�IV��AR��͘� M_jH�G�iE�2�$��CT�!!*�$�0%
Z�Hka�H�*61��R �	�s�o�!Q�������{~�k҈��>K���Ǔ�,�C4ف�}����`�о���?4��q� �w��O:v���KW���m�xxf��w�}�Ѯ���F�=bɡ}��ó�΄���׈l������n�#2Jg��9n|�(O���?�v��`�hd����3�_���Oc�6����H'�mX_Ku�_��O)5��e�������[�][}��$H
�t#�p8�|�����3��<�o��"��՗/Q�Z"�/�X�ڲ{�"q�2E��N:깣��٩��$���X���}|�kOt�S��֑��j�h��fo�j���Se��j��tk�o���3\�
+�ڒ����PM��u}����<���0Cfe}bc�hh`�7������~�����$q�����{�����rሆ�� �=���5@3�
��S��,P�_".�-.��k���������T{B���d�����1i�"&V��@�!���r��R��K�a�2:
,��]�]c���[s�m��i�������}���]s'�,���J���Y����V�,F������[�pN��g��qe�UUR�y��������y>��=�b��Y�12>�����ش޹��qw��
]¦$��Ƶ,V/!6
+V���D��1�������c6�nP��خ\��!���ݱ�{��� ŵ�%j��	�'��I���-�#ϫ8�C6/�T��x@����/g�1�1N	��2A������bE��s*�9�o'������sB|N�ω��	�|� ����;���$6���N`����/��*<�E�n�֊$�F�pK��@��&�v��3@QΪ��Sɬ��h�D�J-��,ڵ����6��V_&��j����O�n��Tq|����x����c��s���ܢ囦w���nڐ����}������-[���m�ʄÙ��	�D5��_���_���
+��I��*0��=��r����2���"�)R��(�I���눾BO�|�,����rE�L�t�|����N�)�X�*�u@]dT5>��<���Jԇ� ~9נW���@���q����#/��0ݧ��a���Ma��Lx���W�+�s�Q�
+W;:�S�؄�M%c26!G26!c26!c262VY��ʵ>&e/wx��q4�+6�*aF!̨f
+�B�Q3
+aF!�(��0�B��@��O(/�WΝP�r�y���ʧ��w���*I�P���"o�Ă�
�|�)\`�s����+��D��v���љ5F:�H�m4�=���v�E��Y��l�i�����`���V��!hHוi���ӃQjG">���jܘ2���@���!��%sa�Lp�����٤�t���V/��`0�`��
+β�S��犄���8{A渂�Hs�נu9�J_��
+��[&�P����#��g>�B�YX,4	��Rh�)�uu�9&Ҟ��	H{Ω���a��-(u���!"	1�W�{6+Vd���կ�^�"��@���U��u@��jǡ�ա��c�pE
�t#�p8��}�2�K#3����p���A���=eM��$�i��W�JR���UN?�;��K����R��B蹰���B�z.���炁�p9X�]�-�k�5a��*O	�@yJ�<%P�(O	���SB���SB��^ض	InS\F�sH�7��(ʑ�d
Y6��V��C�;S�Z�N�m����p�
��L$i�O����t�h���ܟk�JfvgMmR�>��T7u����%�,��ꩉu�R}u���D|[�^�gszg&�}�sˮ�H*l�"nC<���@�P�?��e�B�~}+�-��K�W��˱*���%쵒��X�Eҍ��x�����Vq�@�w��U.����^QqQ�I�xUp�!UT	�s*O^��Jy�Y��.t����(��]�a���&^a�H�Lk)m\���ݷol�-m��h�\��+W>��L�S#�Q�e�����{qc��cdpF��L�`��d��E��mEr��ySMުȇr�߽��d޸��
&\u��ӌ��%�¼y�P6?2��`Z��/���$�a/��jث�䠪#�3*m>�J�hi"�4F�#M��&�Ha�!�H�m��$��"�w�b�%�����3��N�[��[,�=j�G�kL�E��!M�dv��K� ~�D�¢myL���Q.U�^h*n)�7�'Ƃ����4�M�i�_y�d,�rW+u��c%�\�U&\���M�G��#�����?~��,@�r�A'q�.#h@Y4��8)�n+��%��i��\Y�9��G��fo�4�Kz���qU�;3���ػ�:�w׏�g�]{����z�t�q��M�y�$$MSH
+$)���.R#��R�HAi-�
+$��x�PK��P
+%?"��PEm�sS�?��:�Ͻ3s���|�/�~k�mR�m�TweOř��P��҅��R{[f<�?ݱaZ۫�'z����w
n�^�(lJ䬖��D�϶�u��#T{l� j�%Jz�"µ�avwk�/ϟ�Z��(]�6!������ǔB >����
+�!����O����Y�E"BZ�_�:�"��������S��9/�]x��"�"�]D���w�."ćT,S�l�����c��I*�Wj5�ߧ�W�g �
�<�� �����<�g��p���mՃ��5����Qn�i�=Q�E�aL����u(���I��%��<�XJ���-�uL���v0� �xҁܔ��r�P��z�g�-���ڙ�hD��؂�u�ڂD�˚Oמ	T���T��w�sgK]���k7��v�y��|$�J�h���JVo1�M���Xavo���*���m3�S��C�{��BGv�4�2�=�x6Nq�����K����o."L�*�Y�4�F(��[�p�>�H�ą>x(�!Wż��y4[��
+��`����*��!�BH���j"C��)]�l��+C���n�@�P1���V���/��kO^:t��5:X��l�9n���3�%���ز�M�ᴔ�j�U�fhoTq�6RQ\%���?{�4"�7���u/����4�9/
�
+@3b0�5�4�����m���+�B\�x��	��T��0@#�a<"��i�#?r�A��M
0x��~̋��O�li��MΣ]�ξ����/0�Q���?0�(z�w{�O���nJ\�g�m*Pq�5;�S�3��u�%��c���j����*����P���c�!9%(�`�Q��VT��*��!L���Tl�NݓA�o�����xv�����!����d��c�ne��Q�Ƨ.��)�s-y�������,?���ئH�t���bOٲ�/}GsY9���0��Tܞ�{V^\�kXiW��f�z���������~V}@Kk���Y|�U�C
�	y8�<Sh�Wfb�'Q�V9��:��vv]i]Hˇ�IWm� O���4g���fA�D�8�1�a�%�w5��� ����A��������ܠ�
�̪6^fc�66hc�66hc�6X�u������_w��0�\�u�-����3+N�j/l���p�H#��MpKɳe�f�j��xqv}{����f���N�ʏO�$K�����]��ώv�Z�R��7��j�p�uc��5�7~p:}O٪���D��`t��p��8�u4!P��:ys��+����&�h�z(D�
+�3�:������w�
+������8�a%��u�!x=�^a+!x=�������C�zH�)�v�Yv;����W&�!�Hn�?.u<Z�y{����g"C�B&�Q�5�0��s�Z���;ߪ�[R�=ӓf��מ�uzn�����Ջ�$Vkݢ_��EaH�*
���;�b��E����&
"T:C BōP8B��Ac���n����������/E�K�׫��
+�g��U=���܍WD��t/2���9��^s�dՋ���վ��h�{�"<]�S�J����W��|7q티e�-��ȝ�;t{�f��Y�̖e���Q}E͠B٬�]��<����R$��<��
+[�,���H�O+��� �cȷ?ŕf��6��
�gs��O�濮���+���ݪ����NIq!�oʊ#��P�CQO5�h�?�1Ҏ�բ�&�W;��nҪ>9>՝n�lj:������qN�3�&֬��~������@2=���\s��+��9�u����|����b߭����߈���E�e+U�-)��D��:ϼ{���3&���3�͠���#�Ą~q�Mj<]Sl=�S�zO�ք��Mb�����[Lw�+����i���J�Q'0ț��	sl��WFrT������d�2�2q���o�%�E�/��7�gp��G�J3�!e��m@�۠h!��v𻮱�|R�7���	�f�)��	�fB���o&����̈́~3�@~4M2B�/���C�x;̨���O������='-�3�s��G���O�l)/�D�@M �˪0��"^{#4wO��`"=�eǖɴ[ݟ_��ln�U���35ғ��}*=�aO��w&�k7�[א�Yf<K8�Gʳn{Wenjp*k�N�9�-���|q���.5
+�A�w�V��E�C-��rV]Uz�v
w�.Z�0����_�*�l��֛����s��j��I!{�ԭ�a=���~��C���&��K�
ԣv�0����C�2�'�=��:#������z�#�'e.��mX�F�vA�}��A�YN_9����2�e/8��r�A��U	T��"׎�w��&0t�)��a<�që)3+\�':kw�]�r��!��}�Sޜ����}%9�H�$�}i���pFi�D�'��I�}yEςM�Hr��y�m�x��_��B��lu�X�� 9d��lu�����lu���z�<m��%���V�]h�d� UJ��4�7�*%Nk4��.����Vz��6�֞�5�$���51�l5�cŻ��zjͦ��n�7kh<ՐZ=����h"ˬ����y�Ķq]��hDQ���(��HJ�?%�#��E˱�ꓸjbږ\�'����
'm�4Px�EP�MEQ��U]�ȪE����EQ�u���|����\��mj�YWwf��w�y����3!g&[���N��7J�u�/�����ݍ�&��?��}FWOx8��uE�\�8[��98��+&x�4�U�ab@\�
+ʻ\��u��u�+`t+�C���BP��G��m`�Kho��r���K'�r{�kV�7�]B��
+^��0���Ir^�O~���(.��!.L	bf�������M�[������L� ^/�����x� ^/�����x�L�^L����ihz��⛔揻���ѐ���jK]�̫g�N�'�'jŗ�����Te9�>��;u)�xfv��j!\<0��gBsm������L7]qr�����P �)��C�:��I*+|�
?��!�@�1葸��U��{��H�bD��xO����"p�Osx�+ɸ���S �
+mU M��s��AJʛ�!jD:D3 �+w$��(lsT���Ya�Sb�
+>:�N�G�@��Vm�S�q�'�,[�w��|��7c�ek��\:�~P���{����������C�é��t�k�#��Rcg��-����9�R貓��Y�t��n%�ݾ��5��x��<O��#5��)|�Os��A�"7��4���Pp���(�bk��G��J\�?�i��XcrH��[��
`��* ��u6t7dM%q�1��^lp֖�p^"L�nG��`/'�N<Λ��h�3�d�fnj��.l��>�c�r���
+3������2�/��2�%�F�nR”�U�nRx-$�-ˀb�X�6�M�e@�(��2�X�P,8vĪ=B|"z�wD�n�́�9P7��@���usX�!ˤn�F��4�ju\���3���z'�7�)P���N'G�̀�^�����%���u�Љ������7V˻�uOb:�����n����n-�U�_(��KCSeڧ�����)��>4J��x�U�m�x�/4�;����R��m��G��k��x֧�O%����6���}{1����*�
�|&�3>��|&�3�	�L�g
+�T��j�&�2�k����D��.�{��S�0��θZ&w�u���bO�'0H�Ν���V_ڷ��c�C���V�BS��4%�Դ-闓BIP
+�W�����:��~��s,Wd`�d����l�fw�!�s�&��u�0!(9���X�]�� h�Q��W���1�:V�y#��D#��K?#���AG�}�p̵�T��)�+�3j�;�U�|u�:�"�ࣉ#d��8B&�,F�6��I�2q�L�EG��2�PC�d�gk�%_�#�,��2��q�3�"#��6��|&�4Du�;�L��L��Ip��Ss*�.`N6߻eo����*��tvAg����.���.���.���.�����ZPN�T?�U��[es Rv�z[]%��6^��^�����x8?���@�0�=:�V�y`�刺���\�
+9����e�*���C��|d��//SQ'ғ_:原^+
++����N��'�u��(�TՇ�P��qr]5��8A��>jeT6Ѫ�)U�QG��m��4J�Gc����6�ݴ�U�D�1ABC��4��t��%(ٌ�
�l�d%(�@�J6P�J��%�n6�o@��'#@�����=Z����ze2[��u�ύER�C����D%Z�s;»����}�A�����B�i:�������Ak�h[u'%:eUTI[$k��ߕ�
��!��(�B|M
*��B,}e˹Y�����|��i>�/�~��=�Fu��h�Ȣ�\��=g�L��M#b���y��v�d�"�b/3˙�s�N��r��M��S<uz֞y�nJt�%�j'���7���+��\3�vY����?���� zU�۳:�s�Sޝ��Q���^E��cކ�|���|e�����rJ
+��j�r��U��RG���ϔ}����Ρ��+�y�7kD���~���j��A�\�\y�+=���"�y	l��$��������a��ꕻJ7������E4�(��
+��E�жi$��Ɠ!�aϽ��ۧ�7E|c�N��[A�x�
�pC�9�q�z�->�0�J�%�6�Qb�-�i��2/�e��Ba�s�L5�0�3���k��S��.���>�Ԧ�Q��lFR�ִ��L
�����w
d��� 3��3�kƋ��&��V�:ָ���'���6>U_��V"�;���Uc,L����D��-t��1A��<h;���tK����C+�C�����P"�m�/���g!ȫ)j��5�Vc������q�h�G��B�ܶ��֦�W�#���O����o��UE6�!�O���e�%���jh�ьMZ���O'�q;S:y�hGѾ��Z�t�
+��"�
+s��9����a��	��t�!-t�8���H�����!��&vя ��C��-���]D{�K��Cx�O�
+vj���u(����<�h
x@Ѐ4�ɗ��{@�^"܃�{�� p'	
+� p� p��Ľ�n{-K���~S#��N�8722W�������Ʌ�ᅓ�ͅaU+-ON.�J�S��S˥��r>��U������U�gv}��ٛ�Y{�u�;kǮo�S�8;v�(�������؞fwg3;��*Txl�P��PH��P� EB��
+��J�R#�ZTE��RR��?��q\<���7�9��r��|c��}�=S]���Z�{Cf·ѷ��HW3��j��B��
+N�"\C��n���D�iP�l�9�9�ٍ1�1�q��X�P���	D>�T�1+Ct�B�����o���'4QЋ�/>ܤ|�a�QN�.�Eb��fT�7sb
���Ċ�}'1�����58a�_e�K�w�z���� 㳾�s#g�7G�����wW���8��sdg0��LW�H{�W|���kjnppn��pK��O�ߴ0���<2��64!j����_ٵZxC�r��C�z7��CT�Wq5���QEUR��\���JYOB	u5�8����;��I�.=:�;�}��������߁/����Ny��C���aF��4 U����RYk������B�E-[�C���cdi,W)�\xKA���)�5��/�0H�'a��%�U`T��~�I����K��ڌ����������q�ҫ��K5��]BJV->Ao��I��	�A�2|zŞ/+���	��=
+�%#�`DD�;���my�_��ՓO�8� M����]���%���T�C�<m�W�?�VW��ê����rG�h��B�P!b�B��ꍡB�P!b�1T�*DLԴ�܏t*���/P�V���ѕ�/X�����5��MP��zINJ(#6�����~Y�J�e����*	�5������;�6F�v�G�<;��
j�n����Zwׄ;�HK�H�q�;�1�\Gױ�v�[ѝu�%�����HUu����1n�1���)+�����*-�����w���2�G}W`��o"m��xd�"2*�S��I�	io1�+�	*���*SEh"����^�.%�ҩ��1�����qڊ���z
)�?�������BquC�c��;Z}�r���[�B������v}r�M�s�����a������$�,�h�"_Q^zeS3{�F�
�����6�lHײ��T,�'����&tn��*R��#R�t�� "%�H	"R�*R���{����cպPT_0R�����m�re���z�����T�+����~����W�#Hk��~Q�&�o{��3ѓ��[^?�i��`BzA����&j&֯�r��&�D-��Hܺ�nP��U�z¯B*5��B*5��!��RC�!��Bj!5��?*�L�������g �{��j5q�$����������OO�l��h�hiK&g}�>�oL+*:Z�o��zxlUA�o�n��nB�
+@��
+(�=���?|���j�[T�D?M��Ed!2i���{h1Y�r�Ⱦ�(}U�O�4�S����_�3����ux�.Z��%�G��3�_+JWD#X����a�ie�V��;-ZD?�!��j��D�X�}�)��/��S�䛂H�v ��%�WI�D��1U��<�]5�����o-��t����>�H���[|ރ�&|�a���?z���W�k�5X�����K^~jW�+�)��?���Zy��h��
�����ρˁ?l���C��3���;�nݧ�߫�_���q������;ѻѻ�#��ƕ�}���{����ݭn�*QztF��G|{�?�ex�ʷ&q	�_%.վ᯦(,Ba)��Wb]{��Tb�V]vHb?�g$.���%����T��W�0]�p�
o�2���황ԪaeN�Ji3�hg,õ̤9��g�K�D��Lc�q���$�vf�O8����̈́���e���sF�ɮ��¢�׷�ޞ�>c�0�����Hzn��XYY���T<���X*e�9��e+7N-�М3ﭘ�E�4f-�[%a9�̐$�&�9F0m%������mBI;�H99��<����-����-Bl&b��9cŵ=��f��4S��ƍq%4D�;<4ԅ����3ƒ�Y�yV:gLB�n�qM��#�O
+�O�E��R˖g'L�$��V�t/�	�N�Y��ݥą��c�e��#]`:�Z�����V���]giaq+U��l�G*�3F�5���%��%�g;��4�'�N�9���@#�G0��q���c�[J�}GS�&��Y	��H��>wf�3���֒5}j�I�9��̿��_�f\YM��-��m�*���I�Fa���RxV1gi�,�,e���"ve��&VLm_X|�NH�w��h�������/����h�B�@54�t,�Jj�2�*.l�'����2ߋ8C���ѫ���ړ��f��
+:Z�4x��e�@cOO\r��+δ��ohc���jN�!��O�"β,JNs�ig��SI��f�{��.,��ܚXS6�o�N��2��&��x�Ö��PsI�'��"��̡�tr�Oܹru��dQZ{]�{Vj#��\��'�ټ=M��?��49�����{�a��X�ð�:�ck%���N�1'i�mR��������NY��+�|��'�R��ao�=]X���/�4����w£S̟:�� ~�b��\A�e��#,#i�,��6��b.,�����eK�$O�U�޴�ry̥��ǡ�[S���J.�M2/�����#����H;�i�GD�Z�u
n�� ۇ�Ͱ�=p�dꊲ�H��6�)|�dL��ɓK9�w�p��:��~Յ�UD�3s�f76?��������7c�mZ�v��lv�O�{ci#IMS���VJCXP|/��������Z0�%E
+R��B��PT�&���n�������sf�̙�33w�ɠ�<0���d�An�[��,�G�3���뿜�e����g��X�E>2uV��S��ST��{W���g.��&+N�o�H�Q�����Wn}G�y��":/J}�
+���ro�����M�p�)��9nG��7�T����=��~@~��f�F4��"�<U���+���W�<%Z?$|)�8ޓ'���B"�f���ZF���Óv�ce�쑹�5l�zr.gv
+E�ɬ�k�h�^��Q;��9�R��@��zLѓu�ᴧ�h��?1�IY9c߸=a��\,�C!+�[�s��`�q0��d)�+͞�~�\F#P�2P�d��=�[�f�Y����*1O����3�W��ҵ�
+��=��YNKUGRSv����kNS<>����C�Q��G�@���j߅��6��O���Z�y�0Ϣ��+~#j����4�ޡgu���^�~�B�[@_�2�M5���>Ȧ�9�#Ӵ�Ƈ�ǁf�����(��ze<�����7,�_v��RF�S�^��-�딁
��OM�<d��4yZ��ː1���ԉy}�z�u�T�r
�M�3�6�֏�6�uu�	�hSՅ��hGPW�ĸ_aC���cc���<�Qiz����T�}h��Mj�UT�v@}���x�lP�}�ĸZ�I��?�����z_�gi��{�f{�=4�Q�%�n`O�澆9��
Y{/�0^�]X��_�3t��{9���=/̽�X4���E]E�8ֈ3�з�s�A���01/*>(�D
+��Z����8ux؏q	��0.�r�g��{@�q�&A���R�+a�#���rpܗ%gb�`<�����=
�~|_����|�ډ�����!�|����s���6�V�A�uP�u���Q��}��ey�l��WD���#�~��:*�T�>� ��ՙ@|(56R��n�
�sF��Nj���ȫ�ҹ��O����=�r���A��0'ee̤�'��~w�P��g���?�uW�50���_�=����Y�9�w�N$8�q�/���5i���r)�௫���㦻�
쇞[��2�y��A�UO�L�9#v������$��K��x�\�|�o��t6|S�%z��x��޽q_C��p�w�%����%,$�4�s�ѽ��+�3�g`��=�������Iu��@O��b��P%�G��|�D�4�Yw	_O�hߍ
��Ƴ���-��V`��6t��� �� =�廨�n���@���/�6j�i�I|�
+�������f
+endstream
+endobj
+32 0 obj
+<</Filter/FlateDecode/Length 354>>stream
+H�\�݊�0��}�\�E��iA��V�b��>�5cWXc��·ߘS�������LXT�Jw���\hbm���q�ۆؕn�b�T�L����M:�e'�+�A����]����꠆+����*�����WqY��r7�z��X�3E�{�6�uO,��M��}7���S|Ά���fP4��![�Y�Nβҝ< ���s	۵m�k�剓G���t������$A[P	*<%�	�(�ΰP��L��B�>�;���A)����#h*@{�t��u
+�M�VĠ3��J�$�%�NHtB �D�2H�!N�:�c��h3X��v�=7��[���O}�w�鹓f0̹�/�`�Hᶫ
+endstream
+endobj
+39 0 obj
+<</Filter/FlateDecode/Length 326>>stream
+H�\��j�0�s��;l�����m���0���f�x�K�&(<$o|I�W��V�B���D�������jSǗAyaDr�]�+�V{�
7��X�~��;8/f��ANo=��H6����j��7W�xd�P@eI�{��K�_ۑ�w�]-����;����j��9D1I�u+ش��^ا��l��c%��Gb]/�[��vzDA�TA{�UNa� �b����J�'(��P��g���$q�B(�bkfX3A����:Bh��uAg�s�N�H�%G���tܦ�wﶽ������{l�Ԡ�q���ɦn��+��]&��
+endstream
+endobj
+41 0 obj
+<</Filter/FlateDecode/Length 333>>stream
+H�\��j�0��~�=6��_E
C�$�Ch�p�uj�e!;�}eMH�>vg�(,�C����w;4g�����<7�0]��� NHu�t'�o�����q����9��8Nv���.�
+�7��v�JO_�yE��f���'��(Hq�����B/[W�ջi^;�_��l��1�4����
�Z_9�#w
+�O�k����e�����oO]{%Q�P�y�b�3HxJ�'��6��S��L�2�l0%ہ$�mAG�t��D:�b��;H�A���O�>�I8p&��8�/��G^��RK��_�}_�B]��H��Y���'�d�i~�3r��~��ͦH
+endstream
+endobj
+44 0 obj
+<</Subtype/Type1C/Filter/FlateDecode/Length 493>>stream
+H�bd`ab`dd��t�t
+q���,�LL	(��
JM/�I,I���a�!��C�G����_�~��~?������B̌�,w_dd:����g�(ZZ�H0i��`d``�&MS�R�+�KRs�<��
+�KRS�sr�F+����!�R�,VH�,�H-RHJ�g���(�%���&e+�d��ix�R��S�����	�����R��mI�/�+)�L-��w�,HU�PHIMc```lg`bd�����{�7{���^�n�?�t�}��}T����ɬ�_��V�}@�;��g��80���o ���~Q�7���S�k��Բާ7>�nx��}��	?�D=�5�~+H�v�.����I�w��������z��J껆�S�9Ϟ�e��;ٕf��~#��ę�k��p�᣶��Y�u�����4��;n�w�i{��)����k�o���호b"�܂צ��y8�q=�~���`�t���
+endstream
+endobj
+29 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 409/Subtype/Image/Filter/DCTDecode/Metadata 47 0 R/Type/XObject/Width 78/BitsPerComponent 8/Length 8335>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+�����N"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?��i�W�
�>�_R�8�(p�\����l2�qsb���C��;F]΢g�0���t���/l�]�P�d��y[�g���;��]O�G��+�\&�t�vGq�������e����C��	�`;�)��>j���\l��G/�PӴ�a�(�F^�6�v��O�����ĭ?�53�L����6���pNb.]{6�W���v�8m0)*J;�mJ#P�됄�@����
):p��^�Ќ�F>��1��e�]�@$�������0r�Sp��U\S�&��l��Q�#� �!�_�0���)���\�p�����}gL�7x
F܁{���rr��?���}2��B<�Z�d�*�D�jö#�K�٘�U��Q�V�DF�I�����D�27�2�ؾ��Ua���w�}F:��{a<{6dǓ���)<�5���P0�2���l���Lb��eG�:1}8�[׶~�U8o���qb���O�½I),W�tp	��1�Q�K�g^-�˦8��:�CvC�8��>خ���^��w8l��/gSx�VԒa�b���8�r���/�G�h�,�8W�*�b���5��R�{�j�=@ø���|.}9v\G��M+���l8���5a�]�y���~�&����'��v��9����Y�5�G�p�K�o��w	x�Ɗ��	�px�6:��n��>��F��隔��x�(���ŌWKփZ��`� ~�J�#�y��}�J�c�;J��7є��#����.6yV2c�K�9��}����%:���Ezu)�Xd���8E��Z��	t�e���{�_���f�0�
���a���f^M���S�ė���<�Ya��u$״����#�$�*x����$:�4i�5�|���?��ǚ;B�g�r��_f����[y���편��N�0L�M>��r��巩�r_�$^c�nml5����Ad=����~�EX���룚X�67�R������f|@�cl5�z���Oﭠ�����E�x4�-B7���1���} y$�~D҇��R�������e����!=y��yگ	�8ojj�]������Z}�ə�۽�n �b�|'Q����
d��=p�M�6�1�;\���ʐ�NFH���Bā�YcI���u8����k?H�r��M�W�^0O��߫�A"����'o�J���ա��)u�lj����1 ����u)���pC�G��}�D��dN+��W�[n�R��q��3y���h�Z�o"~�]���_��+����}6��d�Zn���d��$��u�8���T{0��ī��ֿhh� O�H���y����An*;�-z`�\�q큭��(�;m9��c����S�ēۉʏ1�L�/{��`ǯ\�.YG��p��}�8ae+�#Ɔ�sB;P�	��A��b�0�/F�[*�>߼4�;bB�;����=BG,�A-Z��$ZF�qg{)�^u���nLXt�z�ȫ�kr�}3Q�87O���Cw	�,-Pr���i��T!,d�c�Ƕ�����[��a�N�
+پ,8Ռ�W3O/�����߶�Uz{����ie�i�{�>��>��?	��	�9Q���dy{��QLԯLp\�r�\3�"{y-���}Z0E��op���Fb���0��k��{�wb޺'3NC�o���#�ޛ�Lo���!^�I�~zC��,�5��[1�̎�X��^�̲F;����hT���:�F��*~�#�Ls���\��C:�FD���z�oPP���,$�G�{Q��?�~�z�މ�5\>��V��`��8%��{�>`H}�P�f�2�)�p����2�b�G?�Dx��8c9j�)���>Gq��N9�[���Ol2�&HZ�B��:e9�?ap�p���B_��a�}iq�,l�Q<-o��o�#��dqn����>wH�xX0��9�����!
�^ng�:mnL_�A����2CY��a�?��3��~Xf=c��A�����)���f�d9V��2�LQm�*��Pk��II��a(���u0m�	�Dy�˾C ��5戔~ϟ���
+Gz�vYF�OLu�r�I�*~c���\'U���0T�&a�|�l�;�����G�C��/|O�)5��%0����am8NG���U��'�D���iQ���~��F*E��&?����(�#:4�c+���dۨ�L��S.�Y��NPǧC�8�1�`;�J�#�6�yC%�����7.Йm��FO�0��rw�
�0m��~�_՘�?��.�]Y;��z%�
+]p��7���+��c�!#�m�G|�я����ٯK��2�(��覫f��"x��������e�+"���~������x�z�Jȟ������=ݛY�r���S�GU>�MP�t��b���i�h�2?�O���#50U�<%�N��b�D�;Xd���T�:�t�l�fI-v_��L3���/�����<lU���
�+-�ю�I��va�W*�7�]�1�g�2���/I��z~~�n�(„
+�~���3����w�GuQ���+������r>��t���W����)d�e�~���'��d�L�[اIcY �u
+������/R���&����0V����q�Z9Ic`�~Yd���^�����C��D�z[���VD�</�t=Q©a1�:gX���@y�E6�-�%�U5����Q������Au=*���m"�>��W�2/j�n�����n�ՏU,59�L��:���}�t�WY�*hq�rV牂��sєV���W��8��b�c�7<�Zۗ��#=�F�x�d*��{��25x�,%�NAH� �S�� �L�ܚ���i�3�f��éDcj�����1��?N��Ξo�K��$��
+�鏏�!������0�X��{�s#n1/��5�~���O�c���Z����[Kk+E*�e4 ���fM���q�^P!��er*S����~�z}W�u�1����CZ]�i �#C��v ���ug�{N
+>�>�#�njL�~�ݓ��Ed��&�P�5؏l}��w�v#$H�3�y?y��*������p�"��g�$S���Jd��O�R�:���I�Z�gvc��:�����"�h�����vZ]P�7�O��>�(����㄀tEI�n�yH"~c��]��n�(�3[�*x��ZWӑz�??<��%�^���=7�vcۊ�#V�#��B��.��%�Ҵ7n2lk^ïђ��K�,@=Vi�'k ���$C���	���K	�H9���T����'Ծ��O����/��G���\JDue��J���+J�
�b�Z-��ޔ+��ʻ����v�L�*���
+��5=H�9����g�o������$�=)�������h����,��߶���� !O�D������j2����IKK�.t˕����?��v�]Z��?Li�I���2x�즿G݄����ud'�u�l�jϥ\�E�m��g��C!��{�'�
�៮�?��	d��J�n�p��[�L�wf}[���_
+ֱ���e)����ݏ1��lG1���vG�A�Imo^���[�c���S$r
+�|k��_|9�}k˒�;	�$S���7�:�V����c�;��o�K�daz	g�A$D�3�q��}8:���e�\��ۘ�J�d�v�s�Dw��Ȍw�t�H��ӡ�>�C�a凙`3	�
gp~�Ž?�e������r"��8�B�B���̥߳چ��iR�6BwQ�����c���MG������Ώg{n�u�k7��2[�;�������W�0�Q򍆤�X��cr~.7=�3����U���	�|���b$\H�?�T�����ݬ�����g�����7���j��h�(Z�ʥ����S٩�q;�[�*��t��C���F�����Z�J��^ �t�:�;/т5ˑk�	�����ڐ�Z���M�,�s���~ε���j�\�RU�$e�/�u4!�߾��B��K�3��~)m��Yz�!���_��t��d��^�_J�� �Q1�j|2F�?�a�K�=6#�$�`�.d��V���#}�?�ul��Ev#�Ļՙ�x�ۛю��{��}Yۧ%�k��T���[�p�}�gV�E)����dA�B�h;���WXj:Ɠ"��N�0�סې��0~��#69o(�|}�$��Sh^f�����h?��n�{�Q��o6�+]IA4Ш"U�D������0�K�ֿK�mr�+���
+r>uɄwߒ�o����*��O!�>�N_q���N����ߏO���O�r�	�Hzn��?ԯ��h��ʧ'�^��)b�J0e�S&�rh���*#,����X8uB}�"��:�f�ߓ5(/�*=@�:r_p2!�y�ɷ
sO�В������6��W��N��pӟK��x��bO���ռOı�/���*���ᗡV�)�d�.5�V���@��ɏ�=9v���
^}7W����%y5��	?�H��~��f<��?�me4��}��^��=���0���]k����e6�.�DZ��~�?��nԯ{���0��ɕ-�j1ͷBIپ�#<�ʲ��y��/�ŏ�t;|��`��=2��O��"��R���˷���Kr��vZe�l�$��&!��Xx�����
�^45Z�S�_y&����nF�p���c=5#�=������y\�R��V����NTW���	Pk�ލ�u}WF�&�o�e5��@>�ҿvLl8|Ғ���q�qF��W�>5�?v@�I�˶�kӿ�[2�U�"��5��|�L�rJ?L��mŪ�`�ID]�;|G&{}�~_y��Y�ƃ7�9��$V�|Q��q�[�c7:
�����D���U�����00'rw�M��q����YdII�����;҉���f&}@_&�ǘ��l�㏣���k�Z�r!Ԡ�����w�=:��4�z���NP���3l�������
%)"��|��,&�����|Q0�Z������u��$��8$�-�BJA�Dl�5�ח�h|E2b"���1�g���o�5[u;��0=�7;��w]�:f�SJ��eV��ֈ���3���1Q�T���x�V��ŤT�5#�6bry�7���XW�`�l��,�X��q��#6�A����mm/�3I9X�x��T��r���Ȏ�$�Iڸ&��P�~�I3x ,�i:+���*�#�\q�yDO�'
g�{nmwxTS�/#�O�"��������F<C�[v�o�{ر�O*��gm��-$�=�ۊ���i�?2����W�P>���B���Ҽ�������RH�F�r(�Θ���^{��[Gh��WM�R�
+HG����C���O��	i������������O�F�$�M>E�B=�9�œB�N�.x�kg,�N�+���^?�%�>�	p��z7�P����'���'
�����7��i�[VZ{3�#S��|.#r {��d�#\\�P=z�˛���\�������/�E��^��s�$����voJ#���Da���i22^<Q�5���3W�QH��,��rwrm��Ū�\�Q�cbC�\$c�=��&�J�����`|�S��k����$۝�J_��s��1O!8P��x�$���q��`G@e�����+�k7������1���>@l1�z;|(M9��n��_�"r�J[yK�u�N��]���fD!vV�n�-�������o��NV��Ƶ��Mk���ژ��x��wl�i�js������좓���NB�=��	��o���:u��(Ӵ�Xb���ڑ�?�KWN�m47��'� ��>�zg�fZr��P��f�����`�VCҒ��E\l�3��+�u��[jwMx�fW�X7/������X@��s��B�a��%�C�R�*���'��q� ��k���n1x�E�ӄW�c�/Ӗ�*PC#�+A��a`��W�Ƌ�� g)s'���s��ߟO���#6�ƨf�*qe�Z!"��:�5陾�� 6b��p�
+��4���˜�'r~��o.y\^*j�1Yr�&���*��8@247e�����<��#D}C��?�m���]�DA�Lp�k�t7)EHP��Hat�$4�?S�OO�ޭyo��Q#WK[T�RE	۩>=�� �s�3�3f?RO�'/ݍ���0��>�w=߫�|O��o��v2;7#ȓԟ�Zىt�۶��U����(�U�I�rry�_�;5�`�=,��J�O�db��p(Nj�����?��#�w��Cя����Z�[z�#Nd|���o��p��kJT����10��7�Z2����f,j~�C޿,��#v9.Ҽ�o�C������9ZX�=���<@24�r�2y�̠�][�u-b6d����S��
��Ĝ6�|��3����Sh�R*�Pi��v�5w�T���H%m¢���n*�}z�^�_U��o��m���$@Ty�ţÎ��{s�_���E֫qp�I3�y>�+�>����J�Ǡ$���6< Y�y�s��ɼ^�� �T�9HNJ/�5��{L�#��X���'�1�w������=��Bў�m4��+��"������<�M�g�������o��}n6���,M/�i���O�Ȯc�zs,:���!�u�9D�����F[��s�G�V(�9��$▖�Z��[Z����T���]���*%Ω2�]��[/�3o_�U����Bɹ� �O:��d�m˩��^_�.5��ۨ�����6�����������=��W�9W��R��=�M�}s��e��:�g��`嫰p��}꫽�ߘ��K#�Ll��b���h)�w?�7|>-�+��so��,�p��7�nQ�y<�l9%s��1v<���{�߯^��5I���O`~G <�h�3ѭR{�RqXaI��^�t���p����m �hmcԺe���t����֞G����u��^��E�>�����,|��֜��Š��r}�-m�e�SB��[ɤ��ZL�#��ʝ.-/Ay��
+\�4�3�9ʮ<��v[k,�Ѷ�h�����G�=3*g�d@w���K�1��������g�agi����>�������a��W��N�P��yy�}�&�k����ʓS����n�$�RI(%ר������_ᑛۙn���dRk����v���Jb"��Λ6x��ƪ��o�����=Ԓ�ZRT��A'�~�S���g������|�:W�>'5&��Uͮ�ن��\�RX(�;��E���⣒"P^V�X�U�ݒ�&��~�:�JȨ6a��O�*~�*���4��7��e�V��N��#��m,/�@��ðyO�
+nk�����9Z�86H��|���=OR�L~^�=�dz� Ң���sο]��]��OW��U�_�'�Z���o!������՜�O,��@t�v�Tu�"�8L@.�)�׏'�Q��Orz������<��T@j�Ա�'������01���Po���
���~�!��>[��o��qQqmۧڿn��`�.����[%A��Hރ��`e�
t?�����Y���
��/�y���S�F�Q�<���h���[Њ׳�C�N�������[�~��j����x"���z��{��0-��|��k��0���ɥ�w����NN/_��޾��~ވf�]J��$ﲎ��_M�c���2�W���~|_m����
+endstream
+endobj
+47 0 obj
+<</Subtype/XML/Length 4419/Type/Metadata>>stream
+<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.2-c003 61.141987, 2011/02/22-12:03:51        ">
+ <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+  <rdf:Description rdf:about=""
+    xmlns:dc="http://purl.org/dc/elements/1.1/"
+    xmlns:aux="http://ns.adobe.com/exif/1.0/aux/"
+    xmlns:xmp="http://ns.adobe.com/xap/1.0/"
+    xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/"
+    xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/"
+    xmlns:stEvt="http://ns.adobe.com/xap/1.0/sType/ResourceEvent#"
+    xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#"
+    xmlns:tiff="http://ns.adobe.com/tiff/1.0/"
+    xmlns:exif="http://ns.adobe.com/exif/1.0/"
+   dc:format="application/vnd.adobe.photoshop"
+   aux:SerialNumber="1430748205"
+   aux:LensInfo="18/1 55/1 0/0 0/0"
+   aux:Lens="18.0-55.0 mm"
+   aux:ImageNumber="27"
+   aux:FlashCompensation="0/1"
+   aux:OwnerName="unknown"
+   aux:Firmware="1.0.3"
+   xmp:ModifyDate="2013-03-21T08:58:06+01:00"
+   xmp:CreateDate="2007-01-03T21:14:41"
+   xmp:MetadataDate="2013-03-21T08:58:06+01:00"
+   xmp:CreatorTool="Adobe Photoshop CS5.1 Windows"
+   photoshop:DateCreated="2007-01-03T21:14:41"
+   photoshop:ColorMode="4"
+   xmpMM:DocumentID="D82CAADAE55DA09F02F4131D22756EAB"
+   xmpMM:InstanceID="xmp.iid:5C756C10FD91E211B4FF98895222B46B"
+   xmpMM:OriginalDocumentID="D82CAADAE55DA09F02F4131D22756EAB"
+   tiff:ImageWidth="3456"
+   tiff:ImageLength="2304"
+   tiff:PhotometricInterpretation="2"
+   tiff:Orientation="1"
+   tiff:SamplesPerPixel="3"
+   tiff:YCbCrPositioning="2"
+   tiff:XResolution="3000000/10000"
+   tiff:YResolution="3000000/10000"
+   tiff:ResolutionUnit="2"
+   tiff:Make="Canon"
+   tiff:Model="Canon EOS 350D DIGITAL"
+   exif:ExifVersion="0221"
+   exif:FlashpixVersion="0100"
+   exif:ColorSpace="65535"
+   exif:PixelXDimension="3456"
+   exif:PixelYDimension="2304"
+   exif:DateTimeOriginal="2007-01-03T21:14:41"
+   exif:DateTimeDigitized="2007-01-03T21:14:41"
+   exif:ExposureTime="1/4"
+   exif:FNumber="56/10"
+   exif:ExposureProgram="2"
+   exif:ShutterSpeedValue="131072/65536"
+   exif:ApertureValue="4970854/1000000"
+   exif:ExposureBiasValue="0/2"
+   exif:MeteringMode="6"
+   exif:FocalLength="55/1"
+   exif:FocalPlaneXResolution="3456000/874"
+   exif:FocalPlaneYResolution="2304000/582"
+   exif:FocalPlaneResolutionUnit="2"
+   exif:CustomRendered="0"
+   exif:ExposureMode="0"
+   exif:WhiteBalance="0"
+   exif:SceneCaptureType="0">
+   <dc:creator>
+    <rdf:Seq>
+     <rdf:li>unknown</rdf:li>
+    </rdf:Seq>
+   </dc:creator>
+   <xmpMM:History>
+    <rdf:Seq>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:D32363F35B35E211817BA7341AD203B2"
+      stEvt:when="2012-11-23T13:38:14+01:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="converted"
+      stEvt:parameters="from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="derived"
+      stEvt:parameters="converted from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:D42363F35B35E211817BA7341AD203B2"
+      stEvt:when="2012-11-23T13:38:14+01:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:5B756C10FD91E211B4FF98895222B46B"
+      stEvt:when="2013-03-21T08:58:06+01:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:5C756C10FD91E211B4FF98895222B46B"
+      stEvt:when="2013-03-21T08:58:06+01:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+    </rdf:Seq>
+   </xmpMM:History>
+   <xmpMM:DerivedFrom
+    stRef:instanceID="xmp.iid:D32363F35B35E211817BA7341AD203B2"
+    stRef:documentID="D82CAADAE55DA09F02F4131D22756EAB"
+    stRef:originalDocumentID="D82CAADAE55DA09F02F4131D22756EAB"/>
+   <tiff:BitsPerSample>
+    <rdf:Seq>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+    </rdf:Seq>
+   </tiff:BitsPerSample>
+   <exif:ISOSpeedRatings>
+    <rdf:Seq>
+     <rdf:li>400</rdf:li>
+    </rdf:Seq>
+   </exif:ISOSpeedRatings>
+   <exif:Flash
+    exif:Fired="False"
+    exif:Return="0"
+    exif:Mode="2"
+    exif:Function="False"
+    exif:RedEyeMode="False"/>
+  </rdf:Description>
+ </rdf:RDF>
+</x:xmpmeta>
+
+endstream
+endobj
+28 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 250/Subtype/Image/Filter/DCTDecode/Metadata 48 0 R/Type/XObject/Width 246/BitsPerComponent 8/Length 22422>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+�������"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?���j�n���;zr����8�Z6ܛ��OL���k�������w�>�ӁiJ�fk%;Ҁ��ou�7�>��sm
����P�dZ̬~�i�D�!���gQ��B\P&^�����z�\#�a?g����6�k���iÓg���$�D�Ik5����*���98���|HF�r!tm�w+-�D���Ӥt�e�"2��l�ڒ�/
+"�Q�g���ܦ��I�+�:����!��'�+Ǯ�AW���O5 ��`{�X�.-qtP
���^���r�L�i����Nb�m�4rG������(!{��ď�,��8����~��0�����sL�I�}OL��^�M)����.t����{B@f�3��n!TVeeC"�V�|]���s5�8:�ܖ>xqo�s:�V<� ���l^��Q��btCF<���L(��V�$u�:�QƤ7�\-��aK%���|C��Q��d��;��a��c,�xuk��1�/JsKx��HT�	���#=��J����F(E+J��ȣ^?�m��$���N��pF�2O�Ao=��Dn�gsV#+�r󑉏3@�����X�zc�v�	�ƷMϛ-�U�U���[���O��I�\����x`�.�e���I�������5m�A�[H���F�Jq�ǽ0��6_����t.,�b{[���6�U�h9�ja:Xd�Ch�˸[��{Dt�ǣ�"gñ=MXޛ3�)��64����U��z"5߫m��'��ɹ$������Uo$�'J��Vb�E԰��xEޚk:>g�%G�#��Ku�f�
qF��i��@�����a_���+�\���D�b�6�I?��y�₤��͠����k��A�[CI|����F��k�zvfŗ�i��f
+P��W�����w�]��t�W�m�7����H�'~>�R%@4 �N���"��-ߥ���M>�L��{"��#�����{~���Kʮ�V���`��������4�?ވ(?�#�Ӑ*�_��B
A8�:S}HT��|>g0�cc7�����;b$
+���oDe�پ������<Ŧ�O�	�@d'��/|��(���nf[��@���O!Ԟ�(�<�c�&�y*CivL)x-���9OI�(
Ӗ���+�D�I���~C�6��*�c~e�$���-���#��8��M/��������wo�x��F��9�&H��ȫ^$0�g��o�}�1�v2�rI�~���:���̚U��{��ۏ_1iF����u�i��(~�SC�Z0���[���}MǤ|?���� ��$��}��^�5�,�FHMz���qOӺ[)Vh
k��^��啄�z���� ���0��w��D�m���e,/k,SۉDW蘦�Z�+�j�h•c������My��c[���}��=j&ѥ�IUBƦ9*8�r�|�͑P��6cڜ8��~4�͖pd��~������L�8�t���ۢ�O�,��sY�~L��y�`�>�-��W�X�NH��P_�XS#������%�rA#w��M����;���I�q�f�Ԯf�ć.[��5$^����w���NqfX�	"�@b���H]��\Zj&g��٩z�n���M3��(ަ�����hO�M�QAQ�v5��yO����4|Y#��#\6%���l���r0^����$���5��G�=�ɹ����gP<kJ��b6���U�\Je
+��v'�=���º�C4'�	w�y^Y���_W̩֙5k�\8!��P�0��EՌ����Yd�ݡ����}JjS�uYV��+�S��?�����uU�Ҁ�F>�� s�&�ܞ�՟�~.�~/1բ�΍\:�q���P�B0�����^
+�uQ�#��1���:g� �fb>�S_�#�I(k���0�-�O�3+K������c?5i�]��d0cPYj(�,[F��5���D
+�.S���m\+����woZ>��m★���@?�A�؏�6ˆ1D߹��K�p9r��&ڂ��yy�[^��%��kiLbU�V�V���8��_M	:�����X@�#J�bHܳ5NG/c^R���
+�PmO�|3��e������/��
+�,焁H �����l=���䜱�0�En.�����$�a����zt�G�R��t&����J`m̞���#��N�OZ�=1g	��>�ƀ��k!�s����L���J��[����J��nn�m
97"��2,��..)@z�27�9T-
�
y|�٦���M���8T���7�lӁL
+7����=�����!�W���y2s������P��8`i�.K�q+⡉'�:�]n\ ��?b$a�R���Uai$��²ڀ���_�
+��s�C�z�Y����3�#���9���a�	�~��a�\�z}P�c���w"�u�O6�OP�n'��:R�,��<��*?�lB�k�t�a�EiqbTi��+%A�~/�0d���LP���K��1��8q���4n���괽�2aǒr�9NFoE��\�=�A�"CpJ�P5�P�XU�W��X�M<�.�u��ե:W�;?�	]]�O��g}fG��s��d�∫A?��{6�p�M<];T��^Z�O?!��S�`zSE���޿R]���W=+�uoQ����b`������<�n~��/�1�؅o-���c籤�Vz�G �n���5�6�M*Z�56��O�k��o�]�D��I��0d��CZTu��YW���o��,d&�<���<fiu"��:_�ֲz�����Df!dZlEWv�
���R3]5���'����6w��o#��$��}��91������;a�,�a!�+���r<b_I�W}���������J���޿=�`�d?F�?��׽t���Е�l�����j�<._���<?GD�����X�Ԋq��o�N���E��v�o�W$6ń:L�^J�P�;<t�?�r;;��?�X���U��v͈����Z��?���ZaE�#�j��ӕi�zV�&�/��v�����B��w�"H�����O���w%�2�h��n>&"_�s?C,���@�.�<x�u�?z<А�lT�=~�=錘X�>̣����d��zt��y�8}Ud>���ۥq���!_@MuMAO�T���9�+��O>L�e�q�vORB�Z~�������a��m@v6�}�Ӫu�#K>�y�w��K�e�9cM���k�Mv��o��+n� t��z}9�����ds�fh�x�.+$�G�����lG���pq9(��4�G��-rM*e�Ҁx�y����X�6p���96�9->�ܳR����x�����7/��V\Y/	�߾O��a������n�$��6�bi�1y�^�߾��#�?��nKV��M��x�\
O�P7�y�M
$aQJ��gX՛�u��<�*�
+���p�c����yw&������9�ui|�q"^u������m O*.��)�=m����ʞ;-�D�R{�� ��P���Gl��j��6��I��Ɣ�#|��_Eg��K��iM���ߗ>�>w�⸙�q9j:�H�>#�Tr�Q&$�g�Q��.(�H�D�[�l&��7I!h�e�$�!��l�c������F[�kYn`uW�(Yy�V��
+�������pU8��x���59���"R5{W[�#��G��;��,Z��<q�T��u�G~fդ�	����'�2d�n��F��nh��ɿ\�~i�/��8�v�Z,m�cD`A�ӝ���C_���R��ĆIN~�Q��}��G"[{�t����=&����c�	����5?o�T�iv��7��@����\��v��������I�����>��wU�܉򃑡��M'�z�~Xn����l%�}?A�*O>ԧ�9�C@��d!q���|��Je�Jo��q�]�)���zv���HW�=�ǽ�;��xv�3++���+��;ˠ���E?�N�.Y�ƔF�n��0-���M�#�Xv��a�g G_��������ے��c�z��D  vH
+�jwU$u�\���LJ?�4�M:e�VS�o׊\q������s*8����5Ǹ�/����
+�ߧ��!����b�����9�QG������xݿ�E�z�i1Ԁ�d	�7�'읏��~���o�lK�e�[�eX��+R��|@W��z=Mk-�^��`�3�zF��y.����6('���Hc��I#J����^ ���r�|CF�R�qpē�
+������ն�,�U��`[��FX�yGr�Ш'bյ%��l�U�!+��G"�G�\	�}v�\���P�������V���]����]���6�?�2�3�00��A�}Oz�v������"�Ms*ͪ�1$3zP�|#�\�w�d�e�
+�d�bh�!v%�+�}��F&�����&���c��u�Ù�T����̠L�g�%��OG�����E���@��[�s�U�I��CI
+�v���q�]k�H�k�<,RE,r�%�*#��J9��Zdd_~��޹g5��R<p%����#�,��S?�$�ui%����i25��p�Ɂ`E*�z��=vla��"7��|�9e�b�ÿt�"ᾱo5��I-�����@0n^"���V�_��Y���@�9!W��dI}R3�_N�
z��
�#%�U;7e��f���ispc�/q}|�y�v�xxb$e�%�?���� s�Z�}�xo���]^OW��YB>=���M�&�;����`yE>�k��_���_)�m�������:����@����o,�pZ�_O�U+.嫸�M��m.�:��n�d����_�!<61�M��DLj��84�v�X�@���A,��u5�y�&�.���Y1�t#̖�v�~��g͇o08f%��C~��-o�/^����>]�ӭ�M<�4�Hܼ�(yX=._k�+\�7���������@�!_r��	��$M
�8^������b�!!FQ�ɫ�R 3˟5�<����-I�����1��/UXڗ�����JR�2=ʦ�u�Z������f��}����~��1���?z+�����,�#�j	�Ň���Zv�qcf�!cp�$�֬��!��5����]$��"���W���ǖ.�/�
NDj@��NY�a��01�/�F���Q�ǩ�
l�4��@F����/T���������D�,ŀ��N�%~׀�e�<�g��\��'E�
�ZW����1�w
���Ī���]��H%MM7�2P�]<qО������g��tёǒ��LH���g�[֢���IJ��`��7
+y�GR<p]��-5�t�.�S���Zv�ߧl�X��\ԙ���j¢�K
+ӷL����4?S��=R���#i2�:�0�7�+�� �nvژ47�3�5�B;~��{Bp����x7����j���xl�꼚@��ح&����1�;S���ޙ򮰚טn�}(�f�r�r�(�F��3nO�NZ�Ӧ�����2�D
���v)���� .`r�B]7�� G�Mk�N��ӈ/�@yu�造$�ގoC�8:f�QUG���|��n{�ϓ������!�7�t�6O��;N�����}�w�]ל4��mr'�I����H��Q�D�k�Ѽ���o�ވ���oE�r��\��'�y��Z�{4
<u���\F�pJF܏��YI�ABGA�	���߈�C��9�j#�@<�D	�A���7�Z�>_\�)O�l.��>��]Z,eM��5G�^`ܿ�f��\}�x�ۼM�}?��g/�i��)�F4_�܍����GS�k��ٖ32j"н9��R_�#E]��?�:Q;�Z}`���w"�K�!��8��ƺk�м�kF��36�|��9�k�(}�ރ�-n%���^�!P��mԕ����5�����#�Ӎ�d�n<T��\nM9zg�{�
q=�3��ꄵhKS�2p�����ܷ�^H�y%3W��I��-u�ܻY�o,g����pn�����.]T+W���#����k=J(�U6����h9��l�����\BWӒBV�
�G3�(T@�� ��y5y�y�_¶��d���5��qHT�m�P�M��s�̑:�.F1���:��pS@���W}�i����^���(9�J�w?,��$���5v�䆚s��rݝ�w�6������Z�O���#5�:�R��z�:���l������)�c���7�-*���
��f7��{~���9a�)�℈��t�t����%O}��Ġ[V���cޠH������F�|�|�\rȱFҷ���i�#O��pŕ�@Sq��F��^�:��6�ax��:u���{S'���rgɊ<$�oӫ�����3O�94R����p���2Z�X-��N��Ic��/�Q�'����R+y�5�"�)�pa�q�_�/���Y�bIt]F$,�Vp�H�1�J4�ื��*��Kj���*�:����|da�LE����tz�CO���qHP���	�W���X!���2=�:�[����8	���
"�q�N{�-o'�T|j�?
+R��������!sIM�.S�1p�>w���Og��C<�3�O�L�H Ɖ������j=���n?�_�o�U��� �����4Ɏ�V���@����;u�6ncf;mC�Y���%��"�&���p�(�%%�n|3(���Zm_�C]��q2t�_���q���A3ơ 2�d,IV_��W�$���՚�D�Ф�9�O�UH#ڸ����E�G�Q��"�\
+�k~_���b��O�Fi���Ǔ�qF��z�n��gK��Ǩ�C0#ʶ��͕P�&�����	�n�
Vx��U	4�}��/*�<C��?�VrgF��/??'_�#�YE��ޖ�y�[�i��N���Ij׶��[�"���i}"(�������zW���T|��m��a~����M��&(��Hqpؽ��Վ����ob��܀�6�qeq3ZYƥa�NYy�r�}8>t�Z/@i�
�W�o
+xd�1���Ud���#�ܚ�^?��d�H�P3#�{K>�8�3�;����\c=<x��F���Ʌ���^�y&��7��^!�
+���/u}B(�-,���,����Gt�O��0V����OZӡg&�<�$�E�4�����޷�53F ��3��]fe�L�8$D��I#����5��f���$r"rR���'zS
+�ޛ��3����Ε����f���n-@;�cJ��fZ[����O��ot�y#Lˊ@��W�e��f['z�'Zm�~���(x�zOoU6�B���׷�`�}d͖�G��J�e{t�?K7�$Zh�k�=����"��	��uY>��ES����4�kF?Pҍ��j@�i�݅��:�8%��Π��-)k�q�@z���Q������I����HA	Qq�m�֡
:oQ�5�vz}�Υ���kŽE��c�4�X0�'/��+T'��:�:t�J�,�OY�H��º75e�Z�W|���2s[te���H��יK�Y���c&$����l��t�^�2��t��8�ı�Y��X!���#
o���$�ؼCA ����¢�0QV�:�f���"fO�ܗG���*:��(����<�I�RG9��KWT��A*#iI�6UeRwZ�a�������<H���q��R���r��־v-e4O!��+�p��~
�M��ɦ[�qs$�n��Ë���%G��z����a8H���OJu��j/�)W[;:�P����/O��E

O-�с�l/�7h�<�I�*�@90�����]g�Z+K�IVr@�)4%Y[�=��
��(5��<����\��8 ��l�]$�NR ���F�t�Q��)D�פ�B�k�#�~2��ʅ�GNN9(�|�?e"�X�:+�1�W�{�8aq�oi�Mm�H�r}]��@H�:HI�i��<0��
+����+���1UJ�6kb0c҃{K`9�\����_��A<3�p��սu?rV��Q�(W�Ԗ2x�D<�s�5m������������J	䴶EoY���U�/Z Av�p���Y�h)VfR**}�])�(�m�m����]>�y��gR���m�fB�G�5��4���,/�%��;��H+��>�c�(���'"ڙx���Ĥ�I�������ܪ|Nc�^!���w�ۍ�CU�쓔w�"܈���T=N���
+xb�,}{5�Mf�P�V��O�'�g��~K�l���ʎ��wS]�=9��|�Q�[�m�ɿ����Yx}0Œ2>r �ғ�Ph7t$~�C�o�pD4:�;�������G�_���[ZYJRWr%ۉ<�{��Ûzf�R���S�P�FC�忱\�����I;?��a���Y`	�I6��l�7ON!�Djwۮ-@��w�9����4MM{��iO�9O���t}S��_�Jס�9�K�sH}Q���)�'g�ɞy"�d,�o����߮���O�	��}�jK�vB>^��m�~}k�a�b��n���TI0K6�Tˆ�&*���A&���zH�a,~��Ow�,�EUo6z{C����6��|J��-2?��6m\	}:�d��X���@�x��"3���� Z;>ÈFQ ��H[�..
�ޏ��ކ�k�����Il�
�׸������ `�R�j*:{�ô���e���jL`���P�|x	�G���'�+��g��#�N��/9�~-^�0����o����w�ʕ?�uw���	�8k��.�|Ԡk�cSJ��-x����^=�zgK��0��C8ăD"��?�5{����Zy#I�I�o! o��Z�̏������z��Y��i�w�V=+٣=p�Xѯ.��>������K�v�z)S"�Gi#V;W�w�o�7. Rzn�����uqwor`k)���7���:�)��K;oŞ��0��?u�__xF��`.g�ү�ӌ�����L�i"�U�2��q�a�����׆��5wꦣ��v���UZݣ���id��&gb;
+ᮎ�n�C�jx��W2r�d
�c�v�̄�c�D������BywF o �d���$|��
�=*O�'�%~W
+��X��~�)/�灉�I]�J�!��9Y�?6�����w�	\>\�ZdF����y7�Eq}G�5���[�������@ۿj��J����Ii�`�`�	K��q�yrx�r�_7W�Ï��bM|��򾆴U�a���z����H�����ZV�ހLd��A�AJP��a9���>_2���a��$���%��%O_�>���5����W���Q�ֽ)���@��vF<�B�zo�ێ9	js���K��j��O�G�s�`��A
+G3I�D{�1�k�DR���*��P����N=�oa�d�?#yNR]�����3Չ��~.�#���ł�J[�q���2ٚL��N#��7�*���;��>�����oG&�\�x��o�u����M@�-��7����Q�{���L����a��f�/jhG����6?�n��u�>מkI"�\��j	|ͭiC����_�:8���ku�-��6������k�A}&Հ5�6wŘY�kA�??���+�<��9y��˵:<�����z�i�i�t�D��)�F
Z�5�9��K{oB%���2��6�B��FNn<��G������N6$��V`>�0�2qg��֏��tx�<1�1�I��!<fiŽ3���OՍv����=�b>U�l5K׷�L�>K�O.@o�g���M*�-�=N
+��*MI> xe"[��0p��/~�������>�`G�ڜ/gs骂���֝i�M�Z�z�Z`F����o^$���h	j��q�J���L�@p���D��`��h8�Z�Aʃ�<q�Z� ���X2=Bm9fkc/Cߗ߅��$��E*�H�1��|@Ɍ��7�ltz��e1*�ʿ1�Y������q��
+�R��
+K3:��ç��+n�w+��e��[��U�e���;	��D��I,�/uK[X���®�$�Ҩ,��Mv�>a�%��t�Ea��i"h���ԐZ��Q�*0�[�M��X,9A=��[��7,R�
+è?���s�K���5I����$�J�RV�֠f�O,p�`xlP$����Y���f
F�i�DZ��q7q)�j����-HSJ�w4������|?�y�p�𿵐�K
$P�[�˗�#ۉ�_.�z��'��qZ(��T��8�NҪE?�kPH�����o�5�LI����I����G,��r���#���=:G�*\ꆋ@@X�:o�,���y��v~ ���+��x�%[qw�����v�t6+sP��~$�n�l������Y��?�y�I��(70�zQ����p$�RG�A���N�6Y��
+n����$�W�����W���^�q&��Ӻ�އC"���pG%��]�x#Y�_������T������5w��O�4�iCރŲ����]n���՗�Kއ�?<M�B�k����1Vd܃Q���X��h
+���q�p�}�a��#�7��|s��B?��u�]1����t��q��Jx1�+��eru���ƵͧZ�S׮��|)���C!*(G�Z��Q��[����NqCJr��"pa����ݫ-.?�~�#���VmH��>��?w�,��p"�S�~Db��ұ�#9��"����n�B���ڸ�+��k����[��G�G|�j"Igdw��֪���}�����3��7/˱��C	�}�ޙ�mH� ��\P/O�[�L�����_�熀�����q��{��`�Nc�s��t����Rh�X9?م؟��5X�"շZ;�Lw�܍Z^-:�H��qɾ�؃]��{'��$rc�u���7Z����=�c���b{nv8�d�fc�p���x��F=Z�u��FI�j��l�)U��_�?���t�l��#�w��m���u�����мH´f�Ku�"��tdD���z���a�4�#�"I=㹧G��6Hw���g�K)�?g)�*�sZ�#��%��Q�77q
Z;S,(�����^K��N��e��1��n�9�#}��[<����b�� �>"��s�L){J�n�H�\^q�y
T�l|��|"��]����i��<�%\�C�{���;B��~�{�c�lD��#���O���:��%/�n�5��,�Mt�MҤz��ܔȃ�
���M`F�\�f��+r!Ē�Ƶ ���!}K!q�,w/��:P��~	���j���,��qu��q��˰S'O��/�<�#�.��0�������_Ǫ�h��B�hN��-K��t�gt=�b6W��t�4Wv�sz�MB�J�KF�����N/�N��:��1V��r��PoKwg�3ß�ߦ<��Mf���E�N�G�r�X�
+��:���%�&��j�zr�F>ՠ�L��}���3������*ۺ���l2�<l�H*ۯ�ض���Ϳ�`v��q�#�6fb9�|:�|_Y���������Y/�c�.�T
+�l�B\�(I#n���6��e������v(@4����ձ�������������TX���=>��Y�����6��|�ٟ��I��G_��9��h+CF۩=��H:�_&/�m4d��Kӿ�[�$���W�{~����	�hc�����XQj_x�5m�}�tD����9��fjZ�T����H}�[n�1a"�V��P�mV�?�qU�z�w��y��0��~G�%�'�{E�w�} ��Յ8u�0*͹x��lj�`*����59��._a�I���T����-g`?z�3��o�v~���F����t�qg�z��?�7;�Os��I��4
��]iB}A��6������~�@d<\�ן�R?$T��o8>����F�:�c|���hx���y�w؏����C��?~rO�9�R���2����8h^�WI���ec�Y�zb�]�oO�־����f?�۹ŤIO�
+�w�#RI�@�c#�l��~��kQ�ڻ���ڣ*�#kk$R��J����q3���Z��be��KS�$��z�0�Y|Ry�N8F2��\�ݒ�|k��p�Q�Gz����T�yO�p�tǷ1���U�Xn�&��#"�	��Ѝ���͖����W,Hp�xm�˽>"H&��_l%VE*
�z�
�L46�蒴��T��	܈�M(w�&����rKr%Daň�3
+������۷�Ww஧�������o����ޙ�����������<sd����C6a��]����P�z������ga�K;��y������c�[�J��HԩUZN�E�?���r�K*�5��${q�у5�m&�5[��t�k[�GG(�2�k~;|��R1�`_���W���.�u�7�~d��PK=�H������C<6� #�V��4>"�� "6��>.���+�y��%7]Y��G#q*~֞8����$��RH�&��vL�%v*���Dc�:�L��+�$ĸA<�%��m�W���=��^H�ӭf��ICHN�K�>>=p$zU����2�7ڒ�YN	�|�⦹v�	���~Ě&�d��!�n�<_X�/���i^��h�U�#
+��u�w�����Ndb�K���1�)ھ�a �Lt|�ݧ�Yc���"�u�?$��J������6Z7���͇�ZnaCԻS�j�bO�ک�ym�y�:|�d��d�,'��C���:�U��S��5f'�RZ�1����Xc%��Z��)����^�?H/P	?�Wn��D�p5]���R�1/1l��p�������Ϭ�b$�і����\x�8�@����"��L�\�I����у��ɥ{u�
T|<1^�ϓ��}�9jr���P�����|�J����KPE}�w��ϡ��Ƞ�kƣ�"�n�1��niOվk��;����h{S\?�O�>_r��O�)�c����/sZ�R����W��N
+
+�,	��,����C��]T�xv�� %^��T�]��<�HЮ���,�i@�$�S�ZBM)�����bn�q	�QǕi���m5z����#a��}#��\�T�ȭ����>IO	�7"	'���*����䵑�n*���HbN��)��S�I���sH�f߀�o�٩�M�G��*���2���+M�@홿����i�|��;w�h-��a�ݛ��‰�I����!$�6Ń�T�=��J�OPj~`���������������DE��*�wCKkK�K��]�%ֵ?�2�)*p��W/@�"э@�x��r`�`h��gz���]&���Ԝ���~բ�+^��oaA�(�D�ؐA��S��˕��$�1���oM�sJ��zrI��E���G���;\����K��-4	t�J2CA|�r�r8��PH�*]�j�����/�3���5Ėz|�-�������rޟ�SSZ\-�{O��B:��J���jlw�m9M���/�cG�x{f����aӫ���Ld'zo��^��榛W��`�ї\�G���>|7�q'�G6K��}�i��x��'zp����:-��#���Pvĵ���������\����~�_|�1]+Lo�BY�jR�G����
z3%޼�F��R�%W����Z����v�S��+��M4G�
~�Xك���U�v��u����.f�-��wwf'��c�����:�]�2�F��d<��:-X���+G�ӞU���(�h�H��Z�Q�e򁘐�tH��\�ş�������s���&�;�v�l
�!A�V�t�c��ZkrC�ܴ�PRx���cWN�
+I����Y\PV�9�G�W�r&�{�.xp��w'��3�lj
���=����w���G��e�tj�Ԯ�/ۧq�5���Q0�K}��o�x6�&3�..Xc�c�?�4����
�;�m�P:t��
i�Ҿ���*i�L�Z�>7�
J���~����ԫ��c�ȦR�G5����/���K��C�i��B�����d�֥��x��~yv8�&�~�?K���4x�KQ]ё�	���R䈁U��S���z��M���*K
+
:U6ù/,o[��K����;j�X[�0�!�}��Pt��3L�RſK1�����K�&CY��g�D��*q�j/��������f��A��sTRÜu�u�إ���~���
+�����$������bk������hK��2e�"#._��3y��'V��¦���]�]z5O�(f�+$���{d�ҴVT��T�ƍ]��\F�]����UKt�`H��~MTx�ɏ���4tH���{�>�ƴ�m��������Z�E�<rS��Ա��N��eU��$o�x�<�@��O��ڡ4���օ~���)�Q�J�|���qe��"����4Xd!}��ȫ����kp�r
_�5�o��K/��
+ToA��E�5%x�4�qU�S$�F;�c��%�S)ϜJ@�d�;��%��b���A�w���|G��K��FS@?с���\-��.���!$1E���Wb�'*}��
+�dy�n�Yb�HT[�Į�X7*�]r�`�g,�p0�}��3N�ZbUW�A���q�� ���.т�hi�H4����
+�|]
���A/P�NM�6QK�ބ��%2��X���~����L7'���7�q�VJ�p?��zu1R����q�c���^�&���J����ѹZ����=��#޲���q̃n;��k�O�\n�G���Q_��l_L}��;Ht�ugT_���p4��w����O�-.	��Z}9�q{��G�?���2���4�*Պ�T
+�=D�뗯D�u�	�Zw
+��޴��v�������u[CM�H�9$�-�ܾt�I��[���:]J���!�	�3J�2n�O�g��:��-��q�0Ȑ�������M`MM��q^u��S�������5�BCFvF4N�x�H�,-��2\iןX�T��[9���Ί)��T�0�]��?�\5���NECU�{r�Vߵ~Y�
+��7q���r#G��^d����Z$z��ӆY#�,ѱ�AJ��_ͫC��@x8��М�
�L�KO%=5W?.���.�4����o��|L�dƏJ6���|�&A�ě���"�?�$���O�8���rQD����㑝��KV��T�ڭ�8�k��U褚Tm\��Y����>��K���7:v�LS��,"GGD�׊��M@45�ю\U�mn.�x1��b���H���H��rbk�	 ����i@H���.!(��g�� �8��M������A�VX3�dI�5�,��f�W�9�D� >���]�Y�����8��T;9������n���n5ٹ\�H����(M<z��HI���~���`e�lotO�i:�k��NX��`[���)_�Z���X��a�֡�E �����R����4�Rߣ7�$eN���f�(i�͏>OW�g����y��>���S;88@>���;v*�G�U��ԌP��V���LdW�F��0qC�ze+C�S�ז��Q�+Z����v��!$�f�P��9�]_e���b�՘�@K��WӉ�=�>�e�
+l�R";��ߴ7'���b.~���'��M�A�}F�=����;;}>�E8�.9�A�1�*8�4����dߨ���@�•ݹ�F��-LJ���B���?��ENbO�;o��Ŭ�� gD��7Qe@��5���k�Ԅ��*�*>&��+Ǡ�m�0��J>�Ǒ�����ױ�tU,��!`�lwr��1Ȏtŵ[�/wӛ��<*�"*�B�	�&�k|�􆰤m�}#��`�Y��H	`߿�$�Q�t�+�bx.=[� �eH�"C���V&�ۏ� w��7�͏.���
�G�u��LD�~��Qz�-7�ĦZByׯ�`��	4�]������Mk)C��\+�'R�I�$rjZ���|�K���G�0��+�X�r�� j��HGJԳ
��!4D�Ň��7����3$,O��-B�qs�38��j+SZ���{.˟���ַIo���]��v�����WJ��`
/�*)��u�I��P��IR������Dy+&�T�����屣ȏa3ȫ�=���|'��i����؊.,q��@;�� n���)�)޴�lS���T��3�0�O�o_����L�6Qc����Um��H�;a�������ި�i��sȳ;I���J�Gt��I�J?�ڔnaO��J���c��Av	"j�U ߽�� һ�ͮ+�;m��j78l��t�L�/t=C˚�P�N�h静���*��v�|��ww�U�7�41�B��Q	ju�FM45Yu�JK6�sT�4?Ujm_��iZ}��E{vnoᐇ�p$�O%s�fV9Kijτ[uqeP���Goq��ӯ����^��[�x�l��1�ͼwr#�Y�A�ӹ��1�4��RʐCs$��b�iP��Wzt�o%�Ɖ�y��P���sV��^��>��sNTy|�n���N����\K���Rѓ;o��幬e�[y{n]��K��9Ԟ����zdڭätN+�R2ʀ��`����l�a�~�e`̭��lG\&XH�G�n�C��W;Dذw�G�6ԅ��Wu���dd5�
��x��v@,��R��+^��¿�X��[��	R*SGqQ����y�Ď"�E}E(��u}����Q�Q��fq��󯱞���b�GM�lzfo�=G�v�n�f��j�jG����D��5��V ��dn?8�a�H��ѕ��&�Zj/�صՔK�d��5k_� ��i&?�����/������p�9H�H��"@oVLB�Z;K+��cZ�$d]��l)�ig����������$��zR����?��'_Rfi-ġ�������qᔤH�"n�?�X�����l�p���]i=�t6���U��=O�
�q�Ӛb����J��n��>�d�V�2Q#7G��}�kk�R��B�Vr�����ck1�K��k ~��)q9"	?L�1�y��Z*9�捭כ)�`a�U�ԹĤ6	V�dP:����,b�U�W��Vߕv�1T�,X�a'J��>٥���CD�W �K��^�.�~07��{�%�{f����ZlH�F�̗Vٔ}��д�AY�w���˶�i>$7�]�Q�(��=����������8�:��	R�V=�Z�����Q�����|_��p��Ƥ[G1���EU&�7��0��2�9�Q��=!��������F~>�r��HI��CS��*��9�PFI���N���w0�ˈ��\EѬr��P�R�P���Kkw1���PjJ�Q�5�\S�3꺂��,Ĵp,������s�tw(�c��
���������o��W�[��f� ��HP����ׯu���O����+#2j���I�
�}��q�
�W�$��J~��v�C�~����{|�����<Y�@�]���/��?O]������mB�<}��nY���؊�SՍp��e��L���3?��C��[c�.����Uf4j�J~�lË�t���F5���5���-��n�ծ庂X�)�P	��~�Y��y�\^�\Mv��G%Į�T����"���'��F.�@ʼnj�e�����5�*_�3�j�z
��>�8Jx��rG��.�֝G�l9����OG�>���p�_C��͚�����\?o�w�/�rOW:N�8�oFފ���q
+�V�w��G�Fߥ*~"�7��l��ʵ���`��[�c.������0�/��SG]^��H�$�܉�����ќ���C��)��a/�(*z�7�'�����Yu� ����&�4Rj�*T��O/����,��6�qZ	'�w�|2+H�h�XW?�%��*[���?@̜F��?K�N����5�� �������>07<���+k�W�D�?P.��,ۚ���b�.��HXХ��rO�����l��������z��|̀���l��چ����ncj��j��2
���?��<�@�����0�YdT֣���5wjq��oW��]됏Bĵ�[�0)��Y���>Nh�媔x�#�Mܹ{Ž�/ QN5¿��|(F�����@hv��������-ɣT�8��}8ź�Z�xyӱj����FZ+���Ȳ�MgK66J.�$Z[��k��tImnJ�o�x�5R>���X���ي�V�#�~ɩhx�\��ΰ��^uR���e��1r�2����'�<0C& F�y�b7��qC�_�$hV�T��F�l�٤�E��C��K�L*k��#�)��6�'B	��=���Q�ĉ؀6;�(�N��NbCȚ������`�>�x*��Р /OrOりj6�|�v�7�}���>�r���v?7M��<x3��c"yd2�+�>�Uy�cCʠC��+���J%������࣢���2����&�&A�S����]qʎlx	�a_}�V��+�����
gQRI�a�.R@;�et��S0����v�e��>B��ד���D�uh�T�=�0��?�ɔ�(����
N���~a#�T��t�����DL�����g�O���LfQ[1
��#q�����)oXTP٨�Ѓ����6��
+
+�/Po��m_�|
"�ߏE"�n���Vt��~pt�����T;1�߿��[�0VKqԵ���ţV� �J�|T����T�W���G�O����#Vj�k0y4.k_a����h�[O
+Mx�
kR�h��G��+B v#��.�T<0Rx�Q������z�㛙�
�ʶ�[I�"������?��CMH7�k�xI���[J�k��z�늝�1����������_W���T�
+WX��"�mN��U?�6_�,O�
������o����
�"��R�ԿJ�.4�*݂���y
�@>�|C��|����uK[���?�ZH�B��"����a^��l4�#qB�~Tj��nF�����KPM?�VW:�Ww�\���6�4f�����(�T o��0b��G?}tqu��|�L'�ȃևE�&�4^r��*a��f�Hg����ّ���}��\���4-M^!�^D��Q�e��^��8w�e�1y����$�XZ�����,���U��;�7�y����(a�4[X���H`-�mW'��2�@�8G���w�����zc��->�o1+�P�>���MD����|t��
+!�$?3����'�ť&Ņ�������x���6F6~*�
CV�W�\�a�1���h�f)4{�bK����a
��4�ѡ���f򽨱�R���Њ5�Yx)��NtJ��~��l�Io�s���i+�&.dnMm���
��#6�x���:�=B�i#��W���M��	���ȴ�
+��;�~9�U��s�)�k1F"q�(I��hDW
qu"�2\+Gl�	
+�h����3Y�����MH	j���zt�6���(��J����?ddy�Ȩ:|´<EĔ�Y6��T��,�;rp%�D>H�J�E��ٛ�f?ީ�?&<9q��$��ݵ��
�\��7#�<J���9��Ãg{�$�,��qS;�e�߮	�wO�m0�y�K�F4�I�|r	�.����r��=����ޫ��åƺ�I5��l�?h��)�X��
�O5��J��/)�e+"�,��u+,��Uk�/��5Kw���B����c�,���$*{v5�)u��1�N�z���9p(��R1Uw����`ԛ�d	�7|�Ӭ�geC����&"#@��*6o䚿��U�a��b���8��b��oO��鑲�- �m7�I����+�qU�m=y�U��B�s'g`�0~nw�毾!��?���V��
+�����W�,���b�b���<��>c!,כ��ܓNҷN�2Q?�c�dZ,���ek`G�����-U�.���vf�H�.f��uylX1'��(�u���..a�8�<c��Tb	�3
+W�s��k7�4R����u������9��%���@�z���	+rw�MQÐ]�Zi���m��O��O�9\�J"O�������}_���L���ySқn��;�����=iN�)��"�2+ʼn$��C�'�jI0^/O/U���+iw{cu
�v�.bna}W�w`g�!���a��8���v���f��� ��/���UmJ�P�ӛ�f��(�j7+�ŶhtU�y��v��/JAM'��Ƙ3��s�E}X.�z2s.��8���i�������o�i6��+F�腸���#�S2��J$��o�X�1*�[�ظ���k���X��АF��X��;�_�ۚzSIT�ļ�������+�]��[�_�"��@��q�y0Q���B
�3�+އ����^��X�Mo�4ۆ��|,�al�C4���Fx@�Z�|]U��c��?D,ғ� V�M�������=V�#j	ۈ��#��_B����r'�:d750��m�Š[��V�d"{x�G�Cq!و=1�Zux�a^O�E�,-�H���`����6
/T�R��E,��	�����UbM68���{MBwѡi.-�d��3Jś�&H�w��*J�P2�c����i��!�b9�!��ܵ���b��O��=?G���l(����������=�f����8w���c������������6Үd���Q��m�bE/Z5h܊��̶Z��_�ZE%���`?t9�iR���;{�CO��x,>������%Q��w��=�}sq��]���<�h��<`L�\�^�
+���e�^
OH���F�d,�"���*Ehp���"V�Gq�.
8��7�����H��һ��Ӷ1���ҿ�O�21p���W���GI�x����_��oWQ���l-���u`G�
.��øA�5.ëD�M\��Ň%O�����F&z���_�́����1�V������Ĥ�O=��6�l��y�iVGr�
+ƃu�Qp4��1�4�YA�P��� �FHo���N���c���{��������d:>��[��?�,gQ���[q%�h-�*�LH<N�.I廏���������9~����>x��O�����񿆹�w��g=�F㛟I��'����+źNP���N?Lѵ}3R��Vf��eU3
	j/�ɬ�o�=�v�8������G����'��x�+��]^C�^'�:�+�յ�"�d���[���Zf
(Y(�$�}����Q�c(��7��%ߵ���_lc������1�jځ��j՞���#*ۊ�/���t�ʚ��z_/�-k��Q�7��AB#�J�˽���������}8�������Q1���+��Z���m���O۳>S���iҢe���p��n[�
+-Y�[ףrw-��;2@z������ӕ��"z��3Y���0������3��[�_&3&�w"�-�,����A�1;�Q��i䋋U��So�m�J�o�e��Ȏ��ѕ���*���ݠ�O��[�F?��7�o?f&'�L���O�9C˗�
#s��^�����!���q�2��7^�r�����wf���1m3I�t����v�N�U�/@;`��Ԉ ڊQ��ӑ����!����ǯ�w�ӡ镟�ٯ��\�N/�����ᾼlg�]���iok�g*]�u�V��������)�Ӓ�$~�0*���|NM�������1����N���t��_+�m����:����8���?���5���Z�^�Ds���/�cZt�LJ���tǎ���w��r��j��d�c�N��Le,/�@��=4�N�*v!��f�9�%{̀q>�
+R����2����N�F<u���N�,G����d��������i,vw����������a��TԷL�,ӿにq�����O�L��q�\}�x������]���������?���
+endstream
+endobj
+48 0 obj
+<</Subtype/XML/Length 2662/Type/Metadata>>stream
+<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.2-c003 61.141987, 2011/02/22-12:03:51        ">
+ <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+  <rdf:Description rdf:about=""
+    xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/"
+    xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#"
+    xmlns:stEvt="http://ns.adobe.com/xap/1.0/sType/ResourceEvent#"
+    xmlns:xmp="http://ns.adobe.com/xap/1.0/"
+    xmlns:dc="http://purl.org/dc/elements/1.1/"
+    xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/"
+    xmlns:tiff="http://ns.adobe.com/tiff/1.0/"
+    xmlns:exif="http://ns.adobe.com/exif/1.0/"
+   xmpMM:OriginalDocumentID="xmp.did:FD7F117407206811A6679C02F292352A"
+   xmpMM:DocumentID="xmp.did:8D5C694D6F4611E0A8F1ABF9536111D4"
+   xmpMM:InstanceID="xmp.iid:FD22A738F8D7E211A771F3AEF7954E8B"
+   xmp:CreatorTool="Adobe Photoshop CS5.1 Windows"
+   xmp:CreateDate="2013-06-11T08:23:53+02:00"
+   xmp:ModifyDate="2013-06-18T11:35:21+02:00"
+   xmp:MetadataDate="2013-06-18T11:35:21+02:00"
+   dc:format="application/vnd.adobe.photoshop"
+   photoshop:ColorMode="4"
+   tiff:ImageWidth="643"
+   tiff:ImageLength="652"
+   tiff:PhotometricInterpretation="2"
+   tiff:Orientation="1"
+   tiff:SamplesPerPixel="3"
+   tiff:XResolution="3140810/10000"
+   tiff:YResolution="3140810/10000"
+   tiff:ResolutionUnit="2"
+   exif:ExifVersion="0221"
+   exif:ColorSpace="65535"
+   exif:PixelXDimension="643"
+   exif:PixelYDimension="652">
+   <xmpMM:DerivedFrom
+    stRef:instanceID="xmp.iid:FC22A738F8D7E211A771F3AEF7954E8B"
+    stRef:documentID="xmp.did:8D5C694D6F4611E0A8F1ABF9536111D4"
+    stRef:originalDocumentID="xmp.did:FD7F117407206811A6679C02F292352A"/>
+   <xmpMM:History>
+    <rdf:Seq>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:FC22A738F8D7E211A771F3AEF7954E8B"
+      stEvt:when="2013-06-18T11:35:21+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="converted"
+      stEvt:parameters="from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="derived"
+      stEvt:parameters="converted from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:FD22A738F8D7E211A771F3AEF7954E8B"
+      stEvt:when="2013-06-18T11:35:21+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+    </rdf:Seq>
+   </xmpMM:History>
+   <tiff:BitsPerSample>
+    <rdf:Seq>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+    </rdf:Seq>
+   </tiff:BitsPerSample>
+  </rdf:Description>
+ </rdf:RDF>
+</x:xmpmeta>
+
+endstream
+endobj
+11 0 obj
+<</Filter/FlateDecode/Length 7178>>stream
+H��Wks�6��_�O�c2$���-���&ݤ���N��#[��X�=�~���B�&�Ԓ(�u��~��2K]�:+S�B*�*%T��U!���&_&O^}��b3Ʉ.�2�e��"�ZÏ˼���(3�V��e��uE�5Y����'O~�3q�����M'O����F��Olnw���4�g��I�f��ވ��<�\��_T�T�E���e���_ћY߮�2�m��N�5y1�;tՈ,�2��Cxd,#�0Z���#�E�22�>M��Yg�0*���3������R��%�	O?c��C�����ö�����j��{>w�I]֩T�P�M�C��7諳�{��*�*���!vne���a�vߪ'2������\9@����s�Y�E^���(�g�#=o=�gS�Ì��d�Ë���t}^�B�y��Z��J5�����
+0��3�B`����A��K��+	�„�O��}��%��㢈��r������'�|��7�fO/ħ�����ڮ���&��\攭��|�����*�uS��y�n⤎�^��2�OE��;�'�8)��Y���:�U�bٵ���1o��.�ѕ*˄̃MUP�E��RK1�x��?�L,���h�M��qRE�qAŹ���x'2�Z�?�8���ݎ���>�ƴ���!4�>V�Dҭ⤁�{�])�����!��YA��81n�#���~�������v#�;1�S8ow�:\v292�}I�����+4�<Wͩ9S��Sr�~{
x�v�>�gL�,L/�~�u��;���N5��~�uu��vF��:vݢG6�#WYC�&�OqSQB�dk����S3�*8�1�}sFq�=YM�%�sv�0��C�=�wv��n�Wa��X������t�5�H�!���
R�҅��{���v�����uչ�f:�[FƏ�x����<����/1|ܵ��G�>��Twd:z�U��
+�@��8�6��'hl���#�	�+w�
+'5s�Ƌh�wWJU��xJ�!W ��ZE��
+F��?����Z0{"G�w��5:����BtD��<˃L�*'JZ���7�&��4V��u���q���($���7&���3��X���Hu[���e�SQ�G."83%�y�U>Q��4�&�����x�I;���ɈԦ?�}s��c�”3��5xt�v�s�$��ޢ����{y�֕F���Y�}Q��`���GhAoW›�8�wx��Nj�Y\!%���ͅ����6�x�
@VO=+-��M*\�C~�,��)<}�����S�9\�€g \}�C`{˂3����)�&y������1��"���������*�7����+�GJ�Ы���^t�
++|n"�%����hh�@�Y����!�ikE�t|�ho�
+kЭ���L6�3��)��O����O�@$ЋG-�&w�9������Mǯ�=u��e�Y��I@�a�Ϭpp�:]�]�D��xǣi�����I�Gq�
+'bE4��8cpK��&3k��M}�U��.c�N��
+^.>"�� ��N,�E;`��8�ȊC�
�CV��4���7w�j�Zt?�Z��S5�������������76'�c��xy��JwBm��\�F�t&�)_������:P�����8!
+`td��N�yj��;'g�
+�V�̻AV�91;�$���>̝%v�g�B�R�:�T����;��	��V�Bam���	x�q�N:%��v
+��:�"7dB�o(�?Ĺ���V���9kD8y%e)`?Zfu�ױ+H�ذ�ѓ��@3��x7�Mzl�����(Z���E��l�
+g3gV��}O��<X��iXa�?����&e�hPZF�������xf1�i����+��U�q�+�9�S�KŅ�h�˾y�����̟`�KՈ�w��k��g<�8��[W��>0�˱�׳q�����Q�mj?��Ud�LQ��$��'�1ۑ��A��ܭ
�᫪U���e]!��L|O��c�[��B��cFՙL�
+��st�&7�L튿HT*���s��`��/�Q;rzm;uԼ�i�)�{7�
+��$�m暴fmE���7��v'�-V��#&f#��h�ت�����pN���v�
�;vQ9��_l�Ҕ��*��Z���7��V��x�ZA\fM$�Q;��iql�^t�x-�-��
�&ܟ���iC"���g�(��q��.sQ����+����:w��Qӗ�3���M��<mC|����1z�-q����;�y˜
��v�%��i6B�҅@�0�V&J�Y8'����ډ;��6. �����&���QTѣ[#�R�p|�-��Ur��J�g�ph�1Vjn�,kW�$��vx�	�Rw����9HZ�t�S;�����W�,y�B*Zl��N�Bkq��['�2H�+���f�^U�
+��mX1y�BP�qD�8;�X��m]³�@C'�F���B����	>�
+rfT�~C���-`���X���j�t�a�q�It�4���vJ�s�
+}@���/4���wV\�b��$��(��!�L�.|�h3���v�M�@�alZY8[��#��3"q��u��}k:�x�8�JbT���g����vU��~���=��ڈ�k�%G��P�i�[|�y9��,����Qj��Y����[��6��1j�df~X�&��5Xq1�[hK5��+v��t��c��hh(.���ۘL�c��M�O�Kk����mP�vnp��j3��Tը������Z���qy@�K	DF�/i2��6�Y�?�U�۶�D��+�Q��d�_\nap��4��B����j,:�G����Y��l%q��<��rfv�̙�������$�A�Kw��2J�I��N�;	�To������K�M�\%£ˁ�,��-U��VS	��G"vo2c�����X7y��;�ٟ�L�	e�l�K��݇��`h�.{��)_���.亓ǔW솥�Qv�v�x)[�߷��~��"�P���XN}a��L?�o���(�a�dC�]���Yd���u�W�CJ/O[������m� Jܟ���$��].{9Dm6t����;J�f(jMq@��-�u�^h�79
]t�����ӱW�6��j�P�S�=@�^_�Yf��z����[.�g�7P"�b���ƶ9��DLv��w=l*�ok��ZϚѳ/
+u�=�L���ߋ��)�UA@�K9�U�Ƽ����g��h|ru�+��&��
+YՙխY��4i�Z��ӯG�3��Ũ�����Z�ί뫺!D�U�<�J�&o�l@��:ݼ[��8l�9&SD:��z{;�,�:<�!.��y1�_̦MS�.�rn�Oǩ�_�/�W�XP��d���.\^���1�w(
+w
+�Y�*
+�_o�?�m_������9<���"A���weS���_�Ӫt�=m��e�1j��$/G� 
+U�x�V.�:�s
n��F!t˫����p��V�0��U001��EwF9n��oG���ax�,D�-D��8��}U)mu�so�ު�?��Ds����e��g�ܠ7�Yv|v
+,�^0#ޑ�����/#�@W�r��#�a��HD����D`�ϫR"�5�DV�=n�` �a$g�ÍW��V��Lƫ�;t��e\apX"))@�m�����	Ƹ�G���hK���m���<Y�K	J��դ��XInY��~�C���a�_H���Ag0n����*�ޡ�쁿o�����7�_�
�,}��0&�Ζ���=z>Ho:8f���	�$i%r������� 9�g�e�߶DW='���+��͠�J��t�u2���ڂE��&j<PY@�w%���AX�h�X��l"��}���b�=x&�����s@->��٣�h���X4�Հ���
+.
أ�qX_������k�<c�>�l�ho�ݮ�ȏ��/o��Q>rA�Z��ֵl6������B�A�5���Y��BžF��^�Uġk[b4�?bW���1|����A���1
+1���)h�5�-jDV5ڞc�Ɓ�oQ�	�t�{Q��+���֢��P7|
�&�1�;�R\���i��EdB�J������y|�|1!p���b�13\;KҌ��8	i�W��u��k��0�ث�^)��B�*��-�7wP(�f��T/������Wqc��Y�\�v��H�W�6
no�b����N�jd������ҞC،�U��
ϵ�����hS���y�{ҍ�����Gu���-0�$�Ʒ�����ҕ@>�=w�x��e�,����\�lo�g����3���{в)�sC�-�0s�Sg�0A�
��S����"Ȫ;!I�D�\>�G������خפ��7��Ϙ��1ځ&���R�1Z�
X��f%�T!��%+����I���ψL��������.U���|Y�ɣ4��4���1 c�L	݃�ƊC��2������<y�5���X1��\��)<��b���v�{Xn5s9<">/�����уj�npBf�LE`;����DGz�7�;Smr����ɓ{���r!i.���'�U@*1��^����p%;���*9d����J�,C
+�绌��9�?0߸�ȄT`� 		Y�)!Hj���D:�έhɶ��4xI	����}y�
+	F/��f��)�Qj]�\"E[��Xd�4S"h�� 2>��*�
�����kq�EA!��N�v$ԃ��ZI ��°����5�k�ߙ43ۙ�������hn[�b{bںm�elSoIuVD�%K�W�/���}B�nztMÅ��\���ւg|��nuϳ�ĥ%bJMf�8,�|�N+%��P��D�4*`8��'WW�r�h�����B��5-�d��U ׳#&��؞e�7�٫S�hԼ^�_�7���fz��{YZ.�Wލ�wXx���Z��~s��]���^�kU�n�۫�Q����#���>��Z]���rS��m�a��k���ͥun��ES�U�O�5_���n�V���b�N�k�N�}����g�G
+1�n=�,�����6�zu��Z-�kDt9�i�;�-�ᎼBm�K�g���#�Zc<k��p�<3�>?�%g|{��IZn�^�ُ7������+!8�9n�@'�ʤD��9 �tu<����N+�����ׁ+���o-[bA�T�G>4�� -9�>{�����@m<
� ]A��d.1Qz�����wc����#�zߕY�z���^}�����V���no�j@����l~Pg����# ���	��!>
+����c������k�9��zui�C+��]}����i��"\	��_4�� �˺��m���B���V�n�kt�B���j�V'���V��}@km�����>�Ԫ�]����F��D�?����f�Voo���T�ֳ�E}�h�K4ܚm���\����^v�QQ_����M��3cK�Ph�T����*�Ւ%H�v+���Ν���&�R	�؞�s��q�XF��ټ};�<lv�޳���D��6�};�q���,����zʃ�-כ��a;`�㰹ݬ�����t�0��J&�>���݆p���fEƼV'Y��Y��Yӂ즚'�����ջ�o_)����P���7͏�v�Y_s����a�iab��p��Q�^�����A��n�@��A/~�x3\���J(�7H��a�9�쌊�v;�^X�X�ʃQ��r+��a(�f*��L���g|SD-�3��^݊�N�X�}�z���
+���0���,�%
딍�v�(�[4
+σ�Q\q�R-�4���\�E�G'���F+p�����@l�����g�������q����a;��s.2�BP���j!�Dz�$t`�r_	QCͰw�S8ݑjP`{�*<���uE�*g�9�E��	�R���,�
+vW�˅��`U V�����\L�X�S;�mda��Y?5�t/���p���|˽z��餐z�&��{X�I}ݿ�.���r&2�\���+C2E��t.ꪄ�	�Rq��pl
+%��{*�ɈL�����ҥ��1'@�6f����P?��\�
+����6$1����鋍&m��wS+#]b��J�q%wz���b]�kB��T�#V���L0�p&�zؤ|A��ED@N9M 3%��g!K�BV*���rB1�$�fUg�&��&!(i���m�mp@�tg��q��f` �hVR�t��Be`5L�{��v<��4E~Ok����o�(��sz�x�r��
+W#<I,ڙou���ĝ=o7m�h�J3��J�h!�@3Q]L�Wb-D�|4�{�7H�B�ɝN`'�"%�J��#�˼����$���qO����D�i�Jӓ3Y���,����[ǎ
Z�L��P�3I�����ת�����>#�%Y!Cs�f�ʩ�'�{G��Rjh���Ӧz�ށ����:X�D��	!� ۻ=�X}���|N+G��-����Ƀ�ν�)kN�QG�k	�x/|�ͅ�9r_FF������:�1����a��E��q��{�U������#��5w����C���z*9;*��OvV��y�yy�ޙ֞�o�\��;WN\@֝y�8���!y-���u�<��k�r�	]8��Y���h�j0�f�4���=��\��k�:��[f�?͡�3�"T&[��䥋LeF�����΃R���G����Ku��
��X�W�+�Ʌ���b��~���B�"=?O��58��c1G�dXa��M��JѪI���H?EP.����bf�9؟	�I]I����1!ڜ�XN�Kۿ}��3�G��󝾞�������"g��N~� ��||b�]�̹T���>����ٱ�y��1�<�[�?��+�y@�z���W�����Ť��r���1�A\/��f�����,7g�>v
+Q4��������^+ˢ���.G�[��w^�t�º>� S���$Q�B�jHS(_
���B�����,�fTT��� gY\j.�B�*��Dv�^��蘿�-�?�P�?W�/�ѹ��gO��fx<���.[�V�`����	
+endstream
+endobj
+57 0 obj
+<</Length1 34798/Filter/FlateDecode/Length 15539>>stream
+H��V{PT��}��Dpy�{wY�.,,D�D��Q�]�"oA����F�n�NZ��kmڬ�4Yh4��4�?2N'���v���S��L;����w�݃35ә^8{~�;����~�|�sA�q*��4{�G���Ɩ1n����V].\�@}b��^������<�����>�������hW�pg��C���
+�j��hk������O�y7l���y,��u��=p,�v����۵�
O�[�����P��汃�����s����j !4�k�޽��NG������ڷn�G9�
+���a�E	*~�h4{�
+/�J�զZU�fq�^�y��_�ˠ�V�KS�E�|��sJ�r	�Y��dܐ��1RraSf!6s�"�6/��2�T�	V�`G�1	��o*�W�@2R��OM�}�@&�0����b.�M�.���|���(�|���^��>��`�A,�RT�!�|9��ըA-V��h@#V�a��4a-֡��?؄G�[��Њ6^�Q<�o��x	���c��+8�W�N�'8�7�S���!���9~���6�x��8���ю�P�G�چ!�R���Q'�P/�@�Sm�Giu�_�F��Q<���I�����8D~��	�1�Ez�V�:j�5Ԅ�ï�G�im�Gh�i#�i-��������A��y��g�-|�v�e�xW���S��j���VR,��,�g6����B�����E%[�P�H����zZ}W���i��\ͩ�k��b�Z{CO՝z��Щ8m�Y�g�s�3�Y�s�:;ܗnDo+Ѩ��8Aig�"���p�+�QS�L-Gӌ���$j2G͚��nD�h4�y�������g�L�������'��X<Q1Q>��NL���g�Ǘ��ǽ�m��Hsd]�� RY�"��N�
�v�S	����%E��:��xw�f�7AToR�	qJ&����]BR(j#�����@��G�xr�W��m�0�H�jz�C����
�uFj�VRݵ����Ԃ��A�V�nkY�F���G���ÿ��zhY {v�9�Eıq�����kD����d-j�Bj~��?t�:;��:���ZM�B�?t�:[��6�R��d��l�5��3�3�4s͑K]���Gl&��aY�������&L�����.=[\�K�e��Q�E���^�X`� K��1WW.�N����F��cadr˽̍ǖ��*+�4�T��i�>��vts��>�[�j�a�{�GX�fUb�0�a�n4&�3�йO�46�R�Kr�"�).!^@7�*ma�gm�A���kLݔ�����/|(��%���l�jD�S�R.-�H�<ii��������,������P���e������r]�S|!�<������K��c��r	^�S� CN�������z����&!9��y���.�u��z�=\߸�닒�&��3W�D|פ0�6^>����Av=�`���zK�}Er[cr+x�1�3�T��Ӓ�O%�)A���*-���u�<+g�͇s6a]$��*��-AXw�I�xp��	����܌���
+_i����+*�W��r���*��{_z�͞D��z2q��>e���,�;w��ko(����w�|o��>�¾+�%W��s�Լ��Ҹ�Uuj>���+=�ǔ�^.nVq�y��/��\�^�ƴŬd���M���f?����x�8��#�n(``��j;A�v��m��"�,&ۤe�	�8� �M,�20��'T�Q�����a���_..q�|��ai�+�E	ccU����D�}\W��'��H��Y�0��p?sZ�ŪMEO^v���l��E5��7�����18�XqIEizZ�b�U�Rr5l�����)E�-sʊ�xO�37��2,�u�O+8��G*��B5"��'����
+�"�Ka�j��bNF�z��$�S/�v�V�6�7��Y�S�7s�9�"GU�h,�׺���<���Q�}��B%4����xcYƦ�r�����N�$���B�G���4�(
�ӊ�&i��#�AE�4��s�cjz��7u&�KP+�ΐݬ|��/�P�=�{Z$J�Dy{�kI�Rӝ.	��g���[tqS�c!��ΝR��K0���s��Α)�w�%�ט�`�*�Xr�@&0߼k��NwH�6J0tXdN3�N�m��b�`�ӱ�-4����Z'�9 A�ݒ�z	���Ò�{��DKpS��d�$⺙�XW�(0�n�� 5-W)�V*����˙����j�m+���^��I�&N���qv��qk;ΫNi�<�JC����>33�)�MZ*f�h�V#��f�
+���t�c~f��$�`F#�&�VU���@���Ϲ�N �hN�|�瞽�Yk�]��&z�:�i���	��^��
�#^���{CNKE}d[������x|�D:�Ҿx|�K���=��p����TK�����{��l�W\��]d�
+�v��"Bn�P����Jv�
+�,���{�R^n`| �@s�=�5y�C�f��>�����x؁��,�ǁ�8��\�AN�MJ�*X*��J�$ӵ��Y���v��+�(+.@�gž��uM�fA�eQ��=�� �o��~�
+����%���C�TbSpSp[`(������莺��
��u]�g	ߠ[_-����R+S-�d�Yre>c&���SЁ!����2�'��\��=�Y �/R�5d����}�ƾ#/gV��|܇{�SƊ]���P7�68�����u��hB���`P�d�\r�F�^���L�,�^��04�H��@�qC�� ]!0� G���aP��>-��2}�<ɕz	�U���ئ�u�|\�W��[rcK���cwxTT����o`��i�a�$4�{�hګNu�O;� �!HPHUK��E�R���E�=�XSs��x�o1��m�W۶㐻�[���jLTc�Z�8�6k�*~�o�r1�^C��/x������Rrr�am�>�
YX�j��0�Lwk��=�-=�z}Wʟ�	�*6ԅÑZ�Lmlgrp<\��9�?�rh��T��>^�2�j�m���fw��@W�ښ���}�AɧN���i�r���Q|z�<6-j��w$�Js�C�3d	���r�I#�� �чW��Ђ;y���]�e�+�x���Gp�k�a���]�t�@�J,\x�E�b��h�K�>eO�T�sz��WDT�\Iz��1}K�sZ�r���M����`���P���x���|;lyG�P�)�NY��qlP���\h��BK3U\7��5f���`Qɓ
[/.`���	�=w�3�\\�������3�FGW	x���	�`p���B�K^dP���j�;j��]�3	+9	W8	�*	6������-Xˌt���=�''ኊ��jF��Ha�"�u��w���gB?ρ�fp��Ы�����^�׈��֨uPK�NG��aOz����Ք��Fs�T.���7���'��H}�e����_n��h��[�[���bՑ�?����S�z�����L����	:�u�a_yK_d8��?��'�uD�v�.���i�
v;�0���”S���%�*���p��T�~��W1�
	���WU+jQ���~,�z�W�t�wJ�)�&8�`��y?c���2�.)O��HAl�5Fx!����h9��@q����8!�sX�M}V���Y�x
6�2�
�0��
䭕�r�$��d��%��c�����z,&>���>2�￿ۮ��{��?�}܈���Z���y��I�,��m�T=����Q�C@e�K��evw���B��u��0��$�%��
�K�AF����rP2u�����A��+�6�!ur�x߈����6�bp����st�-�>�Y�m����i��3,�IY��$��i��v��i�v%�����2j�
+�~<��d����L?��a�����Ҽ���Nr�G���a��ǿC����#Ef2�s�Y��d,p2FQw�Y.�\��%3O���.2�8+I��ge��r��rZ倖�`��Zb��0���J9�a.�����=����6i�!{'�b_x�U�t׻�u=�`O�s5i�j���u[m ��nx}��x|��Pi�T6���|��~c&���2��_�#�_��kt:Ӯߦ@��x�@��)12��=t`��L�������
+���o�B)l�(�
E�!n�tP4��{�A=;�0����#�щ�_`��bȏ�ӏ�r%S��#�p[�K����)	VH��H'����n�lNJ�v҄S5�dttI��X�:P�����T2�о۴3к��U����>֗F;�흼�����E�n�]���R�UEruR�Q��\{�u��׭	�ʡ��)S��L!��K�e���K��0/o3�S��㸓á[,�[�����<y���E�1�g�)�t��>�ŧ�$]`�`�g5:��R�\箱���^Q�8�w�9��N08��8G{�I>�!�V������Ķ��9Ų���9��8yVR��b��%�*b{��ȁ^��jl�Lűxj��,Xi��@��{�Bq��v������R3�ksi�~Jk=��7��i�d/i��w���uZ��	qS{(��ʐ�"v��N��U�|�avv�:�b
莵X<�A�c������a����X��b\���X�N��a�v�E�\�֤ziR������F2IYr�6���\�%X��ŻF�����{Qzq<�K}X�K��e_ݴق�O�nONM�;1������}E�#�E��D�JG�R����Hi��:��{#��$�r�=�U��ِ5ۤW%(��"'���ݜ�%c�Ȳ�M���kJ3X����������'&�ݠ����f�mdޙ`��-�������-��	��rk�{��M&�XRL��_1��,,ڥ#�0��A�K��hR�.�d�P�&~��a��6&�	��`��i�Et�fFX`�,Q�?������=��Pڳ(HABm��h��� �o0�`�1�����Y(���+`ÂiP!��7���|���l��Mw��VY^�����z��]�bW(�x�"�-�-T�[ZQj�Xo�-E7uY���&J�l3L�'A\�n��f�_]�5f2�4�lS4[’�j�u�9�{.��\Nx�~��~�w����^'x��$���e	�"��b�Zo/~�̷��
�C��߁k��9�_�t���D�u8���ͤ�V�mkTR�f�K�M	��>�x�B P����I��%���$���"�H��`�B�-g�(G͈ �����K����
0	�ؤph&�q��`��(���^�z��La�g�)u�p=��!��F�#7�&&������a�L��;����G����18���v+p����)��!��hw��!�OM!�Ir����t}�l~�]���)$���8�f!���Z�x���$��H����l%�x��b�\�C�g1E��X�M�Q粰&=qx���`�b��[�AI�e��Vf�ߙm۹9�ټ��Ė+�lڳ��u��M�{oi�\����?�����5���4-X]��}ymz��ayUY�������N+Ò�;kW��]P�������켡G��-�6ߵ�m�ƚ9�*�3jV�_������Kۮ�[Z=�lq�Z�a<qސ?�B��}B�}b�PA���D%MJ�t�*��	�%@nMF�0:���D7���e�0yx�$F��@T��酿J�y��>�m��?�]*+�(u����Rp�C(a��JET���1'�R�K`L>�8�;�ݒ���Z��񏼢�1o���H��/A��_��ܿ'�??jc�*�>�KM��ʺ���<p�*Lp�:�	�o+����u�~}m�7����v�@�����Κ�@NvV�$gMpQ:zg�W�z�C�fbۇь�M�f�/�s\'����ܯ��dfT��ץi.MsiZݜ�7�3K�����3�z��r��V�F����n�i��T�E"�_i�@5L��"qɢ	>���*��"�C��*��S^��UE�>.�G��`9D�[$�܋��5m���}6����v_{�	��X=F�;�����'�̛ê?�{���&�a2Yj�<��y�r;�^���b��Z�+�̩�d�eW�s��~�&�d�L�F�Ů;l ���FNK|䐽�̓t�����G��a8�ezf?=���[+	�2��{�v���
tʭ�F�"ѿ�[a�I`=�2�0hN	��K�K�#T����/�AX����.T,�tS��tӤU/���U^>��ynQ��溺���/�Z��'SZ�즍����b~|N�™��;׬��;�б��c��o׵��m�c��;�fk��xZ�������y�����.�L<�����5Gf��-��h���}��Y"@F�q$ˑs��B�31!\�2�p��
+�4�qV�Ѵ�\�g�����7�OO�_���ӹ����ˍ�G�����W9�?��.�o>!ه�3�L�E�aͼN�0�@�z�ި�Ͼ��d��6
+��|��k�u�g�m����	�W�>m Dj�CZ��X'�f�l��&���:b5;1*ť�ӎ�4PvZ�%@��.����u(8oʡ�3�R�N�RX=,fi>j
8��$q*3�g��&�"8G�޶'��]�����Zv:	����(m��0��x(�9�>?^*�lQ[H�]h_p
�7a�������?�� ��O�%�rA�
+-�(�����Ԩ�(��)n�–)l��-�'r�Β�Y�����^�y�(�5"ݪI�r�v���s��7t�O���1��ő�����B�E�-��[��a���{��(�YQ(�$<���r�4O���)L=U�(�!�C	;�JX���F����kh����9L�-b��<esc�����M)�)�]���O`O�~	K�IE��nEe���ozz�c��}���gr��PY_���_4���xDL���y��G�n���a�,�ĥ��׳�m+4>�p�L�HN;���#�B>ZF�U�el�a�UB�W".+��P�wUڥ��{�m�G�٥#��mrB W�@��0��t�0��$&I�<�@B�.�G9��=0��{���*{�Do�f�6�u�q�풖�X���"U��_f�j	z!2!�,�H��Q�}�l�^,�)�d�����.��&��G_Zָ��i��%^��st�K����^��u�-�ߔ,mh���j�v�9o��U�;�pV݆�e��*�߁'��7����~B�Ǻ��38�T�\�(�0%d9rL�%��f,�I;ƾ%f�_�H��H�.f~��>Qz�p�Q�b�K�B۬�i��
E.j��p�#_l#8F�%8��|�~�>F�F�(��~��旧��܋�+/��vv������'�Q��Q?gYg�1�9	PNx6�ڦ*i����Oi�F1��v?�U4��I��X��am1�y��G�\j��	-�
�"��+��pLA�<�I����F�QؗW-JA%��Z��,�g'U�>��P�x�Y��1+GZ+��
+�P<B���>�>��Q��g53tH0[�u����ed�lJM�@��<�@Uk�u�FM��
+�q�#
+��#��1ʁ3L���wG��d9� GvM�F����b(6�Uxb�4�I�Ld����,��F�w�`tE�zE��Kw)�.ܥ��D�I�#�ʲ�U��Z�A�F���Z&[�Ȳɻ-5�=��0ߔB���f���~�}p�k���H��`u����	1�j p3j�9>~���9ctʇ�\�Z�ʷ8���>��LP(�n#�^&�%���2��2��X��N2�᫓�m�H�-h
d�+8��|W[L\�����0�3À����``0��1~�I��R��J����DQj���ԕ��`�U�`�V����U�|�U�jR~�O�(��HU��?���~�ѵ�>'��ì�Ϲ�s��k�S��Y���Q�(V0���D��]��W���	NL���9�����8*�t+G	������!�J��4��Z?ӗ�f �wq�U:�s�C�+o�|�9�2j�yg�[�|�G����c��4����~klR����CPR�[�J���}�x�����y!��	�a6.87���C�.Hk-We�x��rC���4�4���<y��x�Zt$ё��瓜wA����ی96��oQ��"
+�Ey%B4��Vx���R����¤;�}��Py��`$��Y^��ׂ�"�[�uT����dwN�r�!�G��^��Lx�@U�`��׸�3VX��<��U%����arY�DT1
���xL��7'A���92�Z�K�ګ��6�4�/
+.)�L��
+p;�������dȿx�����x%����"s0m�[���r�^v:��SEm�Zث�dU�.Y�V�z���*��z��a�ְ:i��I�MQ�M>�8�r�*�{hn��q8����M~�.䃘0�>����=�_��_)����
+�P��A{Q�����S9�j�}�S3>9�>V��OhE��+��`\A���u��`���7`G#����NO���k�
�s�7�ө'��ߦ���&���ӝN��m��g[r�H��-��?|0��`��$R�sy��5Օ���4����XuM��;�
��Ly���W�M��
�e��~��d�'��Z���v;u�������`�[��F�d�^��i/�������3������i��6@
+,��/��f�Y6��~�+
+,�+��Ӕa�2L��o��n�t�4�KA����WBdW���,2X�zq%B�B��
+2�E��%L��|}-�������e�Q)=ȝ^��3
+>�AW�b`�rF�&���r����ٳ�utN����7�F��a��{��i�Vk�L�U���C.�|�b�c5�[I18v���B72����R�t7��*XPpI��ntV�ф0���ݯ�	�?��KJ�M���
+t�{t���(X�[��v�ۜc��#�MWE�ο����w�	lm-nN�#�[:bM
Q��k�4ޚJ4�|�XS"�/u��L7��l�;�|G ����[�[�nX�|G�b}f���$r�j���b�OPƐ�%اJ0@V���.H�˲��2
������#�9\E$�r[B�C�#�4Ǽ����9�*�������A��VM��KjUBՒ�V�'��AB�K�S�eC�).�y�qʪ�\^���DG4QvCb�9�°H���A��<1�$^\/����[��l��U'�g
+�e5����F�u��iQ͛�$��b�`e��
8�`F�A㲲����;k�y��8�ĸ��m��i�ܖ-Iı�8R,.5ǔn}V���u|<���k֕D~'�ڬG0��<A�0�s$��%#){
��>|��m�kQͽ.NH�\���i}U��H�>슴4�IE�c���k�W����nɖۓU��H ?1\;4�Q��ǒ۟�M]^{��=M��E��ȿ�F�~�S�d�(�L)M���b�j���<�
+^e0o��P1�
+�U#Ҵ"D�oW�jʗ�U&CxUF�d�%�JY�J�0rfeI�Ta�t�4�WIyɣAh��f�+5�O��i�Y�[hhn
+�i|b���?��/dpF��c-,�Ufit�4�B��_㨎 �#��&��$�X�-�ޣ:c�3mQ������I
+�.ŧ+�i��u�f�_��v4��ڮ����愂�
+f4�FIK#���(Լ�vHw+�oL��.�!��XuWO�����w9�hCS�0�Z�Oo��l
8c��B�V]��4X��Zv��zp��9���A�!���ȆL}�ʇ,�azTo�9�E�F��C��z�zk�x��F\d��sW��4�����	�۲hP�2L;^n�ӽ�/	�\\Ma�8/�d�ŵ
=A֑�<��4'h�G*Q�-�Kՙ]��^�����. SLavQ˔�o��O�\K��r��O�L�� $jdҷ��l=	p���n7�?%��q!5�Fu�-4��z���%yVڎ�#��tB3)�q��
��`RVD]��U��a����)���s�	�5pr�>SU2Vټ�W8y�P�?Q��5�"%��0�߲&�Vݼ�MH�0��`"�o��;����
�{��~t���݆=�0���R��d���e�FU6?a�����;��
��9�W��K�����Ӯn2�WE��^al�q׽B'J�t�+��0�����=�aj�����_����p#b�;��
�pc�n�ؐSY�6yl�,P�b�TpV�����y��de_Y^��L�d
+kA�!i�i�Q`=ǤfĘ_�)GJ��k��?sH��W�U�]M�}z�٤���d�7�ǧ�'�թ{���S�����E�pY�)xQ�
W��4l�����:���>#�?�PL+����*a��꜃*�^��a�gF�i�7�u��z�����mC�|t෶o�g����(^�Í;X��-7Gc܁���s��������A���h�����{�]O�]���|p��E����w��(�f�y�����|��\֟��O������'U��X,�$@m��%!c�0X��A�ƌ@7��
�ط,��{�a��xC�#�Rd{���	/��(�+�ߙ�O�?v�;�acl���],{��6S�!5#bl N�i�R)1*$���PA#�*
+��D�j�CC��
+T���@B��
+�A�5-M��=���_G����ͽ3�Μ�C�M��u���$�+���'l���j{���@7�nc�,�5�=�x	8m��<�ll���'u�O5K�k���	�L��p�,���o��\*��2�R�e҈��<Hw�h�R��P�����kE��8AFB�T8~��Q�.��|�v+i��6�7�[�j5
9���J �T�VR������`�A�;G/!�<ˉ�g�5�V	�*�ߵ�.wf��M2�t�|�x�~`�v�6�Aj6��v��@���C�� ��q����.���B�,�H}�
Aha���L@d��z����Wg�@�E�^!�"�<���)	|%���	2�|�d �AH5`P�K�&G��ArFă��ɑ�x��n�������H:���1�^oG���Ί��bW�g�����JύM���^ДmI/J�/MN�vVe�e�|��ᙬ��I�s5�jN5��s-5*i�����2sYՈgr���%D�c�\��s+t�)�kP���diV�&dOO�x"�ki�#)��:>DM|��{$Fغ}	��0Fԉ��Q�,%����5���;V����GGPM	@����)��:7̉,h0�����\��Vu�ߜ!�DU���qJk�N�܊(��J'�#g�w>�B���I��1���d��Q'7>�(�e���gshwr(�s3tf��iU�D�h��G��K;�-52{����|BG�lvSp8�B7���R����	@���d�&'8(��t�4��{�X���G����밂cx����'�.{��4p���˫8:蘃����٘�ĕv��Np��^�.�p͒%kF"�}�{�1��y
M�@ ��ʻFֶ���2�g�K�5/�_�N/�_ؼni���Ω��/Ȯ����]��L�]q��${I��&u�S��܅&��S�Tن�k$x�Ɨh�CG��JtA�	cb�o�5��
�*QOr�$O$U�4Q���}K�j������J�R��}bx\=��J��A]>:Ԣ�:r���Wn�G�ƽ�����c���s�݀����!��fr��0���9���c�L}�LW��Ag��O�U���������}���WP�䃔���P�j�c�K��
|�F���EH�,_�r��h�2��S|�
��>�G��xЋ{� ��7D���l����t���PR�����Ш��7EDӝi��d�n:J^�6�K)�{��L�Г���+��?]i���yu�x0�ԥ󉀝쩪�W6=�Ⲗ�����߭��5���]�����XW�x-�r���U�C_�!�����yR�\&F�J�d�0;��
�o�U�tH+�����i�LW�!���wvig�&\ڙ����Da@��ɾ��;�5i�r�(��r��1�q��Y�� ��b!i�AwN:�*�Ec�Lc�R��輚*�˩�/4N�R�1���e��R��w(�yy)���#|�$�H��.�Qgi8�[�Qr��Fu��tx���Jw���~�$Lt*���r��w�R�UJ!9�պ�'�J��/��K��.^¥�p�%ܰ�I������I�Zj�j��U�Ze�E�5�%Ԓ%�jK8*���X1�КVl�'�Z�)#�EոN�t��`)�n���Q����Z��-t�;���9�9���W��DH��;�:�
<qR/Gi9��ǰ|H�
�MGb�E�n����I��<�|��2�̫�t\��~�'=����V����m��v������Q;�BN��G��*�;D��¾w!x(�$Nʼn�*�.������$I��	,m�^����2��C��\�]V>�ʙˀ�12k��Ǻ<�f��=�R��S��7.R���a�l��?��o\��~��Ɇ"����q��CP��Q��ͯ��؊®3U��L`� ����j�f� K��_���v�
+�Va	�U-��A��^��+���A4��JlS�ik?m�ނf��ݿe�FŹ,d7����
+�Kp�%3�cm��}��<������r����t9���9�<��%:ۜh:�OG��_}a��������k�S�߹c%$��Q}�&���D���|�#'Wz��3�C}���\��tTO���FK~���@V��5f�G��G�;��<�S��O/c���g�3��1�^y����-�>_L���6��U6��6^�^\,a�!B<��Z;DH��.���NW6�[���'8%��p�A�?"���w\[��U�q
+�f���z¾��.̜��	�8��;xE��=�.�����4x���Qqg��.���&j�N�%�]@��a
^z����Ű���J��D��Q���?���1|�+n���J�K�"��n�lx����Ӂ��B&�
+U����vrr����ޘ�(���gn�U�I��g�W��f}�9O9�~j���q�r�vD�O���݀#���S�vD��å,��ؖmlˣ��(q���W�H_�q�t_p�û8������OtO�t�c��4��0'66l.	m�jOE]��m,RP�VL�c����o'WM�~��:�e�����G���Z��_*�<����~ǿ���VN
O������S��ɿ��<�:L����>��~j.'�m��uB
Y�T��k?R-v3Y���C�_*��ME�MTN�>���O�����XA#O#Kc.�&i�������{`XWU�iPu֧S�c��b㨮��]�?)��B �� P��vc[�v#�خ����6*R:;sww���jf֋%*��xk����Ԩ/-�"���*���B��@��
+�U��wν��������w���;?׳}4	*��tf�&�6�݅�.����+�m�l�ߦ���2��X{���d.6��^�~�g�����͋�x���w��A�w��l(�#�a�1�!w
��C�!�o�c����.���ڞ}��{�ϼH�]���x;���A���
��Uz6�%�yq�H��g��v5�+��q�	����m��Q��d��W����)Ѓ���{��f��$�(H���ɵ�~Ⱦ��@������H7I->}��&��-�/��:tl���B���_��t�k�B��/���Qb��"z4{=��g.K�L��_����+7�I��������Z�t���}B�7t����n��Cֻ=gp�� ���5�!g�s�s�Q�K���7��:�_� ��!Χ=#��)����7VE��C�m9_�Og,��s�?j~�޾��m^�!�@9P�������Op�����W�������w��tv�>�Y�-��<d��������i&w��ЛÙ&=�]��l�vI�^o��ݽٽT�>�{����$�̳�r�^��e��f��g�����ąG$l���e�%⃎� ��n�z> ���Q���ƈ��x'���=��r��d���=�9=Z�H{^%z���&��F*�wh��ki��M�����/��
+x�����D?:���
+��1."��#��u���3��{�N[K�_iȕ�-ڢ-ڢoe�K���w�)�X�G�v� �?g�4�3�o����{�ڍ��{�����Y�����Z������,����/-��;X��3�����{��a��L~��cN����ҊV�_�S'����Ӟ�:n�����9�@���<�9��JQ�jq��]?,����.9�Nx~�g~�ܨ���J���j�PU��9?�R�QG���a�h4�ʻQ5����	|Ht����Z�@i�҆k6��a���t��Y��	9�e�IU�`���V���K>s���A��ED��N��tc���
+�#���HT#��T�ʩq�N��h^M��F��'�LJq�(�uT�"8rb9Iu5QG����N��q��M���!��`I���p�����fó��:.�X��u�t�I�n�:|dN�A=�4
+��9ȴG�r�F��4�}��)�C�"���z�'��~&���:�!f?DF�W��8ض1���i��#ތF2S�&��j7��ɓ3G����	��u=�8U�M����
�.#�bJIQD%\S��z�S�b=�s�Br!��0�bͅ��y���`š"vh�Y���-�k�Q�VyE5\Yc�X����"����Ď��-�ٖ�̳_5���w:��ˠ*�?
+n_{t�ܜX� k�(:���4K����[�;Ry�U*:,�Q]V���O�"/�T���d:�nG$[��6�d*y7�����Z5�=FRa�U��L�W�"I�K�֜'���+�(<� c��@����jC��b�݉D�sb����\,��t�\k�ӑ<~U����
�6U�O4����'P�?*���d�4$�IU|;j{;��H�L۵kU���*ߤʳ���u��Mu�v�<�ɵ?ݎ����Ŀv��|W��iٓt��P<;"1r��buH�bߜ�@��R3S+�M,��Hn��:�T��R�uC9��o;~�b����K$��=麳��[d���L���UCV:�njԇ��R��xb�e�ܑ����iz�ct���[��$<9JS�8MC�t�<�:hN$�βJ��Hķ����o��Jo�̛z�M<�6��E��
+��s�ʬ�5[��`����;��<m�'׌{Q�)��dL硢{�<^䟒qkCAw�ft�
���_��ͬ���[�IW��/X�b�iq���B��x�vP&�y���,�f��+?�8��Y<dq��)�/X�ҽ^�����,^���ƟA�Ǡ�o���mp������`G_�������?�����
+endstream
+endobj
+55 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 333/Subtype/Image/Filter/DCTDecode/Metadata 58 0 R/Type/XObject/Width 378/BitsPerComponent 8/Length 15746>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+���Mz"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?��NlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vl�N�4�0UPK14�
�'I|���#ɚSj���S����O�����
�Ϗ:j�\��p~�Q"H���)�V$������~vo8y�{�$&���6Jv����s��;dI79l ��ɐ�@��e7?��~����կ�HS�!�	����˖->��9;�WS�xI$�w��I�ag�6o7y�
WW��.e��������x?���$�J]���5�rI@�X_y�$#sS�wșQ�����_�6�=X��z�7,��4�C����+����#Ҽ��o�׌SP"O�b���#�@��_�5��Zh�/�����
�5)�=x��Ź��$���l��y�~�f��C���py͓������S��}��U��2�An9	DǛdd$?C�f́.͛6*�ٳb�͛6*�ٳb�͛6*�ٳb�͛6*�ٳb�͛6*�ٳb�͛6*�ٳb�͛6*�ٳb�͛6*�ٳb�͛6*��_��{��+X1��e�phc����A*�M�uM*��S�q��O4�{*
+�՞9������r�f���J�$ch����8پ漲�9��Yz�Yeڟ~[����.q�I&�jt�4�m�p���R�8G�~��,�]}�zK�wc�
+�$����OS���i��s#��վ^�o�Ė�6��v�Tldc����9�e�ț��#sɜ*F����Aei�oN�=;�”��`�߁�������C��3�������kf&��5��u�m���R�pN���f<�A�����Q��q��z/�]���E�o�lӹ�}����Zv�2�I��	��A���!���iwmm國4(x�CUe=�sɟ�ߜ����WL�]i��j�^�zn��]\Ҽ�a��\%��C)�O^,:���&>���Kn��ٳdY;6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f�H��hS��{�r'��O����lԛP"{�uF����g ����9������r*���ΣSӥ�K9m�%��QT8B��T�&mB��{A�����5�����П2��Q�I���<p̡��I5+�Z\Y�Q��	<��/=���/�S���0�D�{��J:��±G����tsA�jB�ֱ�uo�6��ПՒ�+�n�ZW��+��n/owx�T�#���C�2���#a�ص+뵞C�ܐ;
���v���[D��� ��>X^.,�ks#��!�o�P�r	^�S�����7w5f5&�|r7�$��݆<0 f�W��|��t�*��
+��Caݢ*Y��b����ݽ�4�S��g������?�m�ב��2@G���>�Hˈ�~T��JF����8�'����Z�w���ޙzu�{���)�ODyr�Tv98����ռ�����,��<%N�7�x7��j�������_�G�Ǧ�#q��ϔ�ѧy�C�\�	KUdo���#|�� �W���Λg��w\�&���f�D�#Z��O2�Vi�]��ٳ`K�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lą��
�=�Wd3ξ`'��_�}'e���+���<��sG��ڋѤO��9���4jĵsQ�}�~�E)�{�>����R�*��VZuTS���y�Q3|K�{gQ�����K�� ����Ȋe�#��r	'�C���?�eQ tE�������	��?��9T�I���`��2dV�<V�[�
�V�b~���*)�c�v¶�r&^Cb1#3*�]�D�j�O��
+�<w�_bi��%�qw�'#��D*<	5�Ǔm>����J��P~'%W�y)�
@~[e��b��|�9�^��ϼ�J�ҿ�ڦ�~)d������4���}Dcӗ/�o��^6�:|���������:䏑��|���>zv��yxٕ1�א��Ms��3���K����bt��YT���0�����S��E�24
��3f͕�vlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vlٱWfͅzλ���%�	�}�""�c#@YG]�[X�g�p�:x��C��2K}X��p��/v��[�^_\�����]��x��;���l��v���itQJ{��B�_��w�G��D�^�%w�
N��|�[J��S\��##N�0�xo�����J�4��($���:���f��L�K)�Z��${W7X�a��7�'Y˾Ed�-���1j{t���p7��
n#�)��W��qZz����&���!W�J^�&���[vm�\�k?�>k�y��[J��I㸈��c�S�b����Xh6耞��YL1Գn ��а�EA�L�����M-+��$���kuyvª�_�F��>g���ؓ���ŴB�J�u4$ҿ!���6?2ܞ�x��O̵b�e�?1��r�́�(�����8 A��}`K��W�Oʼ��G���T�G�h�0_�����3�~F�w\�3P~��>�r@�*�����0{���_�7�Zy��G�5�����7�d�����/���!y�k� �9Ϣ��x�m���H�'��̫���>��U��QӦK�k��j���/�?�����w+��R4�EÀ���w;z����={��Cͳ��J��*� �;����͛6�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f͊�6lث�f½wWm��	�QD�;��	�YHF�2���QT�`n�7^��JIݥ�����;�rىb��Ēǩ'�&�>��˗��9�|<��J�ȵ��ҸyDjKc���<������FHɊC����y����u_=F��o�:�pҮ����S��EZ�;�d/�34�=�S�=��R�ߟӈ�s�g�Ҏ�z�?��_�iZ -A���Uk�D_��
+�b��Q~/���o"$yyY�N����›Wo׃,�ʐĵy*��A����Cj1�VXHҠ���yb(��ɥ\j��W��1�U-��'! l���
w>�����m|�{
+\ZCg����a:�����������j9�������Ιmyi{��x��x�]OҤ��smpXA*HTт0j���Dun1�x����7�]H��/b;��H���M�����&��Jǰ�Џ�LVw�+�7�4�*h��Z��"�O�
,��cAݛ�za�47%0���_~a�r��WW^\3%��\$<���+QAޢ���(Z��/3�9��h���a�d��Z4�i�&󖲜�Y帅OORBy0�V���O������3��"$��7�����(���?\�(���?��ʚ����W����4�����I8�X�dz�����䙅
p��uEf,(+���e`����i��p��
+��R���>}n���̐by� �����]+O�����V���B�D���c�{򷛎��V�bA0�G&2��8Ҵ=������=$�����\Ύ�1�|Yz�u���9F��>1�a@�/��������^MGJY$��I0IJ�a�������C���r��[s���?�,��������lO�1��s�j:��1tY����_J�@��G���Ta_����?>�'X���d�e��-��l��K�ׯ��s#Xe����3i���Ò&'��@�g����Κ|~[��Q���=I��zcQ��P7���b�E1óf͊]�6lUٳf�]�6lUٳf�]�6lUٳf�]�6lUٳf�]�6lUٳ`=OR�M����hӹ?�@T4�5H4�y7�+��ܟl���5��=�U��N>��k�Zy���S��=�$����36kۣ�����J��ۯ��k�S��_����;��7,c"HȨޙ�)�x�+��D�S־��9!�}����I �=�3��*�����lӑ�������ن���ͻ��0s�X�_N\P�P�W%�$�n��cJ:}�r��d�ץGZ`�jPt�Fy3�GN�E��m���������H*���7���PO�۩ �u����o�e�v�T��n4��q�0A�5�8�<��� ���x�H�:��㐫C30
�y9�? �4�؞�Ho#�#�e�@��8#1�\�j�7���5�"��9
I��>��_�2�xȃ�׍<��W\Ɩ�b4c���6?~
+�"HЅ�7��j�̐M<V��	y�$v�\��N�9"AVd�3A�,��ˣ�1�M��9�'�j�k״;v���o�0{���H������S[ռ׬�iڕ����ӪG���WD;��٨��r�T���\����*EG\1������w��%��%�I�vdO��*�ja��Dʹ���ߡ�e�HG9���"v/�m����H��P��P:�1�*��bW|��咈[4����`�"��ocX��9�n��Y�bj0G߇,./&���p'
m|��"w���X�*+�lđ3����Sǀ�/Q����ZyLRJ�Y�nOZ�/�GaQ�rntm.%(������Y�m��.������R�`G�rS�D�� P�g�l�>IHB�p�x�I��|��W� �1�<��?�IW������~��]s�-�
,����r|3��M�z{

��Ax��t1��U���&�*e�όHl7_��
.N*��c������y����&��հ���6�̎������Ű����Z�l�}�X�q��l�zT}����������W�aT׬�L�s���ſ��/0~Y�`럗e[�	V��W?ْ:�H�6Py}�m��l+�Ϙ��5�v������0�7FF�S�dثvlٱWf͛vlٱWf͛vlٱWf͛vlٱWf͛vl�O�y��[�,�c�§���`2S��+Tՠ�"��R��#O���}�����X����br�-ĭ4��۩8i
�>����6����{ț��..
+�����M���M0aJ�f��;xDB�p�;��<�NW���q	f����!�[�/�U�1��"�p��oT��ơ!j���S��X�&M�<p�r�j"�v���ʐĥ�BnK@3��?�O�/�·w����?�W����Z~\���k��Rꜭ��u^����x�>�'6:m8��ߠ�tz�q��1J�6$up@U`lٳ%�vS�H�C)؂*^lU#��'���{��3��kXy�q������>�����`Q���&͆�yE�
��H�Wj�k�܊����z#��ё-C����~���]��A��U�0���@Ej֝�d/�-Oլ$��$�~d?VG,��[��� uy��p5ܼ@P:�)���T�l�������?�0d!���T,�M�8Z�$aY\U�϶"�뉼�$�[�,k�p�Pe)܊������:�~�KT�:��;���JZ��r/?ٛ1��ٚ$;����2�8��Ndz8Nb%������������V��&�`�Iޔ�����x���T"F�QTP���R<�M�%Ug�7gV���&���5��=FLډB[G0�h���������Q�>��<`��!ch�?���~���<�&%yim�Z�cy������+
+FN2�.)�;�l��Ú�t���3�����T�o�=a�C���y�g��Y�x��̒]6�zW�]��>U
+A��M�\t�����z}K����rW�4j��6l�7f͛vlٱWf͛vlٱWf͛vlٱWc&�x�ӸD^�q
CR��a���� ��!Z��s�K�f%A�c_��	���b�r.��U�D� �b(���;A���;�P�����'Uo���ɐ˙s���B��QC��d�����%�0�8�J�f.I9ذ���]B���[�6���;|=;��BA�)!Ȁފ���\p�NU��Os-���<|04��ր�m�}f��jw�:g�w�~(�ϬG�ⳅ�S�V���?��G]Zo��m"j[��i]z���%O�s���E��2O�?K��Mu���ym#��6l�uN͛6*�ٳb�͛4��B���8�����;�Ue����k%��(b^N�������{�MMca�Y!>����ǻ����i��u�{b�c	���������i’��)�A/H��i�T�ϧ��ShEA�Op0�ih
+���-҆�6�p��1���]�6����Y���<��cQ��s�edە�aH�\�uȿ��om���j��Zxm��%h|1y���琉���|i`����?#�/�u?��Q_�'���� ���oo�\�Q-�jx�@�L�f��#�<��\Z�Ҫ♗�۳f͒qݛ6lUٳf�]�6lUٳf�]�6lUٳf�]�6lU�Q�-���Jy;}��S�����C���2r6^�������V�v.�jI�d�#��ُ��5���q8
+������v���"�f4�{���8�OR)���Ll��`w/]���nL1��vҾ�SqdA��ϵz`�v5�2\�q1�Iq4/��/q�71ʀ��ڙWTf����ۆ1��{��D��U_5[�����?#���7���M�ǭ?�Q<Z�vG�p�q�Q�g�<����V�/�_^aߜ��h>���`fL��[��z�i��Ԧk�ԧVv���YY�"�	�F-�6m^|�ܻ6lث�f͊�6lث�����S�����E��x�������Fy��q�GM���7"�N����6���9�����������y!��j�����g��_R2�8�<�{�'��!B���12w4X�zw�?�H���wo��q��mc��!�v�����$��I�1�3Nh(q�*��R���*�x\�s����
�L�Ju�����b����\�!e��,���Ů�u��R>D7�Ν�{�7W��c@�c����v��J���t}��}�;6lِ�6lث�f͊�6lث�f͊�6lث�f��na����`��I�CT$($��'#���ܓ�x������W��v��Ϩ���:��L+2��ʲe��n<Vl�<��XԓRO|L��&�Wq\L���1�'/_��ҹ�*q"VKtcާ,�Ҕ�b%��׏�c�6� �}�ʤ\���1
\08��Kq�pĞ�L����]�Z��*�*��p{�˧�������|�m!5�����k�����-<èe��~�bz9/����Z����-��k��Dd�A�~��Y��F##������-A��|z�6lِ�;6lث�f�n�-�m����F�I�C�J�Y��������!�d��Z+A���63����A��8M�_7�j�m`�V��ݽ�����$t;��&}]zq�ݎ�����s�֡<ڔ�qq#I#��RN����-����q9�_H��1]�>�c��f8���� 8~I�JUi\��)O�r]�e��FZ��.$�Ƀ�T��P�B���r;�|$8ꦣ�9$�"�=||r5�0�GLdYE6��K�������`aר!���/Sb)ߏo�e��e8gɖ#f�d�I8�M�A���\	$�8X�';�s5Ӑ7�4���p�)�)��Ms�++�u5��	�4#��y�c�����{��P�s��U�P�u˴��$Hgg[��S!(H
+m�����gC�gy#��|>�&��S���DԆ��Zj��~�
+I�t��͆��.7�;u��]4D�F$�#��6l��vlٱWf͛vlٱWf͘����'�*�i������"���!޶���S�'���}�o0k�&�ac�x����?��@����r����2�ū����:��A��je��t��4#��C]�4��k�Ɇ���D�o�E*��lƕ�{c���w���ӷl��Hy&��қS2�{tʓ���=:�R�|@S�J1��05=2�_���9�������t×p��Z1���»��Q�"�k��r�ڍ��I���S\��74W�9�i��+�|�����?+���y꒷)�[�55>�n��}����'��:���H�P>��Ř�͛3�C�f�oo ��{���*}�`>x�P/`����M�k����wc��sm]���3K�ĻG;(����r�W��e4Q�F�0��1�a�3�zG/��h�|9�_r@]��L��/�
+H�i��P���"�n��t�$�+����]<�p(�hv�{��4;���֥(m�'�B<=XJ\N��3]z�x�c���%6�6�L'ЬR=�(&V.�M0�Ā�^�FK��5�^IOׄ��_^�	���S�u�+騤m�3@@�sK��}?�bW�(��'���ї�W(�9d�5䦸
+�X�|�qD��9u*�8u����%t(�?v����z|w�P�,���O�&�V4
+:�c�z7��Q��A�LP*(5�_��v#-�����C�hB��g�ܑY�B���`����w�*A�_,i6�Y@��Z���f��dK�k��|�Ѷ�z��6��T�������;�.mδ�lC��$~`*�͛2�+�f͊�6lث�f͊�#^c�CrӭڋZL�?d4���Z����~�-�d-��$Ԝ��P���0P>��O pC����EI�cI�����1�����QCQ��Q�Tc��@�
w�1�1�������_�T��4��|���=q�;������6���08z!�z�9%�7=qY߱��\�yW�ʥͶ
+s�C�f��b��Z���_c� K~45ļ�l�����b�=\e�����"͹�2m�[�>��$�SX/�������^B�+߶>�-�d���B��ר��V�ώP<�)�.l�7�6��i"��Ko�"Y�o�O�j?�:��1ۅ�O�+�������a��V{�����FS�(w�����--�y��*�����O9]�C�nR�eUnU'��A�aSܼ�]��cS]�r��m��s6�s�C��i�6���;
V���*�O��ާ�Kc+_�{k$#����:^��a��Ў���9��;4��qG�ܞ��7�|B{�+]�;\�p��P��|��i���\B��M2��Y"3N9H´�=�U��$\�>#�G�x��^L��	�l��d��œpW�����=F(�3+
+�~��1�>!��rxM����P�@�^�a���de�+�
�����ɸ��Mh=�U� ��8OTP>��o"�.HŪ��p%Ԡ�=0d��
+o�?�1<�nV�6�,���<=���zdC~2J�ݩ�a��1BNK�6s�so-F�Iw�3R�z��du�eyC|M(4���f�m���������\���|�r��j�w���Ǿ�2)@�Y���
+����|�Ya���mٳḟ^�ٳb�͛6*�d��m�3qD'�_0��o��u܎��قF��4�jZ����\E�{(�9�4��8���kQF���S)���O`y])֤cݩR�~�A�V�*q�JNF0�ԡ��,����N+�0�c��*2o�T�&��z��M1gM���z��U&�"ĒYk��^b����j��}8��u��-�	�B\HJ���l�@y}*g�j�.�r�sZ�m��Q�aJ��r),��/�R�"I-��Y*H��kS��&�
+ᕲUA�m�ȓM��W�3�p-�H�/P�K3l0xɌ�m_՗?����� �^J�Xy>���b���3o�^T	>��
G���F�\l�4��۹�5�}�`}��E )�f%Ӛ�ޣ"z�R�m�Z��ć�ɔ��Wۧ|,�t�h�(#l�$�X�
t<�ȵ�n�䍱/G�7R}jM�}��rikp�d�76�G\�ZzGM����3�#�\��*�`�6�ݲ�|���@�����6�0F6X�B#d,Ч����ڌ+*�S�
��7��G�o!��g<SrOA�
+
D����d�@�2���֟<����0�M�<d�]�6��6Í9��hSo�A "���C�"�@�É�q�z�����"�hNJ|Grƿ�
\5G���%kш����LHm�4<�������>��&��y6۶U�M��L���O1����/1Ćv��Yjy�On��Y��$3�fV�|r��n���?��.͛6e8N͛6*�ٲ7���V:{r&�$à�_�RR�
+���
+�V��S���A�|��|[e@;��9J��<�9�$�{bR������$��P)^�Y�ۣ��\�r=�O�i����6����Nd�r1��n]-�{P�|q6�P�q�[+lF%�$���YstaC"��HT�Ƅ��(�r1���04����
���`i�
+�F�Ş䍟o|q"���x�x怑��+����]\*�W	��
��em�ơq $��q���>���t�
+�i��{�끭B
+W��0D����&�9�BE���}Z�з���G̍��K�5O��0�;9�}F�Dq,��)\̇yd�U�6�[�N��$��A�+��B��ژecl�z.�m\���)�ᔌ
+�`I�*�:�a�cՙ�[a�2���(��\������N$Wl�ʋZo�[��ᨮN�[����^��/[]iT�uK��A����
z��au����@��#�e�˦�g&���?���c�
��>0oil�)���2�゚��&� �~G���F����dG�7j��Yr�\��bz�eE�zq�B�z�7��Ux��w���<+K���4��8�ņ�c�`��9�����ū�z����@���h)_c���.h��:�y�ZTS	u������L$�� ����LV����w}Ϸ�Y?$D�i��Z��-9�$���H�Z�k�-�����㏤��]iL0;z��c�v�$$w�A$`�P1��ވU�����1��O�t�'��T�wl�?��l���c�a�52��jQ>�G;6f�bc��Ժ�Ґ9�#��f͙.���"F�F
+�*�v�c.na���." �'!ƹq�JPU SU����FR	�L�}̒�3Zْ�ա=O��~@-zbN���W
���<��Ɇ��I��cQ�3�y@�}�Hhy��������J��h7��)���c��sm�eg�tM
+[��o����B��}�bF�9Ch��_!��WRi�3>����HI���e�!縐�o�74��:��k��%��Q�ڕ�y^۷��k��Ԟ$a-椨IW�a���ri}�oׁf��2����r�#(���֮�wq���F;�O*Z
�|���V�5Q)�=����'6��7
+*���o��]��l��7��8mc�Q�x�$wI�K���㠧Vۮ�F��V�ɦ�F�~ć�t>غ�*�~#��Im�[Y
+�k�e�J>��!2J��1��`
�����X�[[�h�Sl[��5�]s$�帡����"k�'*�'|l?�F�|����-��я�HL�
S���$�HEޝ�5����ݍN�Q�ۜ�8��ꌹMs����#�˂����QE�;���K'A�{�0�.<�)䇛M��E!���/�T��!9�HO4q��}90�[��N��[��6es��E2�h���=q���g�NL4�8��s��Qӯ�
]&|���Γ��%�d�Pߗ���B�r�D\y�Ga۶,�{���1G�i��q�.�*(-��"��(N�������:a=��GZ=r[��dڥ�$�ҹ����=��(������MH�(�G!�������+��U�q�$&�}>���C�f�C}�˾�����ɶ�pu�
+Ҹ#�ZSkZݭ�>p�������2�D���DˠI�����tAq0���ea��O�}ƹ*�UUP�(���y��D""9G9��2<�vlٰ�`:��s�H��؈n��8�:�Ǩ
+1�Ƶ�LŜ��Ō�E8c�65�I%v��ۓef��_(���,��� ���x��4p��=�6�5��F;��T'c�ɔT� ۞�����7L�)�+�7
�}~�# U�,�*�e]�\��*��}FW�AL,c���y�D� �Z�+���-I��q��ͶF�������[F�������O&�4ȓl����m:��F�ؚ,�!?,8)ê�N�d'��V�3�KVeR:�<qf��w�I��9�8��c�Q�'в�T$����>	V?�
Cƀ!;�/Kf�٣bKm�#�v߈�N@F`��!��䴦������1�9�E6�›�$2����}�"\�B@�[�,1�������$}�?/��:|&`���a�V�Tm��$�&��j��B�����L[�ľ�i\^�698�h��{�� ]��p��ݷnKؑ�
$�S�gS�#�"�Qe�K�Khu;f�U��G]�>�ͧ\�2BJ�|;`�VY:w\��rO��p�u��ž�JP�!�&'���^��ҵ��Ħ�E�O�h��[��L�c�c�
Ŵh�*|O�f��N1�8�K�3J���8����ͺ{a�AR)�^#�[t}��p	Jr�MF#nh�'{d�����a�`�za}���C�*V��� m{�l��'�f�($+PaE��*+�d�����SU�t��?/+֮ғ]
+C^�/�G��O����fթ!���v���glDX�Q�UP���fn�
3�u���|8�[͛6e�N͛6*�@��I�1�'��yd�j�f�s����
+��|k�=1�x��~���h:�D�sn�D)���Q��+�ƅ;7X/��,���k��Pr�:cKh3r�l�[lIڛ�f�ӿl�O�s�S#�h7�p�J�]��q9�-����&bą����"͕)��3Ӯ�-�
�\k�.B��\�Y�S���F�
+��Y�j��%�q��G�S�����Wo��yK�����iL�1�i���c+(Af��Q�@:Lo�&P-m��6j��bM6ƅ b�P"�6\�j��`�M�"�8xTH�M��P�Y�d"�x��K#!(��U��o
���cVV����D��2%��5��C�9j�J�Ml�֠c��LO��0���ދe�12V���4�ej��'=�@Z�Hj�8���L��FS�#}�"�y�t�AJ���t��q#^��1�����'oz���\�*;*Eh��t�L\��|UW~��u����㌄��WowfY+M�r�_�OqBFK�R$�[j���h{��g���ӈK W.���=���RLG5�wAT��==��]��4�\3�>�u�1�����՚�8&��Lqc��6O���'D�D���|i<�Ni�f����6��ٳj����H�O2�ٳaC�f͊��4$�8��R2��4��%e��Pi�!閭ɇ�$�\|
+K~9���D��)�*C^�j���ˣȵaԦ1�Z���sN�(�*98�eb�,��t2�QZWa�b#P0/���SuC���#{�rE[�~�I@�X�$^u9.Z��@�V>[���ePe���)�0ݔNʁ��B+ᔲr�2�A�m�Ƃ��q�x���Em7�
+I+���yPo���m��A5ʼnTp��o��)�\g!Z��Q�\|D�(��#z��倭N  �ScV�p
Zf-V�HE��%If�h�D����1P�r�#M���m(V��%�S��h�J<3��SM���	����m���oQ��fi�`'���� h-���d�N�(�:�
+�ฯ��X4��#ʻ�".�W@v8ca��P�]t	�� eH׏p2xEט��#�)�����J��I���`V���4�]Q�N�i\/��Z�
�ꩭp����s�6J,d��*��;w���,�#�3�*��'��d�C���n��X�U�b�+���`z��}4�1��C�a�SQ�5�u*���x����;�����U�ynKyt�@�@7�+�w�yK��D�I"Ʒ7�|wN7��
+I��Q�x4���~�_�՜ޘ��ٳf��gf͛vlٱW�1��ĞE����@���.�ٯ�v����S� A��@����C]�&fF t8ő��HKrS�c��u��PM�A=�L���\���,�!�ܘ
+٧]��.��r�{�/B*p��V��VM��أR�:c�MN��<�%����¥HƔ��ľ�CМn�c�垿�˹�cޜ�d��r�z�\%[�iC�.��F�23�cꃮ�u�q%�2l2%)�����։��RvǤ�Z��Đf䍱D�,v��!fۦ���p��� ��B��',���7�%����$d3o���H�������̧�c~�"A��O�;�cQ|�N�k\��(K�NR��P�#ɖ2b�6�u=�<Src\,k�d�폶��Z�p�Fb��k5f_�iH��Q��H��9o{N��팦�iyD��o�kƻ%�SP���f�Kl;�጖����ͩW��]>�Mhq-/G��d�Y�8&�����(��k�~J�3��^�H���9�ˑ���/��R��q�j���s`�I#pJ��0��f���՘���9(aY_�S�s�h^L��@m6�}n������E0�2q�c��<���h���%�<��yr5u����Ā<����F�E0�6l��61$�;�6lث�f͊�6lث�f͊��ՙjq���n}��+M�[d��UF4�EMq��|����n�Jާ	�r�Q� ��щ��w�ȓ͜@�W
��v30�7������t߮V{���U��:�d�\����oN���+��&�{!9��Ѻe���
C��N�ڙi�p���":3�6^�4���&�7# �w˹���˃��9(�R5ȭ����L]%�q(��)LN^\�����O"�g�h�qʤLJ:�^���Ƕ �qHrի�.�q8�q�׾g�l"�{��4�qf�(��e巎T��-���=�_֢��hܤ�q�)�������|��̠PX-���"����r錟��<vȞi�9Z���ūצ]ܹڽq��b�o���
r`Gɉ����x���*.�$����7��n|�/�%C� �}����d��S����]��k��^�̫_����	�:���[�� w�Tpk�T%���Ug@k�W,�8K���5e�8�|�b>^���W�BLa6V�w�⩷���?u3��w��z3-��-Jq���D�b��98͙X�?I�>�p��<�� =�,�m�X-�X�AEDP���ٲևf͛vlٱWf͛vlٱWf͛vlٱW��
+endstream
+endobj
+58 0 obj
+<</Subtype/XML/Length 9871/Type/Metadata>>stream
+<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.2-c003 61.141987, 2011/02/22-12:03:51        ">
+ <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+  <rdf:Description rdf:about=""
+    xmlns:xmp="http://ns.adobe.com/xap/1.0/"
+    xmlns:dc="http://purl.org/dc/elements/1.1/"
+    xmlns:aux="http://ns.adobe.com/exif/1.0/aux/"
+    xmlns:crs="http://ns.adobe.com/camera-raw-settings/1.0/"
+    xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/"
+    xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/"
+    xmlns:stEvt="http://ns.adobe.com/xap/1.0/sType/ResourceEvent#"
+    xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#"
+    xmlns:tiff="http://ns.adobe.com/tiff/1.0/"
+    xmlns:exif="http://ns.adobe.com/exif/1.0/"
+   xmp:ModifyDate="2013-06-18T11:29:56+02:00"
+   xmp:CreateDate="2012-05-11T13:11:56+02:00"
+   xmp:CreatorTool="Adobe Photoshop CS5.1 Windows"
+   xmp:Rating="0"
+   xmp:MetadataDate="2013-06-18T11:29:56+02:00"
+   dc:format="application/vnd.adobe.photoshop"
+   aux:SerialNumber="G28503826"
+   aux:LensInfo="14/1 42/1 35/10 56/10"
+   aux:Lens="OLYMPUS 14-42mm Lens"
+   aux:LensSerialNumber="212481007"
+   aux:FlashCompensation="0/1"
+   crs:RawFileName="P5112849.ORF"
+   crs:Version="6.0"
+   crs:ProcessVersion="5.7"
+   crs:WhiteBalance="Custom"
+   crs:Temperature="4300"
+   crs:Tint="-2"
+   crs:Exposure="+1.85"
+   crs:Shadows="2"
+   crs:Brightness="0"
+   crs:Contrast="+38"
+   crs:Saturation="0"
+   crs:Sharpness="25"
+   crs:LuminanceSmoothing="0"
+   crs:ColorNoiseReduction="25"
+   crs:ChromaticAberrationR="0"
+   crs:ChromaticAberrationB="0"
+   crs:VignetteAmount="0"
+   crs:ShadowTint="0"
+   crs:RedHue="0"
+   crs:RedSaturation="0"
+   crs:GreenHue="0"
+   crs:GreenSaturation="0"
+   crs:BlueHue="0"
+   crs:BlueSaturation="0"
+   crs:FillLight="27"
+   crs:Vibrance="0"
+   crs:HighlightRecovery="14"
+   crs:Clarity="0"
+   crs:Defringe="0"
+   crs:HueAdjustmentRed="0"
+   crs:HueAdjustmentOrange="0"
+   crs:HueAdjustmentYellow="0"
+   crs:HueAdjustmentGreen="0"
+   crs:HueAdjustmentAqua="0"
+   crs:HueAdjustmentBlue="0"
+   crs:HueAdjustmentPurple="0"
+   crs:HueAdjustmentMagenta="0"
+   crs:SaturationAdjustmentRed="0"
+   crs:SaturationAdjustmentOrange="0"
+   crs:SaturationAdjustmentYellow="0"
+   crs:SaturationAdjustmentGreen="0"
+   crs:SaturationAdjustmentAqua="0"
+   crs:SaturationAdjustmentBlue="0"
+   crs:SaturationAdjustmentPurple="0"
+   crs:SaturationAdjustmentMagenta="0"
+   crs:LuminanceAdjustmentRed="0"
+   crs:LuminanceAdjustmentOrange="0"
+   crs:LuminanceAdjustmentYellow="0"
+   crs:LuminanceAdjustmentGreen="0"
+   crs:LuminanceAdjustmentAqua="0"
+   crs:LuminanceAdjustmentBlue="0"
+   crs:LuminanceAdjustmentPurple="0"
+   crs:LuminanceAdjustmentMagenta="0"
+   crs:SplitToningShadowHue="0"
+   crs:SplitToningShadowSaturation="0"
+   crs:SplitToningHighlightHue="0"
+   crs:SplitToningHighlightSaturation="0"
+   crs:SplitToningBalance="0"
+   crs:ParametricShadows="0"
+   crs:ParametricDarks="0"
+   crs:ParametricLights="0"
+   crs:ParametricHighlights="0"
+   crs:ParametricShadowSplit="25"
+   crs:ParametricMidtoneSplit="50"
+   crs:ParametricHighlightSplit="75"
+   crs:SharpenRadius="+1.0"
+   crs:SharpenDetail="25"
+   crs:SharpenEdgeMasking="0"
+   crs:PostCropVignetteAmount="0"
+   crs:GrainAmount="0"
+   crs:ColorNoiseReductionDetail="50"
+   crs:ConvertToGrayscale="False"
+   crs:ToneCurveName="Medium Contrast"
+   crs:CameraProfile="Adobe Standard"
+   crs:CameraProfileDigest="5E391BAF6BB6B4EF9370AABD6DEAE372"
+   crs:HasSettings="True"
+   crs:HasCrop="False"
+   crs:AlreadyApplied="True"
+   photoshop:DateCreated="2012-05-11"
+   photoshop:LegacyIPTCDigest="25C4CEBD4B9063620B578F2F8B72F91F"
+   photoshop:ColorMode="4"
+   xmpMM:InstanceID="xmp.iid:F922A738F8D7E211A771F3AEF7954E8B"
+   xmpMM:DocumentID="xmp.did:F77F1174072068119109A0681F50ADEC"
+   xmpMM:OriginalDocumentID="xmp.did:F77F1174072068119109A0681F50ADEC"
+   tiff:ImageWidth="2467"
+   tiff:ImageLength="2168"
+   tiff:PhotometricInterpretation="2"
+   tiff:Orientation="1"
+   tiff:SamplesPerPixel="3"
+   tiff:XResolution="3500000/10000"
+   tiff:YResolution="3500000/10000"
+   tiff:ResolutionUnit="2"
+   tiff:Make="OLYMPUS IMAGING CORP."
+   tiff:Model="E-520"
+   exif:ExifVersion="0221"
+   exif:ColorSpace="65535"
+   exif:PixelXDimension="1102"
+   exif:PixelYDimension="969"
+   exif:DateTimeOriginal="2012-05-11T13:11:56"
+   exif:DateTimeDigitized="2012-05-11T13:11:56"
+   exif:ExposureTime="1/80"
+   exif:FNumber="52/10"
+   exif:ExposureProgram="5"
+   exif:ShutterSpeedValue="6321928/1000000"
+   exif:ApertureValue="4757023/1000000"
+   exif:ExposureBiasValue="0/10"
+   exif:MaxApertureValue="925/256"
+   exif:MeteringMode="5"
+   exif:LightSource="0"
+   exif:FocalLength="33/1"
+   exif:FileSource="3"
+   exif:CustomRendered="0"
+   exif:ExposureMode="0"
+   exif:WhiteBalance="0"
+   exif:DigitalZoomRatio="100/100"
+   exif:SceneCaptureType="0"
+   exif:GainControl="1"
+   exif:Contrast="0"
+   exif:Saturation="0"
+   exif:Sharpness="0">
+   <dc:description>
+    <rdf:Alt>
+     <rdf:li xml:lang="x-default">OLYMPUS DIGITAL CAMERA</rdf:li>
+    </rdf:Alt>
+   </dc:description>
+   <crs:ToneCurve>
+    <rdf:Seq>
+     <rdf:li>0, 0</rdf:li>
+     <rdf:li>32, 22</rdf:li>
+     <rdf:li>64, 56</rdf:li>
+     <rdf:li>128, 128</rdf:li>
+     <rdf:li>192, 196</rdf:li>
+     <rdf:li>255, 255</rdf:li>
+    </rdf:Seq>
+   </crs:ToneCurve>
+   <xmpMM:History>
+    <rdf:Seq>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:F77F1174072068119109A0681F50ADEC"
+      stEvt:when="2012-06-13T10:52:54+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5 Macintosh"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="converted"
+      stEvt:parameters="from image/tiff to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="derived"
+      stEvt:parameters="converted from image/tiff to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:F87F1174072068119109A0681F50ADEC"
+      stEvt:when="2012-06-13T10:52:54+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5 Macintosh"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:018011740720681192B08A1867381310"
+      stEvt:when="2012-08-10T09:15:58+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5 Macintosh"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:078011740720681188C6F490B9B78EC7"
+      stEvt:when="2012-08-30T16:29:38+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5 Macintosh"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:5BC63A3BCBBEE211A2D582B7FEA405ED"
+      stEvt:when="2013-05-17T10:24:45+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS4 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:62A2F2C7F0BEE211AA259042406C5405"
+      stEvt:when="2013-05-17T14:53:33+02:00"
+      stEvt:softwareAgent="Adobe Photoshop Elements 10.0 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="converted"
+      stEvt:parameters="from application/vnd.adobe.photoshop to image/tiff"/>
+     <rdf:li
+      stEvt:action="derived"
+      stEvt:parameters="converted from application/vnd.adobe.photoshop to image/tiff"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:63A2F2C7F0BEE211AA259042406C5405"
+      stEvt:when="2013-05-17T14:53:33+02:00"
+      stEvt:softwareAgent="Adobe Photoshop Elements 10.0 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:64A2F2C7F0BEE211AA259042406C5405"
+      stEvt:when="2013-05-17T14:54+02:00"
+      stEvt:softwareAgent="Adobe Photoshop Elements 10.0 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:65A2F2C7F0BEE211AA259042406C5405"
+      stEvt:when="2013-05-17T14:54:56+02:00"
+      stEvt:softwareAgent="Adobe Photoshop Elements 10.0 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="converted"
+      stEvt:parameters="from image/tiff to image/jpeg"/>
+     <rdf:li
+      stEvt:action="derived"
+      stEvt:parameters="converted from image/tiff to image/jpeg"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:66A2F2C7F0BEE211AA259042406C5405"
+      stEvt:when="2013-05-17T14:54:56+02:00"
+      stEvt:softwareAgent="Adobe Photoshop Elements 10.0 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:F822A738F8D7E211A771F3AEF7954E8B"
+      stEvt:when="2013-06-18T11:29:56+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="converted"
+      stEvt:parameters="from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="derived"
+      stEvt:parameters="converted from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:F922A738F8D7E211A771F3AEF7954E8B"
+      stEvt:when="2013-06-18T11:29:56+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+    </rdf:Seq>
+   </xmpMM:History>
+   <xmpMM:DerivedFrom
+    stRef:instanceID="xmp.iid:F822A738F8D7E211A771F3AEF7954E8B"
+    stRef:documentID="xmp.did:F77F1174072068119109A0681F50ADEC"
+    stRef:originalDocumentID="xmp.did:F77F1174072068119109A0681F50ADEC"/>
+   <tiff:BitsPerSample>
+    <rdf:Seq>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+    </rdf:Seq>
+   </tiff:BitsPerSample>
+   <exif:ISOSpeedRatings>
+    <rdf:Seq>
+     <rdf:li>200</rdf:li>
+    </rdf:Seq>
+   </exif:ISOSpeedRatings>
+   <exif:Flash
+    exif:Fired="False"
+    exif:Return="0"
+    exif:Mode="3"
+    exif:Function="False"
+    exif:RedEyeMode="False"/>
+  </rdf:Description>
+ </rdf:RDF>
+</x:xmpmeta>
+
+endstream
+endobj
+53 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 206/Subtype/Image/Filter/DCTDecode/Type/XObject/Width 250/BitsPerComponent 8/Length 18115>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+�������"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?���V��wm�U��A͕j>Q�l9t�s�i��WM�S�y"��]��
>Xʠ4����̽���3���ھ�pm5���A)��'��g`�0�p�Z��ϧߙ���MR����k7���d!v�"�&]6�ɖ�]h�RW� ����,Kw�Aٽ��u�L5򯖵�0Ks,KX��bS��X����ʳBD�v�(s�M/���?'��H�aS,�o 
�>��=K�_?y3�3H�[jfy,H�J�v��PzP���!γڻ[���5jc�x��ӾU�֡a:]�<���!*�GuaBS,b[˗���N��|��y�H��]�bi�%$�̀��~A'��5�CZ�M�^6��J/�j���ѐ_�OϿ8����@ofhH���,d}�0�"�ׯ����;ykʓ5�����^O���.��07��Ê||�@��O}P�$�)��M����;�w�y���_����-�ܺ�u�ޚ��}#�^4(��B���
ύ1
"J��o�)�'�GS��F{�u���㸶�q��/oV|j�+�c�G���^S�j���S�&�宝�^K��CP!�($Bz����>�y�͚U���~��YiV�:8+�Y�*����z������Yզ�J��O�>X�b�%�»�$l;dT����,N�=�]V����-�����{����|�e}ao,���I�`	͊��?	
�F�m��_��HbO��0�
I��f��>�W�_��ё�G��� D��xĮ��;�8����N���T�d�}A��x۟��,JH`��9M���ɤ�G�jzeŽ��4k+�PYhH?�Y5O5�K��@���)�goz����/N5Q�;��Z��-v�iR�V����SuQ����|��1<����3���(����#�<˫�wP�W��X�3O����=+�i��9�k��� ��EK��	�u�ߧ�<�$ 1h��{7�Z�U���ֹt�s����f���t�H�:8����� �������<�V�v:���ұ���n�����~O�ߖ��`��
���V[v M�d�#l�P0����d�fȲvlٱWf͛vlٱWf͛vlٱWÞz����iɦ�_^�I��ؼ�=ļ�)'ԍ8_v8M�O,���țQ��-l!If��/²H�F� v�;�^�Si�a�B���p��^`���z͜qLvY�g_R	�ӕ*9/%�+��^.�{��j�L�y��y!���������H'	�(1�Q��T��b�� ����I-LH2c��g�G�׶�
+�}K��ޡzܥ�������v,�z���ron�	
+"�,K�$�
+�*BՏV5's�D-�o�lI�v������rj�Z=��Gd�ȑ���Ҫ�,��qۙJ�ʧ'�M��o��K̯h'ִ����)H�O��$����@w=�s_/��j����P�$7Oc*YzoDž�#vb	*�?Nӣ����M���Ϋ�٭Z�{����;���Zi�i��:�wL}0Mh��b~y���6����I�b�i��;�C�l>�꺶����s�(
+���zVG�P}��t@��>�t���v�ͻ7�J�!�Dl�71�ve*����
+�~Zk7~G򼺢\�^�M/��4甌eF��R6�|UQ��l.�)��ݤX��6�0$0yF˸���`Ҥ�o崻�]>�1���%Kʥ����2zA�v�j�G~����L�}����R�����A0��V>�ܘ�oKc�#��gg}w��#�H�ү�
+�x�/u���`c}ԊS�m�E� �F�@�9��J��a�P�<�d�UQ<�,�% �z�u�s�Կ�{۵�V⊧�0�.�?.<�������#��k�+�������׮ԧ��~�����e�p�n���?��Ǖk`�uf��A�2-i�M�b7�A�����)����u�e=��6~a�~,��{o�$����ݳ.>'��s��u�}2kҌ{��ʚH�XL7b�W-�(�� ҾǶ��3��b�2i�?�fV<�Xď����c��rf~S�����:�1[B�	%4��w�G寖<��q��-�94�<g��e�޹�-�.�r��.@�R~I�����:4n�5��z�DEuo둖Re��a ��@�G�;����%�ҵ(�֘q�FU���6�g[�X��f��H�GRX��8o瞳�mo��Z�hΦe� ,�I�h9���n��K9���Yo��C�W�������Q�/��u���6G���/y�O�$�@��ނX��K^��$9n͛6*�ٳb�͛6*�ٳb�Λ%
k럄;1E��$��]G�R0����5+w���D�
+Q�񪍨H�\�~Ry.��^s�۴��O�H9���-(|��.�Z��9�C�3<���]]NP��۪�n�;a��"���@qNT�N�gw���6�b�j>T��QT��F J�ވ�<����kU��v���x/VGL��'�.E����w�y��=������n@mQZ�'���֯[S�T��:��8ɕ"�rfpzT����좴�o�^���KH��5����Ip�c^o���Aw�Y�Rɹ%��ߎ(��]�������ЭG+��D�,����Z|�Z����5��]���v=91$
+�v�?����Z֡6�U/o9��hb9m�DK�n��Dt�����<��|�a�Dx!�
+�V����&�rc����6��Y~p��g/�i���$��D�y��(��Q�z��vd��\�������h�x&Eht���z�࣭Tw��e���S���,�ܻ4�|t���r��{e:|���*�wՖ\�&;H��l��u��|�WL�]KЂ$���@Y�U��� ��xd�%5 �ay����j�]:���$.�ez՘m��_"�Y��oѵ-r�Y>8����Ԅ.���f@����˂6"7$�9ggn#ϣ�?)u����|2z�d:�
+�9'�5?J�zǗ��SGMy�?U3�ꈪ>���΋��1��b�6��#��q�����&<B��L��&�bj�T�t�M�����K-]#oS�w�gZR+C���QI���b5p�i���#��,�� ��.�-Kq��B
p��n/nx��nI�S��`7wI
+��XV�bzS�B�ҧ������r6;��8J8��\���s���f�s��6��S,6o2��m�
+�9�O0~SXM����(Ȭ����r�
+uF#<�g�GX�Ϧ[�4%|������������m��O�\�|T=V�1]�Dw��Z��S:E�)�/�E=D���2��|����V��[��C*Mq_�Ý�g2����ˬ�j�lZ[��W�B]�?�&����X�T���w8g�Q�ۿް�ș=���$���[+O1y:���#<h�O*W�"���J����7��~A���O�$pFa�$_�>�>X��?1��@��:�)��ؐ�`x�g
������<꺕�V&n�q$(�"=�1����e��q�����|rG*,�0tp
+���B��(~[~p�^Y,cs���n����&�Nz7��~�ߞ,���\�*��k%h�k�{�$u��v,�6l��ٳb�͛6*���b.�#�y�$���D�Q�����4���O�{o�E����.畔���#Rp��'8_䯒��<���Ϧ鬗��qVc=�x™�9�M��:5��)Ү#��Q���d���^E{�'j��1qD��!1�_��C�|ܝ��� @< ˄��o{�����j���\�v�%�Kם��JL?��ߚ6F��ћ-VH��
+�n
�h��~��mJb���.����T����8��/I�0ݐ@�QZ�Ҵ��+�Zf�6�4��n#
+�P�v�B٠��ڝk��|� �r���r���y�?&(jt��`�]��8xE���o��g�>j�5��Kh�IoqV9�=GCUp|r?���i���VտyzD� v����~ڼ���-Z�������/�]�����3��ޮ�s�ޭ5L2HL`�Xc��>���#:�1$�
+��N��
+��^�oV�v�rY'���7E�Wԭ�Kk��.��y������o�Ɵ��\\��Zi�R�UL�"j�*�����<�s��Z��ɧ��� ;��RH�4 �=E23�1�@t�c3���󆱫�V+D�TW��KH�P�q�
+���	�X!a��sJ���1A7�Tq�@'����˙8�> �O�bz�ɏ�I(�͒Ąr-R�c�nF��M�0n<����j[�
+u��ѭ%1Iqh��a:҂_\0v�]���ڙ��i7;��ՐmwR����Ҵ���kA��,,�VG�c�l�v �lV㗛Ю1��.5X�-#��_F�+��ly3�-�gX����Ѽ��ۍB��{-\�0څ�7��z�<����0�6�e�-,��GS������ס���[4
+��GB7��*�I�i��F u�S=G�����5��|1i��)�+Ǔ��
+����g�u��oP���R���}]с.�T_4����5��i
��j�CY�K�_ݞ*��)�q�O��k�}�C$W��nWԋ�7��H]�96��Id&U��=�Ռ�F1�O^�%�1��$��������+4iԂ�
+
+�l3�ץ�s�_B�}0�����U򮍩�z��-�oQZH}���S�9R��`)Eq���t�Q�����aQ�Z|�Ry��-�V����?-�ZH`��!eMH,���!���Z�ƹ��j@qK��^ q
+d=�(2e�&_yW�?��I�D�h�i"bB��LGϷ^V׭-t��+h����@O!���Y�҆�v���D��K�~���Z�nc��j\UG�v���cC��6��ݪͧ�BI����2�>��BVr���ɟ��_�d~p4h�R�*���2�_#m�Q�-�玃��+��^��?su��cbrq�=��q̳\�
@z��j��~l׵{D�/�e��(KM�%W�A��7O���( RU�-}�xr'sKǿ1�>�������$[��RP�Oڡ�ɗ��+u��e�/Z3	��1�R�dO�:t�&���k@e
+x�@~������_땂�֌*�W~!�Ҹ1N@��9��܁�|�S<��������-'�!m�	��)���_�����vh�h�X�:8��A�Fx)ݤ���~�ENt_�����1�%��Hot����f$��z���22����j�O�|�\����<Z}cE�U��k%h�k�{��E��;6lث������ǒF������$���P�Z�U�V��Ύ��47�l�{]雙%B	*�9�?ol���^�7��2��	VRb7�I�]~~Zj�
7ͮ�����A��P�~��r����cD��Mخ��͘ LH�r�R�<ǬGm��u7�C<o?���9Uڵ=����?��j����w[[8�������wX�;���g���N�^���)��>�RC�-�Q��\�������E�r�X��#*�%~|~��O��A�)'!���EFʮ�}\�ݣ�����#G%ّ��ܻ�sζ�V:N� 7�0'�B)��G���+Ѭ~���v�<�o�a�p����_���,��$�����!���OH��T�1]�JS6�œ#���< Duz���\W�i�����~����q-��=�`8�F܁U������O9�y��J��Ԍ�D��1�T�?v�Th�v����+�]n*������2�$
+ ���GW�.,j�Y)򭶅�-R�&����Wzn��[,��(��%PQ��.����/�V�/~�S<R��4?.�O��|�u6�]��hVh���e��DAǐ�����#s�ȧ�eC
+S��?ҙ4󽟟d����u��j+��	x�F�"��vZ!P)�s�V1Z����#Pw�8���8��ˈ�bZf.t=��W7���"���w��>�r%i�]��n�E���Ո�i@Zd����ZϘ`�ŚzZuY��&Z��Q�2k�ˈ�;ɴ�$�h�fX� n,cBN��"j�f8J7b뻛���}��|�g����5:7
+�|Ek�ŃV����������L"��;�$���ǵ>Y��-~Z�拋}:�]�1�(Ie��5#��c�Ƕ"V�<�^���41� jR�9	���;��?5�N�q�i72[��A֜�8�C�dv�U��5YuBg��y�J��f؟�q=�$�j%��s�1��'�=o��/
+�Za��1V��1�ASJ��e��P)�ٳ�mN�Z��ڭd���N�o��q�T���Q��$3ZOp�%��_ܢF�s�푯/�i�\5��iq#�ق<���r5���k󆥮i�[1�oaa�-"E�6 ���-�j�rc6L2������^�ұ?Z���b��\��"��������]V��m�+I%
+\��P��Ub?.m[�Z]޽y"�"��A��!T����� 8��H�#�������s�ȥGJ��\j�ť5���z����ѝ�̾W�����̐X�Z��C�Gj��eR�����t�g2�<��%ݚ���K�V�)#��H�;��Z���c�H�8�d�N�6"���Z��u����7�i��X���@�����$z��cv�x��I��\{Wޙ^Q����˭G��"����J�R���Lg�;e�,yr{���Ι�Y�+�
֝z¼�<�R�s�~g�����$����B�ҡw<kڧּ��Z�|��ko>�Y.o�������yS�V�l��Yt�`BRK�T<�	��=p�#�F�alg>/LG>�}��8��Kĺ�u8'6��PJ�5Iڼ�2��4��Ơ�/����
=6�vr���:�Zi%��.Y�O��v�

0-�wV�5�”u4 ֿ�1��z�}rK��#k�y[]���o�X7	 p�E@��=���4�y�@��,�0��֥$]�Nx��6=3����7j�ޣ�;��."Rv��F��T����Y�fʛ_�V�NS�(v-�|i\>�K���[W��I�A#n��m?(����ku�m
+�
+�i�
+x���xÕj�.4jbZ
ǒ4xn�������о��B(b_U�%~2x�~ܞ��|:'{��fH�c�U���.��4�}`pNi=�e��R7��ܪ$R��F�����z���O}+Vy��ط��d����	������!%���*
+�S�5)��;x��6VR{�Ӓ�35�Sd����տ��gf�6�;��^\�n<��[%ݶ�0so8/��\-*6�|F�(�4�mp�F�Y�o��7��t?ɿ6��4�Au=.�T�y#�E" �g*�2�D?m��Dp��7�L�to;�廽r��M��q�,ً�o$�ܽ��q��0���{'���f��nB�oNQ	y�@a�?N'wusr�sʼ�<�P#U��I�/��Z��O
+Ϊ�"�@VVjt�@o�B�͏��d������1y^8�ӑc�/eUoR��T��4,������gP��1��&1�4~��trv��%>e����^��kr���cƠ)�>ʨ={�	m�s˱Os�\�ܡ�,�����´�׮F�bDh^�;�I�����}�N�/����E��#G}�K�������g�����W�$1�����f%��(��,Eē�!3���I}��w
酌bd��X�� H
�m���^[M\[_ܴ/i��J�Q�T��}9T�q�rnǨ���`�ʲ����ވDz�B���>��
A�ۦsH��/1k0X�BZ�Q�C�4$��}��6昷��
�&�o;5�2㔚�o��'z�����"<���y��]ӧ��K�QR�{��^y(!I?ey~;��+��|SUq��ubh�o�,���/U���m��~���B:T���Sn�rʼn�or�_P��|"D܏�=����.�#Q��X���6�� ZIAPH 좬~Y�+=2�Z��[[w��Ʒw"��+Ev5�:�8┉ �]W$c��_I���������=����#�kRp��F�{�b2���|�<����,�y.ח�Ss��A��
+����=�q�I�Fcyz�������2
A�9zc���k
+�K��HEF��p+�1���Ε���6����%���嵲����+ph�)��U{�a4��Y��kİ�N؄z���Z>��H�H�ʕ�PsQ��+#�ݱ��x5��@�g���ͤ�|��iW�~��٤����a^����
��!�u��K�/R:��mBF>�NJA�TEc��S��%-�R��9�n��H�(�t z�ȀF��������xӣ����"Ш�mx8It���%����H�w��c�c�w�P�~�7���O#~l[�����ay�4���(e��:���W�9!�����/����g��c4K��"�d8dWl�ߐ��:E�����l�]��h`^�[�n6o�?�#����e��
�����<D1����ߐ�i��	\�#�u>����cDw��=ϝ|��k�ֱhv��-�7.$�)⯾3�k��y���K+~�ZڗX��-��K7�9��S���[i���F�~ف��r+}"�:�����NQ��5��ʾl�J֤�։ k��P��'v=��?͞M���$$������:|�ξI�����gmi��RƲȠ�
+��s�V����_
+ƪ�ڂ���V��\=:�b��]Ǔ�m��V:T�({�i���2m�w�iO�V~O�k�=mG���yGWR��4?��k���A�t�8�HЊ�gm��E�����q�޺�.����n�
����˄̢o�Zxd*�_���ٳd[��W��x�L�*�n.Ā]�7����,�����K� <�%��z|�+����ăS��)_���ҽ��_qx�D\�VDuU4'z����b�)��}�w�\/b´�*K�iO+QT|H��1��Ot;�0�7��{;;�e����^�Pƍ�=rg�w�P���E��Y����on4�F�.�M@UoL���9��5���i�5� C�U(�g�@HT���'����ĖV����#E	�c�Y$-���%,�`���z7��Tw�J�Oȏ�Mi�Il��-G�n�(�Ӫ�
+������J("��;��_X�͉�"�A6��N��럙�q���u厁�*�F�E�, +{�\����f���GiCVv*�z��� 8���p��7����8�$�G���[��
��F�
+�5�3P��-��Q����������/J4Ձ}[�'��T
+E�(ň������?/��[˚~�mB�P$l��ޕw��dĜ����^k��O���$����Gxԑx�-��(���"y�݀��Ci��Fr^���,Ƕ��Y��CJ��
N���H��'_�qP�!}�a�G9oI7bj**A;P~��j�W+�fW��� �&ފ��;
+
+fF�w4mty�b�aԧ-��!��lM>����Ь��/������.^��6e�]ʲ��@>Y�+C��WKy���{�(׮�"F�s�k�;a���cɺ�M�����b�$�T�P�O���eUV,���n�����K��
ST�򍼌F���S�g@��i�9�no$��K{f��#PQ�0��֦��r;�8a���C\�n���A-��E�BĈ���H���z��%��ɢYm���P�C�Q���KO��ʾ��d:"6�"U�BD�B��.�NX�����;“+F����^�h�u��2�,x�A�x��\3d�1�E�\Oo���2X[]��P��/RTT���f��+^ئ��r�ɫ�Q��-����M���@��hN���/he��p�3��9�����'�G�;Y�)^yB��o��-�H#s�ٳ�E(9�O�\���VO4���p��#�HE�܁�t�\�^��wq�;������iⓔ�ڹP��!~���9r,fFW��+�����Ns�!=�c�l�2��G������G�uo-��i."k+x�ٶt(���|�?�'� �G�/��nV�ۚ��*��욧�w�]
�i��Kg���4��M:H�l�ﵖ��Rm������[�JN<�$�`�����⁡D��jm���Wɞ[�|ķwm���1��� ���be��W
+?y���0��5�N�+$�Kn�z�ܻĄ}�5�i���"���BTu��(�4�+���dt��w�q��I����k��[=Bw
^r4��Î�<�N���o�QK1=�O�q��&yF�Kb���zz�*^�#[�O���)�^lA�*��})���=7��n^����OQ��_F��ԥ���B�p�AR<> �s�V��~]��4<��F�Z�zU��bs�ߒ^]����3�;��#��E�W��Ѵ�ʧ�����=+���nx����ٳb�筅�W�`� ��B�@�vVm��8��>�+H��7	?�n�8Ae�h���ڵ�֗v��h'F�Dn�e`�]/]hxCrX�O�����vo�d㘍��a, �����'~\�~z���������h����a{��C2A/�#4�Ç�'N���5��mյ'��摀O��
+���:���<�|�7�i�i�,S��0_���Ґ1Q��߽r0�>�u�H�
Z�6�����f�n����r�C��$e������#��ߓz�K��vi�$[X�@)!����Vo9�w�:D�D��~�l;���<��]B���Wr��H����rb���$WcJ�K���
+Ks��{d�Ô��|Z�HY�ݝ�
+�sol��Ҡ-*+��?��5�:K}Z��w���~ڼb���w&[�k�֬�D�\��I=6���@���@cL=�ڤ1�Ŧx�CN �5@ R����v:1�(�}3�i��9
k��o�y�3��"��&)Y�:
՘�
+
+l�?�^l���p�uy�)��q��aO�%�b
+��O�x��4)�H�>��hZ>��I�-��Wr������~�dy�E���޻���C��<��Op�7�L!�c��0.MH#oj�W��7kGG��.�^FX�|B�~�54�:cL�T��+�Z?Rh�V�(�ά�q;u���2:�W�(W�T�蠖TG�ȓ�$����R���2F7�[s�k�]��y�p�:,�B� }��[ᯓ�4�|���YX�uؗ�&�՘/jX�Q�;
+��?�6:Οi`�/�F�8P)ȵ�5�-זu]+k�2Ca�E��Y�1������9��n�56#	�=[�_$� ��^iּ�uu��7qp�a��`��hU��K��.V�Ȗ�p�ol,�]�(`RVw�(�����˺k�gy�H�����5�&��=4[{q�V=Iwޤ�9�|����8�����R�i�@W�Sm!�n��m�y8e��̎�䱇l�|�1^��}F�ՄQ-�4<��;w�+�:f��j��)-
+c�w�E!
+�5�U����O'�����V�h�����F#j����\�b���W���:0����1�����f��	���f
�[
:[��3�.T*��T�M�Z�y�j�v�ʒMp���Z[���o�Z+�%��A/�G�ҵ��v��.cyX�d*� ?m��d�����Dhw0���ov�c����|��Ũ�\,�CS�����g�*I�+��^DU9��BX���L�i��/���2�]���&$�
ר[��$��B���o�t�摮n�� Z�����Ei��b���q<|<d��E�~M���E_ZL�������ͪ���~(��ӱ6���g�|��>Jӣ���g2�!HS@�UQ^*�럘�k�y�+h��ts��ݓ�����Bj{d?Ԗ����������ߖ��x��1������3*4���j�ZwɬSy?�ֺޫ	���t�]~%d�J�����I�6�a�mZ�R�`އ���Ϫ�{��9ދ�����֬����O��b����"�E.�b#|ž�򮫣y�������E�ܵx:2��L��:�Q@���mĪ@�֝?d`�.隅��Z�qb���D�=��#�-�cy+�:����n,�Sv~����%�I�dm�}Cӷ2�+�s��N��#��m�y*��h��ir�������t,d�mv�(H�UDQ�*��>���	�e�P�f͛�?~Uy/�"�G�; nQx��D<JK�K�_l����.�]F�6��(,o`C��+O�_q�=����շ��Uъ��Mb	�������Z-:��~��7���W�<�ڏ��-Xb^������/�'��|�g����O8�_��y��[^U�&���J�V���Ly �.h�������On�B*:��b����4 �s�ljI&����ȵ����� rT��������n��L*�Dk�b}��d�ؚ>m3�c��'KԼ�������E:�ec>�‚nJy�"����)}s%�Ɛ�1�C�F:�+SM��VKG!�xE
+��f)sfcD�H
V۸>6Ê�%��-L��tB*9�~xc�'Q��{M?M$}|,r<u0���M�k�%��J�t��%��|G�(�l��'�b������P�Q䉥��=5U#aQ��r��p�U�t0�>#ϣ�<��i�w�NР�b��0.��F%�8�L�r<�r���Ş��j�~�����t��q(dN'�Ux���yWl��GN��.�ʞ�
�D�՘�f*�/ ���
+mD��n����*�A^<G!V�����2\$ẅ́}<B�=��Ū�{��yXM��GBĝ��d
�G*�~��������ɪ-��-EǨ��uD�7�i�{aF��_\jɪ��`�
+�QG��e�-M:׮),�z��E�J�&��	`x�����>�c�݌���dn��]O�:����k��$��h�;T���w�?�?.?<�7A���iz�!=6jE3�t��T���ߥ:z2RH� R�[�#�޵���,�޷#ȷQ���F������,���c�}y��.����jv�O4�c��e��E��"��$I�E?-ؒ��\�}�$ּ�j�\��ާ,�+c,�C4�3GU_N���;��"�#6����Mi_j�q��V���$(tMa�+H/�8]�n)^����ʑbޛI,����On+��캌�ܴ
��eGu$h��!���R�m.n��.��Xծ&����P��L�݇s/�����[1^�45��}!���-F�k�u��Lߗ�W������f���!̬'��3V��8J�A0��)�����a���9�<��`��c�䷵ya��+4�	�wjm���nYc�q|���J��%����7^��ၕ�_�YXP�)R��G��>�y��v���3?�օ��^k�n���yv�����ƵTE�������By8���S_�#�?,�e�iq�~��u,�텭��E��`�S֠xd������^b�V���㳸柣��������B�����i���=�,a����
�t˭T6yV�� ����quo��9EH�.���:��W������qg�1�L�32��įM�d��q��1]�z���X�h�[:��
+�����]��M���
+/3�K(�
+ܹٙhx����^yC˚g�t8t�"B�3����7 ��h�FLd�H	D�c�a3��Nsf́��f͊�6lث�����c�f�5�8o���	�2�+чb7�vlU��?��ơ`�j���aW:UÁ:��SEq��v9�n��l/&��!���1��RH�M
+��G�>�d7�����K�Ʒa�Y���-J�zk�K�<)�o��
���g
+B�>������è�?]��VZ�A��}���/�����ʿ�4aR5to݊����#Υʮ��������?eǸ=rq�(�w%�2�lYN��_j��i,����\NCA OS�Cg�]�A��-���I���Y�"�~�Qg;#��a�`j<��j�����	��Z����m�޵S�t�1y���Kk�(J�K���m��Q�#���#�QJR��@Tz�(��"��P�yמ����%C�`�6��?�À��0]��܎�Yei&��v�d�*RI�!�3A�j��kڬ���b���fgy�sӊ�9�+��K��/?�m��gK;0��#T�&=7�r9'0@�P�LI���C�?��O�0�p�2]@%��@k'�`EIH�w��yl��jН��۷\��<���ͪSԶ������W#ZO��������m*U��=��ےv��\"ʹ�rj���������hʀ�@Hj�kʻdz�Y67w+��_�{T�C󖔚'��|�pV��H��5)�O@����u]^�;�ReX���I�7�Ҧ����"a�/�d���rM�X���Q\��F<������������j=&��I#�y��8�����d2_Ҿ]�Ώ/ԅ�2@��p7�n6�M��n�M�4�ݪH���J�T��#QǍkQ�NFY'8~�bc���e�n{��6��>\���=���ŵ�E�X�5����9�yH)��cI�?��bԬ�	&��{nUd@)��P�m_?��e����ڼ��[i4v�8��w����
+�v��p/Ti������Yx���!��i�
9�bc�z�c�A�����6�N򄿗C�3js���{��
+�����	�5l�i^c�C��!G�,hWp��|s��u�O8-�*�ܥU���"��/5V�x�p�´c�R}��8̄���o$�`����ݯ<�n/�ɚḼq��9W?~C#��ռ����Q�H`� Ќ8���-<�渵�2�
	f�>q�H���|W���ǝ<�w�h�O|�,E��z(���5#�!��1"7�y{���8I��^��Kt�v]6�'��CI�Z�0_�5��5
=T�0����#���WiNƽ��Y��LE�8si�x���'ia7��?�:~k󞗦�%�ӏ��}�)�O�;}9���>Y:g�g�n�q�L����f�|޿vu̪0��y�#?Y�-ݛ6l*�ٳb�͛6*�ٳb�͛6*�*��`
+�B���c�C�q��^mY5?(��=T�F5R,�?�"�7�H)�6+o�6yK͞C�?D���[g��RE�������4�~���\̱��SS_��~�)���t
��d�6�g����QҜ��Vم�d�����S�t�[X�����֮tو(:��|�o��#m�v����v�Q�x���.�"@�r⻆;���_��I���=(:��w�_
�����M�h/#���(����)�M#�Û_0Cr��|(��*�S՗��vK�Kim�p�};�$�R��ּ��g+}jʴ{I�R������s����~z򅇕u�5E'�u~�vs���)��P_�y�_/�8��*6�Ԡ���N�*>b�t�� 6���E��=Ѷ�v�sZH���Ey�
+w�`j�b�J$�n��<�~�/1k٧�i$4$��;׻d��V��[�-����;Yi�o��)3��^������i9�bz���$��#��\�u;�}b��)���Jd2Ȓ"6���
��@2�䍾)���*?+5侇붆��{$��M+���i��Sm��iha֦�A�Z^C�=J���H<��˪y{ϗw:̳Y�&����u�fn��zz^e���s�=hH�T��I2��
+�3�~���	��T��H�ܝ��˗���/l"E�����x���U�2	��mA��ŵ��؆�K���N nNJu�t?$��R�Ss��@ج�h�����_�Y<�"|#jc�Nj�����==���z�����4�5Է��=���2;}3�]I?y�?Vt_.ٍ*��-��[ى���&F��d����B�ў+ʔ�hM;�?(��Y�n�5����Xg�}7~*�����z�B�؊�MM�/�����Z�i:4"k�Wt�
+��]�d��Kp�	�����6�Z�n|�mյ�oe�	(Ӽ�9 #�+ޔ��O�Z�j7��6�^{�V(�u,���:>���R�:�RCq%�z��gT���y��?5ɮΠ��Qr]�3KUO�?FG<dqX��9&r�f6�����Bҡ��k-"
+p��"��TQ��;��ٲ
�͛6*�ٳb�͛6*�ٳb�͛6*�ٳb�͛6*�?1'|���mf����-Jډp��Љ٫�L��o�o��-g{���Mn���7�'A���3����>S�H��X�EA�#��z�V��8�&&5h�C�^Y�aZ�1#��z��Nzc�C�q˾fj�H��]Q��mB,�=�����^�G����H�h�氜Q�|�i�9Q��N���܉@O�ޭk�C����gVwsōIߑ
+V�:,�Z��ߗ��WՌ�=����	Pq��GF��b�'�+�c��O����}��E~:U�/zU]Ec���eu���6��r@Ff�>��\p�n����u�0�>�%I}I^C�z�O�ί�_��Y���R(�5$o�O�2?�-�u.�i�ņT���n=G��rxD"��cɹ?�#�iӖ0#�� ��~ʻ�~���&[�6��W�P��L��Eu�EI$a��֋�S�"�>c�����Ճ�C�榹~h�io=0~7��j�2+3Q$�`�A��X�D��[�z�(���ov�?�sAƢ2�%��N�U�s�ěF���@���ݕ�%j>�4��,��s�1���{m��%Dls��RTZX�E�O�*
	��pn��j>]���K�6� �K��:�S~'�:���$����jv�MNt��/��|��?�t+�smo=�/���3��b*kLL��]Pc�#�M���]��mR�c�d�OW���;+i]�R�ۿ%��<��i$^7�7R�7��F>\~��4	���m3BnL�3)�U�?� �b�8"HaP�ơQTP������,�.͛6A�ٳf�]�6lUٳf�]�6lUٳf�]�6lUٳb~���>�����ULؑ�@	H}�O�������O���U�Q8W�O,h>nҥ�|�e���x���#
+20��k��������.3���[.�୿�*�s�'�qW[�MW��W�l�Y�%*.�~��n�9���Ү�8�1�XdZ2�Օ���3�kjW�*4���
k_��g���GQ�6�x��Iי>�!�)�����YC��i�H�i������+)�$u�����)j�0~��zK����-�2�9Ugu�Q���{;MBK{�h�nDm2�������4��^Kx��$����%*9z|��^9d%1��[�d�3� y���:}��'�x��ΑJ̱��DR�F$�x�|���ϧ\�d�ӭt���xy��nk�#,��צC��4����dr�
QÃ�j<N&#�u�#j5���/y���E����S��a� �U�؃�gH�������~
+��	���v'�{��Iq��e�`�� a��n�(�T=��L�M���I���^Өض��1FVWc���{���%ZO�#���k/��n.���c�f掌��SơU{�`�(/�n��w���Ůt�J4q
��&�RMj�m@�2c�Kr�����'�q;����F���vHr�����,���y�e�����Fj<��Gӝ�"���v�F��A�Q��X�+)k�k-C�m��ۥ2_��&D��`�7vlٲ,��6lUٳf�_��
+endstream
+endobj
+54 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 151/Subtype/Image/Filter/DCTDecode/Metadata 59 0 R/Type/XObject/Width 108/BitsPerComponent 8/Length 5133>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+������l"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?��ޯ��턺f�n�6�
+<n*>c���g;}SX���;=[����+������h#����S��u�^�Z����D��Υ%�@YH�������A�
+��_��v��O�.-���2X0fq�t�1�Lj��Zsj~Q�z�Y^K2O�Z�A��7�)����9	�yGDO���j��#$���9Sq���/�C�c��#�(��>w��t���Zǧ���$
+�oO���q�<�w��9%7�.d���+kZdk��ڒ�����%nL��|�s�����묶��^=̔�-x�(�6Q�;H(�=��d%Ȏ�� ��O1���H�e�<GP[H�W��k*P5���l���>i��o�zZ����%N6���z��jD���(|	��@��&�cBf�r\�o������<����W���n]B5h	6�O��P��|���ٖH���e
?�v�ʿέgG�-�u릹�.	����nh@�w���"'���,>�o��lQ��I43�����������M�@`?�g�<����
+�Y���z�	�^۹�����tه������F5���z�~o��m0K]YOL�Ep}�cF?�}�L0���B$�<����8���|��!Cyq��i�CH����F������.%��̒H噎��Rs��:���m(��I&��¬����4����~�'�����+�^������8�������"��
+�rUo�K�/��g�@�v=4�F����8�CC��b��0p5sZ����Z��L:�a���È�ڲն�`+�ȝL��,�C�tmMFN�v����״��/L!~J��!"�c�v��֚HOY�i�(��0?�O�aռ�;ăֵ�t�~�����x/�;6�GX�� -5���}�ֻ������c��F�oQ��RiV,[�g��<�^�C��o�ڒ� t;lA ��xhw���tkK���9H�;��w�{82�4`/���e�pp�?����䘧�}ů�tk�e��m#���U�h� ��`-<�{O勷/%���դ�r}#^�(Q�U�?�Z�Կ,����n`?���!�.����8����-y�'�4��4��EW��2�n1��?��󘖮��tX���U��e���<�+T*�9�����t�Q����Z-��XU�MR�"e���g���	;�cpV�xYFè9>���J��J�dJZ[*�ҙ3�U�H��0s�n�Jx@�2��G���E@U=@�x���B�:q$I^��4��#�"(��X
�qI��X&��H�;eڂc4�|V���7b�<�h�e�OaO�#P؜���-%��k1R��D�J�����2X��àZ��
+���������
+��)����6yq=��j""k̢���_��_�����B~d�����Z_�s1
+X��q+�a�K���O�0n��6�R~_i�j����4E����H}�J�h;�+n=���}gH����j��&��r�6�`U�t��6�-I��H\'�q��Ǔ�?�$|���~Z�ĭo멆`CB�!>�D58�lw�:g�,��4������?�r�/�~i]�����Ք�,av�W!�V�4X$QŮ��P�g.t���#���y
Z���1
+�2Y�]i��N�i�F����؇h���4�pV����Kj�j�z��jEI�
+P�e2�ɽ����,{W�@zՆ�l@Tt}����\J�p���RNq������C�4@1J�J0�9-hs��]yf+�XH�@��Q�u�Ő�j#�9|գB���0�����a�����
+�:��=|s��g��^$ֶqJKdaɖ���gA����ZΞ��hJ��UYZ���9 #+cJS1�^G��<�a)���Z!��]��!� ��4�@�CX�_�ƛ�]�&��<�l9A�Y�uw6Iky�h���Z��g]�:,~U�����*��	^�~�dχ�ם�k��o�G0��l|�����/�{`����s�y�����y|ރE�X�
������w��̮�	Z�$n+���ا喷2}��e>'
�3�i�I{aoy��H>L��'gJ�Dy?�H~�1�����,<��?�|܀�Z�\�؎R���ќv$��*�EQ~��ֿ��#�zUޅ��'��PkFn��:2��<����H�����r	Xl��]��Q�t�M����ТOC���|��1<�� t5����@������l���VE�PM)�-F���Q��w�����T&�0&.���dn�8 :�ZP ���J�s�A�vP7�Q�]ɫ�3]8��
���V�ݜzB$�*���
+S�O,OA�ӄ�dr*�yoK���Fcv��%w�0�O��R
i�����\� npK����V�"di�e<Şn@�Zk8oB,��F��|^�
��ο�Y�;M��a�U�֥X���d/Ⱥ|W�]�F۔���3Wz{q���;E��!�!C�Wh�L��.�I>���������5����̧��R�u�e�@��p&U��U��G��5o)���ͧH��_��š}�你��6���������5¯"�+�ck�Y��.�[�q��h>\��f�K4��8����\~�<��������_���%��ʚ�����/	(��P�蘾��Ŀ�"�k)��4���%���D�@O�-~���.%�����e��4��43��$R7�Q��F�lo�t�����9�85A��Llzf�01��X�ģ�u|�4v�}?)��Tw���':
���xGu� �?p����z
+����B�'�5��G쑶�<�&6'�ُ 	��w�_]�"G�J������T|�2�l���Vb���PL�T:��4w���#��(�8����Tc,��g�8�$�7,��V��I�Fk���~����9#c��#���%
+�EE��b�w��!��1:L�L���#n����o�_��c�*x����9&(��8�"�
+���$��-�=�i���~\אQ.D�7�)h��I+���g�Y��~�
+��I���)ˈ+J��kb�5���7�,2	b��kFV�E|r���Î�(��h��`rE����h�����4,���#���js̾o����WWҷ������=�L2�P�o���¬�I#�@���b�W/��#b�/�`P�Im+Yރ�޽�Ƕ\�Ȓ��>�á���}r�����~�`7��!���pmn����<GJ���	�}B�z��3a��51��9�u��F.����}n�I 
+���r#j�CO�)5����F�h��$x�;�K���8ccNdre#qi�^=�U�|'�:Ǔt�ӴX�KsYdjnA�~�9�,���j�_�,`�tX�G�
+���JC��7Y"by��mO��lQ�������Bw���]����_���5�?�
+��ex�Pl��Ӏ���י`��3���#jx�+x�-V�̓!��㈞�v���\iז��'�	BG`�x��(���C�}'W��dI)U~�#�G�뾓�
�+�J��ၽ�&+�&�����iڝ���0��c�H|T�x"���v3�O��$X<�c	HJ$�70=k��\q <o�R�B@�H�(v��|�k�}wDӭ��Ջ�v�c�"���w^�P|G�;�=
+�+�n.gv��m�PGο
+����5�v�4��.���8OGo����qe�w
���槢�f@����Ї�6سUf�q��=Zh��Q���� BQ���W2�,Q��T�^,�C��[u��93����w2ky��Ed=[���b*Er6/$�)(�������Tb��NǎFQ�[�3�Ŗ�P�꾟Q���D��&���O5����Z�_�J,��'��1�텺�";�J����Z�2�siG^��Z���G������9DíWq�d��>����iv9�=Y�[[��yjލ������Ƹe�j���q��!�B�V=G�&��E�O������������C����İ:��5�U�k��@�-�Z�w�a�\y�\���OVA
)�3�:-+ʜ����g�]oP�m�t�@Df5y��ʮ�X�
��\�y"M7H�"�5�&1�P�F?y$��~F�Jx�':}�ǚ亇P����(xQ�Q�/0��j���qK�H�>G�3�&&C��$t>c�3?.��
�WQO�Ĭ���gC��3-��Pƣ��|��M�i��W���2?�s(d#��,4������ɵ�/�Q
��r�CQ\�U�֍�郻�=�XԹ�5L�EڊƎ�Кv�A����.|G�Zpߍ~X�A
+;������g�~������w���=E��d�4tL`��|��ԯZ�̓�U�^�C�~gB�eİ�i'*�b�S6l1��
<X��j���lm�E�(�����c��O2��o;��m�n�T2�j���#<
+����@+�Woٲ�߁>��}�>^\�:��Q�����S����`��q��;��xnM2�%]�OȌٲ��Q�+�)=Gߌ�4"��?1�6@$�q^%\�4��poO���W�Wn?<ٲ[!��
+endstream
+endobj
+59 0 obj
+<</Subtype/XML/Length 4895/Type/Metadata>>stream
+<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.2-c003 61.141987, 2011/02/22-12:03:51        ">
+ <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+  <rdf:Description rdf:about=""
+    xmlns:xmp="http://ns.adobe.com/xap/1.0/"
+    xmlns:dc="http://purl.org/dc/elements/1.1/"
+    xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/"
+    xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/"
+    xmlns:stEvt="http://ns.adobe.com/xap/1.0/sType/ResourceEvent#"
+    xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#"
+    xmlns:tiff="http://ns.adobe.com/tiff/1.0/"
+    xmlns:exif="http://ns.adobe.com/exif/1.0/"
+   xmp:ModifyDate="2013-06-18T11:37:06+02:00"
+   xmp:CreateDate="2013-06-12T13:15:22+02:00"
+   xmp:MetadataDate="2013-06-18T11:37:06+02:00"
+   xmp:CreatorTool="Adobe Photoshop CS5.1 Windows"
+   dc:format="application/vnd.adobe.photoshop"
+   photoshop:LegacyIPTCDigest="00000000000000000000000000000001"
+   photoshop:DateCreated="2013-06-12T13:15:22+02:00"
+   photoshop:ColorMode="4"
+   xmpMM:InstanceID="xmp.iid:FF22A738F8D7E211A771F3AEF7954E8B"
+   xmpMM:DocumentID="xmp.did:5A2AFDB867D3E211B285BE30C263D3EA"
+   xmpMM:OriginalDocumentID="xmp.did:5A2AFDB867D3E211B285BE30C263D3EA"
+   tiff:ImageWidth="3150"
+   tiff:ImageLength="4422"
+   tiff:PhotometricInterpretation="2"
+   tiff:Orientation="1"
+   tiff:SamplesPerPixel="3"
+   tiff:YCbCrPositioning="2"
+   tiff:XResolution="4000000/10000"
+   tiff:YResolution="4000000/10000"
+   tiff:ResolutionUnit="2"
+   tiff:Make="Canon"
+   tiff:Model="Canon EOS 500D"
+   exif:ExifVersion="0221"
+   exif:FlashpixVersion="0100"
+   exif:ColorSpace="65535"
+   exif:PixelXDimension="472"
+   exif:PixelYDimension="783"
+   exif:DateTimeOriginal="2013-06-12T13:15:22"
+   exif:DateTimeDigitized="2013-06-12T13:15:22"
+   exif:ExposureTime="1/125"
+   exif:FNumber="45/10"
+   exif:ExposureProgram="2"
+   exif:ShutterSpeedValue="458752/65536"
+   exif:ApertureValue="4339850/1000000"
+   exif:ExposureBiasValue="0/1"
+   exif:MeteringMode="5"
+   exif:FocalLength="46/1"
+   exif:FocalPlaneXResolution="4752000/894"
+   exif:FocalPlaneYResolution="3168000/593"
+   exif:FocalPlaneResolutionUnit="2"
+   exif:CustomRendered="0"
+   exif:ExposureMode="0"
+   exif:WhiteBalance="0"
+   exif:SceneCaptureType="0"
+   exif:SubSecTime="60"
+   exif:SubSecTimeOriginal="60"
+   exif:SubSecTimeDigitized="60"
+   exif:GPSVersionID="2.2.0.0">
+   <photoshop:DocumentAncestors>
+    <rdf:Bag>
+     <rdf:li>xmp.did:5A2AFDB867D3E211B285BE30C263D3EA</rdf:li>
+    </rdf:Bag>
+   </photoshop:DocumentAncestors>
+   <xmpMM:History>
+    <rdf:Seq>
+     <rdf:li
+      stEvt:action="created"
+      stEvt:instanceID="xmp.iid:5A2AFDB867D3E211B285BE30C263D3EA"
+      stEvt:when="2013-06-12T15:55:21+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS4 Windows"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:5B2AFDB867D3E211B285BE30C263D3EA"
+      stEvt:when="2013-06-12T15:55:21+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS4 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:5C2AFDB867D3E211B285BE30C263D3EA"
+      stEvt:when="2013-06-12T15:56:37+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS4 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:5D2AFDB867D3E211B285BE30C263D3EA"
+      stEvt:when="2013-06-12T15:56:37+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS4 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:FE22A738F8D7E211A771F3AEF7954E8B"
+      stEvt:when="2013-06-18T11:37:06+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+     <rdf:li
+      stEvt:action="converted"
+      stEvt:parameters="from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="derived"
+      stEvt:parameters="converted from image/jpeg to application/vnd.adobe.photoshop"/>
+     <rdf:li
+      stEvt:action="saved"
+      stEvt:instanceID="xmp.iid:FF22A738F8D7E211A771F3AEF7954E8B"
+      stEvt:when="2013-06-18T11:37:06+02:00"
+      stEvt:softwareAgent="Adobe Photoshop CS5.1 Windows"
+      stEvt:changed="/"/>
+    </rdf:Seq>
+   </xmpMM:History>
+   <xmpMM:DerivedFrom
+    stRef:instanceID="xmp.iid:FE22A738F8D7E211A771F3AEF7954E8B"
+    stRef:documentID="xmp.did:5A2AFDB867D3E211B285BE30C263D3EA"
+    stRef:originalDocumentID="xmp.did:5A2AFDB867D3E211B285BE30C263D3EA"/>
+   <tiff:BitsPerSample>
+    <rdf:Seq>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+     <rdf:li>8</rdf:li>
+    </rdf:Seq>
+   </tiff:BitsPerSample>
+   <exif:ISOSpeedRatings>
+    <rdf:Seq>
+     <rdf:li>100</rdf:li>
+    </rdf:Seq>
+   </exif:ISOSpeedRatings>
+   <exif:Flash
+    exif:Fired="False"
+    exif:Return="0"
+    exif:Mode="2"
+    exif:Function="False"
+    exif:RedEyeMode="False"/>
+  </rdf:Description>
+ </rdf:RDF>
+</x:xmpmeta>
+
+endstream
+endobj
+50 0 obj
+<</Filter/FlateDecode/Length 6287>>stream
+H��W�n�H}�W�Ӏ�4�M6�G�v��$H0�&X�;�m����q�ߺu�)Q��d^�n�N�:>���׍�Ңj���S�R���~v�n�nf���2��r��wU�\������|v<�gJ���^�����1Y�j����5_��===�e����}��n����t�����c�ѻlt>�|��t��:�Vʘ��V�Q��x�
��8`mj3~Z����h�f��/f�Gt�O��ΑY�博?�H�	��ف\��U��hS5�n
+�t��8X�ӝ���ii��c׳�hK�4y��ܦ[�8ϴ��Cuc�܂c{9��0ݚ��A�:]i�������ɛ�\I g�Ũ*zrR봮rçB��y�U��,Bw՗�ޟ�+�����[��Ӯ�n��7���u�Jջ7W'G�K\VQ��wm�Tq���	�]fl�)�i�Y�oȆ�jU�����١Wq�gp�Z�k5�k���F]���*n�>���m���_�M�v�X�Q��:z���ѳ3P��<H4���"�RAW�I�:�&Z�k����?�!�7���Et�=�G���;Z���8���������8�������!]�?�w�\�4��G��]�+t�/RLbϥ��$��-���_��ů��dB=�}"v���1��Q<[ʘZv�ޣzkJ8���o�8�Q�qRE�g�F�⤉�H���
+��7Vފ���-:y���w*��XG?7�23\�(�d4.%�>Ug��8/�Kc-E��l\JK�Ѯ�k4I1P�K�Ep�bHm�իwJ�����C'h:#����3���/#����oT��A��3���a�p4�t	�ߏ��SiZo~�6լ�z�^y��x�Y �x�Ѿ��ݒC�9z�кD�@��-�Q3��dtK�؏/���d�C�}74�����-���w���������C7]A�x�~߲d�*�O�tc�;2�g�F��.�Z�X
i��NZ�J�<�3�?�xNE��J�ʴ���m�Y_�D�|��S>��y�=�y;_��^��,|�����w��^�!�����������:���	��G1�eD�D��IPm�����
ï �*ßX�'�0j.�͡�rʹ�@�䣡Dx�C�ңR䣑�ᐁ뇓�/y�g���@�d&�;��«��(630�sS�]WC~ʔ�/x�nM�]_3�50x�B��R��Ֆ���i�g?�������F�:s�'��~e�@���YiA�s�_b�ٗ G��~D��J�|x�hI.@?y�X|—��Ý��Z��,�{��4���$�Rhb�B�%43�׮��1BE��	�ѬB�dh�)f��HA1��ep��dL �_m(�"�؞K���'(�t����~�c$���\v�:��z�	�~Č���О�H#��������@޸�,c��r�W��T�&���,$����P�ݥ1� �&����H�]]�J��y�u��(	����¤��M�	����>$�CDut������TE�;����N(F�L5�'#E�H�\@�߆�3�N=<+3��~YRДp��?������Pw2��
��|� ��d��L��<�ݞ��g<��e�I�p��'Y5h�]�`�6���?Ĺ��++e/v��l�h'ZīO����p��[��U��(&Lw�'h��C�L(�����oC���%��@9�E��F��l�}C���H�۶V��nv��ۻ�u
+���L9"���n�5�{���1?��&b�ߛ�}�ާ�≠��C���ƅ��<h��Xr�R�. UO��<fX�x���VyiB��6�t�L�xD��^4
+i�)�(��"r���G7r�,&����=T�ܥ����2ѫ�΂�Kש�r�7i�uB�:���s@fs�fW�v�Ak��Q ҫ�9�&͔p7qRE���)MJh�4�w�O�鳔����;��P=�ա�x&R��D�$�!q~�\���
+�-y�m�)J��s�`m<���}�� �s��$�Mh"��7qy�yR�d��/�`u��M��Y��2�ɤ��_.G��+�	{�7T�G��D^p��Q?�d�:�*n�N�BC���,�(8��N<KUp�{���}��KU+rܩ@�x�"#&��v�	��n����hr�xA˅�^���ajMp:-!��4\?-F>qt �PË-��XYy�O��;Z�H;#G��P~mt���n��5�s*P]�UOW���^��;��a'"k�U�9�d���J.�����H�,�2�ԺTj������fv<��~3ˈs5x��W^����Q�)a�e0o���j�Ŷ���o���vj>P���Ĵ4��MZ҃�=��-࿩�\��U4M�f�q�6펔�*��nC����+����/Tf���L�03�Q�D,HMž���.�*�[(����OʝPRxBA ��A�1�rĩM�9U��	����y?����eT��
f%���IK�Y8)��pM+B8�vZ�:[�M`�n+*ͬ�Sc�9�;B�,�v��O�k�؂
y*Lk8o<��"���E������l(Bi�� =ztew��O�&�Rm���gP��K���l��	MRwo��E�C��F���Agp�MQ�W�N���3P����AeƧ�N/�c��
+�z�l��N�b����vj}٩p
���@i*E�~T�����y��J�W<~�]�I�����yo_C�SN�d>���jb#�AEuw���)�
+Wv(�9�X���=("ɑc��-<V���U���u'X'�y�;����ӣ���Mxt�
+��"�j&7*]���Sv��2�����Җ�>/*=������c55֍��=-�P�*��P�R��/���r�[3:��W]s7|ׯ�G���,��{����)�٪ʋ^d��x!)E���~�u��r����U21�;�fzzz~&�K����gi���@I��<N���F5$��U�*s�
�7UW5�&7��q;8��aQM��db��Y-WɵP,�#	�e�Cy��
�Y�*���A��/�����S���>�M����{}^R����21�F�=�<���
EO�S�!�ƻ6��Sa��)�Z�����|M��ʢTM��c_����<��"���[n�Gp�̷��V�Xl�%o�W��*"ّh�:������Gh

+}�\剀�4����N�|�,�싢�3��w�^ub��3�*�ԫ,]D�1q���$�T��Ee������?�s#~�>��LMu�e�� ?*����p͵WW�Zt�Î�#F]�O���?�:��U��ް�\��z��rΛTtz�r�ׂ^}/���H���7R���c&I��[����:/��"@�֑��}}�2�.����已c��i0��k�ϫU��3y�=Z�����R�fV5�-��#�U���C�����g"�i����#��m�AP��x�l�vpit%x�-C�Ӭ&��i�qU��է�-��W���n=L�=�������2Wj�ۏiM���o�z��;*@k[���>j{��`�Ͱ����>��{6�R>�s��]k�����61�d=�
+��:���7�����ݘٗ9��Ү�U���ԳF]���9p��Cs���\�U�Y�J��F������J���
+��� �h��
+'y����I8>O��X�Ba�Q��[Mu��wskfp�ﺢ�5�Bc�斑�ƃ����d�����}}����,P?/ڽ��5��f��
+x��#�Mٯ�G�?煶�C��ۋ^�OH�u�u�*?��lf
+&o(L��mFv��}/�8���=~'�/sZi���F�	��T�BS6�+���<�|���/K�u���j6����K.�	���j��X8?/8�un��άJ��jo����߼���#O�6��%Cy#L�q�{n�.<Π���r������2�������y����L���6�ʶ�8������b�9�=��MJy�J'���W��n��2mJ�G�09C�j
+����VlM?�������\�AP型s^�����ReB�v��v���U��7�w}����m�'��������{����]�N��'�G��Cib@���H��ښIOr��G�x���指��j�1���3�]���6$���/K���rSH)l�\�����ٿ��E���w�w�FS�N��ɰ�;�_�G|h}V_Mt�2�E�[��6������/�7��.�@t�6��2��o�rwˆ�폋��6(I��z�f�+m5Ĉ���j�+�}|D��@*g���m�|���?w�[����WO�'h���S�0o��Qf�ś���S��pq�lj�4�5��6��6����;�h�Z�'����\w1��j�\��-���*X2��B�<|���ʷ�_X׺��cd���,�M
ם���E�m����(�Y��P�1��-ugZ�u�a���i�vk=Ω����dlYÂ��1*���x���*`��'�l�ad���h��!���pBkk�!�)b��F�;��>F�P-�F��b�6(�$�5�Cx�:A��FiD:Y��wm�u���Y�ժ�1� v���O�vfxY���YZtk
��d:�S�z,8��)&x�\cd�c�D@x U���1\����L���?\�,p��kdH�y�s��#�AQ�{�6IX
+>F��LZ��#�ɴ�K��GPp���92���a�N�ji90�ꄘ�1}��$w�F�}��)I�;��~iT�
+ĢF��f�5&+%wX�,��>X���W4$�d�ݠ��?0��%&`a����&H�DV���i�P2\#7�e}B����d��@}y��t	,�O��{�ި6b@�c(��暥�-�X-م� �(�&�S�2u��Q�����X"�];@-:)d�S���s�`�
+�G�쁱<�6r����'��I�9r��x8�0	xH�N��t/kK�Kb������C}�,Y�'诉@*�
+kDD�e���"��"�"��q��KF�a����c-k�X���R������.����Uy�BA��BU��j�2���F�UyAk�c�G(`�$��p��XE����:H�]دs����)s`�;��B��ei�5\F��D"[ֽE�K�`�ӂKZ'Y@��m�]Η��o�#ÅkC��ۄ��N�0������b�o�7]Z�#2�l��6�����7�<���:ԍq�CF�@��P
+�	�C��[�Udw����
+}Z�'w'�5[���'5.�S@��"���V�ZH��C"BH���#�_oN�;�t�������ŧ��^�~��d��̡G�I�ދJ��E�����n�]R�Ct%%����2������rMu�[�0Ľ_�W/���5� ���������1�x����#&����M�'�4
+Х�%�)�&�����<��,$�Uo�tteѼ �>"z��Q��>@!��$ME匣N�
+���
+�)+YR5K_+�Itw'p�v�ذ(��ܞ�.��D/�BQyRU&��ޕ�5�e�.@~D�g�
��c�, d��B
+S��U^�ď��]d;o-�_v���&<��̓8�6D.x)����It��8N���������@�K�aF#�щ�Ԡ_��}`Ր�t~�w�P�|�y�5�1�5gHB�D>s���\ �N�����7K��uU]����w-ov����\۶<��
+��KqS�g�~�|�ʒ��Ǧ�����������	��޶a�Wtt�9�׬����6�4h7�ҋk�E2bY���(9��(rq�'�&��vg��_f!�~���{Aػ11���K΋��l5~���@�͚'~l{�w�vmǶNuHz���R��m�<��	@��Κ�_K'h%���N��pƹ7Dy���c�4[�[��ˆ��y��uVjHM@�G*����� �c?Y����l+N�΂�T3gO�7K��N�(�JN�~��T�7
�ϸ��{h����iԲc�����u�E��ɏ7V��c��C���z��<���&�'"��U� �!��!F7>����a��1
+�,N�ȝY��K�>�����aq$�sƋH����	e�g��ʤ�S�'~7˒H��Ɣ���Q��P:���m�9��]
+�G��Κ/�4��I��1ir$ꌟ�A�\ �3t
++�4��?4ڄ�A�y�T9�$X+ҲLa�X���O�@P-}��Ks�ϡe�:�W��K�x<
+endstream
+endobj
+60 0 obj
+<</ColorSpace 61 0 R/Height 105/Filter[/ASCII85Decode/FlateDecode]/Width 74/BitsPerComponent 8/Length 895>>stream
+8;Z\74-nnL&F=?[.iM\[H_(`.\%s&<I8@Bnm3RuHnj0e2!Wn9U""bI=AnSUGY[eVO
+<u2$0;Z72%9]L54dltC,Il[SbL&D51oldV?^d3!L@).3o\*^;a+]T0rk^$`K^-7WX
+!`@B/JA+@2+U+P'eL/=kD/-?GmS7K:iY$9N4fYfVWYl\J1=:mSj[&f_'_'Icqs-QX
+rWf>iO!X*:^a*1?3X$mtOMOLsJ-J=kFc/s5n_#ndch04,JqU??cU(041e@4a_0O;b
+#_GtVVKC7Vjt!U@3oi?F'G?fp.3ro(&MFLPU>X6u6gdKI*4m.;UH#!q8Mc56<?,Jl
+M/^;p9G!2S.o[rG#972J7&59h6@n)ljoc9oW:*/Y[H:J/b5063Bb@(dn&4?9;cIj0
+*"kIJ0m9It;bcSBMJ^BEo,dWcSN>unW_*iriAE<>7EXlUF#q#->sBi(.91D>Ou%9Z
+Y4)8R]_j2%4.aL)[pFj*T,VWc4n6$<4YFZ++bo46E.R"f@]c$>T5%'@hsl4"[i>#Y
+AN#$PEHT\7@a>P`G:<GL&rMI8UKRY."t`msGV+>u;-3:u?%8lsG)B3ZKhPepcAmsP
+7mW6^85hFHV*mr<08MjXPZ,;ZqRmd[(]=LmL&"mMlIFuP#6!;CFVgr#e`EZG*N<?2
+_mQD:noLkD#O,sA4`0%I<d!uTeT$6"(NroJ7t%5+o]B_Ioi;O0&$fn!S>q<ASBd)5
+R!3A;=9[tJN>89V1-&*KFR8QCfHU7+H>g0$=>JG/1VgGaPY1,p1WRcc7IM_8c_Gs[
+]H6k)hf[:%GZOsj)8'a1B]bGW&H6G.-]Us[Ga-b]"t.^M`%hAJ:S&r?A(OJRVB5au
+&QF(J$84R)AISK\aF`3rfS&)TME_1Cb>#b?~>
+endstream
+endobj
+12 0 obj
+<</ColorSpace 61 0 R/Height 105/Filter[/ASCII85Decode/FlateDecode]/Width 74/BitsPerComponent 8/Length 1606>>stream
+8;ZD/]5Hpm)!2M[^els;O3l"oE0NfVdRU5l6jb%s6gCXl17/DFab+n?Boaj=[L*`<
+G2NU=4JY#'P>:6*oVY+5Bo>#h:4_ODGUG_)s$[Vj8.Uj4!ILV@8X!+FOhOh;j]%[@
+MUD`c%+#S)%:JQg)5p'#,?9nKjeDm-]$q5sQE$+F:4_/U1/i^gXt.YZ6M3Y`0u*hZ
+JM`JF!<OsD^&oe@nnlcB#*d::?q-@on,oMa^e+>&i-KBhlf),-Q9ZXm.Trg'A2QHY
+@L:8h&OQ'SSC$G?Z3?=X3H^7YS_=9d0(`::CS3"5!nu>76_hMP@,3$@UR\XC*2ipF
+KTfu9RV&!8$A1$_nuS"]Zjah,:?8V_:qA<NV\0]p.FF(U$2T\-%k*eLBGjdhS.;27
+mBEqA%`d&q'IY!K&.'86fR0q@5=<Lc',V+Y;R98O9J`4cTUh^KB0.65-l"W"b*\A:
+*m<2P+PD$NVO:IMYo4RhpgN":PeiSTf`I.d!?ibeDQ1<n..\7KYuU'^1-7A\IIJf/
+igo12U`S\4>=Ut9UhQWUA.*'p_qD+C4AIIi$1*Bbk7dlR+SopeOmC$dFUR<]DMt8^
+Wc[>kdG9f3>*(U=F$p#-$//o?oYp#=]fG0T>-kfM`QL;O-E?C]6UA)W?9o<eK!()1
+k;iP'V+-3C*GY-,g4V9O79FbRlL8(D,Z8fuRW=sh#cFk]G+9uA.JTpsn]q"6)B[P5
+&pi=]jFFqHG'c(iSA$WC\lNqu]qT[s?A"DFEI;jOm:#s#BLg4`]uYD=l=m.[8H5]\
+.LPoW<F'nn1.el`5>msYYL\WpkK/0I\D"Vr&\3gtp[Q;Y]=Y\5h8gQTmkMhgeRDfS
+C;43=]shHXM`a$qFFS48TBd0H0Kd6l((F#Je:A9nUZCELE%uSlP4_K=&jL@qjmK7\
+3W#R'\h13mhg^Neb]?-OeE$Ge-2Fq-E%2FaPmWZbG1tc4mI0^H@kk>Ca2"[*j<$W2
+_)?5a_!,jJgb$hmcS_fSq!$rff1nW7;oi^TA3#5N+!!I%eSBRsSdbQ8IeXZLHl%H]
+1U"'haME(M--82k4RD#AI24Pu;pM1h-iiiYI-2bJ!&C-IJ':qL4BpT!O>9R&!OYY=
+C-,f0I(*>^-Z?_JIW@/k*6dFWQ0M5.Z5p68Ef/k!D#XG+1g=8F98N2WSAHZ\BPMU_
+<f4SIiRtC$=miAJeE.I!1mOS_Cr8d@BDfUCTMH!JE%g/fGM"A"-,Fc6/]L/@efgWT
+.#rl-`,j`VJX;9T^&ni[."G#A^<ASh0;A=O-U'[n>g",t>s.8fG^(DSQg2]5EY2Xa
+JCA0:4#upY31GAlgHW6DTn2_9LQd*0#Fh74Z$`i/@Qp&;oj*/Z#@??1)S<?+E;2<i
+Y%ag0B"k+"2]IW3hZUtIZ5fa$%?/Ca6I:6"O.p"D)ftT#`%qiI`j]rOdl^jIacJCl
+!cm"H`YB2l$#SIZ(gc*U7QFd@N%Z6$CS.!p`F)5/Wn#a`qLQ0>;e<@+)\Wh<Yk&Zt
+F&V(Th*"3r*;<%fDjYI"4U:V_8pY/`A[-A\Zu.gLi']4-c'/./$2[l-GZhfn^<NgK
+s4D"N?pO`VjR3*LQo)Nd~>
+endstream
+endobj
+51 0 obj
+<</ColorSpace 61 0 R/Height 105/Filter[/ASCII85Decode/FlateDecode]/Width 74/BitsPerComponent 8/Length 1233>>stream
+8;Z\74*'(0&;;XFasik#M^ED09;iIY,EHc)c=t?laGa_Y";e<mCdiH[KNX.GN9b"?
+82<G^+sbA%AE$*VL8!_HH]&C8CHmo5ja;?%Resdhr/4sA4ra:F5ZeO7j?U6M!^cJ"
+Ocm.i!@T/NlK9HLE^5iUBhF3d!FQ$I"jC;?f=80%DEpP)J-1E?=ekE]9pl4i+INt=
+iW'a='P@Y-"i"qB/.b\#PQ]+-aFTgjh/NHn8hfDnPpAq?PWRW7RLg-DMI50Fae2@-
+Uh,V!:>a03AEa0*U`h$F9L1*tW0g,SUjT&u!(%[d/lkcJ\8hf"8<m"cmGmV^"Gr8V
+jFjpN+J"3[(nb<Z3L;l"JN^"NVb7;`MR.SV[fZoUA4G@21#=9D>TCJEa_p+mO(Er`
+#J"?\pQ=laj)RZPiCp.:ZE9Xs/9f4Ic5[qU1sBjHT3am'Pbk1hCr`;=M>-E7O+O&d
+)b1;j`LUMNQ1+&l\*fWtH[g6Zql+/Yf,pN\-nafM[s+/X2k9DEG/fruqkqmd6Zm!G
+.O'$TXt"@%BLGd6LU.:n^4OJH<.>,T)=KRJJ>r/&lS'pR5"Dfc3FC(P#"%2NQi<cn
+CUtimf50mTqKn0XNYdlcW/gBVftG%]U,*;=VSB%)'.=mI<,P$aIAR7CIh\(_m'<H[
+/=.;Md1Y!Rgjta"7$/eu;nYHXGI'l@fKc4Jl1m^Wj(>5P/Jk84P_B<!^k=Ip[I)V9
+_'MdQX$$uKA#)E'ST8_!Nqe$&67fXZSa$r(g?dE]'\0$t98Li@d#NHtAnSFOo?<>k
+eOI6TnON'Bfi4G21G6b1U6Sej'dDq6-NBp,dYjWK<sEHRC>(Fc2JVW9G("#Qh47D4
+fp?3nJ>-dQk.ERiC0M/l*Yhc=cA\,XK9b;($*hE;V:TtlZGk*q1Q=[m\(1^[@>u?$
+n'9"u+KAU6Ni:<"R3kpFXYXOABW;7c9*K>![<D;uM0]ZpA.[T\6O)D'VC!l`To5gK
+h,9qi.IX>f4.\:_?DI>i5X^8QU8cPj?n-K3ECpd?Np&cA8f.=iP<jUiMU5(VNi%r/
+=,jiC/iRR4-1mY+dY@Y/r>ZkhK^<P.QgC"DbFYo"s4n6b!ie0S>G?5V#PD(R^>$BM
+[(*\cX\7Z![.+GW_d2AH&BUZ[b=-HcL[Vd)pY?ro"Rg0fM7CJ:/=E&Q6M0#B4_H"_
+gBTq3JS^#2k^'f&S_Y@W(^h,'4]=%aiVJW)B)ps6SYc~>
+endstream
+endobj
+62 0 obj
+<</Filter[/ASCII85Decode/FlateDecode]/Length 428>>stream
+8;X]O>EqN@%''O_@%e@?J;%+8(9e>X=MR6S?i^YgA3=].HDXF.R$lIL@"pJ+EP(%0
+b]6ajmNZn*!='OQZeQ^Y*,=]?C.B+\Ulg9dhD*"iC[;*=3`oP1[!S^)?1)IZ4dup`
+E1r!/,*0[*9.aFIR2&b-C#s<Xl5FH@[<=!#6V)uDBXnIr.F>oRZ7Dl%MLY\.?d>Mn
+6%Q2oYfNRF$$+ON<+]RUJmC0I<jlL.oXisZ;SYU[/7#<&37rclQKqeJe#,UF7Rgb1
+VNWFKf>nDZ4OTs0S!saG>GGKUlQ*Q?45:CI&4J'_2j<etJICj7e7nPMb=O6S7UOH<
+PO7r\I.Hu&e0d&E<.')fERr/l+*W,)q^D*ai5<uuLX.7g/>$XKrcYp0n+Xl_nU*O(
+l[$6Nn+Z_Nq0]s7hs]`XX1nZ8&94a\~>
+endstream
+endobj
+66 0 obj
+<</Metadata 5 0 R/Type/Catalog/PageLabels 64 0 R/Pages 30 0 R/ViewerPreferences<</Direction/L2R>>>>
+endobj
+83 0 obj
+<</Filter/FlateDecode/Length 2574/N 3>>stream
+H���yTSw�oɞ����c
[���5la�QIBH�ADED���2�mtFOE�.�c��}���0��8�׎�8G�Ng�����9�w���߽�����'����0��֠�J��b�	�
+ �2y�.-;!���K�Z�	���^�i�"L��0���-��
�@8(��r�;q��7�L��y��&�Q��q�4�j���|�9��
+�V��)g�B�0�i�W��8#�8wթ��8_�٥ʨQ����Q�j@�&�A)/��g�>'K����t�;\��
ӥ$պF�ZUn����(4T�%)뫔�0C&�����Z��i���8��bx��E���B�;�����P���ӓ̹�A�om?�W=
+�x������-������[����0����}��y)7ta�����>j���T�7���@���tܛ�`q�2��ʀ��&���6�Z�L�Ą?�_��yxg)˔z���çL�U���*�u�Sk�Se�O4?׸�c����.�������R�
߁��-��2�5������	��S�>ӣV����d�`r��n~��Y�&�+`��;�A4�� ���A9��=�-�t��l�`;��~p����	�Gp|	��[`L��`<� "A�YA�+��Cb(��R�,��*�T�2B-�
+�ꇆ��n���Q�t�}MA�0�al������S�x	��k�&�^���>�0|>_�'��,�G!"F$H:R��!z��F�Qd?r9�\A&�G���rQ��h������E��]�a�4z�Bg�����E#H	�*B=��0H�I��p�p�0MxJ$�D1��D, V���ĭ����KĻ�Y�dE�"E��I2���E�B�G��t�4MzN�����r!YK� ���?%_&�#���(��0J:EAi��Q�(�()ӔWT6U@���P+���!�~��m���D�e�Դ�!��h�Ӧh/��']B/����ҏӿ�?a0n�hF!��X���8����܌k�c&5S�����6�l��Ia�2c�K�M�A�!�E�#��ƒ�d�V��(�k��e���l
����}�}�C�q�9
+N'��)�].�u�J�r�
+��w�G�	xR^���[�oƜch�g�`>b���$���*~� �:����E���b��~���,m,�-��ݖ,�Y��¬�*�6X�[ݱF�=�3�뭷Y��~dó	���t���i�z�f�6�~`{�v���.�Ng����#{�}�}��������j������c1X6���fm���;'_9	�r�:�8�q�:��˜�O:ϸ8������u��Jq���nv=���M����m����R 4	�
+n�3ܣ�k�Gݯz=��[=��=�<�=G</z�^�^j��^��	ޡ�Z�Q�B�0FX'�+������t���<�u�-���{���_�_�ߘ�-G�,�}���/���Hh8�m�W�2p[����AiA��N�#8$X�?�A�KHI�{!7�<q��W�y(!46�-���a�a���a�W��	��@�@�`l���YĎ��H,�$����(�(Y�h�7��ъ���b<b*b��<�����~�L&Y&9��%�u�M�s�s��NpJP%�M�IJlN<�DHJIڐtCj'�KwKg�C��%�N��d��|�ꙪO=��%�mL���u�v�x:H��oL��!Ȩ��C&13#s$�/Y����������=�Osbs�rn��sO�1��v�=ˏ��ϟ\�h٢���#��¼����oZ<]T�Ut}�`IÒsK��V-���Y,+>TB(�/�S�,]6*�-���W:#��7�*���e��^YDY�}U�j��AyT�`�#�D=���"�b{ų���+�ʯ:�!kJ4G�m��t�}uC�%���K7YV��fF���Y�.�=b��?S��ƕƩ�Ⱥ����y���
چ���k�5%4��m�7�lqlio�Z�lG+�Z�z�͹��mzy��]�����?u�u�w|�"űN���wW&���e֥ﺱ*|����j��5k��yݭ���ǯg��^y�kEk�����l�D_p߶������7Dm����o꿻1m��l�{��Mś�
n�L�l�<9��O��[����$�����h�՛B��������d�Ҟ@��������i�ءG���&����v��V�ǥ8��������n��R�ĩ7�������u��\�ЭD���-��������u��`�ֲK�³8���%�������y��h��Y�ѹJ�º;���.���!������
+�����z���p���g���_���X���Q���K���F���Aǿ�=ȼ�:ɹ�8ʷ�6˶�5̵�5͵�6ζ�7ϸ�9к�<Ѿ�?���D���I���N���U���\���d���l���v��ۀ�܊�ݖ�ޢ�)߯�6��D���S���c���s����
����2��F���[���p������(��@���X���r������4���P���m��������8���W���w����)���K���m�������
+endstream
+endobj
+85 0 obj
+<</Subtype/Type1C/Filter/FlateDecode/Length 346>>stream
+H�,�?K�P��j���B���wp���v)����Z��8��
�II��ɵP��|�7碋�Xpp���Ip9�����!�<8>;�W6�Љ�uK�խC�jW�����[�z)Qb6N������\�"=<�F`j�
���N��b.�B
+�\�Q(��e`٤gAUof�0�M���]�ԉE���V2��@���Iښ�kl����K0Z~�ulk�����I7	���>�d����6���qA��o8�x����di����b(�1��X����K`��%��[��NK����&\���o�y�e���}t(��f��ty�2�ř#��	0�1���
+endstream
+endobj
+87 0 obj
+<</Subtype/Type1C/Filter/FlateDecode/Length 4352>>stream
+H�|U{T��!&3�-i,�ؙ��VY�hp��*JY�����*��� o��� �!��D@@D������y��c��u���w�G����9��g����}�|����ߝ�����W۶�\���J�V�jԿ��(b��q�]G�5���$EW��<Q�?���l'���y.u.�����ޓ�Q$$jeKW�����i\6����;�~�e^^^Ӹl�e���1rYxV�V��&�R���Qk���8O���d�t�4�F�&��/�\�L�&�+��r�,��LP��5�8�V'O��(ej�������J�P��X�-*���k��4Y�*n	E=�%V���j�4�%��Y{�2?Y�<�p�œ07[�a�0��<1�Ǽql
��b�B,Þ`�3�-/<����X#v_�_r���Y�邫3V���³"7Q6! T"�����I�'�3?�yj�;�$�;���F���d�pVڬ7�o�
qC8��\ɜ�Щ����$#E�Ȃ��"�DS���;��);�i�쌳3��<�<1�@����]���TeTT��}�'�$R�*U���&	yz�%�p��.F,�m7��`�0�HQ�(7XE"q>0��pΗ�����V���U���V�V�e6��1x#x�%HL�ϱ!�}֤���j=��5Q��(f�V�|_ap�\%�ڈ5UF��akOmwM'	�����_W'�E�6�H%Q�R�Rt�Q��Q�\$�e����]J�����%0G��'�d�v�X�+j0�7��iN�<��l"�%�Po6�PS�[�_i`��H�=�i��^>u��mJ�j��9�2ȣV
+ĉ�53c27T6ђ�������b��ri�H�
+��t_\�n.�<���P�Y���r���U2z��䜒x��2.�����W%$`�����W_�3%�A~�y�jȧǑ�!����X�嘩��3���^�i�l1�V-EN�s]z�F�r�����U.�@�k %���[)�T���L������f2�9��|�a1�p}�p7"��f��i��pI�*���
+Š
�e�HX�(�����ګ�����I=%nTVW1�e6n��;H<��o.2=�8�xf1���Q�����������s,k��l�G������?/�>b�L0�����\2��D���Za��6F ��n���p��E��@��r�NW��~�!I��f$�@�N��ZJJN�����&�
+$L��� �kV�ʧ~}�ݨ��
H��Ϲ=܏=x��"����I)Ҡ��(�E�o�|��~ɰ��ٌX)���H�m��\�������o�����Bz?��ħ�
+Ao%q�6����B����6�x�`q7Kh�/+�c�Y__W�֞�(s26��z�����j��1��x;޹�f ו��S���|Vܓ�ͽ��d�\~�KF����w������Vi�Շш��u[0���]����*(&*h���g1�Bsx�oU�\��Aߢ#�Z:]4����W���<���~���{��xG��F�}�|���=M���?��ˑ���(�(3;ߐJO����7�Z��m�uŁd��:Xm�;L6�6����E~:��]��6�ˬ��Q\�0q�>�0[�mM�n'���w��y�Hp� �v�Iۄ��}��tvN����]o��iԤT��a���L���Ϧ4���s�����j�֒�CVS=���%�Jh3�Є�%�fhR��6�|�4�q����3��GX��i��K�3t�t�����Ǝz�h�I�~R|/�>^��e#.w��
+g�8q^R8Ol�]���@J^��+Ɗ�Pj��H�q�p���D����5��[ä���ʣ��[�H�����q46�a�ز�ZK����0~�.�d�(M�M)��#=����¹��s�ΰEU�]��|��[[|(����N��ӥ"yH�Ml����U�χ����v�?Ͽ��c�!�c
+���Hi=��b��z8H���i�Aal��b��;�V�mғ*��d�L�_��Jmvy!��'���7[���Ɗ�6�j�p#!Q�oznm� %�(�i�6�HCo
�������n�Ild�{�Mw�Eu^q��~w��	�.��v�6c��fL���1�FW@卼D���e�}{�.,�}��K�Y��ƪA�Nj�IL�Xkj�M�3��T3M����7s������ΞE7���GB��k�4�hJr��-���%��c|��9�uZl��[o�)2���Ծ}��M������wgD�xM8ڛ��fLj�.=˜A�v�� Y�BF�*�x�T�*bQ�>�)��d<��Uϻĝ䋍�8Z$�n-7�z��#�����5:1㛇j�Z�7�/m��]���_Y�E�����hr9\���ܢkR{�ܹ�3Ieޡ���4Z�JM%ń|��p�me�W�0s�fPe�;"�_�yp�c���lv�, �7����:?��H�h�o-K�x��s�Tey�bo�YoU:5M�.ՈïYK*��RV���,%�?��$\
+�`��~�C2����N�v69Eu��Z��}uo򱿪����2��O�=�v��A9�9�F8ٷ'z�|��T��k�F�Tӓ�BT�xw���b���yD|F�;�G'���
+li鲍��/�Y��e�з`���8�A��,�ɂ$;��1���tnMJJ�D"��A��}MJ�L
+p����O
+��6�6�ᔳ`��h�&`���w�����S\�e����
h��T_B<�Ĺ��r�@����?&�J$2y^�O�Pc�O:�,��m�	x�T���И�1XMb���ь1n!��U0,�ի���	7��E��&�Z�,*~.)�po��dk[o�ѣ>�E'�\^
��JN-U�bm�"GY�QWh�,��ܮK7bx� m�vH���v5ifA��`�H�`��Ro1c�FF�Ƣ����\����B�c�8p��Fk�QKo*�4m�O���1�dx������ŅO�wgQx���0C�D�׽Soˊt�RQIe��H����M"�E�
+U�)W+5�-�P��4`E`��B�\.��)�<XƂ}�Gd@�����)V��m��)85�dA��`EsM��&�[�����C���vԘ���w����gXCC8������ԩu2eu�LT�)S���
�X�K�5Qm5��~��X+F����6Ƃ
�����-+�Bq�3߻P�����1��p8́ᡰ-�"|���qoq�o#e�
+C!v�Yao<�R���k���"�eY��љsh��GϏQ�?r:&���O�����������υ����T��Y�nBSOq-6�5��B(L�C	�^���>?/��u�/mX+�tz��ȩߥrK~��u�<��CE	�(�8_�`a�]A$�"�G����zb3�!���Ƹ'Xх*ڌ�>c����.���h=~��8k�lX����%�"onNN��|��]�$�X���?
+��º.���.̡T8�����(b퍃%�)y{I>!Q��e\��L$��e9����}�|���˱��I���TZ��{����ȉ�	���/e�V���#�����쐰��G�����nS�a�4�ra��K|�u�&�{���"ϷV�p�E��ɃW[,�m"�=�լ2_�?C��7��U�\���h4kPvY�����0�`� ��2IM���*����̘�<��N��)�5R���#�R�E���ԏ7�JM�C��C����`L��a�F�	VN��d{�m>�����ނ�A#��zm�Fֆ�2���5`��,� �t3�
(�O�M�?g��u��m���w�&s_�[�c���RdBWuG�����c�s� ���N@��#�.�D#&���Nl�[Е�]l0�XʿPc����%J_�L���9_�ϛ�$�ܗ�W�B/e��Wˮ臭���jZ)56�xRG8p��°A�����Yyⵑ��}���<cw�F�+��'ǵ͝����+�L`9|E�V�Ĺ2�fr#�݂�UvFb��������M�f��nŔ��l�N]�O�>���;��h���U�M��n�k�H������o��ƞ�S�St�'°Y���Y�A�~��^���o>�J�=�����h|~5�����}rE~�Wq��	���譨/���gB��C.����l5CԶ�����V��&�����GZ��/�E!�Օ�z^�?��C�E=ӛ�_r�&�?L?�DA�XW����?���������`i\��t�O���~7�sx�ﶅl�',d	��]��Ŝ㳁�g.�[�\)���S.�s���<\��3q���}��%
+`���͢
+endstream
+endobj
+90 0 obj
+<</Subtype/Type1C/Filter/FlateDecode/Length 7305>>stream
+H�|TiTY�2&cKQ�T��t�E
��N+���(�y�qi��QLaBXT��*" �
4� l�e��v�ۅq@���֣�8s�~�9S�s�̯����}������ñѣ0��W����J����5:~v�:���9a.L-L���94<I���)�2^�:��$��&s`rl����O���O�ibb8W��]G�m=F�k�]87�J5�n#���E�j.$E��ޮ�Vh��x^��������q#���N�W������i��Z���qb0F#�ש��]D�z{�n�[#�C��O)N���\\�Vce!	⢞��F���#U��m�N��ϙ�4$4%^�ysQ�hL��a686��p�Ô6U��H1w)���V�a�։a�1�)��c�~[fmd�ebؿ��,|1~�1J9�Db+�K~�5�\:OZ(��ˀH"^�'�+���90�f��1�&�}i���Ƹ�.+�*Ώ����YƄ�	���5�I���s�{����l�#��g~�|b��:�;���=}E�+OM�Oz=���[�m›6\�im��К���!�!�BQZyRA6�NY��ʆ�F�2�ʬ;E�@1��C��=�@�����|i�����J)��y�et�V�E�0Ù�t6wK�*)X�n��=������!_�� ��^Li��?�WXN+P4��l�ݑ�������3a�k1���_���Jo�v�<��B�c����_���� �B)��P)���TF ��0ZR#U_Yqڢ�ڶM��[�P�*���IxTK���l*X��~�v���q=~z�vH�<4�B.ah)��ܯ �3;`)̀9k����J��BNh*��h��;/��������BMU�؇�g���$�:j�릅"���G�'.eJ[3s����)b��������t��$�
+8*`{��0%Z-�)�,����/��wްck<�<I�\}��}eO�z/���U~������Py�`C�/h-j�+���t�m|NvA�����Se%�O��J�U�ҏ�t�1Y��uq<�U@Y��L�ٖ�ʎ�m����U��\��ȉ��
+%�u�A�\��%��5;X�QS�ɜ������i��S�&7}k�����0�g�-��Y��2�_��=�<;x�G����E
+Re���;�<)�
+5���4���X�Fu��F�{������<SV�ٸ7=-ko�1'],�Z|.����+����j:o�_���ذk��$�I��0>��d��:����6~}�Вk�L���`ܒ*��F.H�x��i؀m�{�����KJ��k`*�����{/�y��l�V)e�*���
+�E�؝�
�B������4ؒ^n�˝h��1D���-7$&h7�\�����{�ypii3C��PW]��//)/�����ϖ�ua�l�.������ijKN�?��.�`ȁ��^jeP۫��m�,쓨6��7ld>9֊��		��U�ߪ��[,�
�	�ql��~���O�p�U@nv
��	T^ӡVH�<j�����qϿ���
+ťOD[��Ѹ���Lw�A[C,:j*��vQ���堑��.+�*�J7��<��) �{V���_~H����
p��-|���æ��ߒ��>R䛫'z������Ly��B_%*��Oy�לc����o�����xz���2{���~�&�-lu:� �B
+�����2��9�J��`S�2����n�P�e� (l�>�?r~�2��[����Q���d����~�ŔSxX�t��Q`��2 ���n~5�$[+z�������SNvᗕ�W"���1(��|�Pw_Yn
[�P����\mٍ“z\,�+9'>�<�؜�ߓ�A�3Y5��类��>�~q�l�ױ=ᗘ���T!R�FM�����e�
+���(�]O�ϳ��\����qh����G��k�hl���O�%w�0�J�J�ݙ$�a�M����E�lJ���"
A�ċ��%El-�heˍ�L*�lH��:\�<�u��0�E����ڜ�.�_E�'�7����T��dRe0�n^H$���^���/EWY����*bj%�^!��i�Y��&�4�&3G+mɉ�3nF+��T<*��>�*Z�ŊX�ȋ��d��A�!*o�(A]_�"R@���ݭ]�w؛S�N�99�������w���wn4����@�$��-U�Wp�Z�`>��'��_�����6TR�aA�\��!��BP��9PI�
+I9U,�����#�0\�[CvΘ��d0�A�D�/�ZK�b��li�8'-AO�p_���|������A�Q��|�궦K[�_���#�.�EC&���_@ҏ#�%7(5Sp��Ǯ�
�&��0bz�<�0.�/�x��܆[�$R�4�d��5�i1mlQ����Ը��1�?�>J�H�Fr�]N�3�rZixL��xÈQ���B�}����q�q����L�7�k~�/�������<�y�8�����k�ht$�pǵݝ���~y�n����7{�,F��*	vJۦ9��x`枰�ua;$�RY�k�	�q�C�J�!�Ƈp�–(U����l�0�D��X��X[�41g�o����.�c���*��(+*�Prej�&���/�J���d'�RظY?�!�p|h�|T~�ݍlfI��=��.=<�;�ۂ�w���-4�M88�Q\b�������_�	"��Y�|�����2�����ep��sէ��8Ut�<T�K��������������Q\8X
+��E��<�?)�Z���蝟��h��*��=�L��ɩ�(Q+��*l����#�מ�6D�z�n�,�bN_���K���J{
+^�Z�	$`棍�&�5��۵��\۠O�i:pm����E�0�]��ϰ�ϋ��g���U��:�7\�掚!�
��qP8y�0�/8l��YeH�Q����J; B��?@O�-����B�f(}��PL~�$o��g�cp����EDK8,�~�s�L�t�����
+�a?�[N������&�N_�3��~��6f�-�\�#���L��h�r�^t��q!���l��Iy:�A[�<��=�\xyrj�(�>bMv����>p%�-�α���s�Wlj��}��ձ�@w5�֗f�[�@)��
+���,�Q.Gi"��h��>�-�5�a^`��H�b0(����b�T��%�c�[xf��D�h�A�nң���:��G q��z��Eʛ���z��m�5������A�/�⨁���5D�7_���痾,q��Y��xt�ES��V⪪s�V̜x��/�R-���κ���j�o.Gq�~�ν�$�A]4�jDꞼ��,�:�tRIv�)%�iB4[�Z�킩�V�W�4��7����e�'E3�y�J������)��S�LK�%٨C���n�p���ʛ�˙X!�k�r�~\���^H�Ơ�8
z��7=K�@�=1я��ʜ���E�¬��_�&�N޾co�D]�S��F���Tq��VX�NNc����
+,�M��`�$��\Z�����Up�n��6�Ձ`GJ����˫|	֫.S֮��F`��	?#xb�c����b�]
+�k~�Գ����
+��A$!�(�9��YA_�]ѐ@¶-`�6����vA��py8\���|����Qdz`F�y�OU�����a�U��w��n���P�w�w����8��U�r��M�̸�qy/`�ܛ��g�<v]�(�4_�+���c�m�����W�%�����&I�t�\.#�T�W�X�e��a����6k���QH��2��J���b���~G{cM��p��g�9V���|%JG�d2!�����Dse����V�㾼��u�2���!�k�����be[��V��-u�z3ἂ%wu�r�S��ޑ�.z6[��UeR�CD���f���vS|g����
'TT�%�RJxd����y,x��Q��{�����Z���l��R�Z��VVڭZS��*�<X��,�n=��
+����=0x^�}�����f�;7��bsܚ�R�S]'�lz���
lI�萎�W��,V��S��`�k�Bx�ń
��D�������!g�
��ZC�j���!�P�c(������(�Hlf���y��.5~�WEyd#na��F��G r��~��(>66s�.��*���z�i)��'���ʣ���8
�i���c_�<�3N�:g윱S�B�Z�JQ)���M���I$�� @��$,y쫀⾀E�"���u��N��΍�z��
v���r��������g�:�٠[�&�̊��㣳�)Ļ�\�=�tgJ���Oe�X�RX�;�!�����˝nb8�f���"gϹ���$�T�#g�����>�Jq,�f+c�I�a�j��m��@��}
wkN�9�ƚ�ZN�۩T�i[���VUm�X�Vs��V[��g֥V&�S���U��\��-�<D��.T}F������-�^S�U�-wh����=߽n�Vs���*w���UI����|sSF�-
3�z/�90����X���4��PlT�
+��9E��2�"1].'
+
+��\jO����'Omt�����C� Zi��h��ѥ
����g�W��G�LO����l����*��#�lNL�1П'�ߙ�[^n�cG;K��t��1;�
+ۿ��v���ptp-x�k�oj���Xι1ᩓ{.��qW/��n��r��r	.�szc�ޤ�d!�5�*1C����׎��p�� ��T�7ܢ����Z��1dLw�H�� D����?�}m�?�g����\�<�{��-m�]�-YYn��M���
��e/���r���@�R���I�QF�xȰ��2���^�~����V;^Y�d�`���vl�w��%��pnF���v��B6���*��4QX�0jJ�*��ZMU��V����DŽ����e
+�c"u���P����/��Q���;�g��<��/*%�)C����bEבSM���`s�L
�(V�����uy�
+IFbnn�J��`	�!��ޓ/��^ԛ�`�e:euA���y°�����+���U}I���tLD���r��v{>P�����@�&,7�b-e�����9f�70^_��O��}.�#�^yx�0�i
+F�G��dJ5�y�lU�23�8��-��4*)���<e^!1����1:�M/`bB)��B�l�w2�c�r�"�8���.qq�i�чyΤq�s|f��Q��P[V+�jL9�w�h�QT�k0�^�����m^�ʚ�z���|���f����QЈ�չbKI�']���@{����W5��:�L�E������^����uӽރ��8��ݑ�u�W[ч��[�m��	.���å��e�:���ZD����a"���w��$��b������d�������ðŽ���q�"��3��P/E^p{���v�3�6
����@��m���p-V����G>��H)\����>��#�~)���">� %��5�����w��5R�05T.�1�v�Ic�F�97]i�h�y�C�J]�
+t}E�Ic�3'�$�@����-�������<��`��%���
+�.ض�2�%\|e�jϨJ�J��s��ӀY��.r���7�c4�R�1�{R$�a����{�8[�A
+�[z:9AC-[XO�3�j�5v[��A��M�F�2�~;0;�z�'f������ڊ&�-�M.��օ�����I	}*�SCŦJ�£/\�J������BaA�1�i�7w�?#�����'������'��B?��f>����qz���x�:�8[ M�����ߠ�jl.�!pxV�V��>��`��Ț�AG|7�'LUכlX��k���~�����^���E�����
+^�Qg9E�{��TV���JMfIj��6��z�6v/�� -���dAtl�}
+�i�.?n��J�J$%���4�4�s���9�n���򚈟no�6v
+��:[�Z����H���rU����K޿��yw|��|��x֡�R�	�R���Sʆ̿�?Ͼ�:Ri=g!8'�tUL�h�1�;������3pi߭�����UX��2l�k��"��zI}��V��Q�mY�݉�i���/B�����Ik���l��v�1�e҆�%��Z���Q�d-D�c��g䧡'#o��;����0c�ͮo�m��0�>�=���	����v߀[�z�9��č$ܪ�%M���_�{����#��u拞�J�Q}�^��L���3vͯPt������Rp���u��߬Ȉ�$d������H�#��3���6G�}�u���&Z��Q������r
m���Dr�طG�Np~����P�n�a):o�6�)KR�37�4��I���x)Ic]N�M������Q���z���!���[�~�9��&M��:���9/��?���q2r�D��Z�uV�׹Mo6Rw�~���J�p����Z�x����v,Z�+��m�)���kS�����'\�pY�5�:��:Y=�j>�0��q>��~P���z�k���Ɔ�Ab��E�R#��Dа��a�l�ج_-�PW�CA
+dh��Kp=��Z��(�vU7�85���d(=<w��������p���5B�
+]��_�ɓP10떸z��*��wK��}c��!�=4��O�����h~E��	��rJ��$I�>j����#�buL��q�?*��~ �7K�Y��kVr.k2�*զZ�⛘�v�:���Xm�b�^˷���&}�t��^'#]�f���>g�S�t&��Y��*��^U��[����Q\�؟�!=)l=}Vђ*\~r��}u��meG�.�}��5
+9|,~elC6����>�[��q)��ʵ=��KPW:���.\�wa�e
d^�jɔ����e�J�R?]G����0�W[��9m=����Qw�=�zCywFm�-��6Z,z�`9�ѸU ���/��fU����V�FR������^?]�vL
+�-'|���� ��P[����>�n&�Cc��'�6b4X�zc�UºS��b�4RfO����J�<v��&�t��t���x��˞by��M�~��Gc*���&���T�dU���T�m�	T���PK����"�����4��g:To3A5���,���.��!��:a�1:���ђ�ur��	�F�t�?�}�yA�D6���7�)����yy�F犇�b����Smz�
+endstream
+endobj
+92 0 obj
+<</Length1 29447/Filter/FlateDecode/Length 14235>>stream
+H��VkTT������a�A�wf�72̠b��@PP� �������e#iڱ��1ѕX��Gbjӱ3`I����H��?lk;�v����\50���\u�Y��;����>�|g�}����$�$��u��?���Z���ַw���\\@���ޣS��'J�,���/?sx���Γ�cO
����S�#����ts���+����+GƧ�]�m��%��<6������ׁ�7y����c��E�Q` ������S���H�`r���T��e`��<48�Nn����f:E��]��}����5L&�(a8	X����e��6�@} ĭŏ��-�V�W������ ; mB*Ǐ�ȁI�K!6��!֎>����Tx#L0Â8�#Kx����dذ)��iX�td YX�l�`��.�����*���(B1JP�2�F9<��>Tb
�b���A56�絨C=Ј&l�&l�4��؊6�c�����;�]��	tc7z���C?��E<�/�4^�\·q����]��+�>��U��?ě�~���'����۸�w�.��!`����-Lb��p������i�i�&1���^�Aj�1���ԏ�1�/`/�~ZO�4�q<M~��3x��Jɴ�ڨ�:���a��=�R��n�MG�(��S��v���Wp
+g�Wq_��x/���"��Y�7��M8H
�HM8N�i#���ւcTû��TH�<r��.�ć���9��2����j��Ts�2u�ڠ^u�8��\G�S8MN�s����t�8���}�A����"�9�oR	��fM0�퇬)j��\U5ֵ��jc֌��+E"�{��E�"7#�Dވ�?�x|ql���关g����W���=������?��r��n�݊pxOxgxG�#�n
׆��p��_�1M��I��<��{[8�8O�p�	�DH���"2aב�'u	�����ڻ�=�Q����g�L)�{�C�"Um���>�"8)EjSPq7m�r�Հ�<P�ԑ������y`0�/U�������]�`�?�!���1�A�4������kL��JƢf5��wm�
+�l�
+�4���18���k�r���ezh)��ޗ���6�
+X�,���@ �s9�s�@V����C�����$P܍!:ٮ
�t9����p9�,/W����Ȇ9��Z%�&>����zS�Wo���E�Px�˒���D��V2�5b?��s=���V.aV�r)�r��rI���B~r�w�2�������BXU�5õ-���KA>��7�Np�4J�^T|������z�TQYM�Պ�"��4���S�fO5��g_fs؈��xE�9͖�o(^���Tn���5e�M�����"������H̶Y����V�.�X6�n\�z]��Ԋ�طp^\\(����^�?��:9V�Y6R�R�y7#+>ۖ�;�ɴ�/���x�5�A
+V�`}�T�fH��"]�a5�t�cwm8thz��F��;��۬qe���Z�
+�ʐzHz:Ƞ/*1rA!�����9]�8��r�Ra$�v@ځ��BH�����&�� �\�u0��_�A��O{R<���{~q{!S��r��}�>�wy$f!�WEfP�^�xr�:�+h�]�E����mO����g��H��(�6>�U
+���f@�)F��b�jL��r�ޗH��1��5p�oh
��Wt8���#��Ѱ>�%]���}A׾�i�TTOy���&	��DT��p�Zx+J��$�/��y<m���Cqi��ٹiq����Ow�\���G�i��ɩm���� ��G��%xf��D4���d��a���O���Ķt�����ˏx)H�MwdڊXG���ր�]4����^����ꅄo���!�˲G҂4�������o�?�(��zr�Eʢep>d�-�-K�57>�KcY<$��17��Ku'ӥO���,gQ:�a5�H䌙eJ ]�--����z��!�<2_�er��߀
+�)�CY�*���(��A���2����E�W�̨M��=ELҫK2dإ�@-��L<�u�fL��4�s�9��ߣڤ�(0���*�4��Z*��F���(p�	�9�b*��'�27���i:ب��Qwd$jYݔF�UN���(�	+A>UT�cdw=:g��n.>YT�ZWow��yv�ز򶵾��4��n�7�V���U״V��e�2�Y;�Ԝ�����ZoeM!g�
+�c�q~z��o��6���
+�{�m��8�;���Il'
)K ��ֈ$|HZB��L��Z�1D'�	�IH�Z4���Z�j��TH�JD�����6�mH�i��cE����=��8�V���������9�9gExF��Q���`Q˘A��	3�
+���Kn��Z�YT�$��I� ��i��%K�Z�ъ%�ZO��/�q��5�zE�$r�WXs񚗴�x]�����N�{)6�i-�⧵`}u0���~X��CH
+�_
+��pa�K�'���Q�����#�2�G`��+�8e�P���"��;ME�.�_�i9P��e�1��c����l�[tH[w�E�����$�ϜuU4ֆ+=]�[�m.��:p�y�n𱨷���g�������d<�oHVoٲsk��XShh���.Kl˄2͵V�mtxv~��lh�mpҢ��]�Pl�b�6Q�Q�|����U��El��"6�Ehg�0�v9��&%Zh`cJh!��벀s(Q,�
�bK��
�oN�GN�k'���[D�"a�����@8�_�I.��L�b.�tlc�QeC�Ê�iZ�cj�T!�ѣ�T�/U��+�D�r��~���O�=4«w��W��/\P��񿂮$a�'�OH�'�[\Pq�w� �p�W���{Hp�¥���O�t;���(o| ��i�y�#=�ra����2�ʉ!�Vڍd��}�
+\E�����Q��c$�X�t�R*]h~��j��.0!&������ �8�ި�ɂ�S�8�Ŀ~r����q2ĕu�?f�鐚���V���M����j�ߨx�dT�dT��m%+�6
0D�y��%:�WbU,�0���`-�$v�~؏a��Ω8�]�sz|��r�
��Ӯ��F��������)e����%,S\l8%>��B���5�Vy�<�?KO�h��$Z"�EAj����g���:�MA����X��HT[ӂFpS���foUf���#�|�m�Dz��Z�����Q]�<4���O"��B<|�)�����8@���$��#0��7u��*O�rP�������|B1��cL�X>�b r�N�k<
����t�:�D�&��|��N�Y����;1��T�)�E��:��%�
+15���!���[@�]�r`���? (�F�^� 213���C^���9=e���jg����9aB~���nj�1�$.JW>I[A3k٩)�p�Tx�����:V�n묋?u��6�m�
��:B3�G����=��HfC���'2�LUk�-��%���dWg��o���w�������wn�;UH����e�_a�E�C�j�X�=&�D�:��bj��A���sFښ�>ø��V�}������>��]3���%�|���0�U
+�Hb�cd1&0���P|�����X�w"Ե���{��U��v��M\�Q�aB�`��F�$�|�·��t������[t����xNDg�/]�;�5s��F�J�#�G�w�����oI�����m�ՇʩOyq��G6����d�+��۲؏�r�+�f�P�twO�]�P�U"��WS������}��3������t{���'���p���%=NC(��w�ߒ������>0v����v��ٓ���S�b�5�_=h
+�����x��,׸f���?14���-R��hmA�����p�B�(��4���zw	�lh9!�Q*`����W
$Fc�_��k��+#NyVSѵ&UTV�+.
+����~�7�ņ��A6���JŻx�95�jqC���E�����@*�?A?�(J5��S���b�8�e�v?w�T)�n:�zL7#'V�0�P��'I�Ց�ՑN)s1��X��X>�XN\h�o��
]+�G�Y���uљ,|�E�I�q
+�s\f,Q�7A��,��cc��C�5x:�"�I�HEX�	�^���yN.���ķ���B�6z/����Jm�%�AC�"��|Ѧ�#�[���}���[?iY?��3n=�|Yw듫��}������*}�ĥHqk���P�lE�?*R�ϤM��B�)ɣȧ
+�*P~��ǐ�
+!Y;TPn��Y��J�J=����}^QԆ2�D��7#LKN�.)��
+�A�y�*��<ᦨV������Q�M���e�iY�G-O�VV��OJ��GԫE�=:��z�S��ESu�%�T�!�Vz�t6{z"���k3�OԀg�������/a�`�/����d��P���$0Z�${���T��2�G��#�����$�ӌ'������Ɋ�K��Y�=�m$~�>�����/�&oL�~�#uc�K�V��Һ��g(�
�� �r���M'Ob�kTz�(�l��
�M�X}�#�� �A��Re����;R�TS�XV�q3b�q(�{��z���f���kbK�����;��8�����c���3���o�'������R*�s6
+xK�9̀�~l"I����bL`ð�
�R�Z�!Nv0�M�����FB��HMr�-l��9"���;;��;;���R��^�ɉ���}�+�;�QxE@�V����k�}+�<ya��ˤ�Ge�;"[�UL��pntSĨ�)�8����˺M
+l�9�E�T:����1�N�\�	��d����"H{�+`X�U���C�%aD�����'L8#�K'�_��H*�K�gR�K@J�����xD�7lpT�X^�v���9��*@ň�nB\��,�����-���՟���~��M�g�׫�������6��y�S䲈���A����jcں�����`_���
�&`��(��hiS�A&�	����5�"���MZ�VM�hR�mi�h[&-��D��K�?A���G�����̴�&
u�����lB?�=����~=�%@+%��D��`E��U�8��{�_^��"j(\j�Aj��(}�.bjۈp�HM
+�$���Y/	�B5�Vv�M�Z�!��,��P���a1ᰊJ�h-��|�����oKy�Q@�GPB�1�uV=MS���s��`J�.�vHەt�꺚�P�~I$3&�@�Q;�q@;��[���C���6S��=��.�t��.��ݨ ־8- E`Ic��.�(��;e����[Y(Ҩ��O���
+(�ˣ
��r��M���^Ű��u@�u:�r��r�销Y��.�Dפz,�\龎l��d;:�路󍥥70����c��c�9�8{l�b�pwm�.
$��W�пDL˫Zw�q�?�H��0Xцܔx��ױ��nJ�T>�PZQ�Z�$]т�A�϶b]R������G$�F$яH��c�I�>
+_M��`@%v
����0���x~�%_�)�����
Qs�M�cXe[%v��k���c!�g$�}Grָ�E
�@�(�������>����A^�[�5�R���ἤ�~+W�,J��J"��]��䂸�9M��Yhǒ�\��!J
	x���&ng��D�s���2��F'�z�V,T"�!=��w_X�҉qǢ9c����@:�,��O
'Z�G
�>�iy-Wj���}{wG*/�v�o���ͶP�;\v\��Xr��` Y~.�vm/5�����/82]�Z�==�/�J����:����2?��"c��91apbYnb���+�ew[�+,�Եes����;(�c?��Euk��l�w�kp<���X%�o�!F������^3Μ<�q�:������z�:�{;ԩC�שo�1��ap �:��Ts�`�p(�Rtx��!��L�41̶�T��6�ʩ�Ʃ�{]ѹF׉�:q~'ʯT�X�S�,�5����:%Z�sR@�	��l\c�-�a<¨;�7Ac�nDٚ4;�~�&��Hp��--b0q�$����j��Hg���P��c�Q릇%���h��dSǟ�*L��]jǚ���蚾ɜ��&�k���϶o��g����b�ɔ�����
+8pR'5`;�,݃�{0��t3m&��Ƃ�n�='I�s����9m�E��Hd��]r)�p�'<���	�.��*�젽J����H��)��(��O���AZ�����i���w�Q��q�%�@=>`=�
�_�p���z�W%�ޒFp`5����p��
��%�I�PWq3�0Еj�����p���4뼇��)X�%��\j�)\��=��g��_��N'^?i_��[nj�o��xl�n���m��΍/a-�p�Hc�qqU�hQ��,�q�ˉ��ԋ��"3j1N�YhR�KV�
+V�j�HN��^�Q����`D�lZ�-�Rb!B2˄xԄ�_t˚{,��)�ߗ5g5�$�DF;	�Ud��4�_OC1m'}.�#�W%�����ʆ�����ꅵr���!�`QN3�h������?�Z�����E��`���0��Q��J�k��vL�O�#�&��0ElKjzZ�<��1�������CM��y��%:>��J��~(`�b0"নRpY��ͩ���u�����ہ�ہ�N��	��(��3d:�X4���q�q����Y�����H��oF���O�[Z��x�0|�;�=�\��h�7��f?��*�+��l���\ߜ��w�r�m����T:5�?[N�����X��-������]�89�V@��uף��o���*~��K�����\4�uN�V#�.��T9�p�0*g�0���[�yM\�2��T�w\�S��N@���lC�&	̚�%A�&DzHУ��j��b���~Q��)��Y��~ts��5��m�ܰp�q��@�綀���(�f@��]������A5X�U�����^�'�����k
�X0�:�����8�`�*��h>t�D(h��tG��@ X�ʕfr�@a����x�3�0��+��ඝ̄8�02ZJ?O�I���A�mk�ʩg8@܄T����P�Ӌ��͋�4�)S����T.t�m�*:nGR̀eԶ���/0�ER��U��`k���+���@qNr&$q�_f���ə;��N����+�7U5�l`I�H,@)��adQ��Z	骼vEV?F�f0+��yu�i#s�Q�t�3�h�%|��~�f��ݗ�x��FǦR��[��Cp�7�&>��?��̏���d��خO&b�]��lv�ԥO�Фf�̊�=�!e�u��	��-æ���E��FCp����T�߱:Ґ���e�j`WCݽ5Ե��i�4	�5n��ϐ ��#�[��Ė?,�o��@4�2��!��zǠHÐ�!՘NYo�%�Kt+`M+���3��F��8�ӟw���������B������2��B��p��2��{on���*�l���~ovaԓ�?�(il�lx��	6|�lV6�*c*#��\�T�(`�����uzeE��׏h�)��I>^p]�������m�k� �ד�ŮC����n���Az@˂�B�84�cD�me2�NJV��oE�}��joP�`S��sY�1eeZ�K�
+���I����+�D��)+��e����9��!���A���V��z%�댚�vO�+E%�Hd��p����*�m�:�3�M%�;E��!5�(���h_�ȒMٖ�؎�X��j���M�shZ�C$rp74�[�@aT���譾TH��=�-|I.
+#��/�Sc9���y���{����tf�^𫇺����>O=/�F�����Kk�ϗ3�'Gǐ�[�OL�H	����Bؔ�1ֹ�ES*D��C
+Y!鋜�n�m�f`f�ϺΔ���������m�[��a��%\�w�%o}8�DF���>r����pK����;����������E|6e��:�-�(�p$�W��J�<
���.������9�3�L��n��W��۞�Wvj�g���w ςʤ6�P���(s��YJ!P���<�W(��<�%`]�,�Me����`,0	����!�8�yI�tZ�tZ�tZ�t�����@����҆)�;�������I'D�#����!X2Jy´������q���<�� ��I��.h)�4���7I�IB��Tl�)���8%S20%�S~�O�k��oQ��oIZ�F��`�
+�`�
+��М�ˠ�ٶ
+�	�X �!�k����?��UrA��T���D@�!��h�=��;�ʅ�@w꺳�/�7�u���\Gkv~��7�k�"�#��ŬGM�����|:3W�$��v�njj��(�_���Uo4�1���u�
+E?rF�`����P�"Ѩ�"P�#��3��
+����W1闥
+�"��n��i���_��q��N�Nv�@�o0^;-�b4��p��I����8����S��N�!�T"�(��+�-��s�L74vn�з�[6eT^�7�¥qaqK� XGM����H�N��
[���Ԑ���e����ntt��w��^�ʵ�w���+���&߼q�nx�ME#.4iz�����-��E[ޏ�����nf1�-d��P��be��٨`9�������0���
��}�q��4c�S��6����M��6a��(���_���pbQ����e�:�M�Nl���������,�L���ȉ�;XP�poQ���3��������z|�?5 �0�<��l��H
�z����6�J��C
����?�6�[���OJ�p�C��)
�	��!���s��;2鄀���8�o�k��������
� �/^����`
+bw��?ԭ�;����ޑc�>|�ŻpV�;3	Z�!�B�!���uX"�	+�p��6�0����oj������BQq���,I��s��Ȝ82'��8ɚḩ�a���
�K����Z�^
+
+�R�
����+�w�nˣY{��}:�yq�k0U�n�%p a�SvY�I1�eĈ^��X���&7:�6v�=��p�U�����{>祍��җ"��fE�? �uz.�G_������g�����Q�X#V^m��s��bOW_W���o�3@���l�=����Gw��6iW�`/���f����T��VƧIJ�xV@��ņ�{K�c����l�J#_����L>#��Y&5�L�W��;7]ǀ�^�� �`WZK�c+u��C�R���[�4%}�m�ӆ��Pt����)���&���戁/bԁ�vU�,�lj^]��Vu��ʓN��3�5/
l^
+I^�y�b�$Z*i�eBh:�5آ$I��Z��8!m�'�d�i���+�F� B�l��At�	�������4̟M��x�'��E?�A;���v%�!�&A6�N�.��G&Ə��#j{��p,6|�<P�%����f~���̅����o*�V�?�ozշYփoܤ�g�Z�k.��!1��zS��r��Z���N%E��F����]�����g������4�%mqT�XLW�5b8��1��)li@�1�����5+�I0��$�tb��\��N�)o��{�����ؐdžОY!�z��^/�7��>�#����D��،��zY3)#�
+A"��Z�|wE�#��J���m��8�
�3x���/J�+���R��ũh>��v.���"=���U�y
+���N�^��JQ%���]a�sL���
+�@`��M��&�
�tfT�Z`j0�rƼ#�96P�_���Rn�O����O`���9yf���kRp�؂�t����`�q����pl����L���a�n����"�d��3�X����5H���.�u
��pٛV�G��-�7`C�fw�)O>�m���'x�`�n`5Z\�
+k���~��u�i�D�,��S�I�K9sB���]�\�c}"��8��Y�Ȱ�K��@ X�\y(�a��E�4�r"cM��>Lp9��J��O=�\A��XB�d��t��<�*�hJ�%���~)'yG�����fS~*�c#|@H�PD��%��cf�
+��pi�LNN����x�t��&�<�r������X*�	g8I�I5L$1�M�e)ת��:�jX>�T5�����L���X���[���׳�����T����\Ϋ���%��tO:��Z�-L���i#�/��r�Tha���B%NU���JT�ޥ>6�}�Y�b,�3E�MjZ����=n�6�pH<1�MJڌ�/D\c�i���aI���>���k��_����S�׶��'��%ޛ�>du�8y��!����j\���^e�P��\�cU�ހ��!#a'���������p��C���qo]�w���·�m�ݚ(�գ���q����n}�t���V��el�X��R�m��R-~A�pYJ��T�YK����j�m�:��^_DZ�P{Iy\�h�d)ƫ�	��)�%���7�-���!�U��6�N[UUe�g��DK��&`��g��*�C��P���؆�v����s�*�c�W���~������vB!�d��U�ѵꅛjUu/��L	1J
+1��Z��g���,uJ���\	��@��i�Y:-5�^�o�T���'��������N�ź������!��h�*���j����H���,N��._����y��ƚ�Fc��	:3ağ�~>��o`~��_� ��5����ӽ<q�z���!o�jEH�p�ny��(�����㕎��9O�2�4�C��PKw�����i}��)
��k�)Gy��C/�(OY�n���Z��;�S�EB����y;�R��Ke8U����݈ԋ%�����kJ��q~dXU���i�x7�E��A~%���_��hz↴w!�;�8_�*&7)�#��u �uz<5������5�ot��8��`��ʬ�70�%}�q��qi�f@�pT���u����t5�h+�֕�T���9۠�tEW�Ƀc\�1o��%��U�BAQƅ����V���z�^�(�݀�C��(9m��t0�>����y��)��"�MQ%����;x��Ǹ��#�����os�Nl~��b��R��˳M�i
+���UN`��S`TZ��/5�k��`\�N=�Ss��z|���B&H�`A�TyU�PP��Ǥ��L�q�h�����f9�ܽ�Q5�n��sN��R�	�� V��kT0WF�w�Wy٫8�2�!�"J ��+(��,�EQK��4X��^OJ��>��{:d[5��1٪A�k5���4��q�5h�~�~��N�K���X7|��~$�򙱖>w�4>=�O_���������}�u���ڡw�Љ�^�#:�~�:ET7o$�����0�^�n�L���zZ��iF*f�i�+PLu.	����.o���6�@L�j�h�z��3��'_��ҡL�����dv��4�%�1���P�Q�Hc��-p��|��9��\�[>�+PORi�Z�˿��	�}	������n��Q�X�i��EöJ�{�[��p�f��
\�a%%�w(�8h�:�u�F�
+�)݆�n�R�[R�RtX�"�S�6���AP4��x��S�# J)�
�!#�C�y>��6>��&Lث#mP����>17�X�`$���6�{���`l����< }��X�b��~@G��:�6�����'
��:�+�.�>8��=���8g���+<�~��-��@���T��0<��ͧ^*�S�/	�*f�������K�O>���o��SVyu�YG�4�&V�����ֲe�E����F_}���-�F��s-���W��]�o�8+޴l|�l�x�����:��+j<#$6#b�1��n��r�5�C���@	#~��b��uD��W�&hh.h�d��@�AՄ�?͡�5[�2}��f�Xj�D�� �g@�x����Xb��>���&�$�-5�z���?㊀g��1�E$'�'��9;xϡ�
�a�½f�>#Z07�s�>}�
����;>c�=���z�Y�v����y#�g�\������}�G�}J���qR��T
+*���n4���I?�2��u���/�E̋�r��)�Jk��s7���y�E@R���~'z?�l�v( �y�l��4�
z��Q�xL�ea!�-.�|
��;�y�1e�š�T���'��9�i�L�B�$+������>��O�_�V�Ky��hɟI��[��ƾ�EgA�A�
+�ti��O��/"6H'���M���5ZT�5j�W�g��z���e1�ck�e���̄MO��F>K[�1�8�[K��1B�vՆ�פo�<�[����"b���8���Oܺ���J�B+@^P�7G+���h�� |�9X�2΁#��I:�8<�=]�藴l�{�L��7#�)�6�M��G�Y�Z�ob��MQ�9}i��_�����|:|������NyI�������Q^�r�XQr�������J�V��/��F����5�}�W]�Z��?��\�S��b���>~�
+�ȧ�E>ӏ��LO��۟����ϽY�v���'f�eͲff���]k_���܏�X�=1�@��\����o}�|������=����f*ɹB
+P��5U�x+E4��������V)f�MbxT�"�(��<%T�Z~!��<�6�2���)���
+{�����s
+�P�g4��h&g�z�\�c��^7M$�m'Kŝ�����I��8�xt[±S��1��\��Ie�t&���^�a�p��Ɯ,�w�4���c��p����������y��H�{��L�0�n����`

5%�)��o�W%6�bY'�É7��0i6Փ�fZ��Ƃi'�&an�6�$I�m0�m$�ܴo��Sq��%w��D*�-B�\&���8�^��\��E��il#ke�\�I��4�Mܶh���"?imm����٫�)l�+�ٜӟ��Bۙt*͑t$�)�I�۝�'�Ƣ$n��w�����p�E'�9�^'c��ƶ�G�l�������%���M�1'IB��2��޾������b�7iǠ�(�g3n6��rn*�U��)���R��MB#𫡔���k����_��f��(
+�� &FL7j�qⶔ�$ݠ)�$�����3���.o��n]�ƄtA�G�#���s�L[ �B��~��s�9�����Z�b���f4�YR��f䖣�\KK���\��e7v�~#����H���
+�PD�xk�0��t�Z�A^���\�=#�de�װ�{�ڐش��،a��j��-Z��Cߢ&~�bK�U���M��ŏKa���	���3�&��]�
#����r�$F�t��E��C���h1m��e�]��+��ָ�j�-z.��z����W���b\b�*�P2��-�II�&¦�$�2糵!Kj�{��yR��L3�1����۸�5G���/Q8� ���	�����J�T���H\z/^9���I'1�fZO[���H9��S�6U�����c:~�*wǓR-{MF�`!&
��R{;�m_���%U^����U~�Q]�t��馺v�?G|r�w҈y�����Ǖ����+�#{ž��	�9��3���Q���օ�+53���R)��]�����	��zrM��:O��:…����S�8�I�|�3[3=b�W�"�f�t3R��I�#0q�{��ܑz���4��1����ţ,R��p�����e�c\��T�;K�ƞ�XҺD}Kk��q�^4�￾�_��!�-<��@.�ƒ� �1���t�n�H�F��	�����r:��~�Z��l��
+=�|��a�Q�;젅w�!��ak�c��L��S&l���W�/����Z�FqEq�n�;�EƸE�Gu}��ށb[���Sq��ӊE�M�=�N�<�u�k��XT�ӊ��}ݿ��=�*���1��(z(vGc��UY�U>۽z��M��>��=��a��o�u��
+endstream
+endobj
+94 0 obj
+<</Length1 10831/Filter/FlateDecode/Length 5128>>stream
+H��Vp�W����#tI�&$Yv��#a�XvIv7�	���$�@w+XHB£	(�-��.HNJSm��h�n�X7Bk�;�aU�b�qZ�qƱ
�ww7�T������s�y�s�sn ��b*:;��u�sG(�LZ�q[txE��+���n�=��S��+���?�i[��;�9��
���ӿ����g��n�/��~L�`>���
+��F��S����Yr�s�su,[�6F?�������-�gX}�ʖr�vWt[_��9�̠/
��z
+�j���;��M�_~��>�?�"�('��N�)!J�eFу:S�N��n��m!���%+:�A��(�n����7�;
��\S��9���@��2��mR �]��'�;�)��/��҃��I�
&L�M�a
+�af
+��(��P�2L�6�36;p…
+TbܨB5jPf�f���#� 0s0�0�A#�	�X�EhA+�Ў�X��X��+ЁN��*t���w �0��g��`�#����Sx���C��x��3��+xg����qg�K���xo�m�
+���
.�"6�K�Gd��N|'q_���2�}x?����xD����n1$��A<*v���5�^<$�E��+v������q/b>���v����x���Oc���_���b#���³x�ŷ�=|��8�xA<���c$�b�qXlb_[�f��x�Y��l�y{�5�A,+E�X'��x�xD)S^S�*���ԣ�q��zZ���j6͡Uj>m�֢=g/�;����apLq8����Ʊر��Wq��u%���cb��#��U�[����(g��r���B͢M״��9�Ç�>���ћ�!R��R����k�/��ԟI��烙�����_q������6]���6��՞��˯�&w$�������d4ٓ\�&˓�/%r��Dͬ��@d�o�-��D������O����mGDob=�½��[�hؘ�E8@���-D�b�E'��Qt'�槔�-�yŒI���;��(zLg�{�#�.K`Rg�!��"u ���Z]��I@�jZ�`K\��D����NN����jE۪�3�Ŵؒޘ֦
D{㺊�ȅ�Xث���ow�o
+��d��Ṵ��vti;�0-l�Z؜�@�ܤ�]�������P|��,��.�۵����P�tK�=�.��H9�?h��ld̆j29+]�A�X,3s��c��ϑ�'>)h�
+xpi@�hM����Ҩ�^&N��ΰ�-t5�vYW����Þt/f[Uγ�m��J�{���
PWs�
��?ɹ8��kֳ#���LP�7�y&6Q����ĆgbS3��M�~"}�t�4F:K�H���/R~�'�(�p�'����/#�5���[6��lmg.)p!�1|�
��������c*����f5ꋼ�lh��a0�h�u�ES
�<QTb�����Rj�
+<��C�qi0�>��b����
�Z�h���o/��f���/\<O8핁�28����x�(�Y���RB�����@-O�!��h��H�f�]�!s-���--�(��I<����������M?n6�����%�^>OH$U��BFD������c8�TER#�f�*�h�%�3�`�L�;�"5�dJH3H*
[�Ysf)����&�߬©6���Q	�g꜎<���ȹM)���83��uE3kj|E:a�-u[����đ������^U����GڪDԶ�c��ն��`����:�/�?��ru�<���Zs8�⛙�R�: .�`F�(�j%�Ks&r�%re|L��	�e�����LFb���������j.��@���B&}
+�r���J�}rE2����)��l�����w}��Ɩޘ?�,`�Y���Ҭw��˓- )r��tCd��&�EI2�2ۤ��:�	����:n�'t���n+�?7"}�	4�B4y%&��0Kp�oA�C
+�"��H���ə�H�C�9�&�`�	up�g`�ø=i	F�W��2FUX,�$��٨f�d0����m�9�����:F�AT�B���Eճf�]X���~^��B�6O�oa�oN�xR��l_{0nrN�[���Sr�װfn�c��ֺ���u��B5�*��#_(��&y�g2RLt�2��ā�ӱ�s8NΎ��'��(�eD���JA�ٯ�8�+���J+�:mݖ�_��ձZɺelɖ�Z�Y��*�wG�4����H����K��
+M ��BiSRRZ�$MJ[�1���┶J
)%��u����{�Y��� 4J4�}o���w�~�PP�l���)�-����{�#V�����Pu��A_8Ȟ�d<
+�o����&�@��hMM� ��`wEC��]�]�XѨ
�{a�
�sej�1}�_.hO��]�߹�1����ܑG��/���GX�Wq�@�a
+gR�y�V��q��������@�� ����'���T�dKIDi(��R_C��v��a�7�J�O�A�J�?���'���p�PYf����m�8���
+�	@M�s ^�0��.һ�BGE�9�s� _)ꂢ.(�Rg~>�J@��؀���Z�l*	‹fW�U��a�]E� 7�I�!��W6�RjNM
r��\ă\�<�ȃ����J<�
+��0��8�YX3Ƚ�F�F���	��>T�[P��ж��m���W�t
h?��]�Ӈ�?2������ښp��-
m5=��j���Ž�����]�;�;��Gjz����gG�O�o��=S�[����=ee��kkZ�K�|÷��ޗ��A$�:���c��5�V�ܛ�j��@��V4M�_�,Q��h9W����E|�D'����Ìb�����`fE|�����l�Q��r����7Ns�F�Y{�`c)�*t\�z���2��ܨ<IK_Z�Wl0�8q�E-F���`�Fu�k�ۛB\�r�M!������d[���P]qQm��`��PW}��o;��~}]m#����Pu���᳏j�U�設��YY�#R]ӱ��ˍ=��m�[w��Dk��X��셑}��G�)��;�^���O��&j����aA�����W2g2�@���m@o�C���;q1�0V����&�GIP���VɰW{M��âAg2�sXVH��ךE�֜yW{RT�E'�>�[M�أ�3װ�p��e�R�f�=(&���#Bz��4�ء?�yW��ӯ�&���8�>�u���@�����ˎ����i�3�J��"��苙w��C:��u�Ҏ��3��{)����	�}}�M�xd�&���t�[�̼
�(��g�n��z���>
\�}�D>�a���	‡��Ԏf��㷛u���(,J����?��>��4x������*&���-��>`�{i�vM�S�{{.����
+��.���٫A9�?�7b�ԢT;
�e�ʵF��gd~�}A�0�q�P����z��^�Q��~�Qm,3�:s���_���rlc���2WQ����ï�z�_���s�s���@}����&�;j����9�]��p����k���_��{Ta��0`y:E�D}����|ԏ���62�/|x��rΆ�\�x�U/�[�1�2��'�)���u/�[�<�}��ʼ�\]�5�E�/�n���9���V���3�����u2~�{�.a̬�Z����S�k�Q�r0���\�w�Y�9�	���&����e��~R6�
+����O=H}���Y��:{^�uR����+b#n����ġ����|1�N�0z��d��E��U����e0����u�]w�Q����@}���u7�v��)c�z�ϻ�������нI{T,�2m��w��HW�+���B�%�ve|Sݽ��:�):�����M���ARl�ˉ�)������{�.�7�֣
����1P������ztP<_��fʕΌt�,�gm׌Ǘ���:1+&��蜝�d�2c���%�7N���r�I�dʉ-D��,k�\NZ3f�J�&vZF��rʞ�s����������;��#Ο˥��H�cE��|D��%oMÇ��Z�b99�ig�]2S �0��R�2�N���H�%��<���ba���ߘ�N��e"睘=c{�h���D�n�L�g�T
+�K�;�<�U�$�J˥���VB�I
+܌��ED���Jq6�ۥ7nG���:r�YX���h��580��`'�v��1џ\N��|Z�vB�v��S�F��A��N�$��f��}sR��Y��Ɉ�⋖kGM
+�$M1k�L=xSq�~���6�#svBF�	�������Q�vi/M�W
�� �v�1NK�:��=��'݅��DT(�g��VԽ��#�mˆt�Z��L�:��#������G������3�uŜ��c����q<��Y"!������}kQ���Ӟ~M���<vX�s�Y	Z#���K��/i\�&����aV��f؎%�Y�E��^��'���O��sСV�t���)�A�g��Aa�d�)"��b\����O����T�m�K��s#s���<�)�I^D8�9��4gڅn�%���U�I��r�Uenשּ5�&�GIJ�����4yE��OE�؏$�!
>w�����^'����^5]��'iȓ�)ou�;i���Z�!���f�K��kU2[q�3}�\P�U��{]|s��o�R/E�?o.�KHRf����Xi�%����G��o��ܗ1�P�?	Mi�1�۵k�v���[{"�C~մ4D��j�=H�(�
+=A�xT�V�Q�c0 Ui M���*H�k:;���Q�Xݗ��{3o�%�q&��i��v[�73��5?d�d����T(n�4#n�'�M��pr>�9��N{�—Tݎ�j����=���)*�/n�j
+ߝ�$v?ּ��b�o�'=�EC|��)-d$��'�m=���㴜�������stCN/qgD��9��ji�y�l��\�垔�x��^��TX�M�k��#~/ ���"Ɲ�;/
"jX�*l
KN��r�4��c
+��=�J�����,���Jw�*{u��E�i#��썈=�N�.�\�����1��*{MS�*��9��Tf�S�7���ƛ����(
+endstream
+endobj
+95 0 obj
+<</Length1 46108/Filter/FlateDecode/Length 22386>>stream
+H��WilT�>�y3x�Y�=��ϼ��3Ͼy��,�Ivd�d�.f
��`(�RiE��HDQh�h+Uꦦ����URuA-�*��4�jU!~�������7H�J}�s�s�=����I_&�F�GSٹ�9v}/
�g�L�O������Y~���lDY��;��b��}��'������5"�s��طo�Zc7�^�ف��7�u��� +���T�=��������r���D���S{�oO]�Itߊkr���raE���a~��ɽ×�o]x�����#��k����l|���ӯ|��޾}V�R�I��ֵR�����\�F�5���,P���yf@�:�&��6c�A�p8|���#��߀�:`��&��<�=�����I,�m�1O�T�%��^�	�L'�<�@>X₎l�K�V^��X:S*�|))_,��BވE�������}^��%���I���
+r��O��	k��x�0�d��|oiB�`���_T+�����t�V��b�Q��������,|��!}��<'_!7��	%�@`f3�eL���Y	��Y�™,n��%,x��_�t���&`8��VT�����`�&��*n��N�����<t��*��$'�ϡ�4��M�J׆J�P�4,�
��Cg`�l�F4��#~����c�S�>����y
���z�v\�3 |��12�`)� �`1�A�A���,Hj�̨�^�0xY�# R hV���3O����W�o#�
+�����8e���^)�o(o:!���LG��\����\P���p�&v�F6�����A`;��<���"�9@��3�9K�y�ZF���B2K�&y�B�Դ<�[���I6 �9���e����^+.��b�����Ԏ��_d���u� �H*+f����r�K�E�r!_�KI���˶�>�K����K�b�KaSs�bIjpG���Fk��m��'Q�

�H�[�Qw0��u���t�o����U�Tl������M��7`d�3�����[��D[�'n]z��hr&������:�}���*t4ַ��R���f�;�0��)��Z�T�%�O�#�(��#9��� �Ȏ�U�X���=�K��"�k�aGfؑvd��aGfؑvd���9s6[�d��E��E���IL@�Pc�����x�}3�HD'u�b�J_��s�j�����Y@6�6!uB�R�N �"ˮF�y������l���<��cw����_�ųm���=���+Ȁ�X�SP�Q�T<I���)�uOA�SP�xd�V�ih7�o?l��{�)s�:X�3��pf��w���:��l؅a��Vnl妣�`��x�iϵ*��ؙ�����˦�]SG���#X��hty�~AS0b����4
��Ya��;��V�*��R�Zs��A��I��`�;X�c�[X'�1�
+e�=6:���N��;��{T�Z���J�S��H�R�;�)-�T:Ӡ����/ݔ�#\w�f�+����+��"�)
�qpg�i7�@�?�}<|��������� 3���������7�O�e@eNr��ٓgA�2?8Xk�U�V�Zk�V�ZE�U��}ЧC�a�=�1��b��ҵM���^�ٵoY�5*|J��4�,�q������Fnt�0��*ۿn���Մ�g#{jOxj�F��xd�\O����G��K�(-�óv�pvĸ1nG�ّpv$�	gG±$�1C�4�Z��׬�.��F]LES�FTt1]LESESQ�T�!t@M4`

X
X�44`

XC�Ѐ54`

��
���-��X�e�=l�NZ^P[��žU���l<�յ:i�#�{ttw�a��K��|��"UW���׷}Ϫ�J�/&�*��ޓ�6n<t��}��`1)�ley��L�.�vIY�'�1��=�Z���S�[zL��z)'Ml��o(�iq��5���d#���3��?��Dj��`S3O�`��M�Z�7�۫�!�!ȟ���ԈS4��>�ZMR#?D��$n!���
�0/:��n
+K�I�y>���Efy�bV[@v�6�>��vp���s�B�2){#]e�8��+���W@���L�s8'`�}���#�W��M�?��8~�\�'���H���Tё*:B^G��H��#Ut��^KQ�Uv��#��g��.0
?YTd��7��~` �
���7���@�<���//^[�-���U�S�rO�;=%���x���]
���iAށŜ�O�ڃ��90Ey[�
+2��㑗^mh%e��^zI�*:�Pώ��}�P����J�V��RF���
���-l+Dݒt���z�%�9��N���9�3��u�@0��j����l%��	-ߓ��3����/�ϗ؃�[A�:��]֛����
+f���� S�e���bJY��3���x8�dJ������v���u]��f��0�<�;/�`��e��N�`���`�"\��$�j����4��D	���b)D�O�T�+�#����Hi])N�"��G��M��G�>�*�ӵϾH�H�Y�s�s��k��'`��%��V^j^A���f�\bP����y�Q|����<'��<M���p1�!\��~N��x4���w�%`MK���o�]߆)�
�?�ī�
3��
+���|T�^rE��
�&`��%�e�҇u�M���t�����{)%��j&R5�aY'
t���!�*=H��6vYQ~��@��P/���zP/��^���zP/��p�-<6�?0���Q�ub��H`*B���"$��(B����H`XT8���lS�ʬ�ڧ���>�*�A�	%+&3����D0��v��j!��}PS]]-��s����ՙ*���c���ك��J\�1�M����-����}C�����?�����É��c�����ܡҔk:]�̙��m��d����1�9Q�OzpV)d��{fγ|�\�Vo����"~ym�7�i��nDmA�I%LLo��^��[��D@2��o��d��1�B�b�Y&GU�$̗$On2�����zM�Mn��-׸x����GuJ��%�~���蕅���?�7}��w&�x��s����Q�1nC�ﱮ7��B��-L�4�2R�Ժ3t$�������.�G���������U���L�Hu�kT�6�q�A��UdS�Q{_��!��e��j�	4݄�&�n�i,��|S�	n�M��Z��%Jka�B���&,�	i�B���&,�)RR��,���i�����9mv�������V�+��]pN�lXVi"�4���v�}��FՉ�]�]{�8HA�b�BZEqW�*B$J�n0=����ּ�[���5���G��E�;���NQ�� �����?Q�(�O�'��ez�W�ڗ�(g�ˍfvA���uG靗�+��q_sE��l�����l��e������h�H�NWR��J~��BjΔ��r��97�ʎ�'��sƗ~fD�m#s#=3���d!ڛl�Fsɡ��t�G�rOz*rOR+�;�j|��������aգ�k�n��h����'��pBzX�k�gP�X�"Z�豢�G�*oi� �Cg���M��
+�G%H�h�|�:.�	s#����Xd#�mD�����6"�F�C�@�J�	8�����5&&)Y���a4p�XϚg"�M�<�ZN�	�3�y&4τ��<�ٶ@�`Y�~�������h�-��X�h��5<�ǵQ.��/�P�����R���t�&�EP��<&��<^2�G��Syx*/���vT{po(/kjG�45t3=����oĖT����#���+
��ފ�N��F��=Unk)�>��W>��S��SNGCc��L8����c���sY�؝�L3�Gz��?�?���C�μ���s�pw��h��c�x?�j�D���̩�����>3��%��b���U��4��$���A��/�ř�Qq�:O׫3°,l�6��-�.��|DG� �1J���:"�#"<�i6 �:�G�`�Ww"�4�{.@-�M-�D-�D-�_�DLY�8�^��2w/ge��nY[���-��zн�n�@PK�#�ӰE���ɠ�k8��7a߸7��?�Pը�]�h���4
�w˗v�U�=�
+���Z��W�"פ/X������Z��$��ѵ i-HZ�y�'#��9���v�݂F�*֔o��%��q�pS�������l0��l0��l����W����!�h�VU��F�U�'��W$�QE����<=A����宯OI�o��-�9���ش+2u�k��|�?��7~618�<i�8݇*���E��X��p�363n�2���R��d��}�]��a�)��:[��[���D�xc��8��u����Ё��84Jlk-�B
��П��oƵ��;=Rb(��������?I���d;m�`fpeZ%�	K�
K�����R>%�	�V$@��L`��@P$	���h
�(��A.xYt���|��{!�n=�3x4"������.Q9*.���]�rxV����nQ�n���7�H A��b#Q�qc4c.�G�
+Pu\�
+z�ӕ-q���ݔ��ħ���	j����3L��V��*�s��]U��bߌ�p/�����Oq=�d�����3�z����"���sZbQ=�<}(�w8h��+S��3u���)z��Mw�$�b�Q���=F��>gz�5Q9mg�3�_��Oz���N���J��q���f_�K�FO)Y9S�gF�Ǯ������#y�'>����[�_W��O��w�*z<m����G|�̭!sk�>��+��;Vm��
򫡵�8�K^.~Uc{>���6�g�DžbC�d�h�޹D��7.=�xI3>�7�n��j͓w�j܂�j�Z��9�!	��$U5D���{��j���{W��R�hW%��D��(`H��5ԁ���xM5�@
}��݃}@�I��<�4�R0sE���4��Q��
+���n?�%�,���e��ni>|�~7�!��د`�+i����斓�v��Lyd�u�cY�xD�T��}E��5) +�w����[��a�U�xy�,?@-���y5�Q|�q塸���({M
��@
�\_�l���X�X��)qㆀ����,���ʻl�Ȝ�z�e�����ۄ����0^ey`㿬WilT���=�Y�=�=x��g��6x��=��Iq�8l�&
f1�O$p
+	�(mJ��I���UըMK�J�6m��G��*E
��Y~ф(��Fx�����=nB�B}���;��;�pe9ȕ����4�
؎��Rͼ2�N���QF41z�5*��O�㦤��F ��2g���F�����؃ZH۸y���UA�*z����C
+A��.5s��$*�TNU�4�߮A��|H�L�B��8Uoh��k���x�@&!�:��G�y��v�1X���7�����,/��
+ZP���l�2
+ZPЂ������l�83�y)��)qg�{2�(���d^���Q3j��]�z���d/U��h�2}.5Q����Z��rߺ��җ㚑���V�6t���5%��c}AMkɟ�g"���yΘfz<�{�"�9z����=�@���@]��bY���ҿ�_!�5��b��������_���'	USUF��h�4]R�\K�2!oح�L
+s�ܞ9w����,(���(����@�:{�=�W}m�������|b�}˹�٣�=T1:�b�Z��T��酦'�cN�����˕����S/c{��<_.��C!�����&�&.7U���� ��V�z�V��ą�÷��m� �s!!Hddd7�D�G�����T���BKxqO���u��+����o���Tb�3�N��TU;�x�Ho�K���X�+�����[Q����_o9����]�%m["B�	�
��7d���^����U�
���}d^6�W!�U��x"��_�̨?(���D �����Ӱ��b���.�bTq!�]�b�؅,v!�]�bˀ��JiL3�e�)��C�U4�*��������(ؐ�iv��R�"Y��V47U�G�������*�?6�Q��vww��bϿ0�D��r�l��y��%Ь�@\o�m�c9g�'E]Aډu���2�+|ԋi��!��l'I��?��W{'��I���$}��bo���g�z��ß����﨧�\����ez�W�_��'/ĥb�i�ipY�����^U#�d1�����K�>/eu�T�
+����(��l:�:E��N�}��C�qI*��G��r	`�68�C۳N1����[~^�8��08�&<��Q	�����|j)��УT��c(b��vt��<�������q	jxO{"����Y �崰<��&-�����I��a�)
���)�@N�6e #@���Pf(�@���(�ķA�� ,��� (3��2��� Sf�T�y[��D���0���dW�S�a,���b�
�����)�U{|k]��с��}-���j]W�z��hYǶh˖h����U�GG������Y�6�Y���"���v��%Op�}Ar����*m�}�;�&</EW,�P
g�;g���<�.��i5Էd�e��Y��qm"O�@V&_�³�ں��L�	��(g�o.�
%Ky�̠���)^��+���RB.3��>|�W��ʤ���mO�x�|K`S��6㣥s��i��߿���p	�Qo��O1a�y�Ic�Υ�������|ɡ��I�e!=�1ّ�C���>D���C���>D��#ׇ���b_A�b��J�`ln�
+k���}ё}qO�����=-���-)m��=��g���Ql郋|�FoRsD�7.��L8��Q��Ua'��Se�۞?ǠF��R�oЂ��J������M?zF���f3��z�Zָ��F���0b�?��<�
(@��@�lN���b�	���G4S}h4�L7?ʝQqA�:p��q�5�E�]�C�l���Ⱥ�30Ʀ>�.�dp��9J�a�/�҅!�~��C�e���a��^�m�N��W���<���6�r���z��cCf�vk��'��{8�G-2�L"�8��$o$7k��?�x!MѴ����d���D�EmL\�\����t0F.���aŚ��K�2T�[���e%d�z�J5����Z
-q��!
+��.R�%��k�79"�9���QD�����XF.W+�����E��{r&��3x[��E!:uh��ӄ
+e��t�}"���^pӂ;ea	-,Q�Iu�4�U
�٤�39^mQ�h���c�Z{�ӑF��~;�������Z����Ɩ��A�r�!ڙ�Ֆ��6�C�v>w�}��m�GG��t>�S����L1_�
+�_�
+:�$���/2s����6d%��0��3\����Q	I��f���W�ᕫ���+��8�#�{�
+��p�����!����m�����WlB2�v/��J(Ԩe�����dž��}��!$�����f}��I�����T��%J/\�/=�W#+���Gc���A��Q��I���'$��r/;&�D:��(�9��&��g���󶛯���x/�c�f2��1�������T�*�oЪ}m�_�;���;o��^q�JX��� F���5	p��I?G{gE-W�a�1�*S�t�(�.о\u�z.�i�R5֥>6ķ��tȨV��Ex�LI9���.���Ǜ*2L�U�o�v>d��_c����ܟx���m��k�7�6����)]R:�w��Y����C�F����b�{����0�	�!y6��&��=�'d��L_��Y5�+�����_ѯ�RCڏ/�J��U�J�`93�}�v�9�B�f'$m���M��`)Ă4Cz �!� !.5P
+d�P�>��b=���w�%��|L� �J�_f��G���}[����5�Q�͇TA��6@!��G!�ai�t����$��c�:�8~ι��"P� ��.xEh��m���E���Z!Sf�������e�ե����.[V�f���ֶ.ٌi�-]�Ʋ��mYl�t���T��y����4���}����=����|�G�6�
OOC9��g$�"��E3��
+T��c5��P�{��=T��qո�j�c5�a{���R����la��#�_#�������������i&��i.���,P~����mCg熶�M��������=ؼ�'����̿��|��Yn���T���5���)� )�]�DW�����Qw��Đ�*�=gMHD1sa0�s�Fsa0s�isa0Wl@1I���C2��ܩ��T��*Oy���œ(]�e9��Z����1^cW���ո'�V�\�z,m�lݶm�����^��������m���Է���[56��ߞ0���
+窍jq:wBy�i��&"���1JF	�9��0K�O/QrLK<�>|�vG?����j�v�d��1N�*�����KI=���V��oi�pa�H0Q���l}���H	��.����0}�����q�>��a�8L���0}��e��6����L[<���cp�R+�"�p�������y�9�_Q��������)
+w�v�U�
�@0�?t��*����i�*�Vm���97�׹1��K�닋��y�x���8YQ0�[��PiCuӲ+қT!��#��8�#�dBA�1ZBr���Z�?�*�b\4�Ʒ	�̯G�f�mx�K��!���8��� ��q��PjG�bKZoP�L^�.ӯ[�ا�3L,�Ĺu[i;��h�U�C'��IJ����]d�h�ػQ��8#���P;@�4K���iTL@�X`@��,��*%n�5��@@
 �t�yI҄܀��m$ńgC�)�k�g���Pɫ������Pr@=�/�YcL1$���!qĐ8bH1$�G��#����b$t9�k	��0�mqp��W8�79�sJ�۴#t�#H�i8
+�I���%����q�R�)�o2�h��aFzż%����5}k:�S��
m[W'�V��\P��X^�yg����oX��΄c_�ʲ��dElAqn~��Ɩթ���Hw�v&�JVWՕ��+Jt44ߓ**^��*���rnG�Z_�տ�Ux�.H��
+1���s���ɑ�c_C,C<�~�(R�+@���c����`w��=��w��^�;�:3���	_�{�2	�K� �'��PO����P���2ݴ  ��.C�ś��@6�)ޔ-�q��
Wʆ+)��R6\)[�R
+Х~��}eI��������g�{�
���ޔ^�N���}�K7�e��\�H�g�%7n@���{��qK�G� ��\T�VE8H<C�K�h�$��SJ���%����.J�t;�Yc�sr�e�A��%��P:���VOPP����������r�ٴ�KI!_�JY|��$0��[��
+�{vk���ᣭ�u�l��j�Я��I}}��~i�g��
D
�_l .�^�Z`2���.�Ry�p��G
��0�q]��g�)�i�FZ�W����=l��n'��J7X7�u��X5�'�a�R�Ed��u>wP:P��Ugtn���*���go���-��~�M����gYV�	�y�}ڻǑ�2qn��)^@x�šO��i�_�i�BZ0!U�H�q���
�Dx�Y�H��<���Hޖ�C$Q�ۭ[�BH��ld���+�x�uV*�xeo'�S�uQ�5G��,m���ծ����O��{���="y���*!*0��#���x�a��̴��!�~�*�a�F ����;�9��L��|j�S?QF�6J�6�	�`?�OI���p���o�k��]�
+�|����e
�Z|PQ��e��q�5.�G�x�&v�ZBXKw���CH�!]����9����C������,�>C�!�1�\wZO���Tr���J��dX{��.��b6I����="��S�a*S�wtJ3��ܭ����0A"����j����3��H=�z$Ti���O��(��w��;���r�~�ئg����׮�!�g�k��ڃ��a/�O���ܩ~'�j�fb�Q�*��G�&�����y#c�-͟2����:zz�� �Ԟ��.J��|&jMǵ��3�l�k��m������A�e<�a>R���)%�׽���'�_�_�+�ᓊН�ک���HO~����ٕ�v��������B��n�<3�^k�V��Y��w�1>��6'���0ZQr��2B�����g�V[�Y�>�������_f(�ŀ;xS�ݮ<��~��O"��VG��LO��vJ�%\#�J���R^�c�I��zڃ��m���C?$���������!T�i̇h�Y�?q�(�4a�0�	����M����y|h��&��R�0�Rq�g*N��q1�tk���2�!>«����)�Z��L�R���*|��1~��+oD]L��50GU`���2f������g"��E G���b9R������Iۭ�����R�h��M��xct�*,�FQ�jz�C�g�
+b����_wB����\?��$���-����q�	9�hޖ:�q�2�d��	P��2P9#`���EB�PO�@�~�jȑF#��&��v���ó�IH먗���9IC�f�2B���Bk0f��.������Z�J^$�.(8e%�e��Z�D�3	h��$�4x�c7�!W���>P%&��J�|��(����H�6�-��Q�(�
+�Q��ޏ���޹x�Ě�1��WKl���F��#RC��%��"ŏII�Hԏ�-K���u3���#�hm�M'Q��]8Eam��]�@��mu٢@�B�*袀Q�E�E�2=����؛F��ᛙ��{��s���`G`�a߂yW��{Jj_~��s��A��1j��%ʴD<.�`�J6{v�&�m����{���kƨ�X<+X<�n�������ֵ�5;���zu$�EÑ���x��D#I�:R�������1{�ܓ���g�z����ؒ��2���B-�t�7:2��
+��p�')Lg�43[(dC�x�TK[�OpG�7�՞�ړ��j���I����=��#��&��1;�%8>��B��+%�O)W�A~�(<\���8Iu�*������vY�P�``S�ɔ��'�"�=��6�
�f0$��?���r���#��y��#>���|��_6�[�]���nQҽ�l˒{5��h�b-
�@Ƚ�\�~�����G6�*�)?��2u��M��|@�>�3�|�G��H���8�$���M?_����- �0
�‹���x�^��ׂe�+�}C����O�X���j]d��Q9gGmU��Ԙ��
-ztlze*��Z����ySk�7��L�8�
w���t&�����b��|z|�����J�TOfa�/=��ɾ2֟�-Q��U?����/do�Ch���>�
+�9D�4��z��*X����=�
>�
+7ĝP+/�ʽ�.�4��H4�ԝa��l�nS+1m��	y����w��nN��N�$��fiW�i�|�#)�4�ͫز�
U\&����Fh "9�D�(�����V��J��RG��Z�C+uh��ԅVF�m�E0�!�A�`"�!�A�`�E0ǃ�.�۬�nk(�~�����pf"__�6���:6�J���^�0��c�ھ\f��7?u�bn�����b�4_�:lB�,�����FE��(��_$�TMUC���fa_����B1��cI
+��лz,r������_��F���Y�t��Pk�*���4������
��"��"9ȼ$�b`'
줁��נO� B�2��Jbh���{0MJ�3�Fm����
+>:���Gs\
����\��LWm�~.�P�Z3�4�	�7_���r��������BT�V��#��JV����j���V���2��2�qS8�O2�c�)�–:�dp������.08��t'�H�\�$��!�Ϳ!�Ƹ%Ly�̑��Pa��	%��tp[dq����Y,p֒�pNF��)��6���$$P\��b�	�j�=J�G	x�Ot��%x�������Y�a�w����<_�e1_�1��D�x�M0e�GZ��Iv�2�X+���
pӀbP,�e@�(�!���$��fb�K���D�n6�͆��P7�fC�l��
u�Y�l!ˤn�F��4�k��Snx�973<��R�՟i��X���vk����#�c���ٽ�,�j��ծ�z�Y��ɱ\,6�$o�~4�tvj����đ��L�w�J�ԁ�ڟ��?֡Q2p��	�w�{I\������hK���(�Ϸ���颁���n薊�"	%R7�,�O�kӆ(�n�L��������������)§bj�6H�\K
\K
����f,۩:�cՑ�X�JL{B�F��}���}�����@*}�lC}�����mz<���\�RV�Д�R��Ȼ[J(	J�Jȇ?��s�X4v�D�e��"�!��
�b>���=�>f�9�wX�װ�����oa�V�BAÉ
+5x��������
+�b)��t���ۗ�k/�!�C��8�[��ڣ������;�]��:�:�"�ࣉ2�B&R�D���o�-I@9d"�LyD�dke���!�6�'a�JKc�Ҳ�vvVH��p9��g���U7��p��E8U�SE�u>�S����|�%g�E+O�Q�u����0�������������Ը�EڅT�:���6�����:U���R��6�]�,��e�ɕ���lX������'���ï�ԃ/Le�!�6����Zim-��,��"�Du���۱���	�|�`�X�ߜ�'�2�=N_jt8a�IeQ}�j�j��'�US!ʉ�d����O�GTn�UqRj��-�έ�qj~����\^�>�;h`� ��|
Ix
+F�׋(Y%Q�8J�d!J�d!J�d!J�d�(��K��G��͊������(
Ǫ{�Fm87����<����ޑ�B�z|�/M��MѻPy窃��IuQ�B�!�mPy�=�\l�=���ڽz������P�����h�t
R�6��Ҡ
+Lb驆�[
�O;}�w�}�h_��I׿��.�fK�g�H�ek��Y���[7���O~x�������k�=�sCߟ��i{aʱl�鮎��:�*�:?���>_o�6��_���OK
j��������m�cַ�n�/s]5�������$zﯲ1�!������m�G��X˸#�<�7+t!�,��ۺ*|)S\u~g��I���I�>D��<��v�G.�X{Y��u>�^�g�?��*�Y��5?��y�;�c��L����|��CrK�R�W֕(@���\e���%��!&�XTld�x�cٺ'j/4^���[�:H����o��V���u���J��t��_��q^��Q�u�}߻��-�{/w�qm�����^����h��*)8����
���!�s�h h�(,]d�?�fAj��0�m&Mt�G��3Y�0:���}��swPY�\��s��y������-ww��Ax��|�?ϻ�j�!��y+�ɵ#Ͻi�N��bw���34}��� O�qR;^�R���������,�񯨛��Z��:��L����F�A��s��/�+��P!��"�/�(A��Xd0"Egl�*ƿOu�9$C��P�z��[x�t�R �Wݧ�㘝gz�%y�I�3�h��P�������Բ�T|lr�������o�q?������Ov,>co�댯X?����5���:Wz�/z��W�9��8/��ѽZ���=���wU������xZW���aȡ`X.��E��C�q��U�ಐ��T=�gO�_�#}�B뱶c���"��Tqr��Nm�Z}Gu��Z�DE�����oa+e�(2WBg�L�4gZWV��&,wF�/�h	@��N-K*o�6����Z����l����٤�\Dm�H�iջ�x~�T�{0+�<G0A�@�9���ċ��8���X�Jc
bMb��ڋ��2�>&�X�P؅P=T�lB		���i�.B�źh�.Z��+��E�u�b]�XW��(��1���1��.�!���!���!{D�݂�-UJY�rD��f�[Z��Q��n���9��<�stlf��vr�=��=���do}}�dnp�dg�����]�rc͇C�JUSY��y�+w3���Bٴ�-Uq5p���
+�SE�	W�4�ӸvA�M��"�Q�3�>i�A�I�X{����t�X� �!ׅ����5%ZP]!1�L���
�mK7t��`�t�`�J��G0��@P[~5��oR��PĊ�}��R��C���8aï��M�S	?a�8� k�������M��Ñ��tF����h:3��H�e3#ɐ�ˤ�w
��,�koٿO��onX�l�DK������bȩR|����[+sZ�V+]�����qa
�A>9�q��Xy�(�0m��}��x�x��,_�����Q��6�qzA���L^�e$r���&Ő,2D�)
+v��u��RU����P�X����¦�f�cfT��u��U�6�Te�&G&�#�1�'C����Ż��Xc/7�����J�����N����U�j+ա7�+����~s����}�T���TIR)%�٥*�u��V������'>��o}c�����3�ؿ��z�����o"�F��D7��x�I�3E�[����\�,�^d�ˍH����ܞ��>D�nc$L9[K���^"��l[�xeHyT�
+�M����	�qJ�UV�GD��-�Q��	S�;7��M$�uT�.5M4�p��5�T�X�~^�ɀYԪ���dCs���S[���n��8�h�`o0�Y�,�E~�#��f]��T]Q�
�a@�l^��[Hg��lIc��'Pc����4n�S��p�(�:<0vE�F1cЂXu�:y��<��O��	t�:y�<�N�@'Oh�q^�
���.+ 2۵�b��Я�,#�c-3Y��L�L��؀�g���J��
+�Y�6�}�]�꩏d��b�щ��Xb`mkf�`k`ʩmIW�S͑x�HS�h.�ߜ��?�t���ז���u�x��*��ۖZ���\�%RV�ՇkÕ����޶���P8��[�	;�FL�=�(C�$�A	*���_�f�=��&/_)��&w/�2�dBҢ<$ãм�2��s�^P����a�Zѵ�X�~�Gܪh$���\�j�vp��ŋފ��Ʋ�lҞ�xJ��ػ����Y����F��L|�Yzz��q����W륫? �W�k3w,5�q�7����R|�UP)�*�G�����R|T�L,�=-�U�k~��Q�����.�F�,b?��#=��I��*����ڛ��2�F|V
+T�}�
+��UQ��zp����h�B�9�ߨ���o	���ii&d�٣��ԣJ��9~W�l�A�W�W���-��ư��M�����T�-�2s���Q=���&t3䇐C����+�$y��D�/�����o>�-���]��}NW:��瞛(xz�4�AԎ
�ck7f(��4�s�y�(��5(ň��B4��\tA�S.j�?H}�R([�9F�#�$�z���9F�#x�[�����	�FZ.�RB*j�%#)`�i��	�Eoi�,�g.K��)�6��-�?t��;E��)�{|y�g<�K�rR��J�r1���P�Q�CPg�t�d�5jTi��B�I�B7�:%\xӶ����d}��y��n��U��Mn70��	��-�z�
+�s9o�,��x�!�
�c��!��	f�C:�J�K�ĺ�3�ſ �HP���v�>Cp����3�s�LyNy�dt+�6�����#����)NLS�v�9�rO�tչ���%^t��5f�Mަ���������?F��\�4�
+�5ݟ\�r<Z��s�%q� ²����,[�-n��e��神�k���B��Bʰg���i�����?}����|��Ǚ�����̙���=3��C��S���������
~�p���Y�´�r$�
+�@A��j����|�U�;Z�PQ�I���`~	pI���h#q͗m���n���w�U�����-��b��n>��m҇�Tʚ"�I�pP���$����E���Q[�=�:��:��t+�F�h7[/в��K��U��[�ԏ�2�_b�~����l�s��6����=D�'��k��ܺ�)��◻�ݩ��o��56ʬ�M��PJg�����Lo�2�LK;Pz����ﴊ`�K���vݰhVlٮ�
b�ŀ�*���
ºPRqCXD	1Bt��u�64b��9�������~y3O��s��|��9��6��'Y.�Rs��[Ԝ`'�A޲�`��~v�'YdnQS@�G�Dڔ�H�E�5ԤP3�雩1�E�Kc/�V�Z�N��얇��9=C T{9�������,����O�����h/�fn�%�6�ΩXH�h�!�|�iGj{Y춰6��%�Cp�er�.ϝ,x�!�1'ܦ��`��. J���M4p[L��}Z�
+GL��4�����۱�$�Y����C<�@�����'v3d�LУb�2���-�QF�q~�u����>v�K�h��w%�E��"o��z���羲��՝CC�m����i��۷qt��*y�K�{k�H�������ѹTՎ�?�~��O�X�!���Ԝ��Qi,��!�d�̦])�^��#j�WX�ؿ&��!��H��i�ӝQ@�l�ץ&���x�v}��g�A�~jnӦ�Cg�ƒ��l�A	L�Rk����$�����r���eb���Z���n��n�==[ؾ,���]��E�^{�k�f>AA���n��$�op�=�ۃ�=�],^���o��,�
ʯ֞Q�����~�-l�"��vz��l�\vKo��i���G�T���ɨ���(�p��d
+ f�	�+m�$>5�%8,���:�Z�v��v-�~M�](^�#E�R���|r=�ē�y��N#H@:�He�i�M�H�K���]�'(U�B�"cS�op�C	P��cyD���TS�P�b�PX���󼠤��%}��s���ɫ�H�Ԫ���	�"��Kd��0Sd�+�c�]���d�j��H��&�⬶���`Y�˟��1+3����o���j/��s�Y���iLSRW�>�P�5Ծ�F�NO���a��W�{�FVy�rS�+Q.@��D�n+��j�*b �tź�e6�(����QDs���	��!����0�kd�HN�(��ټ!vr�� ��G����n�HCs	N�B��wj:���AC�S-��i�$�H�r9TΡS{	j���X��#x�v�]�ēP�3�u��E�2j~��e,���S��rx��Ŗ��DL��-�nfp��Jc����W�n����+5��|���c�_����ϩ9L��9ņA�"�HJ{L�:�©H��-�mZ 8>�r�$u���o:ǦA��h'����j�|�<_^��~���+���%-����N�G�6'ov��Q��]Q��o�nT�Og���1I�p	�����E�=��L�K�7x�s�&�m���竍�W�-���q1C*Y�4�c�zjH�!^Չ��d��Cd,]��M�t�0�גn�`��D�.7o�bx�zk�X�T9��)��[���c�j���
+�Z�h��,B�pYf7X����;������U��/T����IW�9���-꣑>�g�Ӽc��[��G���z�������%J�j�9�$�L�K��K<�mL���CO�"+>(J���j� Xo�qW�R����#��i:�I��!� ��w��֠=U���ք�ݔ����+*��y�!�\x���I���	��ъ#_zW$6{�|��󍼩�N�d#kJ�P(�l��v<�g�i�3nb˪0�?v�
+��D��[<�f]�\�Y�D�K�6FA�m������0"��XD�5�h�QT �
+��ښF3R�7�Ba��d�R��N]��m�x�z���$�b��޽�=�jn��~U$��:/���Z]-L�G�[��{[[zVWT��ii�]]�ְ}`钁m

��,��`�Y�&4MU�����(ZPR�Yјj�̉�w�����Z�׾`��ݳ`y��htK��[�ѭ2!�'�"���k#V��r��@Au�.��1�!�����A �e'tZ`�i�:S�"��W ���Td6$
+Y�<�q��YG7�b���|�X/���HE�P��(��"KEd1��p�H�o�'9�	�\�=� 
R��,���l�|��r����#,s��\
�gR�^�	�)��C��l�tC�qj�,��Y��8ߧdH{�"-����U��s�p�M��K�'Ԝ]����c^Ae}0�篍���t&;�WfV����J2<���̲�g��-_T��Z�ZZ�2ٌ˫��Y7?h��meKռ@B������'�ʝS��19i�O>0�G�p���'�hC�쑟\S{�0;���Q�U��NV7�Ǐ
T�5\,Õl~.i]Y���?��Yh���x5��cr�b�r17,Q�\���Y�>UJ�V�]J�V�V�&lV�U,�2a��J}�MOK�{5��E�ޗ�oо�������
	@�b�&���S1�ll������b�K��\z�KZ������L��E's�	�I�x�9lۛ���K�ĵ�]�^�n;zf���ٝx=�z����`쓼�;'�)���iIi;Ү�7d���VVN��������_�
�]
v�.vv�_�}JJ�Z���}��WUVݭ.�~32�_�A��9�uG�wԿ;�⼒yW�5�[��?���}�<q��,������<�����yd>���Q��߈7ق���X�F����L%��xA':d�d"�r�mx@���Շ.��ƒ���B��-���C�
�������x����Q+��~�Gu�����$Ph)����8���&M�H)�d		� YJ���7�7�ݛ�{7!Jil��Q;�����T;u4���S_}0jmu�> ��Z_������9���a�����{�s�w�����U��A����
+�k�p���u(����H.u(���yŨ����k��Z��6�}݂B}+�ku�6t�ۑ�ⴃn��t#�Bzك��p�O�F�}��o��=D�B��6}�E�=NG�	��ߧ�Ay������S:J���p�~u�0Jf�>L���q�ҝt;�O�W������[�.�8�	���a��}
��i?��
�y>���KEa�O�-�e��R�n"�o?��;间��P7��!���p5ߍ�۠^���>�=��s�z���~�_C��������}�y��
+�+��,���S���Jn��|W�"�X��������G���lq��/"b���"&>�����"�HidF�ȕ��#�#K#M�=cё�����E}wc��V�-<
+�����b����Н	�s�tuE����s�c�`<��z~����:u�����3Q7Q���&��o?>1m|���N��$���Ǐ��#Sn)]�Gu;��u�9�|
|Grþ��Ã�x�����lyq�8ϥc<��yMp���7�s��/�_s%=�U�{^I�ř��
+������'^K���%���m��џ��^�Vz���n�����
�w�D'x3����I�J��6z���o��������8y3���-��KX�2Nr)����K8�S���h/���R���l�l~���`�g�0_�._���|+_ŷ!6����r����N���A29�E�nT��PQm��I�ˑќ��|3�k!d�{
+�)ppR�Y�����ȇK���eto�2��Bh(J���0�کe>�˦6�p�>|��~҇˸�K�4�V�pS��ͤ�j���0�	K7t��)3k��t�;m+�傮���l�o[z.af��B�P�ѣ%G����鈄�?d�ɔ��QS]�LT��
+�jf-gĆLwSTV���������i��:��1�C��)u�wP�
�@��~�v��2�ղR�!�E�-��6���X��cJH7�D�r "�rm-�����馠�bb����M�5�B�zj��V����P��իVE�\]����:݂ C�kd�k�����R;�r��r{Q�&#=`�fB��jR>��hv�d�d���n�N��ٹD_Fs�߲b݆(�N�s�9`��i3ad��nʶr�Կ3�2S҄|�fV$`[
�ޜm:��pM+���p}�K�,�lfa�ՠ�	X�33�nN7-y��-�<aM�H���rCSkC���E�l6rFkg���l�2Z��m���׾����ʱp�
+@)�A:��$����L8X3�����g�����uyF&(
;��p
E3
+�"�ܓxY�Hm���%���$C'���Q|$���pR�j=���vM�$]��F
��j�F�Ra�P��h��#Pe {�*��D��|��`U)^�J*i1�������!���J��%u��]��fA����~�b(�=���j�+�D��0���li)UTa[��=�5]ɓ�^��BW�JB[�q�/����A')��g41h��*��(<[a�*����Ejʎ�ISiɆ�<�y�Z������jx��LyK�Y�"��(I2J�f?�mж��n�w/�����x�	PZ�]�&T�{�Պ��O�������i%�
>��yO�>uƙ��e�d�����c9`����Ŧ����+�Jjr�Qu&��J/WIY���;��V���d+\]�"�b)y��*�s����,�ϒ�#^�T;g,x�1�y6��-[]q/p�2�d����bS�(擋�R�Z���z4ZK/!��0Z!S��,��-KɒUQ��~Z���h�7O��˸���e<���F]��xo�5�w(0�4���0ь�I{����[П����������1:*�>������@�>�Zp��.�1S�M�$\C;�K43_Kӹ��F�s�_��@	��GAP��E[�hj[�A�;�c��}�b�&t
+�wU�G�h4c���@K€�Q�~4�h}{�¸#���X/R�>Z���0F�׈Q��2���
+���+���ׂNH�	):#qP��)�*
+ p
+H%�1\�ֵ�kt8V1Z�W�âq�p[���XE8ViQR�o6�}�� s��S<*��o�0<zxd�bz��1�s��(.	5��p��^����+��і��F�W��\���-޷{ƚWhN���oL��B�������/	-��Sٯy?��b ��;B�=b����?�
+#H���	D)�!9�P{&���`�Uރ�j�u�w�Q�M���c;F9�<���b��}���8~�}�V�jP�VZ56JDK%h	5��k����h�&��_`�஛q�S5&�&����眾��KI����{���s��p�AF��z���j�dN������k�tɺw�i0V�Wd.��t���dҟ�)�Kn�=fS���ҋ�cn�M��R�T2�ى�q�)�7��w�%=�,#��Q�e������2ƺ��0�m��t��U�$g���
�����~Kڵ��)�+��x�T��T.�X�n��ć�"����c�ωG2�g��|"���rŸd��g��>Egѯ������}���8c[y?�q��h��]&;���-7�5q�1���N�XI:@��qPˉ��[/zOL�/JO���7���q�����8]F����E��8��d��=
+����L�b������?gA~��`2��!��Bw���sW,��Cs^��J��<���X�L�mʏq�	�A~���zP�_Q�V���:���К�#�q����ơ��x��2_�j�A�x�.���W�ƽ��6���b�Z]6��5�Q{m
+ᦌ�������Z������}��O|U��W���u��nQ�j�,י�N,4�E���]c{ʥ��^ ��i�Z�Q�;ւUd��_��qn���.w
+����9��/X.�X���Z�z�[��Ը��~)�g�X�<[�wc�~�����_�foy-4��]��9֥�e�9��<�a~�!��$����m��=���	{���!�o�Ns�ii�]�"9!���~O�r�=��,C�������3r���v�2�K�p1~̡D̡9���-[��s0s0%'y1�.\K)'i8Hf!����'���V��
+endstream
+endobj
+97 0 obj
+<</Length1 25658/Filter/FlateDecode/Length 12197>>stream
+H�|V	XT���{��(�� ��f��
+�B�
+�����$���EE�IT��6�Ijm�h�M�M�I��Am%6q!��4ml$��I��41ZY��73|]���]�?�=��s���`ڠ�xqIZƣq��d��*�6U6GX>�v�\���E����T�\۔����g賦�qk��HG/��6�UWV���v(|����HXm� ��hO�kj�r��y��5�k\����;��%�VG4UniV��i�AB[W�T�2nڌr�y�����������������_�xk��%��R���Ү�ң��+�Ȑi��ZT��*�6`4́�)\P��[��5�.Ψ/�W��}J�ҥ����1�?"%E�U7���}������T9�X�`�@Fr�B�i��H����F�!6���!����@&"N$a��d� �1iH�Td Ypa�cf"��v܁Y�9��9��\�!�0�(�"a1��K�H˰+��\��p7V��X�
+T2�Z4`Z�
�ۋ��	��_�$N�5����w�=�E��e|�����&nH����.ŲUe�,���
+Y.+%AR�	��/H���f)�V�&m�l��R.��g0 9r�l��J��FY ���Z��i��H�̕\�-s�Q�X���y2_�E�%R$y�/u�ƽ��Vl�#�"�N|ߒ��?�?F�$J�L��2]2e��$[�`R�9�8�f��vIa�f�[�)�������!��ک��b�8͡9�t-[�՞�G�v�}�CqX��(G�#Α��pT'v���
���;���Vb�S�zq8j����4#��ωΨㆣVQehh轀�������@����c޾����}�}������z�z����س�Y�V�S����r���g�y���}�4�ݯP�ʿ�EQ���s7�u_��g
�W�G)��*I�>�9�d�2��i�JYCŴR?E��
+jc�TK��J�Pŕ�1t�H�n�V��*���r��G_i�e�܃�\i}�_D�+��IE���OQo�Qѯ�uM_��?��?�
ܔF*r��+;FFRmux�x��4�p�Ѕf��z���8�M�
��p?����m�#��
l�[؁K<��� ��Cx�_���2��+��vj2N�x�q|�'�'��_�U���7|
��q
O�x�'؇�۸��_��.��=.�3"��(���8(f�PL8,�8$AxNB������I$~.cpD���D㨌�1~%�K�gł��$�n���q{0�؍���K��ۤ^���K9g��U��o�>t�AJ,B�#l]��^�� ��+C�x�sBONuu�d/$U���s=RACI%�l'RS�|�����4���k��ڵ|����cJ4~�Q��N�<()���e�vO��6���l�1�qLF�v7#4�#4`�N��ͣ:�K��z�rm��\��n��<�ť��\�����e8S�n����leΖd� _��`w{��J�{:��m���o{�M��	�P��Vlt�%�m:�`O�3-w.�
+N-()�cbv�d��ե��)��ũ�")͋IlIa^$^���M�x"q|�Q��@|x�L�����V�Y*o ��
+u;�n���1|=�-�\��L�qݠc^��4[/��Yn?������N���#ۇG=�(�j4�(��d�ə&<-}��6�2�e�a��
��n�� �w���A�̙�����
+�����j6@��׭��
�	�q��r��n����o8_�A��D�����s���4��lJh�XR^��,�(뢯,3�0fSt���>C���/���3G2�H��,�}6������ʌ��At�:?x�W��'U,ϡX_�C�RU�a~p����)l�^SK=k�K�Á��(�.�-LweF�Iִ�ĕ�LpX�I�$3cl��5T���������k�"&��O�dXNs��Lyb�J�b�ܹ�çg�����f͛3�6͕`w��J��e�@���E��+<�VQ�{���j
+S�9�@��9k�1f���_)3X%���<&��FW��SuV��0�*t�3a��`��Դݽ�򏲻$���jm�}���X�)/#K�>ʩ����I`��06CG�:�������S���g�����`����������٣H���揄m�:��\�$��8��8�����'&�0ݨ`�A��|f#4��J��I�cPu�U��nlBj��Tit��J;���G���Zm?&�MC�Ԭ�*ڑ��}�}���dNx�9�������f�q�5���(_Yé��ˆ����蝲B��3�����n�u�
+Y]0���s�=-���z�P��M�F��FO=U��f��ⵋx�
j����a Y,AI+a�Y,Oy�b����̅�ۇ�J��'��5����y��ދ��<L�]��)���w��X}~,Wō�o4��
FY�)$gǑ<��a
����g��}I^;.��~�k+�Y+͡�>3��4D�5���`ъ�p�
+�6���q�s�uƃWkow>���$S�������~s��f�epۺ��o�\�ϴwd��;B�m�L0��LM>��t���=���{W-�Τͺq�ˡl�O�ҥP{�N���"�$�1gT��rU_֘^*W���"C��fpw9r9��y����g�!�����9i�Gp��38�"@�)��ҫ�p樒g6tUI�V)a��	�9F-�W��* �@��A��h��^����:x/]w�~�ŋ�]5*�$�(Մ
+�T�J�S�T�5�C�~e6���,T�����7������E�I�oU��i:9�M���:�wJ�0!�Ā$
+e�5N	���I�z��1I�_�z���g��q���ͼ�LS����J|�6���4D6(,��x�i����PػV�xZ��z��"���
+H�KO��D��H�@wY�;{�OIi='�'�B���Z�U��@���Y���`)[ڱr�؊��`�_����ű{چ\�奄��e�l���j��S��o�7���;��rr�8�EFĸZ�WGt\N��-9e�:"�="�5‡�q�sZ�x0;��!Lk�{��6�6�m͉�BX�)~�|�-�R�,�#�l�{���ě\ B�.�)�hP�ō�h1LԾY����#���+�\����>�c��α��ߺ2>տuf��Ƿ,�:�8Qo�G��!ŠW�xR�II���v����j�@���>���eڽ�|t�7�x�n`w�wl���#>��w�$���V$�X}V:!��D�l��J*\@K���^�>7�<����/L�����=�\Kz�SN��2� S2� S�A!�ýA!��y,y��1A��Fٓ��')��2��k�� V������4,�&���u��	9��f'�P?7&f��K��Ǧ2�72�*DyR�R}>#`��9�
�g`}&gi2wI�%)�$������דu*�8���j�Q���Å��Ity×[9�ƗǨ#f}9utPG�.;aL�Y;
+���]�����X���P8@��O�.�U�hG�l�&�������(D).��n��v\��
+��P�Y��}��&����o�s���x�e�)�s]���s���mk���OB�T���WO�K7wec�ޝY�?���[�l���m���(�����}�X|�>!��$S�x�d��Nn)���(	�`����쌷j�'o���@}�@�X�k���MI��u���YID��r��r!H��*�W�ϼ��
+�����ا�PK~���������MX#��M�b�_r����HB�s��j�2���͢�BsЋg�嬉�
+���ż�2֠=���W3���}\��&4�ݦ�q4"�;Y4k!��h:zZ�*'��.�O���)�|8~�����.X��^X�՛��(5X��)ӁI����q^��N��(ڗѶ�9�-v�xͤ��N��xjP�=p�ԙ6(�/����T3u4[��f9��[<�gy=7Q�Qd;���[�KXV�NI=;�d�
+�#��ᇐ~o�9��!�ྃ�!��[]�/��󿧦�����77$ջt%k���$�v�v��v���ܦ�e���/�~���H:F�z�L}��4D����4P�Fm�=F��Er芤��G��$h��z��
+�"�H�S���C}űʵ��Z_x����~0��8<�xl����;.[��m���U��.�H���U�.��K��j���.�e�b�*�<f���-:�&$�`
|���/��;&`?��º8%���1��������7��([����ɪN�ª��['|V��1�m�����:8�ڟ$W��l�1��3ڇh�E��t@-Ky�Lg����4H;����]k|��^x�hWe57@�R^��y(������ޭ�é>�S}��B�JV
(�]�㋗wx���9��\r׆Jelpxח�K[����d:���������t�h&��G�d���
���������e���9��7����<]Il�I6>^[�r8e�F�� ऀ�/�	��?9�^ܧIr�Iz45N����Y:��&�h�<�&9������a�y�qkR�˱��d4��9Q9�6������+�v����m�U����H��M
�g�0��|*��z
���"�d�6�z��.���������7��P"�E�[戳������Hћ�‹2\�!�@:`�C��>���}=B�:�����=U���������B�((��a>xK���C�Tz^��D�
Qf�F��+��R�Ξ6��r����
+����;��m��IN�/��~R��񨀿xF��z���̆�U(w(u?��m�س�5/�ϙ�8}?p�t�XKx�}̊��;ŋ>�xy�&�u��:cU6���r�kP�'���Z͔�/���m�*��^�ډ��_qb;���؉��n��ڤ锶�j���
+F��T�
��M��t��~@�v��
++��������g��D5i�~TL���)��y�۔��n�����s�{�����]�������G �@��o��Q�
+o��	�*	WW�>t�E�e~ ��q;p5)�1�h��{]@�
�����8����"�����vLÇe��ڌ��5y�W�l��K�.�[b�f��K��d\MƝ��~owņ]%촔������.��������|C��w	������������O-�~9*�Dk�8B�Ğ,�D��m�yc_}�1_��K5����,��s�0A�ֿ�_ð_�+藕�����[M?�\�cF��
j�RC��qQ�|�"�Dʀ(ۛ9�
,�
+\$��~vʹ�1w4�,އ眞�Ҡ
k.�t��ܘ�������)5�bV` �"�AƑ
+y��zջ܋p~/�C��9��}ZGhxz�x@��X��<1mL����r�w�^ߢ��2�g&'�Vypw����dN2��`w*��c�O�[r��̓k�d����@>��&��\�kK%�v6�d��՞L��G�q����߀.��>��*�_����KaJX�bU^W�MR[,8u��$��m:��:t�|�h�{x�y(�J�������XP5�\���ڟ��Edx5�)z�B�"Sa�:1�)�ڣuM�A��<��E�n�9�j���\�ъ*�,����Jʊ�ه��0�c��2Z��,�K��D�+O�e2��@6g��Ҧ�T*ԍ�j��Z���Ƹ�XOƓ��BҸ�v&sry�dv��?p����%�?�sn�U(If�E���ؼ�%�� ���&�vlF�Л�<�$*r����P��۴��o�E�y4����q�E&�:��]C�]���e����7��ip�3��8�jF�fhD�Hz���}O�����6\���%&�yn�x��1��t�wJ�Ǡ��'���:��3���gIȴ���Wd�Sʄ�Kd���f>NLN�i��Gs.^pF�-C�,�@�:�>YXFZ��!I�
��'�H[���3��>7����|�V:�q�@$���Ƿ&�b��~�������l��a䦡�7�4��*����\��z\N؋����(O!*��DH����m<ұ���&v�{(��Nw���5�K8�CJ�
b x��f�4!���h�
�|��h�D�|Y2�q&�C��sv�(��2��D;/2�L`�jVvj�/O�C�ˡ8�V#�1����v��3(7(;��(8���7+-�1�zӫ��UX_X|EX�4,f��DJq.���qHI�I�P�
+;���EJk�խ�Z����V��m~^�5*�3
+�3�i�J�Fe�Q�+_��b?���q�ߔ!?��(>��}oG�٢��V�&�B�4���7G�p#�!��T�e�P�vJ��iJ
��)����W�2��+3��%�&�ѣj�~+R�(�ɏ>w�\ϊ�\�<Jch��o��^UJR-��Z�^=Wp����/{�D��x����'v�n��۶iט��{��X,iy�=�=n�&�{lrʣ���M������H:1��[;�_Cڤ�.�~�yx��}E˷�vUy]�wX�Q��M2�L�[,'=��G!2� %���6%�2%ΥZ�ȜQ��HO歀��=mv�E�W���IL:9���
+c'rT
+Q5��TWE1M�L�si�e�8��'`�	Xm��������a�9�H.i��SM�	t�Q)4�N�5�'���MH�a �0�V���)Gu;'����y&��W�R���a�}^v�^X%���FC!2TP��}�*�u�=�d |?6�ɮ�ސ7=OY�=�́C�1_ܟ0b��������@�t�x?5��LJ7<�':��k�g��K�m�����/}����D&���W���g���CD%�A�[o�g$r�߮)iW֋hF���YT��Ea�(.*�.
+/�S�j�M���0'mj!���h��
+T�md�5��~���JE-0֢X�����`{�?�u{=���n\��܎��'���^ Q�_3�����z^���<{4�
+Ubk4�����M2�&�jc2a��H�3/�Q@Q��xWL�.JDŽ
�
���I�{"�h%ҋ/&�ޯ��u�΀Bz��r�o=���߈g��?��N6��M<+��s��!d� E�h�t�Az��}��ړ�NJ��D1��%Z�&G�w��^��K�.�ϵ�$��<
��)l
+�!������k�*��3���9b������&�ُ
+��?���F�2����`Kg6ֆ�\Vd,=�B��G����'�:�p�W�c�	
+�������D�8�B��-z�0�r�CYK�4R����	�X��#� �)�����o�"ߞ�o��ղ7b�1;�����~z��
p$ז�����_�|�\��x
���+p<a��4BB���-��-�t��C!2圙��R<�&C���6!ߦ8SΤ	g�w=}*��M<Yt��k7+��Ư�?�_^��vPΓ�]OX��ƀ��-J�+���V_�(nv��JZM���_������t�������NP*�$eC��,g���7f�Yxc��h�8�Nޚudȃ	��ik[�#`c���h6)���ί%�6^�f��!���5 �l��m�tK´Ne�����q�¢��/�v��
s������c$�C�������k��O��־-��j���K�p�"R�#Rh#�k;��^�`�:�>��>��>�*R��Qܼu�FFe#�r��x��Fy\љ٧�x�k��f����ر�!�
	6N��I�D�Q)�A��-AU
�(`
+�J�$D�jUJ[�JEU*�C���Ǐ�ʏJQ 1m�M�*$"�
���w�ڋy���wv���qϽw��d踜�qG2�ۜ7-5Ҍ�� ���n*����
o��]	�U��E�5�_RA����?����`��ו�M�kl��s��ߛf�5�7�ŕ���|ڭ�+�#�
+��e�E}��y]9�	��1W���E�UE��v�����b谐���'2o��
+�5DȄ'pաK�����t�%׽*'�^!/	�gO^��č�D�k�}v:�����}u��*������ca��[��j�:��Ъ{���7RSْL�T�D��h����N�{jБmC̓]�#[��Tw
.ԵÈ���%c31����-Jb���x�HN��b7�iՈ�q	�����m��nQ7s�날�J|��}���"�m�BT��L^��k��u��ݭ�"a������C�=Z���Us�e[��>կ�V���)8S���MIr�3y]�qt"�:;h�+lF��(H;N�^i�]i�+
s��0	�ɗ����/����CZ,��	��Ti�8iL]���0u����ol������n��x�-B��k-�`�jh��V �Ԫ=�T���3�ڧ0_�j�7�;l���7�\��f%U4.[�%�gw�v��s5:eυXb���w��{��ҩdCm��}w%:�г��oU�Q�X�����ڞZW�o+9V�jM&+����p_ݪe���DKcMM,YX���e�%^��߈#A�s����Ͽ�Ui�꜂��%l���� Y[�� �-O�FJt7�Oo�n��T�P	����ƷC%0TB�J`������l~��2�DT� �*�T+��bR
�E��OZd����
+��u�P	�� ����K���>5��+�2nx�6~�d�av��Gd�ǥ>ډ���0yc�0�(Q�1V�Ú�z���dJk�Ł���殪��y�Msg�UZo1�p]o��s�7��[�5A�2j�b��Vr!`� ��c��.���
�~!Ӷ�>���(>��Mr��*ZL�H,�ް��D8}�q�:a�{D��� Okw��0;�������H�[�8k�{�b鉥Sdꆞh7���)�&��õ�><w����WW��6'`�'Gq����0#��$�1޴��Z�'�0
+��[i�8��t,��N��q!I"d�c܂ݓ��|K�-i�Xa�ŞC��������M�6��]�V�v̯HˋB~#�i&t�DT��m�d>�å��L�?�rAK��F�d�H�hϻ����Mi�+d�UI���wP�r딐g,�@>x�!�J��}]���|�P��_BXQ
��4�N�f�U>�t��e70!�������#J�flEt��H��r���־I�v��}N���,�q�zQ���h��h̞…Y����ȷ�Y�
���Co�p�Au�3�9g��'�߸x�^�Bf��r�<*��	XQF@�&5�.)�G��c<��'B��K˸(wTȟ�4��-�-C�c�y�`/9���9�[B$f����SE����oR4���v˿'E�������=�8d�4�s��o����|��@�(�'Ix��>$[�(qr\9��P�8���2�T�R��T~���m���R�����(���_�2|�/'�A\���qĈ�,O��v�)Q�ٹZ����Y9
+P;cERj��<+-��b@]��UC�Wd��$�B8��f>(�Uv����{X��I&����۾gēVH��(���:>U�
+n��)l��sb��S�j�J=�ez���&2��N<��/����1׾Co_.��C���7�ʔ|^tJ"]�M����LrEG����e�����ͭ*k�V��,���k?��Y��r���H�&Q�O�E��
+f�W[d�Z�GD�k��HV�"�c�+����1�IL�X��R6�'u��j�|����T�>�}�����Y#E��D�X��s?'3�;�(N*�vh�aO�n/)i��4�1����w��4����s��o��s9W��1
�zJ�2�]D��W;e��2k�	�I���J�tWIw��9c��Y�Uu՜A��A2�a��<V��3 [8#S��S�#��G����
+���U���5����������N�4%���iS�����_���3;v�܇5�5�O�sds���T���O�>�}zN�9�h���»x�fLe�<�7�G�Z�>�m�q���zQ_t?/�7��)�NπϨOHU����U���Pz�
+��T���Z�����x�hv�v��	���C�ϪzOR՘eX�2�e*T�ܫ��ޅ�t���ڵA�)A�m�@��=�O��{���o	�G3���G�������z���W�mD�d�����j�v�]�g?��<E�Xw�����&n��w���}�E<O�2�q��V��\���U��xVb��d{��[�R�2�f1`+@�f���g>��}Clc��p�|l�p���>���w���+%s�n�A:�u&]����\��/��v0o�*��E0�U���֤�
�����|��{���{�x�8̑��
�����~_t��`J�R^�m`Ω�S�&��Ÿ�X~>�$�]^<߭�>O�. �w0������M��xh�Ƚs-��m������Üû�M,۪v[�#wn��Źy�m�q
+9�����c�jb������"�_�4$�m�B	�B�E
+i(b���ޝ�G߼7��N�..�e�+1n+ta�tQB\Jܹ��2l�	n�wλ3m)?Azs�w��9�9���;໋+��w����J�Ӷ.GMљ�{��M�ݣPi�ԧ~���v�������T���T�~�9�5s���=���Uц��b�}+���E�����k"����[1�#������r�����w�>����>���4�xD�s���zp��ֿ��C�
�6N.9^-MPi^׼n�.��K�/�c�����W�������V_[Ӱfrm�����Zo��i��
���%�l�����[&�7[�����;�m����[)�>~�4����/�fz�e>��Z��g�Ξ�rZ<�V"g ߰r�߭�(>O/�c�I����WVN�}M+׉M��F�1+g h���[�1�m�;�h#��4%=?�'&�
+��S�,:n������@ɨl����\�eIG^���<k89QR9�U1������&��/4/˝]]ݲ�`����`�<\쐕J%خ���/$/��!Vz\yYy���q�3G+2@�a���� g�Q��ɄD�%g������s>I��A�"�2�	��͊o
+p���s%ЈeE�ƨP:%��	�����pyǞ��|�t�[�y�����*����K�vyG.�$.��\T��2�됻��T��cdxP!����y��.�cE'漅��pH�A9����
+�IS�Q9_xP�8Ly�(~(]���-k?�|��Q�T�u��g?DD��*QB�fc@�~>�'L��#Z���tY���Q�y�Ƀ��O=�^�1UVC'��b�A�?|���h��[�Z!E$r�Q
+�	_�Q
��	�)
+�<H�E����h|�8b+�,CCP�Ac�Q�V��R��%�eh�Y�yN�j	R��(��cn����ī���y�#���St�tCꬵvCb��&v�8�F��\ᒵ��fe�V(E�Fγ�8Ĩ<�E�})X�1G�@��3�T�N<,�z�qO2�p�ƪ9�5�L��jf�G�Xu��3�C��y�Dž_{�1�5ۉ1��i;Z�}R�ٞ�D�sl���<�3
�$�\����8>�S�d��L'Y�!���+�A���nΖ��IDN@C�L������#���rW��Ȃ,��'˃�n�uy�'�uj���&�~��1�Kvt��4�(������1^�;o!3;�>R�)ce���~rbf�8gI�(7�3E�<�My��0��9�&�ֱ����y��\(.�Yx*�=g���晴%{$��*<2��~p~hm��6`�����F�?k����$��}��^Ď=�rT����!� �Q����o�N�� ����
��'��o�}z"g������w�/u��I��O�]^/��SSxpgH���]��Mb
~,i�Ƌ���%��[m�I�/�mh/G[LC���i<�_B{S��{q�nm;SS���q��Ú��YR8��&�����w�uS��L��)<8�`�K��&e�8�m�+�
+�K�Z��8LX�[�/V��a����/Y�b����-�$��-�[�x��y�3��˶�y%8,Z,�k���K�!���
�3W�-s��V�I�
+�_��:
+endstream
+endobj
+99 0 obj
+<</Length1 13094/Filter/FlateDecode/Length 6371>>stream
+H�|V	XT��ϛ�t��"��K���(*(&�QPP�M��4jۘ�T��$m�����$��Pl$I���fQ�IjZ�h��Q����}��|g޹��{�y�����PUY���c�}
%�I���u�a�À�X��oߠ�WGT��9_��}ek���+8��5�W�l^qʲ��:�FSc]C�sǧ�	cB!q�b���8��uæ��d���ؖ��:�Έ���Y��Z����ph���k�Z#�Oo��=��ko[�aCFۣ@�5߾����kG�8>B{VA�W�CX`ּ��+*�oY�\��hv�YS��6�t<sfͫD1�k�v��[8a��<��4�v
+:f5�6D��DX�0%�|�W
��	�4��A(l��T$�a8�(����X�F⑀1HDm3���HA*�07!�D�!㑃\�!��BL�$L�܌/�S���03P�R�a&�1�Q�9��y�D�c����V,�,�mX�,�2Ԣ����*�������q��<�cx�ū�� ���L��R*��\f���%r���2| N��p��cq�(#��")h�#�*�&c�q�M�.�)Y.2'dn�I��U�e<��5\�O��{2�;���xo�ɕ��»��+������S|�>4�4`36b;v��������S��z"��I�b�	���l�M/,�7-��V�Wȯ�*{�x��vR�`��t�t�ԭG�qz���S���d�D?dDN#��ԜV�0�g�3Ι��p�;k��)�>�����+��K��$�m����׆�F�z���N�/Z�:zHkC@�����=��E���S@ߕ�7�N������}��p�c��z����������$�=\�ڣ]|��_x�)t��j���I����
�f֘3Z��L�σ��r���߈�X\&.�H8�g̷+�8��$щ>�G�K�D��D�h�":��F�AᴅH�H���(�=�
D�~i��A�Gi�gЉ���x�&��'���1�*QMLх
+�8KT�3ⳉ�9D�0�z�B�4J�քߠ/b5��'��Ў�І�X�3X�Wp'����#��װ�c~��x��&k�y|p.�k��N��]2�Y����ji�7�ߔ��G�q�L����R�oI$�-3�O�p���L��xH��a�d���
+��|<*�T��?�[�܂�ōC�O���Lj�s��aY�_H=~)m�K~%+�SG�	ϲ�Z��#Y�'����l�1sB��<d����0�>aN�I38�ե�5)�ۉ}�ٽ�U�H��M��cEZ���i�dtl�6͂H�����2+��U�������~���dm5-[��d�zis�Oj9�2)H7ș2�2�)�l���ѽ�wV�W/ӛ�|����^O��C����݆��?�6z<��Ǭ��z�jX5�aU@�q�%�B��R���ݾ��x_q�'�0�R_w���]ox<\e���-ͱ6��fk:�Р�j�
+���_����w�����J�)��/U����.�e�,O	��eVT�Ki���
+tD67��l";P*�v��6d��b�
+gS\2eI�YX�팸Zʹ1�����vegK�����Z�L;ӎ]��II�����I�5~$�c�y0���LB��Wv|'��Q�$+A2b�<]�f�Ts�RЩ���
��߯��),ȋɖ�	�ER���rZCҊ$/wdL�5$RbF9��.N,��}DVRR�57d�����SS�h�37�qƌ���|#ɈJ����9�|�^�2R�Zs�~�@_.]����?K����š*����)�5ev'�Ve^^��t��;��u�!�ð���;�
_쵨�$
� D	B�>P��T8L,#H
�JŬe����C��R���X�M
0�s�r���<���*H.)L��l9���'d�}%r%pB�ae&���~�gw�cgj+<����{���p�3���)�L()Ϊ�R�ǥv(F�1�\dj���L���)f5`�x���<e�������dV�F;_6��F0�F�h�%x\���A�e��'n�X��EIW�L<�Z\�%)d�̏�T�b�N�<R���R��\g&p%8��p��D�Q6^���$I��4��3-�e� ����EJ2���
+�b�Zc�:��ܹ^���k�����{�X?ֻx�~�����
آ���ÀģJ�ԴU��Fr���UUP���V�*��hi�5RU��J�R5!5�J�m��x���{lC�RK���3w�̜��9�FV�A
+A,� ��G�ƒ��C��
+�8�2
+�P��>����F���gv��J'�^��O�۪e���Fzc��Hls_�(�h��%�bcL]�?�	V�t�����Ω�R�dG�1�\���u�fG���Ě��SM���!�:���,�2��.�H�I�_Q��BŔ-�l�O�fE	�ט�����iGqk�8�`?n�� JW���5��Z��0[3)��aPJ@���6�$Ψ�3ˠI3c�$D�]���|��R�LJ/sGi��FΝ"@�cr��:�;�uq�����~#HH�˺�r��t�M(F�}R%T�9}�&C�����'@�Vِ���:2���neW�՛�n��s2�~G�qk�>���M�����Pv���1�E�(�b':i+=�_ni�l-����m��v��;�w�4�"���������qT3d)	���Ns:9�NyA#My��<���@�%�[z��_5�ѷ"!�"_;�O;P���R�S�W�e	����ePA<FF�ä��*�������}آOg��jgʦ�+�Zk�g���1���
+2(�`�2�%ŀ�Rm�S�����@�8�N�&c�W�?}C^�>�x�h96
'�/Ν;w�8�8D,sp�)F����9�ooH�c��$jX��.��p�7���'����
+�_�K�i�=�c�һ?�I�����xі�%BP�T���2��iƣ��KkhSo���7��5��{4{i���TҎci�A�v����K�]
+�w�F�%|~tA��Y���^ȭ�6�����F��q�mTG�Ѧ�H{�cqL��=��?އ�T�ɇ+�2��HQ��_��pC"�!La�n�ԷA0s]P����Gꔔ�%�G�(K�2�_'A$���ĕ椟�^�+�Q���-�,ز`˂-�mY�e���Gͼ�yg3�y�E�U�Ԕ�xC�`�����j��lI�3��|�8���Zq�'����O���64�9�PWm[���N�j��9y��˲�r��mM�6%������pS�]����:�&���U`������OT-ucB!��:I>Ż:�Oԥ)���B�!�a�r�N���˜�>�`=�@ײ%���5h�Oc#G .� _NK�|	�/�Z�*����JI������y��a�W�w)Ljv�.L��rnSy8Fy�=D��M]�� �L�<� M����;0�u���>(�D/�؆�}�]�a8�̊�Yq�<�C���83+f�5�Z
TϼG�q[����?��q*/��&��U)Cے���5}����A�����ݵ�N�5ZR�Ѥ]Zj'��9�_ۑ	ɦ�ZP)=5P�=;?�}���ʶ����H<SY��5�WU�7��'`�}������WA:1J��Q�a"��Y�%��7��� @)�����ٴ���̚������k^c���%��k��,�C?�{�A�S�`�U��������P51_�o�������L����\Ϳ�ϢY����gY��5_e���:��P��W�{�s-_`�zo���X�C�&�ҁx8�/߯�؏h|
+��g�n��[�Q��yƋ�P��_��P��|�~Ń4P$z9Ѽ��/�"�'�i9¬�~�I P��)k��;�KD*b�Y�D�I�$����;tɦ!����,�h����LA[��A�0��v-�
�1�ld�Du�V��o<�e���j)wi�i��:����y#����ۃ_��GMy�Î˗�J#
dQc����x�hE���@��y������dY����&�	ܪ��]W3+xf��\Sav����Ja��*���a�RX:/oVI��R��%E���]�W`mB��J�[r˃��`sy�Ql�1ĶQ�[z����5�5�#x�<��y.�a���.�C_R��G�[=4�O���u�?��^ݶ��*	�Vd��Xc"�;�Smu��Ȥ��1ㅲ�gtS�>�4t8%����@u��yˁ���[�5vň!�
���
��J;=@}:e�
/;�������K��������fn&W��d^��B�+�7���߉+y'0:�פ��3�����_A%��%�]����Lȓ†�	�Z#����3 �)��h̓Ͻ�?��ZH���B�ǵ帺�z�dG=[�Z�k����U1&)�!c�i�6ȿE3~o¼1��x��1��b3�#�>�?=i�IQ�y�����D�<�����!#�sb�xX������؇-gEV�wV��1��a��z=�n�=�5BO�5��c�V��c/~{�q�ߧp�Y�=GD����F<�������X��!��#�O�ԾyT�g�J�v������R�[���;�c�̽	����u�m��W�PO��J�X<F�6�˓��Bq{T(�+�Zk��7�K��/	��1~�.wdXy�M4}�����!^��\>��we���]���Ȗ�`	B��n��D�a*逮����g5	l�SQ�F�M�����>��;�;����X�u��!���_c�R��)<ob�s�	Q
+)��8�m�Rg����������������H�`"���ɉ!��8v�q"�1R��X���M��{����R�"AKGCAQ!��R���$E$DC$DZ�����?$$H@��7��ys��Dž�8/���\
�p�N#���G�P�磜#�/�G���)��wҧ�[��y��^�"���<�M��?��%����̸B��_�������뻨rK���3�2��8gQA�`&�k�p�y����o�׏��먯��r�o��O���Tں��*�Έv��"��7��?�C���#����]q�!�݉烮���byF�Ęҳ���3�#��q�_<��r��ʟ)��S��׳]���e'=+��Y�ޔ��n�����Q�c'_���O�z�{��*�Uz��/��h�r��3y���G�z���3@ԻX�<�s4��G&�i���L���t&�@���; ��}����s�u*�(W�*0U�0\Q:��@���5ie�x��VqӅ�x�S�ت����o���������	�/^�$ʏ+�Tk�;������)5k��a���/����j�¬������C%S`H�]�AI-԰hW\˳�
�ilh�V�̦e����(t�E&���AQ���TK�I�0N��������E�e\
�ň����D��qNG�k0O/�b���ڤA����xωQy��� ��+���D]��m#��cvLy>�<ߡ\��v������]��.k8:\Զ����M��%�HM����Y�lh|1IW�q�Z�+W�����&R>|��}�iMߙ8J�P�,�-g�#�W�8ر1�S��U�L̓��Ү�&���e��p}z��p�!�uS�.L���K�S��?�Ks�%��՘*x:���;�	SEuQp�DD>�H�u���7��9O#-b��5�X!�h���`+�����b+�V���6 UĎ�����6
+#���}+�kX#��z��Q�4�i��4+c�v̀�"�	7njI)e�׵Jb����I�겚d~HP-p2��p�eH��ڞh��Y�2߉���<���vLxN��0ގL��UQ�c�t���ba+��I���l6ڒ}Rˢ��D�M���HDϊ���Ę�xz���1eONm�t�4A�(�L�@���1�V���G�b�D��ۅlo[�O׉];���<�M�ːBi;������z�b�c��a���
+>;Z|>�'GlI�$]�	��Ȟ�5�a�~������4V+�b&O���wSUx9A���H�0ݷ^־KVt��~��Ƭp�r���8�3�����j�Ⱥ�����H��$�m��ԭ��g�7���9O��e���]��mv�e�M�Y�M��2B��μx�	mf������e��������
����}K��]��wR~8w��K�U'�5z���^ŭ���M:�n�@��|�ޢ!i��:L�0�ۻ�m����=t�^�A���K�pf2����Go�1z#�ޏ��z�vK���Z���
+�'�q��A@�v�ޣ�X�z�ݳ	p/���u;6�����
�V��� �� ����չ�������aT1
+endstream
+endobj
+101 0 obj
+<</Length1 13432/Filter/FlateDecode/Length 6582>>stream
+H�|V	XT��ϛ
P�73,F@vT4(
+*�b
�Q�Ȣ�5����U��۴Y�M�$�i�6mQLb���R�Ř4��ڤfi�D��o�|�����{�}�;���s!�F`,([R���뒳��Wت�6W�}�� 3����[�umY�������u�Y�6��PΩY�����:�A-@�����v 杙@\��5��(��/n9�	
��[�_������Yߺ���v1�C�KXs��6�a��s��R�\�>������njoOi}�nQ�m�ڞyt�>����������uj��Tݱ@V#K���i�j��Yw��#��F�ZX�x	�ǚvv�5��<��:p��l�Y��YUb<��#��`�B�A����_����"Mr���#|�
�p ����X�1�H�=�11�q��xL@��nx��D$!q&!�H�d�#��B6r��<L�TLC>�cnD�a
+1sP�b��<�G	����eX�e(�r|+p*�E%V�f���O�N�y��K�"6��]��!N��(��h#qK�$�dɐtɔlɒ�R �d6Z��Aj�p���}|	��؃;pG�~�<����1�?�^|O�p+V�!�k�3�
؂/2J�qq'�_�:l�L�q�R�e�ƣhDn�z��
�Ў���Y��op/�^�y��]x��x��e܆��
+��؉��nX-�\�J���QI6�e���)��rP�՞Ӟ�^��<b9i9�G���8ݭ'�z�^�uE�ܮ$�d�涻�ݣ���q�8w�{���]�x������R>#[Eߏp��Br�h��}�:w����u�{�� ���ܵ�(��~�_C/����/����92�t`�@��������]�}���cn�7}]}{�Z�J�J^~;h���
+��C�l��6�S�2)´(�G���h~�xM�_I����\�ϛm�L%�կ�Vn���RIU��:;���{���TZ#x7U�����ut���n���P�g��������_W���.�[����2�`��.*�F�;�~'���H�_��M�S�x�qKgdR����f�L�R��H)��`<-!�����_J�������9Y--2
+����X�N�����2/�x�^&�'+$^�D�Y��L�N�O�H�d��$k�O�z�Z*�J�K9:��*�R��H����%oH.�����T�J��e���dޖ|�ˊ�L�UR�J=ޓ����GR�K1>���&�0(�1,"ԮE�&��fY'�����;�W��cF֪��d�n;w�V�>����(�N��<Z��+��>���="���Ur�V���?�fC��,,����@ji7��*��ty�exW7�&�kY�:����ōE>�bGK�a��Ȓ���Y�.��x�N����S��7T����Ɲu��t݇�F�/�p�fyc��:�7�<V�c5x:�dh
+04$�$[j��$�U,���(���*�ƺ\z��TY��TQ����,�uOy��1&ೃ>�'�Y��A
+og���q�Nuv�v�;�n��
�~�"�$wˎ2ch�����E��E|UpjiyE1syӌ����r'�cmJ¹�BY��)=,`�,o
+%����ɪ儶���P8Y�,dN+'��E��M��Du2��Li'��ɴu2Q�x��
���F�*�$��	#�1�0��%��
����g�P�
�!#
+�=,�V�_k�"8�����x$���9%7;:]r�HnN��mw$Hv���(�#L��D�"��oj�6Υ�J��O�g9�������P��a�9�#���]�	���A�fϟ��z\I��Z����}�YZ�qR(��"�������`j��'R>�k9K6L�븰V�ZQ˺�?����6e�1d7����k��&����x��g��P��3���	��9cr#=
XV1c�j�®�ގ��Q���”!�3(T��2X��Ի�����L4"�22��,U+�����H�9��U֝�p���DӲ����V �\�XӒjZ���V^
���	jLP��T���X��0ZC�9-��?�
+D��3L�B�N��l<݅0�!��C�OYS�+�z�J3�M��g�&X���F?��['�u�)���Ӫ��x�~C.��#Kvd�M۷6M�X�	����u��w�s���_�؎?�N $���\\\B$$�2B	��R�m+���vS;	�B���D��X��V��_������Y[���4m��ڭՄԩ?h
�{�s����&z��;���}�{�Jc�wڥQ��;{F^�}EN̞�[�|J7J׍+҃����c`���i|��Vz����D��*�졥F{�>��_5b:-�[��,�/a���I��ihP����Aip9

Jm"`����Ja������P�n�r�΢�\����@8q��|�2�XՊ��xW�Q��Y�J�<�&��70���3��B�I���;��L�e����ֺ!p�S눛�}PtĎ�¸��ש��(�Q�J����\�X;ll5l3�n��`.�o��ü�{ҍ��LDB:���۔���PQ+�̧�K��.��e���.i��۠F�޼Uo�UvF:�V�Fk������Jwe]u��JT��u��%���m���i���������N��J�'�t͏6��{k#�TS��|C��y�NYt���L��|��孵�!�ܕ�~+�i"1�2�pո����V�˙�ud�����צ�
+"�6lfN�g�Z��(+K5;=�`	k�$�1֤/��4�����@J�i���=�����]�����vr�*poR��Nj@aD9����6���=Saԇ[U9b������7E��nh�jdQ�*D�Y�������R�`^x��]��3��K�
��0%A��FLh�D�:8�sUd_]���&aRW���� d;UnQv!��P��6�~*�Ea4PA�Id���K*T�U��B/¡��#�B/�Ћ �"�B/��|�V�A0H��I�'	�$E�
+��l�(L��q��0��n�*����	���*�����&�j'��v�8��$W�|�`=��8s�i\��>N@��8��%>�+�<4����֡*Q��]ʵ�Y��28ҡzI���L˰G��2���lY��;�	&w�'
+[*d�񞻥3�h]60V��	�˷gd[G�o����h�q������ٱm�
����h���=�*ת��cgBH�խ��ǡ���*,#U^����d�'W�4v�M�I	���Y2ч�8�A����>��U��Jܼd��t���� ���������%�6CߣAܣA��I�&�M4k����@��9d���Ѧ[�U/�*�j�;���e&n��X{�؎���~��.p��jȏ��5pQL��vl�)~��:�5��h
+���ƻ���ʛ�kP���{�A���Fk�f�%)���+R��)�u��?y�:%�7�}��xD�֬0�M����0�&B�!7a�Mrf�	Cn�*'"�J��%�w	ֻ8���%�w	�]z���%�w��v�����*�D�p'��h�Sک�[=�����y2��7��7�:0�,��d~�mWq�g$ۼ���

g�Ec�K��ci��
+������Ȗ�%#��[0iiS��v�Ǧ�$�<��^����0E�!�a��~��V�7^0�
+$����/(�Q�~:~z�sN�k2�#o��!&����$ի����1x����T�
+M���C,6'�z�k�	f���֏c��X?γ~����9�
n�t5�Q���p����}��*w�U��Y��{�=WPGuet�PAĦ�3j����SOI�n?3�����~P����P?�C�`���5�ha0���=N�Zd&�ڧ@|���f�n�&$̦�j��ĭDƌϽd���*�Y�7�*W�!Wa\:�:��_��En���2��>�͊�����?t^�N�?<’Hop[,?��{�����TϫnX�VNQb>���)&�ߙ�O�T<7�~�ڷ��NQ�0��*�"G��T4��3|���~����_@f��)NQ�F�F}�T@�-��B�
+!}
��?	�V
�i�D��JM��V��xY�����#�RG-�>��)�A
+}�B��)��>�B�Ч��^]Q&ZD�2Ѣ�DD��hQ-
+�EA�(�����$�J/|?�)��n���Qk*}����~�p�[W��Ma��a���]��61�!b��W��J��8˳,�̓Zs``��������E��Ճ]����x�Jg�*Nm�=��8�5[�������M�H.���,�h����N���7��E�Y�\(
(��$UTPUVp��� �ń��vK+������>N1]�ؗS|��ђH��Jߵ��&�րz�mX{�ed�rW�"QY��u[y��[����r�aTTuF�+Z>ü���co�����t��\v�U�]���{��Җ���
��Ѯ
+�<�N�!�TdP�W�A�S�g���u��	*qN����1�Qz��)��͢�h*}i<-B�s���N��@v����`��m�Z`߆5��=�b�.G�v�M���/ϖޕ��������v�W��5��,ݔgո���Vן���	Q�~�Yc-��N�*����>�Ak��1���u��7��Ҍ
+�\/<��6p!�e�k�ό�X�	����˃~P�;����Rъ5��O����e��e����1��c1���1�J��3��b�Y�Ji�}�_��bm?2^�"�5J<:��~���Q���o���Z��G���6mlLT�1�&-1�I,���μݝfvfy�6�@�/RA�O�K)(H��x�"�$R*x��E(����of6��M������|o������{�(u�q2!rc�7
+&��R_6�b��X�4jdw�~�zaۀ.���N�t�ށ�y�Ϗ�t�~.s�q1�<�ո���q����}�b36� V�g>��!.�o��z
>[�#��)W�?��(��DY�j�U�	ԛ�&^N}�l�2�O���b���OP�!6�K���D>�B���QH�l�����+��ҟ7���M���h��m%%��Q
&��'��ݩq�k:��RKM�GT�I���}R��@QoP_$�x%D��Q������ِ��Z�zi_�k�zZM�R���^���!�ˤ7[�E�,���(���D�S�6r�-�oÉ���!�{�mC)_���s�>u����v��?�?��V�]�}$傀=�Eo�F���Y��I�4�&�P_�|Ko��%���OF=N�-.0f�w�Gnr.�^B�{��_<�+=��ʜi?�~��Uו�+]W{���ˣ���0.:7���!�S�t���#YǻtL�h�ħ�����E���"�
��1��z�w�O2��7�
4���b>%��n�|Z�v�|Fw�c�
������U�w�r�7�Y�Ƞ(MYI�-����|;p�#+�]v}%����)ԌG/�X�hYՁS�]��f��h�*���.�pCi�����r�����ǐ�t��`��&��^���U�*99�y����*=����-CiM�Ҋ�i�*mșUj-�<Y�x)�LZ9YY	��縡�! �+�-?,*�1����و:]�Pֵk��U%?-o�ќK����F������}P�:��,�FUBy���@[��#��#���.O(o^׶�]��9�b�92<�h��tIi��5{�b��7_��g��j�;��Xϵ�ON��j���B�a*��G!p}i#���j�
�6n��q*L�u��g�GDPW� ش�W�[�匩9n@�QHѫ�&�i�m�3��:qxr���)US��cA���yй�it�1a	�3��(�nDY(<|�K �y��)�:6�p��ِw��h�-�X���u֠�kJ�i��@^�*M$Uj�0�UpE��p�H��!��p�=�B^3�2tD��A��k\s4n�-���H1�
+bBd�����藥rl�����b5���4p��R���i�K&$^GVy��G�Y���$'�&���O2�iB�e�3]%��cï��p�f��턐't:��s���l/{��i�J�YN���J"��f>-��F�R$��d:��>1"�qec~Y^�9[�F�����Tۑ��5t�i�.���,O�!��<��\�Qv�f��I��kzL뢊����1��N��5aK���l�}�HS�j�p�~Ա�P��(W�͙"O����~F����}խ�O��fY��P\Ƴ�+̪^O��y&mQ�D�U�g�#�֏<����o$[O,G;R��J?��$��~r�^�Ŕ8�D���w�
+�_M��,��F��UŒ�?�����Q�Omd��z�k|����F ��p���%љY3�=��fO���E�%����^�Sl�q7�=��'E/�{��C����?&�#kωg��&�`:���E�<��@Z���xKi���x���m+,��U�:V�O�����Co����	0�bC�
+endstream
+endobj
+103 0 obj
+<</Decode[0 1 0 1 0 1]/FunctionType 0/BitsPerSample 8/Encode[0 63]/Range[0 1 0 1 0 1]/Length 192/Order 1/Size[64]/Domain[0 1]>>stream
+>=@>>@>>@>>@?>A??A@@B@@BAACBBDCCEDDFEEGGGIHHJIIKKKMLMONNPPPRRRTSTVUVXWXZZZ\\\^^_a`accceefhhhjjkmmnppprrsuuvxxy{{|~~����������������������������������������������������������������������������
+endstream
+endobj
+104 0 obj
+<</Decode[0 1 0 1 0 1]/FunctionType 0/BitsPerSample 8/Encode[0 63]/Range[0 1 0 1 0 1]/Length 192/Order 1/Size[64]/Domain[0 1]>>stream
+>=@>=@>>@>>@>>@>>@??A??A@@B@@BAACBBDCCEDDFEEGFFHGGIHHKJJLKKMMMONOQPPRRRTTTVVVXXXZZZ\\]_^_aaaccdffgihikklnnoqqrttuwwxzz{}}~����������������������������������������������������������������������
+endstream
+endobj
+82 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 95/Subtype/Image/Filter/DCTDecode/Type/XObject/Width 104/BitsPerComponent 8/Length 1422>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+����_�h"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?���͔HP�li�G|i�|US6k��q����h��3+_V�k�[�k	^ ��x�sQ�#�y�1�C��D�a��#�,��_\����$`9a��̠�����b\�ٲ���l�%�?����jp}󕌟l�~d�7`Ox���	��e@vl/��f�����|a�s�2c��&�DճY|�{6�̎k"f��|a��|��4˴G{$�����kr��as�Ȝ�ӆ�v�蚾�+~��N�!��S�|���k�S�>�C �'�_�gU��k�}��˝��}DDF��A�Sܳ�Tf���
ءu/��9w�I�:�����g.�O����p�h��,J�(1/�ņlq�t��>�S53f�4�6lث�f͊��ﶿ1�˟e~��w�_�·�ϲ�Fbj��f�f0���4?`f�'j�Կ�?,��i��y�u/�O�9w��o�[��F��b?�qa���ņla��
G�}�͛6I�ٳf�]�6lR�Ӿ���t?.}��3����м��~���;n���a�6h~�͘N��t~Y˼����/��d{g;�&���@�p��u���<�*��K��C�#Ҥ?�����t�t���)
+Ƕ8F�SF���	�Bs�8Nx���e���ū��*���g����\�Ԁ��=Xjع�.��M���฼�����=�m�����:�m�0�����A��kN\�nf�M�"�Q�3f;�Ӡu��{�%'�Fe=��H lK�8������?g$t��c�N��x����=1�&���헀̤B#�t����e킳`�)��-�vLj�v���Ҵ"��yy�+�f͊���
+endstream
+endobj
+80 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 96/Subtype/Image/Filter/DCTDecode/Type/XObject/Width 105/BitsPerComponent 8/Length 1412>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+����`�i"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?���͕P:�Cy���wƙ�w�U3b
u�16��wh��/6����!&����C�8���.��G��C���̈?k�X��Y_��9`��"p������p(p��K�e�(���Y-f�+�����
N�n1�������C��l��.i��cE$r�2��vl�O��\���dGOnMp��f�����6�̮k"&��|a��|�i�%�#��I��`Y5�Nܰ��ǾW#�p�.�)����E�)�a}Nm�cZ宙M���2
�u��p�9�l������g(�@Dl��sK!ݝ@j��m��-�}]�D=��g9���"�����͟i�x����X,����19?��68�:
Q��隙�d�wf͛vlٱT]��=����
+����o�sU���c8���#o�1l����!����9���ճ�]��vs����d�sk���Y>�=q9>�(����������lٲm͛6*�ٳb�]��=�����c'�\����rv���-��-����[0z�n�{���s�5��:M�򌌁y����A��ͯ0���A��P�d���qx��l�ǐ��s�9�W��e���ô��?��c�����!�t3,tB�<[9�D~_s�8./.�����D6dz�y���s�]=�l�E���pT^\ː:���o��Ӥ
2k�Z�q��ph
+�8qg`�҃(˛����|$l�1\�E2�[L9
+aV��,���l��4�-����x��y���_��;d�B��H#У��SG�~��ٰq��q��"��-�c��y�q�P����
+ئlm+B(��y�+�f͊���
+endstream
+endobj
+81 0 obj
+<</ColorSpace 46 0 R/Intent/RelativeColorimetric/Name/X/Height 165/Subtype/Image/Filter/DCTDecode/Type/XObject/Width 50/BitsPerComponent 8/Length 1974>>stream
+�����Adobe�d���������
+
+
+
+
+
+
+		
+
+
+
+������2"����������������	
+���������	
+�s�!1AQa"q�2���B#�R��3b�$r��%C4S���cs�5D'���6Tdt���&�	
+��EF��V�U(�������eu��������fv��������7GWgw��������8HXhx��������)9IYiy��������*:JZjz���������m�!1AQa"q��2������#BRbr�3$4C��S%�c��s�5�D�T�	
+&6E'dtU7��()��󄔤�����eu��������FVfv��������GWgw��������8HXhx��������9IYiy��������*:JZjz�������������?��NlٱV-��m�6�>��Mqp	���^�I *��W��:��櫈�ɧ^Jx�W4
+��,���Ι�=nn'�ž�g,�E
+D;*��S�c���hF��U�&l�~O귺����u�H�̂H}F���Z��ɞ*�ٳb�͛6*�{��E��|�s{2��PD�	44P�����9�v�ȿ3iڅƙ��M�he�����3�UBG}�q�U���իZ~\i
+��Ԓ_��f5�S��m_(��_^�퓺,S8B�R��^��:�!� ����f͊�8�ߞu�}{�+�ݽ��[����<�4fӍ6���S������w�OE�"]��U�I$����͛vzW�q�_�ռ�>�w+K&�q�nDRH+רl�Vw�Ƌo�}r������U�ٳf�]�M��E����5����Q��9�"@;��|�����v���]�P���->�����f�]�����n��/-jW�>�����<���s_�/�zo�<��鐄7�nn$�(�ğ����3�"�H���R3�qW��͛H|��ɺ���š�p����/�3g�	$�z��������@{x�R�����A2o���1�͛6*��ֵK�M.��嵱-�n��5jg��)���_���EǞm����8ݯ�w����o�TP�?�����#{�lٱV#�����;yVM&�e�t�'Nu��+�+�K�'�g�O�jx���qg�u��]�W��q��%@f͒o˯*��|�e���9nX
+�=���?N*ǭ-.o�c����f	q���v�;��g�ƻ�j����p<*��q��y�Z;l^�αa��K�K�iֶ��Ԇ�����8?vlٱWg���hl|�Z�;���듞��1~{�v�I��������,r��2E��E~�U���������R�$�����GEO��}��3�_�F��\�S�k/�N��3ʿ�*��f͊�6lث�9���;9y^�H�G��Ե����7R>}�$y�W��Kk<�өI"vGS����g�?�u������k{����C��s���/
�wRļ`Ԕ]�����0rY��8�1���o]�P�=�0�8�޳f͊�6lث�f͊���r;C�ߗ�5ؒ�c1�FN`)�_�!�����f��l�
+<Y
+��:��uŴ�Z�\0V�đ՟�j��5i�O9i:��y%�#_�$<m�Kb��sf͊�6lث���]>�[��V B��抪:�N-��n�ͯ���/�M$܅��G�R�lv"�q��r����o<�ii��4�&o@��v2��C$ߓ_����m|��oN�b6j�Y���o�/���<ysJ�c���Ԓ&��Ą����X{m�]UQB��@����6lUٳf�]�6lUٳf�]�6lUٳf�_��
+endstream
+endobj
+67 0 obj
+<</Filter/FlateDecode/Length 1901>>stream
+H��W[o�H~ϯ8O+a����P��P��hEW(��Դq*'�K�����I�.��8s9���s��W	\o!�Ӣ��"�O�2��u7{q�ӫ�,�2�b��Y��E�f�����^��	(�/g��*(Ӹ2I��q����ٗ���!̲ �������f��t�m\7���?f	����J�(~�e��*cb�sESB8�����#�y����k�Z��81�v^ς跏Б�$Nt��-���~����[F�O�bU���)u�q�@�r��8ˍ�fFГب��)F}��N�y�eh~PUy�s�DdDr+�qC,�*�,���#"ώ��](J�J��e9�*xjR��,��('ĥ��*K��߾�W���ӫ72�2���+�}��z���M��'��s�A�v7M��Pk��(�\�"*32%PF3*�*3L��@a�8It�����.�]���C{}{��V�vP7���L$��ST�4z��ի�
+�I�������d��+�/�#/R۪�9T%2�5}�
���[�q���4ݮ�h��~�kW7�hi�Ъ�ns�Y�
��)��M�l�M�m�D#�X�H�K�n�v�����ť
 �w�5�_la��p�tۍ�yR���J��'�˸,�B��/�f�>0�����<LLQ����,�V��di�T��k�)cJ}���U����d���\�`zRiƙ'~;Ye���FYpV�.TI�	#�l�4���ni|�ʆQ\��M���	:��
fЇ���Cݲ��m׈,*�*5�Is�j��7՘޶c�݂�
+4x���J��n*m���ʃ�h�ϟ��^��b�g=�S"���E����9��,G����M.è���o��Gw<FO���2���8�:��E��r1��4z�-G\x=i�JH'����@�����I��c��Q�����B�;����NAN.�Z��ήu�T������I�����c$l��bM�VaT��A�(�PI.�#�1��&�~),/����"o�X�d�̳e7�4ڣ<�j3	|��cI��EZz-�c&��m�6�F7+�l�8��v/m�G�Ɔ0vb��W��=���O0/���M�C~M����s=�@"���J4�!�ׇ]�0�|����a��H������1�㕩;�#���v�
M5Db�n�e�f��P�)�>�+|lX́4�:?�.-�<ʯ�lZi
+x ��/8R��e,j�xp�.ϑA�Þ�T��}�kW�aN������psg��C�ɣHA47[>*���L�����ج��	���E��	�B�w&rӡ������@��������=��!�#�:���:��+�����6�����4iJR���St��qh�r>ȼM�����`�b���=�ʼ�+��s7=�
�9rk�� ʞ�{�Ì����+KIi9P���!%y����3�_��$|l�����?�M��Q
+�gb�8�%53�,�Ur�&%k.Cp��\���m�� �0�0�u.G�6tIR[W���0�G��o���wDZ�~�)C�
+�wO�~����Y�o%Y{�=)z���9��C�T����e�����GB��
+���1�aGe.�%Qy��tg3�bk�½b�s��꣺��u�{Ω���t�k>���/���Tc)���	1V|��
u�ۢSI(��'��2��[GRL\S�v�_��Sn#B�㈉cm�	����?uc����e[,����:g<������U�����7rH��A�b��$L����^cU��*q)��I~	u>'����<p�U��tP���M�6�Ca�)�Rn�D�Sd�TK7w�$�Sw��B��ؖ��q,6���v���M@���6���&����w�gX7��F��۩F���w*�\��2�^�#a�W��G?��
+endstream
+endobj
+68 0 obj
+<</Filter/FlateDecode/Length 1653>>stream
+H��W�r�F��+��I	0;�N�r*e�.�If*%� �H�R�R���6��,'n�Lo������gq�T̞ ��FI�����K�΄E�ޘ"�5�M�_M��M����q��,3u�����u�hXlx7-}46Ʒ��h;�����c�~Krk���+�iT)��{۲��;h�TROd^
�-��GpA�WB'��=�y��mM��w���<@��<�w���ݍMH�.����Ţ�&̃c-��Vgh��i�lJ��1�$\�q�R�����RY�LX�Zx��9ȟ
��`E�{C��bR, �I�m��L��4Oڵ&-"[��&���֗���ypM?ZJ��f�2
+G@Kvu~�xiBDB+�ã� ���8*cz�6���g��~�a�={5��`a�<��ŀI�#R��!��9̺��r��=��~�Դ����:�y�3gŜ�3{�>AA弐>�eUU]��/�{S*��'c��Q�Ho�0�|!Wn^޷+��;�G\+-�����V���)̓�n����$�񹇶
��ڨy����2|��5aV���[��`>�U�i�4�|VUӂ�2"
��ո
+��)���$�[�Z��q�)Pړ3rmG�z����b#|���_��Ǒ����3,t��6��
+�m�iZ6���}�	K=��Dz�8L�ϥ�Mp�OR%v*Ⱥ�#VT6�G����b��	���ΒJN1��;�����*�υ\���gׁ�G�����u� �@<��X9�+�.���̙	Q�ǘYҚyZf��k�!��ЭB�I,o%w7��
+�f���L+
��n?'� ������{��3O*���߳⌅訓��
.J�\U�x��eu��E�%�����I� �l�M*U�ȘO��FUYeG��&�q�Ę�g�rAA�3� ����`�!�i�E}ʇ�㖪�����	og����ҏA5�k;q^^<� ���I�dz0�>���a����/��M�L\{Rd�٩��J�vť��FpH7���c���������ndnw�{�8s���X��#'�+ސ�#�$cc�|Z�z�q�E�t|�؅��PJ���܏�,?�d�S��j�=1�J����~ݰq�]3 �n&�S�@�sq����'�u�;܎�=�~t�?R��C&ڦ�^g�j�����a�D��ld�˸��h�~���U>�g���"&�<R�)�V��G��iX&��Ե�ryyE	�:�8,Y_q��|e��N6xY�1?���9oW��j�����17q,"Qeү�?"��tD�u'u
�"�r2"����xD������:�L�W��A��DB-�iI���~'���ZX��Y	�V���yM:��.��ꗕ����;Tm�a�ln���@2���k/I����z����ME��5S�p�09���h1��(�⋁[X�G'������6>��'.�<��l8��[۴8$$�v$�����A��X}꒳�u �uw�i}/�N�B�"GB���0
+����u��d�k�<y�v�p�Z�z�.��n��]�'��J���p�!�7^�ډA�����0(`�`��¦#j>������K:��_�4�v�<LO�=�)�#�f�L��i�������
+endstream
+endobj
+69 0 obj
+<</Filter/FlateDecode/Length 1926>>stream
+H��W�nG}��Џ� ���e�6�h7���+�"G�D�����ϩ�+%:��B�4��O��SU��+\��L�Bre�_dWy=3yٰ˙tx�1?�\^�g��og�����gR�;������T��fѷ[pL��B5+l�e7�F*6�}N�
+�Zѧa�Y���>�n���a��o�Bp��%6	�f�p�2�K53�s�e�]������l�G&��޲�,�ֲ ��G�g���������Cu_��Y��j[��zU����������jJlh������}U?V��u��m������y�P�7͖n��w���*���Zn��!_����]�R���I ��ؕ�[�u�,�ՁT�YVu�c�n�(���=�UuKVo�
K���-6����7���-����a��Yn��Zk�jѰ���Z{s�jy0����P��Ͽ�����TU��a�r�Pիj}WV����8�w �q�j߲��6�i���ׯ6���6bd]�ʦ)�Q\s��5�s�y|�Y�s��fє�����*W	���]Gs���#���י�y��Yޕ���~�t�w�*SnI	؇�)�-�x��nW�<�$�R�9�l�Gϴ\�`���%%�UN�|.���ڹ5�`�;�K�o��fC���j=�>?��e�EE?�LJ
+���.[��e���S�
+t`��h\�™\�żڕ��m�v[�7:�f!;�}���/�MÚ���\j�
+/�rϤ6<:��ev�}=Ĉd�v����-wI��	|(QیlD矽KmSA�f�'��n1yt:��O���ٕʟY
����S�O��[)�u�+�ŰbQ�NK�E0�|��0Q�����6�
+��qE+ˍ7�����6��lǕ x�t��%�@�8��	�n6z���޷���ٯ�+PE&-B��x�"_L޻cb,\@� �������ٰ7���ǁA=	î�k�0B<P�N�|�ޗ���Fq	O�3��;3��߻;[~�+�a�p!�Φŷ�#��}�ѯ����L�l�x��Wp��"��$ǁ[y­28�������$�&r�T�x�V��Hҗ:ҧ���j��r�v%�¨�"��
\Ĥ/���=� �(��>�V�mЫ�
�T�N����
��^���d-*Y����;��V�����v�óR�)D �FJ]
.�"���t��
+B��R���Z;�YPT1�֔ᾷ��'�;栠F�..Z�m$��N/�$.��F���60J[2���GUP
+��8�Ɓ)011�@G�%���pm[�A�\�t��H��m�Nr�P����S��(c���|���[�Nٚ�g�bz}���>;Iy�*P���5$4��Q��_!l�ghǕ�0q؁V�i����Qd�Z8FW��Ђ(�h�-'��]��Ƿ�Z�;0V�[!|�L'�	�B�Ϣ�II�Z
a�h4�z��yz�vC�I��<��4���.��?�j�
Z'���s��a�F��@l�4(�
+��㩠�)���^䌆꜒�K��&�����G�^H��#@zy�Nm������9U�^u���"�d��>�XB����j,�K{�9�Gf"L��O�I���
+ť@<D*��P��}1�)(Q���_�3�H&4�\��g�������1��[$��MmP�������۪HC��yţ���u��:u�T�SI�/+�*�\GMĄ��d:?��}�7�PЋiEQk
+$Z4@����5��mM��E����s��Ǿ5�Ԛt�J�ɼԑ1�1����9
����q���L8|���S8��D1��}��|:�YѴ�	]�%�J؅-��P7P �s�ˏK'�!����wo���_��7���D��?�VO J,�?���S�R��E�EAI!Ќ�)���c-�
+endstream
+endobj
+70 0 obj
+<</Filter/FlateDecode/Length 2077>>stream
+H�dWA�$�	��}��W0�<���׿$ܓ�d3߈v������מw�g=�&�~��O~YO���+d����|���ş4�uAn��'�Ct,�Sl���
��&Xki� �p�Vm�������ұp?�������p,��yw�cK���=�c\
+�p�<�p��c�>��6�Ӂ���_�g�~��v_����0�a��qDV���������װ?��C���q�ˏ5{�~�~6�"xYo|y�}	����d L�j�/���,{�/�Oa�S��_��Z_������@��B�х�c	�޸�B�� Q�8y��@u�$j�I����������"�y�� �e.���qѵwy�����gb��Ws)��_�q0��:u��!rWl���&,x�.�k�MJ�k�������$�v��E̵�@�-v�wХ֋�������'�HYD�(����C���<�`f��K��.�K��W�u����"L�Yg���B2����ٽ@�xk�v�z�)Syc���rj7
+�}����)��
�d�a�O�'p�0&mo��g��>�����A�����N�-p�@���P2���x���1�t%�!����Ru�j0T��sn\���H����z��{�8UDͫo����&!l\/��5K@���c)j��-�A"���*(r����!��0(�L��#���|�lZő!�D�*]�wʍ<1�F���>�*���+61!�6�d�������A��u�H6ؾϦ'ŏTr�_5„�J�>�����U���dPN���/��3��lD�fk���j�iop_�!vo6�5wvP�d)&ᆃ"=R�)��k�HY(�+
[���NG
�٬�F��B!�
+!p��A*
+b������ -
����=6�JU7����=o��@x�}a.4g��b�\2��
+�%!�>�E�|�n	ܷ5�fd�vtpa�FK�\U���:U$�
+<�XppD���p��Yނ��c�;%�\!n�K�8(�4q�:��s�ǯ�N�gwMy��=�r|H�����`����&�w�z�ሔ��Q��"�l��;֒�:?��T�U�A�x�FƂ���8p���cڻ׭�#Qi��̒�[G��$jx�{xJ߆mS!�e#�9`Ȱ��
+	C��p���sǭ"��Ir��
+JS�� �]���}o�]�w��{�/P	��û�����C����k�g�������WFe@�9�b�78,Bnk��>ٻ4}Nc�23-$;h�R�Pe-�����E�{M�o^T�_+��ga����IX�G@��� ����M|��M�-6kn�/ە��}��,� 
+��+0V�P�%�^�y�`X(�$,)a���Űh~���[�?�XO��t��b�X���p�|1c��>�2����|�2P�6����Q֜X����q��
+�8Zu�u�Ǒ耱e��N���ؗ��)��H<�+i��_�m��d��b��,E�����9U����l|n��iw��ҽ4R��~4����m�D��&��,��5q-��r�Z�'����˂k�ZGV;��3�[��C�����6_�Bxn�r��}X��C�b�dzXb��u�֘J��bn�̌s<�K��-)��e����F��tvsb
���p�{�a�"���r�s���{IV�5dg0�{���x��{��p*!.��@��4r��F H0Z�8�&i�p�vY�p)	�:健�����rBn��dT�>n�ɻ���E5!�{�V9�Y����/!F4)�&��s�-�+�F�լh|��7��^N�}��G��>PF��F{�+!8KIQ���(��[�/�S���_Z����z��pK��s-g�%SM\�v���USI`i�C��[m�������m�09��s�'��#v�Є�G|0����/��3�%'|W��Fc�$k�x�%vrr��f����[qJB9q�p1,�L���3�o��/z�-��{bXs��-�.�T9����z��y���`���
+endstream
+endobj
+71 0 obj
+<</Filter/FlateDecode/Length 1983>>stream
+H�tWI)���W �X����\��g"�ԑj���J���陟�O�O��	��-|�^c~��O�)�!^��'������ܑO{$�'�?1��g�oO��
�Y}|~��ԢS�Y���9��3vt�lڱ�w��>���x���.|c�YП�K��%���������N�R����b<��>�O����QO��Z��]���§��c2�j��oYV�k�ZN�a~
�Z��X�3V��֐���6}��g��]����g8U�[����o(�L	v�����|x�vpfℍO'q,���,Ɯ�l�Ʃ��'r�c/Y�5�)b��!��Ս�iCG�x����s%���«�~F�;�9i�v�A�����}�'�ڀ�n���ׇp������i���.M�	�SK6Y������+����B\�����/ٽc��9qV��ZcK���0��������o�P1Q�!�:�<�x
+�%�V\�
���Q<�n��޲���uþ�x�.\�7��LU�N}�j�\��	�a�/!���Y&����K�yw���Ԋ&
+���I��	;Sl����^��0��&Tv���J[iK���1UW{�0�}��}�THEv+Uj$W�3���E�"�G����̺k�Zj����\Ë�]Ѳe��D���kg�%����wCH1��;u����ry�2�������!���1�tu?�;��R h㊐$��䌱?��t�qsZ�-s�X���g���
�ڎ�8O������	�AV�k�н�pdNio#�X�[�D���7p<�6��Qai)�dՑ�-\�X������#jJ��A!-��%�b,Cʉ�zA�4.���鹠�>2�C�����sJ@ꦻ#Ca���Kt�.-G߰T����IÀU�q��?)X�,�-6bb� ���(]퀥�k�d=,��'�5��?��|-�kI\�4��R3���x?c��4����!l�#�c��K��fUr�z7lJ
p.��v�;5�܁�3������s1J��hʷ<Tip�r���ǸЂ��I}t��	�I�Z�eu���Qk�1P��C����U�l�1dIL�[������)�	�<Aw��A�6v�@9���뒅��x����A�o꽜$Kśf�T��N���͉�]<ݝ����Nc�߼����5�nZ�x�၎�7��P��$t�#��������������qp+ޟ�����5cS��mj�c88uT��RL�()�oY�tКy-�u[�F����,�k�y
�q申�;�ùi|1]��ƶ�t���+����3�XM�@�=��;�|Y��\*4p��#5l�1�A�ɚrF%a��Q���ss�k�m;��n�����r��m��A�.�tp���.�Y��Zq��'@��_K�KC�b$	C�\R�n �����m�3����кw`R2���i#��᛼�7��܀��
�]�M��[k.o���vG	�X��¸uK�\�CC���Kr�'� g�-\���}�~l��F�YV�h�=u<n��Ƒ�N��(}b02r8c4�[C�,�(�%uqt|�h�	�x�!�L�Ip~"����YR��Z�-��[����;˶
uP��[�/���M6��@Z(w�	�y��>mp����TE�C�˽�H����A�P��k F�{|�~�V)>�s]ߪ�qC��d#��쑠�iv�/V��
�w��v���4^8��&$>qv^�ťܫ�A����|=.`hT��*�a����#�&�<ʴ�ӑf�=�е����$H'�l*=�ˀ���  ,�(� ����qڕ]��o��˔z@
vQw��%�ļ8�P�>
+*��H)'Q'��[T1}@�M?n���b��E��T�0�̠�T�&��dƒ�lM���I�;*��	IlS_��P�@6�=q��w3�������%�a,�����{�&
+endstream
+endobj
+72 0 obj
+<</Filter/FlateDecode/Length 2202>>stream
+H�<�A�޼
���)|�x$Q�c�����t��o��t�I>�ze
+A*�k?�z{{ē�3
��Fk���O��������o�Kh�0\S��ۚ�o�V{t[��������=X̓v��@����Rq�wv�z�O���W����`��ޝ��������8����C��{�X�b�=�u�0�ٙ
+}�e���X�w<+�s�q�:�9��~�K<��m�fR�9ҹ>��|�z�)_~l�4mXc�$�[�����o�z��ܧV�]��H��mvd�}�3��hY�����sTz'?&�"�cN�=�v�b؃�����.4v1�9�q��Δ��Q��F������>~��L��}�n��z����(,)
+�|@��[h�����.���TZ�@�m��ag����Ã�:z�F�XAk�s�bQ����M%�L���G���ۗ�N(�4�pv'	�Rs�4Z��q��x6BIj|J{N�����x��{7�&�Vb�.�b	���� �
�t;��j�`1��.�R�@�_|":Eq
W�k����(����sO4Ȣ*��Q�� U�<�'к!Fo�Yh�[kc���)@C�S)М'��J��x�(�U:��H;�jK��^9�B��[��$�-$�����#1�	�#F���D)��y��(|,��%\o$&�ǂV�꾌��b�x��R>�f�_b���m�
r�����>�\�&�^�JK��""Q�<�zY�2�E�ow&:����Qv�	���M���6�E�6�K?1�����2a?`9�0�P��"������A	O�۞I:��Q���tZ�<R�,�� �MyW[W�����
������ZHb�=d��P#�M��2�I8�]e�^8�f����̶u��r�C�'���Au��JdEى'N���[��x
+p�ɳ�
+�vus�m���:C倥�M�Bd���������>������s�C#Ͽ���[��.4&�%
+�3K���bm
+]����~��I��,��u
�P�S�ӱ�Ayӄ�U!�#���r��RP[��vK���V:�Hi�ހN�il��,�P�n��
+��`s��;K��	�:9'(���C>A'$1���.�GX��{�<{�(̑�Yxֱ���I
+����_���m��on����ox�ղ�o#Y�D�Q�s~��<>�<���AG�W�Q��ϲ+�D�Z��j弡w� /X�;�;�!{�;K0�X�J��,�*�u�{6��#uR��d�4�I�-g��(���Q��a�v|=�윉s�3d��\:�g���^�qx��h�]~h���[}��'z�ǽ�B���o�!�u�~:���~8��``�Yp��J[i���V��n�;*���Ѣw:&Zx��rTt䆾���rS���\�t)_����Lbӽ_8��?�m��-���^d�{_�OA+`ۆ@��>��+ė���0AEj�Ke3�\���X�Ƨ�'A͟tGN��8�gx S횪��U��$sbWS^xZW�Iv_��*�h���	@kT��
vMj���m��%&@Y'$�[��$u�"\�3C>1V��@J&������&)�a��մ�1;yU
+�$@c2�U�T���՗�������Y_&�w�]J�aCW����t�s;�� S���m��;雐L����[>�=o��=��/U��$��F���Wm5+�K�c��z��K��_���/���Q8�F�����x�1�S[�U[�íR�?�C�Vl��(�k�-�׮>��y���Z��ꞥQ������^�)��
D��Uv4j�[��d	���j��u1O4����VO���r+�ݘ�PAj��j6�&L%)�p�5�%D���e��;���$�玄��A��,�����Γn^��p������3T/��0IAV���yS3��ua%&����!kb��NO����z=��滊����f�0y��W�.
xI�x9Y<�og(P؁�^���g)���>�4 <�,74����ʁY
�T���f�[�����L����e��m���am�����A��^8O�֐�>�ǃ���">
ǟ�t�[��\Dp�}G�������UA�B�pH�����u�e���te�aelzr-�0XC�́�ö�B��ߩ��u���yz�V�^ E�n�Fe\�QoM.:�q��M�S^�ҹCy��n�L��b����=V���'��=��
+endstream
+endobj
+73 0 obj
+<</Filter/FlateDecode/Length 2055>>stream
+H�\�M�&)��s�<A<���yz��_�^����Me��/l�?������������b��~�.�s6p����z�o�>�g~��o��چ�����H��7R�
�����
+���}��z_v�߽2ܯo^w�����/��ѿ�a����v���:�#:���7'��F|�	8�.(xZԉW�z�[d+��!{�g8�mPh�aN����0�7�}z|m���i���	B'8eP��T$��v�u����v_�l�� ������p�
+�5쮘��3�nE8η���Fz�J��7�ǭ��w;1�����9���N��'@���}��+n���O�]��1��2K�69�:%��pRE)����:��>���F�u,�3�s�/�߼�u�!~.T]?����˷¯i��`��r���RB�[_[q�1>���qW�u�����,2��
+
���B1�P�oF��%:嘫���w�>�N��fI>�"/eEw�TDD�J�⽀$wL�8���ƯA'�:����U��џGG��p�c���|��
+z&�Mѿ���=��u|��N�?�ܾx^�Q�V(�s�Z��ZU��]"���d�Bh�8��}�8:o[��6�	���n(�^
"l� +�w�Җ��y�0��a�Cĺ2Uf��lR�����8�of8����`oUgD���u�Pp��`7*t�l���xV�u�E9�.)w��7��R�'����-=�.(����A��
A'Q�p��#��:�B���t�㺮�锻
+�4cv�y��t�_q�F�;���;�ҡ��Kt�$�xP`J��G����JyL��ˠ���y�p� �<�(��NW���D�k8����-:%�; �n�bS5J�<�ܠ�$��uI��I�~,�,Q�A>�<���e�QR�'�^�e�t����6Ƚ྆���~É<�c�	��3�P0Y����Y>�x��E1yl�)h4�������~Θ"5�Bo��5ݯ��tM��w���n+�uh�2��"}�l���b����S�bv���'A�m��t��G��0O9�i�Z�4d&zG��<�MS�h;��LZD(�ߣ\��P�H�Ӑ�O���'����z��Ӡ����+��8���?��,��xN^8F��~uχ�����9>%Ap�-dahw��ы�>���4���"�T���H�S������/�sEQ�"��[&U�����e�$)Jݓ'�$�;̏�Mw?
+x���v�e�x���v��Pd\��o���tՂ�{�/��V��fv�Y���Ue�Z-�S�Lw�bz���1 ��Y
o�|EW�(wZ���}-�q�++-��S����{wY��S���c��l=�`�dG���愚�d�ˆ˜�H��K�5����\��T!=�
+�
+(��:��|;\�F��0�e`�Ǡ���Q��u�k��g�[-%M�|-�=D�d�M��jW���9j2�(��}�WLԽ^/�U�t��pui��EL=�.�j0Y5$�g�
©e���"1-�1*��O8��������o��`��y��ۢF�|�C
a�ƩU�2���pҰ�Y!�so�CM:Y[ٮ�E�UVq�),V���iZM^�;jk�q�YՈ8�S\=����^����o�*����	�5��][`	F��y�W��n;���l+X%\O�e�$<�{)��ϻ�3�?�����Fq-��p��<܎_�fM��U��$P�D
���]���׆�����	��%�:�o�`b|�#��
+4��ޒT����Z�]��X{�fm�]}�b�f�V&VE�W����-8>N�뾵r�����Е�g4���	V�QD\�{mM^x�/�j�[q��O�l��m�{�,^�R[�`m���MV.YY�4R�����…�d4$��51��sǻ��Ƃq��'��V����Gݽ�>����8u��N![,��]_I_�j�dC�}Uw��(&,��6����:��,XGQ����f��0��c���_�VnQrW��ؑ���wŖ5�����ˤ��"���9�������T
+endstream
+endobj
+74 0 obj
+<</Filter/FlateDecode/Length 2274>>stream
+H��W�n�}'��G)0�}�<:+�6D���C������CY��>�TMjm# �p����N�걦�ࢲU����*[tr��X��:��V3b��M:�l��|�	\u*�N��|�]�����᱇��Չ�ӥFUt�\�u([nd����5[�mN���8r�p�}T�h�j�e�ɫ�8'���V����.6EhJvjKl
+��ū)�zl�T
��]�+,�:3y0���:�LqF�`$G��fk�)45�)�P���^����,�7qZ��'J~Rb�
+ܭf�j�
�#E�A�i��L���d�xB�_b' �f�2�a��4�����ģW���f_'|\�'�w���q&6�ȥ*�JDń��Pt�Ƞ�@��EXX��(��	6dl�* U�-�����JS�S��b-�X]�s8Z��h������
+n�TTȠ$?t�W� �D9k$
+q����eD�j�69 �V`Ud��6Ĕ��_<c��<7Ie���#Z��F'��e	���P�KB�&/��4&Y�Y% �&�xfp��r���g�8�̧�L�R �&P�&�V���U4i����n�c�m���8�gH�0������4�@�ga;�H�\�P��C҃`G�;f��}O�m|���x���"�[+�t$�
��Jd]UE��G��	Y��Y�pJ
+#)�V��Y����(Q@�"�D��f2��S&9�i�������"@���zR`6�<�: <Z��`6a_P���&lm@O'�	h�HP�BuP�HY	�ߒ����dz4bG0*]@n0�BjqS(@GuC�As�[E���@����G��d�DA�n����t9&�5�@d�e��9EVPF[J�T�����slϩh�E�X�����CT��s
�����#
+�XA�ȇ�����L����غ#n�-����*��Q� ��Q �(H�=�"G68�"M������V�<Lg*@�-�߆/9�ٷ�t�a��ފ� ^��As�
+�^�
УSк��+�u�&$���l$�]kz0��Q3qh��F#U�yup]hhq.�SR�!pXE�t��ï�L6��� c��X�7��2��FQe�$T)��i���̝
�ΩF=(h��n6�d�r��c��)_�]TdP{�e��x�VG�~�sx��`D�$݀T/+����D�m�F��S/�)%I�Z��ᤏiT'$p�\bG��Vb�8�?��
�ئLNb,��V@+�72����:�N��:�Ԙ�Y�f�(6n�
+�����spN�1�%s`��i�<�^@
�
+����>�͒�ԑ�3���{wB�m�Hw$l�C	�!����1����!7D�[e����n���e1{�X J�������?�#��Hq��g�����}�_�/m���^_]�g����b�_�x�<ݵp!A�+u��}7�]�0����̍��4�,�����)�$aV\sV����;��9����a���P�
+��Y���$�����G�S%��m���#�Ezg�������9�73��OO�a��{��lgǽ���jE2`j������E\��,n��m��}�u�L����t��՜�4A-����A#�G�?��M�V��K.��C�Row7��F5�o?X�i��49�c�Rlʙ�t��w�i����}/N����W�ȼ?:�y�|gåS<�ڎj����.��a3�˭Z~�tBڮ��F�nX�ú����yZѭw]����Y�w��5���{��
+�<��}�
+���ߪ���v��������d���z�^m�c��Y�\٩�u�8~ݮk-��w}{���}�M';���j����j���M�W�M�|D��i������x�������g[|�$<~��q����x��u�C�t7/ԫ����^��H���ͪS�������0���^su�z���ԭ?������j�n�Ն��<����0t��etru~�����G���VC�������pn��S�/�o�\ͱ��v�eT��5'o�~<�J\�y�m����n��evT�lj��^v5�Wou��O:��35�9k��O����\��e��B駧'A?��^�|�o��Cw�����јP����.���+���B�l�s�[����7\^+�`��D�w��9iq��eH���s)���C�v^0��w��@�����sє��{l�#�gs�v;G?��t�%-K���b�e[���R~��
+endstream
+endobj
+105 0 obj
+<</ModDate(D:20250218073805-08'00')/Creator(Adobe InDesign CS5.5 \(7.5.3\))/Trapped/False/CreationDate(D:20130904154253+02'00')/Producer(PDFlib PLOP 2.0.0p6 \(SunOS\)/Adobe PDF Library 9.9; modified using iText 4.2.0 by 1T3XT)/Subject(Optik & Photonik 2013.8:38-40)/WPS-PROCLEVEL(2)/WPS-JOURNALDOI(10.1002/\(ISSN\)2191-1975)/Title(���R�a�s�a�n�t�e� �E�n�t�w�i�c�k�l�u�n�g� �i�n� �d�e�r� �3�D �B�i�l�d�g�e�b�u�n�g)/WPS-ARTICLEDOI(10.1002/opph.201300018)>>
+endobj
+4 0 obj
+<</Filter/FlateDecode/Type/ObjStm/Length 3283/First 403/N 50>>stream
+x��[ko۸�+�آH)R�H���$�浱�$5��b��vm�ג����;CJ�$K��lQܽPR9|�ͨ�8�9DI��O\F��ą2M\A��R_����Մy�h�<Έ�r%���C�K��5X��T�d�d.5#D8Z�h�A��U��BH}�xХ�8�$�П�B
+�3!<�TB
+� �1�Oq��^Cg�'ҁ��G��LC0
��t	4�|��`���`P��Ӈ*���B�tE0XΉ�A%߃U� &�Ht�
SP0J�p���4(��Ꭻp�w����� s0_�ER
+��/��zH�!��f�ғx��V��&ӏfartς9}?��h�Ho���<���w�PT���4x��70"�(�;���j����mM�p܊�
���|�aHZ����1\�!=^�M�n�$^-Gab�����"�x�c�u�K��d�w���o�i/
R�s�u��s�udV�z�zaJ��sBh?��ڝ�PZY����Ь����	y�z��.���ߍ�fy��m��%��ͺ��6k��f����Y�)��9���]=��pT�4���E0>��Π�o�B��2^4-�u���D��71.q�E8���}[F��gV?���l����经o����0�F�e�
+���x�)�F'��������v�d���<�*	��Y���<��!q��1�fQ�8��-��C��N�)I�Yw�%F¶�8�!�������J8�e�H�%™�z�w���-��!�x��Cl�?���f`J*�!l�m�zOh����16$B����3�u�C�8���`
+[�L���%�|�9<�z���'�_�_|�C(�U�2����P�
f"f�x�����v;�b�i�2,�p�Z�5?FI�|zu<����j9��ѯ�6�3�	ga�8I�b��`�>���u=��! �5����d�<l�^18�8>s@5}�����f��`M�^ݜߟ�vڸ˯3�K����n6��L�DŽ��]�p��t�`���S4�c+Fi$��e��&�2^΂�)��s��Ӽ�>\u���y�y�5��0��gwgg���P�S�`5C9��Iڞ`��rح�u����7O�(�!p���˪-^��|�HB��y���6�8���4[O;o:Ni'��%��m���(���Aa_a��9�������d"�����p:08M|ɇE�vNOn��bW����^:;����o\�zu��Pu�l����]�B3��ޞ���[õ��u����W�و������x!:�K�L(�!�ݮ�}ݩ��M��x*H��a��k�&�LN�5�f&s:�afNWei95�–��-f&݌�q"Ś�I��5xA�y$���|�Yef]cV�-����m��ܦ�h��\|�JNl���\Y�v6y�!^
+kIƊ2t�}�Y��yR���!�,�ef�m$�=�u��JD�-ڿp<�sכ���HKd����J�G��������$��(�}8ש��z���~Qf:�Ap�'�x��Ͼ��N�5�7�-X6���Lͱ�!��k�����rTJ��������[��Ɉ��j���x�$����i�v�o�f́^���Zc����ĎG�"�z����50����&�,.��xu̫ӄ�w��m�U�� �=�[lޛs\-í�_��Yí޼	k��@���F ��W��K0MB/������E��	�hJ��euZ9E��� ����O��tfu���m��v�Dןe~����7�oz)���I/��fգB�g��ЪJ�4�"QV��oq����e7�S�M�AK����p��o33�S��ع}]ޱT�f
a�[��y��n?��sͣx�l�΢�x:�ڨ��\nv��gi��@��7T6�(_��Ɨ�IJS����SWk��'R[�+|A<�����]�€0�#?C���|Oo�-ҁQ�k96H�Ȼ�\����dП�X���m��|I^�^���Q<�t򴘄s��Q<�ǴE;��v����p�i@舎iH��G:���N��iLtI���J��SPDx�N����\��Y��M�
+e2%D\G�#�ÓM#�b��
+ւ��f�q��.,(8����
+p{���~yn���t���[������m���gw=��pU����\��e{��mQ������qʅ:�h��@���Þ1<����.����h�M
+��
+�[q\�:�.�0�8�Ұy�=>pH9�E@ǘ���~��à�L\
+F��1��V�y�/�o��=����_o�*�@Y�x���Ha�;��ص��G�s�T�(+�0i�p~Imn��TT�4H&��pS$S�_1M'�0*��$�Fì�4�Xk���'�ng�o�9@ܥ�s�'z[v���?%��F����U
+����i��>яk|��iB�u����D���t��vO7�#3�E���Ft��I��_q�������ۺ܋��ϢR�r��}.�r�����H��Im\�׆u�ϊNC,[;@D-�S^��cC[ ����k��6��S�Yײ.tG��d��\���a���́ޗ4~�bꀴ�t�zB��q_����r�=���w�{C�3u������)��"���l��j��j��:W���<�q��dQtw���Fm�Oa�O���.I��{��]޹�
�����z���`G�•6�3t^��BPۆ��م��sU��B��]���MX_�{CL��B��e�'� �y�|�*2�YÊ�����5��ͯ&�b>q�L��лL��Wѭ��b���8��B�r�C��~�M�'��=�~��{���4��ɷ��
�K�����m������n
/X��p�j�Te�Y����(�N2�2���19���'��,X#Z�	���bm�Gx�����
+�ڣ�)�j5�Ϳ$�/?=ϏD�C��i`_b?�z��|ֿ�\���[v��Si�S_ƾ��G���y�����Mۡ<�+*�Ov��ɻQ���K ��E�w�Y��=�\��I]Uo�߷�y|��W���&�R�
��T��9ez!E����(�G���s���m4U��ڹ�trޫ��nZUSfN��~i%r=�~k��W5;l��̥�����"�Eos�g�H���s�2��vn� "��<�����aց+��x�F��(H�
|���+�hb3C7������&�k���R���L��:�$�Z��8y�(0'��#��"dF��V����
+��V�Bn�Q�쟞9�gοc���"�w
+endstream
+endobj
+106 0 obj
+<</Info 105 0 R/Filter/FlateDecode/Type/XRef/W[1 3 2]/Index[0 107]/ID [<dbc5ea95575f7409965d21af322e26a7><2c343ba920a8d569aa78ce782c46829d>]/Root 66 0 R/Length 352/Size 107>>stream
+x�5�;,CQ���^��}�ֳ�*�I5H:%C�FHLbI�N�E,Sc""!ځ�i��}�z�_���s�y\��r(�S�R�(��Tʔ�7�yK�|l��������F�!� dl��dcl;��z�qv�e��0k�;w�_e?����4�ma[��;�����z��� �<o7��N�c���yZ�����=J'�T��anc]?����=�C���Y�$?2���I`��~h;G7�A�i���E��'����˜��ge�N��a���)di��&��A���o��C���Бs�qv:�'Mz�Q��.�}��f��&9{����H���$h�=��^��8���?�
+endstream
+endobj
+startxref
+309718
+%%EOF
diff --git a/Bachelorarbeit/V2/Quellen/Robotics_Control_Sensing_Vision_and_Inte.pdf b/Bachelorarbeit/V2/Quellen/Robotics_Control_Sensing_Vision_and_Inte.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..578fbcaf0c5e63bc6b1939ec69758343abf0a353
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/Robotics_Control_Sensing_Vision_and_Inte.pdf differ
diff --git a/Bachelorarbeit/V2/Quellen/vl53l7cxhalterung.pdf b/Bachelorarbeit/V2/Quellen/vl53l7cxhalterung.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..9f1c59dc94a25219d1b02b3dd273fd6fac36cb26
Binary files /dev/null and b/Bachelorarbeit/V2/Quellen/vl53l7cxhalterung.pdf differ
diff --git a/Bachelorarbeit/V2/Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Drawing.pdf b/Bachelorarbeit/V2/Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Drawing.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..ad67ddc748de65cc2c122d6d730b871aea16fe32
Binary files /dev/null and b/Bachelorarbeit/V2/Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Drawing.pdf differ
diff --git a/Bachelorarbeit/V2/Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Top+View+Drawing.pdf b/Bachelorarbeit/V2/Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Top+View+Drawing.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..b1d4b6c6de8196858ead3dfdaeb94d58bf3752bd
Binary files /dev/null and b/Bachelorarbeit/V2/Technische_Zeichnungen/Simple+Mount+with+FOV+Bodies+Top+View+Drawing.pdf differ
diff --git a/Bachelorarbeit/V2/Technische_Zeichnungen/VL53L7CX Angular Distance.pdf b/Bachelorarbeit/V2/Technische_Zeichnungen/VL53L7CX Angular Distance.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..30d6b31ca1db6bbbebd12fe7ed1dd37a02d97c45
Binary files /dev/null and b/Bachelorarbeit/V2/Technische_Zeichnungen/VL53L7CX Angular Distance.pdf differ
diff --git a/Bachelorarbeit/V2/Technische_Zeichnungen/kleine_vl53l5cx_halterung Drawing v1.pdf b/Bachelorarbeit/V2/Technische_Zeichnungen/kleine_vl53l5cx_halterung Drawing v1.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..3453d3899bb0ee4c19b22415e100120be70ae205
Binary files /dev/null and b/Bachelorarbeit/V2/Technische_Zeichnungen/kleine_vl53l5cx_halterung Drawing v1.pdf differ
diff --git a/Bachelorarbeit/V2/Technische_Zeichnungen/kleine_vl53l5cx_halterung Drawing v2.pdf b/Bachelorarbeit/V2/Technische_Zeichnungen/kleine_vl53l5cx_halterung Drawing v2.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..1347169129b6e3140983becd799b218cd5b46ead
Binary files /dev/null and b/Bachelorarbeit/V2/Technische_Zeichnungen/kleine_vl53l5cx_halterung Drawing v2.pdf differ
diff --git a/Bachelorarbeit/V2/ba.tex b/Bachelorarbeit/V2/ba.tex
new file mode 100644
index 0000000000000000000000000000000000000000..de0c3499adc17b387db8f1d6d984e489bed4caf0
--- /dev/null
+++ b/Bachelorarbeit/V2/ba.tex
@@ -0,0 +1,236 @@
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Beispiel für eine Bachelorarbeit mit FH Aachen FB 8 
+% Titelblattstyles
+%
+% Prof. Enning, 18.07.2013
+% Überarbeitet, 17.06.2014
+% Weitere Überarbeitung, 02.04.2025 (Grok 3)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%% Präambel %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\documentclass[%
+% draft % Falls ohne Bilder gedruckt werden soll (Entwurf)
+]{scrbook}
+
+% --- KOMA-Optionen ---
+\KOMAoptions{%
+	parskip=true,
+	fontsize=12pt,
+	toc=flat,
+	twoside=false,
+	numbers=nodotatend
+}
+
+% --- Grundlegende Pakete ---
+\usepackage[T1]{fontenc}
+\usepackage[utf8]{inputenc}
+\usepackage[ngerman]{babel}
+\usepackage{csquotes}
+\usepackage{xcolor}
+\usepackage{graphicx}
+\usepackage{textcomp}
+\usepackage{amsmath}
+\usepackage{mathrsfs}
+\usepackage{listings}    % Paket für Code-Darstellung
+
+
+% --- Formatierung nach FH-Vorgaben ---
+\usepackage{helvet}
+\renewcommand{\familydefault}{\sfdefault}
+\usepackage{geometry}
+\geometry{a4paper, top=20mm, left=30mm, right=20mm, bottom=25mm}
+\linespread{1.25}
+
+% --- Pakete für Tabellen und Grafiken ---
+\usepackage{tabularx}
+\usepackage{multirow}
+\usepackage{colortbl}
+\usepackage{booktabs}
+\usepackage{siunitx}
+\usepackage{tikz}
+\usetikzlibrary{shapes,arrows}
+\usepackage{tikz-cd}
+\usepackage{float}
+\usepackage{siunitx}
+\usepackage{pdfpages}
+\usepackage{amsmath}
+\usepackage{tabularx}
+\usepackage{xurl} % for smarter URL breaking
+% --- Pakete für Listen und Nummerierungen ---
+\usepackage{paralist}
+\usepackage{enumitem}
+\setlist[itemize]{itemsep=2pt, topsep=4pt, parsep=0pt, partopsep=0pt}
+
+% --- Pakete für Verzeichnisse ---
+\usepackage{chngcntr}
+\counterwithout{figure}{section}
+\renewcommand{\thefigure}{\textbf{\thechapter}-\arabic{figure}}
+
+% \usepackage[automark]{scrlayer-scrpage}
+% % Untere Linie (schwarzer Strich unten auf jeder Seite)
+% \rofoot{%
+	%   \vspace*{0cm}% Reduzierter Abstand vom Text (vorher 0.5cm)
+	%   \rule{\textwidth}{0.5mm}% Schwarzer Strich (0.5 mm dick)
+	% }
+% \lofoot{%
+	%   \vspace*{1cm}%
+	%   \rule{\textwidth}{0.5mm}%
+	% }
+\usepackage{tocloft}
+% --- Nummerierung und Tiefe für TOC ---
+\setcounter{secnumdepth}{3}
+\setcounter{tocdepth}{3}
+
+% --- Einrückung im Inhaltsverzeichnis ---
+\setlength{\cftsecindent}{2em}
+\setlength{\cftsubsecindent}{4em}
+\setlength{\cftsubsubsecindent}{6em}
+
+%Dieser Befehl steuert, **wie die Linie** zwischen Kapitelüberschrift und Seitenzahl aussieht:
+\renewcommand{\cftchapleader}{\cftdotfill{\cftdotsep}}
+\renewcommand{\cftchapdotsep}{\cftdotsep}
+
+% --- Formelverzeichnis ---
+\usepackage[intoc]{nomencl}
+\makenomenclature
+
+% --- Kopf- und Fußzeilen ---
+\usepackage[automark]{scrlayer-scrpage}
+
+% --- Caption-Anpassungen ---
+\usepackage{caption}
+\captionsetup{%
+	format=plain,
+	labelfont=bf,
+	justification=centering,
+	width=\textwidth
+}
+\captionsetup[table]{position=above}
+
+% --- Hyperref ---
+\usepackage[pdfborder={0 0 0}, colorlinks=true]{hyperref}
+\hypersetup{linkcolor=blue, citecolor=blue, urlcolor=blue}
+
+% --- Abkürzungsverzeichnis ---
+\usepackage[xindy,acronym,toc,hyperfirst=true]{glossaries}
+% Stil für Abkürzungen: blau und Hyperlink zum Abkürzungsverzeichnis
+\renewcommand*{\glshyperlink}[2][]{%
+	\hyperlink{glossary:acronyms}{\textcolor{blue}{#2}}%
+}
+% Seitenzahlen im Abkürzungsverzeichnis als Hyperlinks
+\renewcommand*{\glsnumberformat}[1]{\hyperpage{#1}}
+
+\setacronymstyle{long-short}
+\makeglossaries
+
+% --- Titelblatt-Style ---
+\usepackage{fhacmb}
+
+% --- Blindtext ---
+\usepackage{blindtext}
+
+% % --- hyperref für Hyperlinks ---
+\usepackage{hyperref} % Ensure hyperref is loaded explicitly
+
+% --- biblatex für Quellenverzeichnis ---
+\usepackage[
+backend=biber,
+style=numeric,
+hyperref=true,
+backref=true
+]{biblatex}
+\addbibresource{BA.bib}
+
+% Citation command with hyperlinked BibTeX key to bibliography
+\DeclareCiteCommand{\cite}
+{\usebibmacro{prenote}}
+{\mkbibbrackets{\bibhyperref{\printfield{entrykey}}}}
+{\multicitedelim}
+{\usebibmacro{postnote}}
+
+% Bibliography labels use BibTeX key
+\DeclareFieldFormat{labelnumberwidth}{\mkbibbrackets{\printfield{entrykey}}}
+\DeclareFieldFormat{labelnumber}{\printfield{entrykey}}
+
+%%%%%%%%%% Angaben für Titelseite %%%%%%%%%%%%%%%%%%%%%%
+\input{titelangaben}
+%%%%%%%%%% Einstellungen für Code %%%%%%%%%%%%%%%%%%%%%%
+\input{Code_settings}
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\begin{document}
+	
+	% --- Anpassungen nach \begin{document} ---
+		\renewcaptionname{ngerman}{\figurename}{\textbf{Abbildung}}
+		\renewcaptionname{ngerman}{\contentsname}{Inhalt}
+		
+		% --- Titel im FH-Style ---
+		\fhacmbtitle{\includegraphics[height=5cm]{fh_logo.png}}{5pt}{40pt}
+		
+		% --- Vordere Seiten (römische Nummerierung) ---
+		\frontmatter
+		\renewcommand{\thepage}{\Roman{page}}
+		% --- Inhaltsverzeichnis ---
+		\newpage
+		% --- Erklärung ---
+        \input{Kapitel/Erklärung}
+        
+        % --- Danksagung ---
+        \input{Kapitel/Danksagung}
+        
+        % --- Abstract ---
+        \input{Kapitel/Abstract}
+        
+		\tableofcontents				
+		
+		% --- Abkürzungen definieren ---
+		\input{Abkürzungen}
+
+        
+		% --- Hauptteil ---
+		\mainmatter
+		\pagestyle{scrheadings}
+		\renewcommand{\chaptermark}[1]{\markboth{\thechapter\hspace{1cm}#1}{}}
+		\chead{}
+		\ihead{\leftmark}
+		\renewcommand{\headfont}{\bfseries}
+		\setheadsepline{0.5pt}
+		
+		% --- Einleitung ---
+		\input{Kapitel/Einleitung}
+		
+		% --- Stand der Technik ---
+		\input{Kapitel/Stand der Technik}
+		
+		% --- Umsetzung ---
+		\input{Kapitel/Umsetzung}
+		
+		% --- Fazit und Ausblick ---
+		\input{Kapitel/Fazit und Ausblick}
+		
+		
+		% --- Quellenverzeichnis ---
+		\printbibliography[title=Quellenverzeichnis]
+		\addcontentsline{toc}{chapter}{Quellenverzeichnis}
+		
+		% --- Abkürzungsverzeichnis ---
+		\newpage
+		\phantomsection
+		\label{glossary:acronyms}
+		\printglossary[type=\acronymtype, title=Abkürzungsverzeichnis]
+		
+		% --- Abbildungs- und Tabellenverzeichnis ---
+		\newpage
+		\listoffigures
+		\renewcommand{\listfigurename}{Abbildungsverzeichnis}
+		\addcontentsline{toc}{chapter}{Abbildungsverzeichnis}
+		
+		\newpage
+		\listoftables
+		\renewcommand{\listtablename}{Tabellenverzeichnis}
+		\addcontentsline{toc}{chapter}{Tabellenverzeichnis}
+		
+		% --- Anhang ---
+		\appendix
+        \input{Kapitel/anhang}
+		
+	\end{document}
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/chat_text.tex b/Bachelorarbeit/V2/chat_text.tex
new file mode 100644
index 0000000000000000000000000000000000000000..a2b32f64d665cd6c859df57da9c680138b86dc32
--- /dev/null
+++ b/Bachelorarbeit/V2/chat_text.tex
@@ -0,0 +1,23 @@
+\chapter{Einleitung}
+Mit dem Fortschreiten der Entwicklung hin zu Industrie 5.0 gewinnen kollaborative Roboter zunehmend an Bedeutung, da sie unter anderem eine enge Zusammenarbeit zwischen Mensch und Maschine ermöglichen \cite{vogel-heuser_von_2023}. Diese Zusammenarbeit setzt jedoch voraus, dass von den Robotern keinerlei Gefährdung für den Menschen ausgeht.
+\\Insbesondere im Kontext der Assistenz für körperlich beeinträchtigte Personen erweisen sich \acrfull{Cobots} als unterstützend, da sie die Handhabung von Arbeitsmaterialien erleichtern können \cite{noauthor_iidea_nodate}.
+\\Auch vor dem Hintergrund einer alternden Gesellschaft, in der das Durchschnittsalter der Erwerbsbevölkerung kontinuierlich steigt, können \acrshort{Cobots} einen wesentlichen Beitrag zur Entlastung leisten, indem sie repetitive und physisch belastende Tätigkeiten übernehmen~\cite{haddadin_physical_2016}.
+Aus den genannten Gründen sind Robotersysteme erforderlich, die eine effiziente und sichere Zusammenarbeit zwischen Mensch und Maschine ermöglichen.
+Derzeit werden Kollisionen von \acrshort{Cobots} mit Menschen oder Objekten häufig mithilfe intrinsischer Sensoren detektiert \cite{popov_collision_2017} oder durch eine räumliche Trennung von Mensch und Maschine verhindert.
+Ist eine räumliche Trennung jedoch nicht realisierbar und muss eine Kollision dennoch unbedingt vermieden werden, um einen sicheren und zugleich effizienten Arbeitsablauf zu gewährleisten, kann das in dieser Bachelorarbeit vorgestellte Sensorsystem eine geeignete Lösung darstellen.
+\\Ziel des Sensorsystems ist es, über die reine Kollisionsdetektion hinaus insbesondere potenzielle Kollisionen frühzeitig zu erkennen und aktiv zu vermeiden. Durch das Vermeiden von Kollisionen kann der \acrshort{Cobot} nach einer identifizierten Gefährdungslage eine alternative Bahn planen und ausführen, um seine Aufgabe trotz der beinahe erfolgten Kollision weiterhin zuverlässig zu erfüllen.
+\\Das in dieser Arbeit vorgestellte Sensorsystem beschränkt sich auf die Vermeidung von Kollisionen durch das gezielte Stoppen der aktuellen Roboterbewegung.
+\\Zur präzisen Erfassung der Umgebung greift das vorgestellte Sensorsystem auf \acrfull{ToFs} zurück. Aus den erfassten Daten kann eine dreidimensionale Abbildung der Umgebung erzeugt werden. Diese digitale Repräsentation ermöglicht es, potenzielle Kollisionen frühzeitig zu erkennen und gezielt zu vermeiden.
+\\Durch die Implementierung von Kollisionsvermeidung kann ein \acrshort{Cobot} autonomer agieren, wodurch sowohl die Effizienz als auch die Sicherheit im Produktionsumfeld signifikant gesteigert werden.
+
+\section{Zielsetzung}
+
+Auf Grundlage vorangegangener Arbeiten, in denen verschiedene Sensorarten, deren Positionierung sowie geeignete Kommunikationsschnittstellen untersucht wurden, wurde ein konzeptioneller Rahmen für die vorliegende Arbeit entwickelt. Dieser wird im weiteren Verlauf detaillierter dargestellt.
+
+Ziel dieser Arbeit ist es, mithilfe exterozeptiver Abstandssensoren aus der Ego-Perspektive eine vollständige, digitale und dreidimensionale Abbildung der Umgebung eines seriellen Roboters zu erzeugen.
+
+Der Begriff „vollständig“ bezieht sich in diesem Zusammenhang darauf, dass die Abbildung hinreichende Informationen liefert, um potenzielle Kollisionen mit Objekten im Arbeitsraum zuverlässig zu vermeiden.
+
+Diese digitale Repräsentation des Arbeitsraums ist so konzipiert, dass der Roboter selbst von der Abbildung ausgeschlossen wird. Unter dem Begriff Arbeitsraum wird dabei der Bereich verstanden, der vom Roboter mechanisch erreicht werden kann.
+
+Eine Differenzierung zwischen dem Manipulationsziel und anderen Objekten ist zum aktuellen Zeitpunkt nicht erforderlich – ausgenommen hiervon ist lediglich der Roboter selbst.
diff --git a/Bachelorarbeit/V2/fh_logo.png b/Bachelorarbeit/V2/fh_logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..4f231037730a4b1986cb1c05dc3a08124be508da
Binary files /dev/null and b/Bachelorarbeit/V2/fh_logo.png differ
diff --git a/Bachelorarbeit/V2/fhacmb.sty b/Bachelorarbeit/V2/fhacmb.sty
new file mode 100644
index 0000000000000000000000000000000000000000..a8bf10bd4f3dacd8e377c0d9c196e7d6022d9831
--- /dev/null
+++ b/Bachelorarbeit/V2/fhacmb.sty
@@ -0,0 +1,172 @@
+%%%%  Package fhacmb.sty   %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% Erzeugt eine Titelseite für eine Abschlussarbeit nach dem CD des FH 8
+% Voraussetzung: Documentclass scrbook (KOMA-Klasse)
+%
+% Verwendung: Felder mittels der Befehle definieren, z.B. \titel{irgendwas}
+% Befehl \fhacmbtitle{logo}{abstandx}{abstandy} aufrufen
+% logo: Anweisung zum Zeichnen eines Logos, z.B. \includegraphics[height=4cm]{fh_logo}
+% abstandx: Abstand zwischen rechtem Papierrand und Logo
+% abstandy: Abstand zwischen oberem Papierrand und Logo
+%
+% Nicht definierte Felder tragen als Inhalt den Befehlsnamen
+% Manche Felder können leer bleiben \cobetreuer{}
+%
+% Autor: Prof. Enning, 17.07.2013
+%
+% Bitte beachten: Wenn Felder vor \begin{document} definiert werden, müssen Umlaute vollständig umschrieben werden
+% Shortcuts aus babel können noch nicht verwendet werden. Also \"A für ein Ä
+%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+\RequirePackage{color}				% Für farbigen Text
+\RequirePackage{microtype}			% um die 9pt Schrift etwas breiter laufen lassen zu können
+\RequirePackage{graphicx}			% für \includegraphics
+\RequirePackage{ifthen}				% für \ifthenelse
+\RequirePackage{tabu}				% für die einfache Tabellengestaltung
+\RequirePackage{helvet}				% Arial-ähnliche Schrift
+\RequirePackage{picture}				% zum Setzen des Logos
+\RequirePackage{geometry}
+%\RequirePackage{mmout}				% für Kontrollausgaben der Maße
+
+
+\geometry{a4paper, top=20mm, left=30mm, right=20mm, bottom=25mm}  % Ränder explizit einstellen
+
+% FH-Mint definieren
+\definecolor{fh-mint}{RGB}{0,177,172}
+
+% Textstyles für das Deckblatt
+\def\hlmint{\fontfamily{phv}\fontsize{19pt}{19pt}\fontseries{b}\color{fh-mint}\selectfont}
+\def\hlAbold{\fontfamily{phv}\fontsize{14pt}{30pt}\fontseries{b}\selectfont}
+\def\hlAnormal{\fontfamily{phv}\fontsize{14pt}{14pt}\selectfont}
+
+% Variable und Kommando für den Typ der Arbeit
+\def\@arbeitstyp{arbeitstyp}
+\newcommand\arbeitstyp[1]{\gdef\@arbeitstyp{#1}}
+
+% Variable und Kommando für Fachbereich
+\def\@fachbereich{fachbereich}
+\newcommand\fachbereich[1]{\gdef\@fachbereich{#1}}
+
+% Variable und Kommando für Studiengang
+\def\@studiengang{studiengang}
+\newcommand\studiengang[1]{\gdef\@studiengang{#1}}
+
+% Variable und Kommando für Matrikelnummer
+\def\@matrnr{matrnr}
+\newcommand\matrnr[1]{\gdef\@matrnr{#1}}
+
+% Variable und Kommando für Vertiefungsrichtung
+\def\@vertiefung{vertiefung}
+\newcommand\vertiefung[1]{\gdef\@vertiefung{#1}}
+
+% Variable und Kommando für Autor
+\def\@autor{autor}
+\newcommand\autor[1]{\gdef\@autor{#1}}
+
+% Variable und Kommando für Titel
+\def\@titel{titel}
+\newcommand\titel[1]{\gdef\@titel{#1}}
+
+% Variable und Kommando für Betreuer
+\def\@betreuer{betreuer}
+\newcommand\betreuer[1]{\gdef\@betreuer{#1}}
+
+% Variable und Kommando für externen Betreuer
+\def\@extbetreuer{extbetreuer}
+\newcommand\extbetreuer[1]{\gdef\@extbetreuer{#1}}
+
+% Variable und Kommando für Datum
+\def\@datum{datum}
+\newcommand\datum[1]{\gdef\@datum{#1}}
+
+% Variable und Kommando für Danksagung und Geheimhaltung
+\def\@dank{dank}
+\newcommand\dank[1]{\gdef\@dank{#1}}
+
+% Hier wird der Befehl definiert, der die Arbeit macht
+% Beispiel für Aufruf:
+% \fhacmbtitle{\includegraphics[height=4cm]{fh_logo}}{5pt}{5pt}
+\newcommand{\fhacmbtitle}[3]{
+	\begin{titlepage}
+	% Berechnungen für das Positionieren des Logos
+	% Variablendeklarationen
+	\newskip\@logobreite	% Breite des Logos
+	\newskip\@logohoehe	% Höhe des Logos
+	\newskip\@koordx		% Abstand vom linken Rand
+	\newskip\@koordy		% Abstand vom oberen Rand
+	\newskip\@offsx		% Offsetx des Picture-Koordinatensystems
+	\newskip\@offsy		% Offsety des Picture-Koordinatensystems
+	%
+	\def\@xcorr{#2}     	% Abstand des Logos vom rechten Rand
+	\def\@ycorr{#3}		% Abstand des Logos vom oberen Rand
+	\def\@logo{#1}		% Zeichenanweisung für Logo als benannte Variable
+	% Größe des Logos ermitteln und speichern
+	\settowidth{\@logobreite}{\@logo}
+	\settoheight{\@logohoehe}{\@logo}
+	% Offsets des Koordinatensystems für ganzes Papierformat
+	% in X-Richtung
+	\@offsx=1in		% 1 inch (Standardrand, Systemkonstante)
+	\advance\@offsx by \oddsidemargin		% Rand abziehen (Logo nur auf ungerade Seite)
+	% in Y-Richtung
+	\@offsy=0in		% Standardrand offenbar Null ???
+	\advance\@offsy by \topmargin		% Rand abziehen
+	% x-Koordinate für Logo berechnen
+	\@koordx=\paperwidth					% initialisieren mit Papierbreite
+	\advance\@koordx by -\@logobreite 	% davon Logobreite
+	\advance\@koordx by -\@xcorr 		% und gewünschten Abstand abziehen
+	% y-Koordinate für Logo berechnen
+	\@koordy=0pt			% mit 0 initialisieren (oben)
+	\advance\@koordy by \@ycorr 			% gewünschten Abstand dazuaddieren
+	% picture-Umgebung am oberen linken Blattrand ausrichten
+	\begin{picture}(0pt,0pt)(\@offsx,-\@offsy)
+	% Logo zeichnen
+	\put(\@koordx,-\@koordy){\@logo}
+	\end{picture}
+	
+	% Kontrollausgaben der Maße. Erfordert selbstgeschriebenes Package mmout.sty
+	%offsx=\MM{\@offsx}\\
+	%offsy=\MM{\@offsy}\\
+	%koordx=\MM{\@koordx}\\
+	%koordy=\MM{\@koordy}\\
+	%
+	% 9 pt Helvetica (=phv) auswählen
+	\fontfamily{phv}\fontsize{9pt}{9pt}\selectfont
+	\vspace{25mm}
+	% Mintfarbenes fettes "FH Aachen"
+	{\hlmint
+	FH Aachen
+	}\par
+	{\hlAbold
+	Fachbereich \@fachbereich
+	}\par
+	{\hlAnormal
+	Studiengang \@studiengang
+	}\par
+	\vspace{20mm}
+	\@arbeitstyp\par
+	{\hlAbold 
+	\@titel
+	}\par
+	\vspace{15mm}
+	\tabulinesep=3mm
+	\begin{tabu}{X[1]X[2]}
+	\textls{vorgelegt von} & {\hlAbold \@autor}\\[2mm]
+	 & \textls{Matrikel-Nr.} \textbf{\@matrnr}\\[10mm]
+	% Zeile Referent nur, wenn Angabe vorhanden
+	\ifthenelse{\equal{\@betreuer}{}}{}{
+	\textls{Referent:} & \@betreuer\\
+	}
+	% Zeile Externer Betreuer nur, wenn Angabe vorhanden
+	\ifthenelse{\equal{\@extbetreuer}{}}{}{
+	\textls{Externe Betreuerin:} & \@extbetreuer\\[8mm]
+	}
+	\ifthenelse{\equal{\@datum}{}}{}{
+	\textls{Datum:} & \@datum\\
+	}
+	\end{tabu}
+	\vfill
+	\@dank
+	\end{titlepage}
+	}
diff --git a/Bachelorarbeit/V2/images/20200501_Time_of_flight.svg.png b/Bachelorarbeit/V2/images/20200501_Time_of_flight.svg.png
new file mode 100644
index 0000000000000000000000000000000000000000..4f7e978454738c4bbbe188118ea70749da08adfd
Binary files /dev/null and b/Bachelorarbeit/V2/images/20200501_Time_of_flight.svg.png differ
diff --git a/Bachelorarbeit/V2/images/AMA-22_dynamic.jpg b/Bachelorarbeit/V2/images/AMA-22_dynamic.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5bea2868e925de18083b8953bc265076d3ad4f63
Binary files /dev/null and b/Bachelorarbeit/V2/images/AMA-22_dynamic.jpg differ
diff --git a/Bachelorarbeit/V2/images/AMA-22_static.jpg b/Bachelorarbeit/V2/images/AMA-22_static.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b13712d768890b51a4b3a9eeec334f331f877d52
Binary files /dev/null and b/Bachelorarbeit/V2/images/AMA-22_static.jpg differ
diff --git a/Bachelorarbeit/V2/images/Angualar_distance.png b/Bachelorarbeit/V2/images/Angualar_distance.png
new file mode 100644
index 0000000000000000000000000000000000000000..9d1186396b61f5d0bc520e5689965c71f76b2cf8
Binary files /dev/null and b/Bachelorarbeit/V2/images/Angualar_distance.png differ
diff --git a/Bachelorarbeit/V2/images/Cobots-Forecast-Global-Market-1024x576.jpg b/Bachelorarbeit/V2/images/Cobots-Forecast-Global-Market-1024x576.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1353eccfba204b8a350fb55e58478a7c86b43aa8
Binary files /dev/null and b/Bachelorarbeit/V2/images/Cobots-Forecast-Global-Market-1024x576.jpg differ
diff --git a/Bachelorarbeit/V2/images/Cropped_Bracket.png b/Bachelorarbeit/V2/images/Cropped_Bracket.png
new file mode 100644
index 0000000000000000000000000000000000000000..d5fde9e5bfe30ce3521ba18ac7cbe66abb9027ab
Binary files /dev/null and b/Bachelorarbeit/V2/images/Cropped_Bracket.png differ
diff --git a/Bachelorarbeit/V2/images/DIN_EN_ISO-10218-2.png b/Bachelorarbeit/V2/images/DIN_EN_ISO-10218-2.png
new file mode 100644
index 0000000000000000000000000000000000000000..e95c4b68c20b2c7db602b5e6831954da03b4e1ac
Binary files /dev/null and b/Bachelorarbeit/V2/images/DIN_EN_ISO-10218-2.png differ
diff --git a/Bachelorarbeit/V2/images/Detailgrad_Nachweis.png b/Bachelorarbeit/V2/images/Detailgrad_Nachweis.png
new file mode 100644
index 0000000000000000000000000000000000000000..837188daa18ac58d37b8e434b1ab518cd987238c
Binary files /dev/null and b/Bachelorarbeit/V2/images/Detailgrad_Nachweis.png differ
diff --git a/Bachelorarbeit/V2/images/FlowChart_BA.png b/Bachelorarbeit/V2/images/FlowChart_BA.png
new file mode 100644
index 0000000000000000000000000000000000000000..c7b078d0011e8ca4f3db302736613db607ef3d08
Binary files /dev/null and b/Bachelorarbeit/V2/images/FlowChart_BA.png differ
diff --git a/Bachelorarbeit/V2/images/Gesamtaufbau_im_Institut.jpeg b/Bachelorarbeit/V2/images/Gesamtaufbau_im_Institut.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..8a13230b28741d742e4a2f11e96dff636f8026e5
Binary files /dev/null and b/Bachelorarbeit/V2/images/Gesamtaufbau_im_Institut.jpeg differ
diff --git a/Bachelorarbeit/V2/images/HER-18_freq.png b/Bachelorarbeit/V2/images/HER-18_freq.png
new file mode 100644
index 0000000000000000000000000000000000000000..157f31168c483d4a0e78e7f6887b1048657567c9
Binary files /dev/null and b/Bachelorarbeit/V2/images/HER-18_freq.png differ
diff --git a/Bachelorarbeit/V2/images/HER-18_tria.png b/Bachelorarbeit/V2/images/HER-18_tria.png
new file mode 100644
index 0000000000000000000000000000000000000000..e30df80723d240f79f3412646d0afb56c1c8eb72
Binary files /dev/null and b/Bachelorarbeit/V2/images/HER-18_tria.png differ
diff --git a/Bachelorarbeit/V2/images/HER-18_wave.png b/Bachelorarbeit/V2/images/HER-18_wave.png
new file mode 100644
index 0000000000000000000000000000000000000000..ae13a9b9c60855b57d9d2b11a4954a7304487ee7
Binary files /dev/null and b/Bachelorarbeit/V2/images/HER-18_wave.png differ
diff --git a/Bachelorarbeit/V2/images/HRC_FIS-23.png b/Bachelorarbeit/V2/images/HRC_FIS-23.png
new file mode 100644
index 0000000000000000000000000000000000000000..33ac6e3c78c7ec6a1842fc2d7a0442259c6f718c
Binary files /dev/null and b/Bachelorarbeit/V2/images/HRC_FIS-23.png differ
diff --git a/Bachelorarbeit/V2/images/Halterung_Seite.jpg b/Bachelorarbeit/V2/images/Halterung_Seite.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..488db86217f97e0d2a063a37d8fe790cf4d35be4
Binary files /dev/null and b/Bachelorarbeit/V2/images/Halterung_Seite.jpg differ
diff --git a/Bachelorarbeit/V2/images/Halterung_Seite_Oben.jpg b/Bachelorarbeit/V2/images/Halterung_Seite_Oben.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c1bd53a6d23460ddb13d9d29c7fcf11ba03cdece
Binary files /dev/null and b/Bachelorarbeit/V2/images/Halterung_Seite_Oben.jpg differ
diff --git "a/Bachelorarbeit/V2/images/Halterung_Seite_Oben_schr\303\244g.jpg" "b/Bachelorarbeit/V2/images/Halterung_Seite_Oben_schr\303\244g.jpg"
new file mode 100644
index 0000000000000000000000000000000000000000..39577a478c909f6911dcd0df695970437715794b
Binary files /dev/null and "b/Bachelorarbeit/V2/images/Halterung_Seite_Oben_schr\303\244g.jpg" differ
diff --git a/Bachelorarbeit/V2/images/Halterung_Sete_1.jpg b/Bachelorarbeit/V2/images/Halterung_Sete_1.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..be423756a2ff671a06ff1bf5583b842f190d1c4b
Binary files /dev/null and b/Bachelorarbeit/V2/images/Halterung_Sete_1.jpg differ
diff --git a/Bachelorarbeit/V2/images/Halterung_Top.jpg b/Bachelorarbeit/V2/images/Halterung_Top.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ffc024121ec87a633bda45b0b870e8b2fd1985db
Binary files /dev/null and b/Bachelorarbeit/V2/images/Halterung_Top.jpg differ
diff --git a/Bachelorarbeit/V2/images/Innere_der_Halterung.jpg b/Bachelorarbeit/V2/images/Innere_der_Halterung.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6e99fa101d6177c6c0ad7cc9da73de9b55fcb2d2
Binary files /dev/null and b/Bachelorarbeit/V2/images/Innere_der_Halterung.jpg differ
diff --git a/Bachelorarbeit/V2/images/Nachlaufzeit_UR10.png b/Bachelorarbeit/V2/images/Nachlaufzeit_UR10.png
new file mode 100644
index 0000000000000000000000000000000000000000..9fab5fe6b1cb997707bb0107dc4d7c558ee9fb1b
Binary files /dev/null and b/Bachelorarbeit/V2/images/Nachlaufzeit_UR10.png differ
diff --git a/Bachelorarbeit/V2/images/Optomechanical LiDAR.png b/Bachelorarbeit/V2/images/Optomechanical LiDAR.png
new file mode 100644
index 0000000000000000000000000000000000000000..242b387a1154a868fb94465384386ff4b63383cd
Binary files /dev/null and b/Bachelorarbeit/V2/images/Optomechanical LiDAR.png differ
diff --git a/Bachelorarbeit/V2/images/RAS-20_Graphik.png b/Bachelorarbeit/V2/images/RAS-20_Graphik.png
new file mode 100644
index 0000000000000000000000000000000000000000..87361c04e7e100d45f81303918e5a39f108d74ff
Binary files /dev/null and b/Bachelorarbeit/V2/images/RAS-20_Graphik.png differ
diff --git a/Bachelorarbeit/V2/images/SCH-19_Graphical representation of protective distance.png b/Bachelorarbeit/V2/images/SCH-19_Graphical representation of protective distance.png
new file mode 100644
index 0000000000000000000000000000000000000000..4a6182eae4dc1f9c947f2e56e3b1730382167edd
Binary files /dev/null and b/Bachelorarbeit/V2/images/SCH-19_Graphical representation of protective distance.png differ
diff --git a/Bachelorarbeit/V2/images/STM-24_ranging_Performance.png b/Bachelorarbeit/V2/images/STM-24_ranging_Performance.png
new file mode 100644
index 0000000000000000000000000000000000000000..d7a0b866192ab2b04704147b3bde7e3813464584
Binary files /dev/null and b/Bachelorarbeit/V2/images/STM-24_ranging_Performance.png differ
diff --git a/Bachelorarbeit/V2/images/STM-25_Detection_Volume.png b/Bachelorarbeit/V2/images/STM-25_Detection_Volume.png
new file mode 100644
index 0000000000000000000000000000000000000000..a61188c693042d0f427cb75e68ebffd3da99a56d
Binary files /dev/null and b/Bachelorarbeit/V2/images/STM-25_Detection_Volume.png differ
diff --git a/Bachelorarbeit/V2/images/STM-25_FOV.jpg b/Bachelorarbeit/V2/images/STM-25_FOV.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0e0b73f5eca04fb4046e97852d04856cabab7198
Binary files /dev/null and b/Bachelorarbeit/V2/images/STM-25_FOV.jpg differ
diff --git a/Bachelorarbeit/V2/images/Screenshot 2025-04-30 165111.png b/Bachelorarbeit/V2/images/Screenshot 2025-04-30 165111.png
new file mode 100644
index 0000000000000000000000000000000000000000..e6185bd65856d981f130fd014f0307f98567d050
Binary files /dev/null and b/Bachelorarbeit/V2/images/Screenshot 2025-04-30 165111.png differ
diff --git a/Bachelorarbeit/V2/images/Sensor_FOV_TOP.png b/Bachelorarbeit/V2/images/Sensor_FOV_TOP.png
new file mode 100644
index 0000000000000000000000000000000000000000..ec6dc0dfc74f2362ff267d6709a1c0f07452252d
Binary files /dev/null and b/Bachelorarbeit/V2/images/Sensor_FOV_TOP.png differ
diff --git a/Bachelorarbeit/V2/images/Sensor_holder_on_UR10e.jpg b/Bachelorarbeit/V2/images/Sensor_holder_on_UR10e.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..aafc1493ca4a8daeab4357d2f1f2b00ed31532cf
Binary files /dev/null and b/Bachelorarbeit/V2/images/Sensor_holder_on_UR10e.jpg differ
diff --git a/Bachelorarbeit/V2/images/Sensorhalterung_im_Institut.jpeg b/Bachelorarbeit/V2/images/Sensorhalterung_im_Institut.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..349cad1617ee610d9faf3195eecb5a39b1e6e18f
Binary files /dev/null and b/Bachelorarbeit/V2/images/Sensorhalterung_im_Institut.jpeg differ
diff --git a/Bachelorarbeit/V2/images/Topic_explained.png b/Bachelorarbeit/V2/images/Topic_explained.png
new file mode 100644
index 0000000000000000000000000000000000000000..0e2f4fcefcdbcd3c010748b93ef877e44bd3639c
Binary files /dev/null and b/Bachelorarbeit/V2/images/Topic_explained.png differ
diff --git a/Bachelorarbeit/V2/images/UR10_Angeled_Side_with_FOV_Pyramids.png b/Bachelorarbeit/V2/images/UR10_Angeled_Side_with_FOV_Pyramids.png
new file mode 100644
index 0000000000000000000000000000000000000000..4919f11607424d25adf7788060f1e1ee3ef025fb
Binary files /dev/null and b/Bachelorarbeit/V2/images/UR10_Angeled_Side_with_FOV_Pyramids.png differ
diff --git a/Bachelorarbeit/V2/images/UR10_FOV_Pyramids.png b/Bachelorarbeit/V2/images/UR10_FOV_Pyramids.png
new file mode 100644
index 0000000000000000000000000000000000000000..bd12cd4ba4050c0dd390c3d8636d1830eef43999
Binary files /dev/null and b/Bachelorarbeit/V2/images/UR10_FOV_Pyramids.png differ
diff --git a/Bachelorarbeit/V2/images/UR10_Side_with_FOV_Pyramids.png b/Bachelorarbeit/V2/images/UR10_Side_with_FOV_Pyramids.png
new file mode 100644
index 0000000000000000000000000000000000000000..164e5a6be3f7971c52d1820e3b2da571245cfd8e
Binary files /dev/null and b/Bachelorarbeit/V2/images/UR10_Side_with_FOV_Pyramids.png differ
diff --git a/Bachelorarbeit/V2/images/UR10_with_FOV_Pyramids_Home.png b/Bachelorarbeit/V2/images/UR10_with_FOV_Pyramids_Home.png
new file mode 100644
index 0000000000000000000000000000000000000000..5af0ed817c622a64047557370a9332307edf4092
Binary files /dev/null and b/Bachelorarbeit/V2/images/UR10_with_FOV_Pyramids_Home.png differ
diff --git a/Bachelorarbeit/V2/images/VL53L7CX_Package.jpg b/Bachelorarbeit/V2/images/VL53L7CX_Package.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9476edec29559e3069db7856f890bdb5aab484ba
Binary files /dev/null and b/Bachelorarbeit/V2/images/VL53L7CX_Package.jpg differ
diff --git a/Bachelorarbeit/V2/images/VL53L7CX_Performance_Table.png b/Bachelorarbeit/V2/images/VL53L7CX_Performance_Table.png
new file mode 100644
index 0000000000000000000000000000000000000000..86a1ec541797b906f5c2d7f31e9b2ddbae7a10a6
Binary files /dev/null and b/Bachelorarbeit/V2/images/VL53L7CX_Performance_Table.png differ
diff --git a/Bachelorarbeit/V2/images/VL53L7CX_Raster.png b/Bachelorarbeit/V2/images/VL53L7CX_Raster.png
new file mode 100644
index 0000000000000000000000000000000000000000..9babfa3e59af7a18d8b295e660224ed1389cdb40
Binary files /dev/null and b/Bachelorarbeit/V2/images/VL53L7CX_Raster.png differ
diff --git a/Bachelorarbeit/V2/images/VL53L7CX_range_accuracy.png b/Bachelorarbeit/V2/images/VL53L7CX_range_accuracy.png
new file mode 100644
index 0000000000000000000000000000000000000000..1c7b17c350722ed02a1770e455dc7ed323284a06
Binary files /dev/null and b/Bachelorarbeit/V2/images/VL53L7CX_range_accuracy.png differ
diff --git a/Bachelorarbeit/V2/images/Versuchsaufbau_mit_VL53L5CX.jpg b/Bachelorarbeit/V2/images/Versuchsaufbau_mit_VL53L5CX.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1cf8f97b245f89a7c5c33529a51150e6cf8f16c8
Binary files /dev/null and b/Bachelorarbeit/V2/images/Versuchsaufbau_mit_VL53L5CX.jpg differ
diff --git a/Bachelorarbeit/V2/images/Wire_Schematic.png b/Bachelorarbeit/V2/images/Wire_Schematic.png
new file mode 100644
index 0000000000000000000000000000000000000000..46781025d876fd652eb2912f9768b203163fa45e
Binary files /dev/null and b/Bachelorarbeit/V2/images/Wire_Schematic.png differ
diff --git a/Bachelorarbeit/V2/images/Wiring_Schematic.png b/Bachelorarbeit/V2/images/Wiring_Schematic.png
new file mode 100644
index 0000000000000000000000000000000000000000..4ef3e1b46448d3843064e6ce2f464d65c7d34338
Binary files /dev/null and b/Bachelorarbeit/V2/images/Wiring_Schematic.png differ
diff --git a/Bachelorarbeit/V2/images/sensormodul.jpg b/Bachelorarbeit/V2/images/sensormodul.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1a41f2d901fd521de500f17f345bf729458ca84c
Binary files /dev/null and b/Bachelorarbeit/V2/images/sensormodul.jpg differ
diff --git a/Bachelorarbeit/V2/images/two_pcd.jpg b/Bachelorarbeit/V2/images/two_pcd.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..118059e102d3baebf6bff0613739e47352d59510
Binary files /dev/null and b/Bachelorarbeit/V2/images/two_pcd.jpg differ
diff --git a/Bachelorarbeit/V2/moveit_stop_node.py b/Bachelorarbeit/V2/moveit_stop_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..f8c2f853566a6fc5546a8cd0a1c51008aa49ebbc
--- /dev/null
+++ b/Bachelorarbeit/V2/moveit_stop_node.py
@@ -0,0 +1,151 @@
+import rclpy
+from rclpy.node import Node
+from sensor_msgs.msg import PointCloud2
+from tf2_ros import TransformListener, Buffer
+import numpy as np
+import trimesh
+from std_msgs.msg import String
+import sensor_msgs_py.point_cloud2 as pc2
+from std_msgs.msg import Header
+from sensor_msgs.msg import PointField
+
+class PointCloudProcessor(Node):
+    def __init__(self):
+        super().__init__('moveit_stop_node')
+
+        self.subscription = self.create_subscription(
+            PointCloud2,
+            'valid_from_perspective',
+            self.pointcloud_callback,
+            10)
+        
+        self.cancellation_pub = self.create_publisher(
+            String, "/trajectory_execution_event", 1
+        )
+        self.out_zone_pub = self.create_publisher(
+            PointCloud2,
+            '/out_safe_zone',
+            10)
+        self.in_zone_pub = self.create_publisher(
+            PointCloud2,
+            '/in_safe_zone',
+            10)
+        self.tf_buffer = Buffer()
+        self.tf_listener = TransformListener(self.tf_buffer, self)
+
+        # Create safety-zone cylinders ~5cm larger in radius than the filter node's collision cylinders
+        self.meshes_static = [
+            trimesh.creation.cylinder(radius=0.3, height=0.8),  # Cylinder for upper_arm_link
+            trimesh.creation.cylinder(radius=0.3, height=1.05)  # Cylinder for forearm_link
+        ]
+
+        # Rotate cylinders 90 degrees around the y-axis
+        rotation_matrix = trimesh.transformations.rotation_matrix(
+            angle=np.pi / 2,  # 90 degrees in radians
+            direction=[0, 1, 0]  # Rotation around the y-axis
+        )
+        for mesh in self.meshes_static:
+            mesh.apply_transform(rotation_matrix)
+
+        # Translate cylinders 35cm in the negative x direction
+        translation_matrix_x = trimesh.transformations.translation_matrix([-0.35, 0, 0])
+        for mesh in self.meshes_static:
+            mesh.apply_transform(translation_matrix_x)
+
+        # Move the upper_arm_link cylinder a further 15cm in the negative x direction
+        translation_matrix_upper_arm = trimesh.transformations.translation_matrix([-0.15, 0, 0])
+        self.meshes_static[0].apply_transform(translation_matrix_upper_arm)
+
+        # Move the forearm_link cylinder 5cm in the positive x direction
+        translation_matrix_forearm = trimesh.transformations.translation_matrix([0.05, 0, 0])
+        self.meshes_static[1].apply_transform(translation_matrix_forearm)
+
+        # Move cylinders 10cm in the positive z direction
+        translation_matrix_z = trimesh.transformations.translation_matrix([0, 0, 0.1])
+        for mesh in self.meshes_static:
+            mesh.apply_transform(translation_matrix_z)
+
+        # Threshold for stopping the trajectory
+        self.point_threshold = 2  # Adjust as needed
+
+    def pointcloud_callback(self, msg):
+        self.perspective_frame = 'vl53l7cx_link'
+        try:
+            now = rclpy.time.Time()
+            transforms = {
+                'shoulder': self.tf_buffer.lookup_transform(self.perspective_frame, 'upper_arm_link', now),
+                'forearm': self.tf_buffer.lookup_transform(self.perspective_frame, 'forearm_link', now)
+            }
+        except Exception as e:
+            self.get_logger().error(f'Error in looking up transform: {e}')
+            return
+
+        transformed_meshes = []
+        for i, transform in enumerate(transforms.values()):
+            transformed_meshes.append(self.transform_mesh(transform, self.meshes_static[i]))
+
+        points = np.array([(point['x'], point['y'], point['z']) for point in pc2.read_points(msg, field_names=("x", "y", "z"), skip_nans=True)], dtype=np.float64)
+
+        inside_robot = np.logical_or.reduce([mesh.contains(points) for mesh in transformed_meshes])
+        points_inside_robot = points[inside_robot]
+
+        self.get_logger().info(f'Number of points to close to robot: {len(points_inside_robot)}')
+
+        # Publish the point clouds
+        self.out_zone_pub.publish(self.create_point_cloud(points[~inside_robot], self.perspective_frame))
+        self.in_zone_pub.publish(self.create_point_cloud(points_inside_robot, self.perspective_frame))
+        if len(points_inside_robot) > self.point_threshold:
+            self.stop_trajectory()
+
+    def transform_mesh(self, transform, mesh_static):
+        translation = transform.transform.translation
+        rotation = transform.transform.rotation
+
+        translation_matrix = trimesh.transformations.translation_matrix([translation.x, translation.y, translation.z])
+        rotation_matrix = trimesh.transformations.quaternion_matrix([rotation.w, rotation.x, rotation.y, rotation.z])
+        transformation_matrix = np.dot(translation_matrix, rotation_matrix)
+
+        transformed_mesh = mesh_static.copy()
+        transformed_mesh.apply_transform(transformation_matrix)
+        return transformed_mesh
+
+    def stop_trajectory(self):
+        self.get_logger().info("Stopping trajectory execution due to point cloud threshold.")
+        cancel_string = String()
+        cancel_string.data = "stop"
+        self.cancellation_pub.publish(cancel_string)
+    
+    def create_point_cloud(self, points, parent_frame):
+        """
+        Create a PointCloud2 message from the 3D points.
+        """
+        ros_dtype = PointField.FLOAT32  # ROS 2 data type for floating-point numbers
+        dtype = np.float32  # NumPy data type for floating-point numbers
+        itemsize = np.dtype(dtype).itemsize  # Size of each data item in bytes
+        
+        data = points.astype(dtype).tobytes()  # Convert points to binary data
+
+        fields = [PointField(name=n, offset=i * itemsize, datatype=ros_dtype, count=1)
+                  for i, n in enumerate('xyz')]  # Define the fields for x, y, and z
+
+        header = Header(frame_id=parent_frame)  # Create a header with the parent frame
+        return PointCloud2(
+            header=header,
+            height=1,  # Single row of points
+            width=points.shape[0],  # Number of points
+            is_dense=True,  # Declare the cloud free of invalid (NaN/Inf) points
+            is_bigendian=False,  # Data is in little-endian format
+            fields=fields,  # Fields for x, y, and z
+            point_step=(itemsize * 3),  # Size of each point in bytes
+            row_step=(itemsize * 3 * points.shape[0]),  # Size of each row in bytes
+            data=data  # Binary data for the points
+        )
+def main(args=None):
+    rclpy.init(args=args)
+    pointcloud_processor = PointCloudProcessor()
+    rclpy.spin(pointcloud_processor)
+    pointcloud_processor.destroy_node()
+    rclpy.shutdown()
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/pcl_filter_node.py b/Bachelorarbeit/V2/pcl_filter_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a568b4b15a5bb1116c0c5ffb30b9020180c80b0
--- /dev/null
+++ b/Bachelorarbeit/V2/pcl_filter_node.py
@@ -0,0 +1,142 @@
+import rclpy
+from rclpy.node import Node
+from sensor_msgs.msg import PointCloud2
+from tf2_ros import TransformListener, Buffer
+import numpy as np
+import trimesh
+from std_msgs.msg import Header
+import sensor_msgs.msg as sensor_msgs
+import sensor_msgs_py.point_cloud2 as pc2
+
+class PointCloudProcessor(Node):
+    def __init__(self):
+        super().__init__('pcl_filter_node')
+        self.subscription = self.create_subscription(
+            PointCloud2,
+            'pcl',
+            self.pointcloud_callback,
+            10)
+        self.publisher_valid = self.create_publisher(PointCloud2, '/valid_from_perspective', 10)
+        self.publisher_invalid = self.create_publisher(PointCloud2, '/invalid_from_perspective', 10)
+
+        self.tf_buffer = Buffer()
+        self.tf_listener = TransformListener(self.tf_buffer, self)
+
+        # Create cylinders
+        self.meshes_static = [
+            trimesh.creation.cylinder(radius=0.25, height=0.9),  # Cylinder for upper_arm_link (10 cm shorter)
+            trimesh.creation.cylinder(radius=0.25, height=1.0)   # Cylinder for forearm_link
+        ]
+
+        # Rotate cylinders 90 degrees around the y-axis
+        rotation_matrix = trimesh.transformations.rotation_matrix(
+            angle=np.pi / 2,  # 90 degrees in radians
+            direction=[0, 1, 0]  # Rotation around the y-axis
+        )
+        for mesh in self.meshes_static:
+            mesh.apply_transform(rotation_matrix)
+
+        # Translate cylinders 35cm in the negative x direction
+        translation_matrix_x = trimesh.transformations.translation_matrix([-0.35, 0, 0])
+        for mesh in self.meshes_static:
+            mesh.apply_transform(translation_matrix_x)
+
+        # Move the upper_arm_link cylinder 10cm in the positive x direction
+        translation_matrix_upper_arm = trimesh.transformations.translation_matrix([0.1, 0, 0])
+        self.meshes_static[0].apply_transform(translation_matrix_upper_arm)
+
+        # Keep the forearm_link cylinder at its current x position (zero offset, kept for symmetry)
+        translation_forearm = trimesh.transformations.translation_matrix([0, 0, 0])
+        self.meshes_static[1].apply_transform(translation_forearm)
+
+        # Move cylinders 10cm in the positive z direction
+        translation_matrix_z = trimesh.transformations.translation_matrix([0, 0, 0.1])
+        for mesh in self.meshes_static:
+            mesh.apply_transform(translation_matrix_z)
+
+    def visualize_meshes(self, meshes):
+        if meshes:  # Check if there are meshes to visualize
+            scene = trimesh.Scene()
+            for mesh in meshes:
+                scene.add_geometry(mesh)
+            scene.show()
+        else:
+            self.get_logger().info('No meshes to visualize.')
+
+    def pointcloud_callback(self, msg):
+        self.perspective_frame = 'vl53l7cx_link'
+        try:
+            now = rclpy.time.Time()
+            transforms = {
+                'shoulder': self.tf_buffer.lookup_transform(self.perspective_frame, 'upper_arm_link', now),
+                'forearm': self.tf_buffer.lookup_transform(self.perspective_frame, 'forearm_link', now)
+            }
+        except Exception as e:
+            self.get_logger().error(f'Error in looking up transform: {e}')
+            return
+
+        transformed_meshes = []
+        for i, transform in enumerate(transforms.values()):
+            transformed_meshes.append(self.transform_mesh(transform, self.meshes_static[i]))
+
+        #self.visualize_meshes(transformed_meshes)
+
+        points = np.array([(point['x'], point['y'], point['z']) for point in pc2.read_points(msg, field_names=("x", "y", "z"), skip_nans=True)], dtype=np.float64)
+
+        inside_robot = np.logical_or.reduce([mesh.contains(points) for mesh in transformed_meshes])
+        outside_robot = np.logical_not(inside_robot)
+
+        points_outside_robot = points[outside_robot]
+        points_inside_robot = points[inside_robot]
+
+        self.publish_point_cloud(points_outside_robot, points_inside_robot)
+        self.get_logger().info(f'Number of points outside robot: {len(points_outside_robot)}')
+        self.get_logger().info(f'Number of points inside robot: {len(points_inside_robot)}')
+
+    def transform_mesh(self, transform, mesh_static):
+        translation = transform.transform.translation
+        rotation = transform.transform.rotation
+
+        translation_matrix = trimesh.transformations.translation_matrix([translation.x, translation.y, translation.z])
+        rotation_matrix = trimesh.transformations.quaternion_matrix([rotation.w, rotation.x, rotation.y, rotation.z])
+        transformation_matrix = np.dot(translation_matrix, rotation_matrix)
+
+        transformed_mesh = mesh_static.copy()
+        transformed_mesh.apply_transform(transformation_matrix)
+        return transformed_mesh
+
+    def publish_point_cloud(self, points_valid, points_invalid):
+        self.get_logger().info(f'Publishing {len(points_valid)} valid points.')
+        self.publisher_valid.publish(self.point_cloud(points_valid, self.perspective_frame))
+        self.publisher_invalid.publish(self.point_cloud(points_invalid, self.perspective_frame))
+
+    def point_cloud(self, points, parent_frame):
+        ros_dtype = sensor_msgs.PointField.FLOAT32
+        dtype = np.float32
+        itemsize = np.dtype(dtype).itemsize
+
+        data = points.astype(dtype).tobytes()
+        fields = [sensor_msgs.PointField(name=n, offset=i * itemsize, datatype=ros_dtype, count=1) for i, n in enumerate('xyz')]
+        header = Header(frame_id=parent_frame)
+
+        return sensor_msgs.PointCloud2(
+            header=header,
+            height=1,
+            width=points.shape[0],
+            is_dense=False,
+            is_bigendian=False,
+            fields=fields,
+            point_step=(itemsize * 3),
+            row_step=(itemsize * 3 * points.shape[0]),
+            data=data
+        )
+
+def main(args=None):
+    rclpy.init(args=args)
+    pointcloud_processor = PointCloudProcessor()
+    rclpy.spin(pointcloud_processor)
+    pointcloud_processor.destroy_node()
+    rclpy.shutdown()
+
+if __name__ == '__main__':
+    main()
diff --git a/Bachelorarbeit/V2/pcl_rob_node.py b/Bachelorarbeit/V2/pcl_rob_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..b4a0ae15abf208ff28cc2ab8f13ad63bda2469cd
--- /dev/null
+++ b/Bachelorarbeit/V2/pcl_rob_node.py
@@ -0,0 +1,234 @@
+import rclpy
+import rclpy.duration
+from rclpy.node import Node
+from sensor_msgs.msg import PointCloud2
+from tf2_ros import TransformListener, Buffer
+import numpy as np
+import trimesh
+import std_msgs.msg
+import sensor_msgs.msg as sensor_msgs
+import sensor_msgs_py.point_cloud2 as pc2
+from std_msgs.msg import Header
+from scipy.spatial.transform import Rotation as R
+import os
+
+class PointCloudProcessor(Node):
+    def __init__(self):
+        super().__init__('pointcloud_processor')
+        self.subscription = self.create_subscription(
+            PointCloud2,
+            'pcd',
+            self.pointcloud_callback,
+            10)
+        self.subscription  # prevent unused variable warning
+        self.publisher_valid = self.create_publisher(PointCloud2, '/valid_from_perspective', 10)
+        self.publisher_invalid = self.create_publisher(PointCloud2, '/invalid_from_perspective', 10)
+
+        self.tf_buffer = Buffer()
+        self.tf_listener = TransformListener(self.tf_buffer, self)
+
+        base_path = os.path.join(os.path.expanduser('~'), 'robot-sensor', 'workspaces', 'COLCON_WS', 'src', 'ur_description', 'meshes', 'ur10e', 'collision_custom')
+        self.mesh_shoulder_link_static = trimesh.load_mesh(os.path.join(base_path, 'shoulder.stl'))
+        self.mesh_forearm_link_static = trimesh.load_mesh(os.path.join(base_path, 'forearm.stl'))
+        self.mesh_upperarm_link_static = trimesh.load_mesh(os.path.join(base_path, 'upperarm.stl'))
+        self.mesh_wrist1_link_static = trimesh.load_mesh(os.path.join(base_path, 'wrist1.stl'))
+        self.mesh_wrist2_link_static = trimesh.load_mesh(os.path.join(base_path, 'wrist2.stl'))
+        self.mesh_wrist3_link_static = trimesh.load_mesh(os.path.join(base_path, 'wrist3.stl'))
+
+        base_path_collision = os.path.join(os.path.expanduser('~'), 'robot-sensor', 'workspaces', 'COLCON_WS', 'src', 'ur_description', 'meshes', 'ur10e', 'collision')
+        self.mesh_base_link_static = trimesh.load_mesh(os.path.join(base_path_collision, 'base.stl'))
+
+        self.meshes_static = [
+            self.mesh_shoulder_link_static,
+            self.mesh_base_link_static,
+            self.mesh_forearm_link_static,
+            self.mesh_upperarm_link_static,
+            self.mesh_wrist1_link_static,
+            self.mesh_wrist2_link_static,
+            self.mesh_wrist3_link_static
+        ]
+
+        # Scale the meshes only in z and y
+        scale_matrix = np.eye(4)
+        scale_matrix[1, 1] = 2  # Scale y by 2
+        scale_matrix[2, 2] = 2  # Scale z by 2
+        for mesh in self.meshes_static:
+            mesh.apply_transform(scale_matrix)
+
+    def visualize_meshes(self,meshes):
+        scene = trimesh.Scene()
+        for mesh in meshes:
+            scene.add_geometry(mesh)
+        scene.show()
+
+    def pointcloud_callback(self, msg):
+        self.perspective_frame ='vl53l7cx_link'
+        
+        try:
+            self.target_frame = 'vl53l7cx_link'
+            now = rclpy.time.Time()
+            transform_base = self.tf_buffer.lookup_transform(self.target_frame, 'world', now)
+            transform_shoulder = self.tf_buffer.lookup_transform(self.target_frame, 'shoulder_link', now)
+            transform_upperarm = self.tf_buffer.lookup_transform(self.target_frame, 'upper_arm_link', now)
+            transform_forearm = self.tf_buffer.lookup_transform(self.target_frame, 'forearm_link', now)
+            #transform_vl53l7cx = self.tf_buffer.lookup_transform(self.target_frame, 'vl53l7cx_link', now)
+            transform_wrist1 = self.tf_buffer.lookup_transform(self.target_frame, 'wrist_1_link', now)
+            transform_wrist2 = self.tf_buffer.lookup_transform(self.target_frame, 'wrist_2_link', now)
+            transform_wrist3 = self.tf_buffer.lookup_transform(self.target_frame, 'wrist_3_link', now)
+        except Exception as e:
+            self.get_logger().error(f'Error in looking up transform: {e}')
+            return
+
+
+        self.mesh_shoulder_link = self.transform_mesh(transform_shoulder, self.mesh_shoulder_link_static)
+        self.mesh_forearm_link = self.transform_mesh(transform_forearm, self.mesh_forearm_link_static)
+        self.mesh_base_link = self.transform_mesh(transform_base, self.mesh_base_link_static)
+        self.mesh_upperarm_link = self.transform_mesh(transform_upperarm, self.mesh_upperarm_link_static)
+        self.mesh_wrist1_link = self.transform_mesh(transform_wrist1, self.mesh_wrist1_link_static)
+        self.mesh_wrist2_link = self.transform_mesh(transform_wrist2, self.mesh_wrist2_link_static)
+        self.mesh_wrist3_link = self.transform_mesh(transform_wrist3, self.mesh_wrist3_link_static)
+        # Move the upperarm mesh in the negative z direction by 17 centimeters
+        translation_matrix = trimesh.transformations.translation_matrix([0, 0, -0.17])
+        self.mesh_upperarm_link.apply_transform(translation_matrix)
+        # Move the forearm mesh in the negative z direction by 5 centimeters
+        translation_matrix_forearm = trimesh.transformations.translation_matrix([0, 0, -0.05])
+        self.mesh_forearm_link.apply_transform(translation_matrix_forearm)
+        self.meshes = [
+            self.mesh_shoulder_link,
+            self.mesh_base_link,
+            self.mesh_forearm_link,
+            self.mesh_upperarm_link,
+            self.mesh_wrist1_link,
+            self.mesh_wrist2_link,
+            self.mesh_wrist3_link
+        ]
+
+        #self.visualize_meshes(self.meshes)
+        
+        self.transformed_points_base = self.do_transform_cloud(msg, transform_base.transform)
+        self.points = np.array([(point['x'], point['y'], point['z']) for point in pc2.read_points(msg, field_names=("x", "y", "z"), skip_nans=True)], dtype=np.float64)
+
+        inside_base = self.mesh_base_link.contains(self.points)
+        inside_forearm = self.mesh_forearm_link.contains(self.points)
+        inside_upperarm = self.mesh_upperarm_link.contains(self.points)
+        inside_wrist1 = self.mesh_wrist1_link.contains(self.points)
+        inside_wrist2 = self.mesh_wrist2_link.contains(self.points)
+        inside_wrist3 = self.mesh_wrist3_link.contains(self.points)
+        inside_shoulder = self.mesh_shoulder_link.contains(self.points)
+        inside_robot = np.logical_or.reduce((
+            inside_base,
+            inside_forearm,
+            inside_upperarm,
+            inside_wrist1,
+            inside_wrist2,
+            inside_wrist3,
+            inside_shoulder
+        ))
+        outside_robot = np.logical_not(inside_robot)
+        self.points =np.asanyarray(self.points)
+        #points_valid = np.hstack((self.transformed_points_base, inside_shoulder.reshape(10000,1)))
+        points_outside_robot = self.points[outside_robot]
+        points_inside_robot = self.points[inside_robot]
+        self.publish_point_cloud(points_outside_robot, points_inside_robot)
+        self.get_logger().info(f'Number of points outside robot: {len(points_outside_robot)}')
+        self.get_logger().info(f'Number of points inside robot: {len(points_inside_robot)}')
+    def do_transform_cloud(self, cloud, trans):
+        # Convert the cloud to a numpy array
+        quaternion = (trans.rotation.x, trans.rotation.y, trans.rotation.z, trans.rotation.w)
+        rotation = R.from_quat(quaternion)
+        self.points = pc2.read_points(cloud, skip_nans=True)
+        list_of_tuples = [tuple(row) for row in self.points]
+
+        points = []
+        for point in list_of_tuples:
+            point = rotation.apply(point)
+            point[0] = point[0] + trans.translation.x
+            point[1] = point[1] + trans.translation.y
+            point[2] = point[2] + trans.translation.z
+            points.append(point)
+        return points
+    
+    def transform_mesh(self, transform, mesh_static):
+        # Extract the translation and rotation from the transform
+        translation = transform.transform.translation
+        rotation = transform.transform.rotation
+
+        # Create a translation matrix
+        translation_matrix = trimesh.transformations.translation_matrix([translation.x, translation.y, translation.z])
+
+        # Create a rotation matrix from the quaternion
+        rotation_matrix = trimesh.transformations.quaternion_matrix([rotation.w, rotation.x, rotation.y, rotation.z])
+
+        # Combine the translation and rotation matrices into a single transformation matrix
+        transformation_matrix = np.dot(translation_matrix, rotation_matrix)
+        # Apply the transformation matrix to the mesh
+        transformed_mesh = mesh_static.copy()
+        transformed_mesh.apply_transform(transformation_matrix)
+        return transformed_mesh
+
+    def publish_point_cloud(self,points_valid,points_invalid):
+
+        # Create PointCloud2 message
+        header = std_msgs.msg.Header()
+        header.frame_id = self.perspective_frame  # The frame ID (replace with your robot's frame)
+        # Publish the message
+        self.publisher_valid.publish(self.point_cloud(points_valid, self.perspective_frame))
+        self.publisher_invalid.publish(self.point_cloud(points_invalid, self.perspective_frame))
+
+    def point_cloud(self, points, parent_frame):
+        """ Creates a point cloud message.
+        Args:
+            points: Nx3 array of xyz positions.
+            parent_frame: frame in which the point cloud is defined
+        Returns:
+            sensor_msgs/PointCloud2 message
+
+        Code source:
+            https://gist.github.com/pgorczak/5c717baa44479fa064eb8d33ea4587e0
+
+        References:
+            http://docs.ros.org/melodic/api/sensor_msgs/html/msg/PointCloud2.html
+            http://docs.ros.org/melodic/api/sensor_msgs/html/msg/PointField.html
+            http://docs.ros.org/melodic/api/std_msgs/html/msg/Header.html
+
+        """
+        # In a PointCloud2 message, the point cloud is stored as an byte 
+        # array. In order to unpack it, we also include some parameters 
+        # which desribes the size of each individual point.
+        ros_dtype = sensor_msgs.PointField.FLOAT32
+        dtype = np.float32
+        itemsize = np.dtype(dtype).itemsize # A 32-bit float takes 4 bytes.
+
+        data = points.astype(dtype).tobytes() 
+
+        # The fields specify what the bytes represents. The first 4 bytes 
+        # represents the x-coordinate, the next 4 the y-coordinate, etc.
+        fields = [sensor_msgs.PointField(
+            name=n, offset=i*itemsize, datatype=ros_dtype, count=1)
+            for i, n in enumerate('xyz')]
+
+        # The PointCloud2 message also has a header which specifies which 
+        # coordinate frame it is represented in. 
+        header = Header(frame_id=parent_frame)
+
+        return sensor_msgs.PointCloud2(
+            header=header,
+            height=1, 
+            width=points.shape[0],
+            is_dense=False,
+            is_bigendian=False,
+            fields=fields,
+            point_step=(itemsize * 3), # Every point consists of three float32s.
+            row_step=(itemsize * 3 * points.shape[0]),
+            data=data
+        )
+
+def main(args=None):
+    rclpy.init(args=args)
+    pointcloud_processor = PointCloudProcessor()
+    rclpy.spin(pointcloud_processor)
+    pointcloud_processor.destroy_node()
+    rclpy.shutdown()
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/Bachelorarbeit/V2/ser_test_node.py b/Bachelorarbeit/V2/ser_test_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..360eab863ba44dc1a31f07e6fa8fa25ef9628605
--- /dev/null
+++ b/Bachelorarbeit/V2/ser_test_node.py
@@ -0,0 +1,146 @@
+import serial
+import rclpy
+import json
+import numpy as np
+from rclpy.node import Node
+import sensor_msgs.msg as sensor_msgs
+from std_msgs.msg import Header
+# Per-row/column beam angles (degrees) used to project each 8x8 distance
+# frame into y/z offsets; values > 180 address the mirrored half of the FoV.
+angles = [19.923,14.515,8.829,2.964,182.964,188.829,194.515,199.923]
+# NOTE(review): module-level accumulator; not referenced anywhere in this
+# file (the node uses self.points) — confirm before removing.
+points = np.empty((0, 0))
+# Fixed 45-degree rotation angle in radians.
+theta = np.pi / 4  # 45 degrees
+# Rotation matrix around the z-axis for the angle above.
+# NOTE(review): shadowed by the matrix rebuilt inside
+# create_pcd_from_distance(); appears unused at module level — verify.
+rotation_matrix = np.array([
+    [np.cos(theta), -np.sin(theta), 0],
+    [np.sin(theta), np.cos(theta),  0],
+    [0,             0,              1]
+])
+
+class SerialListPublisher(Node):
+    """Read JSON frames of 8x8 distance samples from a serial port and
+    publish them as one merged PointCloud2 on the 'pcd' topic."""
+
+    def __init__(self, serial_port='/dev/ttyACM0', baudrate=115200):
+        # The serial port or baudrate may have to be edited when changing host PC.
+        super().__init__('serial_list_publisher')
+        self.pcd_publisher = self.create_publisher(sensor_msgs.PointCloud2, 'pcd', 10)
+        # timeout=None makes readline() block until a complete line arrives.
+        self.serial_port = serial.Serial(serial_port, baudrate, timeout=None)
+        self.get_logger().info(f"Connected to {serial_port} at {baudrate} baud")
+        # NOTE(review): this call never returns, so __init__ blocks here for
+        # the node's entire lifetime; the node is driven by the read loop.
+        self.read_serial_and_publish()
+
+    def read_serial_and_publish(self):
+        """Blocking loop: parse one JSON document per serial line, convert
+        each sensor's 64 distances to xyz points, stack and publish them."""
+        try:
+            while rclpy.ok():
+                line = self.serial_port.readline().decode('utf-8').strip()  # Read one line from the serial port
+                data = json.loads(line)  # dict: sensor key -> flat list of 64 distances
+                points_all=[]  # One 64x3 array per sensor is collected here
+                count = 0  # Sensor index; sensor N is rotated N*45 deg about z below
+                for key, array in data.items():  # Iterate over the per-sensor arrays
+                    # Reshape the 64-entry list to 8x8 and project it to 3D,
+                    # rotated by this sensor's mounting angle about the z-axis.
+                    points_all.append(self.create_pcd_from_distance(np.array(array).reshape(8,8), count*45))
+                    count = count + 1
+                # Stack the per-sensor 64x3 arrays into one (count*64)x3 array.
+                self.points = np.vstack(points_all)
+                # Parent frame is hard-coded; may need to be parameterized later.
+                self.pcd = self.point_cloud(self.points, 'vl53l7cx_link')
+                self.pcd_publisher.publish(self.pcd)
+
+        except serial.SerialException as e:
+            self.get_logger().error(f"Serial error: {e}")
+        except KeyboardInterrupt:
+            self.get_logger().info("Shutting down...")
+        finally:
+            self.serial_port.close()
+
+    def create_pcd_from_distance(self, distance, rotation_angle):
+        """Convert an 8x8 distance frame (millimetres) into a 64x3 xyz array
+        in metres, rotated for this sensor's mounting orientation.
+
+        Args:
+            distance: 8x8 array of raw distance readings in millimetres.
+            rotation_angle: rotation about the z-axis in degrees.
+        Returns:
+            64x3 numpy array of points in metres (also stored in self.points).
+        """
+        # Rotation about z by the sensor's mounting angle.
+        theta = np.radians(rotation_angle)
+        rotation_matrix = np.array([
+            [np.cos(theta), -np.sin(theta), 0],
+            [np.sin(theta), np.cos(theta),  0],
+            [0,             0,              1]
+        ])
+        # Fixed 90-degree rotation about y applied to every sensor.
+        rotation_matrix_90_y = np.array([
+            [0, 0, 1],
+            [0, 1, 0],
+            [-1, 0, 0]
+        ])
+        # Coordinate component lists, filled column-by-column below.
+        x_collum = []
+        z_collum = []
+        y_collum = []
+        # NOTE(review): distance[:][0] is just distance[0] (length 8 for an
+        # 8x8 frame) — it works here but reads misleadingly; consider
+        # distance.shape in a future change.
+        for j in range(len(distance[:][0])):
+            for i in range(len(distance[0])):
+                x = (distance[i][j])
+                x_collum.append(x)
+
+                # Project the range reading onto y/z via the per-cell beam angles.
+                z = x * np.sin(np.radians(angles[j]))
+                y = x * np.sin(np.radians(angles[i]))
+
+                z_collum.append(z)
+                y_collum.append(y)             
+        # NOTE(review): astype(int) truncates sub-millimetre precision before
+        # the /1000 conversion below — confirm this loss is intended.
+        z_collum = np.array(z_collum).reshape(64,1).astype(int)
+        y_collum = np.array(y_collum).reshape(64,1).astype(int)
+        x_collum = np.array(x_collum).reshape(64,1).astype(int)
+        # Assemble 64x3 points (mm -> m), then rotate about z and about y.
+        self.points = np.dot(np.hstack((x_collum, y_collum, z_collum ))/1000, rotation_matrix.T)
+        self.points = np.dot(self.points, rotation_matrix_90_y.T)
+        return self.points
+
+    def point_cloud(self, points, parent_frame):
+        """ Creates a point cloud message.
+        Args:
+            points: Nx3 array of xyz positions.
+            parent_frame: frame in which the point cloud is defined
+        Returns:
+            sensor_msgs/PointCloud2 message
+
+        Code source:
+            https://gist.github.com/pgorczak/5c717baa44479fa064eb8d33ea4587e0
+
+        References:
+            http://docs.ros.org/melodic/api/sensor_msgs/html/msg/PointCloud2.html
+            http://docs.ros.org/melodic/api/sensor_msgs/html/msg/PointField.html
+            http://docs.ros.org/melodic/api/std_msgs/html/msg/Header.html
+
+        """
+        # In a PointCloud2 message, the point cloud is stored as a byte
+        # array; the parameters below describe the layout of each point.
+        ros_dtype = sensor_msgs.PointField.FLOAT32
+        dtype = np.float32
+        itemsize = np.dtype(dtype).itemsize # A 32-bit float takes 4 bytes.
+
+        data = points.astype(dtype).tobytes() 
+
+        # The fields specify what the bytes represent: the first 4 bytes
+        # are the x-coordinate, the next 4 the y-coordinate, etc.
+        fields = [sensor_msgs.PointField(
+            name=n, offset=i*itemsize, datatype=ros_dtype, count=1)
+            for i, n in enumerate('xyz')]
+
+        # The header specifies which coordinate frame the cloud is in.
+        header = Header(frame_id=parent_frame)
+
+        return sensor_msgs.PointCloud2(
+            header=header,
+            height=1, 
+            width=points.shape[0],
+            is_dense=False,
+            is_bigendian=False,
+            fields=fields,
+            point_step=(itemsize * 3), # Every point consists of three float32s.
+            row_step=(itemsize * 3 * points.shape[0]),
+            data=data
+        )
+
+
+def main(args=None):
+    rclpy.init(args=args)
+    node = SerialListPublisher(serial_port='/dev/ttyACM0', baudrate= 115200)
+    try:
+        rclpy.spin(node)
+    except KeyboardInterrupt:
+        node.get_logger().info("Node stopped by user.")
+    finally:
+        node.destroy_node()
+        rclpy.shutdown()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/Bachelorarbeit/V2/serial_to_pcl_node.py b/Bachelorarbeit/V2/serial_to_pcl_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..c90f25e3a9e8077b1b94eda746628fca41e8dd16
--- /dev/null
+++ b/Bachelorarbeit/V2/serial_to_pcl_node.py
@@ -0,0 +1,276 @@
+import serial  # For serial communication with the sensor
+import rclpy  # ROS 2 Python client library
+import json  # For parsing JSON data from the sensor
+import numpy as np  # For numerical operations
+from rclpy.node import Node  # Base class for ROS 2 nodes
+from sensor_msgs.msg import PointCloud2, PointField  # ROS 2 message types for point clouds
+from std_msgs.msg import Header  # ROS 2 message type for headers
+
+# Per-row/column beam angles (degrees) used to project the 8x8 distance grid
+# into y/z offsets; values > 180 address the mirrored half of the FoV.
+ANGLES = [29.923, 22.515, 15.829, 8.964, 1.964, 188.964, 195.829, 202.515, 209.923]
+
+# Sensor key -> [[pitch step], [yaw step]] placement indices; each step is
+# multiplied by 60 degrees in get_sensor_angles().
+SENSOR_DATA = {
+    "sensor0": [[5], [1]], "sensor1": [[0], [1]], "sensor2": [[1], [1]],
+    "sensor3": [[1], [0]], "sensor4": [[0], [0]], "sensor5": [[5], [0]],
+    "sensor6": [[5], [-1]], "sensor7": [[0], [-1]], "sensor8": [[1], [-1]],
+    "sensor9": [[2], [-1]], "sensor10": [[3], [-1]], "sensor11": [[4], [-1]],
+    "sensor12": [[4], [0]], "sensor13": [[3], [0]], "sensor14": [[2], [0]],
+    "sensor15": [[2], [1]], "sensor16": [[3], [1]], "sensor17": [[4], [1]],
+}
+
+# Mounting offsets in metres (0.085 m and 0.06 m); which component is used
+# depends on the sensor's yaw step in calculate_translation().
+TRANSLATION_VECTOR = np.array([0.085, 0.06, 0])
+
+
+class SerialListPublisher(Node):
+    """
+    A ROS 2 node that reads JSON distance frames from a serial port,
+    converts them to 3D points per sensor, and publishes a PointCloud2.
+    """
+    def __init__(self, serial_port='/dev/ttyACM0', baudrate=1000000):
+        # NOTE(review): the default baudrate here (1000000) differs from the
+        # 115200 that main() passes — confirm which one the firmware expects.
+        super().__init__('serial_to_pcl_node')  # Initialize the ROS 2 node with a name
+        self.declare_parameter('parent_frame', 'vl53l7cx_link')  # Frame the cloud is published in
+        self.parent_frame = self.get_parameter('parent_frame').get_parameter_value().string_value  # Get the parameter value
+        self.declare_parameter('usb_port', serial_port)
+        serial_port = self.get_parameter('usb_port').get_parameter_value().string_value
+        # Create a publisher for PointCloud2 messages
+        self.pcd_publisher = self.create_publisher(PointCloud2, 'pcl', 10)
+
+        # Open the serial port; timeout=None makes readline() block.
+        self.serial_port = serial.Serial(serial_port, baudrate, timeout=None)
+        self.get_logger().info(f"Connected to {serial_port} at {baudrate} baud")  # Log the connection details
+
+        # NOTE(review): this call never returns, so __init__ blocks for the
+        # node's entire lifetime; the node is driven by the read loop.
+        self.read_serial_and_publish()
+
+    def read_serial_and_publish(self):
+        """
+        Continuously read data from the serial port, process it, and publish it as a PointCloud2 message.
+        """
+        try:
+            while rclpy.ok():  # Keep running while ROS 2 is active
+                line = self.serial_port.readline().decode('utf-8').strip()  # Read a line from the serial port
+                data = self.parse_json(line)  # Parse the JSON data
+                if not data:  # Skip if the data is invalid
+                    continue
+                points_all = []  # Initialize an empty list for points
+
+                # Each measurement is a [distance, status] pair. Keep the
+                # distance only for accepted status codes; otherwise substitute
+                # a huge distance so the point is dropped by distance_filter().
+                # NOTE(review): 5/6/9/12 are presumably the VL53L7CX target
+                # status codes treated as valid — verify against the datasheet.
+                filtered_data = {}
+                for key, array in data.items():
+                    filtered_array = []
+                    for pair in array:
+                        if pair[1] not in [5,6,9,12]:
+                            filtered_array.append([99999])
+                        else:
+                            filtered_array.append([pair[0]])
+                    filtered_data[key] = filtered_array
+                # Process the data for each sensor and create a point cloud
+                points_all = []
+                for key, array in filtered_data.items():
+                    distances = [pair[0] for pair in array if len(pair) > 0]
+                    if len(distances) == 64:  # Ensure the array has exactly 64 elements
+                        reshaped_array = np.array(distances).reshape(8, 8)
+                        points_all.append(self.create_pcd_from_distance(reshaped_array, key))
+                points = None  # NOTE(review): dead store; overwritten on the next line
+                # NOTE(review): np.vstack raises on an empty points_all (no
+                # sensor delivered 64 samples) — consider falling back to
+                # create_empty_point_cloud() in that case.
+                points = np.vstack(points_all)  # Combine all points into a single array
+                points = self.distance_filter(points, 3)  # Drop points beyond the 3 m threshold
+                pcd = self.create_point_cloud(points, self.parent_frame)  # Create a PointCloud2 message
+                self.pcd_publisher.publish(pcd)  # Publish the point cloud
+
+        except serial.SerialException as e:
+            self.get_logger().error(f"Serial error: {e}")  # Log serial communication errors
+        except KeyboardInterrupt:
+            self.get_logger().info("Shutting down...")  # Log when the node is stopped by the user
+        finally:
+            self.serial_port.close()  # Close the serial port when done
+
+    def parse_json(self, line):
+        """
+        Parse a JSON string from the serial port.
+        Returns the parsed dict, or None on malformed/unexpected input.
+        """
+        try:
+            data = json.loads(line)  # Parse the JSON string
+            if not isinstance(data, dict):  # Ensure the data is a dictionary
+                self.get_logger().info(f"Unexpected JSON format: {data}")
+                return None
+            return data
+        except json.JSONDecodeError as e:
+            self.get_logger().warn(f"JSON decode error: {e} | Line: {line}")  # Log JSON parsing errors
+            return None
+
+    def create_pcd_from_distance(self, distance, sensor_key):
+        """
+        Create a 64x3 point array (metres) from one sensor's 8x8 distance
+        frame (millimetres), posed by the sensor's mounting transform.
+        """
+        x, y, z = self.calculate_coordinates(distance)  # Calculate the 3D coordinates
+        points_local = np.hstack((x, y, z)) / 1000  # Convert mm coordinates to metres
+
+        # Get the yaw and pitch angles for the sensor
+        yaw_deg, pitch_deg = self.get_sensor_angles(sensor_key)
+        points_local = self.apply_transformations(points_local, yaw_deg, pitch_deg, sensor_key)  # Apply transformations
+        return points_local
+
+    def calculate_coordinates(self, distance):
+        """
+        Project the 8x8 distance grid to x/y/z columns using the per-cell
+        beam angles in ANGLES; returns three 64x1 arrays.
+        """
+        x, y, z = [], [], []
+        for j in range(distance.shape[1]):  # Iterate over columns
+            for i in range(distance.shape[0]):  # Iterate over rows
+                dist = distance[i, j]  # Get the distance value
+                z.append(dist * np.sin(np.radians(ANGLES[j])))  # Calculate z-coordinate
+                y.append(dist * np.sin(np.radians(ANGLES[i])))  # Calculate y-coordinate
+                x.append(dist)  # x-coordinate is the distance itself
+        return np.array(x).reshape(-1, 1), np.array(y).reshape(-1, 1), np.array(z).reshape(-1, 1)
+
+    def get_sensor_angles(self, sensor_key):
+        """
+        Get the yaw and pitch angles (degrees) for a specific sensor:
+        60 degrees per placement step from SENSOR_DATA.
+        """
+        yaw_deg = SENSOR_DATA[sensor_key][1][0] * 60  # Calculate yaw angle
+        pitch_deg = SENSOR_DATA[sensor_key][0][0] * 60  # Calculate pitch angle
+        return yaw_deg, pitch_deg
+
+    def apply_transformations(self, points, yaw_deg, pitch_deg, sensor_key):
+        """
+        Apply rotation and translation transformations to the points.
+        Rotation order: -90 deg about x, then yaw about y, then pitch about z.
+        """
+        yaw_rad, pitch_rad = np.radians([yaw_deg, pitch_deg])  # Convert angles to radians
+
+        # Rotation matrices for x, y, and z axes
+        Rx = self.rotation_matrix_x(-np.pi / 2)
+        Ry = self.rotation_matrix_y(yaw_rad)
+        Rz = self.rotation_matrix_z(pitch_rad)
+
+        # Apply the rotations
+        points = (Rx @ points.T).T
+        points = (Ry @ points.T).T
+        points = (Rz @ points.T).T
+
+        # Calculate the translation vector
+        translation = self.calculate_translation(yaw_rad, pitch_rad, sensor_key)
+        return points + translation  # Apply the translation
+
+    @staticmethod
+    def rotation_matrix_x(angle):
+        """
+        Create a rotation matrix for rotation around the x-axis (angle in radians).
+        """
+        return np.array([
+            [1, 0, 0],
+            [0, np.cos(angle), -np.sin(angle)],
+            [0, np.sin(angle), np.cos(angle)]
+        ])
+
+    @staticmethod
+    def rotation_matrix_y(angle):
+        """
+        Create a rotation matrix for rotation around the y-axis (angle in radians).
+        """
+        return np.array([
+            [np.cos(angle), 0, np.sin(angle)],
+            [0, 1, 0],
+            [-np.sin(angle), 0, np.cos(angle)]
+        ])
+
+    @staticmethod
+    def rotation_matrix_z(angle):
+        """
+        Create a rotation matrix for rotation around the z-axis (angle in radians).
+        """
+        return np.array([
+            [np.cos(angle), -np.sin(angle), 0],
+            [np.sin(angle), np.cos(angle), 0],
+            [0, 0, 1]
+        ])
+
+    def calculate_translation(self, yaw_rad, pitch_rad, sensor_key):
+        """
+        Calculate the translation vector from yaw and pitch.
+        The offset depends on the sensor's yaw step in SENSOR_DATA:
+        0 -> in-plane offset only; +1/-1 -> smaller offset plus a z shift
+        (opposite signs); any other step -> no translation.
+        """
+        if SENSOR_DATA[sensor_key][1][0] == 0:  # Middle ring: in-plane offset only
+            tx = TRANSLATION_VECTOR[0] * np.cos(pitch_rad) # x-component of translation
+            ty = TRANSLATION_VECTOR[0] * np.sin(pitch_rad)  # y-component of translation
+            tz = 0
+            return np.array([tx, ty, tz])  # Return the translation vector
+        if SENSOR_DATA[sensor_key][1][0] == 1:
+            tx = TRANSLATION_VECTOR[1] * np.cos(pitch_rad)
+            ty = TRANSLATION_VECTOR[1] * np.sin(pitch_rad)
+            tz = -(TRANSLATION_VECTOR[1] * np.cos(yaw_rad))  # z-component of translation
+            return np.array([tx, ty, tz])
+        if SENSOR_DATA[sensor_key][1][0] == -1:
+            tx = TRANSLATION_VECTOR[1] * np.cos(pitch_rad)
+            ty = TRANSLATION_VECTOR[1] * np.sin(pitch_rad)
+            tz = (TRANSLATION_VECTOR[1] * np.cos(yaw_rad))
+            return np.array([tx, ty, tz])
+        else:
+            return np.array([0, 0, 0])  # No translation for other sensors
+
+    def distance_filter(self, points, threshold):
+        """
+        Keep only points whose Euclidean norm is below threshold
+        (same units as points — metres here).
+        """
+        mask = np.linalg.norm(points, axis=1) < threshold
+        return points[mask]  # Return only the points within the threshold
+
+    def create_point_cloud(self, points, parent_frame):
+        """
+        Create a PointCloud2 message from an Nx3 array of 3D points.
+        """
+        ros_dtype = PointField.FLOAT32  # ROS 2 data type for floating-point numbers
+        dtype = np.float32  # NumPy data type for floating-point numbers
+        itemsize = np.dtype(dtype).itemsize  # Size of each data item in bytes
+        
+        data = points.astype(dtype).tobytes()  # Convert points to binary data
+
+        fields = [PointField(name=n, offset=i * itemsize, datatype=ros_dtype, count=1)
+                  for i, n in enumerate('xyz')]  # Define the fields for x, y, and z
+
+        header = Header(frame_id=parent_frame)  # Create a header with the parent frame
+        return PointCloud2(
+            header=header,
+            height=1,  # Single row of points
+            width=points.shape[0],  # Number of points
+            is_dense=True,  # No invalid points expected after distance_filter()
+            is_bigendian=False,  # Data is in little-endian format
+            fields=fields,  # Fields for x, y, and z
+            point_step=(itemsize * 3),  # Size of each point in bytes
+            row_step=(itemsize * 3 * points.shape[0]),  # Size of each row in bytes
+            data=data  # Binary data for the points
+        )
+    def create_empty_point_cloud(self):
+        """
+        Create an empty PointCloud2 message.
+        NOTE(review): not referenced anywhere in this file — kept for future use?
+        """
+        header = Header(frame_id=self.parent_frame)
+        return PointCloud2(
+            header=header,
+            height=1,
+            width=0,
+            is_dense=True,
+            is_bigendian=False,
+            fields=[],
+            point_step=0,
+            row_step=0,
+            data=b''
+        )
+
+def main(args=None):
+    """
+    Main function to initialize and run the ROS 2 node.
+    """
+    rclpy.init(args=args)  # Initialize the ROS 2 Python client library
+    node = SerialListPublisher(serial_port='/dev/ttyACM0', baudrate=115200)  # Create the node
+    try:
+        rclpy.spin(node)  # Keep the node running
+    except KeyboardInterrupt:
+        node.get_logger().info("Node stopped by user.")  # Log when the node is stopped
+    finally:
+        node.destroy_node()  # Destroy the node
+        rclpy.shutdown()  # Shut down the ROS 2 client library
+
+
+if __name__ == '__main__':
+    main()  # Run the main function
diff --git a/Bachelorarbeit/V2/text.tex b/Bachelorarbeit/V2/text.tex
new file mode 100644
index 0000000000000000000000000000000000000000..01e88e759e024ff0b012dcc75421c205b940637c
--- /dev/null
+++ b/Bachelorarbeit/V2/text.tex
@@ -0,0 +1,261 @@
+
+\chapter{Einleitung}
+  % Warum wurde das Thema gewählt?
+Kollaborative Roboter werden mit der Entwicklung in Richtung Industrie 5.0 immer relevanter, da sie unter anderem die Kollaboration von Mensch und Maschine ermöglichen \cite{vogel-heuser_von_2023}. Dies ist aber nur gewährleistet, wenn von Robotern keine Gefahr für den Menschen ausgeht.
+\\Vor allem in der Zusammenarbeit mit körperlich beeinträchtigten Menschen erweisen sich \acrfull{Cobots} als hilfreich, da sie die Handhabung von Arbeitsmaterial erleichtern können \cite{noauthor_iidea_nodate}.
+\\Auch aufgrund einer alternden Gesellschaft, in der das Durchschnittsalter der arbeitenden Bevölkerung immer weiter zunimmt, können \acrshort{Cobots} für eine große Entlastung sorgen, indem sie repetitive Aufgaben, die einen Menschen physisch belasten würden, übernehmen \cite{haddadin_physical_2016}.
+\\Aus diesen genannten Gründen benötigt man Robotersysteme, die die Zusammenarbeit zwischen Mensch und Maschine effizient und sicher gestaltbar machen.
+\\Aktuell werden Kollisionen von \acrshort{Cobots} mit Menschen oder Objekten häufig mithilfe intrinsischer Sensoren detektiert\cite{popov_collision_2017} oder durch eine räumliche Trennung von Mensch und Maschine verhindert. 
+\\Wenn eine räumliche Trennung jedoch nicht möglich ist und trotzdem keine Kollision auftreten darf, um einen effizienten und sicheren Arbeitsablauf zu gewährleisten, kann das in dieser Bachelorarbeit vorgestellte Sensorsystem eine Lösung bieten.
+% Was ist das Ziel des Berichts?
+\\Das Ziel des Sensorsystems ist es, ergänzend zur Kollisionsdetektion, die meisten Kollisionen zu vermeiden. 
+Das Vermeiden von Kollisionen ermöglicht es dem \acrshort{Cobot}, nach einer vermiedenen Kollision eine neue Route zu planen und auszuführen, um seine Aufgabe trotz der beinahe Kollision zu erfüllen.
+% Welche Schlussfolgerungen wurden basierend auf den Ergebnissen gezogen?
+\\Das Sensorsystem, das in dieser Arbeit vorgestellt wird, beschränkt sich auf das Vermeiden von Kollisionen durch das Stoppen der aktuellen Bewegung. 
+% Welche Bedeutung haben sie für das Thema oder für die Praxis?
+\\Durch die Kollisionsvermeidung kann ein \acrshort{Cobot} autonomer arbeiten und so die Effizienz und Sicherheit in der Produktion erhöht werden.
+	
+\section{Zielsetzung}
+	In vorangegangenen Arbeiten wurden bereits unterschiedliche Sensoren, Sensorpositionierungen und Kommunikationsschnittstellen bewertet. Basierend auf diesen Bewertungen wurde ein Rahmen für diese Arbeit entworfen, der im Kapitel ``Stand der Technik'' näher erläutert wird.  
+	\\Das Ziel dieser Arbeit ist es, mithilfe von exterozeptiven Abstandssensoren aus der Ego-Perspektive eine vollständige, digitale, dreidimensionale Abbildung der Umgebung eines seriellen Roboters zu generieren.  
+	Unter "vollständig" wird in diesem Fall verstanden, dass die Abbildung genügend Informationen enthält, um eine Kollision mit einem Objekt im Arbeitsraum zu verhindern.  
+	\\Diese digitale Repräsentation des Arbeitsraums soll den Roboter selbst ausschließen.  
+	Unter Arbeitsraum versteht man im Allgemeinen den Raum, der von einem Roboter erreicht werden kann.  
+	\\Zum aktuellen Zeitpunkt muss nicht zwischen dem Ziel einer Manipulation und einem anderen Objekt unterschieden werden – mit Ausnahme des Roboters selbst.
+	
+\section{Forschungsfragen}
+	Aus den vorgelegten Zielen lassen sich folgende Forschungsfragen definieren:
+		\begin{itemize}
+			\item Lässt sich mit exterozeptiven Sensoren eine Kollision mit einem Menschen verhindern?
+			\item Wie erreicht man eine vollständige Arbeitsraumüberwachung eines seriellen Roboters mit exterozeptiven ToF-Sensoren aus der Ego-Perspektive?
+			\item Wie wertet man die Sensordaten aus, wenn die Sensoren nicht ortsfest sind?
+			\item Wie unterscheidet man Objekte im Arbeitsraum von dem seriellen Roboter selbst?
+		\end{itemize}
+	 
+
+\section{Aufbau der Arbeit}
+	Zunächst wird in dieser Arbeit auf alternative Lösungen zum Erreichen von Kollisionsvermeidung eingegangenen. Im Anschluss werden die Anforderungen an das Sensorsystem näher definiert. Im Kapitel ``Umsetzung'' wird daraufhin erläutert wie die Anforderungen erfüllt werden. Im letzten Kapitel wird festgehalten, ob die Anforderungen erfüllt wurden und es wird ein Ausblick gegeben wie das System verbessert werden kann.
+
+\cleardoublepage
+\chapter{Stand der Technik}
+
+	\begin{figure}[h]
+		\centering
+		\includegraphics[scale=0.5]{images/Cobots-Forecast-Global-Market-1024x576.jpg}
+		\caption{Market Outlook for Cobots. Source:\cite{noauthor_can_nodate}}
+		\label{Cobot Growth}
+	\end{figure}
+	
+	Zum anderen geht mit der industriellen Revolution -- Industrie 4.0 -- ein steigender Bedarf an personalisierten Produkten einher, der das Etablieren von Routinen in der Produktion unattraktiv macht. Das bedeutet, dass \acrshort{Cobots} in der Wahrnehmung ihrer Umgebung besser werden müssen, damit man sie universeller einsetzbar machen kann.\cite{liu_application_2024} 
+	\section{Kollisionsvermeidung und Kollisionserkennung}
+		%Warum ist Kollisionsvermeidung und Kollisionserkennung überhaupt wichtig?
+		Roboterarme müssen große Kräfte aufbringen können, um Lasten zu heben. Ein Roboterarm ohne Sensoren hat keine Möglichkeit zu erkennen, ob er die erwünschte Pose des Endeffektors erreicht hat. Außerdem wird die Kraft, die von einem Roboterarm auf ein Objekt ausgeübt wird nur von der Motorleistung beschränkt, was Gefahren für Mensch und Material birgt.
+		\\ 
+		%Paper zur Kollisionsdetektion nur mit Drehgeber und Drehmoment-Wächter 
+		Mit einem Drehgeber kann ein serieller Roboter den absolut Winkel der Gelenke bestimmen und dadurch mit anderen geometrischen Eigenschaften der Kinematik die Absolut Position der einzelnen Achsen bestimmen. Durch die Verwendung von Drehmoment-Wächtern und Angaben zu Schwerpunkten, Gewicht und Pose der einzelnen Achsen kann ein Soll-Drehmoment für jedes Gelenk in Drehrichtung bestimmt werden und mit dem Ist-Drehmoment verglichen werden. Unter der Voraussetzung das der Endeffektor kein Objekt gegriffen hat kann man davon ausgehen, dass es bei einer Abweichung, die nicht auf Beschleunigungen zurück zu führen ist, zu einer Kollision gekommen ist.\cite{popov_collision_2017} 
+		\\
+		Drehgeber und Drehmoment-Wächter sind propriozeptive Sensoren und können nur Eigenschaften messen die sich auf den inneren Status des Roboterarms beziehen. \cite{noauthor_robotics_2021}
+		\\
+		%Unterschiedliche Paper zum Stand dere Technik in der Kollisionsvermeidung
+		\indent Bei der Kollisionsvermeidung kann man unterschiedliche Ansätze verfolgen. 
+		Eine der Optionen ist mit einer Wärmebild Kamera einen Menschen zu identifizieren und dann mit dem Bild einer \acrfull{RGB-D} zu überlagern um die Position des Menschen im Arbeitsraum zu bestimmen.\cite{al_naser_fusion_2022}
+		\\
+		In einem anderen Paper wurde Kollisionsvermeidung erreicht in dem man den Roboter langsamer werden ließ wenn er einem Menschen zu nahe kam. Die Distanz hat man gemessen in dem man mit einer \acrfull{KI} den Video Input von einer Kinect Kamera ausgewertet hat.\cite{amaya-mejia_vision-based_2022}
+		\\
+		Eine Methode zu Kollisionsvermeidung die meinem Projekt ähnelt, bestimmt die Distanz zwischen Mensch und Maschine in dem es die Sensordaten von \acrfull{LIDAR}-Sensor, \acrfull{RGB} und Drehgeber Sensor fusioniert. Das ist besonders hilfreich bei Anwendungen mit schwer last Robotern, deren Bremsweg zu lang wäre, um sich auf Kollisionserkennung zu verlassen. \cite{rashid_local_2020}  
+	\newpage	
+	\section{Laserscanner (LiDAR)}
+		%Wie funktionieren Laserscanner?
+		Laserscanner machen sich unterschiedliche Physikalische Prinzipien zu nutze, um Abstände zu messen. Im Folgenden werde ich auf drei Vorgehensweisen näher eingehen. 
+		\\
+		Die \acrshort{ToFs} messen die Zeit, die ein Emittierter Lichtimpuls  braucht, um zu einer Oberfläche zu "fliegen", von der Oberfläche reflektiert zu werden und vom Receiver (in Abbildung 2: unten) des Scanners wieder wahrgenommen zu werden.
+		Mit Lichtgeschwindigkeit und der Zeit lässt sich dann der gesamte Weg des Lichtes errechnen und durch halbieren des Weges lässt sich dann die Distanz zum Ziel bestimmen.
+		\begin{figure}[h]
+			\centering
+			\includegraphics[scale=0.15]{images/20200501_Time_of_flight.svg.png}
+			\caption{The way a ToF-Sensor works, Source:\cite{noauthor_file20200501_2020}}
+			\label{ToF Explained}
+		\end{figure}
+		\\
+		Die zweite Methode zum Messen von Abständen mit Lasern macht sich die optische Interferenz zunutze. Es werden zwei Lichtimpulse mit unterschiedlicher Wellenlänge emittiert, und basierend darauf, inwieweit sich die Lichtwellen auf dem Weg vom Sensor zur Reflexionsfläche und zurück gegenseitig verstärken oder auslöschen, kann die Distanz ermittelt werden.
+		\\
+		Bei der dritten Methode zur Abstandsbestimmung mit Lasern wird der Laserstrahl geneigt und basierend darauf wo der Laser auf einem "großflächigen" Detektor auftrifft kann der Abstand zwischen Sensor und Messobjekt trianguliert werden.
+		\cite{li_common_2019} \cite{jain_survey_nodate}
+		\\ \indent
+		%Welche Technischen Möglichkeiten gibt es um aus 1-Dimensionalen ToF-Sensoren  eine räumliche Wahrnehmung zu generiren?
+		Im Weiteren werde ich mich auf die Fortschritte der Laserscanner, die sich das Time-of-Flight Prinzip zu eigen machen fokussieren. \cite{raj_survey_2020}
+		\\
+		Um aus einem 1-Dimensionalen \acrshort{ToF} eine räumliche Wahrnehmung zu schaffen muss man mehrere Punkte messen und in einem Referenz Koordinatensystem beschreiben. Als Koordinaten Ursprung bietet sich zur Simplifizierung die Position des Sensors selbst an.
+		Wenn man nun die Orientierung des Sensormoduls ändert und die Absolut Position des \acrfull{LRF} bei behält, dann kann man mit genügend Punkten  und mit dem richtigen Werkzeugen zur Visualisierung ein 3-Dimensionales Abbild der Umgebung schaffen.
+		Beim Optomechanischen Scannen wird die Orientierung des Emitters und des Empfängers selbst nicht verändert, sondern mit Hilfen von Spiegeln und Prismen wird der ausgehende Laserstrahl und die eingehende Reflektion abgelenkt, um die Distanz von unterschiedlichen Punkten zum Sensor zu messen. In Abbildung 3 kann man zwei Beispiele für einen Optomechanischen 2D-\acrshort{LIDAR} sehen. Der Grundsätzliche Aufbau von den beiden ist ähnlich. Man verwendet in beiden \acrshort{LIDAR} einen um 45° zu seiner Drehachse geneigten Spiegel, um den Laserstrahl und seine Reflektion abzulenken. In Abbildung 6a hat man den Spiegel unterhalb des Emitters und Empfängers positioniert und man hat im Gegensatz zu dem Aufbau in Abbildung in 6b den Emitter und Empfänger 90° versetz zu einander angeordnet.\cite{raj_survey_2020}  
+		\begin{figure}[h]
+			\centering
+			\includegraphics[scale=0.1]{images/Optomechanical LiDAR.png}
+			\caption{Beispiel für einen Optomechanischen 2D-\acrshort{LIDAR}, Source:\cite{raj_survey_2020}}
+			\label{Opto LiDAR Example}
+		\end{figure}
+		\\
+		Beim Elektromechanischen Scannen verwendet man Servomotoren um mit einem 2-Dimensionalen Optomechanischen Scanner eine 3-Dimensionale Aufnahme der Umgebung zu schaffen. Das haben \cite{surmann_autonomous_2003} erreicht in dem sie 2D-Scanner im Ganzen um eine seiner Achsen haben rotieren lassen.
+		\\
+		Beim Scannen mit \acrfull{MEMS} verwendet man ein Spiegel Array zum Ablenken, des Laserstrahls und seiner Reflektion. Jedoch konnte bei \cite{niclass_design_2012} nur eine geringe \acrfull{FOV} von 15° in der Vertikale und 11° in der Horizontalen erreicht werden. Zudem wird in \cite{raj_survey_2020} erwähnt, dass eine \acrshort{FOV} von 40° in der Diagonalen nicht übertroffen werden konnte und dass \acrshort{MEMS} basierte \acrshort{LIDAR} anfällig gegenüber Vibrationen sind.
+		\\
+		Die letzte Art von \acrshort{ToF}, die ich im Rahmen dieses Kapitels berücksichtigen möchte, ist der Solid-State-Scanner. Der Vorteil von dieser Art Scanner ist, dass er keine sich bewegenden Teile besitzt und große Scanraten von bis zu 600000 Punkten/s erreichen kann.\cite{noauthor_vlp_nodate} 
+		Der Sensor, den ich im Rahmen meines Praxisprojekts verwende, fällt auch in die Kategorie der Solid-State-Scanner. Der VL53L7CX ist ein Flash-Sensor, das bedeutet, dass die Leuchtdiode des Sensors einen ungerichteten Lichtimpuls aussendet. Die räumliche Wahrnehmung des Sensors erreicht man durch den speziellen Aufbau des Empfängers. Der Detektor besitzt 64 Kanäle, die alle etwas unterschiedlich ausgerichtet sind. Dadurch sehen die einzelnen Kanäle nur ihre eigenen festgelegten Richtungen innerhalb einer 40° horizontalen und 40° vertikalen \acrshort{FOV}\cite{noauthor_vl53l7cx_nodate}. In Abbildung 4 kann man das Package vom \acrfull{IC} des VL53L7CX sehen.
+		\\
+		\begin{figure}[h]
+			\centering
+			\includegraphics[scale=0.2]{images/VL53L7CX_Package.jpg}
+			\caption{Package eines VL53L7CX, Source:\cite{noauthor_vl53l7cx_nodate}}
+			\label{VL53L7CX Package}
+		\end{figure}
+\section{Rahmen}
+\begin{itemize}
+	\item Es sollen einer oder mehrere \acrshort{ToFs} verwendet werden.
+	\item Die Sensoren sollen am Roboter montiert sein, um eine Ego-Perspektive zu erreichen.
+\end{itemize}   
+\chapter{Anforderungen}
+
+	Zunächst werden hier die Allgemeinen Anforderungen im groben definiert um dann im Anschluss die Anforderungen näher zu definieren.	
+		\begin{itemize}
+			\item Der Detailgrad der Informationen über den Arbeitsraum des Roboters reicht aus, um einen Menschen wahrzunehmen.
+			\item Die Informationen sind so aktuell, dass man damit eine Kollision rechtzeitig verhindern kann.
+		\end{itemize}	  
+\chapter{Umsetzung}
+	\section{Vorgehensweise}
+		Um erst mal herauszufinden, ob mein Vorhaben möglich ist habe ich auf Github nach ähnlichen Projekten gesucht. \cite{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate} Da ich in meinem Projekt mehrere Sensoren verwenden will, wurde mir von meiner externen Betreuerin, Sophie Charlotte Keunecke, für die Umsetzung der Raspberry Pi Pico \cite{noauthor_pico-series_nodate} \acrfull{MCU} vorgeschlagen, da dieser \acrshort{MCU} zwei \acrfull{I2C} Interfaces besitzt. Zur Visualisierung der Daten will ich das \acrfull{RVIZ} Programm auf einem \acrfull{NUC} mit  Ubuntu 22.04 mit \acrfull{ROS} Humble benutzen.
+		\\
+		Bei meinem ersten Versuch einen Sensor zu initialisieren und Daten auszulesen habe ich ein Beispiel aus der VL53L5CX Bibliothek von SparkFun verwendet.\cite{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025} Später bin ich dann zu der offiziellen Arduino Bibliothek vom VL53L7CX von STMicroelectronics gewechselt, da ich mich entschieden hatte vom VL53l5CX auf den VL53L7CX zu wechseln.
+		Der VL53L7CX hat eine größere horizontale und vertikale \acrshort{FOV} von $60^\circ$.\cite{noauthor_vl53l7cx_nodate}
+		
+	\section{Software}
+		\subsection{Arduino}
+		Die folgende Software steuert mehrere \acrshort{ToFs} vom Typ VL53L7CX mit einem \acrshort{MCU}. 
+		Die Sensoren kommunizieren über \acrshort{I2C}, wobei zwei separate \acrshort{I2C}-Busse DEV\_I2C0 und DEV\_I2C1 verwendet werden. 
+		Die Sensordaten werden in einem \acrfull{JSON}-Format verarbeitet und über die serielle Schnittstelle ausgegeben.
+		\\
+		Das Projekt verwendet die folgenden Bibliotheken:
+		\begin{itemize}
+			\setlength{\itemsep}{0pt}  % Reduces space between items
+			\item Arduino.h: Grundlegende Arduino-Funktionalität
+			\item Wire.h: I2C-Kommunikation
+			\item ArduinoJson.h: Serialisierung der Sensordaten in \acrshort{JSON}-Format
+			\item vl53l7cx\_class.h: Ansteuerung der VL53L7CX-Sensoren
+			\item 74HC154.h: Steuerung des Multiplexers
+		\end{itemize}
+		Im Folgenden möchte ich auf einige Funktionen aus dem C++ eingehen.
+		Die Funktion aus \ref{Snippet LPN} steuert die \acrshort{LPN} Pins der Sensoren, um die \acrshort{I2C}-Kommunikation im Low-Power-Mode an- oder auszuschalten.
+		Sie setzt die entsprechenden Bits in zwei 8-Bit-Bytes und sendet diese über shiftOut() an ein Schieberegister.
+		\begin{lstlisting}[caption={Funktion zum setzen eines LPn Pin},label= Snippet LPN,style=customcpp]
+			void LPn(uint16_t dataPin, uint16_t latchPin, uint16_t clockPin, uint16_t Pin, bool on) {
+				uint8_t Pin_byte1 = 0b00000000;
+				uint8_t Pin_byte2 = 0b00000000;
+				
+				if (Pin <= 7) {
+					Pin_byte1 |= (on << Pin);
+				} else if (Pin <= 15) {
+					Pin_byte2 |= (on << (Pin - 8));
+				}
+				
+				digitalWrite(latchPin, LOW);
+				shiftOut(dataPin, clockPin, MSBFIRST, Pin_byte2);
+				shiftOut(dataPin, clockPin, MSBFIRST, Pin_byte1);
+				digitalWrite(latchPin, HIGH);
+			}
+		\end{lstlisting}
+		Die Funktion in \ref{Snippet Init} aktiviert den Sensor, setzt die \acrshort{I2C}-Adresse, die Auflösung und die Messfrequenz. Nach der Konfiguration wird das Ranging gestartet. Als Parameter benötigt die Funktion ein Objekt der Klasse VL53L7CX, eine neue \acrshort{I2C}-Adresse und den \acrshort{LPN}-Pin des Sensors, der initialisiert werden soll.
+		\begin{lstlisting}[caption={Funktion zum initialisieren der Sensoren im Setup},label=Snippet Init,style=customcpp]
+			void initializeSensorInterface0(VL53L7CX &sensor, uint16_t sensorAddress, int lpnPin) {
+				LPn(dataPin0, latchPin0, clockPin0, lpnPin, true); // Activate sensor power
+				delay(wait_for_i2c);
+				sensor.begin();
+				delay(wait_for_i2c);
+				sensor.init_sensor();
+				delay(wait_for_i2c);
+				
+				// Set I2C address
+				sensor.vl53l7cx_set_i2c_address(sensorAddress << 1);
+				delay(wait_for_i2c);
+				
+				// Set resolution and frequency
+				sensor.vl53l7cx_set_resolution(VL53L7CX_RESOLUTION_8X8);
+				delay(wait_for_i2c);
+				sensor.vl53l7cx_set_ranging_frequency_hz(ranging_frequency);
+				delay(wait_for_i2c);
+				
+				// Start ranging
+				sensor.vl53l7cx_start_ranging();
+				delay(wait_for_i2c);
+				
+				// Reset LPN
+				LPn(dataPin0, latchPin0, clockPin0, lpnPin, false);
+			}
+		\end{lstlisting}
+		Die letzte Funktion, auf die ich in diesem Kapitel eingehen möchte, ist in \ref{Snippet Prozess} zu sehen.
+		Diese Funktion ruft die Messwerte des \acrshort{ToF}s ab und speichert sie in einem \acrshort{JSON}-Array.
+		Die \acrfull{LED} wird während der Verarbeitung eingeschaltet und danach wieder ausgeschaltet.
+		Die Sensordaten werden in einer Matrix verarbeitet und im \acrshort{JSON}-Format gespeichert. Am Ende der Loop()-Funktion werden die Daten, wenn jeder Sensor ein Array aus Sensordaten zum \acrshort{JSON}-String hinzugefügt hat, seriell mit einer Baudrate von 115200 an den \acrshort{NUC} weitergegeben. Als Parameter benötigt die Funktion ein Objekt der Klasse VL53L7CX, eine leere Instanz des Sensordaten Formats, das \acrshort{JSON}-Array in dem die Daten gespeichert werden und den Schlüssel mit dem die Daten vom Programm auf dem \acrshort{NUC} aus dem \acrshort{JSON}-Array extrahiert werden können.		
+		\begin{lstlisting}[label=Snippet Prozess, style=customcpp, caption={processSensorData - Verarbeitung der Sensordaten}]
+		void processSensorData(VL53L7CX &sensor, VL53L7CX_ResultsData &results, const JsonArray& data, const char* sensorKey) {
+			uint8_t NewDataReady = 0;
+			uint8_t status;
+			
+			// Wait for data to be ready
+			do {
+				status = sensor.vl53l7cx_check_data_ready(&NewDataReady);
+			} while (!NewDataReady);
+			
+			// Turn LED on to indicate data processing
+			digitalWrite(LedPin, HIGH);
+			
+			// If data is ready, get the ranging data and store it in the JSON array
+			if ((!status) && (NewDataReady != 0)) {
+				status = sensor.vl53l7cx_get_ranging_data(&results);
+				JsonArray sensorData = doc[sensorKey].to<JsonArray>(); 
+				
+				// Process the results and add them to the JSON array
+				for (int  y = 0; y <= imageWidth * (imageWidth - 1); y += imageWidth) {
+					for (int x = imageWidth - 1; x >= 0;  x--) {
+						sensorData.add(results.distance_mm[VL53L7CX_NB_TARGET_PER_ZONE * (x + y)]);
+					}
+				}
+			}
+			// Turn LED off to indicate processing is done
+			digitalWrite(LedPin, LOW);
+		}
+		\end{lstlisting}
+		\subsection{Robot Operating System 2}
+			\acrfull{ROS} ist ein Open Source Framework, dass beim erstellen von Anwendungen für die Robotik helfen soll.
+			Innerhalb des Framework erstellt man Packages. Packages sind Ansammlungen von Nodes, die in unterschiedlichen Programmiersprachen geschrieben sein können.
+			Innerhalb eines Packages verwendet man für Nodes nur eine Programmiersprache. Nodes können in Dauerschleifen ausgeführt werden und während sie laufen durch \acrshort{ROS}-Subscriber neue Daten erhalten. Dieser Daten-Austausch geschieht unter anderem über Topics, die von \acrshort{ROS}-Pulishern den Subscribern zur Verfügung gestellt werden. In \ref{Topic_Viz} wird der Daten Austausch dargestellt.
+			\\
+			\begin{figure}[h]
+				\centering
+				\includegraphics[scale=0.65]{images/Topic_explained.png}
+				\caption{Visualisierung von einem Topic, Source:\cite{noauthor_tutorials_nodate}}
+				\label{Topic_Viz}
+			\end{figure}
+			
+			Im Rahmen meines Projekts habe ich ein Package erstellt, dass die Daten, die seriell über eine \acrfull{USB} Verbindung vom Raspberry Pi Pico übermittelt werden, aufbereitet und als Topic veröffentlicht werden. Das Package besteht aktuell aus 2 Nodes.
+			\\
+			Die erste \acrshort{ROS} Node liest Sensordaten über eine serielle Verbindung aus, verarbeitet sie und veröffentlicht sie als PointCloud2-Nachricht. Die Daten stammen von mehreren VL53L7CX-\acrshort{ToFs} und werden als \acrshort{JSON}-String empfangen. Die Node interpretiert die \acrshort{JSON}-Daten, konvertiert die Distanzwerte in 3D-Koordinaten und führt eine Rotationskorrektur durch, um die Sensordaten in ein einheitliches Koordinatensystem zu überführen. Anschließend werden die Punkte zu einer Punktwolke zusammengeführt und im \acrshort{ROS}-Frame "vl53l7cx\_link" veröffentlicht. Die Hauptfunktionen umfassen das Einlesen und Verarbeiten der seriellen Daten, die Berechnung der Punktkoordinaten aus den Sensordaten sowie die Generierung und Veröffentlichung der Punktwolke (PointCloud2) Nachricht.
+			\\
+			Die zweite \acrshort{ROS}-Node implementiert Code zur Verarbeitung von Punktwolken in Bezug auf ein \acrshort{UR}10e-Roboterarmmodell. Die Node empfängt Punktwolken über das Topic \acrfull{PCD}, transformiert diese in den Bezugssystemen des Roboters und überprüft, ob die Punkte innerhalb der kollisionsrelevanten Mesh-Modelle des Roboters liegen.
+			Dazu werden die statischen Meshes der Roboterglieder geladen und skaliert. Beim Eintreffen einer neuen Punktwolke werden die Transformationsdaten des Roboters aus dem TF-Tree ausgelesen und die Meshes entsprechend transformiert. Anschließend wird für jeden Punkt geprüft, ob er innerhalb eines der Robotermeshes liegt. Die als außerhalb und innerhalb des Roboters klassifizierten Punkte werden getrennt und als neue Punktwolken auf den Topics /valid\_from\_perspective und /invalid\_from\_perspective veröffentlicht. Logging-Informationen geben die Anzahl der Punkte innerhalb und außerhalb des Roboters aus.
+		\subsection{RVIZ2 und Gazebo Classic}
+			RViz2 und Gazebo Classic sind zwei essenzielle Werkzeuge in der Robotik, insbesondere in der ROS 2-Umgebung.
+			\\
+			RViz2 ist ein Visualisierungstool, das Sensordaten, Roboterbewegungen und Umgebungskarten in einer grafischen Oberfläche darstellt. Es ermöglicht das Debuggen und die Interaktion mit Sensordaten sowie die Visualisierung von Transformationsbeziehungen zwischen verschiedenen Robotergelenken und Sensoren.
+			\\
+			Gazebo Classic hingegen ist eine Simulationsumgebung, die realistische physikalische Modelle von Robotern und ihrer Umgebung erstellt. In Gazebo können Kollisionen, Gravitation, Reibung und andere physikalische Effekte simuliert werden, wodurch sich das Verhalten eines Roboters vor dem Einsatz in der realen Welt testen lässt.
+			\\
+			In meinem Projekt habe ich in Gazebo Classic einen \gls{UR}10 simuliert und mit MoveIt2 bewegt. 
+			Die Simulation hat dann Topics, die die Position der Gelenke vom Roboter betreffen veröffentlicht. 
+			In der pcl\_rob\_node.py, die überprüft, ob die Sensorwerte den Roboter oder ein potentielles Kollisionsobjekt detektiert haben, wird die Information über die Position des Sensormoduls und die Position der einzelnen Achsen des Roboters benötigt.
+			Als Vorlage für die Simulation diente \cite{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025}. Jedoch gibt es an dem Modell von Universal Robotics kein Sensor Modul und deswegen habe ich am Modell an den Unterarm des Roboters eine Sensor Halterung angebracht.
+			\\
+			Zur Visualisierung und Validierung der 3D-Koordinaten der Punktwolken benutze ich \acrshort{RVIZ}. 
+	\section{Hardware}
+		\subsection{Elektronisch}
+		\subsection{Mechanisch}
+\chapter{Fazit und Ausblick}
+
diff --git a/Bachelorarbeit/V2/titelangaben.tex b/Bachelorarbeit/V2/titelangaben.tex
new file mode 100644
index 0000000000000000000000000000000000000000..16808766bf0342d052d4eef0402911fded6a69d1
--- /dev/null
+++ b/Bachelorarbeit/V2/titelangaben.tex
@@ -0,0 +1,11 @@
+% Angaben für Titelseite
+\arbeitstyp{Bachelorarbeit}
+\fachbereich{Maschinenbau und Mechatronik}
+\studiengang{Mechatronik}
+\titel{Entwicklung eines exterozeptiven Time-of-Flight-Sensorsystems zur Kollisionsvermeidung in der Robotik}
+\autor{René Ebeling}
+\matrnr{3279194}
+\betreuer{Prof. Dr. rer. nat. Klaus-Peter Kämper}
+\extbetreuer{Sophie Charlotte Keunecke M.Sc.}
+\datum{\today}
+\dank{}