diff --git a/Bachelorarbeit/Bachelorarbeit/BA.bib b/Bachelorarbeit/Bachelorarbeit/BA.bib index 09fca391b77d07bbb0f8e3fc2a54ac04891f2c8b..3feca493a4f7a2615c1388d566b65f2f57575214 100644 --- a/Bachelorarbeit/Bachelorarbeit/BA.bib +++ b/Bachelorarbeit/Bachelorarbeit/BA.bib @@ -1,9 +1,229 @@ -@misc{noauthor_iidea_nodate, - title = {{IIDEA} - {Inklusion} und {Integration} durch {Cobots} auf dem ersten {Arbeitsmarkt} - {RWTH} {AACHEN} {UNIVERSITY} {IGMR} - {Deutsch}}, - url = {https://www.igmr.rwth-aachen.de/cms/igmr/forschung/projekte/aktuelle-projekte/~baxrrf/iidea/}, - urldate = {2025-04-16}, - file = {Snapshot:/home/carla/Zotero/storage/MHVTD38V/undefined:text/html}, +@misc{noauthor_din_nodate, + title = {{DIN} {EN} {ISO} 10218-2:2021-03, {Robotik}\_- {Sicherheitsanforderungen} für {Robotersysteme} in industrieller {Umgebung}\_- {Teil}\_2: {Robotersysteme}, {Roboteranwendungen} und {Integration} von {Roboterzellen} ({ISO}/{DIS}\_10218-2:2020); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-2:2020}, + shorttitle = {{DIN} {EN} {ISO} 10218-2}, + url = {https://www.dinmedia.de/de/-/-/331246964}, + doi = {10.31030/3215258}, + language = {de}, + urldate = {2025-02-19}, + publisher = {DIN Media GmbH}, + file = {PDF:/home/sochi/Zotero/storage/M7E9L4CP/DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf}, +} + +@misc{noauthor_din_nodate-1, + title = {{DIN} {EN} {ISO} 10218-1:2021-09, {Robotik}\_- {Sicherheitsanforderungen}\_- {Teil}\_1: {Industrieroboter} ({ISO}/{DIS}\_10218-1.2:2021); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-1:2021}, + shorttitle = {{DIN} {EN} {ISO} 10218-1}, + url = {https://www.dinmedia.de/de/-/-/341406648}, + doi = {10.31030/3272912}, + language = {de}, + urldate = {2025-02-19}, + publisher = {DIN Media GmbH}, + file = {PDF:/home/sochi/Zotero/storage/XCP5RDRY/DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf}, +} + +@article{li_common_2019, + title = {Common {Sensors} in {Industrial} {Robots}: {A} {Review}}, + volume = {1267}, + issn = {1742-6588, 1742-6596}, + shorttitle = {Common {Sensors} in {Industrial} {Robots}}, + url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036}, + doi = {10.1088/1742-6596/1267/1/012036}, + abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. 
Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
+	language = {en},
+	number = {1},
+	urldate = {2025-02-18},
+	journal = {Journal of Physics: Conference Series},
+	author = {Li, Peng and Liu, Xiangpeng},
+	month = jul,
+	year = {2019},
+	pages = {012036},
+	file = {PDF:/home/sochi/Zotero/storage/UVXS2R7J/Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
+}
+
+@article{wunderlich_rasante_2013,
+	title = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}: {Weiterentwickelte} {Time}‐of‐{Flight}‐{Technologie} verbessert miniaturisierte {3D}‐{Kameras} und {Sensoren}},
+	volume = {8},
+	copyright = {http://onlinelibrary.wiley.com/termsAndConditions\#vor},
+	issn = {1863-1460, 2191-1975},
+	shorttitle = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}},
+	url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
+	doi = {10.1002/opph.201300018},
+	abstract = {Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
+	language = {de},
+	number = {3},
+	urldate = {2025-02-18},
+	journal = {Optik \& Photonik},
+	author = {Wunderlich, Max},
+	month = sep,
+	year = {2013},
+	pages = {38--40},
+	file = {Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:/home/sochi/Zotero/storage/H7CSUHLW/Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf},
+}
+
+@misc{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025,
+	title = {{UniversalRobots}/{Universal}\_Robots\_ROS2\_Gazebo\_Simulation},
+	copyright = {BSD-3-Clause},
+	url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation},
+	urldate = {2025-02-17},
+	publisher = {Universal Robots A/S},
+	month = feb,
+	year = {2025},
+	note = {original-date: 2021-12-15T12:18:45Z},
+}
+
+@article{haddadin_robot_2017,
+	title = {Robot {Collisions}: {A} {Survey} on {Detection}, {Isolation}, and {Identification}},
+	volume = {33},
+	issn = {1941-0468},
+	shorttitle = {Robot {Collisions}},
+	url = {https://ieeexplore.ieee.org/abstract/document/8059840},
+	doi = {10.1109/TRO.2017.2723903},
+	abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. 
The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.}, + number = {6}, + urldate = {2025-02-12}, + journal = {IEEE Transactions on Robotics}, + author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin}, + month = dec, + year = {2017}, + note = {Conference Name: IEEE Transactions on Robotics}, + keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction (pHRI), Real-time systems, Robot sensing systems, safe robotics, Service robots}, + pages = {1292--1312}, + file = {Accepted Version:/home/sochi/Zotero/storage/IEXJFAMF/Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/LDB3Q92K/8059840.html:text/html}, +} + +@book{hertzberg_mobile_2012, + address = {Berlin, Heidelberg}, + series = {{eXamen}.press}, + title = {Mobile {Roboter}: {Eine} {Einführung} aus {Sicht} der {Informatik}}, + copyright = {https://www.springernature.com/gp/researchers/text-and-data-mining}, + isbn = {978-3-642-01725-4 978-3-642-01726-1}, + shorttitle = {Mobile {Roboter}}, + url = {https://link.springer.com/10.1007/978-3-642-01726-1}, + language = {de}, + urldate = {2025-02-12}, + publisher = {Springer Berlin Heidelberg}, + author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas}, + year = {2012}, + doi = {10.1007/978-3-642-01726-1}, + file = {PDF:/home/sochi/Zotero/storage/RLTU9P46/Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf}, +} + +@book{hering_sensoren_2018, + address = {Wiesbaden}, + title = {Sensoren in {Wissenschaft} und {Technik}}, + copyright = {http://www.springer.com/tdm}, + isbn = {978-3-658-12561-5 978-3-658-12562-2}, + url = {http://link.springer.com/10.1007/978-3-658-12562-2}, + language = {de}, + urldate = {2025-02-12}, + publisher = {Springer Fachmedien Wiesbaden}, + editor = {Hering, Ekbert and Schönfelder, Gert}, + year = {2018}, + doi = {10.1007/978-3-658-12562-2}, + file = {PDF:/home/sochi/Zotero/storage/9TI57WXD/Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf}, +} + +@article{saudabayev_sensors_2015, + title = {Sensors for {Robotic} {Hands}: {A} {Survey} of {State} of the {Art}}, + volume = {3}, + issn = {2169-3536}, + shorttitle = {Sensors for {Robotic} {Hands}}, + url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549}, + doi = {10.1109/ACCESS.2015.2482543}, + abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. 
Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.}, + urldate = {2025-02-12}, + journal = {IEEE Access}, + author = {Saudabayev, Artur and Varol, Huseyin Atakan}, + year = {2015}, + note = {Conference Name: IEEE Access}, + keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors}, + pages = {1765--1782}, + file = {Full Text PDF:/home/sochi/Zotero/storage/HR7ZUF8W/Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/484D4R7H/7283549.html:text/html}, +} + +@article{paya_state---art_2017, + title = {A {State}-of-the-{Art} {Review} on {Mapping} and {Localization} of {Mobile} {Robots} {Using} {Omnidirectional} {Vision} {Sensors}}, + volume = {2017}, + copyright = {Copyright © 2017 L. Payá et al.}, + issn = {1687-7268}, + url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650}, + doi = {10.1155/2017/3497650}, + abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.}, + language = {en}, + number = {1}, + urldate = {2025-02-12}, + journal = {Journal of Sensors}, + author = {Payá, L. and Gil, A. and Reinoso, O.}, + year = {2017}, + note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650}, + pages = {3497650}, + file = {Full Text PDF:/home/sochi/Zotero/storage/EZ473NGD/Payá et al. 
- 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:/home/sochi/Zotero/storage/86LDAQ62/3497650.html:text/html},
+}
+
+@misc{noauthor_vl53l7cx_nodate,
+	title = {{VL53L7CX} - {Time}-of-{Flight} ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
+	url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
+	abstract = {VL53L7CX - Time-of-Flight (ToF) 8x8 multizone ranging sensor with 90 degrees FoV, VL53L7CXV0GC/1, STMicroelectronics},
+	language = {en},
+	urldate = {2025-02-12},
+	file = {Snapshot:/home/sochi/Zotero/storage/VEYLCCLA/vl53l7cx.html:text/html},
+}
+
+@misc{noauthor_pico-series_nodate,
+	title = {Pico-series {Microcontrollers} - {Raspberry} {Pi} {Documentation}},
+	url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
+	abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
+	language = {en},
+	urldate = {2025-02-12},
+	file = {Snapshot:/home/sochi/Zotero/storage/KUCB8PVI/pico-series.html:text/html},
+}
+
+@misc{noauthor_chatgpt_nodate,
+	title = {{ChatGPT}},
+	url = {https://chatgpt.com},
+	abstract = {A conversational AI system that listens, learns, and challenges},
+	urldate = {2025-02-12},
+	file = {Snapshot:/home/sochi/Zotero/storage/ZT8MG8Y4/678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
+}
+
+@misc{iii_earlephilhowerarduino-pico_2025,
+	title = {earlephilhower/arduino-pico},
+	copyright = {LGPL-2.1},
+	url = {https://github.com/earlephilhower/arduino-pico},
+	abstract = {Raspberry Pi Pico Arduino core, for all RP2040 and RP2350 boards},
+	urldate = {2025-02-12},
+	author = {Philhower, III, Earle F.},
+	month = feb,
+	year = {2025},
+	note = {original-date: 2021-02-25T04:20:27Z},
+	keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
+}
+
+@misc{noauthor_tutorials_nodate,
+	title = {Tutorials — {ROS} 2 {Documentation}: {Humble} documentation},
+	url = {https://docs.ros.org/en/humble/Tutorials.html},
+	urldate = {2025-02-12},
+	file = {Tutorials — ROS 2 Documentation\: Humble documentation:/home/sochi/Zotero/storage/28S5GUZ5/Tutorials.html:text/html},
+}
+
+@misc{noauthor_examples_nodate,
+	title = {Examples - trimesh 4.6.2 documentation},
+	url = {https://trimesh.org/examples.html},
+	urldate = {2025-02-12},
+	file = {Examples - trimesh 4.6.2 documentation:/home/sochi/Zotero/storage/82WA6KM7/examples.html:text/html},
+}
+
+@misc{grans_sebastiangransros2-point-cloud-demo_2024,
+	title = {{SebastianGrans}/{ROS2}-{Point}-{Cloud}-{Demo}},
+	copyright = {MIT},
+	url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
+	abstract = {Demo package for ROS2 that publishes a point cloud and visualizes it using RViz2},
+	urldate = {2025-02-12},
+	author = {Grans, Sebastian},
+	month = dec,
+	year = {2024},
+	note = {original-date: 2020-06-30T16:55:21Z},
+}
 
 @incollection{mcgrath_sensing_2013,
@@ -22,7 +242,7 @@
 	doi = {10.1007/978-1-4302-6014-1_2},
 	keywords = {Bulk Acoustic Wave, Electrochemical Sensor, Indium Antimonide, Linear Transfer Function, Smoke Detector},
 	pages = {15--50},
-	file = {Full Text PDF:/home/carla/Zotero/storage/5CKH2AHE/McGrath et al. - 2013 - Sensing and Sensor Fundamentals.pdf:application/pdf},
+	file = {Full Text PDF:/home/sochi/Zotero/storage/5CKH2AHE/McGrath et al. 
- 2013 - Sensing and Sensor Fundamentals.pdf:application/pdf}, } @article{zyl_sensor_2009, @@ -41,44 +261,7 @@ year = {2009}, keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems}, pages = {16--30}, - file = {Full Text PDF:/home/carla/Zotero/storage/2EJXBMW8/Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf}, -} - -@article{zyl_sensor_2009-1, - title = {The {Sensor} {Web}: systems of sensor systems}, - volume = {2}, - issn = {1753-8947}, - shorttitle = {The {Sensor} {Web}}, - url = {https://doi.org/10.1080/17538940802439549}, - doi = {10.1080/17538940802439549}, - abstract = {Global Earth Observing System of Systems (GEOSS) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the GEOSS Sensor Web community, towards achieving GEOSS goals.}, - number = {1}, - urldate = {2025-02-26}, - journal = {International Journal of Digital Earth}, - author = {Zyl, T. L. van and Simonis, I. and McFerren, G.}, - month = mar, - year = {2009}, - pages = {16--30}, - file = {Full Text PDF:/home/carla/Zotero/storage/6QZEMCEX/Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf}, -} - -@article{zyl_sensor_2009-2, - title = {The {Sensor} {Web}: systems of sensor systems}, - volume = {2}, - issn = {1753-8947}, - shorttitle = {The {Sensor} {Web}}, - url = {https://doi.org/10.1080/17538940802439549}, - doi = {10.1080/17538940802439549}, - abstract = {Global Earth Observing System of Systems (GEOSS) presents a great challenge of System of Systems integration across organisational and political boundaries. One existing paradigm that can address the scale of the challenge is that of the Sensor Web. In this paradigm, the internet is evolving into an active, macro sensing instrument, capable of drawing sensory data from around the globe to the fingertips of individuals. The Sensor Web will support scientific research and facilitate transparent political decision making. This article presents some of the technologies explored and activities engaged in by the GEOSS Sensor Web community, towards achieving GEOSS goals.}, - number = {1}, - urldate = {2025-02-26}, - journal = {International Journal of Digital Earth}, - author = {Zyl, T. L. van and Simonis, I. and McFerren, G.}, - month = mar, - year = {2009}, - keywords = {data acquisition, digital earth architecture, earth observation, Sensor Web, systems of systems}, - pages = {16--30}, - file = {Full Text PDF:/home/carla/Zotero/storage/FUEW7ZSG/Zyl et al. - 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf}, + file = {Full Text PDF:/home/sochi/Zotero/storage/FUEW7ZSG/Zyl et al. 
- 2009 - The Sensor Web systems of sensor systems.pdf:application/pdf}, } @misc{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025, @@ -92,23 +275,18 @@ note = {original-date: 2021-12-15T12:15:45Z}, } -@misc{noauthor_sparkfun_nodate, - title = {{SparkFun} {VL53L5CX}}, - url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library}, -} - @misc{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate, title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}}, url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177}, urldate = {2025-02-21}, - file = {tof_imager_micro_ros/teensy_pcl_publisher/teensy_pcl_publisher.ino at humble · adityakamath/tof_imager_micro_ros · GitHub:/home/carla/Zotero/storage/PYV8KTSC/teensy_pcl_publisher.html:text/html}, + file = {tof_imager_micro_ros/teensy_pcl_publisher/teensy_pcl_publisher.ino at humble · adityakamath/tof_imager_micro_ros · GitHub:/home/sochi/Zotero/storage/PYV8KTSC/teensy_pcl_publisher.html:text/html}, } @misc{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate, title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}}, url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher}, urldate = {2025-02-21}, - file = {tof_imager_micro_ros/teensy_pcl_publisher at humble · adityakamath/tof_imager_micro_ros · GitHub:/home/carla/Zotero/storage/TEVM2A5B/teensy_pcl_publisher.html:text/html}, + file = {tof_imager_micro_ros/teensy_pcl_publisher at humble · adityakamath/tof_imager_micro_ros · GitHub:/home/sochi/Zotero/storage/TEVM2A5B/teensy_pcl_publisher.html:text/html}, } @misc{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025, @@ -127,7 +305,7 @@ abstract = {Mid-range lidar sensor}, language = {en}, urldate = {2025-02-20}, - file = {Snapshot:/home/carla/Zotero/storage/AR82YJRS/vlp-16.html:text/html}, + file = {Snapshot:/home/sochi/Zotero/storage/AR82YJRS/vlp-16.html:text/html}, } @article{niclass_design_2012, @@ -146,9 +324,9 @@ month = may, year = {2012}, note = {Publisher: Optica Publishing Group}, - keywords = {Deformable mirrors, Diode lasers, Image sensors, Light emitting diodes, Optical systems, Systems design}, + keywords = {Image sensors, Deformable mirrors, Diode lasers, Light emitting diodes, Optical systems, Systems design}, pages = {11863--11881}, - file = {Full Text PDF:/home/carla/Zotero/storage/BZWW7BVY/Niclass et al. - 2012 - Design and characterization of a 256x64-pixel single-photon imager in CMOS for a MEMS-based laser sc.pdf:application/pdf}, + file = {Full Text PDF:/home/sochi/Zotero/storage/BZWW7BVY/Niclass et al. - 2012 - Design and characterization of a 256x64-pixel single-photon imager in CMOS for a MEMS-based laser sc.pdf:application/pdf}, } @article{surmann_autonomous_2003, @@ -166,7 +344,7 @@ year = {2003}, keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, SLAM}, pages = {181--198}, - file = {PDF:/home/carla/Zotero/storage/BKNJW2B7/Surmann et al. - 2003 - An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of ind.pdf:application/pdf;ScienceDirect Snapshot:/home/carla/Zotero/storage/H82LXSD3/S0921889003001556.html:text/html}, + file = {PDF:/home/sochi/Zotero/storage/BKNJW2B7/Surmann et al. 
- 2003 - An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of ind.pdf:application/pdf;ScienceDirect Snapshot:/home/sochi/Zotero/storage/H82LXSD3/S0921889003001556.html:text/html}, } @article{raj_survey_2020, @@ -188,7 +366,7 @@ Publisher: Multidisciplinary Digital Publishing Institute}, keywords = {electro-mechanical scanning, LiDAR, MEMS scanning, opto-mechanical scanning, solid-state LiDAR}, pages = {741}, - file = {Full Text PDF:/home/carla/Zotero/storage/2PBQYF7P/Raj et al. - 2020 - A Survey on LiDAR Scanning Mechanisms.pdf:application/pdf}, + file = {Full Text PDF:/home/sochi/Zotero/storage/2PBQYF7P/Raj et al. - 2020 - A Survey on LiDAR Scanning Mechanisms.pdf:application/pdf}, } @misc{noauthor_file20200501_2020, @@ -199,7 +377,7 @@ Publisher: Multidisciplinary Digital Publishing Institute}, urldate = {2025-02-20}, month = may, year = {2020}, - file = {Snapshot:/home/carla/Zotero/storage/H7EUEBHT/File20200501_Time_of_flight.html:text/html}, + file = {Snapshot:/home/sochi/Zotero/storage/H7EUEBHT/File20200501_Time_of_flight.html:text/html}, } @article{jain_survey_nodate, @@ -209,7 +387,7 @@ Publisher: Multidisciplinary Digital Publishing Institute}, language = {en}, urldate = {2025-02-19}, author = {Jain, Siddharth}, - file = {PDF:/home/carla/Zotero/storage/X2WNAHZB/Jain - A survey of Laser Range Finding.pdf:application/pdf}, + file = {PDF:/home/sochi/Zotero/storage/X2WNAHZB/Jain - A survey of Laser Range Finding.pdf:application/pdf}, } @inproceedings{rashid_local_2020, @@ -222,9 +400,9 @@ Publisher: Multidisciplinary Digital Publishing Institute}, author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias}, month = sep, year = {2020}, - keywords = {Cameras, Laser radar, Production, Robot vision systems, Safety, Sensor fusion, Service robots}, + keywords = {Service robots, Cameras, Laser radar, Production, Robot vision systems, Safety, Sensor fusion}, pages = {354--359}, - file = {Full Text PDF:/home/carla/Zotero/storage/HAXPN6EL/Rashid et al. - 2020 - Local and Global Sensors for Collision Avoidance.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/4X42Y6TK/9235223.html:text/html}, + file = {Full Text PDF:/home/sochi/Zotero/storage/HAXPN6EL/Rashid et al. - 2020 - Local and Global Sensors for Collision Avoidance.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/4X42Y6TK/9235223.html:text/html}, } @inproceedings{al_naser_fusion_2022, @@ -238,9 +416,9 @@ Publisher: Multidisciplinary Digital Publishing Institute}, month = aug, year = {2022}, note = {ISSN: 1944-9437}, - keywords = {Sensor fusion, Service robots, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Stability criteria, Thermal sensors}, + keywords = {Service robots, Sensor fusion, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Stability criteria, Thermal sensors}, pages = {532--537}, - file = {Full Text PDF:/home/carla/Zotero/storage/Q933FYY2/Al Naser et al. - 2022 - Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a r.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/942BAXF5/9900548.html:text/html}, + file = {Full Text PDF:/home/sochi/Zotero/storage/Q933FYY2/Al Naser et al. 
- 2022 - Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a r.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/942BAXF5/9900548.html:text/html}, } @inproceedings{choi_xr-based_2022, @@ -253,9 +431,9 @@ Publisher: Multidisciplinary Digital Publishing Institute}, author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin}, month = mar, year = {2022}, - keywords = {Safety, Service robots, Collaboration, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—XR-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality (MR/XR), Real-time systems, Robot sensing systems, safety distance, Three-dimensional displays}, + keywords = {Real-time systems, Robot sensing systems, Service robots, Safety, Collaboration, Three-dimensional displays, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—XR-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality (MR/XR), safety distance}, pages = {481--482}, - file = {Full Text PDF:/home/carla/Zotero/storage/VYUDN5LQ/Choi et al. - 2022 - An XR-based Approach to Safe Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/GDI6TZQ2/9757621.html:text/html}, + file = {Full Text PDF:/home/sochi/Zotero/storage/VYUDN5LQ/Choi et al. - 2022 - An XR-based Approach to Safe Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/GDI6TZQ2/9757621.html:text/html}, } @inproceedings{amaya-mejia_vision-based_2022, @@ -269,9 +447,9 @@ Publisher: Multidisciplinary Digital Publishing Institute}, month = oct, year = {2022}, note = {ISSN: 2153-0866}, - keywords = {Safety, Service robots, Collaboration, Three-dimensional displays, Collision avoidance, Robot control, Solid modeling}, + keywords = {Collision avoidance, Service robots, Safety, Collaboration, Robot control, Solid modeling, Three-dimensional displays}, pages = {7331--7336}, - file = {Full Text PDF:/home/carla/Zotero/storage/XX9FL2U5/Amaya-Mejía et al. - 2022 - Vision-Based Safety System for Barrierless Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/29LFGV4B/9981689.html:text/html}, + file = {Full Text PDF:/home/sochi/Zotero/storage/XX9FL2U5/Amaya-Mejía et al. - 2022 - Vision-Based Safety System for Barrierless Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/29LFGV4B/9981689.html:text/html}, } @article{li_safe_2024, @@ -289,9 +467,9 @@ Publisher: Multidisciplinary Digital Publishing Institute}, author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong}, month = jun, year = {2024}, - keywords = {Safety, Collaborative robots, Collision detection, Human–robot collaboration (HRC), Obstacle avoidance}, + keywords = {Collision detection, Safety, Collaborative robots, Human–robot collaboration (HRC), Obstacle avoidance}, pages = {2235--2261}, - file = {Full Text PDF:/home/carla/Zotero/storage/4JS4CSVA/Li et al. 
- 2024 - Safe human–robot collaboration for industrial settings a survey.pdf:application/pdf}, + file = {Full Text PDF:/home/sochi/Zotero/storage/4JS4CSVA/Li et al. - 2024 - Safe human–robot collaboration for industrial settings a survey.pdf:application/pdf}, } @misc{noauthor_can_nodate, @@ -301,7 +479,7 @@ Publisher: Multidisciplinary Digital Publishing Institute}, language = {en-GB}, urldate = {2025-02-19}, journal = {Interact Analysis}, - file = {Snapshot:/home/carla/Zotero/storage/25UG57J5/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era.html:text/html}, + file = {Snapshot:/home/sochi/Zotero/storage/25UG57J5/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era.html:text/html}, } @article{nath_review_2022, @@ -322,7 +500,7 @@ Publisher: Multidisciplinary Digital Publishing Institute}, note = {Number: 1}, keywords = {Sensors}, pages = {85--89}, - file = {Full Text PDF:/home/carla/Zotero/storage/AA6ZJJBN/Nath - 2022 - A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector.pdf:application/pdf}, + file = {Full Text PDF:/home/sochi/Zotero/storage/AA6ZJJBN/Nath - 2022 - A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector.pdf:application/pdf}, } @article{maheepala_low_2021, @@ -340,9 +518,9 @@ Publisher: Multidisciplinary Digital Publishing Institute}, month = jan, year = {2021}, note = {Conference Name: IEEE Sensors Journal}, - keywords = {Image sensors, Batteries, Cloud computing, image sensor, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers}, + keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers}, pages = {1172--1186}, - file = {Full Text PDF:/home/carla/Zotero/storage/XSY3V6PK/Maheepala et al. - 2021 - Low Power Processors and Image Sensors for Vision-Based IoT Devices A Review.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/Y7EV2L8T/9165781.html:text/html}, + file = {Full Text PDF:/home/sochi/Zotero/storage/XSY3V6PK/Maheepala et al. - 2021 - Low Power Processors and Image Sensors for Vision-Based IoT Devices A Review.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/Y7EV2L8T/9165781.html:text/html}, } @article{liu_application_2024, @@ -362,7 +540,7 @@ Publisher: Multidisciplinary Digital Publishing Institute}, note = {Publisher: Taylor \& Francis \_eprint: https://doi.org/10.1080/10447318.2022.2041907}, pages = {915--932}, - file = {Full Text PDF:/home/carla/Zotero/storage/G9ECNMWG/Liu et al. - 2024 - Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing.pdf:application/pdf}, + file = {Full Text PDF:/home/sochi/Zotero/storage/G9ECNMWG/Liu et al. - 2024 - Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing.pdf:application/pdf}, } @inproceedings{popov_collision_2017, @@ -380,7 +558,7 @@ Publisher: Multidisciplinary Digital Publishing Institute}, month = aug, year = {2017}, pages = {838--843}, - file = {PDF:/home/carla/Zotero/storage/LVC2B7U6/Popov et al. - 2017 - Collision detection, localization & classification for industrial robots with joint torque sensors.pdf:application/pdf}, + file = {PDF:/home/sochi/Zotero/storage/LVC2B7U6/Popov et al. 
- 2017 - Collision detection, localization & classification for industrial robots with joint torque sensors.pdf:application/pdf},
 }
 
 @misc{noauthor_robotics_2021,
@@ -392,10 +570,10 @@ Publisher: Multidisciplinary Digital Publishing Institute},
 	publisher = {DIN Media GmbH},
 	month = nov,
 	year = {2021},
-	file = {PDF:/home/carla/Zotero/storage/6SUCZU6R/DIN_EN_ISO_8373.pdf:application/pdf},
+	file = {PDF:/home/sochi/Zotero/storage/6SUCZU6R/DIN_EN_ISO_8373.pdf:application/pdf},
 }
 
-@misc{noauthor_din_nodate,
+@misc{noauthor_din_nodate-2,
 	title = {{DIN} {EN} {ISO} 10218-1:2021-09, {Robotik}\_- {Sicherheitsanforderungen}\_- {Teil}\_1: {Industrieroboter} ({ISO}/{DIS}\_10218-1.2:2021); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-1:2021},
 	shorttitle = {{DIN} {EN} {ISO} 10218-1},
 	url = {https://www.dinmedia.de/de/-/-/341406648},
@@ -403,20 +581,20 @@ Publisher: Multidisciplinary Digital Publishing Institute},
 	language = {de},
 	urldate = {2025-02-19},
 	publisher = {DIN Media GmbH},
-	file = {PDF:/home/carla/Zotero/storage/FFMUVR22/DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf},
+	file = {PDF:/home/sochi/Zotero/storage/FFMUVR22/DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf},
 }
 
-@misc{noauthor_din_nodate-1,
+@misc{noauthor_din_nodate-3,
 	title = {{DIN} {EN} {ISO} 10218-2:2021-03, {Robotik}\_- {Sicherheitsanforderungen} für {Robotersysteme} in industrieller {Umgebung}\_- {Teil}\_2: {Robotersysteme}, {Roboteranwendungen} und {Integration} von {Roboterzellen} ({ISO}/{DIS}\_10218-2:2020); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-2:2020},
 	shorttitle = {{DIN} {EN} {ISO} 10218-2},
 	url = {https://www.dinmedia.de/de/-/-/331246964},
 	urldate = {2025-02-19},
 	publisher = {DIN Media GmbH},
 	doi = {10.31030/3215258},
-	file = {PDF:/home/carla/Zotero/storage/HB28M28Z/DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf},
+	file = {PDF:/home/sochi/Zotero/storage/HB28M28Z/DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf},
 }
 
-@article{li_common_2019,
+@article{li_common_2019-1,
 	title = {Common {Sensors} in {Industrial} {Robots}: {A} {Review}},
 	volume = {1267},
 	issn = {1742-6588, 1742-6596},
 	shorttitle = {Common {Sensors} in {Industrial} {Robots}},
 	url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
@@ -431,906 +609,24 @@ Publisher: Multidisciplinary Digital Publishing Institute},
 	month = jul,
 	year = {2019},
 	pages = {012036},
-	file = {PDF:/home/carla/Zotero/storage/WQ5C229K/Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
+	file = {PDF:/home/sochi/Zotero/storage/WQ5C229K/Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
 }
 
+@incollection{haddadin_physical_2016,
+	address = {Cham},
+	title = {Physical {Human}–{Robot} {Interaction}},
+	isbn = {978-3-319-32552-1},
+	url = {https://doi.org/10.1007/978-3-319-32552-1_69},
+	abstract = {Over the last two decades, the foundations for physical human–robot interaction (pHRI) have evolved from successful developments in mechatronics, control, and planning, leading toward safer lightweight robot designs and interaction control schemes that advance beyond the current capacities of existing high-payload and high-precision position-controlled industrial robots. Based on their ability to sense physical interaction, render compliant behavior along the robot structure, plan motions that respect human preferences, and generate interaction plans for collaboration and coaction with humans, these novel robots have opened up novel and unforeseen application domains, and have advanced the field of human safety in robotics.},
+	language = {en},
-@misc{noauthor_examples_nodate,
-	title = {Examples - trimesh 4.6.2 documentation},
-	url = {https://trimesh.org/examples.html},
-	urldate = {2025-02-12},
-	file = {Examples - trimesh 4.6.2 documentation:/home/carla/Zotero/storage/SURMD6VT/examples.html:text/html},
-}
-
-@misc{noauthor_tutorials_nodate,
-	title = {Tutorials — {ROS} 2 {Documentation}: {Humble} documentation},
-	url = {https://docs.ros.org/en/humble/Tutorials.html},
-	urldate = {2025-02-12},
-	file = {Tutorials — ROS 2 Documentation\: Humble documentation:/home/carla/Zotero/storage/HQ4G28QE/Tutorials.html:text/html},
-}
-
-@article{wunderlich_rasante_2013,
-	title = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}: {Weiterentwickelte} {Time}‐of‐{Flight}‐{Technologie} 
verbessert miniaturisierte {3D}‐{Kameras} und {Sensoren}}, - volume = {8}, - issn = {1863-1460, 2191-1975}, - shorttitle = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}}, - url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018}, - doi = {10.1002/opph.201300018}, - abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.}, - number = {3}, - urldate = {2025-02-18}, - journal = {Optik \& Photonik}, - author = {Wunderlich, Max}, - month = sep, - year = {2013}, - pages = {38--40}, - file = {Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:/home/carla/Zotero/storage/JZJDENIL/Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf}, -} - -@misc{grans_sebastiangransros2-point-cloud-demo_2024, - title = {{SebastianGrans}/{ROS2}-{Point}-{Cloud}-{Demo}}, - url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo}, - abstract = {Demo package for ROS2 that publishes a point cloud and visualizes it using RViz2}, - urldate = {2025-02-12}, - author = {Grans, Sebastian}, - month = dec, - year = {2024}, -} - -@misc{iii_earlephilhowerarduino-pico_2025, - title = {earlephilhower/arduino-pico}, - url = {https://github.com/earlephilhower/arduino-pico}, - abstract = {Raspberry Pi Pico Arduino core, for all RP2040 and RP2350 boards}, - urldate = {2025-02-12}, - author = {III, Earle F. Philhower}, - month = feb, - year = {2025}, - keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi}, -} - -@misc{noauthor_chatgpt_nodate, - title = {{ChatGPT}}, - url = {https://chatgpt.com}, - abstract = {A conversational AI system that listens, learns, and challenges}, - urldate = {2025-02-12}, - file = {Snapshot:/home/carla/Zotero/storage/R25VDLY2/678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html}, -} - -@misc{noauthor_pico-series_nodate, - title = {Pico-series {Microcontrollers} - {Raspberry} {Pi} {Documentation}}, - url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html}, - abstract = {The official documentation for Raspberry Pi computers and microcontrollers}, - urldate = {2025-02-12}, - file = {Snapshot:/home/carla/Zotero/storage/VKDULUUX/pico-series.html:text/html}, -} - -@misc{noauthor_vl53l7cx_nodate, - title = {{VL53L7CX} - {Time}-of-{Flight} ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}}, - url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html}, - abstract = {VL53L7CX - Time-of-Flight (ToF) 8x8 multizone ranging sensor with 90 degrees FoV, VL53L7CXV0GC/1, STMicroelectronics}, - urldate = {2025-02-12}, - file = {Snapshot:/home/carla/Zotero/storage/BKZM82KQ/vl53l7cx.html:text/html}, -} - -@article{paya_state---art_2017, - title = {A {State}-of-the-{Art} {Review} on {Mapping} and {Localization} of {Mobile} {Robots} {Using} {Omnidirectional} {Vision} {Sensors}}, - volume = {2017}, - issn = {1687-7268}, - url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650}, - doi = {10.1155/2017/3497650}, - abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve 
different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.}, - number = {1}, - urldate = {2025-02-12}, - journal = {Journal of Sensors}, - author = {Payá, L. and Gil, A. and Reinoso, O.}, - year = {2017}, - pages = {3497650}, - file = {Full Text PDF:/home/carla/Zotero/storage/G2QJUK53/Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:/home/carla/Zotero/storage/PSAFIKFD/3497650.html:text/html}, -} - -@article{saudabayev_sensors_2015, - title = {Sensors for {Robotic} {Hands}: {A} {Survey} of {State} of the {Art}}, - volume = {3}, - issn = {2169-3536}, - shorttitle = {Sensors for {Robotic} {Hands}}, - url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549}, - doi = {10.1109/ACCESS.2015.2482543}, - abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. 
Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.}, - urldate = {2025-02-12}, - journal = {IEEE Access}, - author = {Saudabayev, Artur and Varol, Huseyin Atakan}, - year = {2015}, - keywords = {Robot sensing systems, Sensors, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors}, - pages = {1765--1782}, - file = {Full Text PDF:/home/carla/Zotero/storage/CDE3NZ3S/Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/BEAGLR7C/7283549.html:text/html}, -} - -@book{hering_sensoren_2018, - address = {Wiesbaden}, - title = {Sensoren in {Wissenschaft} und {Technik}}, - isbn = {978-3-658-12561-5 978-3-658-12562-2}, - url = {http://link.springer.com/10.1007/978-3-658-12562-2}, - urldate = {2025-02-12}, - publisher = {Springer Fachmedien Wiesbaden}, - editor = {Hering, Ekbert and Schönfelder, Gert}, - year = {2018}, - doi = {10.1007/978-3-658-12562-2}, - file = {PDF:/home/carla/Zotero/storage/BG7FCKRW/Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf}, -} - -@book{hertzberg_mobile_2012, - address = {Berlin, Heidelberg}, - series = {{eXamen}.press}, - title = {Mobile {Roboter}: {Eine} {Einführung} aus {Sicht} der {Informatik}}, - isbn = {978-3-642-01725-4 978-3-642-01726-1}, - shorttitle = {Mobile {Roboter}}, - url = {https://link.springer.com/10.1007/978-3-642-01726-1}, - urldate = {2025-02-12}, - publisher = {Springer Berlin Heidelberg}, - author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas}, - year = {2012}, - doi = {10.1007/978-3-642-01726-1}, - file = {PDF:/home/carla/Zotero/storage/4LFEHVEK/Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf}, -} - -@article{haddadin_robot_2017, - title = {Robot {Collisions}: {A} {Survey} on {Detection}, {Isolation}, and {Identification}}, - volume = {33}, - issn = {1941-0468}, - shorttitle = {Robot {Collisions}}, - url = {https://ieeexplore.ieee.org/abstract/document/8059840}, - doi = {10.1109/TRO.2017.2723903}, - abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. 
The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.}, - number = {6}, - urldate = {2025-02-12}, - journal = {IEEE Transactions on Robotics}, - author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin}, - month = dec, - year = {2017}, - keywords = {Service robots, Real-time systems, Robot sensing systems, Collision avoidance, Collision detection, Algorithm design and analysis, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction (pHRI), safe robotics}, - pages = {1292--1312}, - file = {Accepted Version:/home/carla/Zotero/storage/BGZ6TUWR/Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/UJAHJUBE/8059840.html:text/html}, -} - -@misc{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025, - title = {{UniversalRobots}/{Universal}\_Robots\_ROS2\_Gazebo\_Simulation}, - url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation}, - urldate = {2025-02-17}, - publisher = {Universal Robots A/S}, - month = feb, - year = {2025}, -} - -@misc{noauthor_din_nodate-2, - title = {{DIN} {EN} {ISO} 10218-2:2021-03, {Robotik}\_- {Sicherheitsanforderungen} für {Robotersysteme} in industrieller {Umgebung}\_- {Teil}\_2: {Robotersysteme}, {Roboteranwendungen} und {Integration} von {Roboterzellen} ({ISO}/{DIS}\_10218-2:2020); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-2:2020}, - shorttitle = {{DIN} {EN} {ISO} 10218-2}, - url = {https://www.dinmedia.de/de/-/-/331246964}, - doi = {10.31030/3215258}, - language = {de}, - urldate = {2025-02-19}, - publisher = {DIN Media GmbH}, - file = {PDF:/home/carla/Zotero/storage/M7E9L4CP/DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf}, -} - -@misc{noauthor_din_nodate-3, - title = {{DIN} {EN} {ISO} 10218-1:2021-09, {Robotik}\_- {Sicherheitsanforderungen}\_- {Teil}\_1: {Industrieroboter} ({ISO}/{DIS}\_10218-1.2:2021); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-1:2021}, - shorttitle = {{DIN} {EN} {ISO} 10218-1}, - url = {https://www.dinmedia.de/de/-/-/341406648}, - doi = {10.31030/3272912}, - language = {de}, - urldate = {2025-02-19}, - publisher = {DIN Media GmbH}, - file = {PDF:/home/carla/Zotero/storage/XCP5RDRY/DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf}, -} - -@article{li_common_2019-1, - title = {Common {Sensors} in {Industrial} {Robots}: {A} {Review}}, - volume = {1267}, - issn = {1742-6588, 1742-6596}, - shorttitle = {Common {Sensors} in {Industrial} {Robots}}, - url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036}, - doi = {10.1088/1742-6596/1267/1/012036}, - abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. 
They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.}, +@incollection{haddadin_physical_2016, + address = {Cham}, + title = {Physical {Human}{Robot} {Interaction}}, + isbn = {978-3-319-32552-1}, + url = {https://doi.org/10.1007/978-3-319-32552-1_69}, + abstract = {Over the last two decades, the foundations for physical human–robot interaction (pHRI) have evolved from successful developments in mechatronics, control, and planning, leading toward safer lightweight robot designs and interaction control schemes that advance beyond the current capacities of existing high-payload and high-precision position-controlled industrial robots. Based on their ability to sense physical interaction, render compliant behavior along the robot structure, plan motions that respect human preferences, and generate interaction plans for collaboration and coaction with humans, these novel robots have opened up novel and unforeseen application domains, and have advanced the field of human safety in robotics.}, language = {en}, - number = {1}, - urldate = {2025-02-18}, - journal = {Journal of Physics: Conference Series}, - author = {Li, Peng and Liu, Xiangpeng}, - month = jul, - year = {2019}, - pages = {012036}, - file = {PDF:/home/carla/Zotero/storage/UVXS2R7J/Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf}, -} - -@article{wunderlich_rasante_2013-1, - title = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}: {Weiterentwickelte} {Time}‐of‐{Flight}‐{Technologie} verbessert miniaturisierte {3D}‐{Kameras} und {Sensoren}}, - volume = {8}, - copyright = {http://onlinelibrary.wiley.com/termsAndConditions\#vor}, - issn = {1863-1460, 2191-1975}, - shorttitle = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}}, - url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018}, - doi = {10.1002/opph.201300018}, - abstract = {Abstract - Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. 
Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.}, - language = {de}, - number = {3}, - urldate = {2025-02-18}, - journal = {Optik \& Photonik}, - author = {Wunderlich, Max}, - month = sep, - year = {2013}, - pages = {38--40}, - file = {Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:/home/carla/Zotero/storage/H7CSUHLW/Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf}, -} - -@misc{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025-1, - title = {{UniversalRobots}/{Universal}\_Robots\_ROS2\_Gazebo\_Simulation}, - copyright = {BSD-3-Clause}, - url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation}, - urldate = {2025-02-17}, - publisher = {Universal Robots A/S}, - month = feb, - year = {2025}, - note = {original-date: 2021-12-15T12:18:45Z}, -} - -@article{haddadin_robot_2017-1, - title = {Robot {Collisions}: {A} {Survey} on {Detection}, {Isolation}, and {Identification}}, - volume = {33}, - issn = {1941-0468}, - shorttitle = {Robot {Collisions}}, - url = {https://ieeexplore.ieee.org/abstract/document/8059840}, - doi = {10.1109/TRO.2017.2723903}, - abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.}, - number = {6}, - urldate = {2025-02-12}, - journal = {IEEE Transactions on Robotics}, - author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin}, - month = dec, - year = {2017}, - note = {Conference Name: IEEE Transactions on Robotics}, - keywords = {Service robots, Real-time systems, Robot sensing systems, Collision avoidance, Collision detection, Algorithm design and analysis, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction (pHRI), safe robotics}, - pages = {1292--1312}, - file = {Accepted Version:/home/carla/Zotero/storage/IEXJFAMF/Haddadin et al. 
- 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/LDB3Q92K/8059840.html:text/html}, -} - -@book{hertzberg_mobile_2012-1, - address = {Berlin, Heidelberg}, - series = {{eXamen}.press}, - title = {Mobile {Roboter}: {Eine} {Einführung} aus {Sicht} der {Informatik}}, - copyright = {https://www.springernature.com/gp/researchers/text-and-data-mining}, - isbn = {978-3-642-01725-4 978-3-642-01726-1}, - shorttitle = {Mobile {Roboter}}, - url = {https://link.springer.com/10.1007/978-3-642-01726-1}, - language = {de}, - urldate = {2025-02-12}, - publisher = {Springer Berlin Heidelberg}, - author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas}, - year = {2012}, - doi = {10.1007/978-3-642-01726-1}, - file = {PDF:/home/carla/Zotero/storage/RLTU9P46/Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf}, -} - -@book{hering_sensoren_2018-1, - address = {Wiesbaden}, - title = {Sensoren in {Wissenschaft} und {Technik}}, - copyright = {http://www.springer.com/tdm}, - isbn = {978-3-658-12561-5 978-3-658-12562-2}, - url = {http://link.springer.com/10.1007/978-3-658-12562-2}, - language = {de}, - urldate = {2025-02-12}, - publisher = {Springer Fachmedien Wiesbaden}, - editor = {Hering, Ekbert and Schönfelder, Gert}, - year = {2018}, - doi = {10.1007/978-3-658-12562-2}, - file = {PDF:/home/carla/Zotero/storage/9TI57WXD/Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf}, -} - -@article{saudabayev_sensors_2015-1, - title = {Sensors for {Robotic} {Hands}: {A} {Survey} of {State} of the {Art}}, - volume = {3}, - issn = {2169-3536}, - shorttitle = {Sensors for {Robotic} {Hands}}, - url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549}, - doi = {10.1109/ACCESS.2015.2482543}, - abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. 
Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.}, - urldate = {2025-02-12}, - journal = {IEEE Access}, - author = {Saudabayev, Artur and Varol, Huseyin Atakan}, - year = {2015}, - note = {Conference Name: IEEE Access}, - keywords = {Robot sensing systems, Sensors, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors}, - pages = {1765--1782}, - file = {Full Text PDF:/home/carla/Zotero/storage/HR7ZUF8W/Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:/home/carla/Zotero/storage/484D4R7H/7283549.html:text/html}, -} - -@article{paya_state---art_2017-1, - title = {A {State}-of-the-{Art} {Review} on {Mapping} and {Localization} of {Mobile} {Robots} {Using} {Omnidirectional} {Vision} {Sensors}}, - volume = {2017}, - copyright = {Copyright © 2017 L. Payá et al.}, - issn = {1687-7268}, - url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650}, - doi = {10.1155/2017/3497650}, - abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.}, - language = {en}, - number = {1}, - urldate = {2025-02-12}, - journal = {Journal of Sensors}, - author = {Payá, L. and Gil, A. and Reinoso, O.}, - year = {2017}, - note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650}, - pages = {3497650}, - file = {Full Text PDF:/home/carla/Zotero/storage/EZ473NGD/Payá et al. 
- 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:/home/carla/Zotero/storage/86LDAQ62/3497650.html:text/html},
-}
-
-@misc{noauthor_vl53l7cx_nodate-1,
- title = {{VL53L7CX} - {Time}-of-{Flight} ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
- url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
- abstract = {VL53L7CX - Time-of-Flight (ToF) 8x8 multizone ranging sensor with 90 degrees FoV, VL53L7CXV0GC/1, STMicroelectronics},
- language = {en},
- urldate = {2025-02-12},
- file = {Snapshot:/home/carla/Zotero/storage/VEYLCCLA/vl53l7cx.html:text/html},
-}
-
-@misc{noauthor_pico-series_nodate-1,
- title = {Pico-series {Microcontrollers} - {Raspberry} {Pi} {Documentation}},
- url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
- abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
- language = {en},
- urldate = {2025-02-12},
- file = {Snapshot:/home/carla/Zotero/storage/KUCB8PVI/pico-series.html:text/html},
-}
-
-@misc{noauthor_chatgpt_nodate-1,
- title = {{ChatGPT}},
- url = {https://chatgpt.com},
- abstract = {A conversational AI system that listens, learns, and challenges},
- urldate = {2025-02-12},
- file = {Snapshot:/home/carla/Zotero/storage/ZT8MG8Y4/678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
-}
-
-@misc{iii_earlephilhowerarduino-pico_2025-1,
- title = {earlephilhower/arduino-pico},
- copyright = {LGPL-2.1},
- url = {https://github.com/earlephilhower/arduino-pico},
- abstract = {Raspberry Pi Pico Arduino core, for all RP2040 and RP2350 boards},
- urldate = {2025-02-12},
- author = {Philhower, III, Earle F.},
- month = feb,
- year = {2025},
- note = {original-date: 2021-02-25T04:20:27Z},
- keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
-}
-
-@misc{noauthor_tutorials_nodate-1,
- title = {Tutorials — {ROS} 2 {Documentation}: {Humble} documentation},
- url = {https://docs.ros.org/en/humble/Tutorials.html},
- urldate = {2025-02-12},
- file = {Tutorials — ROS 2 Documentation\: Humble documentation:/home/carla/Zotero/storage/28S5GUZ5/Tutorials.html:text/html},
-}
-
-@misc{noauthor_examples_nodate-1,
- title = {Examples - trimesh 4.6.2 documentation},
- url = {https://trimesh.org/examples.html},
- urldate = {2025-02-12},
- file = {Examples - trimesh 4.6.2 documentation:/home/carla/Zotero/storage/82WA6KM7/examples.html:text/html},
-}
-
-@misc{grans_sebastiangransros2-point-cloud-demo_2024-1,
- title = {{SebastianGrans}/{ROS2}-{Point}-{Cloud}-{Demo}},
- copyright = {MIT},
- url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
- abstract = {Demo package for ROS2 that publishes a point cloud and visualizes it using RViz2},
- urldate = {2025-02-12},
- author = {Grans, Sebastian},
- month = dec,
- year = {2024},
- note = {original-date: 2020-06-30T16:55:21Z},
-}
-
-@misc{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate-1,
- title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
- url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177},
- urldate = {2025-02-21},
-}
-
-@misc{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate-1,
- title =
{tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}},
- url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher},
- urldate = {2025-02-21},
-}
-
-@misc{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025-1,
- title = {sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library},
- url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library},
- urldate = {2025-02-21},
- publisher = {SparkFun Electronics},
- month = jan,
- year = {2025},
-}
-
-@misc{noauthor_vlp_nodate-1,
- title = {{VLP} 16 {\textbar} {Ouster}},
- url = {https://ouster.com/products/hardware/vlp-16},
- abstract = {Mid-range lidar sensor},
- urldate = {2025-02-20},
-}
-
-@article{niclass_design_2012-1,
- title = {Design and characterization of a 256x64-pixel single-photon imager in {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor},
- volume = {20},
- issn = {1094-4087},
- url = {https://opg.optica.org/oe/abstract.cfm?uri=oe-20-11-11863},
- doi = {10.1364/OE.20.011863},
- abstract = {We introduce an optical time-of-flight image sensor taking advantage of a MEMS-based laser scanning device. Unlike previous approaches, our concept benefits from the high timing resolution and the digital signal flexibility of single-photon pixels in CMOS to allow for a nearly ideal cooperation between the image sensor and the scanning device. This technique enables a high signal-to-background light ratio to be obtained, while simultaneously relaxing the constraint on size of the MEMS mirror. These conditions are critical for devising practical and low-cost depth sensors intended to operate in uncontrolled environments, such as outdoors. A proof-of-concept prototype capable of operating in real-time was implemented. This paper focuses on the design and characterization of a 256x64-pixel image sensor, which also comprises an event-driven readout circuit, an array of 64 row-level high-throughput time-to-digital converters, and a 16Gbit/s global readout circuit. Quantitative evaluation of the sensor under 2klux of background light revealed a repeatability error of 13.5cm throughout the distance range of 20 meters.},
- number = {11},
- urldate = {2025-02-20},
- journal = {Optics Express},
- author = {Niclass, Cristiano and Ito, Kota and Soga, Mineki and Matsubara, Hiroyuki and Aoyagi, Isao and Kato, Satoru and Kagami, Manabu},
- month = may,
- year = {2012},
- keywords = {Deformable mirrors, Diode lasers, Image sensors, Light emitting diodes, Optical systems, Systems design},
- pages = {11863--11881},
-}
-
-@article{surmann_autonomous_2003-1,
- title = {An autonomous mobile robot with a {3D} laser range finder for {3D} exploration and digitalization of indoor environments},
- volume = {45},
- issn = {0921-8890},
- url = {https://www.sciencedirect.com/science/article/pii/S0921889003001556},
- doi = {10.1016/j.robot.2003.09.004},
- abstract = {Digital 3D models of the environment are needed in rescue and inspection robotics, facility managements and architecture. This paper presents an automatic system for gaging and digitalization of 3D indoor environments. It consists of an autonomous mobile robot, a reliable 3D laser range finder and three elaborated software modules. The first module, a fast variant of the Iterative Closest Points algorithm, registers the 3D scans in a common coordinate system and relocalizes the robot.
The second module, a next best view planner, computes the next nominal pose based on the acquired 3D data while avoiding complicated obstacles. The third module, a closed-loop and globally stable motor controller, navigates the mobile robot to a nominal pose on the base of odometry and avoids collisions with dynamical obstacles. The 3D laser range finder acquires a 3D scan at this pose. The proposed method allows one to digitalize large indoor environments fast and reliably without any intervention and solves the SLAM problem. The results of two 3D digitalization experiments are presented using a fast octree-based visualization method.}, - number = {3}, - urldate = {2025-02-20}, - journal = {Robotics and Autonomous Systems}, - author = {Surmann, Hartmut and Nüchter, Andreas and Hertzberg, Joachim}, - month = dec, - year = {2003}, - keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, SLAM}, - pages = {181--198}, -} - -@article{raj_survey_2020-1, - title = {A {Survey} on {LiDAR} {Scanning} {Mechanisms}}, - volume = {9}, - issn = {2079-9292}, - url = {https://www.mdpi.com/2079-9292/9/5/741}, - doi = {10.3390/electronics9050741}, - abstract = {In recent years, light detection and ranging (LiDAR) technology has gained huge popularity in various applications such as navigation, robotics, remote sensing, and advanced driving assistance systems (ADAS). This popularity is mainly due to the improvements in LiDAR performance in terms of range detection, accuracy, power consumption, as well as physical features such as dimension and weight. Although a number of literatures on LiDAR technology have been published earlier, not many has been reported on the state-of-the-art LiDAR scanning mechanisms. The aim of this article is to review the scanning mechanisms employed in LiDAR technology from past research works to the current commercial products. The review highlights four commonly used mechanisms in LiDAR systems: Opto-mechanical, electromechanical, micro-electromechanical systems (MEMS), and solid-state scanning. The study reveals that electro-mechanical scanning is the most prominent technology in use today. The commercially available 1D time of flight (TOF) LiDAR instrument is currently the most attractive option for conversion from 1D to 3D LiDAR system, provided that low scanning rate is not an issue. As for applications with low size, weight, and power (SWaP) requirements, MEMS scanning is found to be the better alternative. MEMS scanning is by far the more matured technology compared to solid-state scanning and is currently given great emphasis to increase its robustness for fulfilling the requirements of ADAS applications. Finally, solid-state LiDAR systems are expected to fill in the gap in ADAS applications despite the low technology readiness in comparison to MEMS scanners. 
However, since solid-state scanning is believed to have superior robustness, field of view (FOV), and scanning rate potential, great efforts are given by both academics and industries to further develop this technology.}, - number = {5}, - urldate = {2025-02-20}, - journal = {Electronics}, - author = {Raj, Thinal and Hashim, Fazida Hanim and Huddin, Aqilah Baseri and Ibrahim, Mohd Faisal and Hussain, Aini}, - month = may, - year = {2020}, - keywords = {electro-mechanical scanning, LiDAR, MEMS scanning, opto-mechanical scanning, solid-state LiDAR}, - pages = {741}, -} - -@misc{noauthor_file20200501_2020-1, - title = {File:20200501 {Time} of flight.svg - {Wikipedia}}, - shorttitle = {File}, - url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg}, - urldate = {2025-02-20}, - month = may, - year = {2020}, -} - -@article{jain_survey_nodate-1, - title = {A survey of {Laser} {Range} {Finding}}, - url = {http://www.siddjain.com/ee236a.pdf}, - abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight (TOF), phase shift, FMCW, and correlation are described.}, - urldate = {2025-02-19}, - author = {Jain, Siddharth}, -} - -@inproceedings{rashid_local_2020-1, - title = {Local and {Global} {Sensors} for {Collision} {Avoidance}}, - url = {https://ieeexplore.ieee.org/document/9235223}, - doi = {10.1109/MFI49285.2020.9235223}, - abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° LiDAR and a small range RGB camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. Results suggest higher efficiency and safety when using local and global setup.}, - urldate = {2025-02-19}, - booktitle = {2020 {IEEE} {International} {Conference} on {Multisensor} {Fusion} and {Integration} for {Intelligent} {Systems} ({MFI})}, - author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias}, - month = sep, - year = {2020}, - keywords = {Cameras, Laser radar, Production, Robot vision systems, Safety, Sensor fusion, Service robots}, - pages = {354--359}, -} - -@inproceedings{al_naser_fusion_2022-1, - title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment}, - url = {https://ieeexplore.ieee.org/document/9900548}, - doi = {10.1109/RO-MAN53752.2022.9900548}, - abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration (HRC) technology in the factory of the future. 
In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various HRC scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., OpenPose), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.}, - urldate = {2025-02-19}, - booktitle = {2022 31st {IEEE} {International} {Conference} on {Robot} and {Human} {Interactive} {Communication} ({RO}-{MAN})}, - author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen}, - month = aug, - year = {2022}, - keywords = {Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Service robots, Stability criteria, Thermal sensors}, - pages = {532--537}, -} - -@inproceedings{choi_xr-based_2022-1, - title = {An {XR}-based {Approach} to {Safe} {Human}-{Robot} {Collaboration}}, - url = {https://ieeexplore.ieee.org/document/9757621}, - doi = {10.1109/VRW55335.2022.00106}, - abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality (XR) approach to safe HRC by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in XR. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an XR device.}, - urldate = {2025-02-19}, - booktitle = {2022 {IEEE} {Conference} on {Virtual} {Reality} and {3D} {User} {Interfaces} {Abstracts} and {Workshops} ({VRW})}, - author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin}, - month = mar, - year = {2022}, - keywords = {Collaboration, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—XR-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality (MR/XR), Real-time systems, Robot sensing systems, Safety, safety distance, Service robots, Three-dimensional displays}, - pages = {481--482}, -} - -@inproceedings{amaya-mejia_vision-based_2022-1, - title = {Vision-{Based} {Safety} {System} for {Barrierless} {Human}-{Robot} {Collaboration}}, - url = {https://ieeexplore.ieee.org/document/9981689}, - doi = {10.1109/IROS47612.2022.9981689}, - abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring (SSM) type of operation. 
For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.}, - urldate = {2025-02-19}, - booktitle = {2022 {IEEE}/{RSJ} {International} {Conference} on {Intelligent} {Robots} and {Systems} ({IROS})}, - author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol}, - month = oct, - year = {2022}, - keywords = {Collaboration, Collision avoidance, Robot control, Safety, Service robots, Solid modeling, Three-dimensional displays}, - pages = {7331--7336}, -} - -@article{li_safe_2024-1, - title = {Safe human–robot collaboration for industrial settings: a survey}, - volume = {35}, - issn = {1572-8145}, - shorttitle = {Safe human–robot collaboration for industrial settings}, - url = {https://doi.org/10.1007/s10845-023-02159-4}, - doi = {10.1007/s10845-023-02159-4}, - abstract = {Human–robot collaboration (HRC) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via HRC, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in HRC, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and HRC is provided. Then, a survey of various approaches to HRC safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. 
Finally, challenging issues and prospects for the future development of HRC safety are highlighted to provide recommendations for relevant stakeholders to consider when designing HRC-enabled industrial systems.}, - number = {5}, - urldate = {2025-02-19}, - journal = {Journal of Intelligent Manufacturing}, - author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong}, - month = jun, - year = {2024}, - keywords = {Collaborative robots, Collision detection, Human–robot collaboration (HRC), Obstacle avoidance, Safety}, - pages = {2235--2261}, -} - -@misc{noauthor_can_nodate-1, - title = {Can the collaborative robot market experience a second growth surge in the post-pandemic era?}, - url = {https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/}, - abstract = {The market for collaborative robots is forecast to show strong growth over the coming years, with rising sales in industrial and non-industrial sectors.}, - urldate = {2025-02-19}, -} - -@article{nath_review_2022-1, - title = {A {Review} of {Advancements} in {Robotic} and {Sensor}-based {Technologies} in {Construction} {Sector}}, - volume = {7}, - issn = {2736-576X}, - url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624}, - doi = {10.24018/ejeng.2022.7.1.2624}, - abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling (BIM) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D BIM and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate BIM and sensor technology with tools such as GIS. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. 
The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.}, - number = {1}, - urldate = {2025-02-19}, - journal = {European Journal of Engineering and Technology Research}, - author = {Nath, Aditya S.}, - month = feb, - year = {2022}, - keywords = {Sensors}, - pages = {85--89}, -} - -@article{maheepala_low_2021-1, - title = {Low {Power} {Processors} and {Image} {Sensors} for {Vision}-{Based} {IoT} {Devices}: {A} {Review}}, - volume = {21}, - issn = {1558-1748}, - shorttitle = {Low {Power} {Processors} and {Image} {Sensors} for {Vision}-{Based} {IoT} {Devices}}, - url = {https://ieeexplore.ieee.org/abstract/document/9165781}, - doi = {10.1109/JSEN.2020.3015932}, - abstract = {With the advancements of the Internet of Things (IoT) technology, applications of battery powered machine vision based IoT devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for IoT devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based IoT devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the IoT applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based IoT device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given IoT application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based IoT devices are discussed, and the potential future research directions in the field are presented.}, - number = {2}, - urldate = {2025-02-19}, - journal = {IEEE Sensors Journal}, - author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.}, - month = jan, - year = {2021}, - keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers}, - pages = {1172--1186}, -} - -@article{liu_application_2024-1, - title = {Application, {Development} and {Future} {Opportunities} of {Collaborative} {Robots} ({Cobots}) in {Manufacturing}: {A} {Literature} {Review}}, - volume = {40}, - issn = {1044-7318}, - shorttitle = {Application, {Development} and {Future} {Opportunities} of {Collaborative} {Robots} ({Cobots}) in {Manufacturing}}, - url = {https://doi.org/10.1080/10447318.2022.2041907}, - doi = {10.1080/10447318.2022.2041907}, - abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis (PRISMA). 
To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.}, - number = {4}, - urldate = {2025-02-19}, - journal = {International Journal of Human–Computer Interaction}, - author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.}, - month = feb, - year = {2024}, - pages = {915--932}, -} - -@inproceedings{popov_collision_2017-1, - address = {Lisbon}, - title = {Collision detection, localization \& classification for industrial robots with joint torque sensors}, - isbn = {978-1-5386-3518-6}, - url = {http://ieeexplore.ieee.org/document/8172400/}, - doi = {10.1109/ROMAN.2017.8172400}, - abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. 
The suggested algorithms were examined on the industrial robotic arm Kuka iiwa LBR 14 R820, ground truth information on the contact nature and its location were obtained with 3D LIDAR and camera.}, - urldate = {2025-02-19}, - booktitle = {2017 26th {IEEE} {International} {Symposium} on {Robot} and {Human} {Interactive} {Communication} ({RO}-{MAN})}, - publisher = {IEEE}, - author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos}, - month = aug, - year = {2017}, - pages = {838--843}, -} - -@misc{noauthor_robotics_2021-1, - title = {Robotics - {Vocabulary}}, - shorttitle = {{ISO} 8373:2021-11}, - url = {https://www.dinmedia.de/de/norm/iso-8373/348036781}, - urldate = {2025-02-19}, - publisher = {DIN Media GmbH}, - month = nov, - year = {2021}, -} - -@misc{noauthor_din_nodate-4, - title = {{DIN} {EN} {ISO} 10218-1:2021-09, {Robotik}\_- {Sicherheitsanforderungen}\_- {Teil}\_1: {Industrieroboter} ({ISO}/{DIS}\_10218-1.2:2021); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-1:2021}, - shorttitle = {{DIN} {EN} {ISO} 10218-1}, - url = {https://www.dinmedia.de/de/-/-/341406648}, - urldate = {2025-02-19}, - publisher = {DIN Media GmbH}, - doi = {10.31030/3272912}, -} - -@misc{noauthor_din_nodate-5, - title = {{DIN} {EN} {ISO} 10218-2:2021-03, {Robotik}\_- {Sicherheitsanforderungen} für {Robotersysteme} in industrieller {Umgebung}\_- {Teil}\_2: {Robotersysteme}, {Roboteranwendungen} und {Integration} von {Roboterzellen} ({ISO}/{DIS}\_10218-2:2020); {Deutsche} und {Englische} {Fassung} {prEN}\_ISO\_10218-2:2020}, - shorttitle = {{DIN} {EN} {ISO} 10218-2}, - url = {https://www.dinmedia.de/de/-/-/331246964}, - urldate = {2025-02-19}, - publisher = {DIN Media GmbH}, - doi = {10.31030/3215258}, -} - -@article{li_common_2019-2, - title = {Common {Sensors} in {Industrial} {Robots}: {A} {Review}}, - volume = {1267}, - issn = {1742-6588, 1742-6596}, - shorttitle = {Common {Sensors} in {Industrial} {Robots}}, - url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036}, - doi = {10.1088/1742-6596/1267/1/012036}, - abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. 
Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
- number = {1},
- urldate = {2025-02-18},
- journal = {Journal of Physics: Conference Series},
- author = {Li, Peng and Liu, Xiangpeng},
- month = jul,
- year = {2019},
- pages = {012036},
-}
-
-@article{wunderlich_rasante_2013-2,
- title = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}: {Weiterentwickelte} {Time}‐of‐{Flight}‐{Technologie} verbessert miniaturisierte {3D}‐{Kameras} und {Sensoren}},
- volume = {8},
- issn = {1863-1460, 2191-1975},
- shorttitle = {Rasante {Entwicklung} in der {3D}‐{Bildgebung}},
- url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
- doi = {10.1002/opph.201300018},
- abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten.
Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.}, - number = {3}, - urldate = {2025-02-18}, - journal = {Optik \& Photonik}, - author = {Wunderlich, Max}, - month = sep, - year = {2013}, - pages = {38--40}, -} - -@misc{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025-2, - title = {{UniversalRobots}/{Universal}\_Robots\_ROS2\_Gazebo\_Simulation}, - url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation}, - urldate = {2025-02-17}, - publisher = {Universal Robots A/S}, - month = feb, - year = {2025}, -} - -@article{haddadin_robot_2017-2, - title = {Robot {Collisions}: {A} {Survey} on {Detection}, {Isolation}, and {Identification}}, - volume = {33}, - issn = {1941-0468}, - shorttitle = {Robot {Collisions}}, - url = {https://ieeexplore.ieee.org/abstract/document/8059840}, - doi = {10.1109/TRO.2017.2723903}, - abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. 
The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.}, - number = {6}, - urldate = {2025-02-12}, - journal = {IEEE Transactions on Robotics}, - author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin}, - month = dec, - year = {2017}, - keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction (pHRI), Real-time systems, Robot sensing systems, safe robotics, Service robots}, - pages = {1292--1312}, -} - -@book{hertzberg_mobile_2012-2, - address = {Berlin, Heidelberg}, - series = {{eXamen}.press}, - title = {Mobile {Roboter}: {Eine} {Einführung} aus {Sicht} der {Informatik}}, - isbn = {978-3-642-01725-4 978-3-642-01726-1}, - shorttitle = {Mobile {Roboter}}, - url = {https://link.springer.com/10.1007/978-3-642-01726-1}, - urldate = {2025-02-12}, - publisher = {Springer Berlin Heidelberg}, - author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas}, - year = {2012}, - doi = {10.1007/978-3-642-01726-1}, -} - -@book{hering_sensoren_2018-2, - address = {Wiesbaden}, - title = {Sensoren in {Wissenschaft} und {Technik}}, - isbn = {978-3-658-12561-5 978-3-658-12562-2}, - url = {http://link.springer.com/10.1007/978-3-658-12562-2}, - urldate = {2025-02-12}, - publisher = {Springer Fachmedien Wiesbaden}, - editor = {Hering, Ekbert and Schönfelder, Gert}, - year = {2018}, - doi = {10.1007/978-3-658-12562-2}, -} - -@article{saudabayev_sensors_2015-2, - title = {Sensors for {Robotic} {Hands}: {A} {Survey} of {State} of the {Art}}, - volume = {3}, - issn = {2169-3536}, - shorttitle = {Sensors for {Robotic} {Hands}}, - url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549}, - doi = {10.1109/ACCESS.2015.2482543}, - abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. 
Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
- urldate = {2025-02-12},
- journal = {IEEE Access},
- author = {Saudabayev, Artur and Varol, Huseyin Atakan},
- year = {2015},
- keywords = {Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, Robot sensing systems, robotic hands, Robots, sensors, Sensors},
- pages = {1765--1782},
-}
-
-@article{paya_state---art_2017-2,
- title = {A {State}-of-the-{Art} {Review} on {Mapping} and {Localization} of {Mobile} {Robots} {Using} {Omnidirectional} {Vision} {Sensors}},
- volume = {2017},
- issn = {1687-7268},
- url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
- doi = {10.1155/2017/3497650},
- abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
- number = {1},
- urldate = {2025-02-12},
- journal = {Journal of Sensors},
- author = {Payá, L. and Gil, A. and Reinoso, O.},
- year = {2017},
- pages = {3497650},
-}
-
-@misc{noauthor_vl53l7cx_nodate-2,
- title = {{VL53L7CX} - {Time}-of-{Flight} ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
- url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
- abstract = {VL53L7CX - Time-of-Flight (ToF) 8x8 multizone ranging sensor with 90 degrees FoV, VL53L7CXV0GC/1, STMicroelectronics},
- urldate = {2025-02-12},
-}
-
-@misc{noauthor_pico-series_nodate-2,
- title = {Pico-series {Microcontrollers} - {Raspberry} {Pi} {Documentation}},
- url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
- abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
- urldate = {2025-02-12},
-}
-
-@misc{noauthor_chatgpt_nodate-2,
- title = {{ChatGPT}},
- url = {https://chatgpt.com},
- abstract = {A conversational AI system that listens, learns, and challenges},
- urldate = {2025-02-12},
-}
-
-@misc{iii_earlephilhowerarduino-pico_2025-2,
- title = {earlephilhower/arduino-pico},
- url = {https://github.com/earlephilhower/arduino-pico},
- abstract = {Raspberry Pi Pico Arduino core, for all RP2040 and RP2350 boards},
- urldate = {2025-02-12},
- author = {Philhower, III, Earle F.},
- month = feb,
- year = {2025},
- keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
-}
-
-@misc{noauthor_tutorials_nodate-2,
- title = {Tutorials — {ROS} 2 {Documentation}: {Humble} documentation},
- url = {https://docs.ros.org/en/humble/Tutorials.html},
- urldate = {2025-02-12},
-}
-
-@misc{noauthor_examples_nodate-2,
- title = {Examples - trimesh 4.6.2 documentation},
- url = {https://trimesh.org/examples.html},
- urldate = {2025-02-12},
-}
-
-@misc{grans_sebastiangransros2-point-cloud-demo_2024-2,
- title = {{SebastianGrans}/{ROS2}-{Point}-{Cloud}-{Demo}},
- url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
- abstract = {Demo package for ROS2 that publishes a point cloud and visualizes it using RViz2},
- urldate = {2025-02-12},
- author = {Grans, Sebastian},
- month = dec,
- year = {2024},
-}
-
-@misc{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025-1,
- title = {{UniversalRobots}/{Universal}\_Robots\_ROS2\_GZ\_Simulation},
- url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation},
- urldate = {2025-02-24},
- publisher = {Universal Robots A/S},
- month = feb,
- year = {2025},
-}
-
-@article{vogel-heuser_von_2023,
- title = {Von {Industrie} 4.0 zu {Industrie} 5.0 – {Idee}, {Konzept} und {Wahrnehmung}},
- volume = {60},
- issn = {2198-2775},
- url = {https://doi.org/10.1365/s40702-023-01002-x},
- doi = {10.1365/s40702-023-01002-x},
- abstract = {In der sich rasant entwickelnden Landschaft der industriellen Automatisierung läutet das Aufkommen von Industrie 5.0 (I5.0) einen Paradigmenwechsel hin zu einem stärker kollaborativen und menschzentrierten Ansatz ein. In diesem Beitrag wird die Rolle der Mensch-Maschine-Kollaboration und menschzentrierter Werkzeuge bei der Förderung einer symbiotischen Beziehung zwischen fortschrittlichen Technologien und menschlichen Benutzern untersucht, um so das volle Potenzial von I5.0 zu erschließen. Als nächste Stufe in der Entwicklung des Produktionssektors zielt I5.0 darauf ab, ein Gleichgewicht zwischen Automatisierung und menschlichen Fähigkeiten herzustellen und die sich ergänzenden Stärken beider zu nutzen.
Es werden Technologien vorgestellt, welche menschzentrierte Lösungen zur Steigerung von Produktivität, Flexibilität und Nachhaltigkeit in der Fabrik der Zukunft fokussieren.}, - language = {de}, - number = {6}, - urldate = {2025-04-16}, - journal = {HMD Praxis der Wirtschaftsinformatik}, - author = {Vogel-Heuser, Birgit and Bengler, Klaus}, - month = dec, - year = {2023}, - keywords = {Cyber-Physical Production Systems, Cyber-physische Produktionssysteme, Fabrik der Zukunft, Factory of the Future, Human-Centered Automation, Human-Machine Collaboration, Industrie 4.0, Industrie 5.0, Industry 4.0, Industry 5.0, Mensch-Maschine-Kollaboration, Menschzentrierte Automatisierung}, - pages = {1124--1142}, - file = {Full Text PDF:/home/carla/Zotero/storage/U6RG6RSY/Vogel-Heuser and Bengler - 2023 - Von Industrie 4.0 zu Industrie 5.0 – Idee, Konzept und Wahrnehmung.pdf:application/pdf}, + urldate = {2025-04-17}, + booktitle = {Springer {Handbook} of {Robotics}}, + publisher = {Springer International Publishing}, + author = {Haddadin, Sami and Croft, Elizabeth}, + editor = {Siciliano, Bruno and Khatib, Oussama}, + year = {2016}, + doi = {10.1007/978-3-319-32552-1_69}, + keywords = {Collision Detection, Contact Force, Impedance Control, Industrial Robot, Joint Torque}, + pages = {1835--1874}, + file = {Full Text PDF:/home/sochi/Zotero/storage/U662HXY7/Haddadin and Croft - 2016 - Physical Human–Robot Interaction.pdf:application/pdf}, } diff --git a/Bachelorarbeit/Bachelorarbeit/Bibliothek.bib b/Bachelorarbeit/Bachelorarbeit/Bibliothek.bib deleted file mode 100644 index fa8bb91600e1916b2a4b2c4b19a8c339dac19ba8..0000000000000000000000000000000000000000 --- a/Bachelorarbeit/Bachelorarbeit/Bibliothek.bib +++ /dev/null @@ -1,452 +0,0 @@ - -@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025, - title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation}, - rights = {{BSD}-3-Clause}, - url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation}, - publisher = {Universal Robots A/S}, - urldate = {2025-02-17}, - date = {2025-02-13}, - note = {original-date: 2021-12-15T12:18:45Z}, -} - -@article{haddadin_robot_2017, - title = {Robot Collisions: A Survey on Detection, Isolation, and Identification}, - volume = {33}, - issn = {1941-0468}, - url = {https://ieeexplore.ieee.org/abstract/document/8059840}, - doi = {10.1109/TRO.2017.2723903}, - shorttitle = {Robot Collisions}, - abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. 
The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.},
- pages = {1292--1312},
- number = {6},
- journaltitle = {{IEEE} Transactions on Robotics},
- author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin},
- urldate = {2025-02-12},
- date = {2017-12},
- note = {Conference Name: {IEEE} Transactions on Robotics},
- keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots},
- file = {Accepted Version:C\:\\Users\\Rene\\Zotero\\storage\\IEXJFAMF\\Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\LDB3Q92K\\8059840.html:text/html},
-}
-
-@book{hertzberg_mobile_2012,
- location = {Berlin, Heidelberg},
- title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
- rights = {https://www.springernature.com/gp/researchers/text-and-data-mining},
- isbn = {978-3-642-01725-4 978-3-642-01726-1},
- url = {https://link.springer.com/10.1007/978-3-642-01726-1},
- series = {{eXamen}.press},
- shorttitle = {Mobile Roboter},
- publisher = {Springer Berlin Heidelberg},
- author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
- urldate = {2025-02-12},
- date = {2012},
- langid = {german},
- doi = {10.1007/978-3-642-01726-1},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\RLTU9P46\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf},
-}
-
-@collection{hering_sensoren_2018,
- location = {Wiesbaden},
- title = {Sensoren in Wissenschaft und Technik},
- rights = {http://www.springer.com/tdm},
- isbn = {978-3-658-12561-5 978-3-658-12562-2},
- url = {http://link.springer.com/10.1007/978-3-658-12562-2},
- publisher = {Springer Fachmedien Wiesbaden},
- editor = {Hering, Ekbert and Schönfelder, Gert},
- urldate = {2025-02-12},
- date = {2018},
- langid = {german},
- doi = {10.1007/978-3-658-12562-2},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\9TI57WXD\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf},
-}
-
-@article{saudabayev_sensors_2015,
- title = {Sensors for Robotic Hands: A Survey of State of the Art},
- volume = {3},
- issn = {2169-3536},
- url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
- doi = {10.1109/ACCESS.2015.2482543},
- shorttitle = {Sensors for Robotic Hands},
- abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
- pages = {1765--1782},
- journaltitle = {{IEEE} Access},
- author = {Saudabayev, Artur and Varol, Huseyin Atakan},
- urldate = {2025-02-12},
- date = {2015},
- note = {Conference Name: {IEEE} Access},
- keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors},
- file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HR7ZUF8W\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\484D4R7H\\7283549.html:text/html},
-}
-
-@article{paya_state---art_2017,
- title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
- volume = {2017},
- rights = {Copyright © 2017 L. Payá et al.},
- issn = {1687-7268},
- url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
- doi = {10.1155/2017/3497650},
- abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
- pages = {3497650},
- number = {1},
- journaltitle = {Journal of Sensors},
- author = {Payá, L. and Gil, A. and Reinoso, O.},
- urldate = {2025-02-12},
- date = {2017},
- langid = {english},
- note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650},
- file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\EZ473NGD\\Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\86LDAQ62\\3497650.html:text/html},
-}
-
-@online{noauthor_vl53l7cx_nodate,
- title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
- url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
- abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}},
- urldate = {2025-02-12},
- langid = {english},
- file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\VEYLCCLA\\vl53l7cx.html:text/html},
-}
-
-@online{noauthor_pico-series_nodate,
- title = {Pico-series Microcontrollers - Raspberry Pi Documentation},
- url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
- abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
- urldate = {2025-02-12},
- langid = {english},
- file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\KUCB8PVI\\pico-series.html:text/html},
-}
-
-@online{noauthor_chatgpt_nodate,
- title = {{ChatGPT}},
- url = {https://chatgpt.com},
- abstract = {A conversational {AI} system that listens, learns, and challenges},
- urldate = {2025-02-12},
- file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\ZT8MG8Y4\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
-}
-
-@software{iii_earlephilhowerarduino-pico_2025,
- title = {earlephilhower/arduino-pico},
- rights = {{LGPL}-2.1},
- url = {https://github.com/earlephilhower/arduino-pico},
- abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
- author = {{III}, Earle F. Philhower},
- urldate = {2025-02-12},
- date = {2025-02-11},
- note = {original-date: 2021-02-25T04:20:27Z},
- keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
-}
-
-@online{noauthor_tutorials_nodate,
- title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
- url = {https://docs.ros.org/en/humble/Tutorials.html},
- urldate = {2025-02-12},
- file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\Rene\\Zotero\\storage\\28S5GUZ5\\Tutorials.html:text/html},
-}
-
-@online{noauthor_examples_nodate,
- title = {Examples - trimesh 4.6.2 documentation},
- url = {https://trimesh.org/examples.html},
- urldate = {2025-02-12},
- file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\Rene\\Zotero\\storage\\82WA6KM7\\examples.html:text/html},
-}
-
-@software{grans_sebastiangransros2-point-cloud-demo_2024,
- title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
- rights = {{MIT}},
- url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
- abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
- author = {Grans, Sebastian},
- urldate = {2025-02-12},
- date = {2024-12-08},
- note = {original-date: 2020-06-30T16:55:21Z},
-}
-
-@article{wunderlich_rasante_2013,
- title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
- volume = {8},
- rights = {http://onlinelibrary.wiley.com/{termsAndConditions}\#vor},
- issn = {1863-1460, 2191-1975},
- url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
- doi = {10.1002/opph.201300018},
- shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
- abstract = {Abstract
- Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
- pages = {38--40},
- number = {3},
- journaltitle = {Optik \& Photonik},
- shortjournal = {Optik \& Photonik},
- author = {Wunderlich, Max},
- urldate = {2025-02-18},
- date = {2013-09},
- langid = {german},
- file = {Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\Rene\\Zotero\\storage\\H7CSUHLW\\Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf},
-}
-
-@article{li_common_2019,
- title = {Common Sensors in Industrial Robots: A Review},
- volume = {1267},
- issn = {1742-6588, 1742-6596},
- url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
- doi = {10.1088/1742-6596/1267/1/012036},
- shorttitle = {Common Sensors in Industrial Robots},
- abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
- pages = {012036},
- number = {1},
- journaltitle = {Journal of Physics: Conference Series},
- shortjournal = {J. Phys.: Conf. Ser.},
- author = {Li, Peng and Liu, Xiangpeng},
- urldate = {2025-02-18},
- date = {2019-07-01},
- langid = {english},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\UVXS2R7J\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
-}
-
-@misc{noauthor_din_nodate,
- title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
- url = {https://www.dinmedia.de/de/-/-/341406648},
- doi = {10.31030/3272912},
- shorttitle = {{DIN} {EN} {ISO} 10218-1},
- publisher = {{DIN} Media {GmbH}},
- urldate = {2025-02-19},
- langid = {german},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\XCP5RDRY\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf},
-}
-
-@misc{noauthor_din_nodate-1,
- title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
- url = {https://www.dinmedia.de/de/-/-/331246964},
- doi = {10.31030/3215258},
- shorttitle = {{DIN} {EN} {ISO} 10218-2},
- publisher = {{DIN} Media {GmbH}},
- urldate = {2025-02-19},
- langid = {german},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\M7E9L4CP\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf},
-}
-
-@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025-1,
- title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation},
- url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation},
- publisher = {Universal Robots A/S},
- urldate = {2025-02-17},
- date = {2025-02-13},
-}
-
-@article{haddadin_robot_2017-1,
- title = {Robot Collisions: A Survey on Detection, Isolation, and Identification},
- volume = {33},
- issn = {1941-0468},
- url = {https://ieeexplore.ieee.org/abstract/document/8059840},
- doi = {10.1109/TRO.2017.2723903},
- shorttitle = {Robot Collisions},
- abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.},
- pages = {1292--1312},
- number = {6},
- journaltitle = {{IEEE} Transactions on Robotics},
- author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin},
- urldate = {2025-02-12},
- date = {2017-12},
- keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots},
- file = {Accepted Version:C\:\\Users\\Rene\\Zotero\\storage\\BGZ6TUWR\\Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\UJAHJUBE\\8059840.html:text/html},
-}
-
-@book{hertzberg_mobile_2012-1,
- location = {Berlin, Heidelberg},
- title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik},
- isbn = {978-3-642-01725-4 978-3-642-01726-1},
- url = {https://link.springer.com/10.1007/978-3-642-01726-1},
- series = {{eXamen}.press},
- shorttitle = {Mobile Roboter},
- publisher = {Springer Berlin Heidelberg},
- author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas},
- urldate = {2025-02-12},
- date = {2012},
- doi = {10.1007/978-3-642-01726-1},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\4LFEHVEK\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf},
-}
-
-@collection{hering_sensoren_2018-1,
- location = {Wiesbaden},
- title = {Sensoren in Wissenschaft und Technik},
- isbn = {978-3-658-12561-5 978-3-658-12562-2},
- url = {http://link.springer.com/10.1007/978-3-658-12562-2},
- publisher = {Springer Fachmedien Wiesbaden},
- editor = {Hering, Ekbert and Schönfelder, Gert},
- urldate = {2025-02-12},
- date = {2018},
- doi = {10.1007/978-3-658-12562-2},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\BG7FCKRW\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf},
-}
-
-@article{saudabayev_sensors_2015-1,
- title = {Sensors for Robotic Hands: A Survey of State of the Art},
- volume = {3},
- issn = {2169-3536},
- url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549},
- doi = {10.1109/ACCESS.2015.2482543},
- shorttitle = {Sensors for Robotic Hands},
- abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.},
- pages = {1765--1782},
- journaltitle = {{IEEE} Access},
- author = {Saudabayev, Artur and Varol, Huseyin Atakan},
- urldate = {2025-02-12},
- date = {2015},
- keywords = {Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, Robot sensing systems, robotic hands, Robots, sensors, Sensors},
- file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\CDE3NZ3S\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\BEAGLR7C\\7283549.html:text/html},
-}
-
-@article{paya_state---art_2017-1,
- title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors},
- volume = {2017},
- issn = {1687-7268},
- url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650},
- doi = {10.1155/2017/3497650},
- abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.},
- pages = {3497650},
- number = {1},
- journaltitle = {Journal of Sensors},
- author = {Payá, L. and Gil, A. and Reinoso, O.},
- urldate = {2025-02-12},
- date = {2017},
- file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\G2QJUK53\\Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\PSAFIKFD\\3497650.html:text/html},
-}
-
-@online{noauthor_vl53l7cx_nodate-1,
- title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}},
- url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html},
- abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}},
- urldate = {2025-02-12},
- file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\BKZM82KQ\\vl53l7cx.html:text/html},
-}
-
-@online{noauthor_pico-series_nodate-1,
- title = {Pico-series Microcontrollers - Raspberry Pi Documentation},
- url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html},
- abstract = {The official documentation for Raspberry Pi computers and microcontrollers},
- urldate = {2025-02-12},
- file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\VKDULUUX\\pico-series.html:text/html},
-}
-
-@online{noauthor_chatgpt_nodate-1,
- title = {{ChatGPT}},
- url = {https://chatgpt.com},
- abstract = {A conversational {AI} system that listens, learns, and challenges},
- urldate = {2025-02-12},
- file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\R25VDLY2\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html},
-}
-
-@software{iii_earlephilhowerarduino-pico_2025-1,
- title = {earlephilhower/arduino-pico},
- url = {https://github.com/earlephilhower/arduino-pico},
- abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards},
- author = {{III}, Earle F. Philhower},
- urldate = {2025-02-12},
- date = {2025-02-11},
- keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi},
-}
-
-@online{noauthor_tutorials_nodate-1,
- title = {Tutorials — {ROS} 2 Documentation: Humble documentation},
- url = {https://docs.ros.org/en/humble/Tutorials.html},
- urldate = {2025-02-12},
- file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\Rene\\Zotero\\storage\\HQ4G28QE\\Tutorials.html:text/html},
-}
-
-@online{noauthor_examples_nodate-1,
- title = {Examples - trimesh 4.6.2 documentation},
- url = {https://trimesh.org/examples.html},
- urldate = {2025-02-12},
- file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\Rene\\Zotero\\storage\\SURMD6VT\\examples.html:text/html},
-}
-
-@software{grans_sebastiangransros2-point-cloud-demo_2024-1,
- title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo},
- url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo},
- abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2},
- author = {Grans, Sebastian},
- urldate = {2025-02-12},
- date = {2024-12-08},
-}
-
-@article{wunderlich_rasante_2013-1,
- title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren},
- volume = {8},
- issn = {1863-1460, 2191-1975},
- url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018},
- doi = {10.1002/opph.201300018},
- shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung},
- abstract = {Abstract Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.},
- pages = {38--40},
- number = {3},
- journaltitle = {Optik \& Photonik},
- shortjournal = {Optik \& Photonik},
- author = {Wunderlich, Max},
- urldate = {2025-02-18},
- date = {2013-09},
- file = {Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\Rene\\Zotero\\storage\\JZJDENIL\\Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf},
-}
-
-@article{li_common_2019-1,
- title = {Common Sensors in Industrial Robots: A Review},
- volume = {1267},
- issn = {1742-6588, 1742-6596},
- url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036},
- doi = {10.1088/1742-6596/1267/1/012036},
- shorttitle = {Common Sensors in Industrial Robots},
- abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.},
- pages = {012036},
- number = {1},
- journaltitle = {Journal of Physics: Conference Series},
- shortjournal = {J. Phys.: Conf. Ser.},
- author = {Li, Peng and Liu, Xiangpeng},
- urldate = {2025-02-18},
- date = {2019-07-01},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\WQ5C229K\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf},
-}
-
-@misc{noauthor_din_nodate-2,
- title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020},
- url = {https://www.dinmedia.de/de/-/-/331246964},
- shorttitle = {{DIN} {EN} {ISO} 10218-2},
- publisher = {{DIN} Media {GmbH}},
- urldate = {2025-02-19},
- doi = {10.31030/3215258},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\HB28M28Z\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf},
-}
-
-@misc{noauthor_din_nodate-3,
- title = {{DIN} {EN} {ISO} 10218-1:2021-09, Robotik\_- Sicherheitsanforderungen\_- Teil\_1: Industrieroboter ({ISO}/{DIS}\_10218-1.2:2021); Deutsche und Englische Fassung {prEN}\_ISO\_10218-1:2021},
- url = {https://www.dinmedia.de/de/-/-/341406648},
- doi = {10.31030/3272912},
- shorttitle = {{DIN} {EN} {ISO} 10218-1},
- publisher = {{DIN} Media {GmbH}},
- urldate = {2025-02-19},
- langid = {german},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\FFMUVR22\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf},
-}
-
-@misc{noauthor_robotics_2021,
- title = {Robotics - Vocabulary},
- url = {https://www.dinmedia.de/de/norm/iso-8373/348036781},
- shorttitle = {{ISO} 8373:2021-11},
- publisher = {{DIN} Media {GmbH}},
- urldate = {2025-02-19},
- date = {2021-11},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\6SUCZU6R\\DIN_EN_ISO_8373.pdf:application/pdf},
-}
-
-@inproceedings{popov_collision_2017,
- location = {Lisbon},
- title = {Collision detection, localization \& classification for industrial robots with joint torque sensors},
- isbn = {978-1-5386-3518-6},
- url = {http://ieeexplore.ieee.org/document/8172400/},
- doi = {10.1109/ROMAN.2017.8172400},
- abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.},
- eventtitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
- pages = {838--843},
- booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})},
- publisher = {{IEEE}},
- author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos},
- urldate = {2025-02-19},
- date = {2017-08},
- langid = {english},
- file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\LVC2B7U6\\Popov et al. - 2017 - Collision detection, localization & classification for industrial robots with joint torque sensors.pdf:application/pdf},
-}
diff --git a/Bachelorarbeit/Bachelorarbeit/ba.acn b/Bachelorarbeit/Bachelorarbeit/ba.acn
index 5c30dabc5b913a7f7108995b32738f6cef8d525e..e58a04bb3e2c1a0d3cf94e015c86609dde24b734 100644
--- a/Bachelorarbeit/Bachelorarbeit/ba.acn
+++ b/Bachelorarbeit/Bachelorarbeit/ba.acn
@@ -1,26 +1,9 @@
 \glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{3}
 \glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{3}
 \glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{3}
+\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{3}
 \glossaryentry{Cobot?\glossentry{Cobot}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{PC?\glossentry{PC}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{3}
 \glossaryentry{Cobot?\glossentry{Cobot}|setentrycounter[]{page}"\glsnumberformat}{3}
-\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{4}
-\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{6}
-\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{6}
 \glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{6}
 \glossaryentry{RGB-D?\glossentry{RGB-D}|setentrycounter[]{page}"\glsnumberformat}{7}
 \glossaryentry{RGB-D?\glossentry{RGB-D}|setentrycounter[]{page}"\glsnumberformat}{7}
@@ -48,55 +31,56 @@
 \glossaryentry{FoV?\glossentry{FOV}|setentrycounter[]{page}"\glsnumberformat}{10}
 \glossaryentry{IC?\glossentry{IC}|setentrycounter[]{page}"\glsnumberformat}{10}
 \glossaryentry{IC?\glossentry{IC}|setentrycounter[]{page}"\glsnumberformat}{10}
-\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{11}
-\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{11}
-\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{11}
-\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{11}
-\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{11}
-\glossaryentry{RVIZ2?\glossentry{RVIZ}|setentrycounter[]{page}"\glsnumberformat}{11}
-\glossaryentry{RVIZ2?\glossentry{RVIZ}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{FoV?\glossentry{FOV}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{11} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{12} -\glossaryentry{LPn?\glossentry{LPN}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{10} +\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{12} \glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{12} \glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{RVIZ2?\glossentry{RVIZ}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{RVIZ2?\glossentry{RVIZ}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{FoV?\glossentry{FOV}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{MCU?\glossentry{MCU}|setentrycounter[]{page}"\glsnumberformat}{12} \glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{12} -\glossaryentry{LPn?\glossentry{LPN}|setentrycounter[]{page}"\glsnumberformat}{12} -\glossaryentry{ToF-Sensor?\glossentry{ToF}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{LED?\glossentry{LED}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{LED?\glossentry{LED}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{13} +\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{12} 
+\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{12} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{12} \glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{13} -\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{14} -\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{14} -\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{14} -\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{14} -\glossaryentry{USB?\glossentry{USB}|setentrycounter[]{page}"\glsnumberformat}{15} -\glossaryentry{USB?\glossentry{USB}|setentrycounter[]{page}"\glsnumberformat}{15} +\glossaryentry{LPn?\glossentry{LPN}|setentrycounter[]{page}"\glsnumberformat}{13} +\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{13} +\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{13} +\glossaryentry{I$^2$C?\glossentry{I2C}|setentrycounter[]{page}"\glsnumberformat}{13} +\glossaryentry{LPn?\glossentry{LPN}|setentrycounter[]{page}"\glsnumberformat}{13} +\glossaryentry{ToF-Sensor?\glossentry{ToF}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{LED?\glossentry{LED}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{LED?\glossentry{LED}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{NUC?\glossentry{NUC}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{14} +\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{15} \glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{15} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{15} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{15} -\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{15} \glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{15} \glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{15} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{15} -\glossaryentry{pcd?\glossentry{PCD}|setentrycounter[]{page}"\glsnumberformat}{15} -\glossaryentry{pcd?\glossentry{PCD}|setentrycounter[]{page}"\glsnumberformat}{15} +\glossaryentry{USB?\glossentry{USB}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{USB?\glossentry{USB}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{JSON?\glossentry{JSON}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{ROS2?\glossentry{ROS}|setentrycounter[]{page}"\glsnumberformat}{16} 
\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{16} -\glossaryentry{RVIZ2?\glossentry{RVIZ}|setentrycounter[]{page}"\glsnumberformat}{16} -\glossaryentry{LiDAR?\glossentry{LIDAR}|setentrycounter[]{page}"\glsnumberformat}{17} +\glossaryentry{pcd?\glossentry{PCD}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{pcd?\glossentry{PCD}|setentrycounter[]{page}"\glsnumberformat}{16} +\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{17} +\glossaryentry{RVIZ2?\glossentry{RVIZ}|setentrycounter[]{page}"\glsnumberformat}{17} +\glossaryentry{LiDAR?\glossentry{LIDAR}|setentrycounter[]{page}"\glsnumberformat}{18} diff --git a/Bachelorarbeit/Bachelorarbeit/ba.aux b/Bachelorarbeit/Bachelorarbeit/ba.aux index b4d4fef588bd3d14be779822f6efb68cd9d3b3ee..5e9e59ccfc33c38eee3838913461a57fdf71c139 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.aux +++ b/Bachelorarbeit/Bachelorarbeit/ba.aux @@ -33,28 +33,23 @@ \BKM@entry{id=1,dest={636861707465722E31},srcline={2}}{5C3337365C3337375C303030455C303030695C3030306E5C3030306C5C303030655C303030695C303030745C303030755C3030306E5C30303067} \citation{vogel-heuser_von_2023} \citation{noauthor_iidea_nodate} -\BKM@entry{id=2,dest={73656374696F6E2E312E31},srcline={21}}{5C3337365C3337375C3030304D5C3030306F5C303030745C303030695C303030765C303030615C303030745C303030695C3030306F5C3030306E} -\citation{noauthor_robotics_2021} +\citation{haddadin_physical_2016} \citation{popov_collision_2017} -\citation{li_common_2019} -\citation{noauthor_vl53l7cx_nodate} -\citation{hering_sensoren_2018} +\BKM@entry{id=2,dest={73656374696F6E2E312E31},srcline={18}}{5C3337365C3337375C3030305A5C303030695C303030655C3030306C5C303030735C303030655C303030745C3030307A5C303030755C3030306E5C30303067} \@writefile{toc}{\contentsline {chapter}{\numberline {1}Einleitung}{3}{chapter.1}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} \@writefile{lol}{\addvspace {10\p@ }} -\BKM@entry{id=3,dest={73656374696F6E2E312E32},srcline={37}}{5C3337365C3337375C3030305A5C303030695C303030655C3030306C5C303030735C303030655C303030745C3030307A5C303030755C3030306E5C30303067} -\BKM@entry{id=4,dest={73656374696F6E2E312E33},srcline={41}}{5C3337365C3337375C303030465C3030306F5C303030725C303030735C303030635C303030685C303030755C3030306E5C303030675C303030735C303030665C303030725C303030615C303030675C30303065} -\@writefile{toc}{\contentsline {section}{\numberline {1.1}Motivation}{4}{section.1.1}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {1.2}Zielsetzung}{4}{section.1.2}\protected@file@percent } -\BKM@entry{id=5,dest={73656374696F6E2E312E34},srcline={44}}{5C3337365C3337375C303030415C303030755C303030665C303030625C303030615C303030755C3030305C3034305C303030645C303030655C303030725C3030305C3034305C303030415C303030725C303030625C303030655C303030695C30303074} -\@writefile{toc}{\contentsline {section}{\numberline {1.3}Forschungsfrage}{5}{section.1.3}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {1.4}Aufbau der Arbeit}{5}{section.1.4}\protected@file@percent } -\BKM@entry{id=6,dest={636861707465722E32},srcline={48}}{5C3337365C3337375C303030535C303030745C303030615C3030306E5C303030645C3030305C3034305C303030645C303030655C303030725C3030305C3034305C303030545C303030655C303030635C303030685C3030306E5C303030695C3030306B} 
+\BKM@entry{id=3,dest={73656374696F6E2E312E32},srcline={26}}{5C3337365C3337375C303030465C3030306F5C303030725C303030735C303030635C303030685C303030755C3030306E5C303030675C303030735C303030665C303030725C303030615C303030675C303030655C3030306E}
+\BKM@entry{id=4,dest={73656374696F6E2E312E33},srcline={36}}{5C3337365C3337375C303030415C303030755C303030665C303030625C303030615C303030755C3030305C3034305C303030645C303030655C303030725C3030305C3034305C303030415C303030725C303030625C303030655C303030695C30303074}
+\@writefile{toc}{\contentsline {section}{\numberline {1.1}Zielsetzung}{4}{section.1.1}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {1.2}Forschungsfragen}{4}{section.1.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {1.3}Aufbau der Arbeit}{5}{section.1.3}\protected@file@percent }
+\BKM@entry{id=5,dest={636861707465722E32},srcline={40}}{5C3337365C3337375C303030535C303030745C303030615C3030306E5C303030645C3030305C3034305C303030645C303030655C303030725C3030305C3034305C303030545C303030655C303030635C303030685C3030306E5C303030695C3030306B}
 \citation{noauthor_can_nodate}
 \citation{noauthor_can_nodate}
 \citation{liu_application_2024}
-\BKM@entry{id=7,dest={73656374696F6E2E322E31},srcline={58}}{5C3337365C3337375C3030304B5C3030306F5C3030306C5C3030306C5C303030695C303030735C303030695C3030306F5C3030306E5C303030735C303030765C303030655C303030725C3030306D5C303030655C303030695C303030645C303030755C3030306E5C303030675C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C3030304B5C3030306F5C3030306C5C3030306C5C303030695C303030735C303030695C3030306F5C3030306E5C303030735C303030655C303030725C3030306B5C303030655C3030306E5C3030306E5C303030755C3030306E5C30303067}
+\BKM@entry{id=6,dest={73656374696F6E2E322E31},srcline={50}}{5C3337365C3337375C3030304B5C3030306F5C3030306C5C3030306C5C303030695C303030735C303030695C3030306F5C3030306E5C303030735C303030765C303030655C303030725C3030306D5C303030655C303030695C303030645C303030755C3030306E5C303030675C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C3030304B5C3030306F5C3030306C5C3030306C5C303030695C303030735C303030695C3030306F5C3030306E5C303030735C303030655C303030725C3030306B5C303030655C3030306E5C3030306E5C303030755C3030306E5C30303067}
 \citation{popov_collision_2017}
 \citation{noauthor_robotics_2021}
 \citation{al_naser_fusion_2022}
@@ -66,8 +61,8 @@
 \@writefile{lol}{\addvspace {10\p@ }}
 \@writefile{lof}{\contentsline {figure}{\numberline {\textbf 2-1}{\ignorespaces Market Outlook for Cobots. Source:\cite {noauthor_can_nodate}}}{6}{figure.2.1}\protected@file@percent }
 \newlabel{Cobot Growth}{{\textbf 2-1}{6}{Market Outlook for Cobots. Source:\cite {noauthor_can_nodate}}{figure.2.1}{}}
-\@writefile{toc}{\contentsline {section}{\numberline {2.1}Kollisionsvermeidung und Kollisionserkennung}{7}{section.2.1}\protected@file@percent }
-\BKM@entry{id=8,dest={73656374696F6E2E322E32},srcline={75}}{5C3337365C3337375C3030304C5C303030615C303030735C303030655C303030725C303030735C303030635C303030615C3030306E5C3030306E5C303030655C303030725C3030305C3034305C3030305C3035305C3030304C5C303030695C303030445C303030415C303030525C3030305C303531}
+\@writefile{toc}{\contentsline {section}{\numberline {2.1}Kollisionsvermeidung und Kollisionserkennung}{6}{section.2.1}\protected@file@percent }
+\BKM@entry{id=7,dest={73656374696F6E2E322E32},srcline={67}}{5C3337365C3337375C3030304C5C303030615C303030735C303030655C303030725C303030735C303030635C303030615C3030306E5C3030306E5C303030655C303030725C3030305C3034305C3030305C3035305C3030304C5C303030695C303030445C303030415C303030525C3030305C303531}
 \citation{noauthor_file20200501_2020}
 \citation{noauthor_file20200501_2020}
 \citation{li_common_2019}
@@ -88,113 +83,96 @@
 \newlabel{ToF Explained}{{\textbf 2-2}{8}{The way a ToF-Sensor works, Source:\cite {noauthor_file20200501_2020}}{figure.2.2}{}}
 \@writefile{lof}{\contentsline {figure}{\numberline {\textbf 2-3}{\ignorespaces Beispiel für einen Optomechanischen 2D-\acrshort {LIDAR}, Source:\cite {raj_survey_2020}}}{9}{figure.2.3}\protected@file@percent }
 \newlabel{Opto LiDAR Example}{{\textbf 2-3}{9}{Beispiel für einen Optomechanischen 2D-\acrshort {LIDAR}, Source:\cite {raj_survey_2020}}{figure.2.3}{}}
+\BKM@entry{id=8,dest={73656374696F6E2E322E33},srcline={111}}{5C3337365C3337375C303030525C303030615C303030685C3030306D5C303030655C3030306E}
 \@writefile{lof}{\contentsline {figure}{\numberline {\textbf 2-4}{\ignorespaces Package eines VL53L7CX, Source:\cite {noauthor_vl53l7cx_nodate}}}{10}{figure.2.4}\protected@file@percent }
 \newlabel{VL53L7CX Package}{{\textbf 2-4}{10}{Package eines VL53L7CX, Source:\cite {noauthor_vl53l7cx_nodate}}{figure.2.4}{}}
-\BKM@entry{id=9,dest={636861707465722E33},srcline={120}}{5C3337365C3337375C303030555C3030306D5C303030735C303030655C303030745C3030307A5C303030755C3030306E5C30303067}
-\BKM@entry{id=10,dest={73656374696F6E2E332E31},srcline={121}}{5C3337365C3337375C303030565C3030306F5C303030725C303030675C303030655C303030685C303030655C3030306E5C303030735C303030775C303030655C303030695C303030735C30303065}
+\@writefile{toc}{\contentsline {section}{\numberline {2.3}Rahmen}{10}{section.2.3}\protected@file@percent }
+\BKM@entry{id=9,dest={636861707465722E33},srcline={116}}{5C3337365C3337375C303030415C3030306E5C303030665C3030306F5C303030725C303030645C303030655C303030725C303030755C3030306E5C303030675C303030655C3030306E}
+\@writefile{toc}{\contentsline {chapter}{\numberline {3}Anforderungen}{11}{chapter.3}\protected@file@percent }
+\@writefile{lof}{\addvspace {10\p@ }}
+\@writefile{lot}{\addvspace {10\p@ }}
+\@writefile{lol}{\addvspace {10\p@ }}
+\BKM@entry{id=10,dest={636861707465722E34},srcline={123}}{5C3337365C3337375C303030555C3030306D5C303030735C303030655C303030745C3030307A5C303030755C3030306E5C30303067}
+\BKM@entry{id=11,dest={73656374696F6E2E342E31},srcline={124}}{5C3337365C3337375C303030565C3030306F5C303030725C303030675C303030655C303030685C303030655C3030306E5C303030735C303030775C303030655C303030695C303030735C30303065}
 \citation{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate}
 \citation{noauthor_pico-series_nodate}
 \citation{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025}
 \citation{noauthor_vl53l7cx_nodate}
-\BKM@entry{id=11,dest={73656374696F6E2E332E32},srcline={127}}{5C3337365C3337375C303030535C3030306F5C303030665C303030745C303030775C303030615C303030725C30303065} -\BKM@entry{id=12,dest={73756273656374696F6E2E332E322E31},srcline={128}}{5C3337365C3337375C303030415C303030725C303030645C303030755C303030695C3030306E5C3030306F} -\@writefile{toc}{\contentsline {chapter}{\numberline {3}Umsetzung}{11}{chapter.3}\protected@file@percent } +\BKM@entry{id=12,dest={73656374696F6E2E342E32},srcline={130}}{5C3337365C3337375C303030535C3030306F5C303030665C303030745C303030775C303030615C303030725C30303065} +\BKM@entry{id=13,dest={73756273656374696F6E2E342E322E31},srcline={131}}{5C3337365C3337375C303030415C303030725C303030645C303030755C303030695C3030306E5C3030306F} +\@writefile{toc}{\contentsline {chapter}{\numberline {4}Umsetzung}{12}{chapter.4}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} \@writefile{lol}{\addvspace {10\p@ }} -\@writefile{toc}{\contentsline {section}{\numberline {3.1}Vorgehensweise}{11}{section.3.1}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {3.2}Software}{11}{section.3.2}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.1}Arduino}{11}{subsection.3.2.1}\protected@file@percent } -\newlabel{Snippet LPN}{{3.1}{12}{Funktion zum setzen eines LPn Pin}{lstlisting.3.1}{}} -\@writefile{lol}{\contentsline {lstlisting}{\numberline {3.1}Funktion zum setzen eines LPn Pin}{12}{lstlisting.3.1}\protected@file@percent } -\newlabel{Snippet Init}{{3.2}{12}{Funktion zum initialisieren der Sensoren im Setup}{lstlisting.3.2}{}} -\@writefile{lol}{\contentsline {lstlisting}{\numberline {3.2}Funktion zum initialisieren der Sensoren im Setup}{12}{lstlisting.3.2}\protected@file@percent } -\newlabel{Snippet Prozess}{{3.3}{13}{processSensorData - Verarbeitung der Sensordaten}{lstlisting.3.3}{}} -\@writefile{lol}{\contentsline {lstlisting}{\numberline {3.3}processSensorData - Verarbeitung der Sensordaten}{13}{lstlisting.3.3}\protected@file@percent } -\BKM@entry{id=13,dest={73756273656374696F6E2E332E322E32},srcline={223}}{5C3337365C3337375C303030525C3030306F5C303030625C3030306F5C303030745C3030305C3034305C3030304F5C303030705C303030655C303030725C303030615C303030745C303030695C3030306E5C303030675C3030305C3034305C303030535C303030795C303030735C303030745C303030655C3030306D5C3030305C3034305C30303032} +\@writefile{toc}{\contentsline {section}{\numberline {4.1}Vorgehensweise}{12}{section.4.1}\protected@file@percent } +\@writefile{toc}{\contentsline {section}{\numberline {4.2}Software}{12}{section.4.2}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.1}Arduino}{12}{subsection.4.2.1}\protected@file@percent } +\newlabel{Snippet LPN}{{4.1}{13}{Funktion zum setzen eines LPn Pin}{lstlisting.4.1}{}} +\@writefile{lol}{\contentsline {lstlisting}{\numberline {4.1}Funktion zum setzen eines LPn Pin}{13}{lstlisting.4.1}\protected@file@percent } +\newlabel{Snippet Init}{{4.2}{13}{Funktion zum initialisieren der Sensoren im Setup}{lstlisting.4.2}{}} +\@writefile{lol}{\contentsline {lstlisting}{\numberline {4.2}Funktion zum initialisieren der Sensoren im Setup}{13}{lstlisting.4.2}\protected@file@percent } +\newlabel{Snippet Prozess}{{4.3}{14}{processSensorData - Verarbeitung der Sensordaten}{lstlisting.4.3}{}} +\@writefile{lol}{\contentsline {lstlisting}{\numberline {4.3}processSensorData - Verarbeitung der Sensordaten}{14}{lstlisting.4.3}\protected@file@percent } 
+\BKM@entry{id=14,dest={73756273656374696F6E2E342E322E32},srcline={226}}{5C3337365C3337375C303030525C3030306F5C303030625C3030306F5C303030745C3030305C3034305C3030304F5C303030705C303030655C303030725C303030615C303030745C303030695C3030306E5C303030675C3030305C3034305C303030535C303030795C303030735C303030745C303030655C3030306D5C3030305C3034305C30303032} \citation{noauthor_tutorials_nodate} \citation{noauthor_tutorials_nodate} -\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.2}Robot Operating System 2}{14}{subsection.3.2.2}\protected@file@percent } -\@writefile{lof}{\contentsline {figure}{\numberline {\textbf 3-1}{\ignorespaces Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}}{15}{figure.3.1}\protected@file@percent } -\newlabel{Topic_Viz}{{\textbf 3-1}{15}{Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}{figure.3.1}{}} -\BKM@entry{id=14,dest={73756273656374696F6E2E332E322E33},srcline={241}}{5C3337365C3337375C303030525C303030565C303030495C3030305A5C303030325C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C303030475C303030615C3030307A5C303030655C303030625C3030306F5C3030305C3034305C303030435C3030306C5C303030615C303030735C303030735C303030695C30303063} +\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.2}Robot Operating System 2}{15}{subsection.4.2.2}\protected@file@percent } +\@writefile{lof}{\contentsline {figure}{\numberline {\textbf 4-1}{\ignorespaces Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}}{16}{figure.4.1}\protected@file@percent } +\newlabel{Topic_Viz}{{\textbf 4-1}{16}{Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}{figure.4.1}{}} +\BKM@entry{id=15,dest={73756273656374696F6E2E342E322E33},srcline={244}}{5C3337365C3337375C303030525C303030565C303030495C3030305A5C303030325C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C303030475C303030615C3030307A5C303030655C303030625C3030306F5C3030305C3034305C303030435C3030306C5C303030615C303030735C303030735C303030695C30303063} \citation{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025} -\BKM@entry{id=15,dest={73656374696F6E2E332E33},srcline={254}}{5C3337365C3337375C303030485C303030615C303030725C303030645C303030775C303030615C303030725C30303065} -\BKM@entry{id=16,dest={73756273656374696F6E2E332E332E31},srcline={255}}{5C3337365C3337375C303030455C3030306C5C303030655C3030306B5C303030745C303030725C3030306F5C3030306E5C303030695C303030735C303030635C30303068} -\BKM@entry{id=17,dest={73756273656374696F6E2E332E332E32},srcline={256}}{5C3337365C3337375C3030304D5C303030655C303030635C303030685C303030615C3030306E5C303030695C303030735C303030635C30303068} -\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.3}RVIZ2 und Gazebo Classic}{16}{subsection.3.2.3}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {3.3}Hardware}{16}{section.3.3}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.1}Elektronisch}{16}{subsection.3.3.1}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.2}Mechanisch}{16}{subsection.3.3.2}\protected@file@percent } -\BKM@entry{id=18,dest={636861707465722E34},srcline={257}}{5C3337365C3337375C303030465C303030615C3030307A5C303030695C303030745C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C303030415C303030755C303030735C303030625C3030306C5C303030695C303030635C3030306B} 
+\BKM@entry{id=16,dest={73656374696F6E2E342E33},srcline={257}}{5C3337365C3337375C303030485C303030615C303030725C303030645C303030775C303030615C303030725C30303065} +\BKM@entry{id=17,dest={73756273656374696F6E2E342E332E31},srcline={258}}{5C3337365C3337375C303030455C3030306C5C303030655C3030306B5C303030745C303030725C3030306F5C3030306E5C303030695C303030735C303030635C30303068} +\BKM@entry{id=18,dest={73756273656374696F6E2E342E332E32},srcline={259}}{5C3337365C3337375C3030304D5C303030655C303030635C303030685C303030615C3030306E5C303030695C303030735C303030635C30303068} +\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.3}RVIZ2 und Gazebo Classic}{17}{subsection.4.2.3}\protected@file@percent } +\@writefile{toc}{\contentsline {section}{\numberline {4.3}Hardware}{17}{section.4.3}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.1}Elektronisch}{17}{subsection.4.3.1}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.2}Mechanisch}{17}{subsection.4.3.2}\protected@file@percent } +\BKM@entry{id=19,dest={636861707465722E35},srcline={260}}{5C3337365C3337375C303030465C303030615C3030307A5C303030695C303030745C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C303030415C303030755C303030735C303030625C3030306C5C303030695C303030635C3030306B} \citation{noauthor_can_nodate} \citation{noauthor_file20200501_2020} \citation{raj_survey_2020} \citation{noauthor_vl53l7cx_nodate} \citation{noauthor_tutorials_nodate} -\@writefile{toc}{\contentsline {chapter}{\numberline {4}Fazit und Ausblick}{17}{chapter.4}\protected@file@percent } +\@writefile{toc}{\contentsline {chapter}{\numberline {5}Fazit und Ausblick}{18}{chapter.5}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} \@writefile{lol}{\addvspace {10\p@ }} -\BKM@entry{id=19,dest={617070656E6469782E41},srcline={1}}{5C3337365C3337375C303030515C303030755C303030655C3030306C5C3030306C5C303030635C3030306F5C303030645C30303065} -\BKM@entry{id=20,dest={73656374696F6E2E412E31},srcline={2}}{5C3337365C3337375C303030415C303030725C303030645C303030755C303030695C3030306E5C3030306F5C3030302D5C303030515C303030755C303030655C3030306C5C3030306C5C303030635C3030306F5C303030645C30303065} -\@writefile{toc}{\contentsline {chapter}{\numberline {A}Quellcode}{18}{appendix.A}\protected@file@percent } +\BKM@entry{id=20,dest={617070656E6469782E41},srcline={1}}{5C3337365C3337375C303030515C303030755C303030655C3030306C5C3030306C5C303030635C3030306F5C303030645C30303065} +\BKM@entry{id=21,dest={73656374696F6E2E412E31},srcline={2}}{5C3337365C3337375C303030415C303030725C303030645C303030755C303030695C3030306E5C3030306F5C3030302D5C303030515C303030755C303030655C3030306C5C3030306C5C303030635C3030306F5C303030645C30303065} +\@writefile{toc}{\contentsline {chapter}{\numberline {A}Quellcode}{19}{appendix.A}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} \@writefile{lol}{\addvspace {10\p@ }} -\@writefile{toc}{\contentsline {section}{\numberline {A.1}Arduino-Quellcode}{18}{section.A.1}\protected@file@percent } -\@writefile{lol}{\contentsline {lstlisting}{18\textunderscore vl53l7cx\textunderscore clean\textunderscore with\textunderscore STlibrary\textunderscore Shift\textunderscore register\textunderscore 20250127154813.ino}{18}{lstlisting.A.-1}\protected@file@percent } 
-\BKM@entry{id=21,dest={73656374696F6E2E412E32},srcline={4}}{5C3337365C3337375C303030525C3030304F5C303030535C3030305C3034305C303030325C3030302D5C3030304E5C3030306F5C303030645C303030655C303030735C3030305C3034305C303030515C303030755C303030655C3030306C5C3030306C5C303030635C3030306F5C303030645C30303065}
-\BKM@entry{id=22,dest={73756273656374696F6E2E412E322E31},srcline={5}}{5C3337365C3337375C303030445C303030615C303030745C303030655C3030306E5C303030765C303030655C303030725C303030615C303030725C303030625C303030655C303030695C303030745C303030755C3030306E5C303030675C303030735C3030302D5C3030304E5C3030306F5C303030645C303030655C3030305C3034305C3030305C3035305C303030505C303030795C303030745C303030685C3030306F5C3030306E5C3030305C303531}
-\@writefile{toc}{\contentsline {section}{\numberline {A.2}ROS 2-Nodes Quellcode}{32}{section.A.2}\protected@file@percent }
-\@writefile{toc}{\contentsline {subsection}{\numberline {A.2.1}Datenverarbeitungs-Node (Python)}{32}{subsection.A.2.1}\protected@file@percent }
-\@writefile{lol}{\contentsline {lstlisting}{ser\textunderscore test\textunderscore node.py}{32}{lstlisting.A.-2}\protected@file@percent }
-\BKM@entry{id=23,dest={73756273656374696F6E2E412E322E32},srcline={7}}{5C3337365C3337375C3030304B5C3030306F5C3030306C5C3030306C5C303030695C303030735C303030695C3030306F5C3030306E5C303030735C303030655C303030725C3030306B5C303030655C3030306E5C3030306E5C303030755C3030306E5C303030675C303030735C3030302D5C3030304E5C3030306F5C303030645C303030655C3030305C3034305C3030305C3035305C303030505C303030795C303030745C303030685C3030306F5C3030306E5C3030305C303531}
-\@writefile{toc}{\contentsline {subsection}{\numberline {A.2.2}Kollisionserkennungs-Node (Python)}{37}{subsection.A.2.2}\protected@file@percent }
-\@writefile{lol}{\contentsline {lstlisting}{pcl\textunderscore rob\textunderscore node.py}{37}{lstlisting.A.-3}\protected@file@percent }
-\BKM@entry{id=24,dest={617070656E6469782E42},srcline={10}}{5C3337365C3337375C303030335C303030445C3030302D5C3030304D5C3030306F5C303030645C303030655C3030306C5C3030306C5C303030655C3030305C3034305C303030645C303030655C303030735C3030305C3034305C303030535C303030655C3030306E5C303030735C3030306F5C303030725C3030306D5C3030306F5C303030645C303030755C3030306C5C30303073}
-\@writefile{toc}{\contentsline {chapter}{\numberline {B}3D-Modelle des Sensormoduls}{45}{appendix.B}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {A.1}Arduino-Quellcode}{19}{section.A.1}\protected@file@percent }
+\@writefile{lol}{\contentsline {lstlisting}{18\textunderscore vl53l7cx\textunderscore clean\textunderscore with\textunderscore STlibrary\textunderscore Shift\textunderscore register\textunderscore 20250127154813.ino}{19}{lstlisting.A.-1}\protected@file@percent }
+\BKM@entry{id=22,dest={73656374696F6E2E412E32},srcline={4}}{5C3337365C3337375C303030525C3030304F5C303030535C3030305C3034305C303030325C3030302D5C3030304E5C3030306F5C303030645C303030655C303030735C3030305C3034305C303030515C303030755C303030655C3030306C5C3030306C5C303030635C3030306F5C303030645C30303065}
+\BKM@entry{id=23,dest={73756273656374696F6E2E412E322E31},srcline={5}}{5C3337365C3337375C303030445C303030615C303030745C303030655C3030306E5C303030765C303030655C303030725C303030615C303030725C303030625C303030655C303030695C303030745C303030755C3030306E5C303030675C303030735C3030302D5C3030304E5C3030306F5C303030645C303030655C3030305C3034305C3030305C3035305C303030505C303030795C303030745C303030685C3030306F5C3030306E5C3030305C303531}
+\@writefile{toc}{\contentsline {section}{\numberline {A.2}ROS 2-Nodes Quellcode}{33}{section.A.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {A.2.1}Datenverarbeitungs-Node (Python)}{33}{subsection.A.2.1}\protected@file@percent }
+\@writefile{lol}{\contentsline {lstlisting}{ser\textunderscore test\textunderscore node.py}{33}{lstlisting.A.-2}\protected@file@percent }
+\BKM@entry{id=24,dest={73756273656374696F6E2E412E322E32},srcline={7}}{5C3337365C3337375C3030304B5C3030306F5C3030306C5C3030306C5C303030695C303030735C303030695C3030306F5C3030306E5C303030735C303030655C303030725C3030306B5C303030655C3030306E5C3030306E5C303030755C3030306E5C303030675C303030735C3030302D5C3030304E5C3030306F5C303030645C303030655C3030305C3034305C3030305C3035305C303030505C303030795C303030745C303030685C3030306F5C3030306E5C3030305C303531}
+\@writefile{toc}{\contentsline {subsection}{\numberline {A.2.2}Kollisionserkennungs-Node (Python)}{38}{subsection.A.2.2}\protected@file@percent }
+\@writefile{lol}{\contentsline {lstlisting}{pcl\textunderscore rob\textunderscore node.py}{38}{lstlisting.A.-3}\protected@file@percent }
+\BKM@entry{id=25,dest={617070656E6469782E42},srcline={10}}{5C3337365C3337375C303030335C303030445C3030302D5C3030304D5C3030306F5C303030645C303030655C3030306C5C3030306C5C303030655C3030305C3034305C303030645C303030655C303030735C3030305C3034305C303030535C303030655C3030306E5C303030735C3030306F5C303030725C3030306D5C3030306F5C303030645C303030755C3030306C5C30303073}
+\@writefile{toc}{\contentsline {chapter}{\numberline {B}3D-Modelle des Sensormoduls}{46}{appendix.B}\protected@file@percent }
 \@writefile{lof}{\addvspace {10\p@ }}
 \@writefile{lot}{\addvspace {10\p@ }}
 \@writefile{lol}{\addvspace {10\p@ }}
-\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-1}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 1)}}{45}{figure.B.1}\protected@file@percent }
-\newlabel{fig:3d_modell_1}{{\textbf B-1}{45}{3D-Modell des Sensormoduls (Perspektive 1)}{figure.B.1}{}}
-\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-2}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 2)}}{46}{figure.B.2}\protected@file@percent }
-\newlabel{fig:3d_modell_2}{{\textbf B-2}{46}{3D-Modell des Sensormoduls (Perspektive 2)}{figure.B.2}{}}
-\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-3}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 3)}}{47}{figure.B.3}\protected@file@percent }
-\newlabel{fig:3d_modell_3}{{\textbf B-3}{47}{3D-Modell des Sensormoduls (Perspektive 3)}{figure.B.3}{}}
-\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-4}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 4)}}{48}{figure.B.4}\protected@file@percent }
-\newlabel{fig:3d_modell_4}{{\textbf B-4}{48}{3D-Modell des Sensormoduls (Perspektive 4)}{figure.B.4}{}}
-\BKM@entry{id=25,dest={617070656E6469782E43},srcline={36}}{5C3337365C3337375C303030565C303030655C303030725C303030735C303030755C303030635C303030685C303030735C303030615C303030755C303030665C303030625C303030615C30303075}
-\@writefile{toc}{\contentsline {chapter}{\numberline {C}Versuchsaufbau}{49}{appendix.C}\protected@file@percent }
+\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-1}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 1)}}{46}{figure.B.1}\protected@file@percent }
+\newlabel{fig:3d_modell_1}{{\textbf B-1}{46}{3D-Modell des Sensormoduls (Perspektive 1)}{figure.B.1}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-2}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 2)}}{47}{figure.B.2}\protected@file@percent }
+\newlabel{fig:3d_modell_2}{{\textbf B-2}{47}{3D-Modell des Sensormoduls (Perspektive 2)}{figure.B.2}{}} +\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-3}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 3)}}{48}{figure.B.3}\protected@file@percent } +\newlabel{fig:3d_modell_3}{{\textbf B-3}{48}{3D-Modell des Sensormoduls (Perspektive 3)}{figure.B.3}{}} +\@writefile{lof}{\contentsline {figure}{\numberline {\textbf B-4}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 4)}}{49}{figure.B.4}\protected@file@percent } +\newlabel{fig:3d_modell_4}{{\textbf B-4}{49}{3D-Modell des Sensormoduls (Perspektive 4)}{figure.B.4}{}} +\BKM@entry{id=26,dest={617070656E6469782E43},srcline={36}}{5C3337365C3337375C303030565C303030655C303030725C303030735C303030755C303030635C303030685C303030735C303030615C303030755C303030665C303030625C303030615C30303075} +\@writefile{toc}{\contentsline {chapter}{\numberline {C}Versuchsaufbau}{50}{appendix.C}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} \@writefile{lol}{\addvspace {10\p@ }} -\@writefile{lof}{\contentsline {figure}{\numberline {\textbf C-1}{\ignorespaces Versuchsaufbau mit zwei VL53L5CX}}{49}{figure.C.1}\protected@file@percent } -\newlabel{fig:versuchsaufbau}{{\textbf C-1}{49}{Versuchsaufbau mit zwei VL53L5CX}{figure.C.1}{}} -\bibdata{BA} -\bibcite{noauthor_can_nodate}{1} -\bibcite{noauthor_iidea_nodate}{2} -\bibcite{noauthor_pico-series_nodate}{3} -\bibcite{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate}{4} -\bibcite{noauthor_tutorials_nodate}{5} -\bibcite{noauthor_vl53l7cx_nodate}{6} -\bibcite{noauthor_vlp_nodate}{7} -\bibcite{noauthor_file20200501_2020}{8} -\bibcite{noauthor_robotics_2021}{9} -\bibcite{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025}{10} -\bibcite{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025}{11} -\bibcite{al_naser_fusion_2022}{12} -\bibcite{amaya-mejia_vision-based_2022}{13} -\bibcite{hering_sensoren_2018}{14} -\bibcite{jain_survey_nodate}{15} -\bibcite{li_common_2019}{16} -\bibcite{liu_application_2024}{17} -\bibcite{niclass_design_2012}{18} -\bibcite{popov_collision_2017}{19} -\bibcite{raj_survey_2020}{20} -\bibcite{rashid_local_2020}{21} -\bibcite{surmann_autonomous_2003}{22} -\bibcite{vogel-heuser_von_2023}{23} +\@writefile{lof}{\contentsline {figure}{\numberline {\textbf C-1}{\ignorespaces Versuchsaufbau mit zwei VL53L5CX}}{50}{figure.C.1}\protected@file@percent } +\newlabel{fig:versuchsaufbau}{{\textbf C-1}{50}{Versuchsaufbau mit zwei VL53L5CX}{figure.C.1}{}} \global\csname @altsecnumformattrue\endcsname -\gdef \@abspage@last{52} +\gdef \@abspage@last{50} diff --git a/Bachelorarbeit/Bachelorarbeit/ba.bbl b/Bachelorarbeit/Bachelorarbeit/ba.bbl index bd77876b6932de092a9e1918ac5915adfb70a4aa..eacf269fea4b3e2b62bbe212cc964ce2ec2a4b6b 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.bbl +++ b/Bachelorarbeit/Bachelorarbeit/ba.bbl @@ -1,4 +1,4 @@ -\begin{thebibliography}{10} +\begin{thebibliography}{} % this bibliography is generated by plaindin.bst [8.2] from 2005-12-21 @@ -7,181 +7,4 @@ \providecommand{\doi}[1]{doi: #1}\else \providecommand{\doi}{doi: \begingroup \urlstyle{rm}\Url}\fi -\bibitem[1]{noauthor_can_nodate} -\emph{Can the collaborative robot market experience a second growth surge in - the post-pandemic era?} -\newblock - \url{https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/} - -\bibitem[2]{noauthor_iidea_nodate} -\emph{{IIDEA} - 
{Inklusion} und {Integration} durch {Cobots} auf dem ersten - {Arbeitsmarkt} - {RWTH} {AACHEN} {UNIVERSITY} {IGMR} - {Deutsch}}. -\newblock - \url{https://www.igmr.rwth-aachen.de/cms/igmr/forschung/projekte/aktuelle-projekte/~baxrrf/iidea/} - -\bibitem[3]{noauthor_pico-series_nodate} -\emph{Pico-series {Microcontrollers} - {Raspberry} {Pi} {Documentation}}. -\newblock - \url{https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html} - -\bibitem[4]{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate} -\emph{tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · - adityakamath/tof\_imager\_micro\_ros · {GitHub}}. -\newblock - \url{https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher} - -\bibitem[5]{noauthor_tutorials_nodate} -\emph{Tutorials — {ROS} 2 {Documentation}: {Humble} documentation}. -\newblock \url{https://docs.ros.org/en/humble/Tutorials.html} - -\bibitem[6]{noauthor_vl53l7cx_nodate} -\emph{{VL53L7CX} - {Time}-of-{Flight} ({ToF}) 8x8 multizone ranging sensor with - 90 degrees {FoV} - {STMicroelectronics}}. -\newblock - \url{https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html} - -\bibitem[7]{noauthor_vlp_nodate} -\emph{{VLP} 16 {\textbar} {Ouster}}. -\newblock \url{https://ouster.com/products/hardware/vlp-16} - -\bibitem[8]{noauthor_file20200501_2020} -\emph{File:20200501 {Time} of flight.svg - {Wikipedia}}. -\newblock - \url{https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg}. -\newblock \,Version:\,Mai 2020 - -\bibitem[9]{noauthor_robotics_2021} -\emph{Robotics - {Vocabulary}}. -\newblock \url{https://www.dinmedia.de/de/norm/iso-8373/348036781}. -\newblock \,Version:\,November 2021 - -\bibitem[10]{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025} -\emph{sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library}. -\newblock \url{https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library}. -\newblock \,Version:\,Januar 2025. -- -\newblock original-date: 2021-10-22T21:06:36Z - -\bibitem[11]{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025} -\emph{{UniversalRobots}/{Universal}\_Robots\_ROS2\_GZ\_Simulation}. -\newblock - \url{https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation}. -\newblock \,Version:\,Februar 2025. -- -\newblock original-date: 2021-12-15T12:15:45Z - -\bibitem[12]{al_naser_fusion_2022} -\textsc{Al~Naser}, Ibrahim ; \textsc{Dahmen}, Johannes ; \textsc{Bdiwi}, - Mohamad ; \textsc{Ihlenfeldt}, Steffen: -\newblock Fusion of depth, color, and thermal images towards digital twins and - safe human interaction with a robot in an industrial environment. -\newblock {In: }\emph{2022 31st {IEEE} {International} {Conference} on {Robot} - and {Human} {Interactive} {Communication} ({RO}-{MAN})}, 2022, 532--537. -- -\newblock ISSN: 1944-9437 - -\bibitem[13]{amaya-mejia_vision-based_2022} -\textsc{Amaya-Mejía}, Lina~M. ; \textsc{Duque-Suárez}, Nicolás ; - \textsc{Jaramillo-Ramírez}, Daniel ; \textsc{Martinez}, Carol: -\newblock Vision-{Based} {Safety} {System} for {Barrierless} {Human}-{Robot} - {Collaboration}. -\newblock {In: }\emph{2022 {IEEE}/{RSJ} {International} {Conference} on - {Intelligent} {Robots} and {Systems} ({IROS})}, 2022, 7331--7336. -- -\newblock ISSN: 2153-0866 - -\bibitem[14]{hering_sensoren_2018} -\textsc{Hering}, Ekbert (Hrsg.) ; \textsc{Schönfelder}, Gert (Hrsg.): -\newblock \emph{Sensoren in {Wissenschaft} und {Technik}}. -\newblock Wiesbaden : Springer Fachmedien Wiesbaden, 2018. 
-\newblock \url{http://dx.doi.org/10.1007/978-3-658-12562-2}. -\newblock \url{http://dx.doi.org/10.1007/978-3-658-12562-2}. -- -\newblock ISBN 978--3--658--12561--5 978--3--658--12562--2 - -\bibitem[15]{jain_survey_nodate} -\textsc{Jain}, Siddharth: -\newblock A survey of {Laser} {Range} {Finding}. -\newblock \url{http://www.siddjain.com/ee236a.pdf} - -\bibitem[16]{li_common_2019} -\textsc{Li}, Peng ; \textsc{Liu}, Xiangpeng: -\newblock Common {Sensors} in {Industrial} {Robots}: {A} {Review}. -\newblock {In: }\emph{Journal of Physics: Conference Series} 1267 (2019), Juli, - Nr. 1, 012036. -\newblock \url{http://dx.doi.org/10.1088/1742-6596/1267/1/012036}. -- -\newblock DOI 10.1088/1742--6596/1267/1/012036. -- -\newblock ISSN 1742--6588, 1742--6596 - -\bibitem[17]{liu_application_2024} -\textsc{Liu}, Li ; \textsc{Guo}, Fu ; \textsc{Zou}, Zishuai ; \textsc{Duffy}, - Vincent~G.: -\newblock Application, {Development} and {Future} {Opportunities} of - {Collaborative} {Robots} ({Cobots}) in {Manufacturing}: {A} {Literature} - {Review}. -\newblock {In: }\emph{International Journal of Human–Computer Interaction} 40 - (2024), Februar, Nr. 4, 915--932. -\newblock \url{http://dx.doi.org/10.1080/10447318.2022.2041907}. -- -\newblock DOI 10.1080/10447318.2022.2041907. -- -\newblock ISSN 1044--7318. -- -\newblock Publisher: Taylor \& Francis \_eprint: - https://doi.org/10.1080/10447318.2022.2041907 - -\bibitem[18]{niclass_design_2012} -\textsc{Niclass}, Cristiano ; \textsc{Ito}, Kota ; \textsc{Soga}, Mineki ; - \textsc{Matsubara}, Hiroyuki ; \textsc{Aoyagi}, Isao ; \textsc{Kato}, Satoru - ; \textsc{Kagami}, Manabu: -\newblock Design and characterization of a 256x64-pixel single-photon imager in - {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor. -\newblock {In: }\emph{Optics Express} 20 (2012), Mai, Nr. 11, 11863--11881. -\newblock \url{http://dx.doi.org/10.1364/OE.20.011863}. -- -\newblock DOI 10.1364/OE.20.011863. -- -\newblock ISSN 1094--4087. -- -\newblock Publisher: Optica Publishing Group - -\bibitem[19]{popov_collision_2017} -\textsc{Popov}, Dmitry ; \textsc{Klimchik}, Alexandr ; \textsc{Mavridis}, - Nikolaos: -\newblock Collision detection, localization \& classification for industrial - robots with joint torque sensors. -\newblock {In: }\emph{2017 26th {IEEE} {International} {Symposium} on {Robot} - and {Human} {Interactive} {Communication} ({RO}-{MAN})}. -\newblock Lisbon : IEEE, August 2017. -- -\newblock ISBN 978--1--5386--3518--6, 838--843 - -\bibitem[20]{raj_survey_2020} -\textsc{Raj}, Thinal ; \textsc{Hashim}, Fazida~H. ; \textsc{Huddin}, Aqilah~B. - ; \textsc{Ibrahim}, Mohd~F. ; \textsc{Hussain}, Aini: -\newblock A {Survey} on {LiDAR} {Scanning} {Mechanisms}. -\newblock {In: }\emph{Electronics} 9 (2020), Mai, Nr. 5, 741. -\newblock \url{http://dx.doi.org/10.3390/electronics9050741}. -- -\newblock DOI 10.3390/electronics9050741. -- -\newblock ISSN 2079--9292. -- -\newblock Number: 5 Publisher: Multidisciplinary Digital Publishing Institute - -\bibitem[21]{rashid_local_2020} -\textsc{Rashid}, Aquib ; \textsc{Peesapati}, Kannan ; \textsc{Bdiwi}, Mohamad ; - \textsc{Krusche}, Sebastian ; \textsc{Hardt}, Wolfram ; \textsc{Putz}, - Matthias: -\newblock Local and {Global} {Sensors} for {Collision} {Avoidance}. 
-\newblock {In: }\emph{2020 {IEEE} {International} {Conference} on {Multisensor} - {Fusion} and {Integration} for {Intelligent} {Systems} ({MFI})}, 2020, - 354--359 - -\bibitem[22]{surmann_autonomous_2003} -\textsc{Surmann}, Hartmut ; \textsc{Nüchter}, Andreas ; \textsc{Hertzberg}, - Joachim: -\newblock An autonomous mobile robot with a {3D} laser range finder for {3D} - exploration and digitalization of indoor environments. -\newblock {In: }\emph{Robotics and Autonomous Systems} 45 (2003), Dezember, Nr. - 3, 181--198. -\newblock \url{http://dx.doi.org/10.1016/j.robot.2003.09.004}. -- -\newblock DOI 10.1016/j.robot.2003.09.004. -- -\newblock ISSN 0921--8890 - -\bibitem[23]{vogel-heuser_von_2023} -\textsc{Vogel-Heuser}, Birgit ; \textsc{Bengler}, Klaus: -\newblock Von {Industrie} 4.0 zu {Industrie} 5.0 – {Idee}, {Konzept} und - {Wahrnehmung}. -\newblock {In: }\emph{HMD Praxis der Wirtschaftsinformatik} 60 (2023), - Dezember, Nr. 6, 1124--1142. -\newblock \url{http://dx.doi.org/10.1365/s40702-023-01002-x}. -- -\newblock DOI 10.1365/s40702--023--01002--x. -- -\newblock ISSN 2198--2775 - \end{thebibliography} diff --git a/Bachelorarbeit/Bachelorarbeit/ba.blg b/Bachelorarbeit/Bachelorarbeit/ba.blg index e8a2b7b1c42f49b3b05f1de3f8bdcc4cf9326866..8b5b6d436f87433eb32517ad2f4c563aa8f3cf5b 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.blg +++ b/Bachelorarbeit/Bachelorarbeit/ba.blg @@ -2,63 +2,75 @@ This is BibTeX, Version 0.99d (TeX Live 2022/dev/Debian) Capacity: max_strings=200000, hash_size=200000, hash_prime=170003 The top-level auxiliary file: ba.aux The style file: plaindin.bst +I found no \bibdata command---while reading file ba.aux Reallocated singl_function (elt_size=4) to 100 items from 50. Reallocated singl_function (elt_size=4) to 100 items from 50. Reallocated wiz_functions (elt_size=4) to 6000 items from 3000. Reallocated singl_function (elt_size=4) to 100 items from 50. Reallocated singl_function (elt_size=4) to 100 items from 50. 
-Database file #1: BA.bib -Warning--to sort, need author or key in noauthor_iidea_nodate -Warning--to sort, need author or key in noauthor_robotics_2021 -Warning--to sort, need author or key in noauthor_vl53l7cx_nodate -Warning--to sort, need author or key in noauthor_can_nodate -Warning--to sort, need author or key in noauthor_file20200501_2020 -Warning--to sort, need author or key in noauthor_vlp_nodate -Warning--to sort, need author or key in noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate -Warning--to sort, need author or key in noauthor_pico-series_nodate -Warning--to sort, need author or key in noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025 -Warning--to sort, need author or key in noauthor_tutorials_nodate -Warning--to sort, need author or key in noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025 -Warning--there's no year in jain_survey_nodate -You've used 23 entries, +Warning--I didn't find a database entry for "vogel-heuser_von_2023" +Warning--I didn't find a database entry for "noauthor_iidea_nodate" +Warning--I didn't find a database entry for "haddadin_physical_2016" +Warning--I didn't find a database entry for "popov_collision_2017" +Warning--I didn't find a database entry for "noauthor_robotics_2021" +Warning--I didn't find a database entry for "li_common_2019" +Warning--I didn't find a database entry for "noauthor_vl53l7cx_nodate" +Warning--I didn't find a database entry for "hering_sensoren_2018" +Warning--I didn't find a database entry for "noauthor_can_nodate" +Warning--I didn't find a database entry for "liu_application_2024" +Warning--I didn't find a database entry for "al_naser_fusion_2022" +Warning--I didn't find a database entry for "amaya-mejia_vision-based_2022" +Warning--I didn't find a database entry for "rashid_local_2020" +Warning--I didn't find a database entry for "noauthor_file20200501_2020" +Warning--I didn't find a database entry for "jain_survey_nodate" +Warning--I didn't find a database entry for "raj_survey_2020" +Warning--I didn't find a database entry for "surmann_autonomous_2003" +Warning--I didn't find a database entry for "niclass_design_2012" +Warning--I didn't find a database entry for "noauthor_vlp_nodate" +Warning--I didn't find a database entry for "noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate" +Warning--I didn't find a database entry for "noauthor_pico-series_nodate" +Warning--I didn't find a database entry for "noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025" +Warning--I didn't find a database entry for "noauthor_tutorials_nodate" +Warning--I didn't find a database entry for "noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025" +You've used 0 entries, 4335 wiz_defined-function locations, - 981 strings with 11962 characters, -and the built_in function-call counts, 13469 in all, are: -= -- 1430 -> -- 261 -< -- 397 -+ -- 452 -- -- 91 -* -- 958 -:= -- 2137 -add.period$ -- 36 -call.type$ -- 23 -change.case$ -- 89 + 851 strings with 6418 characters, +and the built_in function-call counts, 33 in all, are: += -- 0 +> -- 0 +< -- 0 ++ -- 0 +- -- 0 +* -- 2 +:= -- 10 +add.period$ -- 0 +call.type$ -- 0 +change.case$ -- 0 chr.to.int$ -- 0 -cite$ -- 35 -duplicate$ -- 313 -empty$ -- 1050 -format.name$ -- 172 -if$ -- 2808 +cite$ -- 0 +duplicate$ -- 0 +empty$ -- 1 +format.name$ -- 0 +if$ -- 1 int.to.chr$ -- 0 -int.to.str$ -- 23 -missing$ -- 113 -newline$ -- 148 -num.names$ -- 24 -pop$ -- 143 +int.to.str$ -- 0 +missing$ -- 0 +newline$ -- 10 +num.names$ -- 0 +pop$ -- 0 preamble$ -- 1 -purify$ -- 89 
+purify$ -- 0 quote$ -- 0 -skip$ -- 580 +skip$ -- 1 stack$ -- 0 -substring$ -- 1411 -swap$ -- 90 -text.length$ -- 57 +substring$ -- 0 +swap$ -- 0 +text.length$ -- 0 text.prefix$ -- 0 top$ -- 0 -type$ -- 100 -warning$ -- 12 -while$ -- 81 -width$ -- 25 -write$ -- 320 -(There were 12 warnings) +type$ -- 0 +warning$ -- 0 +while$ -- 0 +width$ -- 0 +write$ -- 7 +(There was 1 error message) diff --git a/Bachelorarbeit/Bachelorarbeit/ba.ist b/Bachelorarbeit/Bachelorarbeit/ba.ist index 5505e51d8261d4c9d2acd50bc8fe6ef0c6cd04a6..7d736dd99d27830ae05243bf182a6df54c62a99b 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.ist +++ b/Bachelorarbeit/Bachelorarbeit/ba.ist @@ -1,5 +1,5 @@ % makeindex style file created by the glossaries package -% for document 'ba' on 2025-4-16 +% for document 'ba' on 2025-4-18 actual '?' encap '|' level '!' diff --git a/Bachelorarbeit/Bachelorarbeit/ba.lof b/Bachelorarbeit/Bachelorarbeit/ba.lof index f66c833bbceeb1d955478eb7e6ab11fbec4e0252..d6d601c6b4cd545649be2c2a8cfcb966ed4ef79c 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.lof +++ b/Bachelorarbeit/Bachelorarbeit/ba.lof @@ -6,13 +6,14 @@ \contentsline {figure}{\numberline {\textbf 2-3}{\ignorespaces Beispiel für einen Optomechanischen 2D-\acrshort {LIDAR}, Source:\cite {raj_survey_2020}}}{9}{figure.2.3}% \contentsline {figure}{\numberline {\textbf 2-4}{\ignorespaces Package eines VL53L7CX, Source:\cite {noauthor_vl53l7cx_nodate}}}{10}{figure.2.4}% \addvspace {10\p@ } -\contentsline {figure}{\numberline {\textbf 3-1}{\ignorespaces Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}}{15}{figure.3.1}% \addvspace {10\p@ } +\contentsline {figure}{\numberline {\textbf 4-1}{\ignorespaces Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}}{16}{figure.4.1}% \addvspace {10\p@ } \addvspace {10\p@ } -\contentsline {figure}{\numberline {\textbf B-1}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 1)}}{45}{figure.B.1}% -\contentsline {figure}{\numberline {\textbf B-2}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 2)}}{46}{figure.B.2}% -\contentsline {figure}{\numberline {\textbf B-3}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 3)}}{47}{figure.B.3}% -\contentsline {figure}{\numberline {\textbf B-4}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 4)}}{48}{figure.B.4}% \addvspace {10\p@ } -\contentsline {figure}{\numberline {\textbf C-1}{\ignorespaces Versuchsaufbau mit zwei VL53L5CX}}{49}{figure.C.1}% +\contentsline {figure}{\numberline {\textbf B-1}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 1)}}{46}{figure.B.1}% +\contentsline {figure}{\numberline {\textbf B-2}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 2)}}{47}{figure.B.2}% +\contentsline {figure}{\numberline {\textbf B-3}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 3)}}{48}{figure.B.3}% +\contentsline {figure}{\numberline {\textbf B-4}{\ignorespaces 3D-Modell des Sensormoduls (Perspektive 4)}}{49}{figure.B.4}% +\addvspace {10\p@ } +\contentsline {figure}{\numberline {\textbf C-1}{\ignorespaces Versuchsaufbau mit zwei VL53L5CX}}{50}{figure.C.1}% diff --git a/Bachelorarbeit/Bachelorarbeit/ba.log b/Bachelorarbeit/Bachelorarbeit/ba.log index 9d8164790ecd3b4eb6d074b2933c822069295097..8fbdb542ca780eafe1c88816e28851662d6a0a5a 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.log +++ b/Bachelorarbeit/Bachelorarbeit/ba.log @@ -1,4 +1,4 @@ -This is pdfTeX, Version 3.141592653-2.6-1.40.22 (TeX Live 2022/dev/Debian) (preloaded format=pdflatex 2025.4.15) 16 APR 2025 17:42 +This is pdfTeX, Version 
3.141592653-2.6-1.40.22 (TeX Live 2022/dev/Debian) (preloaded format=pdflatex 2025.4.17) 18 APR 2025 14:01 entering extended mode restricted \write18 enabled. %&-line parsing enabled. @@ -1473,63 +1473,192 @@ Class scrbook Warning: \float@addtolists detected! (scrbook) a package that still implements the (scrbook) deprecated \float@addtolist interface. + +LaTeX Warning: Citation `vogel-heuser_von_2023' on page 3 undefined on input li +ne 4. + + +LaTeX Warning: Citation `noauthor_iidea_nodate' on page 3 undefined on input li +ne 5. + + +LaTeX Warning: Citation `haddadin_physical_2016' on page 3 undefined on input l +ine 6. + + +LaTeX Warning: Citation `popov_collision_2017' on page 3 undefined on input lin +e 8. + [3 -] [4] [5] +] +LaTeX Font Info: Trying to load font information for TS1+phv on input line 2 +9. + (/usr/share/texlive/texmf-dist/tex/latex/psnfss/ts1phv.fd +File: ts1phv.fd 2020/03/25 scalable font definitions for TS1/phv. +) +Package microtype Info: Loading generic protrusion settings for font family +(microtype) `phv' (encoding: TS1). +(microtype) For optimal results, create family-specific settings. +(microtype) See the microtype manual for details. + [4] [5] chapter 2. -<images/Cobots-Forecast-Global-Market-1024x576.jpg, id=124, 770.88pt x 433.62pt -> +<images/Cobots-Forecast-Global-Market-1024x576.jpg, id=91, 770.88pt x 433.62pt> + File: images/Cobots-Forecast-Global-Market-1024x576.jpg Graphic file (type jpg) <use images/Cobots-Forecast-Global-Market-1024x576.jpg> Package pdftex.def Info: images/Cobots-Forecast-Global-Market-1024x576.jpg use -d on input line 52. +d on input line 44. (pdftex.def) Requested size: 385.43906pt x 216.80946pt. + +LaTeX Warning: Citation `noauthor_can_nodate' on page 6 undefined on input line + 45. + + +LaTeX Warning: Citation `noauthor_can_nodate' on page 6 undefined on input line + 45. + + +LaTeX Warning: Citation `liu_application_2024' on page 6 undefined on input lin +e 49. + + +LaTeX Warning: Citation `popov_collision_2017' on page 6 undefined on input lin +e 55. + + +LaTeX Warning: Citation `noauthor_robotics_2021' on page 6 undefined on input l +ine 57. + + +LaTeX Warning: Citation `al_naser_fusion_2022' on page 6 undefined on input lin +e 61. + + +LaTeX Warning: Citation `amaya-mejia_vision-based_2022' on page 6 undefined on +input line 63. + + +LaTeX Warning: Citation `rashid_local_2020' on page 6 undefined on input line 6 +5. + [6 <./images/Cobots-Forecast-Global-Market-1024x576.jpg>] [7] -<images/20200501_Time_of_flight.svg.png, id=164, 1284.8pt x 857.2025pt> +<images/20200501_Time_of_flight.svg.png, id=117, 1284.8pt x 857.2025pt> File: images/20200501_Time_of_flight.svg.png Graphic file (type png) <use images/20200501_Time_of_flight.svg.png> Package pdftex.def Info: images/20200501_Time_of_flight.svg.png used on input -line 83. +line 75. (pdftex.def) Requested size: 192.71169pt x 128.57483pt. -LaTeX Font Info: Trying to load font information for TS1+phv on input line 9 -8. -(/usr/share/texlive/texmf-dist/tex/latex/psnfss/ts1phv.fd -File: ts1phv.fd 2020/03/25 scalable font definitions for TS1/phv. -) -Package microtype Info: Loading generic protrusion settings for font family -(microtype) `phv' (encoding: TS1). -(microtype) For optimal results, create family-specific settings. -(microtype) See the microtype manual for details. -<images/Optomechanical LiDAR.png, id=177, 3661.68pt x 1959.32pt> + +LaTeX Warning: Citation `noauthor_file20200501_2020' on page 8 undefined on inp +ut line 76. 
+ + +LaTeX Warning: Citation `noauthor_file20200501_2020' on page 8 undefined on inp +ut line 76. + + +LaTeX Warning: Citation `li_common_2019' on page 8 undefined on input line 83. + + +LaTeX Warning: Citation `jain_survey_nodate' on page 8 undefined on input line +83. + + +LaTeX Warning: Citation `raj_survey_2020' on page 8 undefined on input line 86. + + + +LaTeX Warning: Citation `raj_survey_2020' on page 8 undefined on input line 90. + + +<images/Optomechanical LiDAR.png, id=124, 3661.68pt x 1959.32pt> File: images/Optomechanical LiDAR.png Graphic file (type png) <use images/Optomechanical LiDAR.png> -Package pdftex.def Info: images/Optomechanical LiDAR.png used on input line 10 -1. +Package pdftex.def Info: images/Optomechanical LiDAR.png used on input line 93 +. (pdftex.def) Requested size: 366.18945pt x 195.94348pt. -<images/VL53L7CX_Package.jpg, id=198, 735.49782pt x 535.2497pt> + +LaTeX Warning: Citation `raj_survey_2020' on page 8 undefined on input line 94. + + + +LaTeX Warning: Citation `raj_survey_2020' on page 8 undefined on input line 94. + + + +LaTeX Warning: Citation `surmann_autonomous_2003' on page 8 undefined on input +line 98. + + +LaTeX Warning: Citation `niclass_design_2012' on page 8 undefined on input line + 100. + + +LaTeX Warning: Citation `raj_survey_2020' on page 8 undefined on input line 100 +. + + +LaTeX Warning: Citation `noauthor_vlp_nodate' on page 8 undefined on input line + 102. + + +LaTeX Warning: Citation `noauthor_vl53l7cx_nodate' on page 8 undefined on input + line 103. + +<images/VL53L7CX_Package.jpg, id=138, 735.49782pt x 535.2497pt> File: images/VL53L7CX_Package.jpg Graphic file (type jpg) <use images/VL53L7CX_Package.jpg> -Package pdftex.def Info: images/VL53L7CX_Package.jpg used on input line 115. +Package pdftex.def Info: images/VL53L7CX_Package.jpg used on input line 107. (pdftex.def) Requested size: 147.09695pt x 107.04803pt. - [8 <./images/20200501_Time_of_flight.svg.png>] [9 <./images/Optomechanical LiD -AR.png>] [10 <./images/VL53L7CX_Package.jpg>] + +LaTeX Warning: Citation `noauthor_vl53l7cx_nodate' on page 8 undefined on input + line 108. + + +LaTeX Warning: Citation `noauthor_vl53l7cx_nodate' on page 8 undefined on input + line 108. + +[8 <./images/20200501_Time_of_flight.svg.png>] [9 <./images/Optomechanical LiDA +R.png>] [10 <./images/VL53L7CX_Package.jpg>] chapter 3. [11 +] +chapter 4. + +LaTeX Warning: Citation `noauthor_tof_imager_micro_rosteensy_pcl_publisher_noda +te' on page 12 undefined on input line 125. + + +LaTeX Warning: Citation `noauthor_pico-series_nodate' on page 12 undefined on i +nput line 125. + + +LaTeX Warning: Citation `noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_202 +5' on page 12 undefined on input line 127. + + +LaTeX Warning: Citation `noauthor_vl53l7cx_nodate' on page 12 undefined on inpu +t line 128. + +[12 + + ] Package hyperref Info: bookmark level for unknown lstlisting defaults to 0 on i -nput line 145. +nput line 148. LaTeX Font Info: Trying to load font information for T1+cmtt on input line 1 -45. +48. (/usr/share/texlive/texmf-dist/tex/latex/base/t1cmtt.fd File: t1cmtt.fd 2019/12/16 v2.5j Standard LaTeX font definitions ) @@ -1538,21 +1667,55 @@ Package microtype Info: Loading generic protrusion settings for font family (microtype) For optimal results, create family-specific settings. (microtype) See the microtype manual for details. LaTeX Font Info: Font shape `T1/cmtt/bx/n' in size <10.95> not available -(Font) Font shape `T1/cmtt/m/n' tried instead on input line 146. 
- [12] [13] -<images/Topic_explained.png, id=351, 641.39626pt x 359.84438pt> +(Font) Font shape `T1/cmtt/m/n' tried instead on input line 149. + [13] [14] +<images/Topic_explained.png, id=283, 641.39626pt x 359.84438pt> File: images/Topic_explained.png Graphic file (type png) <use images/Topic_explained.png> -Package pdftex.def Info: images/Topic_explained.png used on input line 230. +Package pdftex.def Info: images/Topic_explained.png used on input line 233. (pdftex.def) Requested size: 416.90262pt x 233.89607pt. -[14] [15 <./images/Topic_explained.png>] [16] -chapter 4. -) (./ba.lof) + +LaTeX Warning: Citation `noauthor_tutorials_nodate' on page 15 undefined on inp +ut line 234. + + +LaTeX Warning: Citation `noauthor_tutorials_nodate' on page 15 undefined on inp +ut line 234. + +[15] [16 <./images/Topic_explained.png>] + +LaTeX Warning: Citation `noauthor_universalrobotsuniversal_robots_ros2_gz_simul +ation_2025' on page 17 undefined on input line 254. + +[17] +chapter 5. +) (./ba.lof + +LaTeX Warning: Citation `noauthor_can_nodate' on page 18 undefined on input lin +e 4. + + +LaTeX Warning: Citation `noauthor_file20200501_2020' on page 18 undefined on in +put line 5. + + +LaTeX Warning: Citation `raj_survey_2020' on page 18 undefined on input line 6. + + + +LaTeX Warning: Citation `noauthor_vl53l7cx_nodate' on page 18 undefined on inpu +t line 7. + + +LaTeX Warning: Citation `noauthor_tutorials_nodate' on page 18 undefined on inp +ut line 10. + +) \tf@lof=\write9 \openout9 = `ba.lof'. - (./anhang.tex [17 + (./anhang.tex [18 ] @@ -1567,7 +1730,7 @@ Overfull \hbox (81.1738pt too wide) in paragraph at lines 11--12 [][][][][][][] [] -[18 +[19 ] @@ -1575,21 +1738,21 @@ Overfull \hbox (81.1738pt too wide) in paragraph at lines 38--39 [][][][][][][] [] -[19] +[20] Overfull \hbox (12.9025pt too wide) in paragraph at lines 52--53 [][][][] [] -[20] [21] [22] +[21] [22] [23] Overfull \hbox (33.2728pt too wide) in paragraph at lines 164--166 [][][][] [] -[23] [24] [25] [26] [27] [28] [29] [30] [31]) +[24] [25] [26] [27] [28] [29] [30] [31] [32]) (/usr/share/texlive/texmf-dist/tex/latex/listings/lstlang1.sty File: lstlang1.sty 2020/03/24 1.8d listings language file ) -(./ser_test_node.py [32] +(./ser_test_node.py [33] Overfull \hbox (169.0748pt too wide) in paragraph at lines 31--32 [][] [] @@ -1604,7 +1767,7 @@ Overfull \hbox (284.5065pt too wide) in paragraph at lines 33--34 [][] [] -[33] +[34] Overfull \hbox (135.1243pt too wide) in paragraph at lines 34--35 [][] [] @@ -1629,7 +1792,7 @@ Overfull \hbox (101.1738pt too wide) in paragraph at lines 39--41 [][] [] -[34] +[35] Overfull \hbox (53.6431pt too wide) in paragraph at lines 77--78 [][] [] @@ -1644,14 +1807,14 @@ Overfull \hbox (53.6431pt too wide) in paragraph at lines 79--80 [][] [] -[35] [36]) (./pcl_rob_node.py [37] [38] [39] [40] [41] [42] [43]) [44] +[36] [37]) (./pcl_rob_node.py [38] [39] [40] [41] [42] [43] [44]) [45] chapter B. -<images/Halterung_Seite.jpg, id=1447, 484.81125pt x 450.9347pt> +<images/Halterung_Seite.jpg, id=1369, 484.81125pt x 450.9347pt> File: images/Halterung_Seite.jpg Graphic file (type jpg) <use images/Halterung_Seite.jpg> Package pdftex.def Info: images/Halterung_Seite.jpg used on input line 13. (pdftex.def) Requested size: 455.24411pt x 423.45207pt. 
-<images/Halterung_Seite_Oben.jpg, id=1448, 496.10344pt x 508.14844pt> +<images/Halterung_Seite_Oben.jpg, id=1370, 496.10344pt x 508.14844pt> File: images/Halterung_Seite_Oben.jpg Graphic file (type jpg) <use images/Halterung_Seite_Oben.jpg> Package pdftex.def Info: images/Halterung_Seite_Oben.jpg used on input line 19 @@ -1660,7 +1823,7 @@ Package pdftex.def Info: images/Halterung_Seite_Oben.jpg used on input line 19 LaTeX Warning: `h' float specifier changed to `ht'. -<images/Halterung_Seite_Oben_schräg.jpg, id=1449, 561.59813pt x 478.78876pt> +<images/Halterung_Seite_Oben_schräg.jpg, id=1371, 561.59813pt x 478.78876pt> File: images/Halterung_Seite_Oben_schräg.jpg Graphic file (type jpg) <use images/Halterung_Seite_Oben_schräg.jpg> Package pdftex.def Info: images/Halterung_Seite_Oben_schräg.jpg used on input @@ -1669,7 +1832,7 @@ Package pdftex.def Info: images/Halterung_Seite_Oben_schräg.jpg used on input LaTeX Warning: `h' float specifier changed to `ht'. -<images/Halterung_Top.jpg, id=1450, 545.03625pt x 496.85625pt> +<images/Halterung_Top.jpg, id=1372, 545.03625pt x 496.85625pt> File: images/Halterung_Top.jpg Graphic file (type jpg) <use images/Halterung_Top.jpg> Package pdftex.def Info: images/Halterung_Top.jpg used on input line 31. @@ -1677,113 +1840,49 @@ Package pdftex.def Info: images/Halterung_Top.jpg used on input line 31. LaTeX Warning: `h' float specifier changed to `ht'. -[45 +[46 - <./images/Halterung_Seite.jpg>] [46 <./images/Halterung_Seite_Oben.jpg>] -[47 <./images/Halterung_Seite_Oben_schräg.jpg>] [48 <./images/Halterung_Top.jp + <./images/Halterung_Seite.jpg>] [47 <./images/Halterung_Seite_Oben.jpg>] +[48 <./images/Halterung_Seite_Oben_schräg.jpg>] [49 <./images/Halterung_Top.jp g>] chapter C. -<images/Versuchsaufbau_mit_VL53L5CX.jpg, id=1467, 813.0375pt x 1016.29688pt> +<images/Versuchsaufbau_mit_VL53L5CX.jpg, id=1390, 813.0375pt x 1016.29688pt> File: images/Versuchsaufbau_mit_VL53L5CX.jpg Graphic file (type jpg) <use images/Versuchsaufbau_mit_VL53L5CX.jpg> Package pdftex.def Info: images/Versuchsaufbau_mit_VL53L5CX.jpg used on input line 39. (pdftex.def) Requested size: 406.51776pt x 508.1472pt. -) [49 - - - <./images/Versuchsaufbau_mit_VL53L5CX.jpg>] (./ba.bbl -LaTeX Font Info: Font shape `T1/phv/m/it' in size <12> not available -(Font) Font shape `T1/phv/m/sl' tried instead on input line 12. +) [50 -Underfull \hbox (badness 4647) in paragraph at lines 12--15 -[]\T1/phv/m/sl/12 (+20) Can the col-la-bo-ra-ti-ve ro-bot mar-ket ex-pe-ri-ence - a se-cond grow-th sur- - [] - - -Underfull \hbox (badness 2213) in paragraph at lines 12--15 -\T1/phv/m/sl/12 (+20) ge in the post-pandemic era? []$\T1/cmtt/m/n/12 https : -/ / interactanalysis . com / insight / - [] - - -Overfull \hbox (150.14659pt too wide) in paragraph at lines 12--15 -\T1/cmtt/m/n/12 can-[]the-[]collaborative-[]robot-[]market-[]experience-[]a-[]s -econd-[]growth-[]surge-[]in-[]the-[]post-[]pandemic-[]era/$[] - [] - -LaTeX Font Info: Font shape `TS1/phv/m/it' in size <12> not available -(Font) Font shape `TS1/phv/m/sl' tried instead on input line 29. 
- -Underfull \hbox (badness 10000) in paragraph at lines 29--32 -[]\T1/phv/m/sl/12 (+20) tof_imager_micro_ros/teensy_pcl_publisher at hum-ble \T -S1/phv/m/sl/12 (+20) � \T1/phv/m/sl/12 (+20) adi-tya-ka-ma- - [] - -Underfull \hbox (badness 6725) in paragraph at lines 39--42 -[]\T1/phv/m/sl/12 (+20) VL53L7CX - Time-of-Flight (ToF) 8x8 mul-ti-zo-ne ran-gi -ng sen-sor with - [] - - -Underfull \hbox (badness 10000) in paragraph at lines 39--42 -\T1/phv/m/sl/12 (+20) 90 de-grees FoV - STMi-cro-elec-tro-nics\T1/phv/m/n/12 (+ -20) . []$\T1/cmtt/m/n/12 https : / / www . st . com / en / - [] - - -Underfull \hbox (badness 10000) in paragraph at lines 59--63 -[]\T1/phv/m/sl/12 (+20) sparkfun/SparkFun_VL53L5CX_Arduino_Library\T1/phv/m/n/1 -2 (+20) . []$\T1/cmtt/m/n/12 https : / / github . com / - [] - - -Underfull \hbox (badness 4229) in paragraph at lines 59--63 -\T1/cmtt/m/n/12 sparkfun / SparkFun _ VL53L5CX _ Arduino _ Library$[]\T1/phv/m/ -n/12 (+20) . Version:Januar 2025. -- - [] - -[50 - - - - -] -Underfull \hbox (badness 2103) in paragraph at lines 65--70 -\T1/cmtt/m/n/12 UniversalRobots / Universal _ Robots _ ROS2 _ GZ _ Simulation$[ -]\T1/phv/m/n/12 (+20) . Version:Februar - [] - - -Underfull \hbox (badness 1769) in paragraph at lines 90--96 -\T1/phv/m/sl/12 (+20) sen-schaft und Tech-nik\T1/phv/m/n/12 (+20) . Wies-ba-de -n : Sprin-ger Fach-me-di-en Wies-ba-den, - [] - -[51]) + <./images/Versuchsaufbau_mit_VL53L5CX.jpg>] Package glossaries Warning: No \printglossary or \printglossaries found. (Remove \makeglossaries if you don't want any glossaries.) This document will not have a glossary. -[52] (./ba.aux) +(./ba.aux) + +LaTeX Warning: There were undefined references. + Package rerunfilecheck Info: File `ba.out' has not changed. (rerunfilecheck) Checksum: D41D8CD98F00B204E9800998ECF8427E;0. 
) Here is how much of TeX's memory you used: - 44587 strings out of 478287 - 889387 string characters out of 5849289 - 1878648 words of memory out of 5000000 - 60638 multiletter control sequences out of 15000+600000 - 533735 words of font info for 272 fonts, out of 8000000 for 9000 + 44448 strings out of 478287 + 887011 string characters out of 5849289 + 1877542 words of memory out of 5000000 + 60597 multiletter control sequences out of 15000+600000 + 521034 words of font info for 224 fonts, out of 8000000 for 9000 1141 hyphenation exceptions out of 8191 108i,20n,106p,10668b,2413s stack positions out of 5000i,500n,10000p,200000b,80000s pdfTeX warning (dest): name{glo:PCD} has been referenced but does not exist, replaced by a fixed one +pdfTeX warning (dest): name{glo:UR} has been referenced but does not exist, rep +laced by a fixed one + pdfTeX warning (dest): name{glo:USB} has been referenced but does not exist, re placed by a fixed one @@ -1826,6 +1925,9 @@ placed by a fixed one pdfTeX warning (dest): name{glo:ToF} has been referenced but does not exist, re placed by a fixed one +pdfTeX warning (dest): name{glo:ToFs} has been referenced but does not exist, r +eplaced by a fixed one + pdfTeX warning (dest): name{glo:RGB} has been referenced but does not exist, re placed by a fixed one @@ -1838,15 +1940,6 @@ laced by a fixed one pdfTeX warning (dest): name{glo:RGB-D} has been referenced but does not exist, replaced by a fixed one -pdfTeX warning (dest): name{glo:UR} has been referenced but does not exist, rep -laced by a fixed one - -pdfTeX warning (dest): name{glo:PC} has been referenced but does not exist, rep -laced by a fixed one - -pdfTeX warning (dest): name{glo:ToFs} has been referenced but does not exist, r -eplaced by a fixed one - pdfTeX warning (dest): name{glo:Cobot} has been referenced but does not exist, replaced by a fixed one @@ -1858,14 +1951,12 @@ texmf-dist/fonts/enc/dvips/base/8r.enc}</usr/share/texlive/texmf-dist/fonts/typ e1/public/amsfonts/cm/cmr12.pfb></usr/share/texlive/texmf-dist/fonts/type1/publ ic/amsfonts/cm/cmr8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfo nts/cm/cmsy10.pfb></usr/share/texmf/fonts/type1/public/cm-super/sftt1095.pfb></ -usr/share/texmf/fonts/type1/public/cm-super/sftt1200.pfb></usr/share/texlive/te -xmf-dist/fonts/type1/urw/helvetic/uhvb8a.pfb></usr/share/texlive/texmf-dist/fon -ts/type1/urw/helvetic/uhvr8a.pfb></usr/share/texlive/texmf-dist/fonts/type1/urw -/helvetic/uhvro8a.pfb> -Output written on ba.pdf (52 pages, 2254397 bytes). +usr/share/texlive/texmf-dist/fonts/type1/urw/helvetic/uhvb8a.pfb></usr/share/te +xlive/texmf-dist/fonts/type1/urw/helvetic/uhvr8a.pfb> +Output written on ba.pdf (50 pages, 2194434 bytes). PDF statistics: - 1862 PDF objects out of 2073 (max. 8388607) - 1754 compressed objects within 18 object streams - 1093 named destinations out of 1200 (max. 500000) - 75020 words of extra memory for PDF output out of 89155 (max. 10000000) + 1724 PDF objects out of 1728 (max. 8388607) + 1628 compressed objects within 17 object streams + 1067 named destinations out of 1200 (max. 500000) + 64788 words of extra memory for PDF output out of 74296 (max. 
10000000) diff --git a/Bachelorarbeit/Bachelorarbeit/ba.lot b/Bachelorarbeit/Bachelorarbeit/ba.lot deleted file mode 100644 index 51de257d215ff0f682e3b6c88ace4aa3f3109c6f..0000000000000000000000000000000000000000 --- a/Bachelorarbeit/Bachelorarbeit/ba.lot +++ /dev/null @@ -1,7 +0,0 @@ -\babel@toc {ngerman}{}\relax -\addvspace {10\p@ } -\addvspace {10\p@ } -\addvspace {10\p@ } -\addvspace {10\p@ } -\addvspace {10\p@ } -\providecommand \tocbasic@end@toc@file {}\tocbasic@end@toc@file diff --git a/Bachelorarbeit/Bachelorarbeit/ba.pdf b/Bachelorarbeit/Bachelorarbeit/ba.pdf index a25b621e6b1f502b296c0b860aa89b08e702d977..a2312a350e0068e4e0b86759e3d512f5ce63ab43 100644 Binary files a/Bachelorarbeit/Bachelorarbeit/ba.pdf and b/Bachelorarbeit/Bachelorarbeit/ba.pdf differ diff --git a/Bachelorarbeit/Bachelorarbeit/ba.synctex.gz b/Bachelorarbeit/Bachelorarbeit/ba.synctex.gz index 2771ae3d45bc65e3069ff869780a516bc7393cc0..3be2c0c7c46755f1abd4c47e880cc186846d4397 100644 Binary files a/Bachelorarbeit/Bachelorarbeit/ba.synctex.gz and b/Bachelorarbeit/Bachelorarbeit/ba.synctex.gz differ diff --git a/Bachelorarbeit/Bachelorarbeit/ba.tex b/Bachelorarbeit/Bachelorarbeit/ba.tex index 58ca4edb6b4287b94f82bf392a25aba818db36b4..4d51c2787267549345a30404317558f58044fcdb 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.tex +++ b/Bachelorarbeit/Bachelorarbeit/ba.tex @@ -220,6 +220,6 @@ colorlinks=false % Deaktiviert die farbige Textmarkierung, lässt Rahmen aktiv \input{anhang} \cleardoublepage -\bibliography{BA} +%\bibliography{BA} \end{document} \ No newline at end of file diff --git a/Bachelorarbeit/Bachelorarbeit/ba.toc b/Bachelorarbeit/Bachelorarbeit/ba.toc index 500d42d36d91613cae6eca3faac2678b7d1c3ffe..f2c975f190b16bcd7ce7d3c603fa3b42536219e0 100644 --- a/Bachelorarbeit/Bachelorarbeit/ba.toc +++ b/Bachelorarbeit/Bachelorarbeit/ba.toc @@ -1,26 +1,27 @@ \babel@toc {ngerman}{}\relax \contentsline {chapter}{\numberline {1}Einleitung}{3}{chapter.1}% -\contentsline {section}{\numberline {1.1}Motivation}{4}{section.1.1}% -\contentsline {section}{\numberline {1.2}Zielsetzung}{4}{section.1.2}% -\contentsline {section}{\numberline {1.3}Forschungsfrage}{5}{section.1.3}% -\contentsline {section}{\numberline {1.4}Aufbau der Arbeit}{5}{section.1.4}% +\contentsline {section}{\numberline {1.1}Zielsetzung}{4}{section.1.1}% +\contentsline {section}{\numberline {1.2}Forschungsfragen}{4}{section.1.2}% +\contentsline {section}{\numberline {1.3}Aufbau der Arbeit}{5}{section.1.3}% \contentsline {chapter}{\numberline {2}Stand der Technik}{6}{chapter.2}% -\contentsline {section}{\numberline {2.1}Kollisionsvermeidung und Kollisionserkennung}{7}{section.2.1}% +\contentsline {section}{\numberline {2.1}Kollisionsvermeidung und Kollisionserkennung}{6}{section.2.1}% \contentsline {section}{\numberline {2.2}Laserscanner (LiDAR)}{8}{section.2.2}% -\contentsline {chapter}{\numberline {3}Umsetzung}{11}{chapter.3}% -\contentsline {section}{\numberline {3.1}Vorgehensweise}{11}{section.3.1}% -\contentsline {section}{\numberline {3.2}Software}{11}{section.3.2}% -\contentsline {subsection}{\numberline {3.2.1}Arduino}{11}{subsection.3.2.1}% -\contentsline {subsection}{\numberline {3.2.2}Robot Operating System 2}{14}{subsection.3.2.2}% -\contentsline {subsection}{\numberline {3.2.3}RVIZ2 und Gazebo Classic}{16}{subsection.3.2.3}% -\contentsline {section}{\numberline {3.3}Hardware}{16}{section.3.3}% -\contentsline {subsection}{\numberline {3.3.1}Elektronisch}{16}{subsection.3.3.1}% -\contentsline {subsection}{\numberline 
{3.3.2}Mechanisch}{16}{subsection.3.3.2}%
-\contentsline {chapter}{\numberline {4}Fazit und Ausblick}{17}{chapter.4}%
-\contentsline {chapter}{\numberline {A}Quellcode}{18}{appendix.A}%
-\contentsline {section}{\numberline {A.1}Arduino-Quellcode}{18}{section.A.1}%
-\contentsline {section}{\numberline {A.2}ROS 2-Nodes Quellcode}{32}{section.A.2}%
-\contentsline {subsection}{\numberline {A.2.1}Datenverarbeitungs-Node (Python)}{32}{subsection.A.2.1}%
-\contentsline {subsection}{\numberline {A.2.2}Kollisionserkennungs-Node (Python)}{37}{subsection.A.2.2}%
-\contentsline {chapter}{\numberline {B}3D-Modelle des Sensormoduls}{45}{appendix.B}%
-\contentsline {chapter}{\numberline {C}Versuchsaufbau}{49}{appendix.C}%
+\contentsline {section}{\numberline {2.3}Rahmen}{10}{section.2.3}%
+\contentsline {chapter}{\numberline {3}Anforderungen}{11}{chapter.3}%
+\contentsline {chapter}{\numberline {4}Umsetzung}{12}{chapter.4}%
+\contentsline {section}{\numberline {4.1}Vorgehensweise}{12}{section.4.1}%
+\contentsline {section}{\numberline {4.2}Software}{12}{section.4.2}%
+\contentsline {subsection}{\numberline {4.2.1}Arduino}{12}{subsection.4.2.1}%
+\contentsline {subsection}{\numberline {4.2.2}Robot Operating System 2}{15}{subsection.4.2.2}%
+\contentsline {subsection}{\numberline {4.2.3}RVIZ2 und Gazebo Classic}{17}{subsection.4.2.3}%
+\contentsline {section}{\numberline {4.3}Hardware}{17}{section.4.3}%
+\contentsline {subsection}{\numberline {4.3.1}Elektronisch}{17}{subsection.4.3.1}%
+\contentsline {subsection}{\numberline {4.3.2}Mechanisch}{17}{subsection.4.3.2}%
+\contentsline {chapter}{\numberline {5}Fazit und Ausblick}{18}{chapter.5}%
+\contentsline {chapter}{\numberline {A}Quellcode}{19}{appendix.A}%
+\contentsline {section}{\numberline {A.1}Arduino-Quellcode}{19}{section.A.1}%
+\contentsline {section}{\numberline {A.2}ROS 2-Nodes Quellcode}{33}{section.A.2}%
+\contentsline {subsection}{\numberline {A.2.1}Datenverarbeitungs-Node (Python)}{33}{subsection.A.2.1}%
+\contentsline {subsection}{\numberline {A.2.2}Kollisionserkennungs-Node (Python)}{38}{subsection.A.2.2}%
+\contentsline {chapter}{\numberline {B}3D-Modelle des Sensormoduls}{46}{appendix.B}%
+\contentsline {chapter}{\numberline {C}Versuchsaufbau}{50}{appendix.C}%
diff --git a/Bachelorarbeit/Bachelorarbeit/chat_text.tex b/Bachelorarbeit/Bachelorarbeit/chat_text.tex
new file mode 100644
index 0000000000000000000000000000000000000000..a2b32f64d665cd6c859df57da9c680138b86dc32
--- /dev/null
+++ b/Bachelorarbeit/Bachelorarbeit/chat_text.tex
@@ -0,0 +1,53 @@
+\chapter{Introduction}
+With the ongoing development toward Industry 5.0, collaborative robots are gaining importance, not least because they enable close cooperation between humans and machines \cite{vogel-heuser_von_2023}. This cooperation, however, requires that the robots pose no hazard whatsoever to humans.
+\\Especially in the context of assisting physically impaired persons, \acrfull{Cobots} prove supportive, as they can ease the handling of work materials \cite{noauthor_iidea_nodate}.
+\\Also against the background of an ageing society, in which the average age of the working population is rising continuously, \acrshort{Cobots} can contribute substantially to relieving workers by taking over repetitive and physically demanding tasks \cite{haddadin_physical_2016}.
+For these reasons, robot systems are required that enable efficient and safe cooperation between human and machine.
+Currently, collisions of \acrshort{Cobots} with humans or objects are often detected by means of intrinsic sensors \cite{popov_collision_2017} or prevented through a spatial separation of human and machine.
+If a spatial separation is not feasible, however, and a collision must nevertheless be avoided at all costs in order to guarantee a safe and at the same time efficient workflow, the sensor system presented in this bachelor thesis can offer a suitable solution.
+\\Beyond mere collision detection, the goal of the sensor system is in particular to recognize potential collisions early and to avoid them actively. By avoiding collisions, the \acrshort{Cobot} can plan and execute an alternative path after an identified hazard and thus continue to fulfil its task reliably despite the near collision.
+\\The sensor system presented in this thesis restricts itself to avoiding collisions by deliberately stopping the current robot motion.
+\\For a precise perception of the environment, the presented sensor system relies on \acrfull{ToFs}. From the acquired data, a three-dimensional representation of the environment can be generated. This digital representation makes it possible to recognize potential collisions early and to avoid them in a targeted manner (see the sketch at the end of this chapter).
+\\By implementing collision avoidance, a \acrshort{Cobot} can act more autonomously, which significantly increases both efficiency and safety in the production environment.
+
+\section{Objective}
+
+Building on previous work, in which different sensor types, their placement, and suitable communication interfaces were examined, a conceptual framework for the present thesis was developed. It is presented in more detail in the further course of this work.
+
+The goal of this thesis is to use exteroceptive distance sensors to generate, from the ego perspective, a complete, digital, three-dimensional representation of the environment of a serial robot.
+
+In this context, the term ``complete'' refers to the representation providing sufficient information to reliably avoid potential collisions with objects in the workspace.
+
+This digital representation of the workspace is designed so that the robot itself is excluded from it. The term workspace denotes the region that the robot can reach mechanically.
+
+A differentiation between the manipulation target and other objects is not required at this time; the only exception is the robot itself.
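+
+The principle can be illustrated with a minimal sketch, assuming an 8x8 distance frame such as a VL53L7CX-class sensor delivers; the helper names \texttt{zones\_to\_points} and \texttt{must\_stop} are illustrative and do not correspond to the actual nodes listed in Appendix A. As a simplification, each zone distance is treated as depth along the optical axis and projected through the angular offset of its zone; a stop is requested as soon as any point falls inside a safety radius.
+\begin{lstlisting}[language=Python]
+import numpy as np
+
+# Illustrative parameters: a VL53L7CX-class sensor reports an 8x8 grid
+# of zone distances (mm) over a roughly 90 degree square field of view.
+GRID = 8
+FOV_RAD = np.deg2rad(90.0)
+STOP_RADIUS_M = 0.25  # hypothetical safety threshold in metres
+
+def zones_to_points(distances_mm):
+    """Convert one 8x8 ToF frame into 3D points in the sensor frame."""
+    pts = []
+    half, step = FOV_RAD / 2.0, FOV_RAD / GRID
+    for row in range(GRID):
+        for col in range(GRID):
+            d = distances_mm[row][col] / 1000.0  # mm -> m
+            if d <= 0.0:
+                continue  # skip invalid or empty zones
+            # Simplification: d is taken as depth along the optical axis.
+            ax = -half + (col + 0.5) * step
+            ay = -half + (row + 0.5) * step
+            pts.append((d * np.tan(ax), d * np.tan(ay), d))
+    return np.array(pts)
+
+def must_stop(points, min_dist=STOP_RADIUS_M):
+    """Request a stop as soon as any point lies inside the safety radius."""
+    return points.size > 0 and float(np.linalg.norm(points, axis=1).min()) < min_dist
+\end{lstlisting}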
diff --git a/Bachelorarbeit/Bachelorarbeit/library.bib b/Bachelorarbeit/Bachelorarbeit/library.bib deleted file mode 100644 index 69a3c336b003f7b24834a60e673ffa39285cd01e..0000000000000000000000000000000000000000 --- a/Bachelorarbeit/Bachelorarbeit/library.bib +++ /dev/null @@ -1,161 +0,0 @@ - -@software{grans_sebastiangransros2-point-cloud-demo_2024, - title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo}, - rights = {{MIT}}, - url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo}, - abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2}, - author = {Grans, Sebastian}, - urldate = {2025-02-12}, - date = {2024-12-08}, - note = {original-date: 2020-06-30T16:55:21Z}, -} - -@online{noauthor_examples_nodate, - title = {Examples - trimesh 4.6.2 documentation}, - url = {https://trimesh.org/examples.html}, - urldate = {2025-02-12}, - file = {Examples - trimesh 4.6.2 documentation:/home/sochi/Zotero/storage/82WA6KM7/examples.html:text/html}, -} - -@online{noauthor_tutorials_nodate, - title = {Tutorials — {ROS} 2 Documentation: Humble documentation}, - url = {https://docs.ros.org/en/humble/Tutorials.html}, - urldate = {2025-02-12}, - file = {Tutorials — ROS 2 Documentation\: Humble documentation:/home/sochi/Zotero/storage/28S5GUZ5/Tutorials.html:text/html}, -} - -@software{iii_earlephilhowerarduino-pico_2025, - title = {earlephilhower/arduino-pico}, - rights = {{LGPL}-2.1}, - url = {https://github.com/earlephilhower/arduino-pico}, - abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards}, - author = {{III}, Earle F. Philhower}, - urldate = {2025-02-12}, - date = {2025-02-11}, - note = {original-date: 2021-02-25T04:20:27Z}, - keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi}, -} - -@online{noauthor_chatgpt_nodate, - title = {{ChatGPT}}, - url = {https://chatgpt.com}, - abstract = {A conversational {AI} system that listens, learns, and challenges}, - urldate = {2025-02-12}, - file = {Snapshot:/home/sochi/Zotero/storage/ZT8MG8Y4/678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html}, -} - -@online{noauthor_pico-series_nodate, - title = {Pico-series Microcontrollers - Raspberry Pi Documentation}, - url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html}, - abstract = {The official documentation for Raspberry Pi computers and microcontrollers}, - urldate = {2025-02-12}, - langid = {english}, - file = {Snapshot:/home/sochi/Zotero/storage/KUCB8PVI/pico-series.html:text/html}, -} - -@online{noauthor_vl53l7cx_nodate, - title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}}, - url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html}, - abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}}, - urldate = {2025-02-12}, - langid = {english}, - file = {Snapshot:/home/sochi/Zotero/storage/VEYLCCLA/vl53l7cx.html:text/html}, -} - -@article{paya_state---art_2017, - title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors}, - volume = {2017}, - rights = {Copyright © 2017 L. 
Payá et al.}, - issn = {1687-7268}, - url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650}, - doi = {10.1155/2017/3497650}, - abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.}, - pages = {3497650}, - number = {1}, - journaltitle = {Journal of Sensors}, - author = {Payá, L. and Gil, A. and Reinoso, O.}, - urldate = {2025-02-12}, - date = {2017}, - langid = {english}, - note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650}, - file = {Full Text PDF:/home/sochi/Zotero/storage/EZ473NGD/Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:/home/sochi/Zotero/storage/86LDAQ62/3497650.html:text/html}, -} - -@article{saudabayev_sensors_2015, - title = {Sensors for Robotic Hands: A Survey of State of the Art}, - volume = {3}, - issn = {2169-3536}, - url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549}, - doi = {10.1109/ACCESS.2015.2482543}, - shorttitle = {Sensors for Robotic Hands}, - abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. 
Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.}, - pages = {1765--1782}, - journaltitle = {{IEEE} Access}, - author = {Saudabayev, Artur and Varol, Huseyin Atakan}, - urldate = {2025-02-12}, - date = {2015}, - note = {Conference Name: {IEEE} Access}, - keywords = {Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, Robot sensing systems, robotic hands, Robots, sensors, Sensors}, - file = {Full Text PDF:/home/sochi/Zotero/storage/HR7ZUF8W/Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/484D4R7H/7283549.html:text/html}, -} - -@collection{hering_sensoren_2018, - location = {Wiesbaden}, - title = {Sensoren in Wissenschaft und Technik}, - rights = {http://www.springer.com/tdm}, - isbn = {978-3-658-12561-5 978-3-658-12562-2}, - url = {http://link.springer.com/10.1007/978-3-658-12562-2}, - publisher = {Springer Fachmedien Wiesbaden}, - editor = {Hering, Ekbert and Schönfelder, Gert}, - urldate = {2025-02-12}, - date = {2018}, - langid = {german}, - doi = {10.1007/978-3-658-12562-2}, - file = {PDF:/home/sochi/Zotero/storage/9TI57WXD/Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf}, -} - -@book{hertzberg_mobile_2012, - location = {Berlin, Heidelberg}, - title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik}, - rights = {https://www.springernature.com/gp/researchers/text-and-data-mining}, - isbn = {978-3-642-01725-4 978-3-642-01726-1}, - url = {https://link.springer.com/10.1007/978-3-642-01726-1}, - series = {{eXamen}.press}, - shorttitle = {Mobile Roboter}, - publisher = {Springer Berlin Heidelberg}, - author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas}, - urldate = {2025-02-12}, - date = {2012}, - langid = {german}, - doi = {10.1007/978-3-642-01726-1}, - file = {PDF:/home/sochi/Zotero/storage/RLTU9P46/Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf}, -} - -@article{haddadin_robot_2017, - title = {Robot Collisions: A Survey on Detection, Isolation, and Identification}, - volume = {33}, - issn = {1941-0468}, - url = {https://ieeexplore.ieee.org/abstract/document/8059840}, - doi = {10.1109/TRO.2017.2723903}, - shorttitle = {Robot Collisions}, - abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. 
The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.}, - pages = {1292--1312}, - number = {6}, - journaltitle = {{IEEE} Transactions on Robotics}, - author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin}, - urldate = {2025-02-12}, - date = {2017-12}, - note = {Conference Name: {IEEE} Transactions on Robotics}, - keywords = {Robot sensing systems, Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, safe robotics, Service robots}, - file = {Accepted Version:/home/sochi/Zotero/storage/IEXJFAMF/Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:/home/sochi/Zotero/storage/LDB3Q92K/8059840.html:text/html}, -} - -@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025, - title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation}, - rights = {{BSD}-3-Clause}, - url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation}, - publisher = {Universal Robots A/S}, - urldate = {2025-02-17}, - date = {2025-02-13}, - note = {original-date: 2021-12-15T12:18:45Z}, -} diff --git a/Bachelorarbeit/Bachelorarbeit/main.acn b/Bachelorarbeit/Bachelorarbeit/main.acn deleted file mode 100644 index 7f9053c87b27b3208c902648aeb990c4ce14f206..0000000000000000000000000000000000000000 --- a/Bachelorarbeit/Bachelorarbeit/main.acn +++ /dev/null @@ -1,19 +0,0 @@ -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Cobot?\glossentry{Cobot}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{PC?\glossentry{PC}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Cobot?\glossentry{Cobot}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{ToF-Sensor?\glossentry{ToF}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{ToF-Sensor?\glossentry{ToF}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{5} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{5} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{5} diff --git a/Bachelorarbeit/Bachelorarbeit/main.glo b/Bachelorarbeit/Bachelorarbeit/main.glo deleted file mode 100644 index 
b18b14f44e5ff3df639aab03402e91f06915c409..0000000000000000000000000000000000000000 --- a/Bachelorarbeit/Bachelorarbeit/main.glo +++ /dev/null @@ -1,33 +0,0 @@ -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Cobot?\glossentry{Cobot}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{PC?\glossentry{PC}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Cobot?\glossentry{Cobot}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Pose?\glossentry{Pose}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Kollisionsobjekt?\glossentry{KO}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{Pose?\glossentry{Pose}|setentrycounter[]{page}"\glsnumberformat}{2} -\glossaryentry{RGB-D?\glossentry{RGB-D}|setentrycounter[]{page}"\glsnumberformat}{3} -\glossaryentry{Arbeitsraum?\glossentry{AR}|setentrycounter[]{page}"\glsnumberformat}{3} -\glossaryentry{KI?\glossentry{KI}|setentrycounter[]{page}"\glsnumberformat}{3} -\glossaryentry{LiDAR?\glossentry{LIDAR}|setentrycounter[]{page}"\glsnumberformat}{3} -\glossaryentry{RGB-Kamera?\glossentry{RGB}|setentrycounter[]{page}"\glsnumberformat}{3} -\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{3} -\glossaryentry{ToF-Sensor?\glossentry{ToF}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{LRF?\glossentry{LRF}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{LiDAR?\glossentry{LIDAR}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{LiDAR?\glossentry{LIDAR}|setentrycounter[]{page}"\glsnumberformat}{4} -\glossaryentry{LiDAR?\glossentry{LIDAR}|setentrycounter[]{page}"\glsnumberformat}{4} diff --git a/Bachelorarbeit/Bachelorarbeit/main.ist b/Bachelorarbeit/Bachelorarbeit/main.ist deleted file mode 100644 index f7676255afa045ef0b50e074e94a0ad3ee986798..0000000000000000000000000000000000000000 --- a/Bachelorarbeit/Bachelorarbeit/main.ist +++ /dev/null @@ -1,29 +0,0 @@ -% makeindex style file created by the glossaries package -% for document 'main' on 2025-2-20 -actual '?' -encap '|' -level '!' 
-quote '"'
-keyword "\\glossaryentry"
-preamble "\\glossarysection[\\glossarytoctitle]{\\glossarytitle}\\glossarypreamble\n\\begin{theglossary}\\glossaryheader\n"
-postamble "\%\n\\end{theglossary}\\glossarypostamble\n"
-group_skip "\\glsgroupskip\n"
-item_0 "\%\n"
-item_1 "\%\n"
-item_2 "\%\n"
-item_01 "\%\n"
-item_x1 "\\relax \\glsresetentrylist\n"
-item_12 "\%\n"
-item_x2 "\\relax \\glsresetentrylist\n"
-delim_0 "\{\\glossaryentrynumbers\{\\relax "
-delim_1 "\{\\glossaryentrynumbers\{\\relax "
-delim_2 "\{\\glossaryentrynumbers\{\\relax "
-delim_t "\}\}"
-delim_n "\\delimN "
-delim_r "\\delimR "
-headings_flag 1
-heading_prefix "\\glsgroupheading\{"
-heading_suffix "\}\\relax \\glsresetentrylist "
-symhead_positive "glssymbols"
-numhead_positive "glsnumbers"
-page_compositor "."
diff --git a/Bachelorarbeit/Bachelorarbeit/main.nlo b/Bachelorarbeit/Bachelorarbeit/main.nlo
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/Bachelorarbeit/Bachelorarbeit/text.tex b/Bachelorarbeit/Bachelorarbeit/text.tex
index 87cafa1e46ae21be8d7a5bafcf70bff92b14cbb5..621fc0e9b4f8b7589e5e464dff89357799488665 100644
--- a/Bachelorarbeit/Bachelorarbeit/text.tex
+++ b/Bachelorarbeit/Bachelorarbeit/text.tex
@@ -1,52 +1,44 @@
 \chapter{Introduction}
 % Why was this topic chosen?
-Collaborative robots are becoming ever more relevant with the development towards Industry 5.0, since they enable, among other things, the collaboration of human and machine\cite{vogel-heuser_von_2023}.
+Collaborative robots are becoming ever more relevant with the development towards Industry 5.0, since they enable, among other things, the collaboration of human and machine\cite{vogel-heuser_von_2023}. This is only guaranteed, however, if robots pose no danger to humans.
 \\Especially in collaboration with physically impaired people, \acrfull{Cobots} prove helpful, since they can ease the handling of work materials \cite{noauthor_iidea_nodate}.
-\\Currently, collisions of \acrshort{Cobots} are often detected with the help of intrinsic sensors or prevented by a spatial separation of human and machine. If a spatial separation is not possible and no collision may occur nonetheless, the sensor system presented in this bachelor thesis can be used.
+\\In an aging society, in which the average age of the working population keeps rising, \acrshort{Cobots} can also provide great relief by taking over repetitive tasks that would physically strain a human\cite{haddadin_physical_2016}.
+\\For these reasons, robot systems are needed that make the collaboration between human and machine both efficient and safe.
+\\Currently, collisions of \acrshort{Cobots} with humans or objects are often detected with the help of intrinsic sensors\cite{popov_collision_2017} or prevented by a spatial separation of human and machine.
+\\If a spatial separation is not possible, yet no collision may occur in order to guarantee an efficient and safe workflow, the sensor system presented in this bachelor thesis can offer a solution.
 % What is the goal of this report?
 \\The goal of the sensor system is, complementing collision detection, to avoid most collisions.
-Avoiding collisions enables the \acrshort{Cobot} to plan and execute a new route after an avoided collision,
-in order to fulfill its task despite the near collision.
-% Which methods or approaches were used to achieve the goals or to conduct the research?
-\\ \indent The system uses \acrfull{ToFs} from STMicroelectronics to measure the distance of potential collision objects.
-The measured values are evaluated and visualized on a Linux-based personal computer (\acrshort{PC}) and interpreted together with other sensor inputs.
-To test the system, I simulated the encoder input of the \acrfull{UR} in Gazebo Classic and generated the measured values of the \acrshort{ToFs} in a test setup.
+Avoiding collisions enables the \acrshort{Cobot} to plan and execute a new route after an avoided collision, in order to fulfill its task despite the near collision.
 % Which conclusions were drawn based on the results?
-\\\acrshort{ToFs} are suitable for monitoring the workspace of a robot,
-since they are precise enough to provide distance data and can thus enable collision avoidance by recognizing objects in the workspace.
+\\The sensor system presented in this thesis is limited to avoiding collisions by stopping the current motion.
 % What significance do they have for the topic or for practice?
 \\Through collision avoidance, a \acrshort{Cobot} can work more autonomously, thereby increasing efficiency and safety in production.
-
-\section{Motivation}
- %General overview of the topic
- As part of my practical project, I developed a system whose goal was to realize extrinsic sensor monitoring of the workspace of a \acrfull{UR}.
- With the sensor input, the robot's surroundings can be monitored and, if necessary, reactions to changed circumstances can be triggered. The system would be most useful in areas where it is difficult to establish routines, for example in collaboration with humans.
- The advantage that extrinsic sensing brings is that one is no longer limited to the detection of collisions and can preventively avert a collision in different ways.
- \\
- %Specification of the topic
- \indent In robotics, different kinds of sensors are used to make \acrshort{Cobots} safer. Broadly, a distinction is made between intrinsic and exteroceptive sensors.\cite{noauthor_robotics_2021} Intrinsic sensors only observe what can be measured inside a robot, for example the position of the joints read by the encoder, or the torque determined by the torque monitor in the joints. The measured torques are often used to recognize whether a collision with surrounding objects may have occurred.\cite{popov_collision_2017} Exteroceptive sensors observe the robot's surroundings and provide information on possible collision objects or on objects that are to be grasped with the robot's end effector.
- Examples of exteroceptive sensors would be different kinds of cameras, laser scanners (\acrshort{ToFs}), or ultrasonic sensors.\cite{li_common_2019}
- \\
- %Core of the thesis
- \indent For the detection of potential collision objects within reach of the serial kinematics of the \acrshort{UR}, I used \acrshort{ToFs} from STMicroelectronics.\cite{noauthor_vl53l7cx_nodate}
- \\
- The \acrshort{ToFs} emit a light pulse and measure how long the infrared light takes until it is perceived again by the receiver.\cite{hering_sensoren_2018}
- My aim is to find out to what extent \acrshort{ToFs} can perceive obstacles,
- with which other sensor information or general data the measurements must be fused in order to be able to avoid collisions, and how the sensor data must be processed to obtain a meaningful digital projection of the workspace of the \acrshort{UR}.
+
 \section{Objective}
- % - What exactly is the problem that is to be solved?
- % - What is the goal of the thesis?
- Using ToF sensors, the serial robot is to be stopped when a human is too close to the robot.
-\section{Research Question}
- To what extent can collisions be prevented with ToF sensors?
+ In previous works, different sensors, sensor placements, and communication interfaces have already been evaluated. Based on these evaluations, a framework for this thesis was drafted, which is explained in more detail in the chapter ``State of the Art''.
+ \\The goal of this thesis is to generate a complete, digital, three-dimensional representation of the surroundings of a serial robot with the help of exteroceptive distance sensors from the ego perspective.
+ ``Complete'' is understood here to mean that the representation contains enough information to prevent a collision with an object in the workspace.
+ \\This digital representation of the workspace is to exclude the robot itself.
+ The workspace is generally understood as the space that can be reached by a robot.
+ \\At the current stage, no distinction needs to be made between the target of a manipulation and any other object, with the exception of the robot itself.
+
+\section{Research Questions}
+ From the stated objectives, the following research questions can be defined:
+ \begin{itemize}
+ \item Can a collision with a human be prevented with exteroceptive sensors?
+ \item How does one achieve complete workspace monitoring of a serial robot with exteroceptive ToF sensors from the ego perspective?
+ \item How does one evaluate the sensor data when the sensors are not stationary?
+ \item How does one distinguish objects in the workspace from the serial robot itself?
+ \end{itemize}
+
 \section{Structure of the Thesis}
 First, this thesis discusses alternative solutions for achieving collision avoidance. Subsequently, the requirements for the sensor system are defined in more detail. The chapter ``Implementation'' then explains how these requirements are met. The final chapter records whether the requirements were fulfilled and gives an outlook on how the system can be improved.
 \cleardoublepage
 \chapter{State of the Art}
- In the field of \acrlong{Cobots}, great progress has been made in recent years. This is partly because the demand for \acrshort{Cobots} has grown constantly in recent years, and with it the financial means available to research in this field.
+ \begin{figure}[h]
 \centering
 \includegraphics[scale=0.5]{images/Cobots-Forecast-Global-Market-1024x576.jpg}
@@ -57,7 +49,7 @@ da sie präzise genug sind um Entfernungsdaten zu liefern und so die Kollisionsv
 Moreover, the industrial revolution -Industry 4.0- goes hand in hand with a rising demand for personalized products, which makes the establishment of routines in production unattractive. This means that \acrshort{Cobots} must become better at perceiving their surroundings so that they can be made more universally deployable.\cite{liu_application_2024}
 \section{Collision Avoidance and Collision Detection}
 %Why are collision avoidance and collision detection important at all?
- Robot arms must be able to exert large forces in order to lift loads. A robot arm without sensors has no way of recognizing whether it has reached the desired pose of the end effector. Furthermore, the force that a robot arm exerts on a collision object is limited only by the motor power, which poses dangers to humans and material.
+ Robot arms must be able to exert large forces in order to lift loads. A robot arm without sensors has no way of recognizing whether it has reached the desired pose of the end effector. Furthermore, the force that a robot arm exerts on an object is limited only by the motor power, which poses dangers to humans and material.
 \\
 %Paper on collision detection with only encoders and torque monitors
 With an encoder, a serial robot can determine the absolute angle of its joints and, from this and other geometric properties of the kinematics, the absolute position of the individual links. By using torque monitors and data on the centers of mass, the weight, and the pose of the individual links, an expected torque about the axis of rotation can be determined for each joint and compared with the measured torque. Provided that the end effector has not grasped an object, it can be assumed that a deviation which cannot be attributed to accelerations indicates a collision.\cite{popov_collision_2017}
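+ The following minimal sketch illustrates this comparison; it is not the implementation from \cite{popov_collision_2017}, and all torque values and thresholds are hypothetical placeholders rather than outputs of a real dynamic model:
+ \begin{verbatim}
+ # Hypothetical torque-residual check for a six-joint robot: flag a
+ # collision when the measured torque deviates from the model-based
+ # expected torque by more than a per-joint threshold (values in Nm).
+ EXPECTED = [0.0, 12.4, 5.1, 0.8, 0.2, 0.0]    # from the dynamic model
+ THRESHOLDS = [2.0, 3.0, 2.5, 1.5, 1.0, 1.0]   # tuned per joint
+
+ def collision_suspected(measured):
+     """Return True if any joint's torque residual exceeds its threshold."""
+     return any(abs(m - e) > t
+                for m, e, t in zip(measured, EXPECTED, THRESHOLDS))
+
+ # Joint 2 deviates by 7.5 Nm although the robot is not accelerating:
+ print(collision_suspected([0.1, 19.9, 5.0, 0.7, 0.2, 0.0]))  # -> True
+ \end{verbatim}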
@@ -115,8 +107,19 @@
 \includegraphics[scale=0.2]{images/VL53L7CX_Package.jpg}
 \caption{Package of a VL53L7CX, Source:\cite{noauthor_vl53l7cx_nodate}}
 \label{VL53L7CX Package}
- \end{figure}
-
+ \end{figure}
+\section{Framework}
+\begin{itemize}
+ \item One or more \acrshort{ToFs} are to be used.
+ \item The sensors are to be mounted on the robot in order to achieve an ego perspective.
+\end{itemize}
+\chapter{Requirements}
+
+ First, the general requirements are defined in rough terms here; they are then specified in more detail.
+ \begin{itemize}
+ \item The level of detail of the information about the robot's workspace is sufficient to perceive a human.
+ \item The information is current enough that a collision can still be avoided with it.
+ \end{itemize}
 \chapter{Implementation}
 \section{Approach}
 To first find out whether my plan is feasible at all, I searched GitHub for similar projects.\cite{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate}
 Since I want to use several sensors in my project, my external supervisor, Sophie Charlotte Keunecke, suggested the Raspberry Pi Pico \cite{noauthor_pico-series_nodate} \acrfull{MCU} for the implementation, since this \acrshort{MCU} has two \acrfull{I2C} interfaces.
 To visualize the data, I want to use the \acrfull{RVIZ} program on a \acrfull{NUC} running Ubuntu 22.04 with \acrfull{ROS} Humble.
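+ As a feasibility sketch of this pipeline (the topic name tof/points, the frame tof\_link, and the constant placeholder distances are assumptions; a real node would read the VL53L7CX zone distances from the \acrshort{MCU}), one frame of an 8x8 ToF measurement could be published for \acrshort{RVIZ} roughly as follows:
+ \begin{verbatim}
+ # Minimal rclpy sketch: publish one 8x8 ToF frame as a PointCloud2.
+ import math
+ import rclpy
+ from rclpy.node import Node
+ from sensor_msgs.msg import PointCloud2
+ from sensor_msgs_py import point_cloud2
+ from std_msgs.msg import Header
+
+ FOV = math.radians(90.0)  # VL53L7CX field of view (approximate)
+
+ class TofCloudPublisher(Node):
+     def __init__(self):
+         super().__init__('tof_cloud_publisher')
+         self.pub = self.create_publisher(PointCloud2, 'tof/points', 10)
+         self.timer = self.create_timer(0.1, self.publish_frame)
+
+     def publish_frame(self):
+         distances = [[1.0] * 8 for _ in range(8)]  # placeholder: 1 m
+         points = []
+         for row in range(8):
+             for col in range(8):
+                 # Spread the 8x8 zones across the field of view.
+                 ax = (col - 3.5) / 8.0 * FOV
+                 ay = (row - 3.5) / 8.0 * FOV
+                 d = distances[row][col]
+                 points.append((d * math.tan(ax), d * math.tan(ay), d))
+         header = Header(frame_id='tof_link',
+                         stamp=self.get_clock().now().to_msg())
+         self.pub.publish(point_cloud2.create_cloud_xyz32(header, points))
+
+ def main():
+     rclpy.init()
+     rclpy.spin(TofCloudPublisher())
+
+ if __name__ == '__main__':
+     main()
+ \end{verbatim}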
diff --git a/Bachelorarbeit/Forschungsfragen.txt b/Bachelorarbeit/Forschungsfragen.txt
index c7135aeee91d6f7865ce1ecb0149ad97378ed3cc..303369ec8c3a62f9c7b4393a6148cfbae05d473b 100644
--- a/Bachelorarbeit/Forschungsfragen.txt
+++ b/Bachelorarbeit/Forschungsfragen.txt
@@ -2,12 +2,12 @@ Inwieweit lassen sich mit ToF-Sensoren Kollisionen verhindern?
-1. How does one achieve the most complete possible workspace monitoring with ToF sensors?
- "Answer", in short: By mounting several sensors on the robot itself.
+1. How does one achieve complete workspace monitoring of a serial robot with exteroceptive ToF sensors from the ego perspective?
+ "Answer", in short: By using several sensors.
-2. How does one evaluate the sensor data when the sensors are not fixed in space?
+2. How does one evaluate the sensor data when the sensors are not stationary?
 "Answer", in short: By including the motion of the link to which the sensors are attached in the evaluation.
-3. How do I distinguish potential collision objects from the serial robot?
+3. How does one distinguish potential collision objects from the serial robot itself?
 "Answer", in short: By filtering the sensor data and ignoring the points that lie on the robot's links.
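+
+Sketch for the short answers to 2 and 3 (hypothetical frames and geometry; the
+4x4 pose matrix would come from the robot's forward kinematics, e.g. a TF
+lookup, and the 8 cm link radius is an assumption, not the implementation of
+this thesis):
+
+    # Transform points from the moving sensor frame into the fixed base
+    # frame, then drop points that lie close to a robot link, so that
+    # only potential collision objects remain.
+    import numpy as np
+
+    def to_base_frame(points_sensor, T_base_sensor):
+        """Apply a 4x4 homogeneous transform to an (N, 3) point array."""
+        homo = np.hstack([points_sensor, np.ones((len(points_sensor), 1))])
+        return (T_base_sensor @ homo.T).T[:, :3]
+
+    def filter_self_points(points_base, link_start, link_end, radius=0.08):
+        """Keep points farther than radius from the link_start->link_end segment."""
+        seg = link_end - link_start
+        t = np.clip((points_base - link_start) @ seg / (seg @ seg), 0.0, 1.0)
+        closest = link_start + t[:, None] * seg
+        dist = np.linalg.norm(points_base - closest, axis=1)
+        return points_base[dist > radius]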
diff --git a/Bachelorarbeit/Fragen_an_Sophie b/Bachelorarbeit/Fragen_an_Sophie
new file mode 100644
index 0000000000000000000000000000000000000000..1cbeda5a480e5c9afb748c91e2724eefe4effc4c
--- /dev/null
+++ b/Bachelorarbeit/Fragen_an_Sophie
@@ -0,0 +1,4 @@
+Questions for Sophie:
+Should the "State of the Art" chapter go more into the foundations of my BA (e.g. the sensor-comparison master's thesis), or into other systems that achieve something similar with a completely different approach?
+How much detail should I go into on how the VL53L7CX works, or is that up to me? (e.g. VCSEL, DOE, and SPAD)
+What was given as the framework at the start of the BA? (e.g. ego perspective, sensor choice: ToF sensor, ROS2 Humble, NUC, Raspberry Pi Pico)