diff --git a/Praxiprojekt_Bericht/BA.bib b/Praxiprojekt_Bericht/BA.bib index a0f307bce83322d7a33dd25380f37e82075fce71..008a836f1b6c89b59dbfd15d30c4f302b9e7b498 100644 --- a/Praxiprojekt_Bericht/BA.bib +++ b/Praxiprojekt_Bericht/BA.bib @@ -1,203 +1,291 @@ -@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025, - title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation}, - rights = {{BSD}-3-Clause}, - url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation}, - publisher = {Universal Robots A/S}, - urldate = {2025-02-17}, - date = {2025-02-13}, - note = {original-date: 2021-12-15T12:18:45Z}, +@online{noauthor_tof_imager_micro_rosteensy_pcl_publisherteensy_pcl_publisherino_nodate, + title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher/teensy\_pcl\_publisher.ino at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}}, + url = {https://github.com/adityakamath/tof_imager_micro_ros/blob/humble/teensy_pcl_publisher/teensy_pcl_publisher.ino#L177}, + urldate = {2025-02-21}, + file = {tof_imager_micro_ros/teensy_pcl_publisher/teensy_pcl_publisher.ino at humble · adityakamath/tof_imager_micro_ros · GitHub:C\:\\Users\\reebe\\Zotero\\storage\\PYV8KTSC\\teensy_pcl_publisher.html:text/html}, } -@article{haddadin_robot_2017, - title = {Robot Collisions: A Survey on Detection, Isolation, and Identification}, - volume = {33}, - issn = {1941-0468}, - url = {https://ieeexplore.ieee.org/abstract/document/8059840}, - doi = {10.1109/TRO.2017.2723903}, - shorttitle = {Robot Collisions}, - abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.}, - pages = {1292--1312}, - number = {6}, - journaltitle = {{IEEE} Transactions on Robotics}, - author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin}, - urldate = {2025-02-12}, - date = {2017-12}, - note = {Conference Name: {IEEE} Transactions on Robotics}, - keywords = {Algorithm design and analysis, Collision avoidance, Collision detection, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), Real-time systems, Robot sensing systems, safe robotics, Service robots}, - file = {Accepted Version:C\:\\Users\\Rene\\Zotero\\storage\\IEXJFAMF\\Haddadin et al. 
- 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\LDB3Q92K\\8059840.html:text/html}, +@online{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate, + title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}}, + url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher}, + urldate = {2025-02-21}, + file = {tof_imager_micro_ros/teensy_pcl_publisher at humble · adityakamath/tof_imager_micro_ros · GitHub:C\:\\Users\\reebe\\Zotero\\storage\\TEVM2A5B\\teensy_pcl_publisher.html:text/html}, } -@book{hertzberg_mobile_2012, - location = {Berlin, Heidelberg}, - title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik}, - rights = {https://www.springernature.com/gp/researchers/text-and-data-mining}, - isbn = {978-3-642-01725-4 978-3-642-01726-1}, - url = {https://link.springer.com/10.1007/978-3-642-01726-1}, - series = {{eXamen}.press}, - shorttitle = {Mobile Roboter}, - publisher = {Springer Berlin Heidelberg}, - author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas}, - urldate = {2025-02-12}, - date = {2012}, - langid = {german}, - doi = {10.1007/978-3-642-01726-1}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\RLTU9P46\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf}, +@software{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025, + title = {sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library}, + url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library}, + publisher = {{SparkFun} Electronics}, + urldate = {2025-02-21}, + date = {2025-01-28}, + note = {original-date: 2021-10-22T21:06:36Z}, } -@collection{hering_sensoren_2018, - location = {Wiesbaden}, - title = {Sensoren in Wissenschaft und Technik}, - rights = {http://www.springer.com/tdm}, - isbn = {978-3-658-12561-5 978-3-658-12562-2}, - url = {http://link.springer.com/10.1007/978-3-658-12562-2}, - publisher = {Springer Fachmedien Wiesbaden}, - editor = {Hering, Ekbert and Schönfelder, Gert}, - urldate = {2025-02-12}, - date = {2018}, - langid = {german}, - doi = {10.1007/978-3-658-12562-2}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\9TI57WXD\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf}, +@online{noauthor_vlp_nodate, + title = {{VLP} 16 {\textbar} Ouster}, + url = {https://ouster.com/products/hardware/vlp-16}, + abstract = {Mid-range lidar sensor}, + urldate = {2025-02-20}, + langid = {english}, + file = {Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\AR82YJRS\\vlp-16.html:text/html}, } -@article{saudabayev_sensors_2015, - title = {Sensors for Robotic Hands: A Survey of State of the Art}, - volume = {3}, - issn = {2169-3536}, - url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549}, - doi = {10.1109/ACCESS.2015.2482543}, - shorttitle = {Sensors for Robotic Hands}, - abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. 
At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.}, - pages = {1765--1782}, - journaltitle = {{IEEE} Access}, - author = {Saudabayev, Artur and Varol, Huseyin Atakan}, - urldate = {2025-02-12}, - date = {2015}, - note = {Conference Name: {IEEE} Access}, - keywords = {Robot sensing systems, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors, Sensors}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HR7ZUF8W\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\484D4R7H\\7283549.html:text/html}, +@article{niclass_design_2012, + title = {Design and characterization of a 256x64-pixel single-photon imager in {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor}, + volume = {20}, + rights = {© 2012 {OSA}}, + issn = {1094-4087}, + url = {https://opg.optica.org/oe/abstract.cfm?uri=oe-20-11-11863}, + doi = {10.1364/OE.20.011863}, + abstract = {We introduce an optical time-of-flight image sensor taking advantage of a {MEMS}-based laser scanning device. Unlike previous approaches, our concept benefits from the high timing resolution and the digital signal flexibility of single-photon pixels in {CMOS} to allow for a nearly ideal cooperation between the image sensor and the scanning device. This technique enables a high signal-to-background light ratio to be obtained, while simultaneously relaxing the constraint on size of the {MEMS} mirror. These conditions are critical for devising practical and low-cost depth sensors intended to operate in uncontrolled environments, such as outdoors. A proof-of-concept prototype capable of operating in real-time was implemented. This paper focuses on the design and characterization of a 256x64-pixel image sensor, which also comprises an event-driven readout circuit, an array of 64 row-level high-throughput time-to-digital converters, and a 16Gbit/s global readout circuit. Quantitative evaluation of the sensor under 2klux of background light revealed a repeatability error of 13.5cm throughout the distance range of 20 meters.}, + pages = {11863--11881}, + number = {11}, + journaltitle = {Optics Express}, + shortjournal = {Opt. Express, {OE}}, + author = {Niclass, Cristiano and Ito, Kota and Soga, Mineki and Matsubara, Hiroyuki and Aoyagi, Isao and Kato, Satoru and Kagami, Manabu}, + urldate = {2025-02-20}, + date = {2012-05-21}, + note = {Publisher: Optica Publishing Group}, + keywords = {Deformable mirrors, Diode lasers, Image sensors, Light emitting diodes, Optical systems, Systems design}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\BZWW7BVY\\Niclass et al. - 2012 - Design and characterization of a 256x64-pixel single-photon imager in CMOS for a MEMS-based laser sc.pdf:application/pdf}, } -@article{paya_state---art_2017, - title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors}, - volume = {2017}, - rights = {Copyright © 2017 L. 
Payá et al.}, - issn = {1687-7268}, - url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650}, - doi = {10.1155/2017/3497650}, - abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.}, - pages = {3497650}, - number = {1}, - journaltitle = {Journal of Sensors}, - author = {Payá, L. and Gil, A. and Reinoso, O.}, - urldate = {2025-02-12}, - date = {2017}, +@article{surmann_autonomous_2003, + title = {An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of indoor environments}, + volume = {45}, + issn = {0921-8890}, + url = {https://www.sciencedirect.com/science/article/pii/S0921889003001556}, + doi = {10.1016/j.robot.2003.09.004}, + abstract = {Digital 3D models of the environment are needed in rescue and inspection robotics, facility managements and architecture. This paper presents an automatic system for gaging and digitalization of 3D indoor environments. It consists of an autonomous mobile robot, a reliable 3D laser range finder and three elaborated software modules. The first module, a fast variant of the Iterative Closest Points algorithm, registers the 3D scans in a common coordinate system and relocalizes the robot. The second module, a next best view planner, computes the next nominal pose based on the acquired 3D data while avoiding complicated obstacles. The third module, a closed-loop and globally stable motor controller, navigates the mobile robot to a nominal pose on the base of odometry and avoids collisions with dynamical obstacles. The 3D laser range finder acquires a 3D scan at this pose. The proposed method allows one to digitalize large indoor environments fast and reliably without any intervention and solves the {SLAM} problem. The results of two 3D digitalization experiments are presented using a fast octree-based visualization method.}, + pages = {181--198}, + number = {3}, + journaltitle = {Robotics and Autonomous Systems}, + shortjournal = {Robotics and Autonomous Systems}, + author = {Surmann, Hartmut and Nüchter, Andreas and Hertzberg, Joachim}, + urldate = {2025-02-20}, + date = {2003-12-31}, + keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, {SLAM}}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\BKNJW2B7\\Surmann et al. 
- 2003 - An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of ind.pdf:application/pdf;ScienceDirect Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\H82LXSD3\\S0921889003001556.html:text/html}, +} + +@article{raj_survey_2020, + title = {A Survey on {LiDAR} Scanning Mechanisms}, + volume = {9}, + rights = {http://creativecommons.org/licenses/by/3.0/}, + issn = {2079-9292}, + url = {https://www.mdpi.com/2079-9292/9/5/741}, + doi = {10.3390/electronics9050741}, + abstract = {In recent years, light detection and ranging ({LiDAR}) technology has gained huge popularity in various applications such as navigation, robotics, remote sensing, and advanced driving assistance systems ({ADAS}). This popularity is mainly due to the improvements in {LiDAR} performance in terms of range detection, accuracy, power consumption, as well as physical features such as dimension and weight. Although a number of literatures on {LiDAR} technology have been published earlier, not many has been reported on the state-of-the-art {LiDAR} scanning mechanisms. The aim of this article is to review the scanning mechanisms employed in {LiDAR} technology from past research works to the current commercial products. The review highlights four commonly used mechanisms in {LiDAR} systems: Opto-mechanical, electromechanical, micro-electromechanical systems ({MEMS}), and solid-state scanning. The study reveals that electro-mechanical scanning is the most prominent technology in use today. The commercially available 1D time of flight ({TOF}) {LiDAR} instrument is currently the most attractive option for conversion from 1D to 3D {LiDAR} system, provided that low scanning rate is not an issue. As for applications with low size, weight, and power ({SWaP}) requirements, {MEMS} scanning is found to be the better alternative. {MEMS} scanning is by far the more matured technology compared to solid-state scanning and is currently given great emphasis to increase its robustness for fulfilling the requirements of {ADAS} applications. Finally, solid-state {LiDAR} systems are expected to fill in the gap in {ADAS} applications despite the low technology readiness in comparison to {MEMS} scanners. However, since solid-state scanning is believed to have superior robustness, field of view ({FOV}), and scanning rate potential, great efforts are given by both academics and industries to further develop this technology.}, + pages = {741}, + number = {5}, + journaltitle = {Electronics}, + author = {Raj, Thinal and Hashim, Fazida Hanim and Huddin, Aqilah Baseri and Ibrahim, Mohd Faisal and Hussain, Aini}, + urldate = {2025-02-20}, + date = {2020-05}, langid = {english}, - note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\EZ473NGD\\Payá et al. - 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\86LDAQ62\\3497650.html:text/html}, + note = {Number: 5 +Publisher: Multidisciplinary Digital Publishing Institute}, + keywords = {electro-mechanical scanning, {LiDAR}, {MEMS} scanning, opto-mechanical scanning, solid-state {LiDAR}}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\2PBQYF7P\\Raj et al. 
- 2020 - A Survey on LiDAR Scanning Mechanisms.pdf:application/pdf}, } -@online{noauthor_vl53l7cx_nodate, - title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}}, - url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html}, - abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}}, - urldate = {2025-02-12}, +@online{noauthor_file20200501_2020, + title = {File:20200501 Time of flight.svg - Wikipedia}, + url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg}, + shorttitle = {File}, + urldate = {2025-02-20}, + date = {2020-05-01}, langid = {english}, - file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\VEYLCCLA\\vl53l7cx.html:text/html}, + file = {Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\H7EUEBHT\\File20200501_Time_of_flight.html:text/html}, } -@online{noauthor_pico-series_nodate, - title = {Pico-series Microcontrollers - Raspberry Pi Documentation}, - url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html}, - abstract = {The official documentation for Raspberry Pi computers and microcontrollers}, - urldate = {2025-02-12}, +@article{jain_survey_nodate, + title = {A survey of Laser Range Finding}, + url = {http://www.siddjain.com/ee236a.pdf}, + abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.}, + author = {Jain, Siddharth}, + urldate = {2025-02-19}, langid = {english}, - file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\KUCB8PVI\\pico-series.html:text/html}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\X2WNAHZB\\Jain - A survey of Laser Range Finding.pdf:application/pdf}, } -@online{noauthor_chatgpt_nodate, - title = {{ChatGPT}}, - url = {https://chatgpt.com}, - abstract = {A conversational {AI} system that listens, learns, and challenges}, - urldate = {2025-02-12}, - file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\ZT8MG8Y4\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html}, +@inproceedings{rashid_local_2020, + title = {Local and Global Sensors for Collision Avoidance}, + url = {https://ieeexplore.ieee.org/document/9235223}, + doi = {10.1109/MFI49285.2020.9235223}, + abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° {LiDAR} and a small range {RGB} camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. 
Results suggest higher efficiency and safety when using local and global setup.}, + eventtitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})}, + pages = {354--359}, + booktitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})}, + author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias}, + urldate = {2025-02-19}, + date = {2020-09}, + keywords = {Cameras, Laser radar, Production, Robot vision systems, Safety, Sensor fusion, Service robots}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\HAXPN6EL\\Rashid et al. - 2020 - Local and Global Sensors for Collision Avoidance.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\reebe\\Zotero\\storage\\4X42Y6TK\\9235223.html:text/html}, } -@software{iii_earlephilhowerarduino-pico_2025, - title = {earlephilhower/arduino-pico}, - rights = {{LGPL}-2.1}, - url = {https://github.com/earlephilhower/arduino-pico}, - abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards}, - author = {{III}, Earle F. Philhower}, - urldate = {2025-02-12}, - date = {2025-02-11}, - note = {original-date: 2021-02-25T04:20:27Z}, - keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi}, +@inproceedings{al_naser_fusion_2022, + title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment}, + url = {https://ieeexplore.ieee.org/document/9900548}, + doi = {10.1109/RO-MAN53752.2022.9900548}, + abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.}, + eventtitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})}, + pages = {532--537}, + booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})}, + author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen}, + urldate = {2025-02-19}, + date = {2022-08}, + note = {{ISSN}: 1944-9437}, + keywords = {Sensor fusion, Service robots, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Stability criteria, Thermal sensors}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\Q933FYY2\\Al Naser et al. 
- 2022 - Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a r.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\reebe\\Zotero\\storage\\942BAXF5\\9900548.html:text/html}, } -@online{noauthor_tutorials_nodate, - title = {Tutorials — {ROS} 2 Documentation: Humble documentation}, - url = {https://docs.ros.org/en/humble/Tutorials.html}, - urldate = {2025-02-12}, - file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\Rene\\Zotero\\storage\\28S5GUZ5\\Tutorials.html:text/html}, +@inproceedings{choi_xr-based_2022, + title = {An {XR}-based Approach to Safe Human-Robot Collaboration}, + url = {https://ieeexplore.ieee.org/document/9757621}, + doi = {10.1109/VRW55335.2022.00106}, + abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.}, + eventtitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})}, + pages = {481--482}, + booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})}, + author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin}, + urldate = {2025-02-19}, + date = {2022-03}, + keywords = {Safety, Service robots, Collaboration, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), Real-time systems, Robot sensing systems, safety distance, Three-dimensional displays}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\VYUDN5LQ\\Choi et al. - 2022 - An XR-based Approach to Safe Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\reebe\\Zotero\\storage\\GDI6TZQ2\\9757621.html:text/html}, } -@online{noauthor_examples_nodate, - title = {Examples - trimesh 4.6.2 documentation}, - url = {https://trimesh.org/examples.html}, - urldate = {2025-02-12}, - file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\Rene\\Zotero\\storage\\82WA6KM7\\examples.html:text/html}, +@inproceedings{amaya-mejia_vision-based_2022, + title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration}, + url = {https://ieeexplore.ieee.org/document/9981689}, + doi = {10.1109/IROS47612.2022.9981689}, + abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. 
For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.}, + eventtitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})}, + pages = {7331--7336}, + booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})}, + author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol}, + urldate = {2025-02-19}, + date = {2022-10}, + note = {{ISSN}: 2153-0866}, + keywords = {Safety, Service robots, Collaboration, Three-dimensional displays, Collision avoidance, Robot control, Solid modeling}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\XX9FL2U5\\Amaya-Mejía et al. - 2022 - Vision-Based Safety System for Barrierless Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\reebe\\Zotero\\storage\\29LFGV4B\\9981689.html:text/html}, } -@software{grans_sebastiangransros2-point-cloud-demo_2024, - title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo}, - rights = {{MIT}}, - url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo}, - abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2}, - author = {Grans, Sebastian}, - urldate = {2025-02-12}, - date = {2024-12-08}, - note = {original-date: 2020-06-30T16:55:21Z}, +@article{li_safe_2024, + title = {Safe human–robot collaboration for industrial settings: a survey}, + volume = {35}, + issn = {1572-8145}, + url = {https://doi.org/10.1007/s10845-023-02159-4}, + doi = {10.1007/s10845-023-02159-4}, + shorttitle = {Safe human–robot collaboration for industrial settings}, + abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. 
Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.}, + pages = {2235--2261}, + number = {5}, + journaltitle = {Journal of Intelligent Manufacturing}, + shortjournal = {J Intell Manuf}, + author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong}, + urldate = {2025-02-19}, + date = {2024-06-01}, + langid = {english}, + keywords = {Safety, Collaborative robots, Collision detection, Human–robot collaboration ({HRC}), Obstacle avoidance}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\4JS4CSVA\\Li et al. - 2024 - Safe human–robot collaboration for industrial settings a survey.pdf:application/pdf}, } -@article{wunderlich_rasante_2013, - title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren}, - volume = {8}, - rights = {http://onlinelibrary.wiley.com/{termsAndConditions}\#vor}, - issn = {1863-1460, 2191-1975}, - url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018}, - doi = {10.1002/opph.201300018}, - shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung}, - abstract = {Abstract - Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.}, - pages = {38--40}, - number = {3}, - journaltitle = {Optik \& Photonik}, - shortjournal = {Optik \& Photonik}, - author = {Wunderlich, Max}, - urldate = {2025-02-18}, - date = {2013-09}, - langid = {german}, - file = {Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\Rene\\Zotero\\storage\\H7CSUHLW\\Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf}, +@online{noauthor_can_nodate, + title = {Can the collaborative robot market experience a second growth surge in the post-pandemic era?}, + url = {https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/}, + abstract = {The market for collaborative robots is forecast to show strong growth over the coming years, with rising sales in industrial and non-industrial sectors.}, + titleaddon = {Interact Analysis}, + urldate = {2025-02-19}, + langid = {british}, + file = {Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\25UG57J5\\can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era.html:text/html}, } -@article{li_common_2019, - title = {Common Sensors in Industrial Robots: A Review}, - volume = {1267}, - issn = {1742-6588, 1742-6596}, - url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036}, - doi = {10.1088/1742-6596/1267/1/012036}, - shorttitle = {Common Sensors in Industrial Robots}, - abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. 
They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.}, - pages = {012036}, +@article{nath_review_2022, + title = {A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector}, + volume = {7}, + rights = {Copyright (c) 2022 Aditya S. Nath}, + issn = {2736-576X}, + url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624}, + doi = {10.24018/ejeng.2022.7.1.2624}, + abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling ({BIM}) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D {BIM} and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate {BIM} and sensor technology with tools such as {GIS}. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.}, + pages = {85--89}, number = {1}, - journaltitle = {Journal of Physics: Conference Series}, - shortjournal = {J. Phys.: Conf. 
Ser.}, - author = {Li, Peng and Liu, Xiangpeng}, - urldate = {2025-02-18}, - date = {2019-07-01}, + journaltitle = {European Journal of Engineering and Technology Research}, + author = {Nath, Aditya S.}, + urldate = {2025-02-19}, + date = {2022-02-28}, langid = {english}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\UVXS2R7J\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf}, + note = {Number: 1}, + keywords = {Sensors}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\AA6ZJJBN\\Nath - 2022 - A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector.pdf:application/pdf}, +} + +@article{maheepala_low_2021, + title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review}, + volume = {21}, + issn = {1558-1748}, + url = {https://ieeexplore.ieee.org/abstract/document/9165781}, + doi = {10.1109/JSEN.2020.3015932}, + shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices}, + abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.}, + pages = {1172--1186}, + number = {2}, + journaltitle = {{IEEE} Sensors Journal}, + author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.}, + urldate = {2025-02-19}, + date = {2021-01}, + note = {Conference Name: {IEEE} Sensors Journal}, + keywords = {Image sensors, Batteries, Cloud computing, image sensor, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\XSY3V6PK\\Maheepala et al. 
- 2021 - Low Power Processors and Image Sensors for Vision-Based IoT Devices A Review.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\reebe\\Zotero\\storage\\Y7EV2L8T\\9165781.html:text/html}, +} + +@article{liu_application_2024, + title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review}, + volume = {40}, + issn = {1044-7318}, + url = {https://doi.org/10.1080/10447318.2022.2041907}, + doi = {10.1080/10447318.2022.2041907}, + shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing}, + abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.}, + pages = {915--932}, + number = {4}, + journaltitle = {International Journal of Human–Computer Interaction}, + author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.}, + urldate = {2025-02-19}, + date = {2024-02-16}, + note = {Publisher: Taylor \& Francis +\_eprint: https://doi.org/10.1080/10447318.2022.2041907}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\G9ECNMWG\\Liu et al. - 2024 - Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing.pdf:application/pdf}, +} + +@inproceedings{popov_collision_2017, + location = {Lisbon}, + title = {Collision detection, localization \& classification for industrial robots with joint torque sensors}, + isbn = {978-1-5386-3518-6}, + url = {http://ieeexplore.ieee.org/document/8172400/}, + doi = {10.1109/ROMAN.2017.8172400}, + abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. 
The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.}, + eventtitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})}, + pages = {838--843}, + booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})}, + publisher = {{IEEE}}, + author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos}, + urldate = {2025-02-19}, + date = {2017-08}, + langid = {english}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\LVC2B7U6\\Popov et al. - 2017 - Collision detection, localization & classification for industrial robots with joint torque sensors.pdf:application/pdf}, +} + +@misc{noauthor_robotics_2021, + title = {Robotics - Vocabulary}, + url = {https://www.dinmedia.de/de/norm/iso-8373/348036781}, + shorttitle = {{ISO} 8373:2021-11}, + publisher = {{DIN} Media {GmbH}}, + urldate = {2025-02-19}, + date = {2021-11}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\6SUCZU6R\\DIN_EN_ISO_8373.pdf:application/pdf}, } @misc{noauthor_din_nodate, @@ -208,21 +296,20 @@ publisher = {{DIN} Media {GmbH}}, urldate = {2025-02-19}, langid = {german}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\XCP5RDRY\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\FFMUVR22\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf}, } @misc{noauthor_din_nodate-1, title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020}, url = {https://www.dinmedia.de/de/-/-/331246964}, - doi = {10.31030/3215258}, shorttitle = {{DIN} {EN} {ISO} 10218-2}, publisher = {{DIN} Media {GmbH}}, urldate = {2025-02-19}, - langid = {german}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\M7E9L4CP\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf}, + doi = {10.31030/3215258}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\HB28M28Z\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf}, } -@article{li_common_2019-1, +@article{li_common_2019, title = {Common Sensors in Industrial Robots: A Review}, volume = {1267}, issn = {1742-6588, 1742-6596}, @@ -237,17 +324,18 @@ author = {Li, Peng and Liu, Xiangpeng}, urldate = {2025-02-18}, date = {2019-07-01}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\WQ5C229K\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\WQ5C229K\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf}, } @misc{noauthor_din_nodate-2, title = {{DIN} {EN} {ISO} 10218-2:2021-03, Robotik\_- Sicherheitsanforderungen für Robotersysteme in industrieller Umgebung\_- Teil\_2: Robotersysteme, Roboteranwendungen und Integration von Roboterzellen ({ISO}/{DIS}\_10218-2:2020); Deutsche und Englische Fassung {prEN}\_ISO\_10218-2:2020}, url = {https://www.dinmedia.de/de/-/-/331246964}, + doi = {10.31030/3215258}, shorttitle = 
{{DIN} {EN} {ISO} 10218-2}, publisher = {{DIN} Media {GmbH}}, urldate = {2025-02-19}, - doi = {10.31030/3215258}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\HB28M28Z\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf}, + langid = {german}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\M7E9L4CP\\DIN EN ISO 10218-22021-03, Robotik_- Sicherheitsanforderungen für Robotersysteme in industrieller U.pdf:application/pdf}, } @misc{noauthor_din_nodate-3, @@ -258,287 +346,216 @@ publisher = {{DIN} Media {GmbH}}, urldate = {2025-02-19}, langid = {german}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\FFMUVR22\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf}, -} - -@misc{noauthor_robotics_2021, - title = {Robotics - Vocabulary}, - url = {https://www.dinmedia.de/de/norm/iso-8373/348036781}, - shorttitle = {{ISO} 8373:2021-11}, - publisher = {{DIN} Media {GmbH}}, - urldate = {2025-02-19}, - date = {2021-11}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\6SUCZU6R\\DIN_EN_ISO_8373.pdf:application/pdf}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\XCP5RDRY\\DIN EN ISO 10218-12021-09, Robotik_- Sicherheitsanforderungen_- Teil_1 Industrieroboter (ISODIS_1.pdf:application/pdf}, } -@inproceedings{popov_collision_2017, - location = {Lisbon}, - title = {Collision detection, localization \& classification for industrial robots with joint torque sensors}, - isbn = {978-1-5386-3518-6}, - url = {http://ieeexplore.ieee.org/document/8172400/}, - doi = {10.1109/ROMAN.2017.8172400}, - abstract = {High dynamic capabilities of industrial robots make them dangerous for humans and environment. To reduce this factor and advance collaboration between human and manipulator fast and reliable collision detection algorithm is required. To overcome this problem, we present an approach allowing to detect collision, localize action point and classify collision nature. Internal joint torque and encoder measurements were used to determine potential collisions with the robot links. This work proposes two ways of solving this problem: using classical analytical approach and learning approach implemented with neural network. The suggested algorithms were examined on the industrial robotic arm Kuka iiwa {LBR} 14 R820, ground truth information on the contact nature and its location were obtained with 3D {LIDAR} and camera.}, - eventtitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})}, - pages = {838--843}, - booktitle = {2017 26th {IEEE} International Symposium on Robot and Human Interactive Communication ({RO}-{MAN})}, - publisher = {{IEEE}}, - author = {Popov, Dmitry and Klimchik, Alexandr and Mavridis, Nikolaos}, - urldate = {2025-02-19}, - date = {2017-08}, +@article{li_common_2019-1, + title = {Common Sensors in Industrial Robots: A Review}, + volume = {1267}, + issn = {1742-6588, 1742-6596}, + url = {https://iopscience.iop.org/article/10.1088/1742-6596/1267/1/012036}, + doi = {10.1088/1742-6596/1267/1/012036}, + shorttitle = {Common Sensors in Industrial Robots}, + abstract = {The application of industrial robots has greatly promoted the development of industry in the past decades. Now with the proposal and prevalence of Industry 4.0, industrial robots are required to be more independent and intelligent to accomplish more complex and flexible tasks. 
The advancement of industrial robots relies on the development and progress of multiple technologies, among which sensors are the indispensable part. They can acquire abundant information to help industrial robots implement their functions. This paper reviews the recent literatures and gives a summary and introduction of the commonly used sensors in industrial robots. Additionally, the applications of these sensors in diverse functions of industrial robots are also presented. Finally, the developing direction and challenges of industrial robots in the future are discussed in the last part of this article.}, + pages = {012036}, + number = {1}, + journaltitle = {Journal of Physics: Conference Series}, + shortjournal = {J. Phys.: Conf. Ser.}, + author = {Li, Peng and Liu, Xiangpeng}, + urldate = {2025-02-18}, + date = {2019-07-01}, langid = {english}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\LVC2B7U6\\Popov et al. - 2017 - Collision detection, localization & classification for industrial robots with joint torque sensors.pdf:application/pdf}, -} - -@article{liu_application_2024, - title = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing: A Literature Review}, - volume = {40}, - issn = {1044-7318}, - url = {https://doi.org/10.1080/10447318.2022.2041907}, - doi = {10.1080/10447318.2022.2041907}, - shorttitle = {Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing}, - abstract = {The rapid development of robot technology has introduced a substantial impact on manufacturing. Numerous studies have been carried out to apply collaborative robots (cobots) to address manufacturing productivity and ergonomics issues, which has brought extensive opportunities. In this context, a systematic literature search in the Web of Science, Scopus, and Google Scholar databases was carried out by electronic and manual search. Thus, 59 relevant contributions out of 4488 studies were analyzed by using preferred reporting items for systematic reviews and meta-analysis ({PRISMA}). To provide an overview of the different results, studies are summarized according to the following criteria: country, author, year, study design, robot category, results, and future opportunities. The effects of cobots on safety, system design, workplace design, task scheduling, productivity, and ergonomics are discussed to provide a better understanding of the application of cobots in manufacturing. To incentive future research, this paper reviews the development of cobots in manufacturing and discusses future opportunities and directions from cobots and manufacturing system perspectives. This paper provides novel and valuable insights into cobots application and illustrates potential developments of future human-cobot interaction.}, - pages = {915--932}, - number = {4}, - journaltitle = {International Journal of Human–Computer Interaction}, - author = {Liu, Li and Guo, Fu and Zou, Zishuai and Duffy, Vincent G.}, - urldate = {2025-02-19}, - date = {2024-02-16}, - note = {Publisher: Taylor \& Francis -\_eprint: https://doi.org/10.1080/10447318.2022.2041907}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\G9ECNMWG\\Liu et al. 
- 2024 - Application, Development and Future Opportunities of Collaborative Robots (Cobots) in Manufacturing.pdf:application/pdf}, -} - -@article{maheepala_low_2021, - title = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices: A Review}, - volume = {21}, - issn = {1558-1748}, - url = {https://ieeexplore.ieee.org/abstract/document/9165781}, - doi = {10.1109/JSEN.2020.3015932}, - shorttitle = {Low Power Processors and Image Sensors for Vision-Based {IoT} Devices}, - abstract = {With the advancements of the Internet of Things ({IoT}) technology, applications of battery powered machine vision based {IoT} devices is rapidly growing. While numerous research works are being conducted to develop low power hardware solutions for {IoT} devices, image capture and image processing remain high power demanding processes leading to a short battery life. However, the power consumption of the machine vision based {IoT} devices can be minimized by the careful optimization of the hardware components that are used is these devices. In this article, we present a review of low power machine vision hardware components for the {IoT} applications. A guide to selecting the optimum processors and image sensors for a given battery powered machine vision based {IoT} device is presented. Next, the factors that must be considered when selecting processors and image sensors for a given {IoT} application are discussed, and selection criteria for the processors and image sensors are established. Then, the current commercially available hardware components are reviewed in accordance with the established selection criteria. Finally, the research trends in the field of battery powered machine vision based {IoT} devices are discussed, and the potential future research directions in the field are presented.}, - pages = {1172--1186}, - number = {2}, - journaltitle = {{IEEE} Sensors Journal}, - author = {Maheepala, Malith and Joordens, Matthew A. and Kouzani, Abbas Z.}, - urldate = {2025-02-19}, - date = {2021-01}, - note = {Conference Name: {IEEE} Sensors Journal}, - keywords = {Batteries, Cloud computing, image sensor, Image sensors, Internet of Things, low power, machine vision, Machine vision, processor, Program processors, Transceivers}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\XSY3V6PK\\Maheepala et al. - 2021 - Low Power Processors and Image Sensors for Vision-Based IoT Devices A Review.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\Y7EV2L8T\\9165781.html:text/html}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\UVXS2R7J\\Li und Liu - 2019 - Common Sensors in Industrial Robots A Review.pdf:application/pdf}, } -@article{nath_review_2022, - title = {A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector}, - volume = {7}, - rights = {Copyright (c) 2022 Aditya S. Nath}, - issn = {2736-576X}, - url = {https://www.ej-eng.org/index.php/ejeng/article/view/2624}, - doi = {10.24018/ejeng.2022.7.1.2624}, - abstract = {The study explores recent innovations in robotic and sensor-based technologies that are spearheading advancements in the construction sector to achieve improvements in construction quality, efficiency, and safety. Automation in construction, although coined as a concept in 1980s, has witnessed minimal progress in the level of application. 
The study attempts to identify issues constraining adoption of automation in the sector, the recent developments in technologies and their scope in construction, their applications and impacts, and way forward. The role of robotics in various stages of construction and its impact on a wider scale has been identified and discussed. The evolution of Building Information Modeling ({BIM}) has transitioned it into being an efficient mediator in the construction process with novel concepts such as 4D and 5D {BIM} and Building Knowledge Management. Various sensor technologies, functioning at diverse scales, have found wide-ranging applications on construction sites ranging from high-accuracy positioning to slow-tracking of personnel and materials, as well as, in progress monitoring and quality control. Information Technology has a major role in binding the sensor technology with on-site requirements to create positive results. A study was done to identify such technological interventions and various software utilities which integrate {BIM} and sensor technology with tools such as {GIS}. The factors which restrained developments in automation in construction sector were identified in the course of study. Various global examples of advanced automated construction technologies with applications in various stages of construction were discussed. The review successfully identifies the nascent technological innovations and their productive usage in relevant areas of construction sector.}, - pages = {85--89}, - number = {1}, - journaltitle = {European Journal of Engineering and Technology Research}, - author = {Nath, Aditya S.}, - urldate = {2025-02-19}, - date = {2022-02-28}, - langid = {english}, - note = {Number: 1}, - keywords = {Sensors}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\AA6ZJJBN\\Nath - 2022 - A Review of Advancements in Robotic and Sensor-based Technologies in Construction Sector.pdf:application/pdf}, +@article{wunderlich_rasante_2013, + title = {Rasante Entwicklung in der 3D‐Bildgebung: Weiterentwickelte Time‐of‐Flight‐Technologie verbessert miniaturisierte 3D‐Kameras und Sensoren}, + volume = {8}, + rights = {http://onlinelibrary.wiley.com/{termsAndConditions}\#vor}, + issn = {1863-1460, 2191-1975}, + url = {https://onlinelibrary.wiley.com/doi/10.1002/opph.201300018}, + doi = {10.1002/opph.201300018}, + shorttitle = {Rasante Entwicklung in der 3D‐Bildgebung}, + abstract = {Abstract + Optoelektrische Sensoren, die eine dreidimensionale Bildinformation auswerten können, eröffnen nicht nur der Mensch‐Maschinen‐Interaktion durch Gestensteuerung völlig neue Anwendungsmöglichkeiten. 
Auch für die Industrie erweitert sich durch diese Technologie die Prozessüberwachung und Steuerung sprichwörtlich um eine neue Dimension.}, + pages = {38--40}, + number = {3}, + journaltitle = {Optik \& Photonik}, + shortjournal = {Optik \& Photonik}, + author = {Wunderlich, Max}, + urldate = {2025-02-18}, + date = {2013-09}, + langid = {german}, + file = {Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung:C\:\\Users\\reebe\\Zotero\\storage\\H7CSUHLW\\Optik Photonik - 2013 - Wunderlich - Rasante Entwicklung in der 3D‐Bildgebung.pdf:application/pdf}, } -@online{noauthor_can_nodate, - title = {Can the collaborative robot market experience a second growth surge in the post-pandemic era?}, - url = {https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/}, - abstract = {The market for collaborative robots is forecast to show strong growth over the coming years, with rising sales in industrial and non-industrial sectors.}, - titleaddon = {Interact Analysis}, - urldate = {2025-02-19}, - langid = {british}, - file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\25UG57J5\\can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era.html:text/html}, +@software{noauthor_universalrobotsuniversal_robots_ros2_gazebo_simulation_2025, + title = {{UniversalRobots}/Universal\_Robots\_ROS2\_Gazebo\_Simulation}, + rights = {{BSD}-3-Clause}, + url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_Gazebo_Simulation}, + publisher = {Universal Robots A/S}, + urldate = {2025-02-17}, + date = {2025-02-13}, + note = {original-date: 2021-12-15T12:18:45Z}, } -@article{li_safe_2024, - title = {Safe human–robot collaboration for industrial settings: a survey}, - volume = {35}, - issn = {1572-8145}, - url = {https://doi.org/10.1007/s10845-023-02159-4}, - doi = {10.1007/s10845-023-02159-4}, - shorttitle = {Safe human–robot collaboration for industrial settings}, - abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. 
Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.}, - pages = {2235--2261}, - number = {5}, - journaltitle = {Journal of Intelligent Manufacturing}, - shortjournal = {J Intell Manuf}, - author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong}, - urldate = {2025-02-19}, - date = {2024-06-01}, - langid = {english}, - keywords = {Collision detection, Collaborative robots, Human–robot collaboration ({HRC}), Obstacle avoidance, Safety}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\4JS4CSVA\\Li et al. - 2024 - Safe human–robot collaboration for industrial settings a survey.pdf:application/pdf}, +@article{haddadin_robot_2017, + title = {Robot Collisions: A Survey on Detection, Isolation, and Identification}, + volume = {33}, + issn = {1941-0468}, + url = {https://ieeexplore.ieee.org/abstract/document/8059840}, + doi = {10.1109/TRO.2017.2723903}, + shorttitle = {Robot Collisions}, + abstract = {Robot assistants and professional coworkers are becoming a commodity in domestic and industrial settings. In order to enable robots to share their workspace with humans and physically interact with them, fast and reliable handling of possible collisions on the entire robot structure is needed, along with control strategies for safe robot reaction. The primary motivation is the prevention or limitation of possible human injury due to physical contacts. In this survey paper, based on our early work on the subject, we review, extend, compare, and evaluate experimentally model-based algorithms for real-time collision detection, isolation, and identification that use only proprioceptive sensors. This covers the context-independent phases of the collision event pipeline for robots interacting with the environment, as in physical human–robot interaction or manipulation tasks. The problem is addressed for rigid robots first and then extended to the presence of joint/transmission flexibility. The basic physically motivated solution has already been applied to numerous robotic systems worldwide, ranging from manipulators and humanoids to flying robots, and even to commercial products.}, + pages = {1292--1312}, + number = {6}, + journaltitle = {{IEEE} Transactions on Robotics}, + author = {Haddadin, Sami and De Luca, Alessandro and Albu-Schäffer, Alin}, + urldate = {2025-02-12}, + date = {2017-12}, + note = {Conference Name: {IEEE} Transactions on Robotics}, + keywords = {Service robots, Real-time systems, Robot sensing systems, Collision avoidance, Collision detection, Algorithm design and analysis, collision identification, collision isolation, Drones, flexible joint manipulators, human-friendly robotics, Human-robot interaction, physical human–robot interaction ({pHRI}), safe robotics}, + file = {Accepted Version:C\:\\Users\\reebe\\Zotero\\storage\\IEXJFAMF\\Haddadin et al. - 2017 - Robot Collisions A Survey on Detection, Isolation, and Identification.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\reebe\\Zotero\\storage\\LDB3Q92K\\8059840.html:text/html}, } -@inproceedings{amaya-mejia_vision-based_2022, - title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration}, - url = {https://ieeexplore.ieee.org/document/9981689}, - doi = {10.1109/IROS47612.2022.9981689}, - abstract = {Human safety has always been the main priority when working near an industrial robot. 
With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.}, - eventtitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})}, - pages = {7331--7336}, - booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})}, - author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol}, - urldate = {2025-02-19}, - date = {2022-10}, - note = {{ISSN}: 2153-0866}, - keywords = {Collision avoidance, Service robots, Safety, Collaboration, Robot control, Solid modeling, Three-dimensional displays}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\XX9FL2U5\\Amaya-Mejía et al. - 2022 - Vision-Based Safety System for Barrierless Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\29LFGV4B\\9981689.html:text/html}, +@book{hertzberg_mobile_2012, + location = {Berlin, Heidelberg}, + title = {Mobile Roboter: Eine Einführung aus Sicht der Informatik}, + rights = {https://www.springernature.com/gp/researchers/text-and-data-mining}, + isbn = {978-3-642-01725-4 978-3-642-01726-1}, + url = {https://link.springer.com/10.1007/978-3-642-01726-1}, + series = {{eXamen}.press}, + shorttitle = {Mobile Roboter}, + publisher = {Springer Berlin Heidelberg}, + author = {Hertzberg, Joachim and Lingemann, Kai and Nüchter, Andreas}, + urldate = {2025-02-12}, + date = {2012}, + langid = {german}, + doi = {10.1007/978-3-642-01726-1}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\RLTU9P46\\Hertzberg et al. - 2012 - Mobile Roboter Eine Einführung aus Sicht der Informatik.pdf:application/pdf}, } -@inproceedings{choi_xr-based_2022, - title = {An {XR}-based Approach to Safe Human-Robot Collaboration}, - url = {https://ieeexplore.ieee.org/document/9757621}, - doi = {10.1109/VRW55335.2022.00106}, - abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. 
This system can effectively provide task assistance and safety information to the user wearing an {XR} device.}, - eventtitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})}, - pages = {481--482}, - booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})}, - author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin}, - urldate = {2025-02-19}, - date = {2022-03}, - keywords = {Real-time systems, Robot sensing systems, Service robots, Safety, Collaboration, Three-dimensional displays, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), safety distance}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\VYUDN5LQ\\Choi et al. - 2022 - An XR-based Approach to Safe Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\GDI6TZQ2\\9757621.html:text/html}, +@collection{hering_sensoren_2018, + location = {Wiesbaden}, + title = {Sensoren in Wissenschaft und Technik}, + rights = {http://www.springer.com/tdm}, + isbn = {978-3-658-12561-5 978-3-658-12562-2}, + url = {http://link.springer.com/10.1007/978-3-658-12562-2}, + publisher = {Springer Fachmedien Wiesbaden}, + editor = {Hering, Ekbert and Schönfelder, Gert}, + urldate = {2025-02-12}, + date = {2018}, + langid = {german}, + doi = {10.1007/978-3-658-12562-2}, + file = {PDF:C\:\\Users\\reebe\\Zotero\\storage\\9TI57WXD\\Hering and Schönfelder - 2018 - Sensoren in Wissenschaft und Technik.pdf:application/pdf}, } -@inproceedings{al_naser_fusion_2022, - title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment}, - url = {https://ieeexplore.ieee.org/document/9900548}, - doi = {10.1109/RO-MAN53752.2022.9900548}, - abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. 
Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.}, - eventtitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})}, - pages = {532--537}, - booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})}, - author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen}, - urldate = {2025-02-19}, - date = {2022-08}, - note = {{ISSN}: 1944-9437}, - keywords = {Service robots, Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Stability criteria, Thermal sensors}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\Q933FYY2\\Al Naser et al. - 2022 - Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a r.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\942BAXF5\\9900548.html:text/html}, +@article{saudabayev_sensors_2015, + title = {Sensors for Robotic Hands: A Survey of State of the Art}, + volume = {3}, + issn = {2169-3536}, + url = {https://ieeexplore.ieee.org/document/7283549/?arnumber=7283549}, + doi = {10.1109/ACCESS.2015.2482543}, + shorttitle = {Sensors for Robotic Hands}, + abstract = {Recent decades have seen significant progress in the field of artificial hands. Most of the surveys, which try to capture the latest developments in this field, focused on actuation and control systems of these devices. In this paper, our goal is to provide a comprehensive survey of the sensors for artificial hands. In order to present the evolution of the field, we cover five year periods starting at the turn of the millennium. At each period, we present the robot hands with a focus on their sensor systems dividing them into categories, such as prosthetics, research devices, and industrial end-effectors. We also cover the sensors developed for robot hand usage in each era. Finally, the period between 2010 and 2015 introduces the reader to the state of the art and also hints to the future directions in the sensor development for artificial hands.}, + pages = {1765--1782}, + journaltitle = {{IEEE} Access}, + author = {Saudabayev, Artur and Varol, Huseyin Atakan}, + urldate = {2025-02-12}, + date = {2015}, + note = {Conference Name: {IEEE} Access}, + keywords = {Robot sensing systems, Sensors, Artificial hands, Hands, industrial robotics, prosthetics, Prosthetics, review, robot end effectors, robot sensing, robotic hands, Robots, sensors}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\HR7ZUF8W\\Saudabayev and Varol - 2015 - Sensors for Robotic Hands A Survey of State of the Art.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\reebe\\Zotero\\storage\\484D4R7H\\7283549.html:text/html}, } -@inproceedings{rashid_local_2020, - title = {Local and Global Sensors for Collision Avoidance}, - url = {https://ieeexplore.ieee.org/document/9235223}, - doi = {10.1109/MFI49285.2020.9235223}, - abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. 
In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° {LiDAR} and a small range {RGB} camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. Results suggest higher efficiency and safety when using local and global setup.}, - eventtitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})}, - pages = {354--359}, - booktitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})}, - author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias}, - urldate = {2025-02-19}, - date = {2020-09}, - keywords = {Service robots, Safety, Sensor fusion, Cameras, Laser radar, Production, Robot vision systems}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HAXPN6EL\\Rashid et al. - 2020 - Local and Global Sensors for Collision Avoidance.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\4X42Y6TK\\9235223.html:text/html}, +@article{paya_state---art_2017, + title = {A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision Sensors}, + volume = {2017}, + rights = {Copyright © 2017 L. Payá et al.}, + issn = {1687-7268}, + url = {https://onlinelibrary.wiley.com/doi/abs/10.1155/2017/3497650}, + doi = {10.1155/2017/3497650}, + abstract = {Nowadays, the field of mobile robotics is experiencing a quick evolution, and a variety of autonomous vehicles is available to solve different tasks. The advances in computer vision have led to a substantial increase in the use of cameras as the main sensors in mobile robots. They can be used as the only source of information or in combination with other sensors such as odometry or laser. Among vision systems, omnidirectional sensors stand out due to the richness of the information they provide the robot with, and an increasing number of works about them have been published over the last few years, leading to a wide variety of frameworks. In this review, some of the most important works are analysed. One of the key problems the scientific community is addressing currently is the improvement of the autonomy of mobile robots. To this end, building robust models of the environment and solving the localization and navigation problems are three important abilities that any mobile robot must have. Taking it into account, the review concentrates on these problems; how researchers have addressed them by means of omnidirectional vision; the main frameworks they have proposed; and how they have evolved in recent years.}, + pages = {3497650}, + number = {1}, + journaltitle = {Journal of Sensors}, + author = {Payá, L. and Gil, A. and Reinoso, O.}, + urldate = {2025-02-12}, + date = {2017}, + langid = {english}, + note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1155/2017/3497650}, + file = {Full Text PDF:C\:\\Users\\reebe\\Zotero\\storage\\EZ473NGD\\Payá et al. 
- 2017 - A State-of-the-Art Review on Mapping and Localization of Mobile Robots Using Omnidirectional Vision.pdf:application/pdf;Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\86LDAQ62\\3497650.html:text/html}, } -@article{jain_survey_nodate, - title = {A survey of Laser Range Finding}, - url = {http://www.siddjain.com/ee236a.pdf}, - abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.}, - author = {Jain, Siddharth}, - urldate = {2025-02-19}, +@online{noauthor_vl53l7cx_nodate, + title = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}}, + url = {https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html}, + abstract = {{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV}, {VL}53L7CXV0GC/1, {STMicroelectronics}}, + urldate = {2025-02-12}, langid = {english}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\X2WNAHZB\\Jain - A survey of Laser Range Finding.pdf:application/pdf}, + file = {Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\VEYLCCLA\\vl53l7cx.html:text/html}, } -@online{noauthor_file20200501_2020, - title = {File:20200501 Time of flight.svg - Wikipedia}, - url = {https://commons.wikimedia.org/wiki/File:20200501_Time_of_flight.svg}, - shorttitle = {File}, - urldate = {2025-02-20}, - date = {2020-05-01}, +@online{noauthor_pico-series_nodate, + title = {Pico-series Microcontrollers - Raspberry Pi Documentation}, + url = {https://www.raspberrypi.com/documentation/microcontrollers/pico-series.html}, + abstract = {The official documentation for Raspberry Pi computers and microcontrollers}, + urldate = {2025-02-12}, langid = {english}, - file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\H7EUEBHT\\File20200501_Time_of_flight.html:text/html}, + file = {Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\KUCB8PVI\\pico-series.html:text/html}, } -@article{raj_survey_2020, - title = {A Survey on {LiDAR} Scanning Mechanisms}, - volume = {9}, - rights = {http://creativecommons.org/licenses/by/3.0/}, - issn = {2079-9292}, - url = {https://www.mdpi.com/2079-9292/9/5/741}, - doi = {10.3390/electronics9050741}, - abstract = {In recent years, light detection and ranging ({LiDAR}) technology has gained huge popularity in various applications such as navigation, robotics, remote sensing, and advanced driving assistance systems ({ADAS}). This popularity is mainly due to the improvements in {LiDAR} performance in terms of range detection, accuracy, power consumption, as well as physical features such as dimension and weight. Although a number of literatures on {LiDAR} technology have been published earlier, not many has been reported on the state-of-the-art {LiDAR} scanning mechanisms. The aim of this article is to review the scanning mechanisms employed in {LiDAR} technology from past research works to the current commercial products. The review highlights four commonly used mechanisms in {LiDAR} systems: Opto-mechanical, electromechanical, micro-electromechanical systems ({MEMS}), and solid-state scanning. The study reveals that electro-mechanical scanning is the most prominent technology in use today. 
The commercially available 1D time of flight ({TOF}) {LiDAR} instrument is currently the most attractive option for conversion from 1D to 3D {LiDAR} system, provided that low scanning rate is not an issue. As for applications with low size, weight, and power ({SWaP}) requirements, {MEMS} scanning is found to be the better alternative. {MEMS} scanning is by far the more matured technology compared to solid-state scanning and is currently given great emphasis to increase its robustness for fulfilling the requirements of {ADAS} applications. Finally, solid-state {LiDAR} systems are expected to fill in the gap in {ADAS} applications despite the low technology readiness in comparison to {MEMS} scanners. However, since solid-state scanning is believed to have superior robustness, field of view ({FOV}), and scanning rate potential, great efforts are given by both academics and industries to further develop this technology.}, - pages = {741}, - number = {5}, - journaltitle = {Electronics}, - author = {Raj, Thinal and Hashim, Fazida Hanim and Huddin, Aqilah Baseri and Ibrahim, Mohd Faisal and Hussain, Aini}, - urldate = {2025-02-20}, - date = {2020-05}, - langid = {english}, - note = {Number: 5 -Publisher: Multidisciplinary Digital Publishing Institute}, - keywords = {electro-mechanical scanning, {LiDAR}, {MEMS} scanning, opto-mechanical scanning, solid-state {LiDAR}}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\2PBQYF7P\\Raj et al. - 2020 - A Survey on LiDAR Scanning Mechanisms.pdf:application/pdf}, +@online{noauthor_chatgpt_nodate, + title = {{ChatGPT}}, + url = {https://chatgpt.com}, + abstract = {A conversational {AI} system that listens, learns, and challenges}, + urldate = {2025-02-12}, + file = {Snapshot:C\:\\Users\\reebe\\Zotero\\storage\\ZT8MG8Y4\\678e85c2-fd98-800d-908f-de95b27f0abf.html:text/html}, } -@article{surmann_autonomous_2003, - title = {An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of indoor environments}, - volume = {45}, - issn = {0921-8890}, - url = {https://www.sciencedirect.com/science/article/pii/S0921889003001556}, - doi = {10.1016/j.robot.2003.09.004}, - abstract = {Digital 3D models of the environment are needed in rescue and inspection robotics, facility managements and architecture. This paper presents an automatic system for gaging and digitalization of 3D indoor environments. It consists of an autonomous mobile robot, a reliable 3D laser range finder and three elaborated software modules. The first module, a fast variant of the Iterative Closest Points algorithm, registers the 3D scans in a common coordinate system and relocalizes the robot. The second module, a next best view planner, computes the next nominal pose based on the acquired 3D data while avoiding complicated obstacles. The third module, a closed-loop and globally stable motor controller, navigates the mobile robot to a nominal pose on the base of odometry and avoids collisions with dynamical obstacles. The 3D laser range finder acquires a 3D scan at this pose. The proposed method allows one to digitalize large indoor environments fast and reliably without any intervention and solves the {SLAM} problem. 
The results of two 3D digitalization experiments are presented using a fast octree-based visualization method.}, - pages = {181--198}, - number = {3}, - journaltitle = {Robotics and Autonomous Systems}, - shortjournal = {Robotics and Autonomous Systems}, - author = {Surmann, Hartmut and Nüchter, Andreas and Hertzberg, Joachim}, - urldate = {2025-02-20}, - date = {2003-12-31}, - keywords = {3D digitalization, 3D gaging, 3D laser range finder, Autonomous mobile robots, Next best view planning, Robot relocalization, Scan matching, {SLAM}}, - file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\BKNJW2B7\\Surmann et al. - 2003 - An autonomous mobile robot with a 3D laser range finder for 3D exploration and digitalization of ind.pdf:application/pdf;ScienceDirect Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\H82LXSD3\\S0921889003001556.html:text/html}, +@software{iii_earlephilhowerarduino-pico_2025, + title = {earlephilhower/arduino-pico}, + rights = {{LGPL}-2.1}, + url = {https://github.com/earlephilhower/arduino-pico}, + abstract = {Raspberry Pi Pico Arduino core, for all {RP}2040 and {RP}2350 boards}, + author = {{III}, Earle F. Philhower}, + urldate = {2025-02-12}, + date = {2025-02-11}, + note = {original-date: 2021-02-25T04:20:27Z}, + keywords = {a2dp, arduino, arduino-pico, ble, bluetooth, freertos, pi, pico, pico2, picow, raspberry, raspberry-pi, risc-v, riscv, rp2040, rp2350, smp, wifi}, } -@article{niclass_design_2012, - title = {Design and characterization of a 256x64-pixel single-photon imager in {CMOS} for a {MEMS}-based laser scanning time-of-flight sensor}, - volume = {20}, - rights = {© 2012 {OSA}}, - issn = {1094-4087}, - url = {https://opg.optica.org/oe/abstract.cfm?uri=oe-20-11-11863}, - doi = {10.1364/OE.20.011863}, - abstract = {We introduce an optical time-of-flight image sensor taking advantage of a {MEMS}-based laser scanning device. Unlike previous approaches, our concept benefits from the high timing resolution and the digital signal flexibility of single-photon pixels in {CMOS} to allow for a nearly ideal cooperation between the image sensor and the scanning device. This technique enables a high signal-to-background light ratio to be obtained, while simultaneously relaxing the constraint on size of the {MEMS} mirror. These conditions are critical for devising practical and low-cost depth sensors intended to operate in uncontrolled environments, such as outdoors. A proof-of-concept prototype capable of operating in real-time was implemented. This paper focuses on the design and characterization of a 256x64-pixel image sensor, which also comprises an event-driven readout circuit, an array of 64 row-level high-throughput time-to-digital converters, and a 16Gbit/s global readout circuit. Quantitative evaluation of the sensor under 2klux of background light revealed a repeatability error of 13.5cm throughout the distance range of 20 meters.}, - pages = {11863--11881}, - number = {11}, - journaltitle = {Optics Express}, - shortjournal = {Opt. Express, {OE}}, - author = {Niclass, Cristiano and Ito, Kota and Soga, Mineki and Matsubara, Hiroyuki and Aoyagi, Isao and Kato, Satoru and Kagami, Manabu}, - urldate = {2025-02-20}, - date = {2012-05-21}, - note = {Publisher: Optica Publishing Group}, - keywords = {Image sensors, Deformable mirrors, Diode lasers, Light emitting diodes, Optical systems, Systems design}, - file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\BZWW7BVY\\Niclass et al. 
- 2012 - Design and characterization of a 256x64-pixel single-photon imager in CMOS for a MEMS-based laser sc.pdf:application/pdf}, +@online{noauthor_tutorials_nodate, + title = {Tutorials — {ROS} 2 Documentation: Humble documentation}, + url = {https://docs.ros.org/en/humble/Tutorials.html}, + urldate = {2025-02-12}, + file = {Tutorials — ROS 2 Documentation\: Humble documentation:C\:\\Users\\reebe\\Zotero\\storage\\28S5GUZ5\\Tutorials.html:text/html}, } -@online{noauthor_vlp_nodate, - title = {{VLP} 16 {\textbar} Ouster}, - url = {https://ouster.com/products/hardware/vlp-16}, - abstract = {Mid-range lidar sensor}, - urldate = {2025-02-20}, - langid = {english}, - file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\AR82YJRS\\vlp-16.html:text/html}, +@online{noauthor_examples_nodate, + title = {Examples - trimesh 4.6.2 documentation}, + url = {https://trimesh.org/examples.html}, + urldate = {2025-02-12}, + file = {Examples - trimesh 4.6.2 documentation:C\:\\Users\\reebe\\Zotero\\storage\\82WA6KM7\\examples.html:text/html}, } -@software{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025, - title = {sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library}, - url = {https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library}, - publisher = {{SparkFun} Electronics}, - urldate = {2025-02-21}, - date = {2025-01-28}, - note = {original-date: 2021-10-22T21:06:36Z}, +@software{grans_sebastiangransros2-point-cloud-demo_2024, + title = {{SebastianGrans}/{ROS}2-Point-Cloud-Demo}, + rights = {{MIT}}, + url = {https://github.com/SebastianGrans/ROS2-Point-Cloud-Demo}, + abstract = {Demo package for {ROS}2 that publishes a point cloud and visualizes it using {RViz}2}, + author = {Grans, Sebastian}, + urldate = {2025-02-12}, + date = {2024-12-08}, + note = {original-date: 2020-06-30T16:55:21Z}, } -@online{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate, - title = {tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}}, - url = {https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher}, - urldate = {2025-02-21}, - file = {tof_imager_micro_ros/teensy_pcl_publisher at humble · adityakamath/tof_imager_micro_ros · GitHub:C\:\\Users\\Rene\\Zotero\\storage\\TEVM2A5B\\teensy_pcl_publisher.html:text/html}, +@software{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025, + title = {{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation}, + rights = {{BSD}-3-Clause}, + url = {https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation}, + publisher = {Universal Robots A/S}, + urldate = {2025-02-24}, + date = {2025-02-17}, + note = {original-date: 2021-12-15T12:15:45Z}, } diff --git a/Praxiprojekt_Bericht/ba.aux b/Praxiprojekt_Bericht/ba.aux index d2ed843bfb0e825466481f8a4e1bf7935c6da781..d3a76bc654e7fd328036ef0257767ad883aeb253 100644 --- a/Praxiprojekt_Bericht/ba.aux +++ b/Praxiprojekt_Bericht/ba.aux @@ -17,6 +17,7 @@ \@writefile{toc}{\contentsline {chapter}{\numberline {1}Zusammenfassung}{3}{chapter.1}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} +\@writefile{lol}{\addvspace {10\p@ }} \BKM@entry{id=2,dest={636861707465722E32},srcline={23}}{5C3337365C3337375C3030304D5C3030306F5C303030745C303030695C303030765C303030615C303030745C303030695C3030306F5C3030306E} \citation{noauthor_robotics_2021} \citation{popov_collision_2017} @@ -26,6 +27,7 @@ \@writefile{toc}{\contentsline {chapter}{\numberline {2}Motivation}{4}{chapter.2}\protected@file@percent } 
\@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} +\@writefile{lol}{\addvspace {10\p@ }} \BKM@entry{id=3,dest={636861707465722E33},srcline={42}}{5C3337365C3337375C303030535C303030745C303030615C3030306E5C303030645C3030305C3034305C303030645C303030655C303030725C3030305C3034305C303030545C303030655C303030635C303030685C3030306E5C303030695C3030306B} \citation{noauthor_can_nodate} \citation{noauthor_can_nodate} @@ -39,6 +41,7 @@ \@writefile{toc}{\contentsline {chapter}{\numberline {3}Stand der Technik}{5}{chapter.3}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} +\@writefile{lol}{\addvspace {10\p@ }} \@writefile{lof}{\contentsline {figure}{\numberline {\textbf 3-1}{\ignorespaces Market Outlook for Cobots. Source:\cite {noauthor_can_nodate}}}{5}{figure.3.1}\protected@file@percent } \newlabel{Cobot Growth}{{\textbf 3-1}{5}{Market Outlook for Cobots. Source:\cite {noauthor_can_nodate}}{figure.3.1}{}} \@writefile{toc}{\contentsline {section}{\numberline {3.1}Kollisionsvermeidung und Kollisionserkennung}{6}{section.3.1}\protected@file@percent } @@ -69,53 +72,76 @@ \BKM@entry{id=7,dest={73656374696F6E2E342E31},srcline={115}}{5C3337365C3337375C303030565C3030306F5C303030725C303030675C303030655C303030685C303030655C3030306E5C303030735C303030775C303030655C303030695C303030735C30303065} \citation{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate} \citation{noauthor_pico-series_nodate} -\BKM@entry{id=8,dest={73656374696F6E2E342E32},srcline={117}}{5C3337365C3337375C303030535C3030306F5C303030665C303030745C303030775C303030615C303030725C30303065} -\BKM@entry{id=9,dest={73756273656374696F6E2E342E322E31},srcline={118}}{5C3337365C3337375C303030415C303030725C303030645C303030755C303030695C3030306E5C3030306F} -\BKM@entry{id=10,dest={73756273656374696F6E2E342E322E32},srcline={119}}{5C3337365C3337375C303030525C3030306F5C303030625C3030306F5C303030745C3030305C3034305C3030304F5C303030705C303030655C303030725C303030615C303030745C303030695C3030306E5C303030675C3030305C3034305C303030535C303030795C303030735C303030745C303030655C3030306D5C3030305C3034305C30303032} -\BKM@entry{id=11,dest={73756273656374696F6E2E342E322E33},srcline={120}}{5C3337365C3337375C303030525C303030565C303030495C3030305A5C303030325C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C303030475C303030615C3030307A5C303030655C303030625C3030306F5C3030305C3034305C303030435C3030306C5C303030615C303030735C303030735C303030695C30303063} -\BKM@entry{id=12,dest={73656374696F6E2E342E33},srcline={121}}{5C3337365C3337375C303030485C303030615C303030725C303030645C303030775C303030615C303030725C30303065} -\BKM@entry{id=13,dest={73756273656374696F6E2E342E332E31},srcline={122}}{5C3337365C3337375C303030455C3030306C5C303030655C3030306B5C303030745C303030725C3030306F5C3030306E5C303030695C303030735C303030635C30303068} -\BKM@entry{id=14,dest={73756273656374696F6E2E342E332E32},srcline={123}}{5C3337365C3337375C3030304D5C303030655C303030635C303030685C303030615C3030306E5C303030695C303030735C303030635C30303068} +\citation{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025} +\citation{noauthor_vl53l7cx_nodate} +\BKM@entry{id=8,dest={73656374696F6E2E342E32},srcline={121}}{5C3337365C3337375C303030535C3030306F5C303030665C303030745C303030775C303030615C303030725C30303065} +\BKM@entry{id=9,dest={73756273656374696F6E2E342E322E31},srcline={122}}{5C3337365C3337375C303030415C303030725C303030645C303030755C303030695C3030306E5C3030306F} \@writefile{toc}{\contentsline {chapter}{\numberline 
{4}Umsetzung}{10}{chapter.4}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} +\@writefile{lol}{\addvspace {10\p@ }} \@writefile{toc}{\contentsline {section}{\numberline {4.1}Vorgehensweise}{10}{section.4.1}\protected@file@percent } \@writefile{toc}{\contentsline {section}{\numberline {4.2}Software}{10}{section.4.2}\protected@file@percent } \@writefile{toc}{\contentsline {subsection}{\numberline {4.2.1}Arduino}{10}{subsection.4.2.1}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.2}Robot Operating System 2}{10}{subsection.4.2.2}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.3}RVIZ2 und Gazebo Classic}{10}{subsection.4.2.3}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {4.3}Hardware}{10}{section.4.3}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.1}Elektronisch}{10}{subsection.4.3.1}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.2}Mechanisch}{10}{subsection.4.3.2}\protected@file@percent } -\BKM@entry{id=15,dest={636861707465722E35},srcline={124}}{5C3337365C3337375C303030455C303030725C303030675C303030655C303030625C3030306E5C303030695C30303073} -\@writefile{toc}{\contentsline {chapter}{\numberline {5}Ergebnis}{11}{chapter.5}\protected@file@percent } +\newlabel{Snippet LPN}{{4.1}{11}{Funktion zum setzen eines LPn Pin}{lstlisting.4.1}{}} +\@writefile{lol}{\contentsline {lstlisting}{\numberline {4.1}Funktion zum setzen eines LPn Pin}{11}{lstlisting.4.1}\protected@file@percent } +\newlabel{Snippet Init}{{4.2}{11}{Funktion zum initialisieren der Sensoren im Setup}{lstlisting.4.2}{}} +\@writefile{lol}{\contentsline {lstlisting}{\numberline {4.2}Funktion zum initialisieren der Sensoren im Setup}{11}{lstlisting.4.2}\protected@file@percent } +\newlabel{Snippet Prozess}{{4.3}{12}{processSensorData - Verarbeitung der Sensordaten}{lstlisting.4.3}{}} +\@writefile{lol}{\contentsline {lstlisting}{\numberline {4.3}processSensorData - Verarbeitung der Sensordaten}{12}{lstlisting.4.3}\protected@file@percent } +\BKM@entry{id=10,dest={73756273656374696F6E2E342E322E32},srcline={217}}{5C3337365C3337375C303030525C3030306F5C303030625C3030306F5C303030745C3030305C3034305C3030304F5C303030705C303030655C303030725C303030615C303030745C303030695C3030306E5C303030675C3030305C3034305C303030535C303030795C303030735C303030745C303030655C3030306D5C3030305C3034305C30303032} +\citation{noauthor_tutorials_nodate} +\citation{noauthor_tutorials_nodate} +\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.2}Robot Operating System 2}{13}{subsection.4.2.2}\protected@file@percent } +\@writefile{lof}{\contentsline {figure}{\numberline {\textbf 4-1}{\ignorespaces Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}}{14}{figure.4.1}\protected@file@percent } +\newlabel{Topic_Viz}{{\textbf 4-1}{14}{Visualisierung von einem Topic, Source:\cite {noauthor_tutorials_nodate}}{figure.4.1}{}} +\BKM@entry{id=11,dest={73756273656374696F6E2E342E322E33},srcline={235}}{5C3337365C3337375C303030525C303030565C303030495C3030305A5C303030325C3030305C3034305C303030755C3030306E5C303030645C3030305C3034305C303030475C303030615C3030307A5C303030655C303030625C3030306F5C3030305C3034305C303030435C3030306C5C303030615C303030735C303030735C303030695C30303063} +\citation{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025} +\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.3}RVIZ2 
und Gazebo Classic}{15}{subsection.4.2.3}\protected@file@percent } +\@writefile{lof}{\contentsline {figure}{\numberline {\textbf 4-2}{\ignorespaces Bildschirmaufnahme aus Fusion von einer UR10 Baugruppe mit Sensormodul}}{15}{figure.4.2}\protected@file@percent } +\newlabel{ur10}{{\textbf 4-2}{15}{Bildschirmaufnahme aus Fusion von einer UR10 Baugruppe mit Sensormodul}{figure.4.2}{}} +\BKM@entry{id=12,dest={73656374696F6E2E342E33},srcline={262}}{5C3337365C3337375C303030485C303030615C303030725C303030645C303030775C303030615C303030725C30303065} +\BKM@entry{id=13,dest={73756273656374696F6E2E342E332E31},srcline={263}}{5C3337365C3337375C303030455C3030306C5C303030655C3030306B5C303030745C303030725C3030306F5C3030306E5C303030695C303030735C303030635C30303068} +\BKM@entry{id=14,dest={73756273656374696F6E2E342E332E32},srcline={264}}{5C3337365C3337375C3030304D5C303030655C303030635C303030685C303030615C3030306E5C303030695C303030735C303030635C30303068} +\@writefile{lof}{\contentsline {figure}{\numberline {\textbf 4-3}{\ignorespaces Bildschirmaufnahme aus \acrshort {RVIZ} von einer Punktwolke von einer Wand}}{16}{figure.4.3}\protected@file@percent } +\newlabel{two_pcd}{{\textbf 4-3}{16}{Bildschirmaufnahme aus \acrshort {RVIZ} von einer Punktwolke von einer Wand}{figure.4.3}{}} +\@writefile{toc}{\contentsline {section}{\numberline {4.3}Hardware}{16}{section.4.3}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.1}Elektronisch}{16}{subsection.4.3.1}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.2}Mechanisch}{16}{subsection.4.3.2}\protected@file@percent } +\BKM@entry{id=15,dest={636861707465722E35},srcline={265}}{5C3337365C3337375C303030455C303030725C303030675C303030655C303030625C3030306E5C303030695C30303073} +\@writefile{toc}{\contentsline {chapter}{\numberline {5}Ergebnis}{17}{chapter.5}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} -\BKM@entry{id=16,dest={636861707465722E36},srcline={125}}{5C3337365C3337375C303030415C303030755C303030735C303030625C3030306C5C303030695C303030635C3030306B} -\@writefile{toc}{\contentsline {chapter}{\numberline {6}Ausblick}{12}{chapter.6}\protected@file@percent } +\@writefile{lol}{\addvspace {10\p@ }} +\BKM@entry{id=16,dest={636861707465722E36},srcline={266}}{5C3337365C3337375C303030415C303030755C303030735C303030625C3030306C5C303030695C303030635C3030306B} +\@writefile{toc}{\contentsline {chapter}{\numberline {6}Ausblick}{18}{chapter.6}\protected@file@percent } \@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }} +\@writefile{lol}{\addvspace {10\p@ }} \bibdata{BA} \bibcite{noauthor_can_nodate}{1} \bibcite{noauthor_file20200501_2020}{2} \bibcite{noauthor_pico-series_nodate}{3} \bibcite{noauthor_robotics_2021}{4} \bibcite{hering_sensoren_2018}{5} -\bibcite{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate}{6} -\bibcite{noauthor_vl53l7cx_nodate}{7} -\bibcite{noauthor_vlp_nodate}{8} -\bibcite{al_naser_fusion_2022}{9} -\bibcite{amaya-mejia_vision-based_2022}{10} -\bibcite{jain_survey_nodate}{11} -\bibcite{li_common_2019}{12} -\bibcite{liu_application_2024}{13} -\bibcite{niclass_design_2012}{14} -\bibcite{popov_collision_2017}{15} -\bibcite{raj_survey_2020}{16} -\bibcite{rashid_local_2020}{17} -\bibcite{surmann_autonomous_2003}{18} +\bibcite{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025}{6} +\bibcite{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate}{7} +\bibcite{noauthor_tutorials_nodate}{8} 
+\bibcite{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025}{9} +\bibcite{noauthor_vl53l7cx_nodate}{10} +\bibcite{noauthor_vlp_nodate}{11} +\bibcite{al_naser_fusion_2022}{12} +\bibcite{amaya-mejia_vision-based_2022}{13} +\bibcite{jain_survey_nodate}{14} +\bibcite{li_common_2019}{15} +\bibcite{liu_application_2024}{16} +\bibcite{niclass_design_2012}{17} +\bibcite{popov_collision_2017}{18} +\bibcite{raj_survey_2020}{19} +\bibcite{rashid_local_2020}{20} +\bibcite{surmann_autonomous_2003}{21} \global\@namedef{scr@dte@chapter@lastmaxnumwidth}{11.47185pt} \global\@namedef{scr@dte@section@lastmaxnumwidth}{21.47964pt} \global\@namedef{scr@dte@subsection@lastmaxnumwidth}{31.48744pt} \@writefile{toc}{\providecommand\tocbasic@end@toc@file{}\tocbasic@end@toc@file} -\gdef \@abspage@last{15} +\gdef \@abspage@last{21} diff --git a/Praxiprojekt_Bericht/ba.bbl b/Praxiprojekt_Bericht/ba.bbl index eb80ddc20663e6b80463cfd2adae55c048e4cc31..f14b1094fd41b740c50becdb6dcac4002052ea67 100644 --- a/Praxiprojekt_Bericht/ba.bbl +++ b/Praxiprojekt_Bericht/ba.bbl @@ -32,23 +32,40 @@ \newblock \emph{Sensoren in Wissenschaft und Technik}. \newblock \url{http://dx.doi.org/10.1007/978-3-658-12562-2} -\bibitem[6]{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate} +\bibitem[6]{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025} +\emph{sparkfun/{SparkFun}\_VL53L5CX\_Arduino\_Library}. +\newblock \url{https://github.com/sparkfun/SparkFun_VL53L5CX_Arduino_Library}. + -- +\newblock original-date: 2021-10-22T21:06:36Z + +\bibitem[7]{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate} \emph{tof\_imager\_micro\_ros/teensy\_pcl\_publisher at humble · adityakamath/tof\_imager\_micro\_ros · {GitHub}}. \newblock \url{https://github.com/adityakamath/tof_imager_micro_ros/tree/humble/teensy_pcl_publisher} -\bibitem[7]{noauthor_vl53l7cx_nodate} +\bibitem[8]{noauthor_tutorials_nodate} +\emph{Tutorials — {ROS} 2 Documentation: Humble documentation}. +\newblock \url{https://docs.ros.org/en/humble/Tutorials.html} + +\bibitem[9]{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025} +\emph{{UniversalRobots}/Universal\_Robots\_ROS2\_GZ\_Simulation}. +\newblock + \url{https://github.com/UniversalRobots/Universal_Robots_ROS2_GZ_Simulation}. + -- +\newblock original-date: 2021-12-15T12:15:45Z + +\bibitem[10]{noauthor_vl53l7cx_nodate} \emph{{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 degrees {FoV} - {STMicroelectronics}}. \newblock \url{https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html} -\bibitem[8]{noauthor_vlp_nodate} +\bibitem[11]{noauthor_vlp_nodate} \emph{{VLP} 16 {\textbar} Ouster}. \newblock \url{https://ouster.com/products/hardware/vlp-16} -\bibitem[9]{al_naser_fusion_2022} +\bibitem[12]{al_naser_fusion_2022} \textsc{Al~Naser}, Ibrahim ; \textsc{Dahmen}, Johannes ; \textsc{Bdiwi}, Mohamad ; \textsc{Ihlenfeldt}, Steffen: \newblock Fusion of depth, color, and thermal images towards digital twins and @@ -57,7 +74,7 @@ Human Interactive Communication ({RO}-{MAN})}, 532--537. -- \newblock {ISSN}: 1944-9437 -\bibitem[10]{amaya-mejia_vision-based_2022} +\bibitem[13]{amaya-mejia_vision-based_2022} \textsc{Amaya-Mejía}, Lina~M. ; \textsc{Duque-Suárez}, Nicolás ; \textsc{Jaramillo-Ramírez}, Daniel ; \textsc{Martinez}, Carol: \newblock Vision-Based Safety System for Barrierless Human-Robot Collaboration. @@ -65,12 +82,12 @@ Robots and Systems ({IROS})}, 7331--7336. 
-- \newblock {ISSN}: 2153-0866 -\bibitem[11]{jain_survey_nodate} +\bibitem[14]{jain_survey_nodate} \textsc{Jain}, Siddharth: \newblock A survey of Laser Range Finding. \newblock \url{http://www.siddjain.com/ee236a.pdf} -\bibitem[12]{li_common_2019} +\bibitem[15]{li_common_2019} \textsc{Li}, Peng ; \textsc{Liu}, Xiangpeng: \newblock Common Sensors in Industrial Robots: A Review. \newblock 1267, Nr. 1, 012036. @@ -78,7 +95,7 @@ \newblock DOI 10.1088/1742--6596/1267/1/012036. -- \newblock ISSN 1742--6588, 1742--6596 -\bibitem[13]{liu_application_2024} +\bibitem[16]{liu_application_2024} \textsc{Liu}, Li ; \textsc{Guo}, Fu ; \textsc{Zou}, Zishuai ; \textsc{Duffy}, Vincent~G.: \newblock Application, Development and Future Opportunities of Collaborative @@ -90,7 +107,7 @@ \newblock Publisher: Taylor \& Francis \_eprint: https://doi.org/10.1080/10447318.2022.2041907 -\bibitem[14]{niclass_design_2012} +\bibitem[17]{niclass_design_2012} \textsc{Niclass}, Cristiano ; \textsc{Ito}, Kota ; \textsc{Soga}, Mineki ; \textsc{Matsubara}, Hiroyuki ; \textsc{Aoyagi}, Isao ; \textsc{Kato}, Satoru ; \textsc{Kagami}, Manabu: @@ -102,7 +119,7 @@ \newblock ISSN 1094--4087. -- \newblock Publisher: Optica Publishing Group -\bibitem[15]{popov_collision_2017} +\bibitem[18]{popov_collision_2017} \textsc{Popov}, Dmitry ; \textsc{Klimchik}, Alexandr ; \textsc{Mavridis}, Nikolaos: \newblock Collision detection, localization \& classification for industrial @@ -111,7 +128,7 @@ Human Interactive Communication ({RO}-{MAN})}, {IEEE}. -- \newblock ISBN 978--1--5386--3518--6, 838--843 -\bibitem[16]{raj_survey_2020} +\bibitem[19]{raj_survey_2020} \textsc{Raj}, Thinal ; \textsc{Hashim}, Fazida~H. ; \textsc{Huddin}, Aqilah~B. ; \textsc{Ibrahim}, Mohd~F. ; \textsc{Hussain}, Aini: \newblock A Survey on {LiDAR} Scanning Mechanisms. @@ -121,7 +138,7 @@ \newblock ISSN 2079--9292. -- \newblock Number: 5 Publisher: Multidisciplinary Digital Publishing Institute -\bibitem[17]{rashid_local_2020} +\bibitem[20]{rashid_local_2020} \textsc{Rashid}, Aquib ; \textsc{Peesapati}, Kannan ; \textsc{Bdiwi}, Mohamad ; \textsc{Krusche}, Sebastian ; \textsc{Hardt}, Wolfram ; \textsc{Putz}, Matthias: @@ -129,7 +146,7 @@ \newblock {In: }\emph{2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})}, 354--359 -\bibitem[18]{surmann_autonomous_2003} +\bibitem[21]{surmann_autonomous_2003} \textsc{Surmann}, Hartmut ; \textsc{Nüchter}, Andreas ; \textsc{Hertzberg}, Joachim: \newblock An autonomous mobile robot with a 3D laser range finder for 3D diff --git a/Praxiprojekt_Bericht/ba.blg b/Praxiprojekt_Bericht/ba.blg index f63f05ca3dffc1052ae236c6c16c9a31fa345be4..530a04bd7e02afff9d4de7a0f2b0faf8a44e9ae0 100644 --- a/Praxiprojekt_Bericht/ba.blg +++ b/Praxiprojekt_Bericht/ba.blg @@ -8,20 +8,26 @@ Reallocated wiz_functions (elt_size=4) to 6000 items from 3000. Reallocated singl_function (elt_size=4) to 100 items from 50. Reallocated singl_function (elt_size=4) to 100 items from 50. 
Database file #1: BA.bib +Warning--entry type for "noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate" isn't style-file defined +--line 9 of file BA.bib +Warning--entry type for "noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025" isn't style-file defined +--line 16 of file BA.bib +Warning--entry type for "noauthor_vlp_nodate" isn't style-file defined +--line 25 of file BA.bib +Warning--entry type for "noauthor_file20200501_2020" isn't style-file defined +--line 93 of file BA.bib +Warning--entry type for "noauthor_can_nodate" isn't style-file defined +--line 195 of file BA.bib Warning--entry type for "hering_sensoren_2018" isn't style-file defined ---line 48 of file BA.bib +--line 438 of file BA.bib Warning--entry type for "noauthor_vl53l7cx_nodate" isn't style-file defined ---line 100 of file BA.bib +--line 490 of file BA.bib Warning--entry type for "noauthor_pico-series_nodate" isn't style-file defined ---line 109 of file BA.bib -Warning--entry type for "noauthor_can_nodate" isn't style-file defined ---line 350 of file BA.bib -Warning--entry type for "noauthor_file20200501_2020" isn't style-file defined ---line 452 of file BA.bib -Warning--entry type for "noauthor_vlp_nodate" isn't style-file defined ---line 521 of file BA.bib -Warning--entry type for "noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate" isn't style-file defined ---line 539 of file BA.bib +--line 499 of file BA.bib +Warning--entry type for "noauthor_tutorials_nodate" isn't style-file defined +--line 528 of file BA.bib +Warning--entry type for "noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025" isn't style-file defined +--line 553 of file BA.bib Warning--to sort, need author or key in noauthor_robotics_2021 Warning--to sort, need author or key in noauthor_vl53l7cx_nodate Warning--to sort, need author or key in hering_sensoren_2018 @@ -30,6 +36,9 @@ Warning--to sort, need author or key in noauthor_file20200501_2020 Warning--to sort, need author or key in noauthor_vlp_nodate Warning--to sort, need author or key in noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate Warning--to sort, need author or key in noauthor_pico-series_nodate +Warning--to sort, need author or key in noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025 +Warning--to sort, need author or key in noauthor_tutorials_nodate +Warning--to sort, need author or key in noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025 Warning--there's no year in jain_survey_nodate Warning--there's no year in li_common_2019 Warning--there's no year in liu_application_2024 @@ -37,45 +46,45 @@ Warning--there's no year in niclass_design_2012 Warning--neither address nor publication date in popov_collision_2017 Warning--there's no year in raj_survey_2020 Warning--there's no year in surmann_autonomous_2003 -You've used 18 entries, +You've used 21 entries, 4335 wiz_defined-function locations, - 937 strings with 10502 characters, -and the built_in function-call counts, 11109 in all, are: -= -- 1180 -> -- 239 + 950 strings with 11095 characters, +and the built_in function-call counts, 11742 in all, are: += -- 1238 +> -- 242 < -- 379 -+ -- 428 ++ -- 431 - -- 85 -* -- 746 -:= -- 1838 -add.period$ -- 23 -call.type$ -- 18 -change.case$ -- 75 +* -- 775 +:= -- 1919 +add.period$ -- 26 +call.type$ -- 21 +change.case$ -- 81 chr.to.int$ -- 0 -cite$ -- 33 -duplicate$ -- 245 -empty$ -- 803 +cite$ -- 39 +duplicate$ -- 272 +empty$ -- 884 format.name$ -- 162 -if$ -- 2234 +if$ -- 2400 int.to.chr$ -- 0 -int.to.str$ -- 18 -missing$ -- 75 -newline$ -- 
114 +int.to.str$ -- 21 +missing$ -- 93 +newline$ -- 128 num.names$ -- 21 -pop$ -- 136 +pop$ -- 152 preamble$ -- 1 -purify$ -- 75 +purify$ -- 81 quote$ -- 0 -skip$ -- 503 +skip$ -- 531 stack$ -- 0 -substring$ -- 1138 -swap$ -- 64 +substring$ -- 1168 +swap$ -- 67 text.length$ -- 52 text.prefix$ -- 0 top$ -- 0 -type$ -- 72 -warning$ -- 15 +type$ -- 84 +warning$ -- 18 while$ -- 73 -width$ -- 20 -write$ -- 244 -(There were 22 warnings) +width$ -- 23 +write$ -- 275 +(There were 28 warnings) diff --git a/Praxiprojekt_Bericht/ba.log b/Praxiprojekt_Bericht/ba.log index 4cbb3f1cf4204cd1e36a354c6602915cbb327014..0a8c02e5f5d65312b59e6b08ad281cd81940aacb 100644 --- a/Praxiprojekt_Bericht/ba.log +++ b/Praxiprojekt_Bericht/ba.log @@ -1,4 +1,4 @@ -This is pdfTeX, Version 3.141592653-2.6-1.40.26 (TeX Live 2024) (preloaded format=pdflatex 2025.2.18) 21 FEB 2025 12:18 +This is pdfTeX, Version 3.141592653-2.6-1.40.26 (TeX Live 2024) (preloaded format=pdflatex 2025.2.18) 24 FEB 2025 23:32 entering extended mode restricted \write18 enabled. %&-line parsing enabled. @@ -736,6 +736,54 @@ Package scrlayer-scrpage Info: Makeing stand-alone element `pagehead' from Package: mathrsfs 1996/01/01 Math RSFS package v1.0 (jk) \symrsfs=\mathgroup4 ) +(c:/texlive/2024/texmf-dist/tex/latex/listings/listings.sty +\lst@mode=\count355 +\lst@gtempboxa=\box70 +\lst@token=\toks42 +\lst@length=\count356 +\lst@currlwidth=\dimen191 +\lst@column=\count357 +\lst@pos=\count358 +\lst@lostspace=\dimen192 +\lst@width=\dimen193 +\lst@newlines=\count359 +\lst@lineno=\count360 +\lst@maxwidth=\dimen194 + +(c:/texlive/2024/texmf-dist/tex/latex/listings/lstpatch.sty +File: lstpatch.sty 2024/09/23 1.10c (Carsten Heinz) +) +(c:/texlive/2024/texmf-dist/tex/latex/listings/lstmisc.sty +File: lstmisc.sty 2024/09/23 1.10c (Carsten Heinz) +\c@lstnumber=\count361 +\lst@skipnumbers=\count362 +\lst@framebox=\box71 +) +(c:/texlive/2024/texmf-dist/tex/latex/listings/listings.cfg +File: listings.cfg 2024/09/23 1.10c listings configuration +)) +Package: listings 2024/09/23 1.10c (Carsten Heinz) + +(c:/texlive/2024/texmf-dist/tex/latex/xcolor/xcolor.sty +Package: xcolor 2024/09/29 v3.02 LaTeX color extensions (UK) + +(c:/texlive/2024/texmf-dist/tex/latex/graphics-cfg/color.cfg +File: color.cfg 2016/01/02 v1.6 sample color configuration +) +Package xcolor Info: Driver file: pdftex.def on input line 274. +LaTeX Info: Redefining \color on input line 762. + +(c:/texlive/2024/texmf-dist/tex/latex/graphics/mathcolor.ltx) +Package xcolor Info: Model `cmy' substituted by `cmy0' on input line 1349. +Package xcolor Info: Model `hsb' substituted by `rgb' on input line 1353. +Package xcolor Info: Model `RGB' extended on input line 1365. +Package xcolor Info: Model `HTML' substituted by `rgb' on input line 1367. +Package xcolor Info: Model `Hsb' substituted by `hsb' on input line 1368. +Package xcolor Info: Model `tHsb' substituted by `hsb' on input line 1369. +Package xcolor Info: Model `HSB' substituted by `hsb' on input line 1370. +Package xcolor Info: Model `Gray' substituted by `gray' on input line 1371. +Package xcolor Info: Model `wave' substituted by `hsb' on input line 1372. +) (c:/texlive/2024/texmf-dist/tex/latex/chngcntr/chngcntr.sty Package: chngcntr 2018/04/09 v1.1a change counter resetting Package chngcntr Info: \counterwithout already defined. @@ -745,12 +793,12 @@ Package chngcntr Info: \counterwithout already defined. 
(c:/texlive/2024/texmf-dist/tex/latex/pgf/basiclayer/pgf.sty (c:/texlive/2024/texmf-dist/tex/latex/pgf/utilities/pgfrcs.sty (c:/texlive/2024/texmf-dist/tex/generic/pgf/utilities/pgfutil-common.tex -\pgfutil@everybye=\toks42 -\pgfutil@tempdima=\dimen191 -\pgfutil@tempdimb=\dimen192 +\pgfutil@everybye=\toks43 +\pgfutil@tempdima=\dimen195 +\pgfutil@tempdimb=\dimen196 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/utilities/pgfutil-latex.def -\pgfutil@abb=\box70 +\pgfutil@abb=\box72 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/utilities/pgfrcs.code.tex (c:/texlive/2024/texmf-dist/tex/generic/pgf/pgf.revision.tex) @@ -764,33 +812,33 @@ Package: pgf 2023-01-15 v3.1.10 (3.1.10) Package: pgfsys 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/generic/pgf/utilities/pgfkeys.code.tex -\pgfkeys@pathtoks=\toks43 -\pgfkeys@temptoks=\toks44 +\pgfkeys@pathtoks=\toks44 +\pgfkeys@temptoks=\toks45 (c:/texlive/2024/texmf-dist/tex/generic/pgf/utilities/pgfkeyslibraryfiltered.co de.tex -\pgfkeys@tmptoks=\toks45 +\pgfkeys@tmptoks=\toks46 )) -\pgf@x=\dimen193 -\pgf@y=\dimen194 -\pgf@xa=\dimen195 -\pgf@ya=\dimen196 -\pgf@xb=\dimen197 -\pgf@yb=\dimen198 -\pgf@xc=\dimen199 -\pgf@yc=\dimen256 -\pgf@xd=\dimen257 -\pgf@yd=\dimen258 +\pgf@x=\dimen197 +\pgf@y=\dimen198 +\pgf@xa=\dimen199 +\pgf@ya=\dimen256 +\pgf@xb=\dimen257 +\pgf@yb=\dimen258 +\pgf@xc=\dimen259 +\pgf@yc=\dimen260 +\pgf@xd=\dimen261 +\pgf@yd=\dimen262 \w@pgf@writea=\write3 \r@pgf@reada=\read3 -\c@pgf@counta=\count355 -\c@pgf@countb=\count356 -\c@pgf@countc=\count357 -\c@pgf@countd=\count358 -\t@pgf@toka=\toks46 -\t@pgf@tokb=\toks47 -\t@pgf@tokc=\toks48 -\pgf@sys@id@count=\count359 +\c@pgf@counta=\count363 +\c@pgf@countb=\count364 +\c@pgf@countc=\count365 +\c@pgf@countd=\count366 +\t@pgf@toka=\toks47 +\t@pgf@tokb=\toks48 +\t@pgf@tokc=\toks49 +\pgf@sys@id@count=\count367 (c:/texlive/2024/texmf-dist/tex/generic/pgf/systemlayer/pgf.cfg File: pgf.cfg 2023-01-15 v3.1.10 (3.1.10) ) @@ -804,43 +852,23 @@ File: pgfsys-common-pdf.def 2023-01-15 v3.1.10 (3.1.10) ))) (c:/texlive/2024/texmf-dist/tex/generic/pgf/systemlayer/pgfsyssoftpath.code.tex File: pgfsyssoftpath.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfsyssoftpath@smallbuffer@items=\count360 -\pgfsyssoftpath@bigbuffer@items=\count361 +\pgfsyssoftpath@smallbuffer@items=\count368 +\pgfsyssoftpath@bigbuffer@items=\count369 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/systemlayer/pgfsysprotocol.code.tex File: pgfsysprotocol.code.tex 2023-01-15 v3.1.10 (3.1.10) -)) (c:/texlive/2024/texmf-dist/tex/latex/xcolor/xcolor.sty -Package: xcolor 2024/09/29 v3.02 LaTeX color extensions (UK) - -(c:/texlive/2024/texmf-dist/tex/latex/graphics-cfg/color.cfg -File: color.cfg 2016/01/02 v1.6 sample color configuration -) -Package xcolor Info: Driver file: pdftex.def on input line 274. -LaTeX Info: Redefining \color on input line 762. - -(c:/texlive/2024/texmf-dist/tex/latex/graphics/mathcolor.ltx) -Package xcolor Info: Model `cmy' substituted by `cmy0' on input line 1349. -Package xcolor Info: Model `hsb' substituted by `rgb' on input line 1353. -Package xcolor Info: Model `RGB' extended on input line 1365. -Package xcolor Info: Model `HTML' substituted by `rgb' on input line 1367. -Package xcolor Info: Model `Hsb' substituted by `hsb' on input line 1368. -Package xcolor Info: Model `tHsb' substituted by `hsb' on input line 1369. -Package xcolor Info: Model `HSB' substituted by `hsb' on input line 1370. -Package xcolor Info: Model `Gray' substituted by `gray' on input line 1371. 
-Package xcolor Info: Model `wave' substituted by `hsb' on input line 1372. -) -(c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcore.code.tex +)) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcore.code.tex Package: pgfcore 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmath.code.tex (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmathutil.code.tex) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmathparser.code.tex -\pgfmath@dimen=\dimen259 -\pgfmath@count=\count362 -\pgfmath@box=\box71 -\pgfmath@toks=\toks49 -\pgfmath@stack@operand=\toks50 -\pgfmath@stack@operation=\toks51 +\pgfmath@dimen=\dimen263 +\pgfmath@count=\count370 +\pgfmath@box=\box73 +\pgfmath@toks=\toks50 +\pgfmath@stack@operand=\toks51 +\pgfmath@stack@operation=\toks52 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmathfunctions.code.tex) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmathfunctions.basic.code.te @@ -861,56 +889,56 @@ x) etics.code.tex) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmathcalc.code.tex) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmathfloat.code.tex -\c@pgfmathroundto@lastzeros=\count363 +\c@pgfmathroundto@lastzeros=\count371 )) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfint.code.tex) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcorepoints.code.tex File: pgfcorepoints.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgf@picminx=\dimen260 -\pgf@picmaxx=\dimen261 -\pgf@picminy=\dimen262 -\pgf@picmaxy=\dimen263 -\pgf@pathminx=\dimen264 -\pgf@pathmaxx=\dimen265 -\pgf@pathminy=\dimen266 -\pgf@pathmaxy=\dimen267 -\pgf@xx=\dimen268 -\pgf@xy=\dimen269 -\pgf@yx=\dimen270 -\pgf@yy=\dimen271 -\pgf@zx=\dimen272 -\pgf@zy=\dimen273 +\pgf@picminx=\dimen264 +\pgf@picmaxx=\dimen265 +\pgf@picminy=\dimen266 +\pgf@picmaxy=\dimen267 +\pgf@pathminx=\dimen268 +\pgf@pathmaxx=\dimen269 +\pgf@pathminy=\dimen270 +\pgf@pathmaxy=\dimen271 +\pgf@xx=\dimen272 +\pgf@xy=\dimen273 +\pgf@yx=\dimen274 +\pgf@yy=\dimen275 +\pgf@zx=\dimen276 +\pgf@zy=\dimen277 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcorepathconstruct.cod e.tex File: pgfcorepathconstruct.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgf@path@lastx=\dimen274 -\pgf@path@lasty=\dimen275 +\pgf@path@lastx=\dimen278 +\pgf@path@lasty=\dimen279 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcorepathusage.code.te x File: pgfcorepathusage.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgf@shorten@end@additional=\dimen276 -\pgf@shorten@start@additional=\dimen277 +\pgf@shorten@end@additional=\dimen280 +\pgf@shorten@start@additional=\dimen281 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcorescopes.code.tex File: pgfcorescopes.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfpic=\box72 -\pgf@hbox=\box73 -\pgf@layerbox@main=\box74 -\pgf@picture@serial@count=\count364 +\pgfpic=\box74 +\pgf@hbox=\box75 +\pgf@layerbox@main=\box76 +\pgf@picture@serial@count=\count372 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcoregraphicstate.code .tex File: pgfcoregraphicstate.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgflinewidth=\dimen278 +\pgflinewidth=\dimen282 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcoretransformations.c ode.tex File: pgfcoretransformations.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgf@pt@x=\dimen279 -\pgf@pt@y=\dimen280 -\pgf@pt@temp=\dimen281 +\pgf@pt@x=\dimen283 +\pgf@pt@y=\dimen284 +\pgf@pt@temp=\dimen285 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcorequick.code.tex File: pgfcorequick.code.tex 2023-01-15 
v3.1.10 (3.1.10) @@ -924,20 +952,20 @@ File: pgfcorepathprocessing.code.tex 2023-01-15 v3.1.10 (3.1.10) ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcorearrows.code.tex File: pgfcorearrows.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfarrowsep=\dimen282 +\pgfarrowsep=\dimen286 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcoreshade.code.tex File: pgfcoreshade.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgf@max=\dimen283 -\pgf@sys@shading@range@num=\count365 -\pgf@shadingcount=\count366 +\pgf@max=\dimen287 +\pgf@sys@shading@range@num=\count373 +\pgf@shadingcount=\count374 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcoreimage.code.tex File: pgfcoreimage.code.tex 2023-01-15 v3.1.10 (3.1.10) ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcoreexternal.code.tex File: pgfcoreexternal.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfexternal@startupbox=\box75 +\pgfexternal@startupbox=\box77 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/basiclayer/pgfcorelayers.code.tex File: pgfcorelayers.code.tex 2023-01-15 v3.1.10 (3.1.10) ) @@ -952,7 +980,7 @@ File: pgfcorerdf.code.tex 2023-01-15 v3.1.10 (3.1.10) ))) (c:/texlive/2024/texmf-dist/tex/generic/pgf/modules/pgfmoduleshapes.code.te x File: pgfmoduleshapes.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfnodeparttextbox=\box76 +\pgfnodeparttextbox=\box78 ) (c:/texlive/2024/texmf-dist/tex/generic/pgf/modules/pgfmoduleplot.code.tex File: pgfmoduleplot.code.tex 2023-01-15 v3.1.10 (3.1.10) @@ -960,8 +988,8 @@ File: pgfmoduleplot.code.tex 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/latex/pgf/compatibility/pgfcomp-version-0-65.st y Package: pgfcomp-version-0-65 2023-01-15 v3.1.10 (3.1.10) -\pgf@nodesepstart=\dimen284 -\pgf@nodesepend=\dimen285 +\pgf@nodesepstart=\dimen288 +\pgf@nodesepend=\dimen289 ) (c:/texlive/2024/texmf-dist/tex/latex/pgf/compatibility/pgfcomp-version-1-18.st y @@ -973,10 +1001,10 @@ Package: pgfcomp-version-1-18 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/generic/pgf/math/pgfmath.code.tex)) (c:/texlive/2024/texmf-dist/tex/generic/pgf/utilities/pgffor.code.tex Package: pgffor 2023-01-15 v3.1.10 (3.1.10) -\pgffor@iter=\dimen286 -\pgffor@skip=\dimen287 -\pgffor@stack=\toks52 -\pgffor@toks=\toks53 +\pgffor@iter=\dimen290 +\pgffor@skip=\dimen291 +\pgffor@stack=\toks53 +\pgffor@toks=\toks54 )) (c:/texlive/2024/texmf-dist/tex/generic/pgf/frontendlayer/tikz/tikz.code.tex Package: tikz 2023-01-15 v3.1.10 (3.1.10) @@ -984,33 +1012,33 @@ Package: tikz 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/generic/pgf/libraries/pgflibraryplothandlers.co de.tex File: pgflibraryplothandlers.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgf@plot@mark@count=\count367 -\pgfplotmarksize=\dimen288 -) -\tikz@lastx=\dimen289 -\tikz@lasty=\dimen290 -\tikz@lastxsaved=\dimen291 -\tikz@lastysaved=\dimen292 -\tikz@lastmovetox=\dimen293 -\tikz@lastmovetoy=\dimen294 -\tikzleveldistance=\dimen295 -\tikzsiblingdistance=\dimen296 -\tikz@figbox=\box77 -\tikz@figbox@bg=\box78 -\tikz@tempbox=\box79 -\tikz@tempbox@bg=\box80 -\tikztreelevel=\count368 -\tikznumberofchildren=\count369 -\tikznumberofcurrentchild=\count370 -\tikz@fig@count=\count371 +\pgf@plot@mark@count=\count375 +\pgfplotmarksize=\dimen292 +) +\tikz@lastx=\dimen293 +\tikz@lasty=\dimen294 +\tikz@lastxsaved=\dimen295 +\tikz@lastysaved=\dimen296 +\tikz@lastmovetox=\dimen297 +\tikz@lastmovetoy=\dimen298 +\tikzleveldistance=\dimen299 +\tikzsiblingdistance=\dimen300 +\tikz@figbox=\box79 +\tikz@figbox@bg=\box80 +\tikz@tempbox=\box81 
+\tikz@tempbox@bg=\box82 +\tikztreelevel=\count376 +\tikznumberofchildren=\count377 +\tikznumberofcurrentchild=\count378 +\tikz@fig@count=\count379 (c:/texlive/2024/texmf-dist/tex/generic/pgf/modules/pgfmodulematrix.code.tex File: pgfmodulematrix.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfmatrixcurrentrow=\count372 -\pgfmatrixcurrentcolumn=\count373 -\pgf@matrix@numberofcolumns=\count374 +\pgfmatrixcurrentrow=\count380 +\pgfmatrixcurrentcolumn=\count381 +\pgf@matrix@numberofcolumns=\count382 ) -\tikz@expandcount=\count375 +\tikz@expandcount=\count383 (c:/texlive/2024/texmf-dist/tex/generic/pgf/frontendlayer/tikz/libraries/tikzli brarytopaths.code.tex @@ -1063,26 +1091,26 @@ File: tikzlibraryshapes.multipart.code.tex 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/generic/pgf/libraries/shapes/pgflibraryshapes.m ultipart.code.tex File: pgflibraryshapes.multipart.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfnodepartlowerbox=\box81 -\pgfnodeparttwobox=\box82 -\pgfnodepartthreebox=\box83 -\pgfnodepartfourbox=\box84 -\pgfnodeparttwentybox=\box85 -\pgfnodepartnineteenbox=\box86 -\pgfnodeparteighteenbox=\box87 -\pgfnodepartseventeenbox=\box88 -\pgfnodepartsixteenbox=\box89 -\pgfnodepartfifteenbox=\box90 -\pgfnodepartfourteenbox=\box91 -\pgfnodepartthirteenbox=\box92 -\pgfnodeparttwelvebox=\box93 -\pgfnodepartelevenbox=\box94 -\pgfnodeparttenbox=\box95 -\pgfnodepartninebox=\box96 -\pgfnodeparteightbox=\box97 -\pgfnodepartsevenbox=\box98 -\pgfnodepartsixbox=\box99 -\pgfnodepartfivebox=\box100 +\pgfnodepartlowerbox=\box83 +\pgfnodeparttwobox=\box84 +\pgfnodepartthreebox=\box85 +\pgfnodepartfourbox=\box86 +\pgfnodeparttwentybox=\box87 +\pgfnodepartnineteenbox=\box88 +\pgfnodeparteighteenbox=\box89 +\pgfnodepartseventeenbox=\box90 +\pgfnodepartsixteenbox=\box91 +\pgfnodepartfifteenbox=\box92 +\pgfnodepartfourteenbox=\box93 +\pgfnodepartthirteenbox=\box94 +\pgfnodeparttwelvebox=\box95 +\pgfnodepartelevenbox=\box96 +\pgfnodeparttenbox=\box97 +\pgfnodepartninebox=\box98 +\pgfnodeparteightbox=\box99 +\pgfnodepartsevenbox=\box100 +\pgfnodepartsixbox=\box101 +\pgfnodepartfivebox=\box102 ))) (c:/texlive/2024/texmf-dist/tex/generic/pgf/frontendlayer/tikz/libraries/tikzli braryarrows.code.tex @@ -1090,7 +1118,7 @@ File: tikzlibraryarrows.code.tex 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/generic/pgf/libraries/pgflibraryarrows.code.tex File: pgflibraryarrows.code.tex 2023-01-15 v3.1.10 (3.1.10) -\arrowsize=\dimen297 +\arrowsize=\dimen301 )) (c:/texlive/2024/texmf-dist/tex/latex/tikz-cd/tikz-cd.sty Package: tikz-cd 2021/05/04 v1.0 Commutative diagrams with TikZ @@ -1106,15 +1134,15 @@ File: tikzlibraryquotes.code.tex 2023-01-15 v3.1.10 (3.1.10) (c:/texlive/2024/texmf-dist/tex/generic/pgf/libraries/pgflibraryarrows.meta.cod e.tex File: pgflibraryarrows.meta.code.tex 2023-01-15 v3.1.10 (3.1.10) -\pgfarrowinset=\dimen298 -\pgfarrowlength=\dimen299 -\pgfarrowwidth=\dimen300 -\pgfarrowlinewidth=\dimen301 +\pgfarrowinset=\dimen302 +\pgfarrowlength=\dimen303 +\pgfarrowwidth=\dimen304 +\pgfarrowlinewidth=\dimen305 ))) (c:/texlive/2024/texmf-dist/tex/latex/colortbl/colortbl.sty Package: colortbl 2024/07/06 v1.0i Color table columns (DPC) -\everycr=\toks54 +\everycr=\toks55 \minrowclearance=\skip87 -\rownum=\count376 +\rownum=\count384 ) (c:/texlive/2024/texmf-dist/tex/latex/paralist/paralist.sty Package: paralist 2017/01/22 v2.7 Extended list environments @@ -1122,52 +1150,62 @@ Package: paralist 2017/01/22 v2.7 Extended list environments \plpartopsep=\skip89 \plitemsep=\skip90 \plparsep=\skip91 
-\pl@lab=\toks55 +\pl@lab=\toks56 ) (./titelangaben.tex) -LaTeX Font Info: Trying to load font information for T1+phv on input line 12 -8. - (c:/texlive/2024/texmf-dist/tex/latex/psnfss/t1phv.fd +(c:/texlive/2024/texmf-dist/tex/latex/listings/lstlang1.sty +File: lstlang1.sty 2024/09/23 1.10c listings language file +) +(c:/texlive/2024/texmf-dist/tex/latex/listings/lstlang1.sty +File: lstlang1.sty 2024/09/23 1.10c listings language file +) +(c:/texlive/2024/texmf-dist/tex/latex/listings/lstmisc.sty +File: lstmisc.sty 2024/09/23 1.10c (Carsten Heinz) +) +LaTeX Font Info: Trying to load font information for T1+phv on input line 17 +3. + +(c:/texlive/2024/texmf-dist/tex/latex/psnfss/t1phv.fd File: t1phv.fd 2020/03/25 scalable font definitions for T1/phv. ) (c:/texlive/2024/texmf-dist/tex/latex/l3backend/l3backend-pdftex.def File: l3backend-pdftex.def 2024-05-08 L3 backend support: PDF output (pdfTeX) -\l__color_backend_stack_int=\count377 -\l__pdf_internal_box=\box101 +\l__color_backend_stack_int=\count385 +\l__pdf_internal_box=\box103 ) (./ba.aux) \openout1 = `ba.aux'. -LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for OMS/cmsy/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for OT1/cmr/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for T1/cmr/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for TS1/cmr/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for U/cmr/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for PD1/pdf/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. -LaTeX Font Info: Checking defaults for PU/pdf/m/n on input line 128. -LaTeX Font Info: ... okay on input line 128. +LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for OMS/cmsy/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for OT1/cmr/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for T1/cmr/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for TS1/cmr/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for U/cmr/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for PD1/pdf/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. +LaTeX Font Info: Checking defaults for PU/pdf/m/n on input line 173. +LaTeX Font Info: ... okay on input line 173. (c:/texlive/2024/texmf-dist/tex/context/base/mkii/supp-pdf.mkii [Loading MPS to PDF converter (version 2006.09.02).] 
-\scratchcounter=\count378 -\scratchdimen=\dimen302 -\scratchbox=\box102 -\nofMPsegments=\count379 -\nofMParguments=\count380 -\everyMPshowfont=\toks56 -\MPscratchCnt=\count381 -\MPscratchDim=\dimen303 -\MPnumerator=\count382 -\makeMPintoPDFobject=\count383 -\everyMPtoPDFconversion=\toks57 +\scratchcounter=\count386 +\scratchdimen=\dimen306 +\scratchbox=\box104 +\nofMPsegments=\count387 +\nofMParguments=\count388 +\everyMPshowfont=\toks57 +\MPscratchCnt=\count389 +\MPscratchDim=\dimen307 +\MPnumerator=\count390 +\makeMPintoPDFobject=\count391 +\everyMPtoPDFconversion=\toks58 ) (c:/texlive/2024/texmf-dist/tex/latex/epstopdf-pkg/epstopdf-base.sty Package: epstopdf-base 2020-01-24 v2.11 Base part for package epstopdf Package epstopdf-base Info: Redefining graphics rule for `.eps' on input line 4 @@ -1178,22 +1216,22 @@ File: epstopdf-sys.cfg 2010/07/13 v1.3 Configuration of (r)epstopdf for TeX Liv e )) Package microtype Info: Patching varwidth to enable character protrusion. -\MT@vwid@leftmargin=\dimen304 -\MT@vwid@rightmargin=\dimen305 -LaTeX Info: Redefining \microtypecontext on input line 128. -Package microtype Info: Applying patch `item' on input line 128. -Package microtype Info: Applying patch `toc' on input line 128. -Package microtype Info: Applying patch `eqnum' on input line 128. -Package microtype Info: Applying patch `footnote' on input line 128. -Package microtype Info: Applying patch `verbatim' on input line 128. -LaTeX Info: Redefining \microtypesetup on input line 128. +\MT@vwid@leftmargin=\dimen308 +\MT@vwid@rightmargin=\dimen309 +LaTeX Info: Redefining \microtypecontext on input line 173. +Package microtype Info: Applying patch `item' on input line 173. +Package microtype Info: Applying patch `toc' on input line 173. +Package microtype Info: Applying patch `eqnum' on input line 173. +Package microtype Info: Applying patch `footnote' on input line 173. +Package microtype Info: Applying patch `verbatim' on input line 173. +LaTeX Info: Redefining \microtypesetup on input line 173. Package microtype Info: Generating PDF output. Package microtype Info: Character protrusion enabled (level 2). Package microtype Info: Using default protrusion set `alltext'. Package microtype Info: Automatic font expansion enabled (level 2), (microtype) stretch: 20, shrink: 20, step: 1, non-selected. Package microtype Info: Using default expansion set `alltext-nott'. -LaTeX Info: Redefining \showhyphens on input line 128. +LaTeX Info: Redefining \showhyphens on input line 173. Package microtype Info: No adjustment of tracking. Package microtype Info: No adjustment of interword spacing. Package microtype Info: No adjustment of character kerning. @@ -1236,7 +1274,7 @@ Package microtype Info: Loading generic protrusion settings for font family * \@reversemarginfalse * (1in=72.27pt=25.4mm, 1cm=28.453pt) -Package hyperref Info: Link coloring OFF on input line 128. +Package hyperref Info: Link coloring OFF on input line 173. (./ba.out) (./ba.out) \@outlinefile=\write4 \openout4 = `ba.out'. @@ -1248,7 +1286,7 @@ Class scrbook Info: loading recommended package `bookmark'. (scrbook) `bookmarkpackage=false' before \begin{document} and (scrbook) you can avoid this message adding: (scrbook) \usepackage{bookmark} -(scrbook) before \begin{document} on input line 128. +(scrbook) before \begin{document} on input line 173. 
(c:/texlive/2024/texmf-dist/tex/latex/bookmark/bookmark.sty Package: bookmark 2023-12-10 v1.31 PDF bookmarks (HO) @@ -1256,44 +1294,47 @@ Package: bookmark 2023-12-10 v1.31 PDF bookmarks (HO) (c:/texlive/2024/texmf-dist/tex/latex/bookmark/bkm-pdftex.def File: bkm-pdftex.def 2023-12-10 v1.31 bookmark driver for pdfTeX and luaTeX (HO ) -\BKM@id=\count384 +\BKM@id=\count392 )) -Package scrbase Info: activating ngerman \figurename on input line 136. -Package scrbase Info: activating ngerman \contentsname on input line 137. -\@logobreite=\skip92 -\@logohoehe=\skip93 -\@koordx=\skip94 -\@koordy=\skip95 -\@offsx=\skip96 -\@offsy=\skip97 +\c@lstlisting=\count393 +Package tocbasic Info: setting babel extension for `lol' on input line 173. +\scr@dte@lstlisting@maxnumwidth=\skip92 +Package scrbase Info: activating ngerman \figurename on input line 181. +Package scrbase Info: activating ngerman \contentsname on input line 182. +\@logobreite=\skip93 +\@logohoehe=\skip94 +\@koordx=\skip95 +\@koordy=\skip96 +\@offsx=\skip97 +\@offsy=\skip98 <fh_logo.png, id=7, 601.2864pt x 2056.8042pt> File: fh_logo.png Graphic file (type png) <use fh_logo.png> -Package pdftex.def Info: fh_logo.png used on input line 141. +Package pdftex.def Info: fh_logo.png used on input line 186. (pdftex.def) Requested size: 33.26811pt x 113.81102pt. File: fh_logo.png Graphic file (type png) <use fh_logo.png> -Package pdftex.def Info: fh_logo.png used on input line 141. +Package pdftex.def Info: fh_logo.png used on input line 186. (pdftex.def) Requested size: 33.26811pt x 113.81102pt. File: fh_logo.png Graphic file (type png) <use fh_logo.png> -Package pdftex.def Info: fh_logo.png used on input line 141. +Package pdftex.def Info: fh_logo.png used on input line 186. (pdftex.def) Requested size: 33.26811pt x 113.81102pt. -Package xcolor Warning: Incompatible color definition on input line 141. +Package xcolor Warning: Incompatible color definition on input line 186. (c:/texlive/2024/texmf-dist/tex/latex/microtype/mt-cmr.cfg File: mt-cmr.cfg 2013/05/19 v2.2 microtype config. file: Computer Modern Roman (RS) ) -LaTeX Font Info: Trying to load font information for U+rsfs on input line 14 -1. +LaTeX Font Info: Trying to load font information for U+rsfs on input line 18 +6. (c:/texlive/2024/texmf-dist/tex/latex/jknapltx/ursfs.fd File: ursfs.fd 1998/03/24 rsfs font definition file (jk) ) -Overfull \hbox (11.99998pt too wide) in paragraph at lines 141--141 +Overfull \hbox (11.99998pt too wide) in paragraph at lines 186--186 [][] [] @@ -1305,8 +1346,8 @@ Overfull \hbox (11.99998pt too wide) in paragraph at lines 141--141 {c:/texlive/2024/texmf-var/fonts/map/pdftex/updmap/pdftex.map}{c:/texlive/2024/ texmf-dist/fonts/enc/dvips/base/8r.enc} <./fh_logo.png>] -Package tocbasic Info: character protrusion at toc deactivated on input line 15 -5. +Package tocbasic Info: character protrusion at toc deactivated on input line 20 +0. (./ba.toc) \tf@toc=\write5 \openout5 = `ba.toc'. @@ -1324,7 +1365,7 @@ Package scrlayer-scrpage Warning: Command deprecated! (scrlayer-scrpage) `<line thickness>:<line length>' to setup (scrlayer-scrpage) the line length and thickness, and (scrlayer-scrpage) `\setkomafont' or `\addtokomafont' to -(scrlayer-scrpage) setup the colour on input line 165. +(scrlayer-scrpage) setup the colour on input line 210. (./text.tex @@ -1405,26 +1446,64 @@ chapter 4. ] +Package microtype Info: Loading generic protrusion settings for font family +(microtype) `cmtt' (encoding: T1). 
+(microtype) For optimal results, create family-specific settings. +(microtype) See the microtype manual for details. +LaTeX Font Info: Font shape `T1/cmtt/bx/n' in size <10.95> not available +(Font) Font shape `T1/cmtt/m/n' tried instead on input line 140. + + +[11{c:/texlive/2024/texmf-dist/fonts/enc/dvips/cm-super/cm-super-t1.enc}] + +[12] +<images/Topic_explained.png, id=210, 641.39626pt x 359.84438pt> +File: images/Topic_explained.png Graphic file (type png) +<use images/Topic_explained.png> +Package pdftex.def Info: images/Topic_explained.png used on input line 224. +(pdftex.def) Requested size: 416.90262pt x 233.89607pt. + + +[13] + +[14 <./images/Topic_explained.png>] +<images/Sensor_holder_on_UR10e.jpg, id=251, 472.76625pt x 586.44093pt> +File: images/Sensor_holder_on_UR10e.jpg Graphic file (type jpg) +<use images/Sensor_holder_on_UR10e.jpg> +Package pdftex.def Info: images/Sensor_holder_on_UR10e.jpg used on input line +248. +(pdftex.def) Requested size: 142.77597pt x 177.10587pt. +<images/two_pcd.jpg, id=253, 536.75531pt x 350.81062pt> +File: images/two_pcd.jpg Graphic file (type jpg) +<use images/two_pcd.jpg> +Package pdftex.def Info: images/two_pcd.jpg used on input line 256. +(pdftex.def) Requested size: 268.37698pt x 175.40488pt. + +Underfull \hbox (badness 10000) in paragraph at lines 236--261 + + [] + + + +[15 <./images/Sensor_holder_on_UR10e.jpg>] + +[16 <./images/two_pcd.jpg>] chapter 5. -[11 +[17 ] chapter 6. -[12 +[18 ] (./ba.bbl LaTeX Font Info: Font shape `T1/phv/m/it' in size <12> not available (Font) Font shape `T1/phv/m/sl' tried instead on input line 12. -Package microtype Info: Loading generic protrusion settings for font family -(microtype) `cmtt' (encoding: T1). -(microtype) For optimal results, create family-specific settings. -(microtype) See the microtype manual for details. Underfull \hbox (badness 4647) in paragraph at lines 12--15 []\T1/phv/m/sl/12 (+20) Can the col-la-bo-ra-ti-ve ro-bot mar-ket ex-pe-ri-ence @@ -1443,45 +1522,50 @@ Overfull \hbox (150.14659pt too wide) in paragraph at lines 12--15 econd-[]growth-[]surge-[]in-[]the-[]post-[]pandemic-[]era/$[] [] -LaTeX Font Info: Font shape `TS1/phv/m/it' in size <12> not available -(Font) Font shape `TS1/phv/m/sl' tried instead on input line 37. -Underfull \hbox (badness 10000) in paragraph at lines 37--40 -[]\T1/phv/m/sl/12 (+20) tof_imager_micro_ros/teensy_pcl_publisher at hum-ble \T -S1/phv/m/sl/12 (+20) � \T1/phv/m/sl/12 (+20) adi-tya-ka- +Underfull \hbox (badness 10000) in paragraph at lines 36--40 +[]\T1/phv/m/sl/12 (+20) sparkfun/SparkFun_VL53L5CX_Arduino_Library\T1/phv/m/n/1 +2 (+20) . []$\T1/cmtt/m/n/12 https : / / github . com / [] -Underfull \hbox (badness 2293) in paragraph at lines 37--40 -\T1/phv/m/sl/12 (+20) math/tof_imager_micro_ros \TS1/phv/m/sl/12 (+20) � \T1/ph -v/m/sl/12 (+20) GitHub\T1/phv/m/n/12 (+20) . []$\T1/cmtt/m/n/12 https : / / gi -thub . com / adityakamath / +Underfull \hbox (badness 3029) in paragraph at lines 36--40 +\T1/cmtt/m/n/12 sparkfun / SparkFun _ VL53L5CX _ Arduino _ Library$[]\T1/phv/m/ +n/12 (+20) . -- original-date: 2021-10- [] +LaTeX Font Info: Font shape `TS1/phv/m/it' in size <12> not available +(Font) Font shape `TS1/phv/m/sl' tried instead on input line 43. 
-Underfull \hbox (badness 6725) in paragraph at lines 43--46 +Underfull \hbox (badness 10000) in paragraph at lines 43--46 +[]\T1/phv/m/sl/12 (+20) tof_imager_micro_ros/teensy_pcl_publisher at hum-ble \T +S1/phv/m/sl/12 (+20) � \T1/phv/m/sl/12 (+20) adi-tya-ka-ma- + [] + + +Underfull \hbox (badness 6725) in paragraph at lines 60--63 []\T1/phv/m/sl/12 (+20) VL53L7CX - Time-of-Flight (ToF) 8x8 mul-ti-zo-ne ran-gi ng sen-sor with [] -Underfull \hbox (badness 10000) in paragraph at lines 43--46 +Underfull \hbox (badness 10000) in paragraph at lines 60--63 \T1/phv/m/sl/12 (+20) 90 de-grees FoV - STMi-cro-elec-tro-nics\T1/phv/m/n/12 (+ 20) . []$\T1/cmtt/m/n/12 https : / / www . st . com / en / [] -[13 +[19 -{c:/texlive/2024/texmf-dist/fonts/enc/dvips/cm-super/cm-super-t1.enc}] +] -[14]) +[20]) -[15] (./ba.aux) +[21] (./ba.aux) *********** LaTeX2e <2024-11-01> patch level 2 L3 programming layer <2025-01-18> @@ -1490,22 +1574,24 @@ Package rerunfilecheck Info: File `ba.out' has not changed. (rerunfilecheck) Checksum: D41D8CD98F00B204E9800998ECF8427E;0. ) ) Here is how much of TeX's memory you used: - 40166 strings out of 473200 - 816263 string characters out of 5720278 - 1474790 words of memory out of 5000000 - 62438 multiletter control sequences out of 15000+600000 - 615181 words of font info for 255 fonts, out of 8000000 for 9000 + 42437 strings out of 473200 + 848283 string characters out of 5720278 + 1817956 words of memory out of 5000000 + 64577 multiletter control sequences out of 15000+600000 + 621377 words of font info for 277 fonts, out of 8000000 for 9000 1141 hyphenation exceptions out of 8191 - 108i,19n,107p,10968b,654s stack positions out of 10000i,1000n,20000p,200000b,200000s -<c:/texlive/2024/texmf-dist/fonts/type1/public/amsfonts/cm/cmr8.pfb><c:/tex -live/2024/texmf-dist/fonts/type1/public/cm-super/sftt1200.pfb><c:/texlive/2024/ -texmf-dist/fonts/type1/urw/helvetic/uhvb8a.pfb><c:/texlive/2024/texmf-dist/font -s/type1/urw/helvetic/uhvr8a.pfb><c:/texlive/2024/texmf-dist/fonts/type1/urw/hel -vetic/uhvro8a.pfb> -Output written on ba.pdf (15 pages, 1416188 bytes). + 108i,20n,107p,10968b,2172s stack positions out of 10000i,1000n,20000p,200000b,200000s +<c:/texlive/2024/texmf-dist/fonts/type1/public/amsfonts/cm/cmr12.pfb><c:/te +xlive/2024/texmf-dist/fonts/type1/public/amsfonts/cm/cmr8.pfb><c:/texlive/2024/ +texmf-dist/fonts/type1/public/amsfonts/cm/cmsy10.pfb><c:/texlive/2024/texmf-dis +t/fonts/type1/public/cm-super/sftt1095.pfb><c:/texlive/2024/texmf-dist/fonts/ty +pe1/public/cm-super/sftt1200.pfb><c:/texlive/2024/texmf-dist/fonts/type1/urw/he +lvetic/uhvb8a.pfb><c:/texlive/2024/texmf-dist/fonts/type1/urw/helvetic/uhvr8a.p +fb><c:/texlive/2024/texmf-dist/fonts/type1/urw/helvetic/uhvro8a.pfb> +Output written on ba.pdf (21 pages, 1873608 bytes). PDF statistics: - 282 PDF objects out of 1000 (max. 8388607) - 241 compressed objects within 3 object streams - 56 named destinations out of 1000 (max. 500000) - 66214 words of extra memory for PDF output out of 74296 (max. 10000000) + 438 PDF objects out of 1000 (max. 8388607) + 379 compressed objects within 4 object streams + 138 named destinations out of 1000 (max. 500000) + 72885 words of extra memory for PDF output out of 74296 (max. 
10000000) diff --git a/Praxiprojekt_Bericht/ba.pdf b/Praxiprojekt_Bericht/ba.pdf index 550df2a8813ce779ebe474700ad87bbad31b4117..965a35b173c93912d05d701c3cd56b986f7d148a 100644 Binary files a/Praxiprojekt_Bericht/ba.pdf and b/Praxiprojekt_Bericht/ba.pdf differ diff --git a/Praxiprojekt_Bericht/ba.synctex.gz b/Praxiprojekt_Bericht/ba.synctex.gz index 104296bee0f1ad18d61c344354cd404d5ccb14f1..fd07164b43e2dd797d5b06c2fa2d2c3f113ffe6c 100644 Binary files a/Praxiprojekt_Bericht/ba.synctex.gz and b/Praxiprojekt_Bericht/ba.synctex.gz differ diff --git a/Praxiprojekt_Bericht/ba.tex b/Praxiprojekt_Bericht/ba.tex index 15bd6487cf83297432d2b262ddeb8834d4f7173f..adac012ebf72587af1254686bf6f04542448be86 100644 --- a/Praxiprojekt_Bericht/ba.tex +++ b/Praxiprojekt_Bericht/ba.tex @@ -44,6 +44,8 @@ \usepackage[automark]{scrlayer-scrpage} % Package zum Definieren der Kopf- und Fußzeilen \usepackage{amsmath} % Muss sein \usepackage{mathrsfs} % Weitere Mathematik-Symbole, z.B. Laplace-L +\usepackage{listings} % Paket für Code-Darstellung +\usepackage{xcolor} % Für farbige Hervorhebung % %%%%% Anpassung an Formatvorlagen des Fachbereichs % @@ -112,6 +114,12 @@ \newacronym{MCU}{MCU}{Micro Controller Unit} \newacronym{ROS}{ROS2}{Robot Operating System 2} \newacronym{RVIZ}{RVIZ2}{Robot Visualization 2} +\newacronym{NUC}{NUC}{Next Unit of Computing} +\newacronym{LPN}{LPn}{Low-Power-Mode-Communication-Enable} +\newacronym{JSON}{JSON}{JavaScript Object Notation} +\newacronym{LED}{LED}{Light Emitting Diode} +\newacronym{USB}{USB}{Universal Seriell Bus} +\newacronym{PCD}{pcd}{Point Cloud Data} \newglossaryentry{Pose}{ name={Pose}, description={Position und Orientierung} @@ -124,7 +132,44 @@ name={Arbeitsraum}, description={Anteil des eingeschränkten Raumes, der während der Ausführung aller vom Anwenderprogramm vorgegebenen Bewegungen [vom Roboter] benutzt wird} } - +\lstset{ + language=C++, % Programmiersprache + basicstyle=\ttfamily\small, % Schriftart und -größe + keywordstyle=\color{blue}, % Schlüsselwörter in Blau + commentstyle=\color{green!50!black}, % Kommentare in Grün + stringstyle=\color{red}, % Strings in Rot + numbers=left, % Zeilennummern links anzeigen + numberstyle=\tiny, % Zeilennummern in kleiner Schrift + stepnumber=1, % Jede Zeile nummerieren + numbersep=10pt, % Abstand der Nummerierung vom Code + backgroundcolor=\color{gray!10},% Hintergrundfarbe + frame=single, % Rahmen um den Code + breaklines=true, % Automatischer Zeilenumbruch + captionpos=b, % Beschriftung unterhalb des Codes + tabsize=2, % Breite von Tabs + showstringspaces=false % Leerzeichen in Strings nicht anzeigen +} +\lstdefinestyle{customcpp}{ + morekeywords=[2]{ + shiftOut, + uint8_t, + uint16\_t, + digitalWrite, + LPn,initializeSensorInterface0, + delay, + sensor, + begin, + init_sensor, + vl53l7cx_set_i2c_address, + vl53l7cx_set_resolution, + vl53l7cx_set_ranging_frequency_hz, + vl53l7cx_start_ranging, + VL53L7CX, + }, + keywordstyle=[2]\color{blue}\bfseries, % Style for custom keywords + %emph=[2]{shiftOut,digitalWrite,LPn} + %emphstyle=[2]{\color{orange}\bfseries} +} \begin{document} %% Verschiedene Versionen, nach DIN 1505 zu zitieren \bibliographystyle{plaindin} diff --git a/Praxiprojekt_Bericht/ba.toc b/Praxiprojekt_Bericht/ba.toc index a61debac472c47df2c5baf40f852b0e3fa674714..6c554dd5ee1b555ca2552a8fb98110475a382804 100644 --- a/Praxiprojekt_Bericht/ba.toc +++ b/Praxiprojekt_Bericht/ba.toc @@ -8,11 +8,11 @@ \contentsline {section}{\numberline {4.1}Vorgehensweise}{10}{section.4.1}% \contentsline {section}{\numberline 
{4.2}Software}{10}{section.4.2}% \contentsline {subsection}{\numberline {4.2.1}Arduino}{10}{subsection.4.2.1}% -\contentsline {subsection}{\numberline {4.2.2}Robot Operating System 2}{10}{subsection.4.2.2}% -\contentsline {subsection}{\numberline {4.2.3}RVIZ2 und Gazebo Classic}{10}{subsection.4.2.3}% -\contentsline {section}{\numberline {4.3}Hardware}{10}{section.4.3}% -\contentsline {subsection}{\numberline {4.3.1}Elektronisch}{10}{subsection.4.3.1}% -\contentsline {subsection}{\numberline {4.3.2}Mechanisch}{10}{subsection.4.3.2}% -\contentsline {chapter}{\numberline {5}Ergebnis}{11}{chapter.5}% -\contentsline {chapter}{\numberline {6}Ausblick}{12}{chapter.6}% +\contentsline {subsection}{\numberline {4.2.2}Robot Operating System 2}{13}{subsection.4.2.2}% +\contentsline {subsection}{\numberline {4.2.3}RVIZ2 und Gazebo Classic}{15}{subsection.4.2.3}% +\contentsline {section}{\numberline {4.3}Hardware}{16}{section.4.3}% +\contentsline {subsection}{\numberline {4.3.1}Elektronisch}{16}{subsection.4.3.1}% +\contentsline {subsection}{\numberline {4.3.2}Mechanisch}{16}{subsection.4.3.2}% +\contentsline {chapter}{\numberline {5}Ergebnis}{17}{chapter.5}% +\contentsline {chapter}{\numberline {6}Ausblick}{18}{chapter.6}% \providecommand \tocbasic@end@toc@file {}\tocbasic@end@toc@file diff --git a/Praxiprojekt_Bericht/images/Screenshot 2025-01-20 013418.jpg b/Praxiprojekt_Bericht/images/Screenshot 2025-01-20 013418.jpg new file mode 100644 index 0000000000000000000000000000000000000000..39577a478c909f6911dcd0df695970437715794b Binary files /dev/null and b/Praxiprojekt_Bericht/images/Screenshot 2025-01-20 013418.jpg differ diff --git a/Praxiprojekt_Bericht/images/Sensor_holder_on_UR10e.jpg b/Praxiprojekt_Bericht/images/Sensor_holder_on_UR10e.jpg new file mode 100644 index 0000000000000000000000000000000000000000..aafc1493ca4a8daeab4357d2f1f2b00ed31532cf Binary files /dev/null and b/Praxiprojekt_Bericht/images/Sensor_holder_on_UR10e.jpg differ diff --git a/Praxiprojekt_Bericht/images/Topic_explained.png b/Praxiprojekt_Bericht/images/Topic_explained.png new file mode 100644 index 0000000000000000000000000000000000000000..0e2f4fcefcdbcd3c010748b93ef877e44bd3639c Binary files /dev/null and b/Praxiprojekt_Bericht/images/Topic_explained.png differ diff --git a/Praxiprojekt_Bericht/images/two_pcd.jpg b/Praxiprojekt_Bericht/images/two_pcd.jpg new file mode 100644 index 0000000000000000000000000000000000000000..118059e102d3baebf6bff0613739e47352d59510 Binary files /dev/null and b/Praxiprojekt_Bericht/images/two_pcd.jpg differ diff --git a/Praxiprojekt_Bericht/text.tex b/Praxiprojekt_Bericht/text.tex index 717720247251a9b77809bf8f4d567ff4cd7e3d75..2983a856e786b38139ef40eb52193740f4eea63f 100644 --- a/Praxiprojekt_Bericht/text.tex +++ b/Praxiprojekt_Bericht/text.tex @@ -113,11 +113,152 @@ \chapter{Umsetzung} \section{Vorgehensweise} - Um erst mal herauszufinden, ob mein Vorhaben möglich ist habe ich auf Github nach ähnlichen Projekten gesucht. \cite{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate} Da ich in meinem Projekt mehrere Sensoren verwenden will, wurde mir von meiner externen Betreuerin, Sophie Charlotte Keunecke, für die Umsetzung der Raspberry Pi Pico \cite{noauthor_pico-series_nodate} \acrfull{MCU} vorgeschlagen, da dieser \acrshort{MCU} zwei \acrfull{I2C} Interfaces besitzt. Zur Visualisierung der Daten will ich das \acrfull{RVIZ} Programm auf einem Ubuntu 22.04 mit \acrfull{ROS} Humble benutzen. 
+	Um erst einmal herauszufinden, ob mein Vorhaben möglich ist, habe ich auf GitHub nach ähnlichen Projekten gesucht. \cite{noauthor_tof_imager_micro_rosteensy_pcl_publisher_nodate} Da ich in meinem Projekt mehrere Sensoren verwenden will, wurde mir von meiner externen Betreuerin, Sophie Charlotte Keunecke, für die Umsetzung der Raspberry Pi Pico \cite{noauthor_pico-series_nodate} als \acrfull{MCU} vorgeschlagen, da dieser \acrshort{MCU} zwei \acrfull{I2C}-Interfaces besitzt. Zur Visualisierung der Daten will ich das \acrfull{RVIZ}-Programm auf einem \acrfull{NUC} mit Ubuntu 22.04 und \acrfull{ROS} Humble benutzen.
+	\\
+	Bei meinem ersten Versuch, einen Sensor zu initialisieren und Daten auszulesen, habe ich ein Beispiel aus der VL53L5CX-Bibliothek von SparkFun verwendet \cite{noauthor_sparkfunsparkfun_vl53l5cx_arduino_library_2025}. Später bin ich zu der offiziellen Arduino-Bibliothek für den VL53L7CX von STMicroelectronics gewechselt, da ich mich entschieden hatte, vom VL53L5CX auf den VL53L7CX zu wechseln.
+	Der VL53L7CX hat ein größeres horizontales und vertikales \acrshort{FOV} von $60^\circ$ \cite{noauthor_vl53l7cx_nodate}.
+	
 	\section{Software}
 	\subsection{Arduino}
+	Die folgende Software steuert mehrere \acrshort{ToFs} vom Typ VL53L7CX mit einem \acrshort{MCU}.
+	Die Sensoren kommunizieren über \acrshort{I2C}, wobei zwei separate \acrshort{I2C}-Busse (DEV\_I2C0 und DEV\_I2C1) verwendet werden.
+	Die Sensordaten werden im \acrfull{JSON}-Format aufbereitet und über die serielle Schnittstelle ausgegeben.
+	\\
+	Das Projekt verwendet die folgenden Bibliotheken:
+	\begin{itemize}
+		\setlength{\itemsep}{0pt} % Reduces space between items
+		\item Arduino.h: Grundlegende Arduino-Funktionalität
+		\item Wire.h: \acrshort{I2C}-Kommunikation
+		\item ArduinoJson.h: Serialisierung der Sensordaten in das \acrshort{JSON}-Format
+		\item vl53l7cx\_class.h: Ansteuerung der VL53L7CX-Sensoren
+		\item 74HC154.h: Steuerung des Multiplexers
+	\end{itemize}
+	Im Folgenden möchte ich auf einige Funktionen aus dem C++-Code eingehen.
+	Die Funktion aus \ref{Snippet LPN} steuert die \acrshort{LPN}-Pins der Sensoren, um die \acrshort{I2C}-Kommunikation im Low-Power-Mode an- oder auszuschalten.
+	Sie setzt das entsprechende Bit in einem von zwei 8-Bit-Bytes und sendet beide über shiftOut() an ein Schieberegister.
+	\begin{lstlisting}[caption={Funktion zum Setzen eines LPn-Pins},label=Snippet LPN,style=customcpp]
+		void LPn(uint16_t dataPin, uint16_t latchPin, uint16_t clockPin, uint16_t Pin, bool on) {
+			uint8_t Pin_byte1 = 0b00000000;
+			uint8_t Pin_byte2 = 0b00000000;
+			
+			// Set the bit that belongs to the selected LPn pin
+			if (Pin <= 7) {
+				Pin_byte1 |= (on << Pin);
+			} else if (Pin <= 15) {
+				Pin_byte2 |= (on << (Pin - 8));
+			}
+			
+			// Shift both bytes into the shift register and latch the outputs
+			digitalWrite(latchPin, LOW);
+			shiftOut(dataPin, clockPin, MSBFIRST, Pin_byte2);
+			shiftOut(dataPin, clockPin, MSBFIRST, Pin_byte1);
+			digitalWrite(latchPin, HIGH);
+		}
+	\end{lstlisting}
+	Die Funktion in \ref{Snippet Init} aktiviert den Sensor, setzt die \acrshort{I2C}-Adresse, die Auflösung und die Messfrequenz. Nach der Konfiguration wird das Ranging gestartet. Als Parameter benötigt die Funktion ein Objekt der Klasse VL53L7CX, eine neue \acrshort{I2C}-Adresse und den \acrshort{LPN}-Pin des Sensors, der initialisiert werden soll.
+	\begin{lstlisting}[caption={Funktion zum Initialisieren der Sensoren im Setup},label=Snippet Init,style=customcpp]
+		void initializeSensorInterface0(VL53L7CX &sensor, uint16_t sensorAddress, int lpnPin) {
+			LPn(dataPin0, latchPin0, clockPin0, lpnPin, true); // Enable I2C communication of this sensor (LPn high)
+			delay(wait_for_i2c);
+			sensor.begin();
+			delay(wait_for_i2c);
+			sensor.init_sensor();
+			delay(wait_for_i2c);
+			
+			// Set I2C address
+			sensor.vl53l7cx_set_i2c_address(sensorAddress << 1);
+			delay(wait_for_i2c);
+			
+			// Set resolution and frequency
+			sensor.vl53l7cx_set_resolution(VL53L7CX_RESOLUTION_8X8);
+			delay(wait_for_i2c);
+			sensor.vl53l7cx_set_ranging_frequency_hz(ranging_frequency);
+			delay(wait_for_i2c);
+			
+			// Start ranging
+			sensor.vl53l7cx_start_ranging();
+			delay(wait_for_i2c);
+			
+			// Pull LPn low again
+			LPn(dataPin0, latchPin0, clockPin0, lpnPin, false);
+		}
+	\end{lstlisting}
+	Die letzte Funktion, auf die ich in diesem Kapitel eingehen möchte, ist in \ref{Snippet Prozess} zu sehen.
+	Diese Funktion ruft die Messwerte des \acrshort{ToF}s ab und speichert sie in einem \acrshort{JSON}-Array.
+	Die \acrfull{LED} wird während der Verarbeitung eingeschaltet und danach wieder ausgeschaltet.
+	Die Sensordaten werden in einer Matrix verarbeitet und im \acrshort{JSON}-Format gespeichert. Am Ende der loop()-Funktion werden die Daten, sobald jeder Sensor ein Array aus Sensordaten zum \acrshort{JSON}-String hinzugefügt hat, seriell mit einer Baudrate von 115200 an den \acrshort{NUC} übertragen. Als Parameter benötigt die Funktion ein Objekt der Klasse VL53L7CX, eine leere Instanz des Sensordaten-Formats, das \acrshort{JSON}-Array, in dem die Daten gespeichert werden, und den Schlüssel, mit dem die Daten vom Programm auf dem \acrshort{NUC} aus dem \acrshort{JSON}-Array extrahiert werden können.
+	\begin{lstlisting}[label=Snippet Prozess, style=customcpp, caption={processSensorData - Verarbeitung der Sensordaten}]
+		void processSensorData(VL53L7CX &sensor, VL53L7CX_ResultsData &results, const JsonArray& data, const char* sensorKey) {
+			uint8_t NewDataReady = 0;
+			uint8_t status;
+			
+			// Wait for data to be ready
+			do {
+				status = sensor.vl53l7cx_check_data_ready(&NewDataReady);
+			} while (!NewDataReady);
+			
+			// Turn LED on to indicate data processing
+			digitalWrite(LedPin, HIGH);
+			
+			// If data is ready, get the ranging data and store it in the JSON array
+			if ((!status) && (NewDataReady != 0)) {
+				status = sensor.vl53l7cx_get_ranging_data(&results);
+				JsonArray sensorData = doc[sensorKey].to<JsonArray>();
+				
+				// Process the results and add them to the JSON array
+				for (int y = 0; y <= imageWidth * (imageWidth - 1); y += imageWidth) {
+					for (int x = imageWidth - 1; x >= 0; x--) {
+						sensorData.add(results.distance_mm[VL53L7CX_NB_TARGET_PER_ZONE * (x + y)]);
+					}
+				}
+			}
+			// Turn LED off to indicate processing is done
+			digitalWrite(LedPin, LOW);
+		}
+	\end{lstlisting}
 	\subsection{Robot Operating System 2}
+	\acrfull{ROS} ist ein Open-Source-Framework, das beim Erstellen von Anwendungen für die Robotik helfen soll.
+	Innerhalb des Frameworks erstellt man Packages. Packages sind Ansammlungen von Nodes, die in unterschiedlichen Programmiersprachen geschrieben sein können.
+	Innerhalb eines Packages verwendet man für die Nodes nur eine Programmiersprache. Nodes können in Dauerschleifen ausgeführt werden und, während sie laufen, durch \acrshort{ROS}-Subscriber neue Daten erhalten. Dieser Datenaustausch geschieht unter anderem über Topics, die von \acrshort{ROS}-Publishern für die Subscriber zur Verfügung gestellt werden.
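+	Zur Veranschaulichung zeigt das folgende, stark vereinfachte Beispiel, wie eine rclpy-Node einen Publisher anlegt und zyklisch eine Nachricht auf einem Topic veröffentlicht. Node-Name, Topic-Name und Nachrichtentyp sind hier frei gewählt und stammen nicht aus meinem Package.
+	\begin{lstlisting}[language=Python, caption={Minimalbeispiel eines rclpy-Publishers (nur zur Veranschaulichung)}]
+		import rclpy
+		from rclpy.node import Node
+		from std_msgs.msg import String
+		
+		class MinimalPublisher(Node):
+			def __init__(self):
+				super().__init__('minimal_publisher')
+				# Publisher on the topic 'chatter' with a queue size of 10
+				self.publisher_ = self.create_publisher(String, 'chatter', 10)
+				# Timer calls the callback every 0.5 seconds
+				self.timer = self.create_timer(0.5, self.timer_callback)
+			
+			def timer_callback(self):
+				msg = String()
+				msg.data = 'Hallo von der Node'
+				self.publisher_.publish(msg)
+		
+		def main():
+			rclpy.init()
+			rclpy.spin(MinimalPublisher())
+			rclpy.shutdown()
+		
+		if __name__ == '__main__':
+			main()
+	\end{lstlisting}
+	Ein Subscriber wird analog über create\_subscription() angelegt und erhält neue Nachrichten über eine Callback-Funktion.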
+	In Abbildung \ref{Topic_Viz} wird der Datenaustausch dargestellt.
+	\\
+	\begin{figure}[h]
+		\centering
+		\includegraphics[scale=0.65]{images/Topic_explained.png}
+		\caption{Visualisierung eines Topics, Quelle: \cite{noauthor_tutorials_nodate}}
+		\label{Topic_Viz}
+	\end{figure}
+	
+	Im Rahmen meines Projekts habe ich ein Package erstellt, das die Daten, die seriell über eine \acrfull{USB}-Verbindung vom Raspberry Pi Pico übermittelt werden, aufbereitet und als Topic veröffentlicht. Das Package besteht aktuell aus zwei Nodes.
+	\\
+	Die erste \acrshort{ROS}-Node liest Sensordaten über eine serielle Verbindung aus, verarbeitet sie und veröffentlicht sie als PointCloud2-Nachricht. Die Daten stammen von mehreren VL53L7CX-\acrshort{ToFs} und werden als \acrshort{JSON}-String empfangen. Die Node interpretiert die \acrshort{JSON}-Daten, konvertiert die Distanzwerte in 3D-Koordinaten und führt eine Rotationskorrektur durch, um die Sensordaten in ein einheitliches Koordinatensystem zu überführen. Anschließend werden die Punkte zu einer Punktwolke zusammengeführt und im \acrshort{ROS}-Frame \texttt{vl53l7cx\_link} veröffentlicht. Die Hauptfunktionen umfassen das Einlesen und Verarbeiten der seriellen Daten, die Berechnung der Punktkoordinaten aus den Sensordaten sowie die Generierung und Veröffentlichung der PointCloud2-Nachricht.
+	\\
+	Die zweite \acrshort{ROS}-Node implementiert die Verarbeitung von Punktwolken in Bezug auf ein \acrshort{UR}10e-Roboterarmmodell. Die Node empfängt Punktwolken über das Topic \acrfull{PCD}, transformiert diese in das Bezugssystem des Roboters und überprüft, ob die Punkte innerhalb der kollisionsrelevanten Mesh-Modelle des Roboters liegen.
+	Dazu werden die statischen Meshes der Roboterglieder geladen und skaliert. Beim Eintreffen einer neuen Punktwolke werden die Transformationsdaten des Roboters aus dem TF-Tree ausgelesen und die Meshes entsprechend transformiert. Anschließend wird für jeden Punkt geprüft, ob er innerhalb eines der Robotermeshes liegt. Die als außerhalb und innerhalb des Roboters klassifizierten Punkte werden getrennt und als neue Punktwolken auf den Topics /valid\_from\_perspective und /invalid\_from\_perspective veröffentlicht. Logging-Informationen geben die Anzahl der Punkte innerhalb und außerhalb des Roboters aus.
 	\subsection{RVIZ2 und Gazebo Classic}
+	RViz2 und Gazebo Classic sind zwei essenzielle Werkzeuge in der Robotik, insbesondere in der \acrshort{ROS}-Umgebung.
+	\\
+	RViz2 ist ein Visualisierungstool, das Sensordaten, Roboterbewegungen und Umgebungskarten in einer grafischen Oberfläche darstellt. Es ermöglicht das Debuggen und die Interaktion mit Sensordaten sowie die Visualisierung von Transformationsbeziehungen zwischen verschiedenen Robotergelenken und Sensoren.
+	\\
+	Gazebo Classic hingegen ist eine Simulationsumgebung, die realistische physikalische Modelle von Robotern und ihrer Umgebung erstellt. In Gazebo können Kollisionen, Gravitation, Reibung und andere physikalische Effekte simuliert werden, wodurch sich das Verhalten eines Roboters vor dem Einsatz in der realen Welt testen lässt.
+	\\
+	In meinem Projekt habe ich in Gazebo Classic einen \acrshort{UR}10 simuliert und mit MoveIt2 bewegt.
+	Die Simulation hat dann Topics veröffentlicht, die die Positionen der Robotergelenke betreffen.
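+	Die Gelenkstellungen aus der Simulation stehen den Nodes unter anderem über den TF-Tree zur Verfügung, aus dem sich die Transformationen zwischen den Koordinatensystemen des Roboters abfragen lassen. Der folgende Ausschnitt skizziert beispielhaft, wie eine solche Abfrage mit tf2\_ros in einer rclpy-Node aussehen kann; die Frame-Namen base\_link und vl53l7cx\_link sind dabei nur beispielhaft gewählt.
+	\begin{lstlisting}[language=Python, caption={Beispielhafte Abfrage einer Transformation aus dem TF-Tree}]
+		import rclpy
+		from rclpy.node import Node
+		from rclpy.time import Time
+		from tf2_ros import Buffer, TransformListener, TransformException
+		
+		class TfReader(Node):
+			def __init__(self):
+				super().__init__('tf_reader')
+				self.tf_buffer = Buffer()
+				self.tf_listener = TransformListener(self.tf_buffer, self)
+				self.timer = self.create_timer(1.0, self.on_timer)
+			
+			def on_timer(self):
+				try:
+					# Latest transform from the sensor frame into the robot base frame
+					t = self.tf_buffer.lookup_transform('base_link', 'vl53l7cx_link', Time())
+					self.get_logger().info(str(t.transform.translation))
+				except TransformException as error:
+					self.get_logger().warn(str(error))
+		
+		def main():
+			rclpy.init()
+			rclpy.spin(TfReader())
+			rclpy.shutdown()
+		
+		if __name__ == '__main__':
+			main()
+	\end{lstlisting}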
+	In der pcl\_rob\_node.py, die überprüft, ob die Sensorwerte den Roboter oder ein potentielles Kollisionsobjekt detektiert haben, wird die Information über die Position des Sensormoduls und die Position der einzelnen Achsen des Roboters benötigt.
+	Als Vorlage für die Simulation diente \cite{noauthor_universalrobotsuniversal_robots_ros2_gz_simulation_2025}. Jedoch gibt es an dem Modell von Universal Robots kein Sensormodul, und deswegen habe ich zum Testen am Unterarm des Roboters eine Sensorhalterung angebracht, wie in Abbildung \ref{ur10} zu sehen ist.
+	\begin{figure}[h]
+		\centering
+		\includegraphics[scale=0.302]{images/Sensor_holder_on_UR10e.jpg}
+		\caption{Bildschirmaufnahme aus Fusion von einer UR10-Baugruppe mit Sensormodul}
+		\label{ur10}
+	\end{figure}
+	\\
+	Zur Visualisierung und Validierung der 3D-Koordinaten der Punktwolken benutze ich \acrshort{RVIZ}. Bei meinem ersten Versuch, eine Punktwolke mit den Daten von zwei VL53L7CX zu generieren, habe ich die Sensoren auf eine circa 1 Meter entfernte Wand gerichtet, um sicherzustellen, dass die Sensoren richtig ausgerichtet sind. Die Punktwolke ist in Abbildung \ref{two_pcd} zu sehen.
+	\begin{figure}[h]
+		\centering
+		\includegraphics[scale=0.5]{images/two_pcd.jpg}
+		\caption{Bildschirmaufnahme aus \acrshort{RVIZ} von der Punktwolke einer Wand}
+		\label{two_pcd}
+	\end{figure}
+	\\
+	
 	\section{Hardware}
 	\subsection{Elektronisch}
 	\subsection{Mechanisch}