diff --git a/Praxiprojekt_Bericht/BA.bib b/Praxiprojekt_Bericht/BA.bib index 1e0eb71c0a3f13b46f59a2cc65bb16fb3ee68534..9df6dd3bcb9ea5745c4b59801f6049d60e99757d 100644 --- a/Praxiprojekt_Bericht/BA.bib +++ b/Praxiprojekt_Bericht/BA.bib @@ -356,3 +356,93 @@ langid = {british}, file = {Snapshot:C\:\\Users\\Rene\\Zotero\\storage\\25UG57J5\\can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era.html:text/html}, } + +@article{li_safe_2024, + title = {Safe human–robot collaboration for industrial settings: a survey}, + volume = {35}, + issn = {1572-8145}, + url = {https://doi.org/10.1007/s10845-023-02159-4}, + doi = {10.1007/s10845-023-02159-4}, + shorttitle = {Safe human–robot collaboration for industrial settings}, + abstract = {Human–robot collaboration ({HRC}) plays a pivotal role in today’s industry by supporting increasingly customised product development. Via {HRC}, the strengths of humans and robots can be combined to facilitate collaborative jobs within common workplaces to achieve specific industrial goals. Given the significance of safety assurance in {HRC}, in this survey paper, an update on standards and implementation approaches presented in the latest literature is given to reflect the state-of-the-art of this prominent research topic. First, an overview of safety standards for industrial robots, collaborative robots, and {HRC} is provided. Then, a survey of various approaches to {HRC} safety is conducted from two main perspectives, i.e., pre-collision and post-collision, which are further detailed in the aspects of sensing, prediction, learning, planning/replanning, and compliance control. Major characteristics, pros, cons, and applicability of the approaches are analysed. Finally, challenging issues and prospects for the future development of {HRC} safety are highlighted to provide recommendations for relevant stakeholders to consider when designing {HRC}-enabled industrial systems.}, + pages = {2235--2261}, + number = {5}, + journaltitle = {Journal of Intelligent Manufacturing}, + shortjournal = {J Intell Manuf}, + author = {Li, Weidong and Hu, Yudie and Zhou, Yong and Pham, Duc Truong}, + urldate = {2025-02-19}, + date = {2024-06-01}, + langid = {english}, + keywords = {Collaborative robots, Collision detection, Human–robot collaboration ({HRC}), Obstacle avoidance, Safety}, + file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\4JS4CSVA\\Li et al. - 2024 - Safe human–robot collaboration for industrial settings a survey.pdf:application/pdf}, +} + +@inproceedings{amaya-mejia_vision-based_2022, + title = {Vision-Based Safety System for Barrierless Human-Robot Collaboration}, + url = {https://ieeexplore.ieee.org/document/9981689}, + doi = {10.1109/IROS47612.2022.9981689}, + abstract = {Human safety has always been the main priority when working near an industrial robot. With the rise of Human-Robot Collaborative environments, physical barriers to avoiding collisions have been disappearing, increasing the risk of accidents and the need for solutions that ensure a safe Human-Robot Collaboration. This paper proposes a safety system that implements Speed and Separation Monitoring ({SSM}) type of operation. For this, safety zones are defined in the robot's workspace following current standards for industrial collaborative robots. A deep learning-based computer vision system detects, tracks, and estimates the 3D position of operators close to the robot. 
The robot control system receives the operator's 3D position and generates 3D representations of them in a simulation environment. Depending on the zone where the closest operator was detected, the robot stops or changes its operating speed. Three different operation modes in which the human and robot interact are presented. Results show that the vision-based system can correctly detect and classify in which safety zone an operator is located and that the different proposed operation modes ensure that the robot's reaction and stop time are within the required time limits to guarantee safety.}, + eventtitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})}, + pages = {7331--7336}, + booktitle = {2022 {IEEE}/{RSJ} International Conference on Intelligent Robots and Systems ({IROS})}, + author = {Amaya-Mejía, Lina María and Duque-Suárez, Nicolás and Jaramillo-Ramírez, Daniel and Martinez, Carol}, + urldate = {2025-02-19}, + date = {2022-10}, + note = {{ISSN}: 2153-0866}, + keywords = {Collaboration, Collision avoidance, Robot control, Safety, Service robots, Solid modeling, Three-dimensional displays}, + file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\XX9FL2U5\\Amaya-Mejía et al. - 2022 - Vision-Based Safety System for Barrierless Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\29LFGV4B\\9981689.html:text/html}, +} + +@inproceedings{choi_xr-based_2022, + title = {An {XR}-based Approach to Safe Human-Robot Collaboration}, + url = {https://ieeexplore.ieee.org/document/9757621}, + doi = {10.1109/VRW55335.2022.00106}, + abstract = {It is crucial to prevent safety accidents for human-robot collaboration. This study proposes an extended reality ({XR}) approach to safe {HRC} by calculating the minimum distance between the human operator and robot in real time. The proposed approach scans the real environment with multiple sensors and constructs its virtual space in {XR}. The virtual robot is synchronized with the real robot by matching the point cloud of real environment with that of the virtual robot. This system can effectively provide task assistance and safety information to the user wearing an {XR} device.}, + eventtitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})}, + pages = {481--482}, + booktitle = {2022 {IEEE} Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops ({VRW})}, + author = {Choi, Sung Ho and Park, Kyeong-Beom and Roh, Dong Hyeon and Lee, Jae Yeol and Ghasemi, Yalda and Jeong, Heejin}, + urldate = {2025-02-19}, + date = {2022-03}, + keywords = {Collaboration, Conferences, H.5.1 [Information Interfaces and Presentation]: Multimedia Information Systems—{XR}-based safety information, Human-robot collaboration, I.3.8 [Computer Graphics]: Applications—Extended Reality, J.7 [Computer Applications]: Computers in Other Systems—Virtual-Physics synchronization, mixed/extended reality ({MR}/{XR}), Real-time systems, Robot sensing systems, Safety, safety distance, Service robots, Three-dimensional displays}, + file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\VYUDN5LQ\\Choi et al. 
- 2022 - An XR-based Approach to Safe Human-Robot Collaboration.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\GDI6TZQ2\\9757621.html:text/html}, +} + +@inproceedings{al_naser_fusion_2022, + title = {Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a robot in an industrial environment}, + url = {https://ieeexplore.ieee.org/document/9900548}, + doi = {10.1109/RO-MAN53752.2022.9900548}, + abstract = {Accurate detection of the human body and its limbs in real-time is one of the challenges toward human-robot collaboration ({HRC}) technology in the factory of the future. In this work, a new algorithm has been developed to fuse thermal, depth, and color information. Starting with the calibration of the sensors to each other, proceeding with data fusion and detection of human body parts, and ending with pose estimation. The proposed approach has been tested in various {HRC} scenarios. It has shown a significant decrease in errors and noise in industrial environments. Furthermore, it produces not only a higher positioning accuracy of the head and hands compared to the state of art algorithms (e.g., {OpenPose}), but it is also faster. Hence, such an algorithm could be joined with digital twins for safe and efficient human-robot collaboration.}, + eventtitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})}, + pages = {532--537}, + booktitle = {2022 31st {IEEE} International Conference on Robot and Human Interactive Communication ({RO}-{MAN})}, + author = {Al Naser, Ibrahim and Dahmen, Johannes and Bdiwi, Mohamad and Ihlenfeldt, Steffen}, + urldate = {2025-02-19}, + date = {2022-08}, + note = {{ISSN}: 1944-9437}, + keywords = {Collaboration, Digital Twins, human detection, Human robot collaboration, Image color analysis, Location awareness, sensor fusion, Sensor fusion, Service robots, Stability criteria, Thermal sensors}, + file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\Q933FYY2\\Al Naser et al. - 2022 - Fusion of depth, color, and thermal images towards digital twins and safe human interaction with a r.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\942BAXF5\\9900548.html:text/html}, +} + +@inproceedings{rashid_local_2020, + title = {Local and Global Sensors for Collision Avoidance}, + url = {https://ieeexplore.ieee.org/document/9235223}, + doi = {10.1109/MFI49285.2020.9235223}, + abstract = {Implementation of safe and efficient human robot collaboration for agile production cells with heavy-duty industrial robots, having large stopping distances and large self-occlusion areas, is a challenging task. Collision avoidance is the main functionality required to realize this task. In fact, it requires accurate estimation of shortest distance between known (robot) and unknown (human or anything else) objects in a large area. This work proposes a selective fusion of global and local sensors, representing a large range 360° {LiDAR} and a small range {RGB} camera respectively, in the context of dynamic speed and separation monitoring. Safety functionality has been evaluated for collision detection between unknown dynamic object to manipulator joints. The system yields 29-40\% efficiency compared to fenced system. Heavy-duty industrial robot and a controlled linear axis dummy is used for evaluating different robot and scenario configurations. 
Results suggest higher efficiency and safety when using local and global setup.}, + eventtitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})}, + pages = {354--359}, + booktitle = {2020 {IEEE} International Conference on Multisensor Fusion and Integration for Intelligent Systems ({MFI})}, + author = {Rashid, Aquib and Peesapati, Kannan and Bdiwi, Mohamad and Krusche, Sebastian and Hardt, Wolfram and Putz, Matthias}, + urldate = {2025-02-19}, + date = {2020-09}, + keywords = {Cameras, Laser radar, Production, Robot vision systems, Safety, Sensor fusion, Service robots}, + file = {Full Text PDF:C\:\\Users\\Rene\\Zotero\\storage\\HAXPN6EL\\Rashid et al. - 2020 - Local and Global Sensors for Collision Avoidance.pdf:application/pdf;IEEE Xplore Abstract Record:C\:\\Users\\Rene\\Zotero\\storage\\4X42Y6TK\\9235223.html:text/html}, +} + +@article{jain_survey_nodate, + title = {A survey of Laser Range Finding}, + abstract = {This report provides a informal survey of laser distance measurement. Applications of laser range finding are briefly discussed and various techniques for laser ranging such as active laser triangulation, pulsed time-of-flight ({TOF}), phase shift, {FMCW}, and correlation are described.}, + author = {Jain, Siddharth}, + langid = {english}, + file = {PDF:C\:\\Users\\Rene\\Zotero\\storage\\X2WNAHZB\\Jain - A survey of Laser Range Finding.pdf:application/pdf}, +} diff --git a/Praxiprojekt_Bericht/Quellen/Laser_Range_finding.pdf b/Praxiprojekt_Bericht/Quellen/Laser_Range_finding.pdf new file mode 100644 index 0000000000000000000000000000000000000000..2ae1600fb70b233e6f8979f856822fd81fca4270 Binary files /dev/null and b/Praxiprojekt_Bericht/Quellen/Laser_Range_finding.pdf differ diff --git a/Praxiprojekt_Bericht/main.aux b/Praxiprojekt_Bericht/main.aux index 6447a87c493f16900ceb83210c0be655def12f56..77da8b1660a10ed28055a4e8bd8111ebad1a3655 100644 --- a/Praxiprojekt_Bericht/main.aux +++ b/Praxiprojekt_Bericht/main.aux @@ -2,7 +2,7 @@ \providecommand \babel@aux [2]{\global \let \babel@toc \@gobbletwo } \@nameuse{bbl@beforestart} \catcode `"\active -\bibstyle{plaindin} +\bibstyle{plain} \providecommand\@newglossary[4]{} \@newglossary{main}{glg}{gls}{glo} \providecommand\@glsorder[1]{} @@ -20,29 +20,37 @@ \citation{liu_application_2024} \citation{popov_collision_2017} \citation{noauthor_robotics_2021} +\citation{al_naser_fusion_2022} +\citation{amaya-mejia_vision-based_2022} +\citation{rashid_local_2020} \@writefile{toc}{\contentsline {section}{\numberline {2}Stand der Technik}{2}{}\protected@file@percent } \@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Market Outlook for Cobots. 
Source:\cite {noauthor_can_nodate}}}{2}{}\protected@file@percent } \newlabel{Cobot Growth}{{1}{2}{}{figure.1}{}} \@writefile{toc}{\contentsline {subsection}{\numberline {2.1}Kollisionsvermeidung und Kollisionserkennung}{2}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Laserscanner oder Kamerasysteme}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {2.3}ROS2 und MoveIt2}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {3}Umsetzung}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {3.1}Vorgehensweise}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Software}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.2.1}Arduino}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.2.2}Robot Operating System 2}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.2.3}RVIZ2 und Gazebo Classic}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsection}{\numberline {3.3}Hardware}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.3.1}Elektronisch}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.3.2}Mechanisch}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {4}Ergebnis}{3}{}\protected@file@percent } -\@writefile{toc}{\contentsline {section}{\numberline {5}Ausblick}{3}{}\protected@file@percent } +\citation{li_common_2019} +\citation{jain_survey_nodate} +\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Laserscanner und Kamerasysteme}{3}{}\protected@file@percent } +\@writefile{toc}{\contentsline {section}{\numberline {3}Umsetzung}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {3.1}Vorgehensweise}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Software}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.2.1}Arduino}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.2.2}Robot Operating System 2}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.2.3}RVIZ2 und Gazebo Classic}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsection}{\numberline {3.3}Hardware}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.3.1}Elektronisch}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.3.2}Mechanisch}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {section}{\numberline {4}Ergebnis}{4}{}\protected@file@percent } +\@writefile{toc}{\contentsline {section}{\numberline {5}Ausblick}{4}{}\protected@file@percent } \bibdata{BA} \bibcite{noauthor_can_nodate}{1} \bibcite{noauthor_robotics_2021}{2} \bibcite{hering_sensoren_2018}{3} \bibcite{noauthor_vl53l7cx_nodate}{4} -\bibcite{li_common_2019}{5} -\bibcite{liu_application_2024}{6} -\bibcite{popov_collision_2017}{7} -\gdef \@abspage@last{8} +\bibcite{al_naser_fusion_2022}{5} +\bibcite{amaya-mejia_vision-based_2022}{6} +\bibcite{jain_survey_nodate}{7} +\bibcite{li_common_2019}{8} +\bibcite{liu_application_2024}{9} +\bibcite{popov_collision_2017}{10} +\bibcite{rashid_local_2020}{11} +\gdef 
\@abspage@last{9} diff --git a/Praxiprojekt_Bericht/main.bbl b/Praxiprojekt_Bericht/main.bbl index b92dcae5af979cdf4ef2b5e55a5a239887fd06dd..7da2b14dd73acdf4dd0440f22749f79265601960 100644 --- a/Praxiprojekt_Bericht/main.bbl +++ b/Praxiprojekt_Bericht/main.bbl @@ -1,60 +1,64 @@ -\begin{thebibliography}{1} - -% this bibliography is generated by plaindin.bst [8.2] from 2005-12-21 - -\providecommand{\url}[1]{\texttt{#1}} -\expandafter\ifx\csname urlstyle\endcsname\relax - \providecommand{\doi}[1]{doi: #1}\else - \providecommand{\doi}{doi: \begingroup \urlstyle{rm}\Url}\fi - -\bibitem[1]{noauthor_can_nodate} -\emph{Can the collaborative robot market experience a second growth surge in - the post-pandemic era?} -\newblock - \url{https://interactanalysis.com/insight/can-the-collaborative-robot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/} - -\bibitem[2]{noauthor_robotics_2021} -\emph{Robotics - Vocabulary}. -\newblock \url{https://www.dinmedia.de/de/norm/iso-8373/348036781} - -\bibitem[3]{hering_sensoren_2018} -\textsc{Hering}, Ekbert (Hrsg.) ; \textsc{Schönfelder}, Gert (Hrsg.): -\newblock \emph{Sensoren in Wissenschaft und Technik}. -\newblock \url{http://dx.doi.org/10.1007/978-3-658-12562-2} - -\bibitem[4]{noauthor_vl53l7cx_nodate} -\emph{{VL}53L7CX - Time-of-Flight ({ToF}) 8x8 multizone ranging sensor with 90 - degrees {FoV} - {STMicroelectronics}}. -\newblock - \url{https://www.st.com/en/imaging-and-photonics-solutions/vl53l7cx.html} - -\bibitem[5]{li_common_2019} -\textsc{Li}, Peng ; \textsc{Liu}, Xiangpeng: -\newblock Common Sensors in Industrial Robots: A Review. -\newblock 1267, Nr. 1, 012036. -\newblock \url{http://dx.doi.org/10.1088/1742-6596/1267/1/012036}. -- -\newblock DOI 10.1088/1742--6596/1267/1/012036. -- -\newblock ISSN 1742--6588, 1742--6596 - -\bibitem[6]{liu_application_2024} -\textsc{Liu}, Li ; \textsc{Guo}, Fu ; \textsc{Zou}, Zishuai ; \textsc{Duffy}, - Vincent~G.: -\newblock Application, Development and Future Opportunities of Collaborative - Robots (Cobots) in Manufacturing: A Literature Review. -\newblock 40, Nr. 4, 915--932. -\newblock \url{http://dx.doi.org/10.1080/10447318.2022.2041907}. -- -\newblock DOI 10.1080/10447318.2022.2041907. -- -\newblock ISSN 1044--7318. -- +\begin{thebibliography}{10} + +\bibitem{noauthor_can_nodate} +Can the collaborative robot market experience a second growth surge in the + post-pandemic era? + +\bibitem{noauthor_robotics_2021} +Robotics - vocabulary. + +\bibitem{hering_sensoren_2018} +Sensoren in wissenschaft und technik. + +\bibitem{noauthor_vl53l7cx_nodate} +{VL}53l7cx - time-of-flight ({ToF}) 8x8 multizone ranging sensor with 90 + degrees {FoV} - {STMicroelectronics}. + +\bibitem{al_naser_fusion_2022} +Ibrahim Al~Naser, Johannes Dahmen, Mohamad Bdiwi, and Steffen Ihlenfeldt. +\newblock Fusion of depth, color, and thermal images towards digital twins and + safe human interaction with a robot in an industrial environment. +\newblock In {\em 2022 31st {IEEE} International Conference on Robot and Human + Interactive Communication ({RO}-{MAN})}, pages 532--537. +\newblock {ISSN}: 1944-9437. + +\bibitem{amaya-mejia_vision-based_2022} +Lina~María Amaya-Mejía, Nicolás Duque-Suárez, Daniel Jaramillo-Ramírez, + and Carol Martinez. +\newblock Vision-based safety system for barrierless human-robot collaboration. +\newblock In {\em 2022 {IEEE}/{RSJ} International Conference on Intelligent + Robots and Systems ({IROS})}, pages 7331--7336. +\newblock {ISSN}: 2153-0866. + +\bibitem{jain_survey_nodate} +Siddharth Jain. 
+\newblock A survey of laser range finding. + +\bibitem{li_common_2019} +Peng Li and Xiangpeng Liu. +\newblock Common sensors in industrial robots: A review. +\newblock 1267(1):012036. + +\bibitem{liu_application_2024} +Li~Liu, Fu~Guo, Zishuai Zou, and Vincent~G. Duffy. +\newblock Application, development and future opportunities of collaborative + robots (cobots) in manufacturing: A literature review. +\newblock 40(4):915--932. \newblock Publisher: Taylor \& Francis \_eprint: - https://doi.org/10.1080/10447318.2022.2041907 + https://doi.org/10.1080/10447318.2022.2041907. -\bibitem[7]{popov_collision_2017} -\textsc{Popov}, Dmitry ; \textsc{Klimchik}, Alexandr ; \textsc{Mavridis}, - Nikolaos: +\bibitem{popov_collision_2017} +Dmitry Popov, Alexandr Klimchik, and Nikolaos Mavridis. \newblock Collision detection, localization \& classification for industrial robots with joint torque sensors. -\newblock {In: }\emph{2017 26th {IEEE} International Symposium on Robot and - Human Interactive Communication ({RO}-{MAN})}, {IEEE}. -- -\newblock ISBN 978--1--5386--3518--6, 838--843 +\newblock In {\em 2017 26th {IEEE} International Symposium on Robot and Human + Interactive Communication ({RO}-{MAN})}, pages 838--843. {IEEE}. + +\bibitem{rashid_local_2020} +Aquib Rashid, Kannan Peesapati, Mohamad Bdiwi, Sebastian Krusche, Wolfram + Hardt, and Matthias Putz. +\newblock Local and global sensors for collision avoidance. +\newblock In {\em 2020 {IEEE} International Conference on Multisensor Fusion + and Integration for Intelligent Systems ({MFI})}, pages 354--359. \end{thebibliography} diff --git a/Praxiprojekt_Bericht/main.blg b/Praxiprojekt_Bericht/main.blg index 7362c56402f2dd865be5690c7506c4680c61015a..40f287ae1676037d7873eee7a8666c9735e22aca 100644 --- a/Praxiprojekt_Bericht/main.blg +++ b/Praxiprojekt_Bericht/main.blg @@ -1,12 +1,7 @@ This is BibTeX, Version 0.99d (TeX Live 2024) Capacity: max_strings=200000, hash_size=200000, hash_prime=170003 The top-level auxiliary file: main.aux -The style file: plaindin.bst -Reallocated singl_function (elt_size=4) to 100 items from 50. -Reallocated singl_function (elt_size=4) to 100 items from 50. -Reallocated wiz_functions (elt_size=4) to 6000 items from 3000. -Reallocated singl_function (elt_size=4) to 100 items from 50. -Reallocated singl_function (elt_size=4) to 100 items from 50. 
+The style file: plain.bst Database file #1: BA.bib Warning--entry type for "hering_sensoren_2018" isn't style-file defined --line 48 of file BA.bib @@ -18,48 +13,55 @@ Warning--to sort, need author or key in noauthor_robotics_2021 Warning--to sort, need author or key in noauthor_vl53l7cx_nodate Warning--to sort, need author or key in hering_sensoren_2018 Warning--to sort, need author or key in noauthor_can_nodate -Warning--there's no year in li_common_2019 -Warning--there's no year in liu_application_2024 -Warning--neither address nor publication date in popov_collision_2017 -You've used 7 entries, - 4335 wiz_defined-function locations, - 871 strings with 7558 characters, -and the built_in function-call counts, 3924 in all, are: -= -- 416 -> -- 65 -< -- 93 -+ -- 104 -- -- 21 -* -- 276 -:= -- 614 -add.period$ -- 9 -call.type$ -- 7 -change.case$ -- 23 +Warning--empty year in al_naser_fusion_2022 +Warning--empty year in amaya-mejia_vision-based_2022 +Warning--empty journal in jain_survey_nodate +Warning--empty year in jain_survey_nodate +Warning--empty journal in li_common_2019 +Warning--empty year in li_common_2019 +Warning--empty journal in liu_application_2024 +Warning--empty year in liu_application_2024 +Warning--empty year in popov_collision_2017 +Warning--empty year in rashid_local_2020 +You've used 11 entries, + 2118 wiz_defined-function locations, + 545 strings with 6118 characters, +and the built_in function-call counts, 3687 in all, are: += -- 348 +> -- 144 +< -- 4 ++ -- 60 +- -- 48 +* -- 243 +:= -- 525 +add.period$ -- 28 +call.type$ -- 11 +change.case$ -- 57 chr.to.int$ -- 0 -cite$ -- 14 -duplicate$ -- 89 -empty$ -- 331 -format.name$ -- 42 -if$ -- 838 +cite$ -- 25 +duplicate$ -- 154 +empty$ -- 330 +format.name$ -- 48 +if$ -- 811 int.to.chr$ -- 0 -int.to.str$ -- 7 -missing$ -- 33 -newline$ -- 50 -num.names$ -- 7 -pop$ -- 48 +int.to.str$ -- 11 +missing$ -- 7 +newline$ -- 52 +num.names$ -- 14 +pop$ -- 101 preamble$ -- 1 -purify$ -- 23 +purify$ -- 46 quote$ -- 0 -skip$ -- 145 +skip$ -- 136 stack$ -- 0 -substring$ -- 467 -swap$ -- 21 -text.length$ -- 16 +substring$ -- 235 +swap$ -- 50 +text.length$ -- 4 text.prefix$ -- 0 top$ -- 0 -type$ -- 28 -warning$ -- 7 -while$ -- 23 -width$ -- 8 -write$ -- 98 -(There were 10 warnings) +type$ -- 44 +warning$ -- 14 +while$ -- 29 +width$ -- 13 +write$ -- 94 +(There were 17 warnings) diff --git a/Praxiprojekt_Bericht/main.glo b/Praxiprojekt_Bericht/main.glo index bff351fd1372913b4e68a90aded8ab52c7b0a0cc..60ace19dc4310ec3011d001ef59f67fef7afe702 100644 --- a/Praxiprojekt_Bericht/main.glo +++ b/Praxiprojekt_Bericht/main.glo @@ -10,8 +10,8 @@ \glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{1} \glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} \glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{ToF-Sensor?\glossentry{ToF}|setentrycounter[]{page}"\glsnumberformat}{1} -\glossaryentry{ToF-Sensor?\glossentry{ToF}|setentrycounter[]{page}"\glsnumberformat}{1} +\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} +\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} \glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{1} \glossaryentry{UR?\glossentry{UR}|setentrycounter[]{page}"\glsnumberformat}{1} \glossaryentry{Cobot's?\glossentry{Cobots}|setentrycounter[]{page}"\glsnumberformat}{2} @@ -20,3 +20,9 @@ 
\glossaryentry{Pose?\glossentry{Pose}|setentrycounter[]{page}"\glsnumberformat}{2} \glossaryentry{Kollisionsobjekt?\glossentry{KO}|setentrycounter[]{page}"\glsnumberformat}{2} \glossaryentry{Pose?\glossentry{Pose}|setentrycounter[]{page}"\glsnumberformat}{2} +\glossaryentry{RGB-D?\glossentry{RGB-D}|setentrycounter[]{page}"\glsnumberformat}{3} +\glossaryentry{Arbeitsraum?\glossentry{AR}|setentrycounter[]{page}"\glsnumberformat}{3} +\glossaryentry{KI?\glossentry{KI}|setentrycounter[]{page}"\glsnumberformat}{3} +\glossaryentry{LIDAR?\glossentry{LIDAR}|setentrycounter[]{page}"\glsnumberformat}{3} +\glossaryentry{RGB-Kamera?\glossentry{RGB}|setentrycounter[]{page}"\glsnumberformat}{3} +\glossaryentry{ToF-Sensoren?\glossentry{ToFs}|setentrycounter[]{page}"\glsnumberformat}{3} diff --git a/Praxiprojekt_Bericht/main.log b/Praxiprojekt_Bericht/main.log index 74458cb7abddf05e425e3e7fe0b2697a5c210be2..a0a00965e11e9ad6c3c2527ee79ef79d3a39a513 100644 --- a/Praxiprojekt_Bericht/main.log +++ b/Praxiprojekt_Bericht/main.log @@ -1,4 +1,4 @@ -This is pdfTeX, Version 3.141592653-2.6-1.40.26 (TeX Live 2024) (preloaded format=pdflatex 2025.2.18) 19 FEB 2025 15:37 +This is pdfTeX, Version 3.141592653-2.6-1.40.26 (TeX Live 2024) (preloaded format=pdflatex 2025.2.18) 19 FEB 2025 20:07 entering extended mode restricted \write18 enabled. %&-line parsing enabled. @@ -390,20 +390,20 @@ File: l3backend-pdftex.def 2024-05-08 L3 backend support: PDF output (pdfTeX) (./main.aux) \openout1 = `main.aux'. -LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 40. -LaTeX Font Info: ... okay on input line 40. -LaTeX Font Info: Checking defaults for OMS/cmsy/m/n on input line 40. -LaTeX Font Info: ... okay on input line 40. -LaTeX Font Info: Checking defaults for OT1/cmr/m/n on input line 40. -LaTeX Font Info: ... okay on input line 40. -LaTeX Font Info: Checking defaults for T1/cmr/m/n on input line 40. -LaTeX Font Info: ... okay on input line 40. -LaTeX Font Info: Checking defaults for TS1/cmr/m/n on input line 40. -LaTeX Font Info: ... okay on input line 40. -LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 40. -LaTeX Font Info: ... okay on input line 40. -LaTeX Font Info: Checking defaults for U/cmr/m/n on input line 40. -LaTeX Font Info: ... okay on input line 40. +LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 49. +LaTeX Font Info: ... okay on input line 49. +LaTeX Font Info: Checking defaults for OMS/cmsy/m/n on input line 49. +LaTeX Font Info: ... okay on input line 49. +LaTeX Font Info: Checking defaults for OT1/cmr/m/n on input line 49. +LaTeX Font Info: ... okay on input line 49. +LaTeX Font Info: Checking defaults for T1/cmr/m/n on input line 49. +LaTeX Font Info: ... okay on input line 49. +LaTeX Font Info: Checking defaults for TS1/cmr/m/n on input line 49. +LaTeX Font Info: ... okay on input line 49. +LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 49. +LaTeX Font Info: ... okay on input line 49. +LaTeX Font Info: Checking defaults for U/cmr/m/n on input line 49. +LaTeX Font Info: ... okay on input line 49. (c:/texlive/2024/texmf-dist/tex/context/base/mkii/supp-pdf.mkii [Loading MPS to PDF converter (version 2006.09.02).] \scratchcounter=\count318 @@ -485,79 +485,23 @@ File: images/Cobots-Forecast-Global-Market-1024x576.jpg Graphic file (type jpg) <use images/Cobots-Forecast-Global-Market-1024x576.jpg> Package pdftex.def Info: images/Cobots-Forecast-Global-Market-1024x576.jpg use -d on input line 99. +d on input line 108. 
(pdftex.def) Requested size: 385.43906pt x 216.80946pt. [2 <./images/Cobots-Forecast-Global-Market-1024x576.jpg>] [3] -No file main.gls. - - -[4 - -] (./main.bbl -Underfull \hbox (badness 10000) in paragraph at lines 12--15 -[]\OT1/cmr/m/it/12 Can the col-la-bo-ra-ti-ve ro-bot mar-ket ex-pe-ri-ence a - [] - - -Underfull \hbox (badness 10000) in paragraph at lines 12--15 -\OT1/cmr/m/it/12 se-cond grow-th sur-ge in the post-pandemic era? - [] - - -Overfull \hbox (365.78387pt too wide) in paragraph at lines 12--15 -\OT1/cmtt/m/n/12 https://interactanalysis.com/insight/can-the-collaborative-rob -ot-market-experience-a-second-growth-surge-in-the-post-pandemic-era/ - [] - - -Underfull \hbox (badness 10000) in paragraph at lines 27--30 -[]\OT1/cmr/m/it/12 VL53L7CX - Time-of-Flight (ToF) 8x8 mul-ti-zo-ne ran- - [] - - -Underfull \hbox (badness 10000) in paragraph at lines 27--30 -\OT1/cmr/m/it/12 ging sen-sor with 90 de-grees FoV - STMi-cro-elec-tro-nics\OT1 -/cmr/m/n/12 . - [] - - -Underfull \hbox (badness 5022) in paragraph at lines 40--50 -[]\OT1/cmr/m/sc/12 Liu\OT1/cmr/m/n/12 , Li ; \OT1/cmr/m/sc/12 Guo\OT1/cmr/m/n/1 -2 , Fu ; \OT1/cmr/m/sc/12 Zou\OT1/cmr/m/n/12 , Zis-huai ; \OT1/cmr/m/sc/12 Duff -y\OT1/cmr/m/n/12 , Vin-cent G.: Ap-p-li-ca- - [] - - -Underfull \hbox (badness 10000) in paragraph at lines 40--50 -\OT1/cmr/m/n/12 ti-on, De-ve-lop-ment and Fu-ture Op-por-tu-ni-ties of Col-la-b -o-ra-ti-ve Ro-bots - [] - - -Underfull \hbox (badness 10000) in paragraph at lines 40--50 -\OT1/cmr/m/n/12 (Co-bots) in Ma-nu-fac-tu-ring: A Li-te-ra-ture Re-view. 40, N -r. 4, 915-- - [] +[4] +No file main.gls. -Underfull \hbox (badness 10000) in paragraph at lines 40--50 -\OT1/cmr/m/n/12 932. \OT1/cmtt/m/n/12 http://dx.doi.org/10.1080/10447318.2022. -2041907\OT1/cmr/m/n/12 . -- DOI - [] +[5 -Underfull \hbox (badness 1527) in paragraph at lines 40--50 -\OT1/cmr/m/n/12 10.1080/10447318.2022.2041907. -- ISSN 1044--7318. -- Pu-blis --her: Tay-lor & - [] +] (./main.bbl) -) - -[5 +[6 ] (./main.aux) *********** @@ -566,25 +510,23 @@ L3 programming layer <2025-01-18> *********** ) Here is how much of TeX's memory you used: - 10822 strings out of 473200 - 194215 string characters out of 5720278 - 643009 words of memory out of 5000000 - 33930 multiletter control sequences out of 15000+600000 - 566555 words of font info for 63 fonts, out of 8000000 for 9000 + 11006 strings out of 473200 + 196969 string characters out of 5720278 + 645969 words of memory out of 5000000 + 34116 multiletter control sequences out of 15000+600000 + 566085 words of font info for 61 fonts, out of 8000000 for 9000 1141 hyphenation exceptions out of 8191 94i,8n,93p,2365b,289s stack positions out of 10000i,1000n,20000p,200000b,200000s <c:/texlive/2024/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx10.pfb><c:/tex live/2024/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx12.pfb><c:/texlive/2024 -/texmf-dist/fonts/type1/public/amsfonts/cm/cmcsc10.pfb><c:/texlive/2024/texmf-d -ist/fonts/type1/public/amsfonts/cm/cmr10.pfb><c:/texlive/2024/texmf-dist/fonts/ -type1/public/amsfonts/cm/cmr12.pfb><c:/texlive/2024/texmf-dist/fonts/type1/publ -ic/amsfonts/cm/cmr17.pfb><c:/texlive/2024/texmf-dist/fonts/type1/public/amsfont -s/cm/cmti12.pfb><c:/texlive/2024/texmf-dist/fonts/type1/public/amsfonts/cm/cmtt -12.pfb> -Output written on main.pdf (8 pages, 186672 bytes). 
+/texmf-dist/fonts/type1/public/amsfonts/cm/cmr10.pfb><c:/texlive/2024/texmf-dis
+t/fonts/type1/public/amsfonts/cm/cmr12.pfb><c:/texlive/2024/texmf-dist/fonts/ty
+pe1/public/amsfonts/cm/cmr17.pfb><c:/texlive/2024/texmf-dist/fonts/type1/public
+/amsfonts/cm/cmti12.pfb>
+Output written on main.pdf (9 pages, 163796 bytes).
 PDF statistics:
- 72 PDF objects out of 1000 (max. 8388607)
- 44 compressed objects within 1 object stream
+ 65 PDF objects out of 1000 (max. 8388607)
+ 40 compressed objects within 1 object stream
 0 named destinations out of 1000 (max. 500000)
 6 words of extra memory for PDF output out of 10000 (max. 10000000)
diff --git a/Praxiprojekt_Bericht/main.pdf b/Praxiprojekt_Bericht/main.pdf
index a56c902701600a93d0f2ea94ae85777dd77d277f..748efe28db16fedb41ed6a33149edfb2320522c3 100644
Binary files a/Praxiprojekt_Bericht/main.pdf and b/Praxiprojekt_Bericht/main.pdf differ
diff --git a/Praxiprojekt_Bericht/main.synctex.gz b/Praxiprojekt_Bericht/main.synctex.gz
index 59814dd2f7a0a0a094d348ef86fdef0442ac21fd..79cf37cd293df5c2cbc005b00858d2c948491e82 100644
Binary files a/Praxiprojekt_Bericht/main.synctex.gz and b/Praxiprojekt_Bericht/main.synctex.gz differ
diff --git a/Praxiprojekt_Bericht/main.tex b/Praxiprojekt_Bericht/main.tex
index 41ef763ca414b58d9e750ead64af3c41c546ebfc..6c844217517bc19cca9ce30769db286bc4535857 100644
--- a/Praxiprojekt_Bericht/main.tex
+++ b/Praxiprojekt_Bericht/main.tex
@@ -16,11 +16,16 @@
 \cftsetindents{subsection}{3.8em}{3.2em} % Einrückung für Unterabschnitte
 
 \newacronym{UR}{UR}{Universal Robots}
+\newacronym{HRC}{HRC}{Human-Robot Collaboration}
 \newacronym{Cobot}{Cobot}{kollaborativer Roboter}
 \newacronym{PC}{PC}{persönlicher Computer}
 \newacronym{ToF}{ToF-Sensor}{Time-of-Flight-Sensor}
 \newacronym{ToFs}{ToF-Sensoren}{Time-of-Flight-Sensoren}
 \newacronym{Cobots}{Cobot's}{kollaborativen Robotern}
+\newacronym{RGB-D}{RGB-D}{Rot-Grün-Blau-Tiefen-Kamera}
+\newacronym{KI}{KI}{Künstliche Intelligenz}
+\newacronym{LIDAR}{LIDAR}{Light Detection and Ranging}
+\newacronym{RGB}{RGB-Kamera}{Rot-Grün-Blau-Kamera}
 
 \newglossaryentry{Pose}{
 	name={Pose},
@@ -30,6 +35,10 @@
 	name={Kollisionsobjekt},
 	description={Mensch oder Objekt innerhalb des Arbeitsraums des Industrie Roboters, das nicht Ziel der Manipulation ist}
 }
+\newglossaryentry{AR}{
+	name={Arbeitsraum},
+	description={Anteil des eingeschränkten Raumes, der während der Ausführung aller vom Anwenderprogramm vorgegebenen Bewegungen [vom Roboter] benutzt wird}
+}
 
 \title{Praxisprojekt Bericht
 {\large \\Mechatronische Entwicklung eines intrinsischen Time-of-Flight-Sensorsystems zur Kollisionsvermeidung in der Robotik}}
@@ -40,7 +49,7 @@
 \begin{document}
 % Start page numbering here
 \pagenumbering{arabic} % or "roman" for Roman numerals
-\bibliographystyle{plaindin}
+\bibliographystyle{plain}
 \maketitle
 \thispagestyle{empty} % Suppress page number on this page
 \newpage
@@ -75,20 +84,20 @@
 \section{Motivation}
 %Allgemeiner Überblick über das Thema
- Ich habe im Rahmen meines Praxisprojekts ein System entwickelt, welches zum Ziel hatte eine extrinsische Sensorüberwachung des Arbeitsraums von einem \acrshort{UR} zu realisieren.
- Mit dem Sensorinput kann man die Umgebung des Roboters überwachen und gegebenenfalls auf veränderte Umstände reagieren. Am meisten Anwendung hätte das System in Bereichen in denen es schwierig ist Routinen zu schaffen. Wie zum Beispiel bei der Kollaboration mit Menschen oder bei Arbeiten mit Objekten die nicht in Serie produziert werden.
+ Ich habe im Rahmen meines Praxisprojekts ein System entwickelt, welches zum Ziel hatte, eine extrinsische Sensorüberwachung des Arbeitsraums eines \acrfull{UR} zu realisieren.
+ Mit dem Sensorinput kann man die Umgebung des Roboters überwachen und gegebenenfalls auf veränderte Umstände reagieren. Anwendung fände das System vor allem in Bereichen, in denen es schwierig ist, Routinen zu schaffen, wie zum Beispiel bei der Kollaboration mit Menschen. Der Vorteil, den die extrinsische Sensorik mit sich bringt, ist, dass man nicht mehr auf die Detektion von Kollisionen beschränkt ist und auf unterschiedliche Arten präventiv eine Kollision verhindern kann.
 \\
 %Spezifizierung des Themas
 \indent In der Robotik werden unterschiedliche Arten von Sensoren verwendet um \acrshort{Cobots} sicherer zu machen. Im Groben unterscheidet man zwischen intrinsischen und extrinsischen Sensoren.\cite{noauthor_robotics_2021}
 Die intrinsischen Sensoren betrachten nur, was innerhalb eines Roboters gemessen werden kann. Das wäre zum Beispiel die Position der Gelenke, die vom Drehgeber eingelesen wird oder das Drehmoment, dass vom Drehmoment-Wächter in den Gelenken bestimmt wird. Mit Hilfe der gemessenen Drehmomente wird häufig erkannt, ob es zu einer Kollision mit umliegenden Objekten gekommen sein könnte.\cite{popov_collision_2017}
 Die Extrinsischen Sensoren betrachten die Umgebung des Roboters und liefern Informationen zu möglichen Kollisionsobjekten oder auch zu Objekten die mit dem Endeffektor des Roboters gegriffen werden sollen.
- Beispiele für extrinsische Sensoren wären unterschiedliche Arten von Kameras, Laserscanner (\acrshort{ToFs}) oder Ultraschallsensoren.\cite{li_common_2019}
+ Beispiele für extrinsische Sensoren wären unterschiedliche Arten von Kameras, Laserscannern (\acrshort{ToFs}) oder Ultraschallsensoren.\cite{li_common_2019}
 \\
 %Kern der Arbeit
- \indent Ich habe für die Erkennung von potentiellen Kollisionsobjekten in Reichweite der seriellen Kinematik vom \acrshort{UR} \acrshort{ToF} von STMicroelectronics verwenden.\cite{noauthor_vl53l7cx_nodate}
- Die \acrshort{ToF} senden einen Lichtimpuls aus und messen wie lange das Infrarot-Licht braucht um wieder vom Empfänger wahrgenommen zu werden.\cite{hering_sensoren_2018}
+ \indent Ich habe für die Erkennung von potentiellen Kollisionsobjekten in Reichweite der seriellen Kinematik vom \acrshort{UR} \acrshort{ToFs} von STMicroelectronics verwendet.\cite{noauthor_vl53l7cx_nodate}
+ \\
+ Die \acrshort{ToFs} senden einen Lichtimpuls aus und messen, wie lange das Infrarot-Licht braucht, um wieder vom Empfänger wahrgenommen zu werden.\cite{hering_sensoren_2018}
 Mein bestreben ist es herauszufinden in wie Weit \acrshort{ToFs} Hindernisse wahrnehmen können.
- Mit welchen anderen Sensor Informationen oder allgemeinen Angaben man die Daten fusionieren muss, um Kollisionen vermeiden zu können.
- Wie man die Daten der Sensoren verarbeiten muss, um eine sinnvolle digitale Projektion, des Arbeitsraums vom \acrshort{UR} zu erhalten.
+ Mit welchen anderen Sensorinformationen oder allgemeinen Angaben man die Daten fusionieren muss, um Kollisionen vermeiden zu können, und wie man die Daten der Sensoren verarbeiten muss, um eine sinnvolle digitale Projektion des Arbeitsraums vom \acrshort{UR} zu erhalten.
 \newpage
@@ -109,9 +118,22 @@
 \\
 Drehgeber und Drehmoment-Wächter sind propriozeptive Sensoren und können nur Eigenschaften messen die sich auf den inneren Status des Roboterarms beziehen.
\cite{noauthor_robotics_2021}
 \\
- \indent
- \subsection{Laserscanner oder Kamerasysteme}
- \subsection{ROS2 und MoveIt2}
+ \indent Bei der Kollisionsvermeidung kann man unterschiedliche Ansätze verfolgen.
+ Eine Option ist, mit einer Wärmebildkamera einen Menschen zu identifizieren und das Wärmebild dann mit dem Bild einer \acrfull{RGB-D} zu überlagern, um die Position des Menschen im \gls{AR} zu bestimmen.\cite{al_naser_fusion_2022}
+ \\
+ In einem anderen Paper wurde Kollisionsvermeidung erreicht, indem man den Roboter langsamer werden ließ, wenn er einem Menschen zu nahe kam. Die Distanz wurde gemessen, indem eine \acrfull{KI} den Video-Input einer Kinect-Kamera ausgewertet hat.\cite{amaya-mejia_vision-based_2022}
+ \\
+ Eine Methode zur Kollisionsvermeidung, die meinem Projekt ähnelt, ist die Sensorfusion aus Daten eines \acrfull{LIDAR}, einer \acrfull{RGB} und der Drehgeber zur Bestimmung des Abstandes zwischen Mensch und Roboter, etwa bei Anwendungen mit Schwerlastrobotern, deren Bremsweg zu lang wäre, um sich allein auf Kollisionserkennung zu verlassen. \cite{rashid_local_2020}
+
+ \subsection{Laserscanner und Kamerasysteme}
+ Laserscanner machen sich drei unterschiedliche physikalische Prinzipien zunutze.
+ \\
+ Die \acrshort{ToFs} messen die Zeit, die ein emittierter Lichtimpuls braucht, um zu einer Oberfläche zu gelangen, reflektiert zu werden und vom Empfänger des Scanners wieder wahrgenommen zu werden.
+ \\
+ Die zweite Methode zum Messen von Abständen mit Lasern macht sich die optische Interferenz zunutze. Es werden zwei Lichtwellen mit unterschiedlicher Wellenlänge emittiert, und basierend darauf, inwieweit sich die Wellen auf dem Weg vom Sensor zur Reflexionsfläche und zurück gegenseitig verstärken oder auslöschen, kann die Distanz ermittelt werden.
+ \\
+ Bei der dritten Methode zur Abstandsbestimmung mit Lasern wird der Laserstrahl geneigt, und basierend darauf, wo der Laser auf einem großflächigen Detektor auftrifft, kann der Abstand zwischen Sensor und Messobjekt trianguliert werden.
+ \cite{li_common_2019} \cite{jain_survey_nodate}
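+ \\
+ Zur Veranschaulichung des Laufzeitverfahrens eine kurze Skizze: Der Lichtimpuls legt die Strecke zwischen Sensor und Oberfläche zweimal zurück, sodass sich der Abstand $d$ aus der gemessenen Laufzeit $\Delta t$ und der Lichtgeschwindigkeit $c$ ergibt:
+ \begin{equation}
+ 	d = \frac{c \cdot \Delta t}{2}
+ \end{equation}
+ Für die Triangulation gilt analog näherungsweise $d \approx \frac{f \cdot b}{x}$, mit der Basislänge $b$ zwischen Laser und Detektor, der Brennweite $f$ der Empfangsoptik und der Position $x$ des Lichtflecks auf dem Detektor.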
 \section{Umsetzung}
 \subsection{Vorgehensweise}
 \subsection{Software}
diff --git a/Praxiprojekt_Bericht/main.toc b/Praxiprojekt_Bericht/main.toc
index 1507c0c7c3e9ddd7fd19a7da01b7f8c3144f5bac..ea468ba12fc16b59f9d2a0511d26a5751fb0d4d5 100644
--- a/Praxiprojekt_Bericht/main.toc
+++ b/Praxiprojekt_Bericht/main.toc
@@ -2,16 +2,15 @@
 \contentsline {section}{\numberline {1}Motivation}{1}{}%
 \contentsline {section}{\numberline {2}Stand der Technik}{2}{}%
 \contentsline {subsection}{\numberline {2.1}Kollisionsvermeidung und Kollisionserkennung}{2}{}%
-\contentsline {subsection}{\numberline {2.2}Laserscanner oder Kamerasysteme}{3}{}%
-\contentsline {subsection}{\numberline {2.3}ROS2 und MoveIt2}{3}{}%
-\contentsline {section}{\numberline {3}Umsetzung}{3}{}%
-\contentsline {subsection}{\numberline {3.1}Vorgehensweise}{3}{}%
-\contentsline {subsection}{\numberline {3.2}Software}{3}{}%
-\contentsline {subsubsection}{\numberline {3.2.1}Arduino}{3}{}%
-\contentsline {subsubsection}{\numberline {3.2.2}Robot Operating System 2}{3}{}%
-\contentsline {subsubsection}{\numberline {3.2.3}RVIZ2 und Gazebo Classic}{3}{}%
-\contentsline {subsection}{\numberline {3.3}Hardware}{3}{}%
-\contentsline {subsubsection}{\numberline {3.3.1}Elektronisch}{3}{}%
-\contentsline {subsubsection}{\numberline {3.3.2}Mechanisch}{3}{}%
-\contentsline {section}{\numberline {4}Ergebnis}{3}{}%
-\contentsline {section}{\numberline {5}Ausblick}{3}{}%
+\contentsline {subsection}{\numberline {2.2}Laserscanner und Kamerasysteme}{3}{}%
+\contentsline {section}{\numberline {3}Umsetzung}{4}{}%
+\contentsline {subsection}{\numberline {3.1}Vorgehensweise}{4}{}%
+\contentsline {subsection}{\numberline {3.2}Software}{4}{}%
+\contentsline {subsubsection}{\numberline {3.2.1}Arduino}{4}{}%
+\contentsline {subsubsection}{\numberline {3.2.2}Robot Operating System 2}{4}{}%
+\contentsline {subsubsection}{\numberline {3.2.3}RVIZ2 und Gazebo Classic}{4}{}%
+\contentsline {subsection}{\numberline {3.3}Hardware}{4}{}%
+\contentsline {subsubsection}{\numberline {3.3.1}Elektronisch}{4}{}%
+\contentsline {subsubsection}{\numberline {3.3.2}Mechanisch}{4}{}%
+\contentsline {section}{\numberline {4}Ergebnis}{4}{}%
+\contentsline {section}{\numberline {5}Ausblick}{4}{}%