\relax \providecommand\hyper@newdestlabel[2]{} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {chapter}{\numberline {2}Theoretische Grundlagen}{4}{chapter.2}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\addvspace {10\p@ }} \@writefile{lot}{\defcounter {refsection}{0}\relax }\@writefile{lot}{\addvspace {10\p@ }} \@writefile{loa}{\defcounter {refsection}{0}\relax }\@writefile{loa}{\addvspace {10\p@ }} \newlabel{sec:grundlagen}{{2}{4}{Theoretische Grundlagen}{chapter.2}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {2.1}Machine Learning}{4}{section.2.1}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.1}Lernalgorithmen nach Aufgaben}{4}{subsection.2.1.1}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.1.1}Klassifizierung}{4}{subsubsection.2.1.1.1}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.1.2}Regression}{5}{subsubsection.2.1.1.2}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.1.3}Transkription}{5}{subsubsection.2.1.1.3}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.1.4}Maschinelle Übersetzung}{5}{subsubsection.2.1.1.4}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.1.5}Erkennung von Anomalien}{5}{subsubsection.2.1.1.5}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.2}Lernalgorithmen nach Erfahrung}{5}{subsection.2.1.2}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.2.1}Überwachtes Lernen}{5}{subsubsection.2.1.2.1}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.2.2}Unüberwachtes Lernen}{6}{subsubsection.2.1.2.2}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.2.3}Reinforcement Learning}{6}{subsubsection.2.1.2.3}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.1}{\ignorespaces Prinzipielle Darstellung des Reinforcement Learning \cite [400]{Zhou.2021}\relax }}{6}{figure.caption.8}\protected@file@percent } \newlabel{fig:Reinforcement Learning}{{2.1}{6}{Prinzipielle Darstellung des Reinforcement Learning \cite [400]{Zhou.2021}\relax }{figure.caption.8}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.3}Generalisierung}{6}{subsection.2.1.3}\protected@file@percent } \newlabel{sec:Generalisierung}{{2.1.3}{6}{Generalisierung}{subsection.2.1.3}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.2}{\ignorespaces Prinzipielle Darstellung des Generalisierungsfehlers anhand der Modellkomplexität \cite [127]{Goodfellow.2018}\relax 
}}{7}{figure.caption.9}\protected@file@percent } \newlabel{fig:Generalisierungsfehler}{{2.2}{7}{Prinzipielle Darstellung des Generalisierungsfehlers anhand der Modellkomplexität \cite [127]{Goodfellow.2018}\relax }{figure.caption.9}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.4}Hyperparameter und Validierung}{8}{subsection.2.1.4}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.3}{\ignorespaces Prinzipielle Darstellung der k-fachen Kreuzvalidierung \cite [135]{Goodfellow.2018} und \cite [207]{Raschka.2018}\relax }}{9}{figure.caption.10}\protected@file@percent } \newlabel{fig:Kreuzvalidierung}{{2.3}{9}{Prinzipielle Darstellung der k-fachen Kreuzvalidierung \cite [135]{Goodfellow.2018} und \cite [207]{Raschka.2018}\relax }{figure.caption.10}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.5}Maximum-Likelihood-Schätzung}{9}{subsection.2.1.5}\protected@file@percent } \newlabel{sec:MaximumLikelihood}{{2.1.5}{9}{Maximum-Likelihood-Schätzung}{subsection.2.1.5}{}} \newlabel{eq:formelMaximumLikelihoodMLLog}{{1}{9}{Maximum-Likelihood-Schätzung}{equation.2.1.1}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.6}Stochastisches Gradientenabstiegsverfahren}{10}{subsection.2.1.6}\protected@file@percent } \newlabel{sec:StochastischesGradientenabstiegsverfahren}{{2.1.6}{10}{Stochastisches Gradientenabstiegsverfahren}{subsection.2.1.6}{}} \newlabel{eq:formelVerlustNegativeLogLikelihoodML}{{2}{10}{Stochastisches Gradientenabstiegsverfahren}{equation.2.1.2}{}} \newlabel{eq:formelLKostenfunktion}{{3}{10}{Stochastisches Gradientenabstiegsverfahren}{equation.2.1.3}{}} \newlabel{eq:formelGradientML}{{4}{10}{Stochastisches Gradientenabstiegsverfahren}{equation.2.1.4}{}} \newlabel{eq:formelGradientMiniBatch}{{5}{11}{Stochastisches Gradientenabstiegsverfahren}{equation.2.1.5}{}} \newlabel{eq:formelGradientStochasticDescent}{{6}{11}{Stochastisches Gradientenabstiegsverfahren}{equation.2.1.6}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {2.2}Künstliche tiefe neuronale Netze}{11}{section.2.2}\protected@file@percent } \newlabel{sec:tiefeNetze}{{2.2}{11}{Künstliche tiefe neuronale Netze}{section.2.2}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Tiefe Feedforward-Netze}{11}{subsection.2.2.1}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.4}{\ignorespaces Prinzipielle Darstellung eines k\"unstlichen Neurons \cite [105]{Zhou.2021}\relax }}{12}{figure.caption.11}\protected@file@percent } \newlabel{fig:Neuron}{{2.4}{12}{Prinzipielle Darstellung eines k\"unstlichen Neurons \cite [105]{Zhou.2021}\relax }{figure.caption.11}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.5}{\ignorespaces Prinzipielle Darstellung eines \gls {MLP} \cite [191]{Goodfellow.2018}, \cite [387-389]{Raschka.2018}\relax }}{12}{figure.caption.12}\protected@file@percent } \newlabel{fig:Neuronales Netz}{{2.5}{12}{Prinzipielle Darstellung eines \gls {MLP} \cite [191]{Goodfellow.2018}, \cite [387-389]{Raschka.2018}\relax }{figure.caption.12}{}} \newlabel{eq:formelNeuron}{{7}{13}{Tiefe 
Feedforward-Netze}{equation.2.2.7}{}} \newlabel{eq:formelMLPInput}{{8}{13}{Tiefe Feedforward-Netze}{equation.2.2.8}{}} \newlabel{eq:formelMLPHidden}{{9}{13}{Tiefe Feedforward-Netze}{equation.2.2.9}{}} \newlabel{eq:formelMLPOutput}{{10}{13}{Tiefe Feedforward-Netze}{equation.2.2.10}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.2}Aktivierungsfunktionen}{14}{subsection.2.2.2}\protected@file@percent } \newlabel{eq:formelReLU}{{11}{14}{Aktivierungsfunktionen}{equation.2.2.11}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.6}{\ignorespaces Plot der \gls {ReLU}-Funktion \cite [192]{Goodfellow.2018}\relax }}{14}{figure.caption.13}\protected@file@percent } \newlabel{fig:ReLU}{{2.6}{14}{Plot der \gls {ReLU}-Funktion \cite [192]{Goodfellow.2018}\relax }{figure.caption.13}{}} \newlabel{eq:formelSigmoid}{{12}{14}{Aktivierungsfunktionen}{equation.2.2.12}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.7}{\ignorespaces Plot der Sigmoid-Funktion \cite [391]{Raschka.2018}\relax }}{15}{figure.caption.14}\protected@file@percent } \newlabel{fig:Sigmoid}{{2.7}{15}{Plot der Sigmoid-Funktion \cite [391]{Raschka.2018}\relax }{figure.caption.14}{}} \newlabel{eq:formelSoftmax}{{13}{15}{Aktivierungsfunktionen}{equation.2.2.13}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.8}{\ignorespaces Plot der \gls {ReLU}6-Funktion \cite {Sandler}\relax }}{16}{figure.caption.15}\protected@file@percent } \newlabel{fig:ReLU6}{{2.8}{16}{Plot der \gls {ReLU}6-Funktion \cite {Sandler}\relax }{figure.caption.15}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.3}Backpropagation}{16}{subsection.2.2.3}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.4}Regularisierung}{16}{subsection.2.2.4}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.4.1}Parameter-Norm-Strafterme L2- und L1-Regularisierung}{16}{subsubsection.2.2.4.1}\protected@file@percent } \newlabel{eq:formelL2Regularisierung}{{14}{17}{Parameter-Norm-Strafterme L2- und L1-Regularisierung}{equation.2.2.14}{}} \newlabel{eq:formelL2RegularisierungGesamtzielfunktion}{{15}{17}{Parameter-Norm-Strafterme L2- und L1-Regularisierung}{equation.2.2.15}{}} \newlabel{eq:formelL2RegularisierungGesamtzielfunktionGradient}{{16}{17}{Parameter-Norm-Strafterme L2- und L1-Regularisierung}{equation.2.2.16}{}} \newlabel{eq:formelL2RegularisierungGesamtzielfunktionGradientenschritt}{{17}{17}{Parameter-Norm-Strafterme L2- und L1-Regularisierung}{equation.2.2.17}{}} \newlabel{eq:formelL1Regularisierung}{{18}{17}{Parameter-Norm-Strafterme L2- und L1-Regularisierung}{equation.2.2.18}{}} \newlabel{eq:formelL1RegularisierungGesamtzielfunktion}{{19}{17}{Parameter-Norm-Strafterme L2- und L1-Regularisierung}{equation.2.2.19}{}} \newlabel{eq:formelL1RegularisierungGesamtzielfunktionGradient}{{20}{17}{Parameter-Norm-Strafterme L2- und L1-Regularisierung}{equation.2.2.20}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.4.2}Erweitern des Datensatzes / Data Augmentation}{18}{subsubsection.2.2.4.2}\protected@file@percent } 
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.4.3}Früher Abbruch}{18}{subsubsection.2.2.4.3}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.4.4}Dropout}{18}{subsubsection.2.2.4.4}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.5}Optimierung beim Trainieren neuronaler Netze}{19}{subsection.2.2.5}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.5.1}Stochastisches Gradientenabstiegsverfahren}{19}{subsubsection.2.2.5.1}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.5.2}Momentum}{19}{subsubsection.2.2.5.2}\protected@file@percent } \newlabel{eq:formelMomentum}{{21}{19}{Momentum}{equation.2.2.21}{}} \newlabel{eq:formelGradientStochasticDescentMomentum}{{22}{19}{Momentum}{equation.2.2.22}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.5.3}Adaptive Lernraten}{20}{subsubsection.2.2.5.3}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.9}{\ignorespaces Lernrate mit Warm Up und Cosine Decay\relax }}{20}{figure.caption.16}\protected@file@percent } \newlabel{fig:CosineDecay}{{2.9}{20}{Lernrate mit Warm Up und Cosine Decay\relax }{figure.caption.16}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.5.4}Anpassung der Merkmale mit Standardisierung und Normierung}{20}{subsubsection.2.2.5.4}\protected@file@percent } \newlabel{StandardisierungNormierung}{{2.2.5.4}{20}{Anpassung der Merkmale mit Standardisierung und Normierung}{subsubsection.2.2.5.4}{}} \newlabel{eq:Normierung}{{23}{21}{Anpassung der Merkmale mit Standardisierung und Normierung}{equation.2.2.23}{}} \newlabel{eq:Standardisierung}{{24}{21}{Anpassung der Merkmale mit Standardisierung und Normierung}{equation.2.2.24}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.5.5}Batch-Normalisierung}{21}{subsubsection.2.2.5.5}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.6}Konvolutionale Neuronale Netze}{21}{subsection.2.2.6}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.10}{\ignorespaces Darstellung eines CNN nach der LeNet-5-Struktur \cite {Zhou.182017}\relax }}{22}{figure.caption.17}\protected@file@percent } \newlabel{fig:CNNLeNet5}{{2.10}{22}{Darstellung eines CNN nach der LeNet-5-Struktur \cite {Zhou.182017}\relax }{figure.caption.17}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.6.1}Faltungsoperation mittels Kernel / Convolution}{22}{subsubsection.2.2.6.1}\protected@file@percent } \newlabel{eq:ZweidimensionaleFaltung1}{{25}{23}{Faltungsoperation mittels Kernel / Convolution}{equation.2.2.25}{}} \newlabel{eq:ZweidimensionaleFaltungKernelFlipping}{{26}{23}{Faltungsoperation mittels Kernel / Convolution}{equation.2.2.26}{}} 
\newlabel{eq:ZweidimensionaleFaltungKreuzkorrelation}{{27}{23}{Faltungsoperation mittels Kernel / Convolution}{equation.2.2.27}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.11}{\ignorespaces Darstellung der Convolution mit Valid-Padding und Schrittweite 1 \cite [373]{Goodfellow.2018}\relax }}{24}{figure.caption.18}\protected@file@percent } \newlabel{fig:Convolution}{{2.11}{24}{Darstellung der Convolution mit Valid-Padding und Schrittweite 1 \cite [373]{Goodfellow.2018}\relax }{figure.caption.18}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.6.2}Pooling / Max-Pooling}{25}{subsubsection.2.2.6.2}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.12}{\ignorespaces Darstellung des Max-Pooling \cite [500]{Raschka.2018}\relax }}{25}{figure.caption.19}\protected@file@percent } \newlabel{fig:MaxPooling}{{2.12}{25}{Darstellung des Max-Pooling \cite [500]{Raschka.2018}\relax }{figure.caption.19}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {2.3}Objekterkennung mittels neuronaler Netze}{26}{section.2.3}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.1}Parameter zur Objektidentifikation}{26}{subsection.2.3.1}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.2}Netzarchitekturen}{26}{subsection.2.3.2}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.2.1}Single Shot MultiBox Detector}{26}{subsubsection.2.3.2.1}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.13}{\ignorespaces Architektur des \gls {SSD} mit VGG-16 als Basisnetz \cite {Liu.2015}\relax }}{27}{figure.caption.20}\protected@file@percent } \newlabel{fig:SSD}{{2.13}{27}{Architektur des \gls {SSD} mit VGG-16 als Basisnetz \cite {Liu.2015}\relax }{figure.caption.20}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.14}{\ignorespaces Ground Truth Boxen und Standardboxen des \gls {SSD} in den 8x8 und 4x4 Feature Maps \cite {Liu.2015}\relax }}{27}{figure.caption.21}\protected@file@percent } \newlabel{fig:SSDDefaultBoxen}{{2.14}{27}{Ground Truth Boxen und Standardboxen des \gls {SSD} in den 8x8 und 4x4 Feature Maps \cite {Liu.2015}\relax }{figure.caption.21}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.2.2}Mobilenet V1 / V2 als Basisnetz zum \gls {SSD}}{28}{subsubsection.2.3.2.2}\protected@file@percent } \newlabel{eq:ordinaryConvolution}{{28}{28}{Mobilenet V1 / V2 als Basisnetz zum \gls {SSD}}{equation.2.3.28}{}} \newlabel{eq:ordinaryComputationalCost}{{29}{28}{Mobilenet V1 / V2 als Basisnetz zum \gls {SSD}}{equation.2.3.29}{}} \newlabel{eq:DepthwiseConvolution}{{30}{28}{Mobilenet V1 / V2 als Basisnetz zum \gls {SSD}}{equation.2.3.30}{}} \newlabel{eq:DepthwiseComputationalCost}{{31}{29}{Mobilenet V1 / V2 als Basisnetz zum \gls {SSD}}{equation.2.3.31}{}} \newlabel{eq:DepthwiseSeparabelComputationalCost}{{32}{29}{Mobilenet V1 / V2 als Basisnetz zum \gls {SSD}}{equation.2.3.32}{}} 
\newlabel{eq:ComputationalCostQuotient}{{33}{29}{Mobilenet V1 / V2 als Basisnetz zum \gls {SSD}}{equation.2.3.33}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.15}{\ignorespaces Depthwise Separable Convolution Block \cite {Sandler}\relax }}{29}{figure.caption.22}\protected@file@percent } \newlabel{fig:MobilenetV1}{{2.15}{29}{Depthwise Separable Convolution Block \cite {Sandler}\relax }{figure.caption.22}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.16}{\ignorespaces Bottleneck Depth-Separable Convolution with residuals Block \cite {Sandler}\relax }}{30}{figure.caption.23}\protected@file@percent } \newlabel{fig:MobilenetV2}{{2.16}{30}{Bottleneck Depth-Separable Convolution with residuals Block \cite {Sandler}\relax }{figure.caption.23}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.2.3}\gls {SSD}Lite}{30}{subsubsection.2.3.2.3}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.2.4}ResNet-50 als Basisnetz zum \gls {SSD}}{31}{subsubsection.2.3.2.4}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.17}{\ignorespaces Convolution mit Residual Block \cite {He.2015}\relax }}{31}{figure.caption.24}\protected@file@percent } \newlabel{fig:ResNet}{{2.17}{31}{Convolution mit Residual Block \cite {He.2015}\relax }{figure.caption.24}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.2.5}Feature Pyramid Network}{31}{subsubsection.2.3.2.5}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.18}{\ignorespaces Prinzipdarstellung des Feature Pyramid Network links mit Bottom-up und rechts mit Top-down Pathway \cite {Lin.1292016}\relax }}{32}{figure.caption.25}\protected@file@percent } \newlabel{fig:FPN}{{2.18}{32}{Prinzipdarstellung des Feature Pyramid Network links mit Bottom-up und rechts mit Top-down Pathway \cite {Lin.1292016}\relax }{figure.caption.25}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.3}Evaluierung der Objekterkennung}{32}{subsection.2.3.3}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.3.1}Konfusionsmatrix}{32}{subsubsection.2.3.3.1}\protected@file@percent } \@writefile{lot}{\defcounter {refsection}{0}\relax }\@writefile{lot}{\contentsline {table}{\numberline {2.1}{\ignorespaces Darstellung der Konfusionsmatrix\relax }}{33}{table.caption.26}\protected@file@percent } \newlabel{tab:Konfusionmatrix}{{2.1}{33}{Darstellung der Konfusionsmatrix\relax }{table.caption.26}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.3.2}Accuracy}{33}{subsubsection.2.3.3.2}\protected@file@percent } \newlabel{eq:Accuracy}{{34}{33}{Accuracy}{equation.2.3.34}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.3.3}Precision und Recall}{33}{subsubsection.2.3.3.3}\protected@file@percent } \newlabel{PrecisionRecall}{{2.3.3.3}{33}{Precision und Recall}{subsubsection.2.3.3.3}{}} 
\newlabel{eq:Precision}{{35}{33}{Precision und Recall}{equation.2.3.35}{}} \newlabel{eq:Recall}{{36}{33}{Precision und Recall}{equation.2.3.36}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.3.4}Intersection over Union}{34}{subsubsection.2.3.3.4}\protected@file@percent } \newlabel{eq:IoU}{{37}{34}{Intersection over Union}{equation.2.3.37}{}} \newlabel{fig:ven-1a}{{2.19a}{34}{Subfigure 2 2.19a}{subfigure.2.19.1}{}} \newlabel{sub@fig:ven-1a}{{(a)}{a}{Subfigure 2 2.19a\relax }{subfigure.2.19.1}{}} \newlabel{fig:ven-1b}{{2.19b}{34}{Subfigure 2 2.19b}{subfigure.2.19.2}{}} \newlabel{sub@fig:ven-1b}{{(b)}{b}{Subfigure 2 2.19b\relax }{subfigure.2.19.2}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.19}{\ignorespaces Darstellung der Area of union / intersection \cite [5]{Arulprakash.2021}\relax }}{34}{figure.caption.27}\protected@file@percent } \newlabel{fig:IoU}{{2.19}{34}{Darstellung der Area of union / intersection \cite [5]{Arulprakash.2021}\relax }{figure.caption.27}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {subfigure}{\numberline{(a)}{\ignorespaces {Area of union }}}{34}{subfigure.19.1}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {subfigure}{\numberline{(b)}{\ignorespaces {Area of intersection }}}{34}{subfigure.19.2}\protected@file@percent } \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.3.5}Mean Average Precision / Average Precision / Average Recall}{34}{subsubsection.2.3.3.5}\protected@file@percent } \newlabel{eq:APGeneral}{{38}{34}{Mean Average Precision / Average Precision / Average Recall}{equation.2.3.38}{}} \newlabel{eq:PrecisionInterpol}{{39}{35}{Mean Average Precision / Average Precision / Average Recall}{equation.2.3.39}{}} \newlabel{eq:APSum}{{40}{35}{Mean Average Precision / Average Precision / Average Recall}{equation.2.3.40}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {2.4}Depth Estimation mittels neuronaler Netze}{35}{section.2.4}\protected@file@percent } \newlabel{DepthEstimation}{{2.4}{35}{Depth Estimation mittels neuronaler Netze}{section.2.4}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.4.1}MiDaS}{35}{subsection.2.4.1}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.20}{\ignorespaces Basisarchitektur des MiDaS \cite {Xian_2018_CVPR}\relax }}{36}{figure.caption.28}\protected@file@percent } \newlabel{fig:DepthEstimationCNNXian}{{2.20}{36}{Basisarchitektur des MiDaS \cite {Xian_2018_CVPR}\relax }{figure.caption.28}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.21}{\ignorespaces Encoder und Decoder in einem CNN \cite {EncoderDecoder7803544}\relax }}{36}{figure.caption.29}\protected@file@percent } \newlabel{fig:EncoderDecoder}{{2.21}{36}{Encoder und Decoder in einem CNN \cite {EncoderDecoder7803544}\relax }{figure.caption.29}{}} \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.22}{\ignorespaces Resultate des MiDaS \cite {Ranftl.2019}\relax }}{37}{figure.caption.30}\protected@file@percent } 
\newlabel{fig:DepthEstimationResults}{{2.22}{37}{Resultate des MiDaS \cite {Ranftl.2019}\relax }{figure.caption.30}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.4.2}PyDnet}{37}{subsection.2.4.2}\protected@file@percent } \@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2.23}{\ignorespaces Architektur des PyDNet \cite {Poggi.2018}\relax }}{37}{figure.caption.31}\protected@file@percent } \newlabel{fig:PyDnet}{{2.23}{37}{Architektur des PyDNet \cite {Poggi.2018}\relax }{figure.caption.31}{}} \@setckpt{grundlagen}{ \setcounter{page}{38} \setcounter{equation}{40} \setcounter{enumi}{0} \setcounter{enumii}{0} \setcounter{enumiii}{0} \setcounter{enumiv}{0} \setcounter{footnote}{0} \setcounter{mpfootnote}{0} \setcounter{part}{0} \setcounter{chapter}{2} \setcounter{section}{4} \setcounter{subsection}{2} \setcounter{subsubsection}{0} \setcounter{paragraph}{0} \setcounter{subparagraph}{0} \setcounter{figure}{23} \setcounter{table}{1} \setcounter{Item}{0} \setcounter{Hfootnote}{0} \setcounter{bookmark@seq@number}{0} \setcounter{caption@flags}{0} \setcounter{continuedfloat}{0} \setcounter{KVtest}{0} \setcounter{subfigure}{0} \setcounter{subfigure@save}{2} \setcounter{lofdepth}{1} \setcounter{subtable}{0} \setcounter{subtable@save}{0} \setcounter{lotdepth}{1} \setcounter{AM@survey}{0} \setcounter{ALC@unique}{0} \setcounter{ALC@line}{0} \setcounter{ALC@rem}{0} \setcounter{ALC@depth}{0} \setcounter{AlgoLine}{0} \setcounter{algocfline}{0} \setcounter{algocfproc}{0} \setcounter{algocf}{0} \setcounter{parentequation}{0} \setcounter{su@anzahl}{0} \setcounter{LT@tables}{1} \setcounter{LT@chunks}{2} \setcounter{tabx@nest}{0} \setcounter{listtotal}{0} \setcounter{listcount}{0} \setcounter{liststart}{0} \setcounter{liststop}{0} \setcounter{citecount}{0} \setcounter{citetotal}{0} \setcounter{multicitecount}{0} \setcounter{multicitetotal}{0} \setcounter{instcount}{335} \setcounter{maxnames}{3} \setcounter{minnames}{1} \setcounter{maxitems}{3} \setcounter{minitems}{1} \setcounter{citecounter}{0} \setcounter{maxcitecounter}{0} \setcounter{savedcitecounter}{0} \setcounter{uniquelist}{0} \setcounter{uniquename}{0} \setcounter{refsection}{0} \setcounter{refsegment}{0} \setcounter{maxextratitle}{0} \setcounter{maxextratitleyear}{0} \setcounter{maxextraname}{9} \setcounter{maxextradate}{0} \setcounter{maxextraalpha}{0} \setcounter{abbrvpenalty}{50} \setcounter{highnamepenalty}{50} \setcounter{lownamepenalty}{25} \setcounter{maxparens}{3} \setcounter{parenlevel}{0} \setcounter{mincomprange}{10} \setcounter{maxcomprange}{100000} \setcounter{mincompwidth}{1} \setcounter{afterword}{0} \setcounter{savedafterword}{0} \setcounter{annotator}{0} \setcounter{savedannotator}{0} \setcounter{author}{0} \setcounter{savedauthor}{0} \setcounter{bookauthor}{0} \setcounter{savedbookauthor}{0} \setcounter{commentator}{0} \setcounter{savedcommentator}{0} \setcounter{editor}{0} \setcounter{savededitor}{0} \setcounter{editora}{0} \setcounter{savededitora}{0} \setcounter{editorb}{0} \setcounter{savededitorb}{0} \setcounter{editorc}{0} \setcounter{savededitorc}{0} \setcounter{foreword}{0} \setcounter{savedforeword}{0} \setcounter{holder}{0} \setcounter{savedholder}{0} \setcounter{introduction}{0} \setcounter{savedintroduction}{0} \setcounter{namea}{0} \setcounter{savednamea}{0} \setcounter{nameb}{0} \setcounter{savednameb}{0} \setcounter{namec}{0} \setcounter{savednamec}{0} \setcounter{translator}{0} 
\setcounter{savedtranslator}{0} \setcounter{shortauthor}{0} \setcounter{savedshortauthor}{0} \setcounter{shorteditor}{0} \setcounter{savedshorteditor}{0} \setcounter{labelname}{0} \setcounter{savedlabelname}{0} \setcounter{institution}{0} \setcounter{savedinstitution}{0} \setcounter{lista}{0} \setcounter{savedlista}{0} \setcounter{listb}{0} \setcounter{savedlistb}{0} \setcounter{listc}{0} \setcounter{savedlistc}{0} \setcounter{listd}{0} \setcounter{savedlistd}{0} \setcounter{liste}{0} \setcounter{savedliste}{0} \setcounter{listf}{0} \setcounter{savedlistf}{0} \setcounter{location}{0} \setcounter{savedlocation}{0} \setcounter{organization}{0} \setcounter{savedorganization}{0} \setcounter{origlocation}{0} \setcounter{savedoriglocation}{0} \setcounter{origpublisher}{0} \setcounter{savedorigpublisher}{0} \setcounter{publisher}{0} \setcounter{savedpublisher}{0} \setcounter{language}{0} \setcounter{savedlanguage}{0} \setcounter{origlanguage}{0} \setcounter{savedoriglanguage}{0} \setcounter{pageref}{0} \setcounter{savedpageref}{0} \setcounter{textcitecount}{0} \setcounter{textcitetotal}{0} \setcounter{textcitemaxnames}{0} \setcounter{biburlbigbreakpenalty}{100} \setcounter{biburlbreakpenalty}{200} \setcounter{biburlnumpenalty}{0} \setcounter{biburlucpenalty}{0} \setcounter{biburllcpenalty}{0} \setcounter{smartand}{1} \setcounter{bbx:relatedcount}{0} \setcounter{bbx:relatedtotal}{0} \setcounter{section@level}{2} }