From fec065cc44115d94ae59d1e4f51a4565ac0ce12c Mon Sep 17 00:00:00 2001 From: Frank Schultz Date: Thu, 26 Oct 2023 19:23:39 +0200 Subject: [PATCH] Update ddasp_exercise_slides.tex - SVD / Subspaces improved/re-ordered - slides for tutorial sketches --- slides/ddasp_exercise_slides.tex | 409 ++++++++++++++++++++++++------- 1 file changed, 326 insertions(+), 83 deletions(-) diff --git a/slides/ddasp_exercise_slides.tex b/slides/ddasp_exercise_slides.tex index b2bf284..326c611 100644 --- a/slides/ddasp_exercise_slides.tex +++ b/slides/ddasp_exercise_slides.tex @@ -298,7 +298,9 @@ \section{Ex02: SVD / 4 Subspaces} $\bm{X}$ might be complex-valued -if some $\lambda_m=0$, we know that for this eigenvector we get $\bm{A} \bm{x}_m = 0 \bm{x}_m = \bm{0}$, i.e. $\bm{A}$ is a singular matrix, i.e. $\bm{A}$ is a non-full rank matrix +if $\lambda_m=0$ we get $\bm{A} \bm{x}_m = 0 \cdot \bm{x}_m = \bm{0}$, i.e. $\bm{A}$ is a singular matrix, i.e. $\bm{A}$ is a non-full rank matrix + +rank of matrix $\bm{A}$ is $R$ == number of non-zero eigenvalues \end{frame} @@ -327,15 +329,51 @@ \section{Ex02: SVD / 4 Subspaces} $ \end{center} -the matrix is acting onto $m$-th eigenvector as +the matrix is acting onto the $m$-th eigenvector as $$\bm{A} \bm{q}_m = \lambda_m \bm{q}_m$$ -$\Lambda\in\mathbb{R}$ +$\bm{\Lambda}\in\mathbb{R}$ $\bm{Q}\in\mathbb{R}$ if $\bm{A}\in\mathbb{R}$, $\bm{Q}\in\mathbb{C}$ if $\bm{A}\in\mathbb{C}$ -if some $\lambda_m=0$ we know that for this eigenvector we get $\bm{A} \bm{q}_m = 0 \bm{q}_m = \bm{0}$, i.e. $\bm{A}$ is a singular matrix, i.e. $\bm{A}$ is a non-full rank matrix +if $\lambda_m=0$ we get $\bm{A} \bm{q}_m = 0 \cdot \bm{q}_m = \bm{0}$, i.e. $\bm{A}$ is a singular matrix, i.e. $\bm{A}$ is a non-full rank matrix + +rank of matrix $\bm{A}$ is $R$ == number of non-zero eigenvalues +\end{frame} + + + + + +\begin{frame}[t]{Matrix Factorization from Eigenwert Problem for Symmetric Matrix} + +for a normal matrix $\bm{A}$ (such as symmetric, i.e. 
$\bm{A}^H \bm{A} = \bm{A} \bm{A}^H$ ) + +there is the fundamental spectral theorem +$$\bm{A} = \bm{Q} \bm{\Lambda} \bm{Q}^{H}$$ + +i.e. diagonalization in terms of eigenvectors in full rank matrix $\bm{Q}$ and eigenvalues in $\bm{\Lambda}\in\mathbb{R}$ + +What does $\bm{A}$ do with an eigenvector $\bm{q}$? + +\begin{flushleft} +$ +\def\M{1} +\def\N{1} +\def\rank{0.9999} +\drawmatrix[fill=none, height=\M, width=\N]A_\mathtt{M \times M} +\drawmatrix[fill=none, height=\M, width=0]q_\mathtt{M \times 1} += +\drawmatrix[bbox style={fill=C4}, bbox height=\M, bbox width=\M, fill=C1, height=\M, width=\rank\N]{Q}_\mathtt{M \times M} +\drawmatrix[diag]\Lambda_\mathtt{M \times M} +\drawmatrix[bbox style={fill=C4}, bbox height=\N, bbox width=\N, fill=C1, height=\N, width=\rank\N]{Q}_\mathtt{M \times M}^{H} +\drawmatrix[fill=none, height=\M, width=0]q_\mathtt{M \times 1} = ? +$ + +%$$\bm{A} \bm{q} = \bm{Q} \bm{\Lambda} \bm{Q}^{H} \bm{q} = ?$$ +\end{flushleft} + \end{frame} @@ -344,8 +382,8 @@ \section{Ex02: SVD / 4 Subspaces} -\begin{frame}{Generalized Factorization?!?} -Over-Determined +\begin{frame}{Generalized Factorization?!} +Over-Determined (tall/thin matrix $\bm{A}$) \begin{center} $ \def\M{3} @@ -357,7 +395,7 @@ \section{Ex02: SVD / 4 Subspaces} \drawmatrix[bbox style={fill=C1}, bbox height=\N, bbox width=\N, fill=C2, height=\N, width=\rank\N]{V}_\mathtt{N \times N}^H $ \end{center} -Under-Determined +Under-Determined (flat/fat or short/wide matrix $\bm{A}$) \begin{center} $ \def\M{1} @@ -373,8 +411,8 @@ \section{Ex02: SVD / 4 Subspaces} -\begin{frame}{Singular Value Decomposition (SVD)} -most general matrix factorization % that nicely reveals the 4 subspaces of a rank $r$ matrix $\bm{A}$ +\begin{frame}{Generalized Factorization: Singular Value Decomposition (SVD)} +most general matrix factorization for a matrix $\bm{A}$ with rank $R$ fundamentally important for understanding the heart beat of linear algebra @@ -402,7 +440,7 @@ \section{Ex02: SVD / 4 Subspaces} $ 
\end{center} -$$\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^\mathrm{H}$$ +$$\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^\mathrm{H}\qquad \qquad \bm{U}\bm{U}^H = \bm{U}^H\bm{U} = \bm{I} \qquad \qquad \bm{V}\bm{V}^H = \bm{V}^H\bm{V} = \bm{I}$$ left singular vectors\quad$\bm{U} = \mathrm{eigvec}(\bm{A}\bm{A}^\mathrm{H})$ @@ -411,59 +449,240 @@ \section{Ex02: SVD / 4 Subspaces} right singular vectors $\bm{V} = \mathrm{eigvec}(\bm{A}^\mathrm{H}\bm{A})$ \begin{footnotesize}, order must match to the corresponding singular values\end{footnotesize} -singular value matrix $\bm{\Sigma}$, $r$ singular values on \underline{diagonal} descending order +singular value matrix $\bm{\Sigma}$ +\begin{footnotesize}, $\text{min}(M,N)$ singular values on \underline{diagonal} descending order, $R$ of them non-zero +\end{footnotesize} + \end{frame} + + + +\begin{frame}[label=SVD1]{Singular Value Decomposition (SVD)} + +\begin{flushleft} +$ +\def\M{1.4} +\def\N{1} +\def\rank{0.4} +\drawmatrix[fill=none, height=\M, width=\N]A_\mathtt{M \times N} = +\drawmatrix[bbox style={fill=C4}, bbox height=\M, bbox width=\M, fill=C0, height=\M, width=\rank\N]U_\mathtt{M \times M} +\drawmatrix[bbox style={fill=gray!50}, bbox height=\M, bbox width=\N, fill=white, height=\rank\N, width=\rank\N]\Sigma_\mathtt{M \times N} +\drawmatrix[bbox style={fill=C1}, bbox height=\N, bbox width=\N, fill=C2, height=\N, width=\rank\N]{V}_\mathtt{N \times N}^H +$ +\end{flushleft} + +\begin{flushleft} +$ +\def\M{1} +\def\N{1.4} +\def\rank{0.7} +\drawmatrix[fill=none, height=\M, width=\N]A_\mathtt{M \times N} = +\drawmatrix[bbox style={fill=C4}, bbox height=\M, bbox width=\M, fill=C0, height=\M, width=\rank\M]U_\mathtt{M \times M} +\drawmatrix[bbox style={fill=gray!50}, bbox height=\M, bbox width=\N, fill=white, height=\rank\M, width=\rank\M]\Sigma_\mathtt{M \times N} +\drawmatrix[bbox style={fill=C1}, bbox height=\N, bbox width=\N, fill=C2, height=\N, width=\rank\M]{V}_\mathtt{N \times N}^H +$ +\end{flushleft} + +%$$\bm{A} = \bm{U} 
\bm{\Sigma} \bm{V}^\mathrm{H}$$ + +\vspace{3.25cm} + +left singular vectors\quad$\bm{U} = \mathrm{eigvec}(\bm{A}\bm{A}^\mathrm{H})$ +\begin{footnotesize}, order must match to the corresponding singular values\end{footnotesize} + +right singular vectors $\bm{V} = \mathrm{eigvec}(\bm{A}^\mathrm{H}\bm{A})$ +\begin{footnotesize}, order must match to the corresponding singular values\end{footnotesize} + +singular value matrix $\bm{\Sigma}$ +\begin{footnotesize}, $\text{min}(M,N)$ singular values on \underline{diagonal} descending order, $R$ of them non-zero +\end{footnotesize} + +\end{frame} + + + + + + + + + + + + + + \begin{frame}{Singular Value Decomposition (SVD)} -input-related matrix $\bm{V}$ and output related matrix $\bm{U}$ are unitary, i.e. -$$\bm{V}\bm{V}^\mathrm{H}=\bm{I},\quad\bm{V}^\mathrm{H}\bm{V}=\bm{I},\quad\bm{U}\bm{U}^\mathrm{H}=\bm{I},\quad\bm{U}^\mathrm{H}\bm{U}=\bm{I}$$ +\begin{flushleft} +$ +\def\M{1.4} +\def\N{1} +\def\rank{0.4} +\drawmatrix[fill=none, height=\M, width=\N]A_\mathtt{M \times N} = +\drawmatrix[bbox style={fill=C4}, bbox height=\M, bbox width=\M, fill=C0, height=\M, width=\rank\N]U_\mathtt{M \times M} +\drawmatrix[bbox style={fill=gray!50}, bbox height=\M, bbox width=\N, fill=white, height=\rank\N, width=\rank\N]\Sigma_\mathtt{M \times N} +\drawmatrix[bbox style={fill=C1}, bbox height=\N, bbox width=\N, fill=C2, height=\N, width=\rank\N]{V}_\mathtt{N \times N}^H +$ +\end{flushleft} -superposition of rank-1 matrices (outer products) because singular values in diagonal matrix $\bm{\Sigma}$ -$$\bm{A} = \sum_{i=1}^{\text{rank }r} \sigma_i \bm{u}_i \bm{v}_i^\mathrm{H} = \bm{U} \bm{S} \bm{V}^\mathrm{H}$$ +\begin{flushleft} +$ +\def\M{1} +\def\N{1.4} +\def\rank{0.7} +\drawmatrix[fill=none, height=\M, width=\N]A_\mathtt{M \times N} = +\drawmatrix[bbox style={fill=C4}, bbox height=\M, bbox width=\M, fill=C0, height=\M, width=\rank\M]U_\mathtt{M \times M} +\drawmatrix[bbox style={fill=gray!50}, bbox height=\M, bbox width=\N, fill=white, 
height=\rank\M, width=\rank\M]\Sigma_\mathtt{M \times N} +\drawmatrix[bbox style={fill=C1}, bbox height=\N, bbox width=\N, fill=C2, height=\N, width=\rank\M]{V}_\mathtt{N \times N}^H +$ +\end{flushleft} -Fundamentally important: $\bm{U}$ and $\bm{V}$ span the 4 subspaces of matrix $\bm{A}$ +$$\bm{A} +\begin{bmatrix} +\bm{v}_1 & \bm{v}_2 & \bm{v}_R & \bm{v}_N +\end{bmatrix} += $$ -\centering -\includegraphics[width=0.4\textwidth]{four_subspaces.pdf} +$$\bm{A} +\begin{bmatrix} +\bm{v}_1 & \bm{v}_2 & \bm{v}_R & \bm{v}_N +\end{bmatrix} += $$ +\vspace{1.5cm} + +left singular vectors\quad$\bm{U} = \mathrm{eigvec}(\bm{A}\bm{A}^\mathrm{H})$ +\begin{footnotesize}, order must match to the corresponding singular values\end{footnotesize} + +right singular vectors $\bm{V} = \mathrm{eigvec}(\bm{A}^\mathrm{H}\bm{A})$ +\begin{footnotesize}, order must match to the corresponding singular values\end{footnotesize} + +singular value matrix $\bm{\Sigma}$ +\begin{footnotesize}, $\text{min}(M,N)$ singular values on \underline{diagonal} descending order, $R$ of them non-zero +\end{footnotesize} \end{frame} +\againframe{SVD1} + + + + + \begin{frame}{The 4 Subspaces of a Matrix} -matrix $\bm{A}_{M \times N} = \bm{U} \bm{\Sigma} \bm{V}^H$ spans four fundamental subspaces +matrix $\bm{A}_{M \times N}$ spans four fundamental subspaces \hspace{4.25cm} \textcolor{C0}{column space} $\perp$ \textcolor{C4}{left null space} \hspace{0.75cm} \textcolor{C2}{row space} $\perp$ \textcolor{C1}{null space} -\textcolor{C0}{column space / range / image:} $$R(\bm{A}) = \{\bm{p}\in\mathbb{C}^M | \bm{A} \bm{g} = \bm{p},\qquad\forall \bm{g}\in\mathbb{C}^N\}$$ +\textcolor{C0}{column space / range / image in U:} $$\text{Range}(\bm{A}) = \{\bm{b}\in\mathbb{C}^M | \bm{A} \bm{x} = \bm{b},\qquad\forall \bm{x}\in\mathbb{C}^N\}$$ -\textcolor{C1}{null space / kernel:} $$N(\bm{A}) = \{\bm{g}\in\mathbb{C}^N | \bm{A} \bm{g} = \bm{0}\}$$ +\textcolor{C1}{null space / kernel in V:} $$\text{Null}(\bm{A}) = 
\{\bm{x}\in\mathbb{C}^N | \bm{A} \bm{x} = \bm{0}\}$$ the column/null-space concept for transposed matrix $\bm{A}^H$ yields the two other spaces: -\textcolor{C2}{row space:} $$R(\bm{A}^H) = \{\bm{g}\in\mathbb{C}^N | \bm{A}^H \bm{p} = \bm{g},\qquad\forall \bm{p}\in\mathbb{C}^M\}$$ +\textcolor{C2}{row space in V:} $$\text{Range}(\bm{A}^H) = \{\bm{x}\in\mathbb{C}^N | \bm{A}^H \bm{b} = \bm{x},\qquad\forall \bm{b}\in\mathbb{C}^M\}$$ -\textcolor{C4}{left null space / cokernel:} $$N(\bm{A}^H) = \{\bm{p}\in\mathbb{C}^M | \bm{A}^H \bm{p} = \bm{0}\}$$ +\textcolor{C4}{left null space / cokernel in U:} $$\text{Null}(\bm{A}^H) = \{\bm{b}\in\mathbb{C}^M | \bm{A}^H \bm{b} = \bm{0}\}$$ + +matrix rank of $\bm{A}$ is $R\leq \text{min}(M,N)$ == dimension of column space == dimension of row space == number of independent columns and rows +== number of pivots in rref($\bm{A}$) + +\end{frame} + + +\begin{frame}[t]{Simple Example For Subspaces} + +matrix factorization column space times row space +$$ +\bm{C} = +\begin{bmatrix} +1 & 0 \\ 0 & 2 \\ 0 & 0 +\end{bmatrix} +\qquad +\bm{R} = +\begin{bmatrix} +3 & 0 \\ 0 & 4 +\end{bmatrix} +\qquad +\bm{A} = +\bm{C} \bm{R} = +\begin{bmatrix} +3 & 0 \\ 0 & 8 \\ 0 & 0 +\end{bmatrix} +$$ +matrix factorization in terms of SVD +$$ +\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^H += +\begin{bmatrix} +0 & 1 & 0 \\ +1 & 0 & 0 \\ +0 & 0 & 1 +\end{bmatrix} +% +\begin{bmatrix} +8 & 0\\ +0 & 3\\ +0 & 0 +\end{bmatrix} +% +\left( +\begin{bmatrix} +0 & 1\\ +1 & 0 +\end{bmatrix} +\right)^H +$$ +rank of matrix $\bm{A}$? -rank of $\bm{A}$ == dimension of column space == dimension of row space == number of independent columns and rows +subspaces of matrix $\bm{A}$? equations and sketch... 
\end{frame} + + + + +\begin{frame}{SVD Fundamentals} % +superposition of rank-1 matrices (outer products) because singular values in diagonal matrix $\bm{\Sigma}$ +$$\bm{A}_{M \times N} = \sum_{r=1}^{\text{rank }R} \sigma_r \bm{u}_r \bm{v}_r^\mathrm{H} = \bm{U} \bm{S} \bm{V}^\mathrm{H}$$ % +input-related matrix $\bm{V}$ and output related matrix $\bm{U}$ are unitary, i.e. +$$\bm{V}\bm{V}^\mathrm{H}=\bm{I},\quad\bm{V}^\mathrm{H}\bm{V}=\bm{I},\quad\bm{U}\bm{U}^\mathrm{H}=\bm{I},\quad\bm{U}^\mathrm{H}\bm{U}=\bm{I}$$ % +due to $\bm{V}^\mathrm{H}\bm{V}=\bm{I}$ we can re-arrange +$$\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^\mathrm{H} \rightarrow +\bm{A} \bm{V} = \bm{U} \bm{\Sigma}$$ +rank $R$ matrix $\bm{A}$ acting on \textcolor{C2}{row space} $\bm{v}$ maps to corresponding \textcolor{C0}{column space / range / image} $\bm{u}$ weighted by corresponding singular value $\sigma$ +$$\bm{A} \bm{v}_{1...R} = \sigma_{1...R} \bm{u}_{1...R}$$ +rank $R$ matrix $\bm{A}$ matrix acting on \textcolor{C1}{null space / kernel} $\bm{v}$ maps to zero vector $\bm{0}_{M \times 1}$ +$$\bm{A} \bm{v}_{R+1...N} = \bm{0}$$ + + +\end{frame} + + + + + + \begin{frame}{4 Subspaces of a Matrix} +% +every matrix $\bm{A}_{M \times N} = \bm{U} \bm{\Sigma} \bm{V}^H$ spans four fundamental subspaces -matrix $\bm{A}_{M \times N} = \bm{U} \bm{\Sigma} \bm{V}^H$ spans four fundamental subspaces, these are nicely encoded in the SVD +$\bm{U}$ and $\bm{V}$ span the 4 subspaces of matrix $\bm{A}$ in the (probably) most elegant way \hspace{0.75cm} -\textcolor{C2}{row space} $\perp$ \textcolor{C1}{null space} -\hspace{4.5cm} -\textcolor{C0}{column space} $\perp$ \textcolor{C4}{left null space} +in $\bm{V}$: \textcolor{C2}{row space} $\perp$ \textcolor{C1}{null space} +\hspace{2cm} +in $\bm{U}$: \textcolor{C0}{column space} $\perp$ \textcolor{C4}{left null space} \centering \includegraphics[width=0.5\textwidth]{four_subspaces.pdf} @@ -474,13 +693,45 @@ \section{Ex02: SVD / 4 Subspaces} left singular vectors in $\bm{U}$ 
\end{frame} + + + + + + +\begin{frame}[label=SubspacesForSketch]{4 Subspaces of a Matrix - Examples for Special Matrix Characteristics} + +\hspace{-0.5cm} +\textcolor{C2}{row space} $\perp$ \textcolor{C1}{null space} +\hspace{0.5cm} +\textcolor{C0}{column space} $\perp$ \textcolor{C4}{left null space} + +\begin{flushleft} +\includegraphics[width=0.5\textwidth]{four_subspaces.pdf} +\end{flushleft} + +\hspace{-0.5cm} +right singular vectors in $\bm{V}$ +\hspace{0.5cm} +left singular vectors in $\bm{U}$ + +\end{frame} +\againframe{SubspacesForSketch} +\againframe{SubspacesForSketch} +\againframe{SubspacesForSketch} + + + + + + % % % \begin{frame}{Singular Value Decomposition (SVD), full rank cases} $\cdot$ Sum of rank-1 matrices\qquad -$\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^H = \sum\limits_{i=1}^{r} \sigma_i \quad \textcolor{C0}{\bm{u}}_i \quad \textcolor{C2}{\bm{v}}^H_i$ +$\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^H = \sum\limits_{r=1}^{R} \sigma_r \quad \textcolor{C0}{\bm{u}}_r \quad \textcolor{C2}{\bm{v}}^H_r$ \hspace{4.25cm} \textcolor{C0}{column space} $\perp$ \textcolor{C4}{left null space} @@ -530,14 +781,16 @@ \section{Ex02: SVD / 4 Subspaces} \begin{frame}{Singular Value Decomposition (SVD), rank-deficient cases} $\cdot$ Sum of rank-1 matrices\qquad -$\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^H = \sum\limits_{i=1}^{r} \sigma_i \quad \textcolor{C0}{\bm{u}}_i \quad \textcolor{C2}{\bm{v}}^H_i$ +$\bm{A} = \bm{U} \bm{\Sigma} \bm{V}^H = \sum\limits_{r=1}^{R} \sigma_r \quad \textcolor{C0}{\bm{u}}_r \quad \textcolor{C2}{\bm{v}}^H_r$ + +$\cdot$ Rank-deficient cases always need pseudo-inverse $\bm{A}^\dagger = \bm{V} \Sigma^\dagger \bm{U}^H$ \hspace{4.25cm} \textcolor{C0}{column space} $\perp$ \textcolor{C4}{left null space} \hspace{0.75cm} \textcolor{C2}{row space} $\perp$ \textcolor{C1}{null space} -$\cdot$ Square matrix $\bm{A}$, \quad $M$ rows $=$ $N$ columns, \quad rank deficient ($r$ $N$ columns, \quad rank deficient ($r$ $N$ columns, \quad rank deficient ($r \lambda_2 >... 
>\lambda_r \neq 0$. +We sort the $r=1...R$ eigenvalues (and keep it linked to their +corresponding eigenvectors) in decreasing order, i.e. $\lambda_1 > \lambda_2 >... >\lambda_R \neq 0$. The sorted! eigenvectors of $\bm{X}^\mathrm{T} \bm{X}$ are the sorted (unit length) right singular vectors $\bm{v}_i$ of $\bm{X}$, these span the row space. In our example the spanned row space is $\bm{v}_{1}$, i.e. a line in 2D. -The sorted! (non-zero) eigenvalues $\lambda_i$ become the sorted! singular values $\sigma_i = \sqrt{\lambda_i}$, these are used by the matrix to map row space stuff to column space or vice versa. +The sorted! (non-zero) eigenvalues $\lambda_r$ become the sorted! singular values $\sigma_r = \sqrt{\lambda_r}$, these are used by the matrix to map row space stuff to column space and vice versa. Now, we use the SVD property -$\bm{X} \bm{v}_{i=1...r} = \sigma_{i=1...r} \bm{u}_{i=1...r}$ -to find the corresponding $i$-th unit vector in column space $\bm{u}_i$, simply by -$\bm{X} \bm{v}_{i} \frac{1}{\sigma} = \bm{u}_{i}$ (textbook approach, real algorithms don't do this, numerical linear algebra is tough science!) +$\bm{X} \bm{v}_{r=1...R} = \sigma_{r=1...R} \bm{u}_{r=1...R}$ +to find the corresponding $r$-th unit vector $\bm{u}_r$ in the column space, simply by +$\bm{X} \bm{v}_{r} \frac{1}{\sigma_r} = \bm{u}_{r}$ (this is the textbook approach, real algorithms don't do this, numerical linear algebra is tough science!) \end{frame} -\begin{frame}{SVD Example Manual Calculus: Towards Left Singular Vectors} +\begin{frame}[t]{SVD Example Manual Calculus: Towards Left Singular Vectors} -So, with $\sigma_1 = \sqrt{\lambda_1} = \sqrt{50}$ and $\bm{v}_{i=1} = +So, with $\sigma_1 = \sqrt{\lambda_1} = \sqrt{50}$ and $\bm{v}_{r=1=R} = \frac{1}{\sqrt{10}} \begin{bmatrix} 1 \\3 @@ -1197,7 +1440,7 @@ \section{Appendix: SVD Example Manual Calculus} $$ which is already a unit length vector. 
As we only have one non-zero singular value, $\bm{u}_{1}$ completely spans the column space, which is a line in 2D space. -What we got so far? We might want to check that $\bm{X} \bm{v}_1 = \sigma_1 \bm{u}_1$ with the found solutions +What we got so far? We might want to check that $\bm{X} \bm{v}_1 = \sigma_1 \bm{u}_1$ holds with the found solutions $$ \sigma_1 = \sqrt{50}\qquad @@ -1213,7 +1456,7 @@ \section{Appendix: SVD Example Manual Calculus} 2 \end{bmatrix} $$ -As we have only one non-zero singular value, the full rank matrix approximation (just this one outer product) +As we have only one non-zero singular value, the full rank matrix approximation (just this one outer product from the general SVD equation $\bm{X} = \sum_{r=1}^{R} \sigma_r \bm{u}_r \bm{v}^\mathrm{T}_r$) $$\sigma_1 \bm{u}_1 \bm{v}^\mathrm{T}_1 = \sqrt{50} \frac{1}{\sqrt{5}} \begin{bmatrix} @@ -1238,12 +1481,12 @@ \section{Appendix: SVD Example Manual Calculus} 2 & 6 \end{bmatrix}=\bm{X} $$ -yields exactly $\bm{X}$ where we started from. We, however need to finish the SVD... +already yields exactly $\bm{X}$ where we started from. We, however need to finish the SVD... \end{frame} -\begin{frame}{SVD Example Manual Calculus: Nullspace} +\begin{frame}[t]{SVD Example Manual Calculus: Nullspace} The null space is spanned by all these vectors from our above eigenwert problem, which belong to a zero eigenvalue. By definition they are all orthogonal. We calculated one unit length vector for the null space @@ -1307,16 +1550,16 @@ \section{Appendix: SVD Example Manual Calculus} $$ \end{frame} -\begin{frame}{SVD Example Manual Calculus: Left Nullspace} -We obviously cannot follow $\bm{X} \bm{v}_i = \sigma_i \bm{u}_i$ to find -$\bm{u}_i$ that correspond to a $\sigma_i=0$, dividing by zero is not a good idea. 
+\begin{frame}[t]{SVD Example Manual Calculus: Left Nullspace} +We obviously cannot follow $\bm{X} \bm{v} = \sigma \bm{u}$ to find missing +$\bm{u}_2$, because there is no corresponding $\sigma$ that acts as a meaningful mapping. Instead, we could solve the dedicated eigenwert problem $$\bm{X} \bm{X}^\mathrm{T} \bm{u}_1 = \lambda_1 \bm{u}_1\quad \bm{X} \bm{X}^\mathrm{T} \bm{u}_2 = \lambda_2 \bm{u}_2$$ to find out that again $\lambda_1 = 50$, $\lambda_2=0$. -Furthermore, already known $\bm{u}_1$ is spaning the column space, as this has the singular value $\sigma_1 = \sqrt{\lambda_1}$, which is linked to $\bm{v}_1$ spanning the row space. +Furthermore, the already known $\bm{u}_1$ is spanning the column space, as this has the singular value $\sigma_1 = \sqrt{\lambda_1}$, which is linked to $\bm{v}_1$ spanning the row space. $\bm{u}_2$ corresponds to $\lambda_2=0$, so certainly no column space stuff, it then must be left null space. @@ -1358,7 +1601,7 @@ \section{Appendix: SVD Example Manual Calculus} \end{frame} -\begin{frame}{SVD Example Manual Calculus: Left Nullspace} +\begin{frame}[t]{SVD Example Manual Calculus: Final Result} We make this a unit length vector as required for the SVD $$ \bm{u}_2 = \frac{1}{\sqrt{5}}\begin{bmatrix}2\\-1\end{bmatrix} $$ @@ -1381,7 +1624,7 @@ \section{Appendix: SVD Example Manual Calculus} \end{bmatrix} $$ -So, if there are no typos (statistics tells us there should be :-)) +So, if there are no typos (statistics tells us there should be ;-)) we end up in $$ \bm{X}