notes

uni notes
git clone git://popovic.xyz/notes.git
Log | Files | Refs

prb7.tex (6501B)


      1 \input{preamble}
      2 
      3 \begin{document}
      4 \maketitle
      5 \tableofcontents
      6 \section{Sheet 7}
      7 \subsection{Problem 1}
      8 For a matrix $A \in \mathbb{C}^{n\times n}$, define $B, C \in
      9 \mathbb{C}^{n\times n}$ in the following way
     10 \begin{align}
     11     B = \frac{1}{2}(A + A^*), \quad C = \frac{1}{2i} (A - A^*).
     12 \end{align}
     13 The matrices $B$ and $C$ are Hermitian, which can be seen by directly
     14 calculating the adjoint.
     15 \begin{align}
     16     B^* &= \frac{1}{2}(A+A^*)^* = \frac{1}{2}\left( A^* + (A^*)^*\right)\\
     17         &= \frac{1}{2}(A^* + A) = \frac{1}{2}(A+A^*) = B,\\
     18         \nonumber\\
     19     C^* &= -\frac{1}{2i}(A-A^*)^* = -\frac{1}{2i}\left( A^* - (A^*)^*\right)\\
     20         &= -\frac{1}{2i}(A^* - A) = \frac{1}{2i}(A - A^*) = C.
     21 \end{align}
     22 Additionally we can bound the eigenvalues of $A$ by the minimum and maximum
     23 eigenvalues of $B$ and $C$ by rewriting $A$ as
     24 \begin{align}
     25     A = (B + iC).
     26 \end{align}
     27 Now consider an arbitrary eigenpair of $A$, $(\lambda, v)$, such that $\|v\|
     28 = 1$, the eigenvalue equation reads
     29 \begin{align}
     30     Av &= (B + iC)v = \lambda v\\
     31         &= Bv + iCv\\
     32     \Leftrightarrow & v^* B v + v^*(iC)v = \lambda.
     33 \end{align}
     34 The real and the imaginary part of $\lambda$ can be calculated by a simple
     35 identity
     36 \begin{align}
     37     \text{Re}(\lambda)
     38     &=  \frac{1}{2}(\lambda + \bar{\lambda}) \\
     39     &= \frac{1}{2}(v^* B v + v^*(iC)v + v^*B^*v - v^* (i C)v)\\
     40     &= \frac{1}{2} ( v^*Bv + v^*B^*v + v^* (iC)v - v^*(iC)v)\\
     41     &= \frac{1}{2}(2v^* B v) = v^*Bv\\
     42     \nonumber\\
     43     \text{Im}(\lambda)
     44     &=  \frac{1}{2i}(\lambda - \bar{\lambda}) \\
     45     &= v^*Cv
     46 \end{align}
     47 Combining the results from above with the Rayleigh--Ritz theorem, which
     48 states that for every Hermitian $D \in \mathbb{C}^{n\times n}$ and all $x \in
     49 \mathbb{C}^{n}$ with $x\neq 0$, we have a bound from below and above by
     50 the minimum and maximum eigenvalues of $D$
     51 \begin{align}
     52     \lambda_{\text{min}}(D) \le \frac{x^*Dx}{\|x\|^2} \le
     53     \lambda_{\text{max}}(D)
     54 \end{align}
     55 Then we have
     56 \begin{align}
     57     \Rightarrow  \begin{cases}
     58         \text{Re}(\lambda) \in [ \lambda_{\text{min}}(B),
     59         \lambda_{\text{max}}(B)]\\
     60         \text{Im}(\lambda) \in [ \lambda_{\text{min}}(C),
     61         \lambda_{\text{max}}(C)]\\
     62     \end{cases}
     63 \end{align}
     64 \subsection{Problem 2}
     65 Given two Hermitian matrices $A, B \in \mathbb{C}^{n\times n}$, denote
     66 $\left\{ \lambda_j(A) \right\}_{j=1}^n $ and $\left\{ \lambda_j(A + B)
     67 \right\}_{j=1}^n$ the eigenvalues of $A$ and $A+B$ in increasing order. If
     68 $B$ is positive semi-definite then we have a bound
     69 \begin{align}
     70     \lambda_k(A) \le \lambda_k(A+B) \qquad \forall k \in \left\{ 1, \ldots,n
     71     \right\}.
     72 \end{align}
     73 By the Courant--Fischer theorem, letting $\mathcal{V}_k$ be the set of all
     74 $k$-dimensional subspaces of $\mathbb{C}^{n}$, we have
     75 \begin{align}
     76     \lambda_k(A)
     77     &= \min_{V \in \mathcal{V}_k} \max_{v\in V,\;
     78     \|v\|=1} \langle v, Av\rangle .
     79 \end{align}
     80 And if $B$ is positive  semi-definite we have
     81 \begin{align}
     82     x^* B x \ge 0 \qquad \forall x \in \mathbb{C}^{n}.
     83 \end{align}
     84 Since $A$ and $B$ are Hermitian, $A+B$ is Hermitian too, and using
     85 $\langle v, Bv\rangle \ge 0$ we can write
     86 \begin{align}
     87     \lambda_k(A+B)
     88     &= \min_{V \in \mathcal{V}_k} \max_{v\in V,\; \|v\|=1}
     89     \left( \langle v, Av\rangle + \langle v, Bv\rangle \right) \\
     90     &\ge \min_{V \in \mathcal{V}_k} \max_{v\in V,\;
     91     \|v\|=1} \langle v, Av\rangle = \lambda_k(A).
     92 \end{align}
     93 \subsection{Problem 3}
     94 Let $A \in \mathbb{C}^{n\times n}$ be diagonalizable by $X = (x_1,\ldots,x_n)
     95 \in \mathbb{C}^{n \times n}$ the matrix of right-eigenvectors $x_j \in
     96 \mathbb{C}^{n}$ of $A$. For all $\varepsilon> 0 $, let $\nu$ be an eigenvalue
     97 of $A+\varepsilon A$; then there exists an eigenvalue $\lambda$ of $A$ with
     98 \begin{align}
     99     \frac{|\lambda - \nu|}{|\lambda|} \le K_p(X)\varepsilon
    100 \end{align}
    101 Let us rewrite
    102 \begin{align}
    103     A + \varepsilon A = (1+\varepsilon) A,
    104 \end{align}
    105 then the eigenvalue $\nu \in \lambda(A+\varepsilon A)$ can be written as an
    106 eigenvalue of $A$ with
    107 \begin{align}
    108     \frac{\nu}{1+\varepsilon} \in \lambda (A).
    109 \end{align}
    110 Then the bound reads
    111 \begin{align}
    112     \frac{|\lambda - \nu|}{|\lambda|}
    113     &= \frac{|\frac{\nu}{1+\varepsilon} - \nu|}{|\frac{\nu}{1+\varepsilon}|}\\
    114     &= \frac{|\nu - (1+\varepsilon)\nu|}{|\nu|}\\
    115     &= \varepsilon \le \varepsilon K_p(X),
    116 \end{align}
    117 since $K_p(X) = \|X\|_p \|X^{-1}\|_p \ge 1$ for every invertible $X$ that
    118 diagonalizes $A$.
    119 \subsection{Exercise 4}
    120 Given some $\mu \in \mathbb{R}$ the shifted QR-algorithm is defined as: Let
    121 $Q_0$ be orthogonal, such that $T_0 = Q_0^T A Q_0$ is in upper Hessenberg form.
    122 For $k \in \mathbb{N}$ determine a sequence of the matrices $T_k$ by
    123 \begin{itemize}
    124     \item Determine $Q_k$ and $R_k$, s.t. $Q_k R_k = T_{k-1} - \mu I$, as a
    125         QR-decomposition of $T_{k-1} - \mu I$
    126     \item Let $T_k = R_k Q_k + \mu I$
    127 \end{itemize}
    128 The sequence of these matrices $T_k$ is in fact similar to $A$, in the
    129 following way
    130 \begin{align}
    131     T_{k}
    132     &= R_k Q_k + \mu I \\
    133     &= Q_k^T ( T_{k-1} - \mu I) Q_k + \mu I\\
    134     &= Q_k^T T_{k-1} Q_k - \mu I + \mu I\\
    135     &= Q_k^T T_{k-1} Q_k\\
    136     &= Q_k^T \cdots Q_1^T T_0 Q_1 \cdots Q_k\\
    137     &= \underbrace{Q_k^T \cdots Q_0^T}_{=Q^T} A \underbrace{Q_0 \cdots Q_k}_{= Q}
    138 \end{align}
    139 Furthermore, let $A$ be an unreduced Hessenberg matrix and $\mu$ an eigenvalue
    140 of $A$. Let $QR = A-\mu I$ be the QR-decomposition of $A-\mu I$ and define
    141 \begin{align}
    142   \overline{A} = RQ  + \mu I,
    143 \end{align}
    144 then
    145 \begin{align}
    146     \overline{A}_{n,n} = \mu \quad \text{and} \quad \overline{A}_{n, n-1} = 0.
    147 \end{align}
    148 To start, if $A$ is an unreduced Hessenberg matrix then
    149 \begin{align}
    150     A_{i+1, i} \neq 0 \qquad \forall i \in \left\{ 1, \ldots , n-1 \right\}.
    151 \end{align}
    152 Then $A-\mu I$ is singular, since $\mu$ is an eigenvalue of $A$ and hence
    153 $\det(A-\mu I) = 0$; equivalently, $0$ is an eigenvalue of $A -
    154 \mu I$. Then
    155 \begin{align}
    156      &\Rightarrow \overline{A} = RQ + \mu I.
    157 \end{align}
    158 Here $A-\mu I$ is singular, but its first $n-1$ columns are linearly
    159 independent since $A$ is an unreduced Hessenberg matrix. Because $R =
    160 Q^T(A-\mu I)$ and multiplication by the orthogonal $Q^T$ preserves rank,
    161 the first $n-1$ columns of $R$ are linearly independent while $R$ itself is
    162 singular; as $R$ is upper triangular, its last row must vanish, i.e. $R_{n, \cdot} = 0^T$, then
    163 \begin{align}
    164     &R_{n,n-1} = 0,\qquad (RQ)_{n, n-1} = 0,\\
    165     &R_{n,n} = 0,\qquad (RQ)_{n, n} = 0.
    166 \end{align}
    167 By this we conclude
    168 \begin{align}
    169     \overline{A}_{n,n} &= (RQ)_{n,n}+\mu = \mu\\
    170     \overline{A}_{n, n-1} &= (RQ)_{n,n-1} = 0
    171 \end{align}
    172 
    173 \end{document}
    174 
    175