\documentclass[12pt]{article}
\usepackage{amsfonts,amssymb,amsmath}
%\documentstyle[12pt,amsfonts]{article}
%\documentstyle{article}
\setlength{\topmargin}{-.5in}
\setlength{\oddsidemargin}{0 in}
\setlength{\evensidemargin}{0 in}
\setlength{\textwidth}{6.5truein}
\setlength{\textheight}{8.5truein}
\setcounter{MaxMatrixCols}{15}
%\input ../basicmath/basicmathmac.tex
%
%\input ../adgeomcs/lamacb.tex
\input ../adgeomcs/mac-new.tex
\input ../gma-v2/mathmac-v2.tex
%\input ../adgeomcs/mac.tex
%\input ../adgeomcs/mathmac.tex
\def\fseq#1#2{(#1_{#2})_{#2\geq 1}}
\def\fsseq#1#2#3{(#1_{#3(#2)})_{#2\geq 1}}
\def\qleq{\sqsubseteq}
%
\begin{document}
\begin{center}
\fbox{{\Large\bf Spring, 2012 \hspace*{0.4cm} CIS 515}}\\
\vspace{1cm}
{\Large\bf Fundamentals of Linear Algebra and Optimization\\
Jean Gallier \\
\vspace{0.5cm}
Homework 3}\\[10pt]
February 28, 2012; Due March 13, 2012\\
\end{center}
\vspace {0.25cm}\noindent
{\bf Problem B1 (20 pts).}
(1) Given two vectors in $\reals^2$ of coordinates $(c_1 - a_1, c_2 - a_2)$ and
$(b_1 - a_1, b_2 - a_2)$, prove that they are linearly dependent iff
\[
\amsdetc{a_1}{b_1}{c_1}{a_2}{b_2}{c_2}{1}{1}{1} = 0.
\]
(2) Given three vectors in $\reals^3$ of coordinates $(d_1 - a_1, d_2 - a_2, d_3 - a_3)$,
$(c_1 - a_1, c_2 - a_2, c_3 - a_3)$, and $(b_1 - a_1, b_2 - a_2, b_3 - a_3)$, prove that
they are linearly dependent iff
\[
\begin{vmatrix}
a_1 & b_1 & c_1 & d_1 \\
a_2 & b_2 & c_2 & d_2 \\
a_3 & b_3 & c_3 & d_3 \\
1 & 1 & 1 & 1
\end{vmatrix}
= 0.
\]
\vspace {0.25cm}\noindent
{\bf Problem B2 (10 pts).}
If $A$ is an $n\times n$ symmetric matrix and $B$ is any $n\times n$ invertible matrix,
prove that $A$ is positive definite iff $\transpos{B} A B$ is positive definite.
\vspace {0.25cm}\noindent {\bf Problem B3 (80 pts).} (1) Let $A$ be any invertible $2\times 2$ matrix $A = \begin{pmatrix} a & b \\ c & d \end{pmatrix}.$ Prove that there is an invertible matrix $S$ such that $SA = \begin{pmatrix} 1 & 0 \\ 0 & ad - bc \end{pmatrix},$ where $S$ is the product of at most four elementary matrices of the form $E_{i, j; \beta}$. \medskip Conclude that every matrix $A$ in $\mathbf{SL}(2)$ (the group of invertible $2\times 2$ matrices $A$ with $\det(A) = +1$) is the product of at most four elementary matrices of the form $E_{i, j; \beta}$. \medskip For any $a\not= 0, 1$, give an explicit factorization as above for $A = \begin{pmatrix} a & 0 \\ 0 & a^{-1} \end{pmatrix}.$ What is this decomposition for $a = -1$? \medskip (2) Recall that a rotation matrix $R$ (a member of the group $\mathbf{SO}(2)$) is a matrix of the form $R = \begin{pmatrix} \cos\theta & -\sin\theta \\ \sin\theta & \cos\theta \end{pmatrix}.$ Prove that if $\theta \not= k \pi$ (with $k \in \integs$), any rotation matrix can be written as a product $R = U L U,$ where $U$ is upper triangular and $L$ is lower triangular of the form $U = \begin{pmatrix} 1 & u \\ 0 & 1 \end{pmatrix}, \quad L = \begin{pmatrix} 1 & 0 \\ v & 1 \end{pmatrix}.$ \medskip Therefore, every plane rotation (except a flip about the origin when $\theta = \pi$) can be written as the composition of three shear transformations! \medskip (3) Recall that $E_{i, d}$ is the diagonal matrix $E_{i, d} = \mathrm{diag}(1,\ldots, 1, d, 1, \ldots, 1),$ whose diagonal entries are all $+1$, except the $(i, i)$th entry which is equal to $d$. \medskip Given any $n\times n$ matrix $A$, for any pair $(i, j)$ of distinct row indices ($1 \leq i, j \leq n$), prove that there exist two elementary matrices $E_1(i,j)$ and $E_2(i,j)$ of the form $E_{k,\ell; \beta}$, such that $E_{j,-1} E_1(i,j)E_2(i,j)E_1(i,j) A = P(i, j) A,$ the matrix obtained from the matrix $A$ by permuting row $i$ and row $j$. 
Equivalently, we have $E_1(i,j)E_2(i,j)E_1(i,j) A = E_{j,-1}P(i, j) A,$ the matrix obtained from $A$ by permuting row $i$ and row $j$ and multiplying row $j$ by $-1$.
\medskip
Prove that for every $i = 2, \ldots, n$, there exist four elementary matrices $E_3(i,d), E_4(i,d)$, $E_5(i,d), E_6(i,d)$ of the form $E_{k,\ell; \beta}$, such that $E_6(i)E_5(i)E_4(i)E_3(i) E_{n,d} = E_{i, d}.$ What happens when $d = -1$, that is, what kinds of simplifications occur?
\medskip
Prove that all permutation matrices can be written as products of elementary operations of the form $E_{k,\ell; \beta}$ and the operation $E_{n, -1}$.
\medskip
(4) Prove that for every invertible $n\times n$ matrix $A$, there is a matrix $S$ such that $S A = \begin{pmatrix} I_{n - 1} & 0 \\ 0 & d \end{pmatrix} = E_{n, d},$ with $d = \det(A)$, and where $S$ is a product of elementary matrices of the form $E_{k,\ell; \beta}$.
\medskip
In particular, every matrix in $\mathbf{SL}(n)$ (the group of invertible $n\times n$ matrices $A$ with $\det(A) = +1$) can be written as a product of elementary matrices of the form $E_{k,\ell; \beta}$. Prove that at most $n(n + 1) - 2$ such transformations are needed.
\vspace {0.25cm}\noindent
{\bf Problem B4 (40 pts).}
A matrix, $A$, is called {\it strictly column diagonally dominant\/} iff $|a_{j\, j}| > \sum_{i = 1,\, i\not= j}^{n} |a_{i\, j}|, \quad\hbox{for $j = 1, \ldots, n$.}$ Prove that if $A$ is strictly column diagonally dominant, then Gaussian elimination does not require pivoting and $A$ is invertible.
\vspace {0.25cm}\noindent
{\bf Problem B5 (40 pts).}
Let $(\alpha_1, \ldots, \alpha_{m + 1})$ be a sequence of pairwise distinct scalars in $\reals$ and let $(\beta_1, \ldots, \beta_{m + 1})$ be any sequence of scalars in $\reals$, not necessarily distinct.
\medskip
(1) Prove that there is a unique polynomial $P$ of degree at most $m$ such that $P(\alpha_i) = \beta_i, \quad 1\leq i \leq m + 1.$
\medskip
\hint Remember Vandermonde!
\medskip
(2) Let $L_i(X)$ be the polynomial of degree $m$ given by $L_i(X) = \frac{(X - \alpha_1)\cdots (X - \alpha_{i - 1}) (X - \alpha_{i + 1}) \cdots (X - \alpha_{m + 1})} {(\alpha_i - \alpha_1)\cdots (\alpha_i - \alpha_{i - 1}) (\alpha_i - \alpha_{i + 1}) \cdots (\alpha_i - \alpha_{m + 1})}, \quad 1 \leq i \leq m + 1.$ The polynomials $L_i(X)$ are known as {\it Lagrange polynomial interpolants\/}. Prove that $L_i(\alpha_j) = \delta_{i\, j}, \quad 1\leq i, j \leq m + 1.$ Prove that $P(X) = \beta_1L_1(X) + \cdots + \beta_{m + 1} L_{m + 1}(X)$ is the unique polynomial of degree at most $m$ such that $P(\alpha_i) = \beta_i, \quad 1\leq i \leq m + 1.$
\medskip
(3) Prove that $L_1(X), \dots, L_{m + 1}(X)$ are linearly independent, and that they form a basis of all polynomials of degree at most $m$.
\medskip
How is $1$ (the constant polynomial $1$) expressed over the basis $(L_1(X), \dots, L_{m + 1}(X))$?
\medskip
Give the expression of every polynomial $P(X)$ of degree at most $m$ over the basis $(L_1(X), \dots, L_{m + 1}(X))$.
\medskip
(4) Prove that the dual basis $(L_1^*, \dots, L_{m + 1}^*)$ of the basis $(L_1(X), \dots, L_{m + 1}(X))$ consists of the linear forms $L_i^*$ given by $L_i^*(P) = P(\alpha_i),$ for every polynomial $P$ of degree at most $m$; this is simply {\it evaluation at $\alpha_i$\/}.
\vspace {0.25cm}\noindent
{\bf Problem B6 (30 pts).}
Consider the $n \times n$ symmetric matrix $A = \begin{pmatrix} 1 & 2 & 0 & 0 & \ldots & 0 & 0\\ 2 & 5 & 2 & 0 & \ldots & 0 & 0\\ 0 & 2 & 5 & 2 & \ldots & 0 & 0\\ \vdots &\vdots & \ddots & \ddots& \ddots&\vdots&\vdots \\ 0 & 0 & \ldots & 2 & 5 & 2 & 0 \\ 0 & 0 & \ldots & 0 & 2 & 5 & 2 \\ 0 & 0 & \ldots & 0 & 0 & 2 & 5 \end{pmatrix}.$
\medskip
(1) Find an upper-triangular matrix $R$ such that $A = \transpos{R}R$.
\medskip
(2) Prove that $\det(A) = 1$.
\medskip (3) Consider the sequence \begin{align*} p_0(\lambda) &= 1\\ p_1(\lambda) &= 1 - \lambda \\ p_{k}(\lambda) &= (5 - \lambda)p_{k - 1}(\lambda) - 4 p_{k - 2}(\lambda) \quad 2 \leq k \leq n. \end{align*} Prove that $\det(A - \lambda I ) = p_n(\lambda).$ \remark It can be shown that $p_n(\lambda)$ has $n$ distinct (real) roots and that the roots of $p_{k}(\lambda)$ separate the roots of $p_{k + 1}(\lambda)$. \vspace{0.5cm}\noindent {\bf TOTAL: 220 points.} \end{document}