\documentclass[11pt]{article}
\usepackage{amsgen,amsmath,amstext,amsbsy,amsopn,amssymb}
\usepackage{graphicx} % no driver option: let graphicx auto-detect (dvips breaks pdflatex)
%% \usepackage[round]{natbib}
% \usepackage[pdftex]{hyperref}
\usepackage{hyperref}
\hypersetup{
colorlinks=true, % false: boxed links; true: colored links
linkcolor=blue, % color of internal links
citecolor=blue, % color of links to bibliography
filecolor=blue, % color of file links
urlcolor=blue % color of external links
}
\textwidth 6.3in \textheight 8.8in \topmargin -0.5truein
\oddsidemargin .15truein
\parskip .1in
\renewcommand{\baselinestretch}{1.53} % double spaced
\newtheorem{theorem}{\bf Theorem}
\newcommand{\Reals} {{\mathbb{R}}}
\newcommand{\reals} {{\mathbb{R}}}
\newcommand{\Vp}{{\mathbb{V}}}
\newcommand{\Wp}{{\mathbb{W}}}
\newcommand{\Pp}{{\mathcal{P}}}
\newcommand{\cC}{\mathcal{C}}
\newcommand{\cS}{\mathcal{S}}
\newcommand{\cT}{\mathcal{T}}
\newcommand{\cU}{\mathcal{U}}
\newcommand{\cV}{\mathcal{V}}
\newcommand{\Ra}{{\mathcal{R}}}
\newcommand{\0}{{\mathbf{0}}}
\renewcommand{\b}{{\mathbf{b}}}
\renewcommand{\c}{{\mathbf{c}}}
\newcommand{\bo}{\mathbf{b}^{old}}
\newcommand{\bn}{\mathbf{b}^{new}}
\newcommand{\co}{\mathbf{c}^{old}}
\newcommand{\cn}{\mathbf{c}^{new}}
\newcommand{\xo}{{x}^{old}}
\newcommand{\xn}{{x}^{new}}
\newcommand{\yo}{{y}^{old}}
\newcommand{\yn}{{y}^{new}}
\newcommand{\lo}{{l}^{old}}
\renewcommand{\ln}{{l}^{new}}
\newcommand{\Ao}{{A}^{old}}
\newcommand{\An}{{A}^{new}}
\newcommand{\Bo}{{B}^{old}}
\newcommand{\Bn}{{B}^{new}}
\newcommand{\xocd}{x^{old}}
\newcommand{\xncd}{x^{new}}
\newcommand{\yocd}{y^{old}}
\newcommand{\yncd}{y^{new}}
\newcommand{\locd}{l^{old}}
\newcommand{\lncd}{l^{new}}
\newcommand{\Aocd}{A^{old}}
\newcommand{\Ancd}{A^{new}}
\newcommand{\Bocd}{B^{old}}
\newcommand{\Bncd}{B^{new}}
\newcommand{\zo}{{z}}
\newcommand{\zn}{{\tilde{z}}}
\newcommand{\ao}{{a}}
\newcommand{\an}{{\tilde{a}}}
\newcommand{\fo}{{f}}
\newcommand{\fn}{{\tilde{f}}}
\newcommand{\e}{{\mathbf{e}}}
\newcommand{\f}{{\mathbf{f}}}
\renewcommand{\k}{{\boldsymbol{k}}}
\renewcommand{\l}{{\boldsymbol{l}}}
\renewcommand{\u}{{\mathbf{u}}}
\renewcommand{\v}{{\mathbf{v}}}
\newcommand{\w}{{\mathbf{w}}}
\newcommand{\x}{{\mathbf{x}}}
\newcommand{\y}{{\mathbf{y}}}
\newcommand{\z}{{\mathbf{z}}}
\newcommand{\A}{{\mathbf{A}}}
\newcommand{\B}{{\mathbf{B}}}
\newcommand{\C}{{\mathbf{C}}}
\newcommand{\D}{{\mathbf{D}}}
\newcommand{\E}{{\mathbf{E}}}
\renewcommand{\H}{{\mathbf{H}}}
\newcommand{\I}{{\mathbf{I}}}
\newcommand{\M}{{\mathbf{M}}}
\newcommand{\N}{{\mathbf{N}}}
\renewcommand{\L}{{\mathbf{L}}}
\renewcommand{\P}{{\mathbf{P}}}
\newcommand{\Q}{{\mathbf{Q}}}
\newcommand{\R}{{\mathbf{R}}}
\renewcommand{\S}{{\mathbf{S}}}
\newcommand{\T}{{\mathbf{T}}}
\newcommand{\U}{{\mathbf{U}}}
\newcommand{\W}{{\mathbf{W}}}
\newcommand{\X}{{\mathbf{X}}}
\newcommand{\Y}{{\mathbf{Y}}}
\newcommand{\Z}{{\mathbf{Z}}}
\newcommand{\bX}{{\mathbf{\bar{X}}}}
\newcommand{\by}{{\mathbf{\bar{y}}}}
\newcommand{\cR}{{\cal R}}
\newcommand{\cN}{{\cal N}}
\newcommand{\bOmega}{\mathbf{\Omega}}
\newcommand{\bDelta}{\mathbf{\Delta}}
\newcommand{\Bphi}{\mathbf{\phi}}
\newcommand{\Bpsi}{\mathbf{\psi}}
\newcommand{\cbind}{\mathrm{cbind}}
\newcommand{\rbind}{\mathrm{rbind}}
\newcommand{\rank}{\mathrm{rank}}
\newcommand{\var}{\mathrm{var}}
\newcommand{\tr}{\mathrm{tr}}
\newcommand{\diag}{\mathrm{diag}}
\newcommand{\mean}{\mathrm{mean}}
\newcommand{\Range}{\mathbf{Range}}
\newcommand{\Null}{\mathbf{Null}}
\newcommand{\pnorm}[1]{{[\![}#1{]\!]}}
\newcommand{\plangle}{{\langle\!\langle}}
\newcommand{\prangle}{{\rangle\!\rangle}}
\newcommand{\Ans}{{\bf Answer:~}}
\begin{document}
\title{\Large\bf HOMEWORK 2, STAT 961: Lin. Alg. 2 \\ Due Fri, 2019/10/11, 5:00pm}
\author{\bf Your Name: ... (replace this)}
\maketitle
%----------------------------------------------------------------
\noindent
{\bf Instructions:} Edit this LaTeX file by inserting your solutions
after each problem statement. Generate a PDF file from it and e-mail
the PDF in an attachment with filename
\centerline{\bf hw02-Yourlastname-Yourfirstname.pdf}
\noindent
to the following class Gmail address:
\centerline{\bf stat961.at.wharton@gmail.com}
\noindent
with {\bf subject line exactly} as follows for easy gmail search:
\centerline{\bf Homework 2, 2019}
\noindent Rules to be strictly followed under honor code:
\begin{enumerate} \itemsep 0em
\item You must write your own solutions and not copy from anyone.
Verbatim copying from others or unlisted sources will result in zero
points for the whole homework.
\item Subject to the previous item, you may explain the problems, but
not the solutions, to each other in general terms.
\item Do not discuss the homework with previous years' students of
Stat 961/541, and do not consult solutions of similar homeworks of
previous years.
\item Report here (1)~from whom you got help, (2)~who you helped, and
  (3)~other sources, such as online, papers, books,~... You do not
  need to report help with LaTeX and English language.
\begin{itemize}
\item {\bf Who helped me:} ... (replace this)
\item {\bf Who I helped:} ... (replace this)
\item {\bf Complete list of my other sources:} ... (replace this)
\end{itemize}
\end{enumerate}
Instructions for presentation and typesetting:
\begin{enumerate}
\item Give derivations where appropriate, but don't when
instructed to give the answer without derivation.
\item Notations are different here from HW1. The reason is that we
need to distinguish between abstract objects and their coordinates.
We use bold for abstract objects and italic for coordinate objects.
\item Matrix transposes and inverses should be written as $M^\top$
and~$M^{-1}$; simultaneous transposing and inverting can be
written~$M^{-\top}$.
\item To mimic obvious R functions in LaTeX math mode, use
  $\cbind(...)$, $\rbind(...)$.
\end{enumerate}
%----------------------------------------------------------------
\newpage
\centerline{\bf Problems for Review of Abstract Linear Algebra}
\centerline{Homework 2, Stat 961, 2019C}
%----------------------------------------------------------------
\begin{enumerate}
\item What follows is remedial material about vector spaces, bases,
coordinates, linear forms, linear maps, bilinear forms, basis
changes and associated coordinate transformations.
\begin{itemize}
\item Let $\Vp$ and $\Wp$ be linear spaces (synonym: vector spaces)
over the real numbers, that is, addition of elements and
multiplication of elements with real numbers are defined and
follow the usual axioms.
\item The linear spaces $\Vp$ and $\Wp$ are ``abstract,'' meaning
that you should not think of them as spaces of coordinate tuples.
Anything we do should apply to all kinds of concrete linear
spaces, such as spaces of polynomials, splines, signed measures
(of measure theory), tensors, tangent vectors at a point on a
curved manifold,~etc.
\item However, one of the purposes here is to introduce the notion
of a basis and coordinates relative to a basis. Subsequently we
do the same for linear forms, $\Vp \rightarrow \reals$, that is,
linear functions on $\Vp$ (also called ``dual vectors''), then for
linear maps $\Vp \rightarrow \Wp$, and finally for bilinear forms
$\Vp \times \Wp \rightarrow \reals$, which are linear in each of
two vector arguments separately.
\item In Problem~2 we consider the effects of a change of basis on
the coordinates of all four objects: vectors, linear forms, linear
maps, and bilinear forms.
\item In Problem~3 we apply the concepts to concrete spaces of
polynomials. You will need to apply the concepts of Problems~1
and 2 rigorously to get it right.
\item Important: There is no inner product yet, hence there are no
notions of length, angle, orthogonality, symmetry of linear maps,
and identification of primal and dual vectors. This will require
careful thinking because we are used to using these concepts,
often without realizing it.
\item {\bf Definitions}:
\begin{itemize}
\item A finite set of vectors $\{\b_1,...,\b_p\} \subset \Vp$ is
said to be {\bf linearly independent} if
$\sum_{j=1...p} x_j \b_j = \0$ entails $x_j = 0$ for all
$j=1...p$. These vectors are necessarily non-zero ($\neq \0$).
\item If $\{\b_1,...,\b_p\}$ is a maximal set of linearly
independent vectors, that is, if increasing the set by any
vector $\b_{p+1}$ makes the $p+1$ vectors linearly dependent,
then this set is called {\bf a basis of}~$\Vp$.
\item Fact: Any other basis must also be of size~$p$. This unique
number $p$ is called {\bf the dimension of}~$\Vp$:
$p = \dim(\Vp)$. We assume $p > 0$ to avoid dealing with
$\Vp = \{\0\}$.
\item If we need another linear space besides $\Vp$, we denote it
by $\Wp$ and its dimension by~$n = \dim(\Wp)$. A basis of $\Wp$
will be written $\{\c_1,...,\c_n\} \subset \Wp$.
\end{itemize}
[Remarks: (1)~We purposely wrote ``a basis'' and not ``the basis''
because there always exists an infinity of different bases.
(2)~There exist infinite-dimensional linear spaces, and you study
them in a course on ``real analysis'' or ``functional analysis.'']
\end{itemize}
\begin{enumerate}
\item A vector $\x \in \Vp$ has a unique representation
$\x = \sum_{j=1,...,p} x_j \b_j$. We call the numbers
$x_1,...,x_p$ the {\em coordinates of $\x$ in the basis
$\{\b_1,...,\b_p\}$}. We collect these numbers in a
$p \times 1$ matrix or column vector
$x = (x_1,...,x_p)^\top \in \reals^{p \times 1}$.
Similarly for $\y \in \Wp$ we will have
$\y = \sum_{i=1,...,n} y_i \, \c_i$ with coordinates collected in
the $n \times 1$ matrix or column vector
$y = (y_1,...,y_n)^\top \in \reals^{n \times 1}$.
{\bf Important:} $\x$ and $x$ are {\em not} the same, $\y$ and $y$
are {\em not} the same, $\Vp$ is {\em not} $\reals^{p \times 1}$,
and $\Wp$ is {\em not} $\reals^{n \times 1}$.
{\bf Comprehension question}: For fixed $j_0$, what is the
coordinate vector of~$\x = \b_{j_0}$? Reason?
\Ans
\bigskip
\item The set of linear forms $\x \mapsto \l(\x)$,
$\Vp \rightarrow \Reals$, is called the ``dual space'' $\Vp'$ of
$\Vp$. It is also a linear space of dimension~$p$. Linear forms
are sometimes called ``dual vectors,'' so vectors $\x \in \Vp$ may
be called ``primal vectors.''
Coefficients for a linear form $\l$ relative to the primal basis
of $\b_j$ can be defined by $l_j = \l(\b_j)$. We collect them in
a $1 \times p$ matrix or row vector
$l = (l_1,...,l_p) \in \reals^{1 \times p}$. We will see below
that these coefficients can be interpreted as coordinates in a
suitable basis of~$\Vp'$.
{\bf Important:} $\Vp'$ is {\em not} the same as $\Vp$, and
neither is $\Vp'$ the same as~$\reals^{1 \times p}$.
{\bf Task:} Express $\l(\x)$ in terms of the associated
coefficient vector $l$, the coordinate vector $x$, and matrix
multiplication. Be careful regarding transposing or not. Give a
derivation. (The result will justify calling $l_j$ the
``coefficients'' of~$\l$.)
\Ans
\item Why is the formula in the previous question {\em not} an inner
product?
\Ans
\item For fixed $j_0 \in \{1,...,p\}$, consider the ``coordinate
picker'' linear form $\l(\x) = x_{j_0}$ that picks the $j_0$'th
coordinate of $\x$. What is the coefficient vector~$l$ of this
linear form? Reason?
\Ans
\item Denote the ``coordinate picker'' linear forms by
$\l_1,...,\l_p$ : $\l_j(\x) = x_j$. Caution: Do {\em not} confuse
$\l_j$ and~$l_j$!
{\bf Task:} Write an {\em arbitrary} linear form $\l$ with
coefficient vector $l = (l_1,...,l_p)$ as a linear combination of
these coordinate pickers.
\Ans
{\bf Implication:} The linear forms $\{\l_1,...,\l_p\}$ constitute
a basis of $\Vp'$, also called the ``dual basis.'' The
coefficients $l_j$ can now be interpreted as coordinates of the
linear form~$\l$ in this basis. The ``coordinate picker'' basis
of $\Vp'$ depends on the coordinates of $\x$ in $\Vp$, which in
turn depends on the primal basis~$\{\b_1,...,\b_p\}$, hence the
dual basis of $\Vp'$ fully depends on the primal basis of~$\Vp$.
Recall that $l_j$ was defined as~$l_j = \l(\b_j)$.
\item Someone forms $x + l^\top$. What is its meaning, if any?
\Ans
\bigskip
\item A linear map $\x \mapsto \y = \A(\x),~ \Vp \rightarrow \Wp$,
has an associated $n \times p$ matrix $A = (A_{ij})$ relative to
the bases $\{\b_1,...,\b_p\} \subset \Vp$ and
$\{\c_1,...,\c_n\} \subset \Wp$. The elements $A_{ij}$ of the
matrix are defined by $\A(\b_j) = \sum_{i=1,...,n} A_{ij} \c_i$.
Be careful about the order of the indices!
{\bf Comprehension question}: What is the meaning of
$(A_{1j},...,A_{nj})^\top$? Why is this even a definition? We
didn't write ``$A_{ij} = ...$,'' right?
\Ans
\item Express a linear map $\y = \A(\x)$ in terms of the associated
matrix $A$ and column vectors $y = (y_1,...,y_n)^\top$ and
$x = (x_1,...,x_p)^\top$. Give a pedantic derivation.
\Ans
% You may uncomment and use the following LaTex template:
% \begin{eqnarray*}
% \y &=& ... \\
% &=& ... \\
% &=& ... \\
% &=& ... \\
% &=& ... \\
% &=& ... \\
% &=& ... \\
% &=& ...
% \end{eqnarray*}
\item The linear map $\A: \Vp \rightarrow \Wp$ has an associated
dual map $\A': \k \mapsto \l = \A'(\k)$, in the opposite
direction, $\Wp' \rightarrow \Vp'$, defined by composition of $\k$
and $\A$: $\A'(\k) = \k \circ \A$. That is,
$(\A'(\k))(\x) = \k(\A(\x))$ for all $\x \in \Vp$ and all
$\k \in \Wp'$.
Express the dual map $\A'$ in terms of the matrix $A$, the row
vector $k = (k_1,...,k_n)$ with coefficients for $\k$, and
$l = (l_1,...,l_p)$ for~$\l$. Give a pedantic derivation
for~$l_j$.
\Ans
\item It might bother you that the dual linear map $\A'$ is in the
opposite direction from $\A$. Based on the matrix expression you
just derived, you might be tempted to invert the direction and
express $k$ as a function of~$l$, thereby making $\A$ and $\A'$
more parallel. Why is this not a good idea? What additional
assumption would you have to make?
\Ans
\item In some texts you might read that $A^\top$ is the natural
matrix for $\A'$. What is their convention for the dual
coordinate vectors?
\Ans
\bigskip
\item A bilinear form $(\y,\x) \mapsto \B(\y,\x)$,
  $\Wp \times \Vp \rightarrow \Reals$, has an associated matrix
$B = (B_{ij})$ in the bases $\{\c_1,...,\c_n\} \subset \Wp$ and
$\{\b_1,...,\b_p\} \subset \Vp$, with $B_{ij}$ defined by
$B_{ij} = \B(\c_i,\b_j)$.
{\bf Comprehension question} 1: For fixed $\y \in \Wp$, what kind
of object is the function $\x \mapsto \B(\y,\x)$? Similarly, for
fixed $\x \in \Vp$, what is $\y \mapsto \B(\y,\x)$?
\Ans
{\bf Comprehension question} 2: Does the matrix $B$ stand for a linear
map~$\Vp \rightarrow \Wp$?
\Ans
\item Express a bilinear form $\B(\y,\x)$ in terms of the
associated matrix $B$ and coordinate vectors $y$ and~$x$.
Give a pedantic derivation.
\Ans
\item Weird: Consider the special case $\Wp = \Vp'$ and
$\B(\l,\x) = \l(\x)$. Does this qualify as a bilinear form? What
would be its matrix in terms of primal and dual bases?
\Ans
\bigskip
\item For $n = \dim(\Wp)$, let $\l^{(1)}, \l^{(2)}, ..., \l^{(n)}$
be a set of $n$ arbitrary linear forms on $\Vp$ with associated
row vectors $l^{(1)}, l^{(2)}, ..., l^{(n)}$. Construct the
following map: $\C(\x) = \sum_{i=1...n} \l^{(i)}(\x) \c_i$. What
kind of map is~$\C(\cdot)$? Can you give a matrix description?
\Ans
\bigskip
\item For $m$ unrelated to $n$ or $p$, let
$\l^{(1)}, \l^{(2)}, ..., \l^{(m)}$ be $m$ arbitrary linear forms
on $\Vp$ with associated row vectors
$l^{(1)}, l^{(2)}, ..., l^{(m)}$. Similarly, let
$\k^{(1)}, \k^{(2)}, ..., \k^{(m)}$ be $m$ linear forms on $\Wp$
with row vectors $k^{(1)}, k^{(2)}, ..., k^{(m)}$. Construct the
following function:
$\D(\y,\x) = \sum_{i=1...m} \k^{(i)}(\y) \l^{(i)}(\x)$. What kind
of function is~$\D(\cdot,\cdot)$? Can you give a matrix
description?
\Ans
\bigskip
\item Consider linear regression as in class: We write the rows of
the regressor matrix as column vectors $\boldsymbol{x}$ and the
coefficient vector as a column vector $\hat{\boldsymbol{\beta}}$.
Do they belong to the same vector space? Give an interpretation
in light of what we have learned so far. To reason about these
vectors, it may help to think like a physicist again. The two
types of vectors are, however, somehow connected. How?
\Ans
\end{enumerate}
\bigskip
%----------------------------------------------------------------
\item {\bf Basis changes and associated coordinate transformations:}
Consider two bases of the same space $\Vp$: $\bo_1,...,\bo_p$ and
$\bn_1,...,\bn_p$. For any vector $\x$ we have coordinates in both
bases:
% Conventions: \xo and \xn for old and new coordinate vectors.
% \xocd_i and \xncd_i for old and new coordinates.
\begin{equation*}
\x = \sum_j \xncd_j \bn_j = \sum_{j'} \xocd_{j'} \bo_{j'}
\end{equation*}
Denote the respective {\bf coordinate} vectors by
$\xn = (\xncd_1,...,\xncd_p)^\top$ and
$\xo = (\xocd_1,...,\xocd_p)^\top$. To link the two types of
coordinates to each other, we express {\bf the old basis in terms of
the new basis}, in this order:
\begin{equation} \label{eq:coord-trans-matrix}
\bo_{j'} = \sum_j T_{jj'} \bn_j
\end{equation}
Collect the coefficients in a $p \times p$ matrix $T = (T_{jj'})$.
It is important to keep awareness of the convention regarding
the order of the subscripts of $T$.
\begin{enumerate}
\item What does $T$ contain in column~$j'$~?
\Ans
\item How are the {\bf coordinate vectors} $\xo$ and $\xn$ for the
abstract vector $\x$ related to each other in terms of the
matrix~$T$? Give a pedantic derivation.
\Ans
% You may uncomment and use the following LaTex template
% for your derivation:
% \begin{eqnarray*}
% \x &=& ... \\
% &=& ... \\
% &=& ... \\
% &=& ...
% \end{eqnarray*}
\item Argue that $T$ should not be viewed as the matrix of
a linear map.
\Ans
\item Can $T$ be singular (rank-deficient)? Yes or no? No
derivations but one English sentence for explanation.
\Ans
\item What is most likely wrong with the operation $\xo + \xn$?
\Ans
\bigskip
\item For a linear form $\l$ represented by coordinate vectors $\lo$
and $\ln$ in the respective dual bases, how are $\lo$ and $\ln$
related to each other? Give a pedantic derivation. Express $\ln$
in terms of $\lo$, in this order, and explain why this is natural,
unlike the case of dual linear maps.
\Ans
\item In some texts, the matrix for coordinate transformation of
dual vectors is shown as $T^{-\top}$. What is their convention?
\Ans
\bigskip
{\bf Preparation for linear maps and bilinear forms:} We now need
a second space, $\Wp$, and two bases in it as well. Denote them
$\{\co_1,...,\co_n\}$ and $\{\cn_1,...,\cn_n\}$. Instead of
introducing a new symbol for the associated transformation matrix,
we subscript both matrices by their spaces: $T_\Vp$ and~$T_\Wp$.
\bigskip
\item For a linear map, $\Vp \rightarrow \Wp$,
$\x \mapsto \A(\x) = \y$, represented by the matrix $\Ao$ in the
old bases $\{\bo_1,...,\bo_p\} \subset \Vp$ and
$\{\co_1,...,\co_n\} \subset \Wp$, and the matrix $\An$ in the new
bases $\{\bn_1,...,\bn_p\} \subset \Vp$ and
$\{\cn_1,...,\cn_n\} \subset \Wp$, how are $\Ao$ and $\An$ related
to each other? Express $\An$ in terms of~$\Ao$.
\Ans
\bigskip
\item For a bilinear form $\Wp \times \Vp \rightarrow \reals$,
$(\y,\x) \mapsto \B(\y,\x)$, represented by matrices $\Bo$ and
$\Bn$ in the respective bases, how are $\Bo$ and $\Bn$ related to
each other? Express $\Bn$ in terms of~$\Bo$.
\Ans
\end{enumerate}
\bigskip
%----------------------------------------------------------------
\item {\bf Linear Algebra in Action: Spaces of Polynomials}
Let the space $\Pp$ be the polynomials up to degree~$p$:
\[
\Pp = \bigg\{ f_a(z) =\!\!\!\! \sum_{j=0,...,p} a_j z^j \,:~ a = (a_0,...,a_p)^\top
\in \reals^{(p+1) \times 1} \bigg\}
\]
This is a linear space of dimension~$p+1$. The coefficients $a_j$
($j=0,1,...,p$) form natural coordinates, collected in a column
vector~$a \in \reals^{(p+1) \times 1}$, in the basis of
monomials~$1, z, z^2,~...,~z^p$.
{\bf Convention:} In this exercise it is convenient to let all
indices start at 0 and end at $p$, hence $j$,
$j' = 0, 1, 2, ..., p$, and {\bf not} $1, 2, 3, ..., p+1$.
\begin{enumerate}
\item Consider the linear form on $\Vp = \Pp$ generated by
expectation, $f_a(z) \mapsto \E[f_a(Z)]$, where $Z \sim N(0,1)$.
What are the coordinates/coefficients of this linear form? You
may consult Wikipedia or another online source and use their
notations in your answer. Write the row vector $l$ of coordinates
of the linear form explicitly for polynomials of degree~$8$
($p=8$) using actual numbers.
\Ans
\bigskip
\item Consider the linear map generated by differentiation,
$\Pp \rightarrow \Pp$, $f_a(z) \mapsto \frac{d}{dz} f_a(z)$ (hence
$\Vp = \Wp = \Pp$). What is its matrix~$A$? Describe its
elements~$A_{jj'}$ ($j,j' = 0,1,...,p$) and show the matrix with
explicit numbers for~$p=4$. Is this linear map
non-singular/invertible? What is its rank?
\Ans
% You may uncomment and use the following LaTex code for the matrix:
% \[
% A ~=~
% \left(
% \begin{array}{ccccc}
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ...
% \end{array}
% \right)
% \]
\bigskip
\item Consider the following bilinear form
$(f(z), g(z)) \mapsto \E[ f(Z) g(Z) ]$,
$\Pp \times \Pp \rightarrow \reals$, where again $Z \sim N(0,1)$
and $\Wp = \Vp = \Pp$. Show the matrix for~$p = 4$. You may
re-use what you learned earlier about the linear
form~$f(z) \mapsto \E[f(Z)]$.
\Ans
% You may uncomment and use the following LaTex code for the matrix:
% \[
% B ~=~
% \left(
% \begin{array}{ccccc}
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ...
% \end{array}
% \right)
% \]
\bigskip
\item Consider a shift of the $z$-axis, $\zn = \zo - t$, and
accordingly a shift of the polynomials:
\[
\fo_\ao(\zo) = \fo_\ao(\zn + t) = \fn_\an(\zn).
\]
The substitution $\zo = \zn + t$ produces another polynomial of
the same degree but with argument~$\zn$ and new coefficient
vector~$\an$. The substitution amounts to a change of basis with
a coordinate transformation described by a matrix~$T$.
% LaTex note:
% We distinguish between \zo for old 'z' and \zn for new 'z' (= z-t).
% However, \zo prints as plain 'z' and \zn as 'z' with tilde.
% See the respective \newcommand near the beginning of the file.
% We also need to distinguish between indices for summation in the
% old and new polynomials, the old using j', the new plain j.
Find the elements $T_{jj'}$ of this matrix by expressing the old
basis in terms of the new basis. \\
Hint: You will need binomial coefficients $\binom{...}{...}$. \\
Show the matrix for $p=4$ and $t=1$ with actual numbers.
\Ans
% You may uncomment and use the following LaTex code for the matrix:
% \[
% T ~=~
% \left(
% \begin{array}{ccccc}
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ... \\
% ... & ... & ... & ... & ...
% \end{array}
% \right)
% \]
\end{enumerate}
%----------------------------------------------------------------
\end{enumerate}
\end{document}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%