%Style Section
\documentclass{amsart}
\usepackage{latexsym}
%\usepackage[dvips]{graphicx}
%Declaration Section
% Theorem-like environments; each gets its own counter (not shared, not
% numbered within sections) since \numberwithin is commented out below.
\newtheorem{Corollary}{Corollary}
\newtheorem{Proposition}{Proposition}
\newtheorem{Lemma}{Lemma}
\newtheorem{Definition}{Definition}
\newtheorem{Theorem}{Theorem}
\newtheorem{Example}{Example}
%Command Section
%\errorcontextlines=0
%\numberwithin{equation}{equation}
% Shorthand macros for recurring notation. Starred \newcommand* gives better
% error localization for these short (no-\par) macros.
\newcommand*{\A}{{\mathbf{a}}}% NOTE(review): named \A but expands to lowercase bold a -- confirm intended
\newcommand*{\B}{{\mathbf{B}}}
\newcommand*{\C}{{\mathbf{C}}}
\newcommand*{\boldsigma}{{\boldsymbol{\sigma}}}
\newcommand*{\s}{\boldsigma}% synonym for \boldsigma (kept for backward compatibility)
\newcommand*{\R}{\mathbb{R}}% the real line
\newcommand*{\E}{{\mathbf{E}}}% expectation operator
\newcommand*{\FF}{{\mathcal{F}}}% presumably a sigma-field -- confirm usage
\newcommand*{\MM}{{\mathcal{M}}}
\newcommand*{\boldgamma}{{\boldsymbol{\gamma}}}
% Proper math operator: upright with correct operator spacing.
% Replaces the obsolete {\rm Var} form (amsart loads amsmath, so this is available).
\DeclareMathOperator{\Var}{Var}
\newcommand*{\half}{\frac{1}{2}}
%Special Environment for Tests and HW Problems
% Numbered "Problem" environment for tests and homework sheets.
% The counter is pre-set to 1 and incremented AFTER each heading is printed,
% so problems are numbered 1, 2, 3, ...
\newcounter{Problem}
\setcounter{Problem}{1}
% BUGFIX: \newenvironment requires BOTH a begin-code and an end-code argument.
% The end-code was commented out, so TeX swallowed the following \begin token
% as the end-definition and compilation failed at \begin{document}.
\newenvironment{Problem}
{\medskip \noindent\textsc{Problem \theProblem.}\addtocounter{Problem}{1}}% \textsc replaces obsolete {\sc ...}
{\smallskip}
\begin{document}
\thispagestyle{empty}
\begin{center}\bfseries
Statistics 930: Probability Theory --- Homework No. 6.
\end{center}
\noindent {\sc Instructions.} You should read the text through Section 3.3; it is important to supplement the lectures with reading. Don't forget
to read the problems too.
The two big theorems we are after now are the continuity theorem
for characteristic functions and L\'evy's inversion formula. These will require sustained arguments that take some effort
to master. You may need
to review the notion of sequential compactness. This is not needed for this HW, but it will be needed for a solid understanding of
the proof of the continuity theorem, which is rather ``concept rich.''
Some of the
problems on this HW review concepts from our earliest days, and the others just start our work with characteristic functions.
\begin{Problem}
Suppose that the random variables $X_i$, $i=1,2,\ldots, n$ are i.i.d. Suppose $0 \leq X_i \leq 4n^2$ with probability one for all $i$. Suppose that
$EX_i \geq n$ for all $i$. Show that
$$
P( X_1+X_2 + \cdots +X_n \geq n^2/2) \geq \frac{1}{20}.
$$
Hint: Is there a lower bound inequality that you might try here? What do you need to compute? How can you use the hypotheses?
\end{Problem}
\begin{Problem} Let $\log^+(x)$ denote $\max (0, \log x)$. Suppose the random variable $X$ satisfies $P(X>0)=1$. Show that
$$
\sum_{n=1}^\infty \frac{1}{n} P(X \geq n) < \infty \quad \text{if and only if} \quad E[\log^+ X] < \infty.
$$
\end{Problem}
\begin{Problem}
Let $\{A_n : n=1,2,\ldots\}$ be a sequence of arbitrary events. Show that if
$$
P(A_n) \rightarrow 0 \quad \text{as } n \rightarrow \infty \quad \text{and} \quad \sum_{n=1}^\infty P(A_n \cap A^c_{n+1}) < \infty
$$
then $P(A_n \ \text{i.o.})=0$. Show by example that the first condition cannot be dropped.
\medskip
\noindent
{\sc Note:} This is a \emph{variation} of the easy Borel-Cantelli I. We slip in a second event which will give the sum an even better
``chance to converge.'' Having seen this example, I'll bet you can think of many more refinements and variations of the Borel-Cantelli I.
The next time you face a problem where ``Borel-Cantelli I'' does not work for you, just think ``Can I invent a version of a `BC Lemma' that will work
here?'' People do this every day.
\end{Problem}
\begin{Problem} Quick Shots:
\begin{enumerate}
\item Given characteristic functions $\phi_1(t)$ and $\phi_2(t)$ we know $\phi_1(t) \phi_2(t)$ is always a characteristic function. Give an
example where $\phi_1(t)$ and $\phi_2(t)$ are characteristic functions and
$\psi(t)=\phi_1(t)/\phi_2(t)$ is also a characteristic function.
\item Give an example where $\phi_1(t)$ and $\phi_2(t)$ are characteristic functions, $\phi_2(t)$ never equals zero, and
$\psi(t)=\phi_1(t)/\phi_2(t)$ is \emph{not} a characteristic function.
\item Give an example of a characteristic function $\phi(t)$ such that $$\limsup_{t \rightarrow \infty} \phi(t) =1/2.$$
Can you find an example where
$$\lim_{t \rightarrow \infty} \phi(t) =1/2?$$
\end{enumerate}
\end{Problem}
\begin{Problem} Show that the function
$$
\phi(t) =|\cos t|
$$
is \emph{not} a characteristic function.
\smallskip
\noindent{\sc Hint:} Assume that $\phi$ is a characteristic function and work toward a contradiction. Here is one path: (1) Calculate the first four derivatives at zero and explain why you have
legitimately found the first four moments. (2) Look at the second and fourth moments. What random variables have such moments? Form a
conjecture and prove it --- perhaps by using the \emph{case of equality} in either Jensen's inequality or the Cauchy-Schwarz inequality. (3)
Now you know the distribution that the random variable $X$ must have if $\phi$ is a characteristic function. (4) Recall that you already know the
characteristic function of such a distribution. (5) Observe that you have your contradiction!
\end{Problem}
\begin{Problem}
Let $A_1, A_2, \ldots, A_n$ be $n$ events in some probability space and let $a_{jk}=P(A_j \cap A_k)$. Show that the $n \times n$ matrix
$A =\{a_{jk} \}$ is a real symmetric positive semi-definite matrix. That is, show that for any real $n$-vector $x$, one has $x^T A x \geq 0$.
In the case of $n=2$, this is just the Cauchy-Schwarz inequality. In the case of $n\geq 3$ it provides a kind of generalization of the Cauchy-Schwarz inequality. In a few lucky cases, this observation has been used to refine an argument where a more automatic
Cauchy-Schwarz had been used.
\end{Problem}
\end{document}