%Style Section
\documentclass{amsart}
\usepackage{latexsym, bbm, html, enumerate, amssymb, amsmath}
%\usepackage[dvips]{graphicx}
%Declaration Section
\newtheorem{Corollary}{Corollary}
\newtheorem{Proposition}{Proposition}
\newtheorem{Lemma}{Lemma}
\newtheorem{Definition}{Definition}
\newtheorem{Theorem}{Theorem}
\newtheorem{Example}{Example}
%Command Section
%\errorcontextlines=0
%\numberwithin{equation}{equation}
\newcommand{\A}{{\mathbf{a}}}
\newcommand{\B}{{\mathbf{B}}}
\newcommand{\boldsigma}{{\boldsymbol{\sigma}}}
\newcommand{\s}{{\boldsymbol{\sigma}}}
\newcommand{\R}{\mathbb{R}}
\newcommand{\E}{{\mathbf{E}}}
\newcommand{\FF}{{\mathcal{F}}}
\newcommand{\MM}{{\mathcal{M}}}
\newcommand{\boldgamma}{{\boldsymbol{\gamma}}}
\newcommand{\Var}{{\rm Var}}
\newcommand{\half}{\frac{1}{2}}
\newcommand{\1}{\mathbbm{1}}
%Special for Tests
\newcounter{Problem}
\setcounter{Problem}{1}
\newenvironment{Problem}
{\medskip \noindent{\sc Problem \theProblem.}\addtocounter{Problem}{1}}
{\smallskip}
\begin{document}
\thispagestyle{empty}
\begin{center} \bf
Statistics 930: Probability Theory \\
Homework No. 10
\end{center}
{\sc General Comments:} Please continue with your reading of Durrett. We've covered most of the material in the first three chapters.
We dipped a toe into Chapter 4, but we will not dig into it deeply until we have added martingales to our toolkit. You should give
Chapter 4 a cursory read to familiarize yourself with the topics, but you should start reading Chapter 5 in earnest. It will occupy
the majority
of our remaining time.
\begin{Problem}
Quick shots on uniform integrability.
\begin{itemize}
\item Let $S$ denote the collection of all random variables $X$ such that $$E\bigl(|X| (1+\log \log (X^2+20))\bigr)<10^6.$$ Prove or disprove that this collection
is uniformly integrable. Note: You should refuse to work with this ugly integrand. If you drag along all of that detail, it will just muddy the water. Instead, you should formulate an attractive generalization and prove that generalization!
\item Suppose that $S$ is a collection of uniformly integrable random variables and $S'$ is a collection of uniformly integrable random variables.
Consider the collection $S''$ of all sums $X+Y$ with $X \in S$ and $Y \in S'$. Prove or disprove that $S''$ is a uniformly integrable collection.
\item Suppose that $S=\{Z_1, Z_2, \ldots, Z_n, \ldots\}$ where the $Z_i$ are (possibly dependent) normals with mean $\mu_i$ and variance $\sigma_i^2$.
Suppose all of the
$\mu_i$ and variances $\sigma_i^2$ are bounded between $1$ and $15$. Let $S'$ denote the set of all random variables that can be written as the
product of $10$ or fewer elements from $S$. Prove or disprove that $S'$ is a uniformly integrable collection.
\end{itemize}
\end{Problem}
\begin{Problem}
Suppose that $X_1,X_2, \ldots$ are i.i.d.\ $N(0,1)$ and let $Z_n=S_n/\sqrt{n}$
where as usual $S_n=X_1+X_2+ \cdots + X_n$. It is utterly trivial that $Z_n$ converges in distribution; in fact, $Z_n$ is $N(0,1)$ for all $n$.
Show that $Z_n$ \emph{does not} converge in probability!
Now generalize your discovery as far as you can. You might even aspire to show that for \emph{any} i.i.d.\ (non-constant) sequence $\{X_n\}$
the scaled partial sums $Z_n$, $n=1,2, ...$ fail to converge in probability. Take this as far as you like, but be clear in what you claim
(and prove!).
\end{Problem}
\medskip
\begin{Problem}
Give an example of random variables $X$,$X'$, $Y$, and $Y'$ with the following properties
\begin{itemize}
\item $X$ and $Y$ are independent of each other and $X'$ and $Y'$ are independent of each other
\item $X$ and $X'$ have the same distribution
\item $X+Y$ and $X'+Y'$ have the same distribution
\item $Y$ and $Y'$ do NOT have the same distribution.
\end{itemize}
You may want to remember the advice about translating weird problems into the language of characteristic functions. You may also want
to review what we covered about random variables with characteristic function of P\'olya type.
\end{Problem}
\begin{Problem} Consider a sequence of independent random variables $X_k$, $k=1,2, \ldots$ and assume that they have the symmetric density
on $\R$ that is given by
$$
f(x) =
\begin{cases}
0 \quad &\text{for } x \in [-1,1] \\
|x|^{-3} \quad &\text{for } x \notin [-1,1].
\end{cases}
$$
\medskip
\noindent
(a) Familiarization Checks: confirm that this is a density, $EX_k=0$, and $EX_k^2=\infty$.
\medskip
\noindent
(b) Determine an explicit sequence of increasing positive constants $A_n$ such that
$$
A_n^{-1} S_n \Rightarrow N(0,1) \quad \text{where } S_n=X_1+X_2+\cdots + X_n.
$$
Here, as usual, ``$\Rightarrow$'' denotes convergence in distribution.
\medskip
\noindent
{\sc Hint:} There are several ways to proceed, but you might want to consider an artful truncation of the $X_k$'s. What you want from the truncation
is a triangular array to which Lindeberg's theorem can be applied, but you also need to be able to deduce
the original claim from what you have obtained from Lindeberg.
\end{Problem}
\begin{Problem}
(a) Suppose that $X$ is a {\bf non-negative} random variable for which there exist constants $A$ and $B$ such that
$$
\frac{1}{t} E [\sin (tX) ]\leq A \quad \text{and} \quad \frac{1}{t} P\left(X\geq \frac{1}{t}\right) \leq B \quad \text{for all } t > 0.
$$
Show that $EX < \infty$.
\medskip
\noindent
Hint: First show that Fatou's Lemma implies that it suffices to show that for some constant $c$ that
\begin{equation}\label{e1:hint}
E[X \1 (X \leq c/t)] \quad \text{ is uniformly bounded for all} \, t >0.
\end{equation}
The shape of the sine function on $[0, \pi/2]$ and our usual methods will let you prove the bound \eqref{e1:hint} in a few lines.
Incidentally,
if taken alone, neither of the two conditions of the proposition would suffice to obtain the conclusion. Thus, we have a fine example of the
``principle of combined estimates'' or ``two estimates used together can be \emph{seriously better} than either estimate used alone.''
\smallskip
(b) Suppose that $X$ is a non-negative random variable. Show that
$$
\1(X \geq \frac{2}{t}) \leq \frac{2}{t} \int_0^t (1-\cos (u X)) \, du \quad \text{for all } \omega \in \Omega \text{ and } t>0.
$$
This is just calculus (or logic), but it gives us a very nice way to think about the tail of a non-negative random variable. Obviously we
are just itching to take the expectation of both sides.
\smallskip
(c) Assemble the preceding pieces to show that if $X$ is a {\bf non-negative} random variable with characteristic function $\phi$
and if there exists a constant $C$ such that
$$
\frac{1}{t} |\phi(t) -1| \leq C \quad \text{for all } t>0,
$$
then $E(X) < \infty$.
Just as a point of culture, one should note that the last condition certainly holds if $\phi'(0)$ exists, but we do not have to
assume quite that much.
Also, by a fact mentioned in class (but not proved), we know that we cannot drop the assumption that
$X$ is non-negative --- even if we assume that $\phi'(0)$ exists.
\smallskip
\noindent
{\sc Comment:} This has been broken into reasonably small steps in order to maximize the likelihood that you can close the loops.
Still, to get maximum value out
of the problem, take a moment to imagine that you were just given the last assertion without the intermediate steps.
Try to build a
story for yourself that would have led to the discovery of the proof. Naturally, such a story is not completely honest, but it is still
a useful fiction of discovery and learning. Such ``creation tales'' help to reinforce memory and build technique.
\end{Problem}
\end{document}