\documentclass[a4paper]{article}
\usepackage[english]{babel}
\usepackage[utf8]{inputenc}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{graphicx}
\usepackage{bbm}
\usepackage[colorinlistoftodos]{todonotes}
\title{Stochastic Process Notes}
\author{Bryan Arguello}
\date{\today}
\begin{document}
\maketitle
\section{Martingales}
\subsection{Simple Random Walks} A simple random walk (SRW) with up-step probability $p$ is a martingale if $p = \frac{1}{2}$, a submartingale if $p > \frac{1}{2}$, and a supermartingale if $p < \frac{1}{2}$.
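A one-line check, writing $X_n = \sum_{k=1}^n \xi_k$ with i.i.d. steps $P(\xi_k = 1) = p = 1 - P(\xi_k = -1)$:
\[ E[X_{n+1} \mid \mathcal{F}_n] = X_n + E[\xi_{n+1}] = X_n + (2p - 1), \]
and $2p-1$ is zero, positive, or negative according as $p = \frac{1}{2}$, $p > \frac{1}{2}$, or $p < \frac{1}{2}$.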
\subsection{Multi-step property} Just use the tower property multiple times.
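For instance, for a martingale $M$ and two steps:
\[ E[M_{n+2} \mid \mathcal{F}_n] = E\bigl[\, E[M_{n+2} \mid \mathcal{F}_{n+1}] \bigm| \mathcal{F}_n \bigr] = E[M_{n+1} \mid \mathcal{F}_n] = M_n, \]
and induction gives $E[M_{n+k} \mid \mathcal{F}_n] = M_n$ for all $k \geq 0$.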
\subsection{Convex/concave, integrable functions of martingales} These are submartingales/supermartingales, respectively; just use conditional Jensen.
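Explicitly, for convex $\phi$ with each $\phi(M_n)$ integrable:
\[ E[\phi(M_{n+1}) \mid \mathcal{F}_n] \geq \phi\bigl(E[M_{n+1} \mid \mathcal{F}_n]\bigr) = \phi(M_n); \]
for concave $\phi$ the inequality reverses.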
\subsection{Increasing, convex $\phi$ and $M$ submartingale} Then $\phi(M_n)$ is a submartingale; convexity gives the Jensen step, and monotonicity preserves the submartingale order.
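The same chain as above, with one more inequality:
\[ E[\phi(M_{n+1}) \mid \mathcal{F}_n] \geq \phi\bigl(E[M_{n+1} \mid \mathcal{F}_n]\bigr) \geq \phi(M_n), \]
where the last step uses $E[M_{n+1} \mid \mathcal{F}_n] \geq M_n$ and $\phi$ increasing.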
\section{Martingale Transforms}
\subsection{Martingale transforms of predictable processes wrt martingales are martingales}
Write $E[I_{n+1} \vert \mathcal{F}_n] = I_n + H_{n+1} E[X_{n+1} - X_n \vert \mathcal{F}_n]$ and use properties of conditional expectation and filtrations (predictability means $H_{n+1}$ is $\mathcal{F}_n$-measurable, so it pulls out of the conditional expectation).
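Here $I_n$ denotes the martingale transform of $H$ with respect to $X$:
\[ I_n = (H \cdot X)_n = \sum_{m=1}^{n} H_m (X_m - X_{m-1}), \qquad I_0 = 0, \]
so $I_{n+1} = I_n + H_{n+1}(X_{n+1} - X_n)$ and the displayed identity is immediate.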
\subsection{Martingale transforms of predictable, bdd, nonnegative processes wrt submartingales are submartingales}
Do the same as in the previous result; nonnegativity of $H_{n+1}$ preserves the inequality $H_{n+1} E[X_{n+1} - X_n \vert \mathcal{F}_n] \geq 0$.
\section{Stopping Times}
\subsection{Form of stopping time for natural filtrations}
$\mathbbm{1}_{\{T=n\}}$ can be written as $g_n(X_0,\dots,X_n)$ for some measurable $g_n$. To prove this, just use the definition of $\sigma(X_0, \dots, X_n)$-measurability.
\subsection{Predictability of $H_n = \mathbbm{1}_{\{n \leq T\}}$}
Write $\mathbbm{1}_{\{n \leq T\}} = 1 - \mathbbm{1}_{\{T < n\}} = 1 - \mathbbm{1}_{\{T \leq n-1\}}$, which is $\mathcal{F}_{n-1}$-measurable.
\subsection{$X_{n \wedge T}$ is a super/sub/martingale} True since it can be written as $X_0 + (H \cdot X)_n$ with $H_n = \mathbbm{1}_{\{n \leq T\}}$.
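The identity is a telescoping sum:
\[ X_0 + (H \cdot X)_n = X_0 + \sum_{m=1}^{n} \mathbbm{1}_{\{m \leq T\}} (X_m - X_{m-1}) = X_0 + \sum_{m=1}^{n \wedge T} (X_m - X_{m-1}) = X_{n \wedge T}, \]
and $H$ is predictable (previous subsection), bounded, and nonnegative, so the transform results of Section 2 apply.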
\section{Optional Stopping Theorem with bdd stopping times (unconditional version)}
The result says that if $S \leq T < k < \infty$ are stopping times and $M_n$ is a sub/super/martingale, then $E[M_S] (\leq/\geq/=) E[M_T]$, respectively.
\subsection{$M_{n \wedge T} - M_{n \wedge S}$ is a submartingale (follow the same reasoning for super- and martingales)}
$M_{n \wedge T} - M_{n \wedge S}$ can be written as $(H \cdot M)_n$ with $H_n = \mathbbm{1}_{\{S < n \leq T\}}$, a predictable, bounded, nonnegative process; hence it is a submartingale.
\subsection{$E[M_{n \wedge T} - M_{n \wedge S}] \geq 0$}
True because of the previous result: $(H \cdot M)$ is a submartingale starting at $(H \cdot M)_0 = 0$, so its expectation is nondecreasing in $n$. Let $n=k$ to finish: since $S \leq T < k$, $M_{k \wedge S} = M_S$ and $M_{k \wedge T} = M_T$.
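Spelling out the last step, with $H_n = \mathbbm{1}_{\{S < n \leq T\}}$ as above:
\[ E[M_T] - E[M_S] = E[M_{k \wedge T} - M_{k \wedge S}] = E[(H \cdot M)_k] \geq E[(H \cdot M)_0] = 0. \]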
\section{$\mathcal{F}_S$ for $S$ a stopping time}
The general definition is $\mathcal{F}_S = \{ A \in \mathcal{F} \vert A \cap \{S=n\} \in \mathcal{F}_n \text{ for all } n\}$. It is straightforward to show that this is a $\sigma$-algebra.
\subsection{Definition reduction to $\sigma(X_0, X_{1 \wedge S}, \dots, X_{n \wedge S}, \dots)$ in case of natural filtration}
One inclusion is direct: each $X_{n \wedge S}$ is $\mathcal{F}_S$-measurable, since $\{X_{n \wedge S} \in B\} \cap \{S = m\} = \{X_{n \wedge m} \in B\} \cap \{S = m\} \in \mathcal{F}_m$. A sketch of the reverse inclusion: for $A \in \mathcal{F}_S$, $A \cap \{S = n\} \in \mathcal{F}_n$, so $\mathbbm{1}_{A \cap \{S=n\}} = h_n(X_0, \dots, X_n)$ for some measurable $h_n$; on $\{S = n\}$ the path $(X_0, \dots, X_n)$ coincides with the stopped path, and an induction on $n$ shows $\{S = n\}$ itself is measurable for the stopped process, so $\mathbbm{1}_A = \sum_n \mathbbm{1}_{A \cap \{S = n\}}$ is as well.
\subsection{$L = S \mathbbm{1}_A + T \mathbbm{1}_{A^C}$ is a stopping time if $A \in \mathcal{F}_S$ and $S \leq T$}
Homework problem
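For reference, one way to verify the stopping-time property (in discrete time $\{L \leq n\} \in \mathcal{F}_n$ is equivalent to $\{L = n\} \in \mathcal{F}_n$):
\[ \{L \leq n\} = \bigl(A \cap \{S \leq n\}\bigr) \cup \bigl(A^C \cap \{T \leq n\}\bigr) \in \mathcal{F}_n, \]
since $A \in \mathcal{F}_S$ gives $A \cap \{S \leq n\} \in \mathcal{F}_n$, and $A^C \cap \{T \leq n\} = \{T \leq n\} \setminus \bigl(A \cap \{S \leq n\}\bigr)$ because $S \leq T$ forces $\{T \leq n\} \subseteq \{S \leq n\}$.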
\subsection{$E[M_S] \leq E[M_L] \leq E[M_T]$ when $S \leq T < k < \infty$}
This is just the unconditional version of the OS Theorem applied twice, using $S \leq L \leq T$ (which holds pointwise since $S \leq T$).
\subsection{$M_L = M_S \mathbbm{1}_A + M_T \mathbbm{1}_{A^C}$}
True pointwise: on $A$ we have $L = S$, so $M_L = M_S$; on $A^C$ we have $L = T$, so $M_L = M_T$.
\subsection{Conditional OS Theorem: $M_S \leq E[M_T \vert \mathcal{F}_S]$}
Substitute 5.4 into 5.3 to get $E[M_S \mathbbm{1}_A] \leq E[M_T \mathbbm{1}_A]$ then use definition of conditional expectation.
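In detail: substituting gives
\[ E[M_S \mathbbm{1}_A] + E[M_T \mathbbm{1}_{A^C}] = E[M_L] \leq E[M_T] = E[M_T \mathbbm{1}_A] + E[M_T \mathbbm{1}_{A^C}], \]
so $E[M_S \mathbbm{1}_A] \leq E[M_T \mathbbm{1}_A]$ for every $A \in \mathcal{F}_S$; taking $A = \{M_S > E[M_T \vert \mathcal{F}_S]\}$ then forces $P(A) = 0$, i.e.\ $M_S \leq E[M_T \vert \mathcal{F}_S]$ a.s.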
\subsection{$M_S \mathbbm{1}_{\{S < \infty\}}$ is $\mathcal{F}_S$-measurable}
Just write this as $\sum_{i=0}^\infty M_i \mathbbm{1}_{\{S=i\}}$, then examine preimages of rays.
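Checking the definition directly: for a ray $B = (c, \infty)$ and each $n$,
\[ \bigl\{ M_S \mathbbm{1}_{\{S < \infty\}} \in B \bigr\} \cap \{S = n\} = \{M_n \in B\} \cap \{S = n\} \in \mathcal{F}_n, \]
since on $\{S = n\}$ the sum collapses to the single term $M_n$.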
\section{Up-crossings and Up-crossing inequality}
Setup: let $a<b$ and define\\
$T_0 = 0$\\
$T_{2k+1} = \inf \{ n \geq T_{2k} \vert M_n \leq a\}$\\
$T_{2k+2} = \inf \{ n \geq T_{2k+1} \vert M_n \geq b\}$\\
$U(a,b,n) = \max\{k \vert T_{2k} \leq n\}$\\
$U(a,b) = \lim_{n \to \infty} U(a,b,n)$\\
$H_n = \sum_{k=0}^\infty \mathbbm{1}_{\{T_{2k+1} < n \leq T_{2k+2}\}}$
\subsection{Up-crossing inequality: $(H \cdot M)_n \geq (b-a) U(a,b,n) + \text{possible final loss}$}
Each completed up-crossing moves $M$ from a level $\leq a$ to a level $\geq b$, so it contributes at least $b-a$ to the transform; the only other contribution comes from an up-crossing still in progress at time $n$, which may be a loss.
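Concretely, the transform telescopes over crossings:
\[ (H \cdot M)_n = \sum_{k \geq 0} \bigl( M_{T_{2k+2} \wedge n} - M_{T_{2k+1} \wedge n} \bigr), \]
where each completed term is at least $b-a$, terms with $T_{2k+1} \geq n$ vanish, and at most one term (the incomplete final crossing) can be negative.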
\subsection{Up-crossing Theorem: $E[U(a,b,n)] \leq \frac{E[M_n^+] + \lvert a \rvert}{b-a}$ if $M_n$ is a submartingale}
Note that since $0 \leq H_n \leq 1$, the process $1-H$ is predictable, bounded, and nonnegative, so $((1-H) \cdot M)$ is a submartingale starting at $0$; hence $E[((1-H) \cdot M)_n] \geq 0$ and, since $(H \cdot M)_n + ((1-H) \cdot M)_n = M_n - M_0$, we get $E[(H \cdot M)_n] \leq E[M_n - M_0]$. Now replace $M_n$ with $N_n = (M_n - a)^+$, which is a submartingale ($x \mapsto (x-a)^+$ is nonnegative, convex, increasing). Up-crossings of $M$ over $[a,b]$ are up-crossings of $N$ over $[0,b-a]$, and since $N \geq 0$ the final incomplete crossing contributes a nonnegative amount, so $(H \cdot N)_n \geq (b-a) U(a,b,n)$. Combining: $(b-a) E[U(a,b,n)] \leq E[(H \cdot N)_n] \leq E[N_n - N_0] \leq E[N_n] \leq E[M_n^+] + \lvert a \rvert$, which gives the inequality.
\section{Martingale Convergence Theorem}
Assume $M_n$ is a submartingale and $\sup_n E[M_n^+] < \infty$ ($M_n^+$ is bdd in $L^1$).\\
Then there is some $M_\infty \in L^1(\mathcal{F}_\infty)$ s.t. $M_n \to M_\infty$ a.s. where $\mathcal{F}_\infty = \bigvee_n \mathcal{F}_n$
\subsection{If $M_n$ is a submartingale then $L^1$ boundedness of $M_n$ is equivalent to $L^1$ boundedness of $M_n^+$}
One direction is immediate: $M_n^+$ is dominated by $\lvert M_n \rvert$, so $L^1$ boundedness of $M_n$ gives $L^1$ boundedness of $M_n^+$. For the converse, use the submartingale property $E[M_n] \geq E[M_0]$ together with the identity $\lvert M_n \rvert = 2M_n^+ - M_n$.
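Writing the converse out:
\[ E[\lvert M_n \rvert] = 2E[M_n^+] - E[M_n] \leq 2E[M_n^+] - E[M_0], \]
so $\sup_n E[M_n^+] < \infty$ implies $\sup_n E[\lvert M_n \rvert] < \infty$.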
\subsection{$P(U(a,b) < \infty) = 1$} Since $U(a,b,n) \nearrow U(a,b)$, monotone convergence and the Up-crossing Theorem give $E[U(a,b)] = \lim_n E[U(a,b,n)] \leq \sup_n \frac{E[M_n^+] + \lvert a \rvert}{b-a} < \infty$, using that $M_n^+$ is bounded in $L^1$. The result holds because an integrable random variable is a.s. finite.
\subsection{$M_n$ converges to some $M_{\infty} \in \mathcal{F}_{\infty}$ a.s.}
By definition of limsup and liminf,\\
$\{\underline{\text{lim}} M_n < \overline{\text{lim}} M_n\} = \bigcup_{a<b,\, a,b \in \mathbb{Q}} \{U(a,b) = \infty\}$, and each set in this countable union is null by the previous result. Hence\\ $P(\underline{\text{lim}} M_n = \overline{\text{lim}} M_n) = 1$, so $M_n \to M_\infty := \lim_n M_n$ a.s. Furthermore, $M_\infty \in \mathcal{F}_\infty$ since each $M_n \in \mathcal{F}_n \subset \mathcal{F}_\infty$. Fatou can be used to get integrability: $E[\lvert M_\infty \rvert] \leq \underline{\text{lim}}\, E[\lvert M_n \rvert ] < \infty$, where the right side is finite by 7.1.
\subsection{First hitting time at a point for a simple random walk is finite a.s.}
Given $a > 0$, define $T = \inf\{n \geq 0 \vert X_n = a \}$, where $X_n$ is a simple symmetric random walk started at $0$. Since the steps are $\pm 1$, the stopped walk cannot jump over $a$, so $X_{T \wedge n} \leq a$ and hence $X_{T \wedge n}^+ \leq a$ is $L^1$ bounded. Since $X_n$ is a martingale, $X_{T \wedge n}$ is a martingale and in particular a submartingale. By the martingale convergence theorem, $X_{n \wedge T}$ converges almost surely to something that is a.s. finite. A convergent path with $\pm 1$ increments must be eventually constant, and the stopped walk is constant only after time $T$; as a result, \\$P(X_{n \wedge T} \text{ is eventually constant}) = 1$ and hence $P(T < \infty) = 1$.
\subsection{Counterexample showing deficiency in O.S. Theorem with unbounded times}
Note that, in the simple symmetric random walk with $T$ as above, $0 = E[X_0] \neq E[X_T] = a$. The O.S. theorem does not apply since $T$ is unbounded. Additionally note that, though $X_{n \wedge T} \to a$ a.s., convergence in $L^1$ does not occur. Uniform integrability is the missing condition.
\section{Uniform Integrability (UI)}
In these notes, uniform integrability will be with respect to a collection of random variables $\chi$, unless stated otherwise. Recall the definition: $\chi$ is UI if $\sup_{X \in \chi} E[\lvert X \rvert \mathbbm{1}_{\{\lvert X \rvert \geq M\}}] \to 0$ as $M \to \infty$.
\subsection{If $\chi$ is dominated by some $Y \in L^1$ then $\chi$ is UI}
Use domination and the DCT to get $E[\lvert X \rvert \mathbbm{1}_{\{ \lvert X \rvert \geq M \}}] \leq E[\lvert Y \rvert \mathbbm{1}_{\{\lvert Y \rvert \geq M \}}] \to 0$ as $M \nearrow \infty$; the bound is uniform over $X \in \chi$.
\subsection{If $\chi$ is countable then domination is equivalent to $\sup_{X \in \chi} \lvert X \rvert \in L^1$}
For countable $\chi$ the supremum is measurable and is itself a dominating variable, so the equivalence is clear. For uncountable $\chi$ the pointwise supremum need not even be measurable, which is why the statement is restricted to countable collections.
\subsection{If $X_n \to X$ in probability then the following implications hold (giving TFAE)}
\subsection{$X_n$ is UI $\Rightarrow$ $X_n \to X$ in $L^1$; in particular, $X_n, X \in L^1$}
First define $\phi_M(x) = -M \mathbbm{1}_{\{x \leq -M\}} + x \mathbbm{1}_{\{x \in (-M,M)\}} + M \mathbbm{1}_{\{x \geq M\}}$ and observe that $\lvert x - \phi_M(x) \rvert = ( \lvert x \rvert - M)^+ \leq \lvert x \rvert \mathbbm{1}_{\{ \lvert x \rvert \geq M \}}$. Also note that UI of $\{X_n\}$ implies $\{X_n\} \subset L^1$, and $X \in L^1$ follows by Fatou along a subsequence converging a.s. Now just use the estimate $E[\lvert X_n - X \rvert] \leq E[\lvert X_n - \phi_M(X_n) \rvert ] + E[\lvert \phi_M(X_n) - \phi_M(X) \rvert ] + \\E[\lvert \phi_M(X) - X \rvert] \leq E[\lvert X_n \rvert \mathbbm{1}_{\{ \lvert X_n \rvert \geq M \}}] + E[\lvert \phi_M(X_n) - \phi_M(X) \rvert] + E[\lvert X \rvert \mathbbm{1}_{\{\lvert X \rvert \geq M \}}]$ obtained from the triangle inequality and the above observation. Given $\epsilon > 0$, choose $M$ so that the first term is small uniformly in $n$ (by UI) and the third term is small (by $X \in L^1$ and the DCT); the second term then vanishes as $n \to \infty$ by bounded convergence, since $\phi_M$ is continuous and bounded and $X_n \to X$ in probability.
\subsection{$X_n \to X$ in $L^1$ where $X_n$ and $X\in L^1 \Rightarrow E[\lvert X_n \rvert] \to E[\lvert X \rvert]$}
This is just the triangle inequality for $L^1$ norms.
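Explicitly:
\[ \bigl\lvert E[\lvert X_n \rvert] - E[\lvert X \rvert] \bigr\rvert \leq E\bigl[ \bigl\lvert \lvert X_n \rvert - \lvert X \rvert \bigr\rvert \bigr] \leq E[\lvert X_n - X \rvert] \to 0. \]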
\subsection{$E[\lvert X_n \rvert] \to E[\lvert X \rvert] \Rightarrow X_n$ is UI}
One approach: truncate with a continuous $\psi_M$ equal to $x$ on $[0, M-1]$, $0$ on $[M, \infty)$, and linear in between. Then $E[\lvert X_n \rvert \mathbbm{1}_{\{\lvert X_n \rvert \geq M\}}] \leq E[\lvert X_n \rvert] - E[\psi_M(\lvert X_n \rvert)]$. By bounded convergence (using $X_n \to X$ in probability) and the hypothesis, the right side converges to $E[\lvert X \rvert] - E[\psi_M(\lvert X \rvert)] \leq E[\lvert X \rvert \mathbbm{1}_{\{\lvert X \rvert \geq M-1\}}]$, which is small for large $M$ by the DCT; the finitely many remaining small $n$ are handled individually since each $X_n \in L^1$.
\end{document}