Page Menu
Home
c4science
Search
Configure Global Search
Log In
Files
F104945832
intro_stoch.tex
No One
Temporary
Actions
Download File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Subscribers
None
File Metadata
Details
File Info
Storage
Attached
Created
Thu, Mar 13, 14:17
Size
8 KB
Mime Type
text/x-tex
Expires
Sat, Mar 15, 14:17 (2 d)
Engine
blob
Format
Raw Data
Handle
24885084
Attached To
R2653 epfl
intro_stoch.tex
View Options
% Beamer slide deck, 16:9 aspect ratio; visual styling comes from the shared com303 package.
\documentclass[aspectratio=169]{beamer}
% \newcommand* errors on accidental redefinition, unlike plain \def (l2tabu).
\newcommand*{\stylepath}{../styles}
\usepackage{\stylepath/com303}
\begin{document}
% Slide: motivate probabilistic signal models vs. fully-known deterministic ones.
\begin{frame} \frametitle{Deterministic vs. stochastic}
\begin{itemize}
\item deterministic signals are known in advance: $x[n] = \sin(0.2\,n)$
% \text{} (amsmath, loaded by beamer) is the correct way to put words inside math, not \mbox{}.
\item interesting signals are \emph{not} known in advance: $s[n] = \text{what I'm going to say next}$
\item we usually know something, though: $s[n]$ is a speech signal
\item stochastic signals can be described probabilistically
\item can we do signal processing with random signals? Yes!
\end{itemize}
\end{frame}
% Slide: block diagram showing a random process x[n] producing a realization \breve{x}[n].
\begin{frame} \frametitle{Discrete-Time Random Processes}
\begin{center}
% dspBlocks / \BDfilter / \ncline come from the com303 style package.
\begin{dspBlocks}{1}{0.2}
\BDfilter{$x[n]$} & $\breve{x}[n]$
\ncline{->}{1,1}{1,2}
\end{dspBlocks}
\end{center}
\begin{itemize}
% \emph{} is semantic emphasis (and nests correctly), preferred over \textit{}.
\item $x[n]$: sequence of \emph{random variables}
\item $\breve{x}[n]$: \emph{realization} of the process
\end{itemize}
\end{frame}
% Slide: define the coin-toss signal generator (a Bernoulli +/-1 process).
\begin{frame} \frametitle{A simple discrete-time random signal generator}
For each new sample, toss a fair coin:
\[
% \text{} (amsmath) for words inside math, rather than \mbox{}.
\breve{x}[n] = \begin{cases}
+1 & \text{if the outcome of the $n$-th toss is head} \\
-1 & \text{if the outcome of the $n$-th toss is tail}
\end{cases}
\]
\end{frame}
% Slide: four overlays, each drawing a fresh random realization of the coin-toss signal.
\begin{frame} \frametitle{A simple discrete-time random signal generator}
\centering
% \emph{} replaces the obsolete {\em ...}\/ form; the italic correction is automatic.
every time we turn on the generator we obtain a different \emph{realization} of the signal
\vspace{2em}
\begin{figure}
\begin{dspPlot}[height=3cm]{0, 32}{-1.3, 1.3}
\moocStyle
% The four identical \only bodies are intentional: \dspRand re-draws on every overlay,
% so each step shows a different realization.
\only<1>{\dspSignal{\dspRand 0 ge {1} {-1} ifelse}}
\only<2>{\dspSignal{\dspRand 0 ge {1} {-1} ifelse}}
\only<3>{\dspSignal{\dspRand 0 ge {1} {-1} ifelse}}
\only<4>{\dspSignal{\dspRand 0 ge {1} {-1} ifelse}}
\end{dspPlot}
\end{figure}
\end{frame}
% Slide: a full probabilistic characterization requires all finite-dimensional joint densities.
\begin{frame} \frametitle{Discrete-Time Random Processes}
\begin{itemize}
\item infinite-length sequence of \emph{interdependent} random variables
\item a full characterization requires knowing
\[
% \dots after commas (amsmath picks the baseline-dots form); \cdots stays between factors.
f_{x[n_0]x[n_1]\cdots x[n_{k - 1}]}(x_0, x_1, \dots, x_{k - 1})
\]
for \emph{all} possible sets of $k$ indices $\{n_0, n_1, \dots, n_{k - 1}\}$ and for \emph{all} $k \in \mathbb{N}$
\item clearly too much to handle
\end{itemize}
\end{frame}
% Slide: motivate wide-sense stationarity (WSS) as a practical, minimal requirement.
\begin{frame} \frametitle{What do we really need?}
\begin{itemize}
\item averages
\item some form of spectral representation
\item computing the MSE
\end{itemize}
\vspace{3em}
\centering
we can get away with very reasonable requirements: WSS
\end{frame}
% Slide: the two defining properties of wide-sense stationarity.
% (\expt{} is the expectation macro from the com303 style package.)
\begin{frame} \frametitle{Wide-Sense Stationarity}
For WSS random processes we only care about the first two moments:
\begin{itemize}
% Trailing \\ removed: \\ at the end of an \item causes underfull-hbox warnings
% and is never needed inside itemize.
\item mean must be time-invariant: $\expt{x[n]} = m_x$
\item autocorrelation must depend only on time lag: $\expt{x[n]x[m]} = r_x[n-m]$
\end{itemize}
\end{frame}
% Slide: definition of a white-noise process and its common flavors.
\begin{frame} \frametitle{White Processes (White Noise)}
White noise process:
\begin{itemize}
\item zero-mean: $\expt{x[n]} = 0$
\item uncorrelated: $\expt{x[n]x[m]} = \expt{x[n]}\expt{x[m]}$ for $m \neq n$
\item autocorrelation $r_x[n] = \sigma_x^2\delta[n]$
\end{itemize}
\vspace{2em}
% Grammar fix: "According to underlying distribution" -> "Depending on the underlying distribution".
Depending on the underlying distribution:
\begin{itemize}
\item Gaussian white noise
\item uniform white noise
\item \dots
\end{itemize}
\end{frame}
% Slide: the coin-toss process is a concrete example of white noise with unit variance.
\begin{frame} \frametitle{The coin-toss process}
For each new sample, toss a fair coin:
\[
x[n] = \begin{cases}
+1 & \text{if the outcome of the $n$-th toss is head} \\
-1 & \text{if the outcome of the $n$-th toss is tail}
\end{cases}
\]
\vspace{1em}
\begin{itemize}
\item each sample is independent of all others
% pmf written out explicitly: the shorthand \delta(x \pm 1)/2 is ambiguous.
\item each sample value has a 50\% probability: $f_x(x) = \frac{1}{2}\,\delta(x - 1) + \frac{1}{2}\,\delta(x + 1)$
\end{itemize}
\vspace{1em}
\centering
white noise process with $r_x[n] = \delta[n]$
\end{frame}
\intertitle{spectral representation}
% Slide: why naively averaging the DFT of a zero-mean process fails (it averages to zero).
% (\DFT{} and \expt{} are macros from the com303 style package.)
\begin{frame} \frametitle{Averaging the DFT?}
Consider a zero-mean WSS process:
\begin{itemize}
\item the DFT is different for each realization and for realizations of different length
\item the DFT is linear so $\expt{\DFT{x[n]}} = \DFT{\expt{x[n]}} = 0$
\item however the signal ``moves'', so its energy or power must be nonzero
\end{itemize}
\end{frame}
% Slide: coin-toss realizations have infinite energy but unit power.
\begin{frame} \frametitle{Energy and power}
\begin{itemize}
\item the coin-toss process produces realizations with infinite energy:
\[
E_{x} = \lim_{N\rightarrow\infty}\sum_{n=-N}^{N}|\breve{x}[n]|^2 = \lim_{N\rightarrow\infty} (2N+1) = \infty
\]
% Typo fix: "have has finite" -> "have finite".
\item which, however, have finite \emph{power}:
\[
P_{x} = \lim_{N\rightarrow\infty} \frac{1}{2N+1}\sum_{n=-N}^{N} |\breve{x}[n]|^2 = 1
\]
\end{itemize}
\end{frame}
% Slide: taxonomy — finite-energy vs. finite-power signals.
\begin{frame} \frametitle{Energy and Power Signals}
\begin{itemize}
\item energy signals: $\displaystyle \sum_{n=-\infty}^{\infty}|x[n]|^2 < \infty$
\item power signals: $\displaystyle \lim_{N\rightarrow\infty}\frac{1}{2N+1}\sum_{n=-N}^{N}|x[n]|^2 < \infty$
\end{itemize}
\end{frame}
% Slide: examples and spectral interpretation for energy signals.
% (\sinc is defined by the com303 style package.)
\begin{frame} \frametitle{Energy Signals}
\begin{itemize}
\item finite support, $\sinc(n)$, $\alpha^n\,u[n]$ for $|\alpha| < 1$, \dots
\item DTFT is well defined
% \emph{} for semantic emphasis instead of \textit{}.
\item DTFT square magnitude is \emph{energy} distribution in frequency
\end{itemize}
\end{frame}
% Slide: power signals need the Dirac-delta formalism; |DTFT|^2 is not meaningful for them.
\begin{frame} \frametitle{Power Signals}
\begin{itemize}
\item $x[n] = 1$, $u[n]$, $e^{j\omega n}$, $\sin, \cos$, ...
\item DTFT uses the Dirac delta formalism
\item ``DTFT square magnitude'' doesn't make sense!
\end{itemize}
\end{frame}
% Slide: define the PSD as the limit of the normalized squared magnitude of a truncated DTFT.
\begin{frame} \frametitle{Power Spectral Density}
Consider a truncated DTFT
\[
X_N(e^{j\omega}) = \sum_{n = -N}^{N} x[n]e^{-j\omega n}
\]
\vspace{1em}
define the power spectral density of a signal as:
\[
P(e^{j\omega}) = \lim_{N\rightarrow\infty}\,\frac{1}{2N+1}|X_N(e^{j\omega})|^2
\]
\end{frame}
% Slide: PSD examples for constant, complex-exponential, and step signals.
% NOTE(review): \sigma here denotes a fixed frequency (not a standard deviation as on other
% slides), and \tilde{\delta} is presumably the periodized Dirac delta from the course
% notation — confirm against the com303 style package.
\begin{frame} \frametitle{Power Spectral Density}
Examples:
\begin{itemize}
\item $x[n] = a$, $P_x(e^{j\omega}) = a^2\tilde{\delta}(\omega)$
\item $x[n] = ae^{j\sigma n}$, $P_x(e^{j\omega}) = a^2\tilde{\delta}(\omega - \sigma)$
\item $x[n] = au[n]$, $P_x(e^{j\omega}) = (a^2/2)\tilde{\delta}(\omega)$
\end{itemize}
\end{frame}
% Slide: for a random process the PSD is defined through an expectation over realizations.
\begin{frame} \frametitle{Power Spectral Density for WSS Processes}
\centering
For a random process
\[
% \displaystyle removed: \[ \] is already display mode, so it was a no-op.
P_x(e^{j\omega}) = \lim_{N\rightarrow\infty}\frac{1}{2N+1}\expt{|X_N(e^{j\omega})|^2 }
\]
\end{frame}
% Slide: Wiener–Khinchin-style statement — the PSD is the DTFT of the autocorrelation.
\begin{frame} \frametitle{Power Spectral Density for WSS Processes}
\begin{align*}
% \operatorname{} (amsmath) gives correct operator spacing; \mbox{} does not.
P_x(e^{j\omega}) &= \operatorname{DTFT}\{r_x[k]\}
\end{align*}
\end{frame}
% Slide: white noise has a flat PSD equal to its variance.
% NOTE(review): notation here drops the subscripts used earlier ($m_x$, $r_x$, $\sigma_x^2$
% on the white-noise definition slide) — consider harmonizing.
\begin{frame} \frametitle{PSD of white noise}
White noise:
\begin{itemize}
\item $m = 0$
\item $r[k] = \sigma^2 \delta[k]$
\end{itemize}
\begin{align*}
P(e^{j\omega}) &= \sigma^2
\end{align*}
\end{frame}
% Slide: plot of the flat white-noise PSD, with a custom y-tick at sigma^2.
% (dspPlot / \dspFunc / \dspCustomTicks come from the com303 style package.)
\begin{frame} \frametitle{PSD of white noise}
\begin{figure}
\begin{dspPlot}[xtype=freq,yticks=custom,ylabel={$P_w(e^{j\omega})$}]{-1,1}{0,1}
\moocStyle
% 0.7 is only the plotted height; the tick label names it sigma^2.
\dspFunc{0.7}
\dspCustomTicks[axis=y]{0.7 $\sigma^2$}
\end{dspPlot}
\end{figure}
\end{frame}
% Slide: filtering a WSS process shapes its PSD by the filter's squared magnitude response.
\begin{frame} \frametitle{Filtering a Random Process}
\begin{figure}
% \centering replaces the bare \center, which is the unbalanced environment-begin
% command (it opens the center environment without a matching \endcenter).
\centering
\begin{dspBlocks}{1cm}{1cm}
$x[n]$~ & \BDfilter{$h[n]$} & ~$y[n]$
\BDConnH{1}{1}{2}{}
\BDConnH{1}{2}{3}{}
\end{dspBlocks}
\end{figure}
\vspace{2em}
\[
P_y (e^{j\omega})\ =\ \left| H(e^{j\omega}) \right|^2 P_x(e^{j\omega})
\]
\end{frame}
% Slide: closing summary of the stochastic signal processing takeaways.
\begin{frame} \frametitle{Stochastic signal processing}
% Capitalization made consistent: sentence-case lead-in, lowercase items as elsewhere.
Key points:
\begin{itemize}
\item deterministic filters can be used to shape the power distribution of WSS random processes
\item filters designed for deterministic signals still work (in magnitude) in the stochastic case
\item we lose the concept of phase since we don't know the shape of a realization in advance
\end{itemize}
\end{frame}
\end{document}
Event Timeline
Log In to Comment