\documentclass[11pt]{article}
\usepackage{odonnell}
\begin{document}
\noindent {\large {\bf Analysis of Boolean Functions}} {\hfill {\bf CMU 18-859S / 21-801A, Fall 2012}}
\begin{center}
{\sc Problem Set 4} \\
{\bf Due: Monday, Oct.\ 8, beginning of class} \\
\end{center}
\hrule
\bigskip
\noindent {\bf Homework policy}: Please try to work on the homework by yourself; it isn't intended to be too difficult. Questions about the homework or other course material can be asked on Piazza.\\
\hrule
\bigskip
\begin{enumerate}
\item \label{ex:GL} Informally: a ``one-way permutation'' is a bijective function $f \co \F_2^n \to \F_2^n$ which is easy to compute on all inputs but hard to invert on more than a negligible fraction of inputs; a ``pseudorandom generator'' is a function $g \co \F_2^k \to \F_2^m$ for $m > k$ whose output on a random input ``looks unpredictable'' to any efficient algorithm. Goldreich and Levin proposed the following construction of the latter from the former: for $k = 2n$, $m = 2n+1$, define
\[
g(r, s) = (r, f(s), r \cdot s),
\]
where $r, s \in \F_2^n$. When $g$'s input $(\br, \bs)$ is uniformly random then so are the first $2n$ bits of its output (using the fact that $f$ is a bijection). The key to the analysis is showing that the final bit, $\br \cdot \bs$, is highly unpredictable to efficient algorithms even \emph{given} the first $2n$ bits $(\br, f(\bs))$. This is proved by contradiction.
\begin{enumerate}
\item Suppose that an adversary has a deterministic, efficient algorithm~$A$ good at predicting the bit $\br \cdot \bs$:
\[
\Pr_{\br, \bs \sim \F_2^n}[A(\br, f(\bs)) = \br \cdot \bs] \geq \tfrac12 + \gamma.
\]
Show there exists $B \subseteq \F_2^n$ with $|B|/2^n \geq \frac12 \gamma$ such that for all $s \in B$,
\[
\Pr_{\br \sim \F_2^n}[A(\br, f(s)) = \br \cdot s] \geq \tfrac12 + \tfrac12 \gamma.
\]
\item Switching to $\pm 1$ notation in the output, deduce $\wh{\restr{A}{[n]}{f(s)}}(s) \geq \gamma$ for all $s \in B$.
\item Show that the adversary can efficiently compute $s$ given~$f(s)$ (with high probability) for any $s \in B$. If $\gamma$ is nonnegligible this contradicts the assumption that $f$ is ``one-way''. (Hint: use the Goldreich--Levin algorithm.)
\item Deduce the same conclusion even if $A$ is a randomized algorithm.
\end{enumerate}
\item Given $f \btb$ and integer $k \geq 2$ let $A_k = \frac{1}{k}(\W{\geq 1}[f] + \W{\geq 2}[f] + \cdots + \W{\geq k}[f])$, the ``average of the first~$k$ tail weights''. (Recall $\W{\geq \ell}[f] = \sum_{|S| \geq \ell} \wh{f}(S)^2$.) Show that $\NS_{1/k}[f]$ is the same as~$A_k$ up to universal constants. E.g., you might show $\frac{1-e^{-2}}{2} A_k \leq \NS_{1/k}[f]\leq A_k$.
\item \label{ex:l1-conc} Let $f \btR$ and let $\eps > 0$. Show that $f$ is
$\eps$-concentrated on a collection $\calF \subseteq 2^{[n]}$ with $|\calF| \leq \snorm{f}_1^2/\eps$. (Recall the notation from Problem~1 on Homework~3.)
\item For this problem, recall Problem~3 from Homework~3.
\begin{enumerate}
\item Let $H \leq \F_2^n$ be a subspace and let $z \in \F_2^n$. Let $\varphi_{H+z} \ftR$ be the probability density function associated to the uniform probability distribution on the affine subspace $H+z$. Write the Fourier expansion of $\varphi_{H+z}$.
\item For $f \ftR$ and $z \in \F_2^n$, define the function $f^{+z} \ftR$ by $f^{+z}(x) = f(x+z)$. Show that $f^{+z} = \varphi_{\{z\}} \conv f$. (In writing $\varphi_{\{z\}}$ we are treating $\{z\}$ as a $0$-dimensional affine subspace and using the notation of the previous problem.) Show also that $\wh{f^{+z}}(\gamma) = (-1)^{\gamma \cdot z} \wh{f}(\gamma)$.
\item Prove the ``Poisson Summation Formula'',
\[
\Ex_{\bh \sim H}[f(\bh+z)] = \sum_{\gamma \in H^\perp} \chi_\gamma(z) \wh{f}(\gamma).
\]
(Hint: use Plancherel on $\la \vphi_H, f^{+z} \ra$.)
\end{enumerate}
\item Give a direct (Fourier-free) simple proof that if $f \btR$ and $(\bJ \mid \bz)$ is a $\delta$-random restriction then $\E[\Inf_i[\restr{f}{\bJ}{\bz}]] = \delta \Inf_i[f]$ for any $i \in [n]$.
\item \label{ex:baby-switch} In this exercise you will prove the ``Baby Switching Lemma'': If $\phi = T_1 \vee T_2 \vee \cdots \vee T_s$ is a DNF of width~$w \geq 1$ over variables $x_1, \dots, x_n$ and $(\bJ \mid \bz)$ is a $\delta$-random restriction ($0 < \delta < 1/3$), then
\[
\Pr[\restr{\phi}{\bJ}{\bz} \text{ is not a constant function}] \leq 3 \delta w.
\]
\begin{enumerate}
\item \label{ex:baby1} Suppose $R = (J \mid z)$ is a ``bad'' restriction, meaning that $\restr{\phi}{J}{z}$ is not a constant function. Let~$i$ be minimal such that $\restr{(T_i)}{J}{z}$ is neither constantly $\true$ nor $\false$, and let $j$ be minimal such that $x_j$ or $\overline{x}_j$ appears in this restricted term. Show there is a unique restriction $R' = (J \setminus \{j\} \mid z')$ extending~$R$ which doesn't falsify $T_i$.
\item Suppose we enumerate all bad restrictions $R$, and for each we write down the associated~$R'$ as in part~(\ref{ex:baby1}). Show that no restriction is written more than~$w$ times.
\item If $(\bJ \mid \bz)$ is a $\delta$-random restriction and $R$ and $R'$ are as in part~(\ref{ex:baby1}), show that $\Pr[(\bJ \mid \bz) = R] = \frac{2\delta}{1-\delta} \Pr[(\bJ \mid \bz) = R']$.
\item Complete the proof by showing $\Pr[(\bJ \mid \bz) \text{ is bad}] \leq 3\delta w$.
\end{enumerate}
\end{enumerate}
\end{document}