\documentclass[11pt]{article}

% Graphics support: graphicx supersedes the obsolete epsfig package (l2tabu);
% no \epsfig calls appear in this file, so the replacement is safe.
\usepackage{graphicx}
\usepackage{amsfonts}% also loaded implicitly by amssymb below; kept for clarity
\usepackage{amssymb}
\usepackage{amstext}
\usepackage{amscd}
\usepackage{amsmath}
\usepackage{xspace}
\usepackage{theorem}% old-style theorem pkg: provides \theorempreskipamount etc.
\usepackage{float}
%\usepackage{layout}% if you want to see the layout parameters
                     % and now use \layout command in the body

% This is the stuff for normal spacing
\makeatletter
% --- Page layout: 6.5in x 8.25in text block, symmetric margins, no header. ---
 \setlength{\textwidth}{6.5in}
 \setlength{\oddsidemargin}{0in}
 \setlength{\evensidemargin}{0in}
 \setlength{\topmargin}{0.25in}
 \setlength{\textheight}{8.25in}
 \setlength{\headheight}{0pt}
 \setlength{\headsep}{0pt}
 \setlength{\marginparwidth}{59pt}

% Flush-left paragraphs separated by vertical space instead of indentation;
% theorem spacing (theorem package) and display-math spacing set to match.
 \setlength{\parindent}{0pt}
 \setlength{\parskip}{5pt plus 1pt}
 \setlength{\theorempreskipamount}{5pt plus 1pt}
 \setlength{\theorempostskipamount}{0pt}
 \setlength{\abovedisplayskip}{8pt plus 3pt minus 6pt}
 \setlength{\intextsep}{15pt plus 3pt minus 6pt}

% Tighter sectioning via \@startsection{name}{level}{indent}{beforeskip}%
% {afterskip}{style}.  The "plus -1ex minus -.2ex" glue components are
% copied from the article-class defaults (shrinkable/stretchable skips).
 \renewcommand{\section}{\@startsection{section}{1}{0mm}%
                                   {2ex plus -1ex minus -.2ex}%
                                   {1.3ex plus .2ex}%
                                   {\normalfont\Large\bfseries}}%
 \renewcommand{\subsection}{\@startsection{subsection}{2}{0mm}%
                                     {1ex plus -1ex minus -.2ex}%
                                     {1ex plus .2ex}%
                                     {\normalfont\large\bfseries}}%
 \renewcommand{\subsubsection}{\@startsection{subsubsection}{3}{0mm}%
                                     {1ex plus -1ex minus -.2ex}%
                                     {1ex plus .2ex}%
                                     {\normalfont\normalsize\bfseries}}
% Negative afterskip (-1em) makes \paragraph and \subparagraph run-in
% headings: the body text continues on the same line as the title.
 \renewcommand\paragraph{\@startsection{paragraph}{4}{0mm}%
                                    {1ex \@plus1ex \@minus.2ex}%
                                    {-1em}%
                                    {\normalfont\normalsize\bfseries}}
 \renewcommand\subparagraph{\@startsection{subparagraph}{5}{\parindent}%
                                       {2.0ex \@plus1ex \@minus .2ex}%
                                       {-1em}%
                                      {\normalfont\normalsize\bfseries}}
\makeatother

\newcounter{thelecture}% lecture counter (not referenced in this handout)

% Proof-like environments, each closed by a small filled box (tombstone).
% \textbf replaces the obsolete {\bf ...} form (l2tabu); output is unchanged.
\newenvironment{proof}{\textbf{Proof:  }}{\hfill\rule{2mm}{2mm}}
\newenvironment{proofof}[1]{\textbf{Proof of #1:  }}{\hfill\rule{2mm}{2mm}}
\newenvironment{proofofnobox}[1]{\textbf{#1:  }}{}% variant with no end box
\newenvironment{example}{\textbf{Example:  }}{\hfill\rule{2mm}{2mm}}

%\renewcommand{\theequation}{\thesection.\arabic{equation}}
%\renewcommand{\thefigure}{\thesection.\arabic{figure}}

% Theorem-like environments.  All share the "fact" counter, so results are
% numbered in a single sequence (Fact 1, Lemma 2, Theorem 3, ...).
\newtheorem{fact}{Fact}
\newtheorem{lemma}[fact]{Lemma}
\newtheorem{theorem}[fact]{Theorem}
\newtheorem{definition}[fact]{Definition}
\newtheorem{corollary}[fact]{Corollary}
\newtheorem{proposition}[fact]{Proposition}
\newtheorem{claim}[fact]{Claim}
\newtheorem{exercise}[fact]{Exercise}

% math notation
% Blackboard/script letter shorthands; \ensuremath lets them work in both
% text and math mode.
\newcommand{\R}{\ensuremath{\mathbb R}}
\newcommand{\Z}{\ensuremath{\mathbb Z}}
\newcommand{\N}{\ensuremath{\mathbb N}}
\newcommand{\B}{\ensuremath{\mathbb B}}
\newcommand{\F}{\ensuremath{\mathcal F}}
\newcommand{\SymGrp}{\ensuremath{\mathfrak S}}
% Probability/expectation with bold operator and auto-sized brackets,
% e.g. \prob{X > 0} typesets as Pr[X > 0].
\newcommand{\prob}[1]{\ensuremath{\text{{\bf Pr}$\left[#1\right]$}}}
\newcommand{\expct}[1]{\ensuremath{\text{{\bf E}$\left[#1\right]$}}}
% Auto-sized cardinality, ceiling, floor, and angle-bracket delimiters.
\newcommand{\size}[1]{\ensuremath{\left|#1\right|}}
\newcommand{\ceil}[1]{\ensuremath{\left\lceil#1\right\rceil}}
\newcommand{\floor}[1]{\ensuremath{\left\lfloor#1\right\rfloor}}
\newcommand{\ang}[1]{\ensuremath{\langle{#1}\rangle}}
% Upright multi-letter operator names (amsmath \operatorname).
\newcommand{\poly}{\operatorname{poly}}
\newcommand{\polylog}{\operatorname{polylog}}

% anupam's abbreviations
\newcommand{\e}{\epsilon}
\newcommand{\half}{\ensuremath{\frac{1}{2}}}
\newcommand{\junk}[1]{} % discards its argument: quick way to hide a span
\newcommand{\sse}{\subseteq}
\newcommand{\union}{\cup}
\newcommand{\meet}{\wedge}
\newcommand{\dist}[1]{\|{#1}\|_{\text{dist}}}
\newcommand{\hooklongrightarrow}{\lhook\joinrel\longrightarrow}
% Labeled embedding arrow: hook joined to \xrightarrow{#1}.
\newcommand{\embeds}[1]{\;\lhook\joinrel\xrightarrow{#1}\;}
% Tiny note in the margin (margin width set in the layout block above).
\newcommand{\mnote}[1]{\normalmarginpar \marginpar{\tiny #1}}

% Hatted symbols for the Steiner-tree exercise (metric completion).
\newcommand{\That}{\widehat{T}}
\newcommand{\Ghat}{\widehat{G}}
\newcommand{\chat}{\widehat{c}}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Document begins here %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% Homework header: #1 = homework number, #2 = hand-out date, #3 = due date.
% The line-ending %'s stop the macro's source newlines from leaking spurious
% spaces into the output; \textbf replaces the obsolete {\bf ...} (l2tabu).
\newcommand{\hwheadings}[3]{%
\textbf{Approximation Algorithms} \hfill \textbf{Homework #1}\\
\textbf{Date:} #2 \hfill \textbf{Due:} #3 \\
\rule[0.1in]{\textwidth}{0.025in}%
%\thispagestyle{empty}
}

\begin{document}

\hwheadings{2}{Sept 21, 2005}{Sept 28, 2005}

\textbf{Note:} In all these problems, if we ask you to show an
approximation ratio of $\rho$, you should tell us the best
approximation ratio you found, whether it is better or worse than $\rho$.

\begin{enumerate}
\item \textbf{Multiway Cut.} Recall the \textsc{Multiway Cut} problem:
  given a graph $G = (V,E)$ with each edge $e$ having a \emph{capacity}
  $c_e$, and a subset $S = \{s_1, s_2, \ldots, s_k\} \sse V$ of
  \emph{terminals}, find a set of edges $E_{cut} \sse E$ of minimum
  capacity such that each connected component of $G \setminus E_{cut}$
  contains at most one terminal.
  
  We saw a greedy algorithm in class that gave a $2(1 -
  \frac1k)$-approximation for the problem. Here's another ``greedier''
  approximation algorithm that Mohit proposed in class:
  \begin{quote}
    Find an $s_i$-$s_j$ min-cut whose capacity is smallest amongst all
    $s$-$s'$ min-cuts with $s,s' \in S$. Delete all edges in this cut.
    Recurse on the resulting components.
  \end{quote}
  Show that this algorithm is also a $2(1 - \frac1k)$-approximation for
  the problem.
  
\item \textbf{Hitting Set.} Given a set $U$, and a family $\F = \{S_1,
  S_2, \ldots, S_k\}$ of subsets of $U$, a \emph{hitting set} for $\F$
  is a subset $H \sse U$ such that $H \cap S_i \neq \emptyset$ for all
  $1 \leq i \leq k$. The \textsc{(Minimum) Hitting Set} problem seeks to
  find a hitting set of the smallest cardinality.
  
  Show that the \textsc{Set Cover} problem discussed in class has a
  $\rho$-approximation if and only if the \textsc{Hitting Set} problem
  has a $\rho$-approximation.
  
\item \textbf{Better Makespan Minimization.} In class, we saw that
  Graham's List Scheduling algorithm gave a $2$-approximation for the
  problem of scheduling $n$ jobs on $m$ parallel machines to minimize
  the makespan.  Construct an instance on which this algorithm does no
  better than a $2$-approximation.
  
  Chris had proposed another ``greedy'' algorithm: sort all the jobs in
  non-increasing order of sizes, and run List Scheduling with this order
  of jobs. Show that this gives a $1.5$-approximation for the problem.
  
\item \textbf{Steiner Tree.} An instance of the \textsc{Minimum Cost
    Steiner Tree} problem consists of a graph $G = (V,E)$ with positive
  costs $c_e$ for each edge $e \in E$, and a subset $R \sse V$ of
  \emph{required vertices} or \emph{terminals}. The goal is to find a
  minimum cost set of edges $E_{ST} \sse E$ that \emph{spans} the set
  $R$; i.e., any pair of terminals in $R$ has a connecting path in
  $E_{ST}$. Let $|R| = k$.
  
  Since the edge set $E_{ST}$ has minimum cost, it will have no cycles
  and hence be a tree: this is called a minimum cost \emph{Steiner} tree
  on $R$.

  \begin{enumerate}
  \item Let $\Ghat = (V, \binom{V}{2})$ be the complete graph on the same
    set of nodes $V$: set the cost $\chat_e$ of edge $e = \{u,v\}$ to be
    the shortest-path distance between $u$ and $v$ in $G$ according to
    the edge ``lengths'' $c_e$. ($\Ghat$ is called the \emph{metric
      completion} of $G$.) 
    
    For any set $R$ of terminals, show that $T^*$, the optimal Steiner
    tree on $R$ in $G$ has the same cost as $\That^*$, the optimal
    Steiner tree on $R$ in the metric completion $\Ghat$.
    
  \item Let $\Ghat[R]$ be the subgraph of $\Ghat$ induced by the set of
    terminals $R$, and let $\That$ be the minimum spanning tree in
    $\Ghat[R]$ according to the edge costs $\chat_e$.
    
    Show that $\chat(\That) \doteq \sum_{e \in \That} \chat_e$, the cost
    of $\That$, is at most $2(1-\frac1k) \sum_{e \in \That^*} \chat_e$.
    
  \item Show how to use the tree $\That$ to find a tree $T$ in $G$ with
    cost $c(T) = \sum_{e \in T} c_e$ at most $\chat(\That)$. Infer that
    the algorithm you have developed is a $2(1 - \frac1k)$-approximation
    to the minimum cost Steiner tree problem.
  \end{enumerate}

\end{enumerate}

\end{document}
