\documentclass[11pt,twoside]{article}
\usepackage{amsmath}
\usepackage{graphicx}
\usepackage{latexsym}
\input{/a/class/6.042/fall99/inputs/macros-course}

\renewcommand{\solution}[1]{}

\begin{document}
\pset{10}{November 12, 1999 (minor revision 12/3)}{start of tutorial, November 18, 1999}

\textbf{Required reading}: F97 Lecture Notes 18,19.  Rosen 4.4, 4.5
(pages 267-271), Handout 63: ``Probabilities in a Dice Game''.

\textbf{Remember to include a \textbf{collaboration statement} on the
front page of your solutions.}

\begin{problems}

\problem(Rosen p. 305, problem 40)

Suppose that $A$ and $B$ are events in a probability space
with $\prob{B}\neq 0$.
Prove the following identity, 
\iffalse
Using the fact that $A = (A \cap B) \cup (A \cap \overline{B})$, prove
that the probability of A is the weighted average of the
probaility of A given B and the probability of A given the complement
of $B, \overline{B}$ where the weights are the
probabilities of B and $\overline{B}$ , respectively.
Namely,
\fi
\[
\prob{A} = \prob{A \mid B} \cdot \prob{B} + \prob{A \mid  \overline{B}} \cdot
\prob{\overline{B}}.
\]
\hint $(A\cap B)$ and $(A\cap \overline{B})$ are disjoint.


\solution{
\begin{eqnarray*}
\lefteqn{\prob{A \mid  B} \cdot \prob{B} + \prob{A \mid  \overline{B}} \cdot \prob{\overline{B}}}\\
& = & \frac{\prob{A \cap B}}{\prob{B}} \prob{B} + \frac{\prob{A \cap \overline{B}}}{\prob{\overline{B}}} \prob{\overline{B}} \\
& = & \prob{A \cap B} + \prob{A \cap \overline{B}} \\
& = & \prob{(A \cap B) \cup (A \cap \overline{B})}  \qquad \mbox{(since $A \cap B$ and $A \cap \overline{B}$ are disjoint)} \\
& = & \prob{A}
\end{eqnarray*}

}
       

\problem
{\bf Tennis Tournaments.} A tennis tournament has 8 players. The
players are assigned randomly to positions in the first round of a
tournament ladder (see Figure~\ref{tennis}).

\begin{figure}[ht]
\centerline{\resizebox{!}{3in}{\includegraphics{H64-tennis.eps}}}
\caption{A Tennis Tournament Ladder}
\label{tennis}
\end{figure}

\bparts
\ppart
Suppose the {\it best player} always defeats everybody else, and
the {\it second-best player} always defeats everybody but the best. What
is the chance that the second-best player makes it to the final round? 

\solution{
In order to make it to the final round, the 
second-best player has to meet
the winner in the final round and not earlier.  
That happens if and only if the best
player and the second-best player start out in different brackets of
four; in other words, if the best player starts in position 1-4 of the
first round, then the second-best must start in positions 5-8, and
vice versa.

Here are two ways to determine the probability.  The first way counts
complete assignments: there are 8 ways to place the best player in the
first round, then 4 ways to place the second-best, then $6!$ ways to
place the remaining players.  Divide by the size of the sample space
($8!$, the total number of ways to assign all eight players with no
constraints) to get $4/7$.

The second way treats the second player's position as a random selection.
The best player can be placed anywhere.  Given the best player's position,
the second-best player will meet the best player in the final round
only if he is placed in 4 of the remaining 7 positions---not 4 out of 8,
because the best player is already occupying one (this is a common error
-- the positions of the players can't be treated like independent coin
flips or dice rolls, because no two players can occupy the same
position).  Since all placements are equally likely, there is a $4/7$
chance of this happening.

}


\ppart Suppose the 8 tennis players are equally good, i.e.,
for any two players A and B, 
\[
\prob{A \mbox{ wins}}=\prob{B \mbox{ wins}}= 1/2,
\]
and the twins Tom and Mot are amongst the 8 players.  What is the chance
that Tom and Mot ever meet in a match during the tournament?

\solution{
The probability that Tom and Mot will meet in the first round is $1/7$
(Tom can be placed anywhere, but given Tom's position, Mot has only 1
choice in 7).

Tom and Mot will meet in the second round if and only if both win
their first-round matches (with probability $1/2 \cdot 1/2$) and both
are in different groups of two but the same group of four (probability
$2/7$).

Tom and Mot will meet in the third round if and only if both win two
matches (with probability $1/2^2 \cdot 1/2^2$) and both are in different
groups of four (probability $4/7$).

Since these events are disjoint, the probability that Tom and Mot meet in
any round is just the sum:
\[
\frac{1}{7} + \frac{1}{2} \cdot \frac{1}{2} \cdot \frac{2}{7} 
+ \frac{1}{4} \cdot \frac{1}{4} \cdot \frac{4}{7} = \frac{1}{4}
\]
}

\eparts


\problem {\bf Testing for Defective Chips and False Positives}

Intel has discovered a flaw in their newest microprocessor which
causes division errors, with catastrophic results. Luckily for them,
it only affects 1 chip in 1000, so they do not have to redesign the
whole thing -- they can just test each chip for the flaw and throw out the
bad ones. 

They have devised a {\it chip test} with  the
following accuracy: in 99 percent of the cases where the flaw is
present, the test will return ``Positive: chip flawed'' in which case the
chip is thrown out, and in 95 percent of the
cases in which the flaw is not present, the test will return ``Negative: chip OK''.

What percentage of the chips that they throw out will actually divide
without any problems (or borrowing terminology from the medical world,
what is the percentage of false positives for the chip-test)?

\solution{

We are given that $\prob{\text{chip is flawed}} = 0.001$ and
$\prob{\text{chip is OK}} = 0.999$.  We also know the following
probabilities:
\begin{eqnarray*}
\prob{\text{Test outcome is ``Positive: chip flawed''} \mid \text{chip is flawed} } &=& 0.99 \\
\prob{\mbox{Test outcome is ``Negative: chip OK''} \mid \text{chip is flawed} } &=& 0.01 \\
\prob{\mbox{Test outcome is ``Positive: chip flawed''} \mid \text{no flaw present}} &=& 0.05\\
\prob{\mbox{Test outcome is ``Negative: chip OK''} \mid \text{no flaw present}} &=& 0.95
\end{eqnarray*}
With these probabilities, we can construct a tree as in Lecture Notes
\#19 (see Figure~\ref{flaw}).  Then, we use the product rule to determine:

\begin{figure}[ht]
\centerline{\resizebox{!}{4in}{\includegraphics{H64-flaw.eps}}}
\caption{Possibility Tree for a Chip Test}
\label{flaw}
\end{figure}

\begin{eqnarray*}
\lefteqn{\prob{\text{chip is flawed} \mid \text{Test outcome was ``Positive: chip flawed''}}}\\
& = & \frac{\prob{\mbox{chip is flawed and test outcome is
``Positive: chip flawed''}}}{\prob{\mbox{Test outcome was ``Positive: chip flawed''}}} \\ 
& = & \frac{0.00099}{0.04995 + 0.00099} \\ 
& = & \frac{0.00099}{0.05094} = 0.019
\end{eqnarray*}
Thus, the percentage of false positives is 98.1\% --
which may come as a surprise!
}

\problem {\bf Craps.} The game of craps is played with a pair of dice.
The player rolls both dice, winning immediately if the total is 7 or 11,
and losing immediately if the total is 2, 3, or 12.  Any other total is
called the ``point.''  If the first roll is a point, then the player
continues rolling repeatedly until rolling the point again (which wins),
or rolling 7 (which loses).
\bparts

\ppart What is the chance of winning on the first roll?

\solution{

As the sample space of elementary events for the first roll we can take
$D^2 = \{(1,1), (1,2),\ldots,(6,6)\}$.  The following table summarizes in
the second column what is the probability of having rolled the total value
in the first column.

\begin{center}
\begin{tabular}{r|r}
Roll & Prob \\
\hline
2 & $1/36$ \\
3 & $2/36$ \\
4 & $3/36$ \\
5 & $4/36$ \\
6 & $5/36$ \\
7 & $6/36$ \\
8 & $5/36$ \\
9 & $4/36$ \\
10 & $3/36$ \\
11 & $2/36$ \\
12 & $1/36$ \\
\end{tabular}
\end{center}

So the probability of rolling a 7 or 11 on the first roll is just
$6/36 + 2/36 = 2/9$.

}

\ppart What is the probability of losing immediately on the first roll?

\solution{$1/36 + 2/36 +1/36 = 1/9$.}

\ppart What is the overall chance of winning?  Explain in your answer how
you model the sample space.  (For this part, it may be useful to refer to
Handout 63 ``Probabilities in a Dice Game.'')

\solution{

The first way to win is by rolling 7 or 11 on the first roll, which we
now know has probability $2/9$.  Otherwise, we must win by rolling a
point: getting 4, 5, 6, 8, 9, or 10 on the first roll, then eventually
rolling the same number again without rolling 7 first.  Since the odds
of rolling each point value are different, let $w_r$ be the probability
of winning with point $r$, for $r \in \{4, 5, 6, 8, 9, 10\}$.

One way to compute $w_r$ is by summing over all possible roll
sequences:
\[
w_r = \prob{\mbox{first roll is $r$}} 
\sum_{i=0}^{\infty} \prob{\mbox{the next $i$ rolls are neither $r$ nor 7}}
                    \prob{\mbox{the final roll is $r$}}.
\]

Let $p_r$ be the probability of rolling $r$ on a single roll (e.g.,
$p_4 = 3/36$). Then the formula for $w_r$ reduces to:
\begin{eqnarray*}
w_r & = & p_r \sum_{i=0}^{\infty} (1-p_r-p_7)^i p_r \\
    & = & p_r p_r \sum_{i=0}^{\infty} (1-p_r-p_7)^i \\
    & = & p_r \frac{p_r}{1-(1-p_r-p_7)}\\
    & = & p_r \frac {p_r}{p_r+p_7}.
\end{eqnarray*}

So the probability of winning with point 4 (for example) is 
\begin{eqnarray*}
w_4 & = & {3/36}\cdot{\frac{3/36}{3/36 +  6/36}} \\
    & = & {3/36}\cdot{3/9} \\
    & = & {1/36}.
\end{eqnarray*}
By similar calculations we get
\begin{eqnarray*}
w_5 & = & {4/36}\cdot \frac{4/36}{4/36 + 6/36} = {2/45}, \\
w_6 & = & {5/36}\cdot \frac{5/36} {5/36 + 6/36} = {25/396}.
\end{eqnarray*}

Note that the chance of rolling 8 is the same as rolling 6, so $w_8=w_6$,
$w_9=w_5$, and $w_{10} = w_4$, so we can simply double the probability
$w_4+w_5+w_6$ to find the total probability of winning with a point.  So
the overall chance of winning is $2/9 + 2\cdot (1/36 + 2/45 + 25/396 )
\approx 0.493$.  In other words, the odds of winning are slightly worse
than 50-50.  A similar series calculation shows that the chance of losing
with point $r$ is
\[
p_r \frac {p_7}{p_r+p_7} = p_r - w_r.
\]

The underlying sample space for the above solution is the set of all
sequences of possible rolls.  For example, if a player rolls a total of 8
on the first roll, 9 on the second, 3 on the third, and 7 on the 4th, then
the game ends with player losing because he rolled a 7, and the
corresponding sequence in the sample space is $8 \cdot 9 \cdot 3 \cdot 7$,
(where $\cdot$ denotes concatenation of numerals, not multiplication of
numbers).  The probability of a sequence $r_1 \cdot r_2 \cdots r_n$ in the
sample space is defined to be $\prod_{i=1}^n p_{r_i}$.  This definition
meets the formal requirements for a probability space because the sum of
the probabilities of all the points is exactly one.  This fact can be
verified by combining the series used above to determine the probabilities
of win or loss with a given point; we omit the details.

Note that it is also logically possible for the game to run forever when a
point is rolled first and neither the point nor a seven comes up in later
rolls.  However, since the other sample points have total probability one,
the ``run forever'' possibility could only be assigned probability zero,
and it could be omitted from a model sample space without affecting
anything.  Perhaps it makes the model more realistic to acknowledge the
``run forever'' possibility by adding a single sample point for this
outcome to which we assign probability zero.

METHOD 2: 

Here's a way of computing $w_r$ that neatly avoids an infinite sample
space and infinite sums.  When you're trying to roll your point, none of
the rolls matter except the last one, when you either roll your point and
win, or roll 7 and lose.  So we can {\it ignore} the intermediate rolls,
and just consider the probability of rolling your point given that you
rolled either your point or 7.  This way there are just seventeen points
in the sample space, namely, five sequences of length one: $7,11,2,3,12$,
and twelve sequences of length two: six winning ones of the form $r \cdot
r$ and six losing ones of the form $r \cdot 7$ for $r = 4,5,6,8,9,10$.
The probability of a length one sample point $r= 7,11,2,3,12$ will be
defined to be $p_r$ as above.  Now the reasoning that leads us to this
sample space implies that we should define the probability for a sample
point of the form $r \cdot r$ to be
\[
p_r \frac{p_r}{p_r+p_7}.
\]
This, of course, is exactly the $w_r$ calculated using the infinite sample
space.  Likewise, the probability of the sample points of the form $r
\cdot 7$ will be
\[
p_r \frac{p_7}{p_r+p_7}.
\]

}


\eparts

\problem

\bparts

\ppart Suppose that you are looking in your desk for a letter from some
time ago.  Your desk has eight drawers, and you assess the probability that
it is in any particular drawer as 10\% (so there is a 20\% chance that it
is not in the desk at all). Suppose now that you start searching
systematically through your desk, one drawer at a time.  In addition,
suppose that you have not found the letter in the first $i$ drawers, where
$0 \leq i \leq 7$.  Let $p_i$ denote the probability that the letter will
be found in the next drawer, and let $q_i$ denote the probability that the
letter will be found in some subsequent drawer (both $p_i$ and $q_i$ are
conditional probabilities, since they are based on the assumption that the
letter is not in the first $i$ drawers).  Show that the $p_i$'s increase
and the $q_i$'s decrease.

\hint Observe that if event $A$ implies event $B$ then $\prob{A \mid B} =
\prob{A}/ \prob{B}$.

\solution{

First, we verify the hint.  Saying that $A$ implies $B$ is the same as
saying that $A \cap B = A$, so
\[
\prob{A \mid B} = \frac{\prob{A \cap B}}{\prob{B}} = \frac{\prob{A}}{\prob{B}}.
\]

Now, we define the following events:
\begin{eqnarray*}
E_i & \eqdef & \text{letter is in drawer } i,\\
N_i & \eqdef & \text{letter is not in drawers } 1, 2, \ldots, i,\\
R_i & \eqdef & \text{letter is in one of drawers } i+1, i+2, \ldots, 8.
\end{eqnarray*}
Now we wish to find
\begin{eqnarray*}
p_i & = & \prob{E_{i+1} \mid N_i}\\
&  = & \frac{\prob{E_{i+1} \cap N_i}}{\prob{N_i}}\\
&  = & \frac{\prob{E_{i+1}}}{\prob{N_i}} \qquad
   \text{(from the hint, since $E_{i+1} \Rightarrow N_i$)}\\
& = & \frac{0.1}{0.2 + 0.1(8-i)} = \frac{1}{10-i},
\end{eqnarray*}
which clearly increases as $i$ increases, since the denominator gets
smaller while the numerator remains constant.

Now,
\begin{eqnarray*}
q_i & = & \prob{R_i \mid N_i}\\
& = & \frac{\prob{R_i \cap N_i}}{\prob{N_i}}\\
& = & \frac{\prob{R_i}}{\prob{N_i}} \qquad\text{(from the hint, since $R_i \Rightarrow N_i$)}\\
& = & \frac{0.1(8-i)}{0.2 + 0.1(8-i)}\\
& = & \frac{8-i}{10-i} = \frac{10-i}{10-i} - \frac{2}{10-i} = 1 - \frac{2}{10-i}
\end{eqnarray*}
which decreases as $i$ increases, since the denominator of the fraction
shrinks, so the fraction becomes greater, so the negation of the fraction
becomes lesser.

}

\ppart
The following data appeared in an article in the Wall Street Journal.
For the ages 20, 30, 40, 50, and 60, the probability of a woman in the
U.S. developing cancer in the next ten years is 0.5\%, 1.2\%, 3.2\%,
6.4\%, and 10.8\%, respectively. At the same set of ages, the
probability of a woman in the U.S. eventually developing cancer is
39.6\%, 39.5\%, 39.1\%, 37.5\%, and 34.2\%, respectively.  This seems
strange, but use the previous part of the problem to give an
explanation for these data.

\eparts


\problem
{\bf Extra credit}
You are given two urns and 50 balls. Half of the balls are white and
half are black. You are asked to distribute the balls in the urns with
no restriction placed on the number of either type in an urn. How
should you distribute the balls in the urns to maximize the
probability of obtaining a white ball if an urn is chosen at random
and a ball drawn out at random? Justify your answer.

\solution{\\ Let $w_1$ be the number of white balls in urn 1, and
$b_1$ be the number of black balls in urn one. Consider the strategy
of $w_1 = b_1$. This gives probability of 1/2 of selecting a white
ball once either urn is selected, and thus 1/2 overall probability of
selecting a white ball. Now consider $w_1 = 1$ and $b_1 = 0$. This
gives probability 1 of selecting a white ball from urn 1, and $24/49$
of selecting a white ball from urn 2 (which holds the remaining 24
white and 25 black balls). Thus, the overall probability of
selecting a white ball is $1/2 \cdot 1 + 1/2 \cdot 24/49 = 73/98 \approx 0.745$,
which is greater than 1/2. So clearly, the optimal strategy must have
$w_1 \neq b_1$. That is, one urn must have more white balls than
black, and the other must have more black than white. In the urn with
more white balls, the best we can do is to have probability 1 of
choosing a white ball. In the other urn, which must have fewer white
balls than black, the best we can do is to have only 1 fewer. So, the
$w_1 = 1, b_1 = 0$ strategy is best for both urns, and therefore best
overall. }
\end{problems}


\section*{Topics for Week of November 15, 1999}

\begin{itemize}
\item
Probability Spaces (Rosen 4.4, 4.5 pp.267-270, F97 Lecture Notes 18,
Handout 63)
\item
Conditional Probability (Rosen 4.5 pp. 270-271, F97 Lecture Notes 19)
\end{itemize}

\end{document}

\iffalse

\problem {\bf Pharmaceutic Food} 

Farmer John's cows occasionally suffer from the famous ``Mad Cow'' disease.
Fortunately, they recover from it fairly nicely, although it may take
up to four weeks.  Half of the cows that contract the disease recover
in two weeks, and the other cows all recover in four weeks.  Farmer
John is trying a new treatment for the disease, a special feed for those
cows that have the disease.  For cows that respond to this treatment, all
of them recover in two weeks.  Unfortunately, only 70\% of the cows
respond to the treatment.

Farmer John's prize cow, Elsie, contracts the disease.  Farmer John, of
course, puts Elsie on the special feed, and Elsie recovers in two weeks.
What is the probability that Elsie is responsive to the medicine?

\problem
In London, half of the days have some rain. The weather forecaster is
correct 2/3 of the time -- i.e. the probability that it rains, given
that she has predicted that it will rain, and the probability that it
does not rain given that she has predicted that it does not rain, are
both equal to 2/3. When rain is forecast, Mr. Pickwick takes his
umbrella. When rain is not forecast, he takes it with probability
1/3. Find: \\
a) the probability that Pickwick has no umbrella, given that it rains. \\
\solution{\\
We construct a tree as in lecture notes \#19 (see figure 3). For part (a), we
wish to find
\[ \prob{\text{no umbrella} \mid \text{rain}} \]
\[ = \frac{\prob{\mbox{no umbrella} \cap \mbox{rain}}}
          {\prob{\mbox{rain}}} \]
\[ = \frac{1/9}{1/2} = 2/9 \]
}
b) the probability that it doesn't rain, given that he brings his umbrella.  \\
\solution{
\[ \prob{\text{no rain} \mid \text{umbrella}} = \frac{\prob{\mbox{no rain} \cap \mbox{umbrella}}}
          {\prob{\mbox{umbrella}}} \]
\[ = \frac{1/6 + 1/9}{1/9 + 1/6 + 1/3 + 1/18} = 5/12 \]
}



\problem \textbf{Sample Space. Events. Combinations of Probabilities.}

\bparts

 ``Concrete Mathematics.'' Graham, Knuth, Patashnik. 
\ppart We are rolling a pair of dice.  Describe the sample set of
elementary events. 

\solution{The set $\Omega$ of elementary events is $D^2 = \{(1,1),
(1,2),\ldots,(6,6)\}$, where $D = \{1, 2, 3, 4, 5, 6\}$ is the set of all
six ways that a given die can land.  Two rolls such as $(1,2)$ and
$(2,1)$ are considered to be distinct; hence this probability space
has a total $6^2 = 36$ elements.}

\ppart Let $\mathcal{A}$ be the event that doubles are thrown. What's
the probability of $\mathcal{A}$?

\solution{$\prob{\omega \in \mathcal{A}} = \sum_{\omega\in
\mathcal{A}}\prob{\omega}$.  Thus, the probability of doubles with fair
dice is $\frac{1}{36} + \frac{1}{36} + \frac{1}{36} + \frac{1}{36} +
\frac{1}{36} + \frac{1}{36} = \frac{1}{6}$}

\ppart Now suppose that not all dice are ``fair.'' Let $\prob{1} = \prob{6}
= \frac{1}{4}; \prob{2} = \prob{3} = \prob{4} = \prob{5} = \frac{1}{8}$. What's
$\prob{\omega \in \mathcal{A}}?$

\solution{$\prob{\omega \in \mathcal{A}} = 4 \times \left(\frac{1}{8}\right)^2 + 2 \times \left(\frac{1}{4}\right)^2 = \frac{3}{16}$}

%http://www.unc.edu/~rowlett/Math81/problem_sets/Set11.html
\ppart Going back to fair dice, which is more likely, rolling 8 with
two dice or rolling 8 with three dice?

\solution{If we roll two dice, there are 36 possible outcomes, of
which 5 (6-2, 5-3, 4-4, 3-5, and 2-6) yield an 8. So the probability
of rolling 8 with two dice is $\frac{5}{36} = 0.1389$. If we roll
three dice, there are $6^3 = 216$ possible outcomes. Exactly 21 of these
outcomes yield 8 (you can list them), so the probability of rolling 8
with three dice is $\frac{21}{216} = 0.0972$. Thus it is more likely
to roll 8 with two dice.}

\eparts 
\problem

Suppose that a resident of Pittsburgh chosen at random is
a Steelers fan with probability $0.90$, that a resident chosen at
random is a Penguins fan with probability $0.75$, and that a resident
Steelers fan chosen at random is a Penguins fan with probability
$0.80$.  Now suppose that a resident is chosen at random from among
those that are {\it not\/} Steelers fans.  What is the probability
that such a resident is a Penguins fan?

Let $P$ represent the event that a resident is a Penguins fan, and $S$ represent the event
that the resident is a Steelers fan.  Then:
 
\begin{eqnarray*}
\prob{P} & = & \prob{P \mid  S} \prob{S} + \prob{P \mid  \overline{S}} \prob{\overline{S}} \\
\prob{P \mid  \overline{S}} & = & \frac{\prob{P} - \prob{P \mid  S} * \prob{S}}{\prob{\overline{S}}} \\
& = & \frac{0.75 - 0.80 \cdot 0.90}{1 - 0.9} \\
& = & 0.3
\end{eqnarray*}

\problem \textbf{Conditional Probability}
% 18.313 Quiz I, problem 4. 

Suppose that 5 men out of 100, and 25 women out of 10,000 are
colorblind.  A colorblind person is chosen at random from a population
with equal numbers of males and females.  What is the probability of
this person being male?

\solution{Let M be the event the person is male, F female,
C colorblind.  We are given $\prob{C \mid M} = \frac{5}{100}$ and
$\prob{C \mid F}=\frac{25}{10,000}.$ Since the population consists of equal
numbers of males and females, $\prob{M} = \prob{F} = \frac{1}{2}$.  So,
\[\prob{M \mid C} = \frac{\prob{C \mid M}\times \prob{M}}{\prob{C}} = \frac{\prob{C \mid M}\times \prob{M}}{\prob{C \mid M}\times \prob{M} + \prob{C \mid F}\times \prob{F}} = \]
\[\frac{(5/100)\times (1/2)}{(5/100)\times (1/2) + (25/10000) \times
(1/2)} = 20/21. \]}

\problem

Suppose that a resident of Pittsburgh chosen at random is
a Steelers fan with probability $0.90$, that a resident chosen at
random is a Penguins fan with probability $0.75$, and that a resident
Steelers fan chosen at random is a Penguins fan with probability
$0.80$.  Now suppose that a resident is chosen at random from among
those that are {\it not\/} Steelers fans.  What is the probability
that such a resident is a Penguins fan?

Let $P$ represent the event that a resident is a Penguins fan, and $S$ represent the event
that the resident is a Steelers fan.  Then:
 
\begin{eqnarray*}
\prob{P} & = & \prob{P  \mid  S} \prob{S} + \prob{P  \mid  \overline{S}} \prob{\overline{S}} \\
\prob{P  \mid  \overline{S}} & = & \frac{\prob{P} - \prob{P  \mid  S} * \prob{S}}{\prob{\overline{S}}} \\
& = & \frac{0.75 - 0.80 \cdot 0.90}{1 - 0.9} \\
& = & 0.3
\end{eqnarray*}
\problem 
% 18.313 Quiz I, problem 3

A population consists of $c_1$ members of type 1, $c_2$ members
of type $2,\cdots ,c_n$ members of type $n$.  A set of $k$ members is
drawn from the population without replacement. 

\bparts

\ppart Find the probability that $i$ members of type 1
were chosen.

\solution{
Let $N_{1i}$ be the event that $i$ members of class 1 were
chosen.  $\prob{N_{1i}} = \frac{\binom{c_1}{i} \times \binom{c_2+\cdots +
c_n}{k-i}}{\binom{c_1+\cdots +c_n}{k}}$.

}

\ppart Find the probability that $i$ members of class 1
and $j$ members of class 2 were chosen.

\solution{Let $N_{2j}$ be the event that $j$ members of class 2 were
chosen.  Then $ \prob{N_{1i} \cap N_{2j}} = \frac{\binom{c_1}{i} \binom{c_2}{j}
\binom{c_3 +\cdots + c_n}{k-i-j}}{\binom{c_1 + \cdots +c_n}{k}} $}

\eparts

\problem
In London, half of the days have some rain. The weather forecaster is
correct 2/3 of the time -- i.e. the probability that it rains, given
that she has predicted that it will rain, and the probability that it
does not rain given that she has predicted that it does not rain, are
both equal to 2/3. When rain is forecast, Mr. Pickwick takes his
umbrella. When rain is not forecast, he takes it with probability
1/3. Find: \\
a) the probability that Pickwick has no umbrella, given that it rains. \\
b) the probability that it doesn't rain, given that he brings his umbrella.  \\
\solution{\\
We construct a tree as in lecture notes \#19. For part (a), we wish to find 
\begin{eqnarray*}
\prob{\text{no umbrella}  \mid \text{rain}}
  & = &\frac{\prob{\mbox{no umbrella} \wedge \mbox{rain}}}{\prob{\mbox{rain}}}\\
& = & \frac{1/9}{1/2} = 2/9.
\end{eqnarray*}

For part (b), we wish to find
\begin{eqnarray*}
\prob{\text{no rain} \mid \text{umbrella}} & = &
 \frac{\prob{\mbox{no rain} \wedge \mbox{umbrella}}}{\prob{\mbox{umbrella}}}\\
& = & \frac{1/6 + 1/9}{1/9 + 1/6 + 1/3 + 1/18} = 5/12
\end{eqnarray*}

}

\problem
{\bf Three-Cornered Duel.} This problem involves old-fashioned duels
by pistol.  [You may want to use a calculator for this problem.]

\bparts
\ppart
A and C are engaged in a duel.  Each takes turns firing
a shot at the other until one of them is hit and falls down. On each
shot, A has probability 0.3 of hitting, and C has probability 0.5. If
A shoots first, what is the chance that A will win this duel?

\solution{
One approach we might take to this problem uses infinite sums, as in
the craps problem above.  In other words, suppose $p_i$ is the
probability that A wins on A's $(i+1)$st turn.  To win on the first turn,
A must hit immediately, so $p_0 = 0.3$.  To win on the second turn, A
must miss, then C must miss, then A must hit, giving $p_1 = 0.7 \cdot
0.5 \cdot 0.3$.
In general, $p_i = (0.7 \cdot 0.5)^i 0.3$.  
Summing this over all $i$ gives:

\begin{eqnarray*}
\sum_{i=0}^\infty p_i & = & \sum_{i=0}^\infty (0.7 \cdot 0.5)^i 0.3 \\
& = & 0.3 \cdot \frac{1}{1 - {0.7 \cdot 0.5}} \\
& \approx & 0.462
\end{eqnarray*}

Another approach relies on the problem's {\it self-similarity.}
Suppose A misses on the first turn, then C misses.  Now the situation
is exactly the same as it was at the beginning: it's A's turn, and
both duellists still have the same probability of hitting.  So the
probability that A wins given that A just missed and C just missed
must be the same as the overall probability that A wins.  If we let
$p$ be the probability that A wins overall, we can write:

\begin{eqnarray*}
\prob{\mbox{A wins eventually}} & = & \prob{\mbox{A hits on first turn}} + \prob{\mbox{A wins after the first turn}} \\
\prob{\mbox{A wins eventually}} & = & \prob{\mbox{A hits on first turn}} \\
& & + \prob{\mbox{A wins eventually}  \mid  \mbox{A missed then C missed}}\prob{\mbox{A missed then C missed}} \\
p & = & 0.3 + 0.7 \cdot 0.5 p \\
(1 - 0.35)p & = & 0.3 \\
p & \approx & 0.462
\end{eqnarray*}

}
\ppart A, B, and C are to fight a ``three-cornered duel.''
They take turns (in the order A,B,C, cyclically) firing at their
choice of target.  Once hit, a duellist falls down, dropping out of
the duel. The duel continues until only one is left standing. They all
know that A has probability 0.3 of hitting a target, and C has
probability 0.5. But B never misses! (Assume that B is rational, and
always eliminates the most serious threat.)  What should A's strategy
be?

\solution{
If B has a chance, B will shoot and kill C in the first round, because
C is the most serious threat.  So A must decide whether to shoot at B
(which might be wise, because B is incredibly dangerous) or at C
(which might be better, because leaving B alive ensures that C won't
get a shot at either of them).

Suppose A aims at B in
the first round.  If A hits B (probability 0.3), and C shoots at and
misses A (probability 0.5), then A and C will be left in a duel
identical to the warmup problem (with probability 0.462 that A wins).
If A misses B (probability 0.7), then B will hit C (probability 1),
and A will get one more chance to hit B before certain annihilation
(probability 0.3).  So the probability that A wins by aiming at B is about
$0.3 \cdot 0.5 \cdot 0.462 + 0.7 \cdot 0.3 = 0.279$.

Now suppose A aims at C in the first round.  If A hits C, then A
definitely loses, since B will unerringly shoot and hit A.  If A
misses C (probability 0.7), then B will take out C and A will get one
chance at B (probability 0.3).  So the probability that A wins by
aiming at C is exactly $0.7 \cdot 0.3 = 0.21$.

But there's a third alternative: A can intentionally miss everybody.
Then B shoots C, and A has one chance to hit B (probability 0.3).  So
the probability that A wins by shooting in the air is exactly $0.3$,
which makes it the best strategy.}
\eparts
\fi
