%%%%%%%%%%%%%%%%% DO NOT CHANGE HERE %%%%%%%%%%%%%%%%%%%% {
\documentclass[12pt,letterpaper]{article}
\usepackage{fullpage}
\usepackage[top=2cm, bottom=4.5cm, left=2.5cm, right=2.5cm]{geometry}
\usepackage{amsmath,amsthm,amsfonts,amssymb,amscd}
\usepackage{lastpage}
\usepackage{enumerate}
\usepackage{fancyhdr}
\usepackage{mathrsfs}
\usepackage{xcolor}
\usepackage{graphicx}
\usepackage{listings}
\usepackage{hyperref}
\usepackage[disable]{todonotes}
\usepackage{esvect}

\hypersetup{%
  colorlinks=true,
  linkcolor=blue,
  linkbordercolor={0 0 1}
}

\setlength{\parindent}{0.0in}
\setlength{\parskip}{0.05in}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% }

%%%%%%%%%%%%%%%%%%%%%%%% CHANGE HERE %%%%%%%%%%%%%%%%%%%% {
\newcommand\course{CMPT 727}
\newcommand\semester{Spring 2023}
\newcommand\hwnumber{1}                 % <-- ASSIGNMENT #
%\newcommand\NetIDa{Your Name}           % <-- YOUR NAME
%\newcommand\NetIDb{200XXYYZZ}           % <-- STUDENT ID #
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% }

%%%%%%%%%%%%%%%%% DO NOT CHANGE HERE %%%%%%%%%%%%%%%%%%%% {
\pagestyle{fancyplain}
\headheight 35pt
\chead{\textbf{\Large Assignment \hwnumber}}
\rhead{\course \\ \semester}
\lfoot{}
\cfoot{}
\rfoot{\small\thepage}
\headsep 1.5em
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% }

% Uncomment for questions
% \newcommand\sol[1]{}
% \newcommand\aspace[1]{\vspace{#1}}

% Uncomment for solutions
  \newcommand\sol[1]{Solution: #1}
  \newcommand\aspace[1]{}


\begin{document}

\section*{Problem 1}
For each member of your group, write their name, program, where they're from, and one other fact about them (e.g. hobby, favorite food).





\section*{Problem 2}

%\todo[inline]{This is far too hard. Let's just ask for the probability of exactly 70 baskets with a 0.7 accuracy.}

Shanille O'Keal shoots free throws on a basketball court with 0.7 accuracy. What is the probability she hits exactly 70 of
her first 100 shots? (A mathematical expression that could be plugged into a calculator is sufficient; you do not need to give the number itself.)



\section*{Problem 3}

Consider the following Bayesian Network containing four Boolean random variables: global warming ($A$), clear sky over Vancouver ($B$), ice melting in arctic ($C$), and high temperature in Vancouver ($D$).  Each variable can be either True or False.  The probability that variable $X$ is true is written $P(X)$; the probability that $X$ is false is written $P(\neg X) = 1-P(X)$.

%\todo[inline]{Add:
%(1) Explanation of bayes net: each variable can be either T or F, written A or not-A; what we mean by $P(A)$.  Add explicit p(not A) values. If you can, add a reasonable story about what the variables represent. Maybe something with global warming, clouds and weather? 
%}

\begin{center}
  \includegraphics[width=0.8\textwidth]{2021Spring/A1/BN.PNG}
\end{center}

\begin{enumerate}

\item Compute $P(A \mid C)$

\item Compute $P(\neg A, B, \neg C, D)$


 
 \end{enumerate}


\section*{Problem 4}

%\todo[inline]{This is a great question, but far too hard. Let's use just the last half, after applying Jensen's inequality. That is, prove $1/n \sum \log x_i = \prod \sqrt[n]x_i$. )}

% We can use Jensen’s inequality to prove the arithmetic mean-geometric mean (AM-GM) inequality:
% \begin{equation*}
%     \frac{\sum_{i=1}^n{a_i}}{n} \geq \sqrt[n]{\prod_{i=1}^n{a_i}} 
% \end{equation*}

% Where $a_1, a_2,... ,a_n$ is a collection of $n$ non-negative real numbers.

% Let $f(x) = \log x$,  $\forall x>0$. It's easy to see from the graph of $\log x$ that $f(x)$ is concave. By Jensen’s inequality we have 
%  \begin{equation*}
%       f(\frac{\sum_{i=1}^n{a_i}}{n}) \geq  \frac{\sum_{i=1}^n{f(a_i)}}{n}
%  \end{equation*}
 
%  Hence,
%   \begin{equation*}
%      \log (\frac{\sum_{i=1}^n{a_i}}{n})  \geq  \frac{\sum_{i=1}^n{\log(a_i)}}{n}
%  \end{equation*}

% Complete this proof. That is, 
Prove the following: 
\begin{equation}
    \frac 1 n \sum_{i=1}^n \log(a_i) =   
    \log\left(\prod_{i=1}^n \sqrt[n]{a_i}\right)
\end{equation}


 
 


\section*{Problem 5}
% Suppose someone hands you a stack of $N$ vectors, $\{\vv{x_1}, . . . \vv{x_1} \}$, each of dimension $d$, and an scalar
% observation associated with each one, $\{y_1, . . . , y_N \}$. . Let $\mathbf{Y}$ be
% a vector composed of the stacked observations $\{y_i\}$, and let  $\mathbf{X}$ be the vector whose rows are the
% vectors $\{\vv{x_i}\}$.
% , 

Let $y \in \mathbb{R}^n$ be an $n$-dimensional vector, let $w \in \mathbb{R}^m$ be an $m$-dimensional vector, and let
$\mathbf{X} \in \mathbb{R}^{n \times m}$ be an $n$-by-$m$ matrix.
Define
\begin{equation}
    f(w) = (y - \mathbf{X} w)^\top (y - \mathbf{X} w)
\end{equation}

%\todo[inline]{Improve formatting/spacing}

\begin{enumerate}
    \item Find the multivariate derivative of $f(w)$ with respect to $w$.
    \item Find the vector $w$ that minimizes $f(w)$. Hint: Try finding the value of $w$ such that the derivative equals $\mathbf{0}$, the vector of all zeroes.
\end{enumerate}


For the purposes of this question, you may assume that the inverse of any square matrix exists. 

(You might recognize your solution as the solution to a least-squares regression problem.)




 
 
 \section*{Problem 6}
 If you have any concerns or suggestions about the course format, please write them here.


\end{document}
