Homework 4 Solutions


Problem 1

library(knitr)  # for kable()

# Read in data
school.1 <- read.table("~/dropbox/uw/teaching/cs&ss-stat564/solutions/hw4/school1.dat")[, 1]
school.2 <- read.table("~/dropbox/uw/teaching/cs&ss-stat564/solutions/hw4/school2.dat")[, 1]
school.3 <- read.table("~/dropbox/uw/teaching/cs&ss-stat564/solutions/hw4/school3.dat")[, 1]

# Create functions for obtaining posterior parameters
get.kappa.n <- function(y, kappa.0) {
  kappa.n <- kappa.0 + length(y)
  return(kappa.n)
}

get.mu.n <- function(y, mu.0, kappa.0, kappa.n) {
  mu.n <- (kappa.0*mu.0 + sum(y))/kappa.n
  return(mu.n)
}

get.nu.n <- function(y, nu.0) {
  nu.n <- nu.0 + length(y)
  return(nu.n)
}

get.sigma.sq.n <- function(y, mu.0, kappa.0, nu.0, sigma.sq.0, nu.n, kappa.n) {
  sigma.sq.n <- (nu.0*sigma.sq.0 + sum((y - mean(y))^2) +
                 kappa.0*length(y)*(mean(y) - mu.0)^2/kappa.n)/nu.n
  return(sigma.sq.n)
}

# Set hyperparameters
mu.0 <- 5
sig.sq.0 <- 4
kappa.0 <- 1
nu.0 <- 2

# Compute posterior parameters
kappa.n.1 <- get.kappa.n(school.1, kappa.0)
kappa.n.2 <- get.kappa.n(school.2, kappa.0)
kappa.n.3 <- get.kappa.n(school.3, kappa.0)

mu.n.1 <- get.mu.n(school.1, mu.0, kappa.0, kappa.n.1)
mu.n.2 <- get.mu.n(school.2, mu.0, kappa.0, kappa.n.2)
mu.n.3 <- get.mu.n(school.3, mu.0, kappa.0, kappa.n.3)
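For reference, the helper functions above implement the standard conjugate updates for the normal model with a normal prior on θ given σ² and a gamma prior on the precision 1/σ², in the (μ_0, κ_0, ν_0, σ_0²) parameterization used here:

\[
\kappa_n = \kappa_0 + n, \qquad
\mu_n = \frac{\kappa_0\,\mu_0 + n\,\bar{y}}{\kappa_n}, \qquad
\nu_n = \nu_0 + n,
\]
\[
\sigma^2_n = \frac{1}{\nu_n}\left[\nu_0\,\sigma^2_0 + \sum_{i=1}^{n}\left(y_i - \bar{y}\right)^2 + \frac{\kappa_0\, n}{\kappa_n}\left(\bar{y} - \mu_0\right)^2\right],
\]

where n and ȳ are the sample size and sample mean for the school in question.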

nu.n.1 <- get.nu.n(school.1, nu.0)
nu.n.2 <- get.nu.n(school.2, nu.0)
nu.n.3 <- get.nu.n(school.3, nu.0)

sig.sq.n.1 <- get.sigma.sq.n(school.1, mu.0, kappa.0, nu.0, sig.sq.0, nu.n.1, kappa.n.1)
sig.sq.n.2 <- get.sigma.sq.n(school.2, mu.0, kappa.0, nu.0, sig.sq.0, nu.n.2, kappa.n.2)
sig.sq.n.3 <- get.sigma.sq.n(school.3, mu.0, kappa.0, nu.0, sig.sq.0, nu.n.3, kappa.n.3)

# Take MC samples
S <-   # number of Monte Carlo samples

sigma.sq.inv.samps.1 <- rgamma(S, nu.n.1/2, nu.n.1*sig.sq.n.1/2)
theta.samps.1 <- rnorm(S, mu.n.1, sqrt((1/sigma.sq.inv.samps.1)/kappa.n.1))
tilde.y.samps.1 <- rnorm(S, theta.samps.1, sqrt(1/sigma.sq.inv.samps.1))

sigma.sq.inv.samps.2 <- rgamma(S, nu.n.2/2, nu.n.2*sig.sq.n.2/2)
theta.samps.2 <- rnorm(S, mu.n.2, sqrt((1/sigma.sq.inv.samps.2)/kappa.n.2))
tilde.y.samps.2 <- rnorm(S, theta.samps.2, sqrt(1/sigma.sq.inv.samps.2))

sigma.sq.inv.samps.3 <- rgamma(S, nu.n.3/2, nu.n.3*sig.sq.n.3/2)
theta.samps.3 <- rnorm(S, mu.n.3, sqrt((1/sigma.sq.inv.samps.3)/kappa.n.3))
tilde.y.samps.3 <- rnorm(S, theta.samps.3, sqrt(1/sigma.sq.inv.samps.3))

Part a.

summ <- rbind(c(mean(theta.samps.1), quantile(theta.samps.1, c(0.025, 0.975)),
                mean(sqrt(1/sigma.sq.inv.samps.1)),
                quantile(sqrt(1/sigma.sq.inv.samps.1), c(0.025, 0.975))),
              c(mean(theta.samps.2), quantile(theta.samps.2, c(0.025, 0.975)),
                mean(sqrt(1/sigma.sq.inv.samps.2)),
                quantile(sqrt(1/sigma.sq.inv.samps.2), c(0.025, 0.975))),
              c(mean(theta.samps.3), quantile(theta.samps.3, c(0.025, 0.975)),
                mean(sqrt(1/sigma.sq.inv.samps.3)),
                quantile(sqrt(1/sigma.sq.inv.samps.3), c(0.025, 0.975))))
row.names(summ) <- 1:3
colnames(summ) <- c("$\\mathbb{E}\\left[\\theta \\mid \\boldsymbol y\\right]$",
                    "$\\theta \\mid \\boldsymbol y_{\\left(0.025\\right)}$",
                    "$\\theta \\mid \\boldsymbol y_{\\left(0.975\\right)}$",
                    "$\\mathbb{E}\\left[\\sigma \\mid \\boldsymbol y\\right]$",
                    "$\\sigma \\mid \\boldsymbol y_{\\left(0.025\\right)}$",
                    "$\\sigma \\mid \\boldsymbol y_{\\left(0.975\\right)}$")
kable(summ, digits = 2)
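The rgamma/rnorm block above samples, for each school, from the joint posterior of (θ, σ²) and from the posterior predictive distribution by composition: for each draw s = 1, ..., S,

\[
1/\sigma^{2\,(s)} \sim \mathrm{Gamma}\!\left(\frac{\nu_n}{2},\; \frac{\nu_n\,\sigma^2_n}{2}\right), \qquad
\theta^{(s)} \mid \sigma^{2\,(s)} \sim N\!\left(\mu_n,\; \sigma^{2\,(s)}/\kappa_n\right), \qquad
\tilde{y}^{(s)} \mid \theta^{(s)}, \sigma^{2\,(s)} \sim N\!\left(\theta^{(s)},\; \sigma^{2\,(s)}\right).
\]

The Part a summaries are then Monte Carlo means and empirical 2.5%/97.5% quantiles of the θ draws and of σ = sqrt(σ²) for each school.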

[Table: posterior summaries for the three schools: E[θ | y], the 2.5% and 97.5% posterior quantiles of θ, E[σ | y], and the 2.5% and 97.5% posterior quantiles of σ; the numeric entries were not preserved in the transcription.]

Part b.

comp.theta <- rbind(mean(theta.samps.1 < theta.samps.2 & theta.samps.2 < theta.samps.3),
                    mean(theta.samps.1 < theta.samps.3 & theta.samps.3 < theta.samps.2),
                    mean(theta.samps.2 < theta.samps.1 & theta.samps.1 < theta.samps.3),
                    mean(theta.samps.2 < theta.samps.3 & theta.samps.3 < theta.samps.1),
                    mean(theta.samps.3 < theta.samps.1 & theta.samps.1 < theta.samps.2),
                    mean(theta.samps.3 < theta.samps.2 & theta.samps.2 < theta.samps.1))
row.names(comp.theta) <- c("$\\text{Pr}\\left(\\theta_1 < \\theta_2 < \\theta_3\\right)$",
                           "$\\text{Pr}\\left(\\theta_1 < \\theta_3 < \\theta_2\\right)$",
                           "$\\text{Pr}\\left(\\theta_2 < \\theta_1 < \\theta_3\\right)$",
                           "$\\text{Pr}\\left(\\theta_2 < \\theta_3 < \\theta_1\\right)$",
                           "$\\text{Pr}\\left(\\theta_3 < \\theta_1 < \\theta_2\\right)$",
                           "$\\text{Pr}\\left(\\theta_3 < \\theta_2 < \\theta_1\\right)$")
kable(comp.theta, digits = 2)

Pr(θ_1 < θ_2 < θ_3)   0.01
Pr(θ_1 < θ_3 < θ_2)   0.00
Pr(θ_2 < θ_1 < θ_3)   0.08
Pr(θ_2 < θ_3 < θ_1)   0.67
Pr(θ_3 < θ_1 < θ_2)   0.02
Pr(θ_3 < θ_2 < θ_1)   0.22

Part c.

comp.tilde.y <- rbind(mean(tilde.y.samps.1 < tilde.y.samps.2 & tilde.y.samps.2 < tilde.y.samps.3),
                      mean(tilde.y.samps.1 < tilde.y.samps.3 & tilde.y.samps.3 < tilde.y.samps.2),
                      mean(tilde.y.samps.2 < tilde.y.samps.1 & tilde.y.samps.1 < tilde.y.samps.3),
                      mean(tilde.y.samps.2 < tilde.y.samps.3 & tilde.y.samps.3 < tilde.y.samps.1),
                      mean(tilde.y.samps.3 < tilde.y.samps.1 & tilde.y.samps.1 < tilde.y.samps.2),
                      mean(tilde.y.samps.3 < tilde.y.samps.2 & tilde.y.samps.2 < tilde.y.samps.1))
row.names(comp.tilde.y) <- c("$\\text{Pr}\\left(\\tilde{y}_1 < \\tilde{y}_2 < \\tilde{y}_3\\right)$",
                             "$\\text{Pr}\\left(\\tilde{y}_1 < \\tilde{y}_3 < \\tilde{y}_2\\right)$",
                             "$\\text{Pr}\\left(\\tilde{y}_2 < \\tilde{y}_1 < \\tilde{y}_3\\right)$",
                             "$\\text{Pr}\\left(\\tilde{y}_2 < \\tilde{y}_3 < \\tilde{y}_1\\right)$",
                             "$\\text{Pr}\\left(\\tilde{y}_3 < \\tilde{y}_1 < \\tilde{y}_2\\right)$",
                             "$\\text{Pr}\\left(\\tilde{y}_3 < \\tilde{y}_2 < \\tilde{y}_1\\right)$")
kable(comp.tilde.y, digits = 2)

Pr(ỹ_1 < ỹ_2 < ỹ_3)   0.11
Pr(ỹ_1 < ỹ_3 < ỹ_2)   0.11
Pr(ỹ_2 < ỹ_1 < ỹ_3)   (missing in source)
Pr(ỹ_2 < ỹ_3 < ỹ_1)   0.27
Pr(ỹ_3 < ỹ_1 < ỹ_2)   0.14
Pr(ỹ_3 < ỹ_2 < ỹ_1)   0.20
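Parts b and c estimate each ordering probability as the proportion of Monte Carlo draws satisfying that ordering. As a compact alternative, here is a minimal sketch (assuming the theta.samps.1, theta.samps.2, and theta.samps.3 vectors created above) that tabulates all six orderings in one pass; the same idea applies to the tilde.y.samps.* draws:

# Stack the per-school draws into an S x 3 matrix (one row per Monte Carlo draw)
theta.mat <- cbind(theta.samps.1, theta.samps.2, theta.samps.3)
# Label each draw by the ascending order of the three schools' theta values,
# e.g. "2 < 3 < 1" means theta_2 < theta_3 < theta_1 on that draw
ordering <- apply(theta.mat, 1, function(x) paste(order(x), collapse = " < "))
# Proportion of draws falling in each ordering
round(table(ordering) / nrow(theta.mat), 2)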

Part d.

comp <- rbind(mean(theta.samps.1 > theta.samps.2 & theta.samps.1 > theta.samps.3),
              mean(tilde.y.samps.1 > tilde.y.samps.2 & tilde.y.samps.1 > tilde.y.samps.3))
row.names(comp) <- c("$\\text{Pr}\\left(\\theta_1 > \\theta_2 \\text{ \\& } \\theta_1 > \\theta_3\\right)$",
                     "$\\text{Pr}\\left(\\tilde{y}_1 > \\tilde{y}_2 \\text{ \\& } \\tilde{y}_1 > \\tilde{y}_3\\right)$")
kable(comp, digits = 2)

Pr(θ_1 > θ_2 & θ_1 > θ_3)   0.89
Pr(ỹ_1 > ỹ_2 & ỹ_1 > ỹ_3)   0.47

Problem 2

n.a <- n.b <- 16
y.bar.a <-   # sample mean for group A
s.a <- 7.3
y.bar.b <-   # sample mean for group B
s.b <- 8.1
mu.0 <- 75
sig.sq.0 <- 100
kappa.0 <- nu.0 <- 2^seq(0, 5, by = 1)

kappa.n.a <- kappa.0 + n.a
kappa.n.b <- kappa.0 + n.b
mu.n.a <- (kappa.0*mu.0 + n.a*y.bar.a)/kappa.n.a
mu.n.b <- (kappa.0*mu.0 + n.b*y.bar.b)/kappa.n.b
nu.n.a <- nu.0 + n.a
nu.n.b <- nu.0 + n.b
sig.sq.n.a <- (nu.0*sig.sq.0 + (n.a - 1)*s.a^2 + ((kappa.0*n.a)/kappa.n.a)*(y.bar.a - mu.0)^2)/nu.n.a
sig.sq.n.b <- (nu.0*sig.sq.0 + (n.b - 1)*s.b^2 + ((kappa.0*n.b)/kappa.n.b)*(y.bar.b - mu.0)^2)/nu.n.b

ests <- numeric(length(kappa.0))

# Compute posterior Pr(theta_A < theta_B) under each prior setting
S <-   # number of Monte Carlo samples
for (i in 1:length(ests)) {
  sigma.sq.inv.samps.a <- rgamma(S, nu.n.a[i]/2, nu.n.a[i]*sig.sq.n.a[i]/2)
  theta.samps.a <- rnorm(S, mu.n.a[i], sqrt((1/sigma.sq.inv.samps.a)/kappa.n.a[i]))
  sigma.sq.inv.samps.b <- rgamma(S, nu.n.b[i]/2, nu.n.b[i]*sig.sq.n.b[i]/2)
  theta.samps.b <- rnorm(S, mu.n.b[i], sqrt((1/sigma.sq.inv.samps.b)/kappa.n.b[i]))
  ests[i] <- mean(theta.samps.a < theta.samps.b)
}
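Each entry of ests is the usual Monte Carlo estimate of the posterior probability of interest,

\[
\widehat{\Pr}\left(\theta_A < \theta_B \mid \boldsymbol y_A, \boldsymbol y_B\right)
= \frac{1}{S}\sum_{s=1}^{S} \mathbf{1}\!\left\{\theta_A^{(s)} < \theta_B^{(s)}\right\},
\]

computed once for each prior setting κ_0 = ν_0 ∈ {1, 2, 4, 8, 16, 32}.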

plot(kappa.0, ests,
     xlab = expression(paste(kappa[0], ", ", nu[0], sep = "")),
     ylab = expression(paste("Pr(", theta[A], " < ", theta[B], " | ",
                             y[A], ", ", y[B], ")", sep = "")))

[Figure: Monte Carlo estimates of Pr(θ_A < θ_B | y_A, y_B) plotted against κ_0 = ν_0.]

From the plot, we see that Pr(θ_A < θ_B | y_A, y_B) is not very sensitive to the choice of κ_0 and ν_0 when κ_0 = ν_0. As κ_0 and ν_0 increase, that is, as the prior belief that θ_A = θ_B and σ_A = σ_B becomes more certain, Pr(θ_A < θ_B | y_A, y_B) decreases slowly. Even someone with a strong prior belief that θ_A = θ_B, say κ_0 = ν_0 = 32, would still conclude that θ_A < θ_B after seeing the data.
