timeseries talk November 10, 2015


Time Series Analysis and the Noise Model
Mark Bakker, TU Delft

Example: observation well B58C0698 near Weert

In [1]: import numpy as np
        import pandas as pd
        from pandas.tools.plotting import autocorrelation_plot
        import matplotlib.pyplot as plt
        %matplotlib inline
        plt.rcParams['font.size'] = 16.0  # larger font size for slides

        # groundwater head observations
        gwdata = pd.read_csv('B58C0698_0.csv', skiprows=11,
                             parse_dates=['PEIL DATUM TIJD'],
                             index_col='PEIL DATUM TIJD',
                             skipinitialspace=True)
        gwdata.rename(columns={'STAND (MV)': 'h'}, inplace=True)
        gwdata.index.names = ['date']
        gwdata.h *= 0.01      # cm to m
        gwdata.h = gwdata.h   # NAP

        # rain
        rain = pd.read_csv('Heibloem_rain_data.dat', skiprows=4,
                           delim_whitespace=True,
                           parse_dates=['date'], index_col='date')
        rain = rain['1980':]  # cut off everything before 1980

        # evaporation
        evap = pd.read_csv('Maastricht_E_June2015.csv', skiprows=4, sep=';',
                           parse_dates=['DATE'], index_col='DATE')
        evap.rename(columns={'VALUE (m-ref)': 'evap'}, inplace=True)
        evap = evap['1980':]  # cut off everything before 1980
        evap.evap *= 1000

        # recharge = rain minus evaporation
        rain['evap'] = evap.evap
        rain['rech'] = rain.precip - rain.evap

In [2]: dates = pd.date_range('...', '...', freq='30D')  # every 30 days
        ho = np.interp(dates.asi8, gwdata.index.asi8, gwdata.h)  # interpolate every 30 days
        # fix nan values (this is pretty ugly)
        t = np.arange(len(ho))
        ho = np.interp(t, t[~np.isnan(ho)], ho[~np.isnan(ho)])
        # index in rain where the heads are observed
        rain['num'] = range(len(rain))
        N = rain.rech.values
        io = rain.num.loc[dates].values
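The two np.interp calls in In [2] do different jobs: the first resamples the irregularly observed heads onto a regular 30-day grid, using .asi8 to turn the timestamps into plain integers that np.interp can handle; the second fills any remaining NaN values by interpolating over the position index. A minimal sketch of the resampling trick on synthetic data (the dates and values below are hypothetical, not from the talk):

    import numpy as np
    import pandas as pd

    # synthetic irregular observations
    obs = pd.Series([10.2, 10.5, 10.1, 9.8],
                    index=pd.to_datetime(['1980-01-03', '1980-02-20',
                                          '1980-05-11', '1980-08-30']))
    grid = pd.date_range('1980-01-15', '1980-08-15', freq='30D')

    # .asi8 views the timestamps as int64 nanoseconds, so dates become numbers
    h_grid = np.interp(grid.asi8, obs.index.asi8, obs.values)
    print(pd.Series(h_grid, index=grid))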

In [3]: def timeseries_noise(p, N, ho, io=io, tmax=1000, simulate=False):
            A = p[0]
            a = np.exp(p[1])
            d = p[2]
            if len(p) > 3:
                alpha = p[3]
            else:
                alpha = 0.0
            dt = 1                        # time step
            t = np.arange(0, tmax, dt)    # time
            F = A * (1 - np.exp(-t / a))  # step function
            H = F[1:] - F[:-1]            # block function
            h = np.convolve(N, H) + d
            h = h[io]                     # simulated head
            rv = h - ho                   # residuals
            v = rv[1:] - alpha * rv[:-1]  # innovations
            if simulate == 'sim':
                return h
            elif simulate == 'v':
                return v
            else:
                return sum(v**2)

In [4]: Astart = 0.75
        astart = 150          # memory
        dstart = np.mean(ho)  # base
        alphastart = 0.01
        pstart = np.array([Astart, np.log(astart), dstart, alphastart])

In [5]: from scipy.optimize import fmin
        popt1 = fmin(timeseries_noise, pstart[:-1], args=(N, ho))
        h1 = timeseries_noise(popt1, N, ho, simulate='sim')
        v1 = timeseries_noise(popt1, N, ho, simulate='v')

Optimization terminated successfully.
         Current function value:
         Iterations: 64
         Function evaluations: 114

In [6]: popt2 = fmin(timeseries_noise, pstart, args=(N, ho))
        h2 = timeseries_noise(popt2, N, ho, simulate='sim')
        v2 = timeseries_noise(popt2, N, ho, simulate='v')

Optimization terminated successfully.
         Current function value:
         Iterations: 330
         Function evaluations: 559
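The heart of timeseries_noise is the convolution of the recharge series N with the block response H, the daily increments of the exponential step response F. Its effect is easiest to see on a single recharge pulse; the sketch below uses assumed parameter values and is not part of the original talk:

    import numpy as np

    A, a = 0.75, 150.0            # gain and memory in days (assumed values)
    t = np.arange(0, 1000)
    F = A * (1 - np.exp(-t / a))  # step response
    H = F[1:] - F[:-1]            # block response

    N = np.zeros(400)
    N[10] = 1.0                   # one day of unit recharge on day 10
    h = np.convolve(N, H)         # head rise above the base level d
    print(h[8:14].round(5))       # zero before day 10, then a jump that slowly decays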

In [7]: heads = pd.DataFrame(index=dates)
        heads['ho'] = ho
        heads['h1'] = h1
        heads['h2'] = h2
        heads['v1'] = 0.0
        heads.v1[1:] = v1
        heads['v2'] = 0.0
        heads.v2[1:] = v2
        heads['r1'] = heads.h1 - heads.ho
        heads['r2'] = heads.h2 - heads.ho

In [8]: plt.figure(figsize=(15, 6))
        heads.ho.plot(style='go')
        heads.h1.plot(color='b')
        RMSE = np.sqrt(np.mean(heads.r1**2))
        plt.title('Model fit without noise model. RMSE=' + str(RMSE));

In [9]: plt.figure(figsize=(15, 6))
        heads.r1.plot(color='b')
        plt.axhline(0, color='k', lw=2)
        plt.ylim(-0.5, 0.5)
        plt.title('Residuals are correlated');

In [10]: plt.figure(figsize=(15, 6))
         autocorrelation_plot(heads.v1, color='b')
         plt.xlim(0, 36)
         plt.title('Autocorrelation of residuals')

Out[10]: <matplotlib.text.Text at 0x10a1ea050>

So the residuals are correlated

The residual of today is a factor times the residual at the previous measurement plus a remaining error:

$r_i = \alpha r_{i-1} + n_i$

We hope that the remaining error $n_i$ is independent of the remaining error at the previous measurement, $n_{i-1}$. The remaining error is also called the innovation.
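The point of this transformation is that subtracting $\alpha$ times the previous residual recovers the uncorrelated innovations. A self-contained demonstration on synthetic data (my illustration, not from the talk):

    import numpy as np

    rng = np.random.RandomState(0)
    alpha = 0.7
    n = rng.normal(0, 0.1, 500)    # independent innovations
    r = np.zeros(500)
    for i in range(1, 500):        # AR(1) residuals: r_i = alpha * r_{i-1} + n_i
        r[i] = alpha * r[i - 1] + n[i]

    v = r[1:] - alpha * r[:-1]     # recovered innovations

    def lag1(x):                   # lag-1 autocorrelation
        return np.corrcoef(x[:-1], x[1:])[0, 1]

    print(lag1(r))  # close to 0.7
    print(lag1(v))  # close to 0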

In [11]: plt.figure(figsize=(15, 6))
         heads.ho.plot(style='go')
         heads.h2.plot(color='r')
         RMSE = np.sqrt(np.mean(heads.r2**2))
         plt.title('Model fit with noise model. RMSE=' + str(RMSE));

In [12]: plt.figure(figsize=(15, 6))
         # heads.r1.plot(color='b')
         heads.v2.plot(color='r')
         plt.ylim(-0.5, 0.5)
         plt.axhline(0, color='k', lw=2)
         plt.title('Innovations show little correlation');

In [13]: plt.figure(figsize=(15, 6))
         autocorrelation_plot(heads.v2, color='r')
         plt.xlim(0, 12)
         plt.title('Autocorrelation of innovations with noise model');

In [14]: plt.figure(figsize=(15, 6))
         plt.subplot(121)
         heads.v1.hist(color='b', normed=True)
         plt.title('Histogram of residuals w/o noise model')
         plt.xlim(-0.5, 0.5)
         plt.subplot(122)
         heads.v2.hist(color='r', normed=True)
         plt.title('Histogram of innovations w/ noise model')
         plt.xlim(-0.5, 0.5)

Out[14]: (-0.5, 0.5)

In [52]: from scipy.stats import probplot
         plt.figure(figsize=(15, 6))
         ax1 = plt.subplot(121)
         ax2 = plt.subplot(122)
         probplot(heads.v1.values, dist='norm', plot=ax1);
         probplot(heads.v2.values, dist='norm', plot=ax2);
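The probability plots can be backed up with a formal normality test. This is an addition of mine, not part of the talk; it assumes the heads DataFrame built above:

    from scipy.stats import shapiro

    W, pval = shapiro(heads.v2.values)  # Shapiro-Wilk test on the innovations
    print('W = %.3f, p = %.3f' % (W, pval))
    # a large p-value gives no evidence against normality of the innovations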

In [58]: from lmfit import Parameters, minimize, fit_report

         def timeseries_noise(p, N, ho, io=io, tmax=1000, simulate=False):
             vals = p.valuesdict()
             A = vals['A']
             a = vals['a']
             d = vals['d']
             alpha = vals['alpha']
             dt = 1                        # time step
             t = np.arange(0, tmax, dt)    # time
             F = A * (1 - np.exp(-t / a))  # step function
             H = F[1:] - F[:-1]            # block function
             h = np.convolve(N, H) + d
             h = h[io]                     # simulated head
             rv = h - ho                   # residuals
             v = rv[1:] - alpha * rv[:-1]  # innovations
             if simulate == 'sim':
                 return h
             elif simulate == 'v':
                 return v
             else:
                 return v  # lmfit minimizes the sum of squares of this vector

         p = Parameters()
         p.add('A', value=0.75)
         p.add('a', value=150)
         p.add('d', value=np.mean(ho))
         p.add('alpha', value=1)
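Note that the lmfit version of timeseries_noise returns the innovation vector v rather than sum(v**2): lmfit minimizes the sum of squares of the returned array itself. The same criterion can also be written as a plain least-squares problem; the sketch below is mine, not from the talk, uses scipy.optimize.least_squares (available from SciPy 0.17 on), parameterizes the memory a directly rather than through its logarithm, and reuses N, ho, and io from above:

    from scipy.optimize import least_squares

    def innovations(params, N, ho, io, tmax=1000):
        A, a, d, alpha = params
        t = np.arange(0, tmax)
        F = A * (1 - np.exp(-t / a))         # step response
        H = F[1:] - F[:-1]                   # block response
        rv = np.convolve(N, H)[io] + d - ho  # residuals
        return rv[1:] - alpha * rv[:-1]      # innovations

    res = least_squares(innovations, x0=[0.75, 150.0, np.mean(ho), 0.3],
                        args=(N, ho, io))
    print(res.x)  # A, a, d, alpha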

In [59]: # Full model
         p['alpha'].value = 1
         p['alpha'].vary = True
         pout2 = minimize(timeseries_noise, p, args=(N, ho), kws={'io': io})
         print(fit_report(pout2))

[[Fit Statistics]]
    # function evals   = 110
    # data points      = 304
    # variables        = 4
    chi-square         =
    reduced chi-square =
[[Variables]]
    A:     +/- (9.09%) (init= 0.75)
    a:     +/- (9.72%) (init= 150)
    d:     +/- (0.22%) (init= )
    alpha: +/- (5.14%) (init= 1)
[[Correlations]] (unreported correlations are < 0.100)
    C(A, a) =
    C(A, d) =
    C(a, d) =

In [64]: # No noise model
         p['alpha'].value = 0
         p['alpha'].vary = False
         p['d'].vary = True
         p['d'].value = 0
         pout1 = minimize(timeseries_noise, p, args=(N, ho), kws={'io': io})
         print(fit_report(pout1))

[[Fit Statistics]]
    # function evals   = 31
    # data points      = 304
    # variables        = 3
    chi-square         =
    reduced chi-square =
[[Variables]]
    A:     +/- (3.94%) (init= 0.75)
    a:     +/- (4.87%) (init= 150)
    d:     +/- (0.09%) (init= 0)
    alpha: 0 (fixed)
[[Correlations]] (unreported correlations are < 0.100)
    C(A, d) =
    C(A, a) =
    C(a, d) =

In [62]: minimize?

In [ ]:
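A closing usage note (my addition; this matches the lmfit API of the time, but treat it as a sketch): minimize updates the Parameters object in place, so the estimates and their standard errors can be read back from p directly:

    for name, par in p.items():
        print(name, par.value, par.stderr)  # value and standard error per parameter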
