From 9b79722984b05daa27f03510ff6bb50b967b8d71 Mon Sep 17 00:00:00 2001
From: Anders Olsson
Date: Mon, 24 Feb 2020 11:51:26 +0100
Subject: [PATCH] Fixed a bug, and added a custom solve function (using lapacke).

---
 Cargo.toml                   |  5 ++-
 examples/abcdef.rs           | 66 ++++++++++++++++++++++++++++++++++++
 examples/kickscore-basics.rs |  2 +-
 examples/nba-history.rs      |  2 +-
 src/fitter/recursive.rs      | 36 ++++++++++++--------
 src/lib.rs                   |  1 +
 src/linalg.rs                | 43 +++++++++++++++++++++++
 src/model.rs                 |  7 ++++
 8 files changed, 144 insertions(+), 18 deletions(-)
 create mode 100644 examples/abcdef.rs
 create mode 100644 src/linalg.rs

diff --git a/Cargo.toml b/Cargo.toml
index 4d9cc4c..a57bc42 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,17 +5,20 @@ authors = ["Anders Olsson "]
 edition = "2018"
 
 [dependencies]
+blas = "0.20"
 cblas = "0.2"
 derivative = "1.0"
 expm = "0.1"
+lapack = "0.16"
 lapacke = "0.2"
 ndarray = "0.13"
 ndarray-linalg = { version = "0.12" }
-openblas-src = { version = "0.8", features = ["static"] }
 ordered-float = "1.0"
 rand = "0.7"
 rand_xoshiro = "0.4"
 statrs = "0.12"
 
 [dev-dependencies]
+# openblas-src = { version = "0.8", features = ["static"] }
+intel-mkl-src = "0.5"
 time = "0.2"
diff --git a/examples/abcdef.rs b/examples/abcdef.rs
new file mode 100644
index 0000000..cce7778
--- /dev/null
+++ b/examples/abcdef.rs
@@ -0,0 +1,66 @@
+extern crate intel_mkl_src;
+
+use kickscore as ks;
+
+fn main() {
+    let mut model = ks::BinaryModel::new(ks::BinaryModelObservation::Probit);
+
+    for player in &["A", "B", "C", "D", "E", "F"] {
+        let kernel: Vec<Box<dyn ks::kernel::Kernel>> = vec![
+            Box::new(ks::kernel::Constant::new(1.0)),
+            Box::new(ks::kernel::Matern52::new(0.5, 1.0)),
+        ];
+
+        model.add_item(player, Box::new(kernel));
+    }
+
+    model.observe(&["A"], &["B"], 0.0);
+
+    model.fit();
+
+    for player in &["A", "B", "C", "D", "E", "F"] {
+        let (mu, sigma) = model.item_score(player, 1.25);
+
+        println!("{}: mu={} sigma={}", player, mu, sigma);
+    }
+
+    model.observe(&["C"], &["D"], 0.25);
+
+    model.fit();
+
+    for player in &["A", "B", "C", "D", "E", "F"] {
+        let (mu, sigma) = model.item_score(player, 1.25);
+
+        println!("{}: mu={} sigma={}", player, mu, sigma);
+    }
+
+    model.observe(&["E"], &["F"], 0.50);
+
+    model.fit();
+
+    for player in &["A", "B", "C", "D", "E", "F"] {
+        let (mu, sigma) = model.item_score(player, 1.25);
+
+        println!("{}: mu={} sigma={}", player, mu, sigma);
+    }
+
+    model.observe(&["B"], &["C"], 0.75);
+
+    model.fit();
+
+    for player in &["A", "B", "C", "D", "E", "F"] {
+        let (mu, sigma) = model.item_score(player, 1.25);
+
+        println!("{}: mu={} sigma={}", player, mu, sigma);
+    }
+
+    model.observe(&["D"], &["E"], 1.00);
+
+    model.fit();
+
+    for player in &["A", "B", "C", "D", "E", "F"] {
+        let (mu, sigma) = model.item_score(player, 1.25);
+
+        println!("{}: mu={} sigma={}", player, mu, sigma);
+    }
+}
diff --git a/examples/kickscore-basics.rs b/examples/kickscore-basics.rs
index f452799..32c84dd 100644
--- a/examples/kickscore-basics.rs
+++ b/examples/kickscore-basics.rs
@@ -1,4 +1,4 @@
-extern crate openblas_src;
+extern crate intel_mkl_src;
 
 use kickscore as ks;
 
diff --git a/examples/nba-history.rs b/examples/nba-history.rs
index fc81ba5..cc63a54 100644
--- a/examples/nba-history.rs
+++ b/examples/nba-history.rs
@@ -1,4 +1,4 @@
-extern crate openblas_src;
+extern crate intel_mkl_src;
 
 use std::collections::HashSet;
 use std::fs;
diff --git a/src/fitter/recursive.rs b/src/fitter/recursive.rs
index ee99a9c..2ec9cb5 100644
--- a/src/fitter/recursive.rs
+++ b/src/fitter/recursive.rs
@@ -1,7 +1,6 @@
 use derivative::Derivative;
 use ndarray::prelude::*;
 use ndarray::stack;
-use ndarray_linalg::Inverse;
 
 use crate::kernel::Kernel;
 
@@ -101,22 +100,22 @@ impl Fitter for RecursiveFitter {
         }
 
         // Compute the new transition and noise covariance matrices.
+        let m = self.kernel.order();
+
+        for _ in 0..n_new {
+            self.a.push(Array2::zeros((m, m)));
+            self.q.push(Array2::zeros((m, m)));
+        }
+
         for i in (self.ts.len() - n_new)..self.ts.len() {
             if i == 0 {
                 continue;
             }
 
-            self.a
-                .push(self.kernel.transition(self.ts[i - 1], self.ts[i]));
-            self.q
-                .push(self.kernel.noise_cov(self.ts[i - 1], self.ts[i]));
+            self.a[i - 1] = self.kernel.transition(self.ts[i - 1], self.ts[i]);
+            self.q[i - 1] = self.kernel.noise_cov(self.ts[i - 1], self.ts[i]);
         }
 
-        let m = self.kernel.order();
-
-        self.a.push(Array2::zeros((m, m)));
-        self.q.push(Array2::zeros((m, m)));
-
         self.ts_new.clear();
     }
 
@@ -180,9 +179,17 @@ impl Fitter for RecursiveFitter {
             self.m_s[i] = self.m_f[i].clone();
             self.p_s[i] = self.p_f[i].clone();
         } else {
+            let a = self.p_p[i + 1].clone();
+            let b = self.a[i].dot(&self.p_f[i]);
+            // println!("a={:#?}", a);
+            let g = crate::linalg::solve(a, b);
+            let g = g.t();
+
+            /*
             let g = self.a[i]
                 .dot(&self.p_f[i])
                 .dot(&self.p_p[i + 1].inv().expect("failed to inverse matrix"));
+            */
 
             self.m_s[i] = &self.m_f[i] + &g.dot(&(&self.m_s[i + 1] - &self.m_p[i + 1]));
             self.p_s[i] =
@@ -248,11 +255,10 @@
 
                 // RTS update using the right neighbor.
                 let a = self.kernel.transition(ts[i], self.ts[(j + 1) as usize]);
-                let g = a.dot(&p).dot(
-                    &self.p_p[(j + 1) as usize]
-                        .inv()
-                        .expect("failed to inverse matrix"),
-                );
+                let a = a.dot(&p);
+                let b = self.p_p[(j + 1) as usize].clone();
+                let g = crate::linalg::solve(a, b);
+                let g = g.t();
 
                 ms.push(self.h.dot(
                     &(&m + &g.dot(&(&self.m_s[(j + 1) as usize] - &self.m_p[(j + 1) as usize]))),
diff --git a/src/lib.rs b/src/lib.rs
index 07487ee..f33908a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -5,6 +5,7 @@ mod expm;
 mod fitter;
 mod item;
 pub mod kernel;
+mod linalg;
 mod model;
 pub mod observation;
 mod storage;
diff --git a/src/linalg.rs b/src/linalg.rs
new file mode 100644
index 0000000..fdcb5b0
--- /dev/null
+++ b/src/linalg.rs
@@ -0,0 +1,43 @@
+use ndarray::{prelude::*, DataMut};
+
+fn as_slice_with_layout_mut<T, S, D>(a: &mut ArrayBase<S, D>) -> Option<(&mut [T], lapacke::Layout)>
+where
+    S: DataMut<Elem = T>,
+    D: Dimension,
+{
+    if a.as_slice_mut().is_some() {
+        Some((a.as_slice_mut().unwrap(), lapacke::Layout::RowMajor))
+    } else if a.as_slice_memory_order_mut().is_some() {
+        Some((
+            a.as_slice_memory_order_mut().unwrap(),
+            lapacke::Layout::ColumnMajor,
+        ))
+    } else {
+        None
+    }
+}
+
+pub fn solve(mut a: Array2<f64>, mut b: Array2<f64>) -> Array2<f64> {
+    assert!(a.is_square());
+
+    let n = a.ncols() as i32;
+    let nrhs = b.ncols() as i32;
+    let lda = n;
+    let ldb = nrhs;
+
+    let info;
+    let mut ipiv = vec![0; n as usize];
+
+    let (a_slice, layout) = as_slice_with_layout_mut(&mut a).expect("Matrix `a` not contiguous.");
+    let (b_slice, _) = as_slice_with_layout_mut(&mut b).expect("Matrix `b` not contiguous.");
+
+    unsafe {
+        info = lapacke::dgesv(layout, n, nrhs, a_slice, lda, &mut ipiv, b_slice, ldb);
+    }
+
+    if info != 0 {
+        panic!("info={}", info);
+    }
+
+    b
+}
diff --git a/src/model.rs b/src/model.rs
index 3679ef0..29e1bd3 100644
--- a/src/model.rs
+++ b/src/model.rs
@@ -47,6 +47,13 @@ impl BinaryModel {
         );
     }
 
+    pub fn item_score(&self, name: &str, t: f64) -> (f64, f64) {
+        let id = self.storage.get_id(name);
+        let (ms, vs) = self.storage.item(id).fitter.predict(&[t]);
+
+        (ms[0], vs[0])
+    }
+
     pub fn observe(&mut self, winners: &[&str], losers: &[&str], t: f64) {
        if t < self.last_t {
            panic!("observations must be added in chronological order");
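
Note: a minimal sanity check for the new `solve` helper could look like the sketch below. It is not part of the patch; it assumes a `tests` module is appended to `src/linalg.rs`, and that the `intel-mkl-src` dev-dependency supplies the LAPACK symbols when running `cargo test`. The module name, test name, and the `use intel_mkl_src as _;` linking trick are illustrative choices, not existing code.

    #[cfg(test)]
    mod tests {
        // Pull in the LAPACK backend so dgesv links in the test binary
        // (mirrors the `extern crate intel_mkl_src;` lines in the examples).
        use intel_mkl_src as _;

        use super::solve;
        use ndarray::array;

        #[test]
        fn solve_matches_hand_computed_solution() {
            // A * x = b with A = [[3, 1], [1, 2]] and b = [9, 8]^T has the
            // unique solution x = [2, 3]^T; dgesv returns it in place of b.
            let a = array![[3.0, 1.0], [1.0, 2.0]];
            let b = array![[9.0], [8.0]];

            let x = solve(a, b);

            assert!((x[[0, 0]] - 2.0).abs() < 1e-10);
            assert!((x[[1, 0]] - 3.0).abs() < 1e-10);
        }
    }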