Calculation of R-Sq and R-Sq-Adj
ekoutanov committed Oct 27, 2023
1 parent 618893a commit 5cb1ef5
Showing 10 changed files with 275 additions and 75 deletions.
16 changes: 9 additions & 7 deletions src/bin/backfit.rs
@@ -4,7 +4,6 @@ use std::path::PathBuf;
 
 use anyhow::anyhow;
 use clap::Parser;
-use linregress::fit_low_level_regression_model;
 use strum::{EnumCount, IntoEnumIterator};
 use tracing::{debug, info};
 
@@ -13,7 +12,7 @@ use brumby::data::Factor;
 use brumby::data::Factor::{ActiveRunners, Weight0};
 use brumby::linear::matrix::Matrix;
 use brumby::linear::regression;
-use brumby::linear::regression::Regressor::{Exponent, NilIntercept, Ordinal};
+use brumby::linear::regression::Regressor::{Exp, ZeroIntercept, Ordinal, Intercept};
 
 #[derive(Debug, clap::Parser, Clone)]
 struct Args {
@@ -92,15 +91,18 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     let regressors = vec![
         Ordinal(Weight0),
-        Exponent(Box::new(Ordinal(Weight0)), 2),
-        Exponent(Box::new(Ordinal(Weight0)), 3),
+        Exp(Box::new(Ordinal(Weight0)), 2),
+        Exp(Box::new(Ordinal(Weight0)), 3),
         Ordinal(ActiveRunners),
-        Exponent(Box::new(Ordinal(ActiveRunners)), 2),
-        Exponent(Box::new(Ordinal(ActiveRunners)), 3),
-        NilIntercept
+        Exp(Box::new(Ordinal(ActiveRunners)), 2),
+        Exp(Box::new(Ordinal(ActiveRunners)), 3),
+        ZeroIntercept
     ];
     let model = regression::fit(Factor::Weight1, regressors, &data)?;
     info!("model:\n{:#?}", model);
+    let r_squared = model.predictor.r_squared(&Factor::Weight1, &data);
+    debug!("r_squared: {}", r_squared.unadjusted());
+    debug!("r_squared_adj: {}", r_squared.adjusted());
 
     // let records: Vec<_> = csv.collect();
     // let rows = records.len();
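Editor's note: the new `r_squared(...)` call above reports both statistics. For reference, the conventional definitions are R² = 1 - SS_res/SS_tot and adjusted R² = 1 - (1 - R²)(n - 1)/(n - k - 1), where n is the number of observations and k the number of regressors excluding the intercept. Below is a minimal freestanding sketch of those formulas; the function names are illustrative and this is not brumby's actual implementation.

/// Unadjusted R-squared: the fraction of variance in `observed`
/// that is explained by `predicted`.
fn r_squared(observed: &[f64], predicted: &[f64]) -> f64 {
    let n = observed.len() as f64;
    let mean = observed.iter().sum::<f64>() / n;
    let ss_res: f64 = observed
        .iter()
        .zip(predicted)
        .map(|(y, y_hat)| (y - y_hat).powi(2))
        .sum();
    let ss_tot: f64 = observed.iter().map(|y| (y - mean).powi(2)).sum();
    1.0 - ss_res / ss_tot
}

/// Adjusted R-squared: penalises for the number of regressors `k`
/// (excluding the intercept), so that added terms such as the squared
/// and cubed factors above must earn their keep.
fn r_squared_adj(r_squared: f64, n: usize, k: usize) -> f64 {
    let (n, k) = (n as f64, k as f64);
    1.0 - (1.0 - r_squared) * (n - 1.0) / (n - k - 1.0)
}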
5 changes: 3 additions & 2 deletions src/bin/datadump.rs
@@ -7,11 +7,12 @@ use clap::Parser;
 use racing_scraper::models::EventType;
 use strum::{EnumCount, IntoEnumIterator};
 use tracing::{debug, info};
-use brumby::{data, fit};
+use brumby::{data};
 use brumby::csv::{CsvWriter, Record};
 use brumby::data::{EventDetailExt, Factor, PredicateClosures};
-use brumby::fit::FitOptions;
 use brumby::market::{Market, OverroundMethod};
+use brumby::model::fit;
+use brumby::model::fit::FitOptions;
 use brumby::probs::SliceExt;
 
 const MC_ITERATIONS_TRAIN: u64 = 100_000;
11 changes: 7 additions & 4 deletions src/bin/prices.rs
@@ -6,18 +6,21 @@ use std::time::Instant;
 
 use anyhow::bail;
 use clap::Parser;
+use racing_scraper::models::EventDetail;
 use stanza::renderer::console::Console;
 use stanza::renderer::Renderer;
 use stanza::style::{HAlign, MinWidth, Separator, Styles};
 use stanza::table::{Col, Row, Table};
 use tracing::{debug, info};
 
-use brumby::{fit, market, mc, selection};
-use brumby::data::{download_by_id, EventDetailExt, RaceSummary, read_from_file};
+use brumby::{market, mc, selection};
+use brumby::data::{download_by_id, EventDetailExt, RaceSummary};
 use brumby::display::DisplaySlice;
-use brumby::fit::FitOptions;
+use brumby::file::FromJsonFile;
 use brumby::linear::matrix::Matrix;
 use brumby::market::{Market, OverroundMethod};
+use brumby::model::fit;
+use brumby::model::fit::FitOptions;
 use brumby::opt::GradientDescentOutcome;
 use brumby::print::{DerivedPrice, tabulate_derived_prices, tabulate_prices, tabulate_probs, tabulate_values};
 use brumby::selection::{Selection, Selections};
@@ -250,7 +253,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
 
 async fn read_race_data(args: &Args) -> anyhow::Result<RaceSummary> {
     if let Some(path) = args.file.as_ref() {
-        let event_detail = read_from_file(path)?;
+        let event_detail = EventDetail::from_json_file(path)?;
         return Ok(event_detail.summarise());
     }
     if let Some(&id) = args.download.as_ref() {
45 changes: 21 additions & 24 deletions src/data.rs
@@ -1,14 +1,13 @@
-use std::fs;
-use std::fs::File;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 
 use chrono::{DateTime, Utc};
 use ordinalizer::Ordinal;
 use racing_scraper::get_racing_data;
 use racing_scraper::models::{EventDetail, EventType};
 use strum_macros::{Display, EnumCount, EnumIter};
-use tracing::trace;
 
+use crate::file;
+use crate::file::FromJsonFile;
 use crate::linear::matrix::Matrix;
 use crate::linear::regression::AsIndex;
 
@@ -65,12 +64,9 @@ pub struct RaceSummary {
     pub prices: Matrix<f64>,
 }
 
-pub fn read_from_file(path: impl AsRef<Path>) -> anyhow::Result<EventDetail> {
-    let file = File::open(path)?;
-    trace!("reading from {file:?}");
-    let event_detail = serde_json::from_reader(file)?;
-    Ok(event_detail)
-}
+// pub fn read_from_file(path: impl AsRef<Path>) -> Result<EventDetail, io::Error> {
+//     file::read_json(path)
+// }
 
 #[derive(Debug)]
 pub enum Predicate {
@@ -124,30 +120,31 @@ pub fn read_from_dir(
     closurelike: impl Into<PredicateClosure>,
 ) -> anyhow::Result<Vec<EventDetail>> {
     let mut files = vec![];
-    recurse_dir(path.as_ref().into(), &mut files)?;
+    // recurse_dir(path.as_ref().into(), &mut files)?;
+    file::recurse_dir(path.as_ref().into(), &mut files, &mut |ext| ext == "json")?;
     let mut races = Vec::with_capacity(files.len());
     let mut closure = closurelike.into();
     for file in files {
-        let race = read_from_file(file)?;
+        let race = EventDetail::from_json_file(file)?;
         if closure(&race) {
            races.push(race);
        }
    }
    Ok(races)
 }
 
-fn recurse_dir(path: PathBuf, files: &mut Vec<PathBuf>) -> anyhow::Result<()> {
-    let md = fs::metadata(&path)?;
-    if md.is_dir() {
-        let entries = fs::read_dir(path)?;
-        for entry in entries {
-            recurse_dir(entry?.path(), files)?;
-        }
-    } else if path.extension().unwrap_or_default() == "json" {
-        files.push(path);
-    }
-    Ok(())
-}
+// fn recurse_dir(path: PathBuf, files: &mut Vec<PathBuf>) -> anyhow::Result<()> {
+//     let md = fs::metadata(&path)?;
+//     if md.is_dir() {
+//         let entries = fs::read_dir(path)?;
+//         for entry in entries {
+//             recurse_dir(entry?.path(), files)?;
+//         }
+//     } else if path.extension().unwrap_or_default() == "json" {
+//         files.push(path);
+//     }
+//     Ok(())
+// }
 
 pub async fn download_by_id(id: u64) -> anyhow::Result<EventDetail> {
     let event_detail = get_racing_data(&id).await?;
41 changes: 41 additions & 0 deletions src/file.rs
@@ -0,0 +1,41 @@
+//! File and directory manipulation utilities.
+
+use std::fs::File;
+use std::{fs, io};
+use std::ffi::OsStr;
+use std::io::Error;
+use std::path::{Path, PathBuf};
+use serde::de::DeserializeOwned;
+use serde_json::from_reader;
+
+/// Reads a JSON-encoded type from a given file `path`.
+pub fn read_json<D: DeserializeOwned>(path: impl AsRef<Path>) -> Result<D, io::Error> {
+    let file = File::open(path)?;
+    Ok(from_reader(file)?)
+}
+
+pub trait FromJsonFile<D> {
+    fn from_json_file(path: impl AsRef<Path>) -> Result<D, io::Error>;
+}
+
+impl<D: DeserializeOwned> FromJsonFile<D> for D {
+    fn from_json_file(path: impl AsRef<Path>) -> Result<D, Error> {
+        read_json(path)
+    }
+}
+
+/// Recursively locates all files in a given directory matching the supplied `extension_filter`. The
+/// located files are written into the `files` vector. If the given `path` is a file that matches the
+/// filter (rather than a directory), it is added to `files`.
+pub fn recurse_dir(path: PathBuf, files: &mut Vec<PathBuf>, extension_filter: &mut impl FnMut(&OsStr) -> bool) -> Result<(), io::Error> {
+    let md = fs::metadata(&path)?;
+    if md.is_dir() {
+        let entries = fs::read_dir(path)?;
+        for entry in entries {
+            recurse_dir(entry?.path(), files, extension_filter)?;
+        }
+    } else if extension_filter(path.extension().unwrap_or_default()) {
+        files.push(path);
+    }
+    Ok(())
+}
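Editor's note: a hypothetical caller showing how the new utilities compose; `load_events` and the `data` directory are invented for illustration, relying only on the signatures above. The blanket impl of `FromJsonFile` for any `DeserializeOwned` type is what lets the `EventDetail::from_json_file(...)` calls in the other diffs compile without a bespoke impl.

use std::ffi::OsStr;
use std::io;
use std::path::PathBuf;

use racing_scraper::models::EventDetail;

use brumby::file::{self, FromJsonFile};

fn load_events() -> Result<Vec<EventDetail>, io::Error> {
    // Gather every .json file under the (hypothetical) `data` directory...
    let mut files: Vec<PathBuf> = vec![];
    file::recurse_dir("data".into(), &mut files, &mut |ext: &OsStr| ext == "json")?;
    // ...then deserialise each file via the blanket FromJsonFile impl.
    let mut events = Vec::with_capacity(files.len());
    for path in files {
        events.push(EventDetail::from_json_file(path)?);
    }
    Ok(events)
}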
3 changes: 2 additions & 1 deletion src/lib.rs
@@ -11,9 +11,10 @@ pub mod capture;
 pub mod csv;
 pub mod data;
 pub mod display;
-pub mod fit;
+pub mod file;
 pub mod linear;
 pub mod market;
+pub mod model;
 pub mod mc;
 pub mod opt;
 pub mod print;