From ad9750e81bbb72098747fc8f5011b69281ccc05c Mon Sep 17 00:00:00 2001
From: kirkbyers
Date: Wed, 10 Apr 2024 23:50:47 -0500
Subject: [PATCH] Filter scrapes by the day.

---
 src/models/jobs.rs  |  3 ++-
 src/routes/index.rs | 37 +++++++++++++++++++++++++++++++++++--
 2 files changed, 37 insertions(+), 3 deletions(-)

diff --git a/src/models/jobs.rs b/src/models/jobs.rs
index b4dc0e5..552bf3f 100644
--- a/src/models/jobs.rs
+++ b/src/models/jobs.rs
@@ -1,7 +1,7 @@
 use crate::models::utils::create_paginator;
 use serde::{Deserialize, Serialize};
 
-#[derive(Serialize, Deserialize, Copy, Clone)]
+#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
 pub enum JobType {
     SMScrape = 0,
     Embed = 1,
@@ -65,6 +65,7 @@ pub fn select_with_pagination(
     create_paginator("jobs")(columns, q, sort_by, sort_direction, limit, offset)
 }
 
+// TODO: timestampz isn't easily comparable.
 pub const INIT_TABLE: &str = r#"
 CREATE TABLE IF NOT EXISTS jobs (
     id uuid NOT NULL PRIMARY KEY,
diff --git a/src/routes/index.rs b/src/routes/index.rs
index d811d59..bae26a6 100644
--- a/src/routes/index.rs
+++ b/src/routes/index.rs
@@ -1,15 +1,48 @@
 use actix_web::{get, web, HttpResponse};
 use tera::{Context, Tera};
 
-use crate::{models::sm_scrape, routes::scrapes};
+use crate::{
+    models::{jobs, sm_scrape},
+    routes::scrapes,
+};
 
 #[get("/")]
 async fn home(conn: web::Data<libsql::Connection>) -> HttpResponse {
     let mut tera_context = Context::new();
 
+    let recent_scrape_job = jobs::select_with_pagination(
+        "created_at, completed_at",
+        &format!("job_type = {}", jobs::JobType::SMScrape.as_i32()),
+        "created_at",
+        "DESC",
+        1,
+        0,
+    );
+    println!("{}", recent_scrape_job);
+    let scrape_row = match conn.get_ref().query(&recent_scrape_job, ()).await {
+        Ok(mut row) => match row.next().await {
+            Ok(row) => row,
+            Err(e) => {
+                eprintln!("Error: {:?}", e);
+                return HttpResponse::InternalServerError().body("Error parsing db results");
+            }
+        },
+        Err(e) => {
+            eprintln!("Error: {}", e);
+            return HttpResponse::InternalServerError().body("Error querying the database");
+        }
+    };
+    let scrape_created_at: String = match scrape_row {
+        Some(scrape) => scrape.get::<String>(0).unwrap_or(String::new()),
+        None => String::new(),
+    };
+
     let select_page = sm_scrape::select_with_pagination(
         "id, url, arrival, lot_size, bag_size, score, packaging, cultivar_detail, spro_rec",
-        "score != ''",
+        &format!(
+            "score != '' AND strftime(scraped_at) > strftime('{}')",
+            scrape_created_at
+        ),
         "score",
         "DESC",
         200,