Commit 5cd7192

Init jobs routes.
kirkbyers committed Mar 31, 2024
1 parent 49407fa commit 5cd7192
Showing 10 changed files with 29 additions and 12 deletions.
2 changes: 1 addition & 1 deletion src/bin/sm_scraper.rs
@@ -2,5 +2,5 @@ use zero2prod::jobs;

#[tokio::main]
async fn main() {
-jobs::scrape_sm::main().await;
+jobs::scrape_sm::main().await;
}
3 changes: 1 addition & 2 deletions src/db.rs
@@ -1,9 +1,8 @@
use libsql::{Builder, Connection, Database, Error};

use crate::models::{
-sm_scrape::INIT_TABLE as SCRAPE_INIT,
+jobs::INIT_TABLE as JOBS_INIT, sm_scrape::INIT_TABLE as SCRAPE_INIT,
subscriptions::INIT_TABLE as SUBSCRIPTIONS_INIT,
-jobs::INIT_TABLE as JOBS_INIT,
};

pub async fn local_db(db_path: &str) -> Result<Database, Error> {
2 changes: 1 addition & 1 deletion src/jobs/mod.rs
@@ -1,2 +1,2 @@
-pub mod scrape_sm;
pub mod embed_scrapes;
+pub mod scrape_sm;
2 changes: 1 addition & 1 deletion src/jobs/scrape_sm.rs
@@ -1,8 +1,8 @@
use std::{thread::sleep, time::Duration};

+use crate::{configuration::get_configuration, db::local_db, services::scraper};
use chrono::Utc;
use uuid::Uuid;
-use crate::{configuration::get_configuration, db::local_db, services::scraper};

macro_rules! unwrap_table_data {
($table_data:expr, $key:expr) => {
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -1,7 +1,7 @@
pub mod configuration;
pub mod db;
+pub mod jobs;
pub mod models;
pub mod routes;
pub mod services;
pub mod startup;
-pub mod jobs;
6 changes: 3 additions & 3 deletions src/models/jobs.rs
@@ -3,15 +3,15 @@ use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
enum JobType {
SMScrape,
-    Embed
+    Embed,
}

#[derive(Serialize, Deserialize)]
enum JobStatus {
Pending,
Running,
Completed,
-    Failed
+    Failed,
}

pub const INIT_TABLE: &str = r#"
@@ -23,4 +23,4 @@ CREATE TABLE IF NOT EXISTS jobs (
updated_at timestampz NOT NULL,
completed_at timestampz
);
"#;
"#;
2 changes: 1 addition & 1 deletion src/models/mod.rs
@@ -1,3 +1,3 @@
+pub mod jobs;
pub mod sm_scrape;
pub mod subscriptions;
-pub mod jobs;
13 changes: 13 additions & 0 deletions src/routes/jobs.rs
@@ -0,0 +1,13 @@
use actix_web::{get, post, web, HttpResponse};

// GET /jobs
#[get("")]
pub async fn get_jobs() -> HttpResponse {
HttpResponse::Ok().finish()
}

// POST /jobs
#[post("")]
pub async fn start_job() -> HttpResponse {
HttpResponse::Ok().finish()
}
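
The two handlers are stubs that always return 200. A minimal sketch of how they could be exercised with actix-web's test utilities, mounted under the same "/jobs" scope that startup.rs adds below; this test module is an illustration, not part of the commit:

// Sketch only, not part of the commit: mounts the stub handlers under a
// "/jobs" scope and checks that both endpoints answer with a success status.
#[cfg(test)]
mod tests {
    use actix_web::{test, web, App};

    use super::{get_jobs, start_job};

    #[actix_web::test]
    async fn jobs_stubs_return_200() {
        let app = test::init_service(
            App::new().service(web::scope("/jobs").service(get_jobs).service(start_job)),
        )
        .await;

        // GET /jobs hits the get_jobs stub.
        let req = test::TestRequest::get().uri("/jobs").to_request();
        let res = test::call_service(&app, req).await;
        assert!(res.status().is_success());

        // POST /jobs hits the start_job stub.
        let req = test::TestRequest::post().uri("/jobs").to_request();
        let res = test::call_service(&app, req).await;
        assert!(res.status().is_success());
    }
}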
2 changes: 2 additions & 0 deletions src/routes/mod.rs
@@ -1,11 +1,13 @@
mod green_recs;
mod health_check;
mod index;
+mod jobs;
mod scrapes;
mod subscriptions;

pub use green_recs::*;
pub use health_check::*;
pub use index::*;
+pub use jobs::*;
pub use scrapes::*;
pub use subscriptions::*;
7 changes: 5 additions & 2 deletions src/startup.rs
@@ -1,6 +1,8 @@
use crate::{
db,
-routes::{create_green_rec, get_scrapes, health_check_route, home, subscribe},
+routes::{
+    create_green_rec, get_jobs, get_scrapes, health_check_route, home, start_job, subscribe,
+},
};
use actix_web::{dev::Server, web, App, HttpServer};
use std::net::TcpListener;
@@ -36,7 +38,8 @@ pub async fn run(listener: TcpListener, db_path: &str) -> Result<Server, std::io
.service(health_check_route)
.service(subscribe)
.service(create_green_rec)
-.service(get_scrapes),
+.service(get_scrapes)
+.service(web::scope("/jobs").service(get_jobs).service(start_job)),
)
.service(home)
.app_data(connection_data.clone())
