
Commit

Cleanup
frc4533-lincoln committed Oct 28, 2024
1 parent d277a8c commit b0fbed0
Showing 4 changed files with 14 additions and 130 deletions.
8 changes: 3 additions & 5 deletions Cargo.toml
@@ -10,7 +10,7 @@ __lua54 = ["mlua/lua54"]
__luau-jit = ["mlua/luau-jit"]

[dependencies]
axum = "0.7.7"
axum = { version = "0.7.7", default-features = false, features = ["form", "http1", "json", "matched-path", "original-uri", "query", "tokio", "tower-log"] }
env_logger = "0.11.5"
log = "0.4.22"
#lru = "0.12.5"
@@ -24,7 +24,7 @@ sled = "0.34.7"
# "mmap",
# "stopwords",
#] }
tera = "1.20.0"
tera = { version = "1.20.0", default-features = false }
#texting_robots = "0.2.2"
tokio = { version = "1.41.0", features = [
"rt-multi-thread",
@@ -33,10 +33,8 @@ tokio = { version = "1.41.0", features = [
] }
url = "2.5.2"
serde_qs = "0.13.0"
mlua = { version = "0.10.0", features = ["async", "macros", "serialize", "send"] }
mlua = { version = "0.10.0", features = ["async", "serialize"] }
axum-macros = "0.4.2"
#piccolo = "0.3.3"
#piccolo-util = "0.3.3"
serde_json = "1.0.132"
toml = { version = "0.8.19", default-features = false, features = ["parse"] }
strfmt = "0.2.4"
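For context, the trimmed mlua feature list ("async", "serialize", with "macros" and "send" dropped) still supports the hand-written `LuaUserData` impls in src/lua_api.rs, and dropping "send" lines up with the `spawn_local`/`LocalSet` usage kept in that file. Below is a minimal sketch of such a manual impl, using a hypothetical `Query`-like struct rather than the crate's real type:

```rust
use mlua::prelude::*;

// Hypothetical stand-in for the crate's Query type.
struct Query {
    query: String,
}

// Hand-written LuaUserData impl; no proc-macro ("macros") feature is needed for this.
impl LuaUserData for Query {
    fn add_fields<F: LuaUserDataFields<Self>>(fields: &mut F) {
        fields.add_field_method_get("query", |_, this| Ok(this.query.clone()));
    }
}

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let ud = lua.create_userdata(Query { query: "rust".into() })?;
    lua.globals().set("q", ud)?;

    // Read the field back from Lua to confirm the binding works.
    let echoed: String = lua.load("return q.query").eval()?;
    assert_eq!(echoed, "rust");
    Ok(())
}
```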
3 changes: 0 additions & 3 deletions src/lib.rs
@@ -5,10 +5,7 @@ extern crate tokio;
#[macro_use]
extern crate serde;

//pub mod crawler;
pub mod lua_api;
//pub mod page;
//pub mod ranking;
pub mod config;

use mlua::{FromLua, IntoLua, LuaSerdeExt};
13 changes: 5 additions & 8 deletions src/lua_api.rs
@@ -3,22 +3,20 @@ use std::{
collections::{HashMap, VecDeque},
error::Error,
fs::{read_dir, File},
future::IntoFuture,
io::Read,
sync::Arc,
thread,
};

use mlua::prelude::*;
use reqwest::Client;
use scraper::{node::Element, Html, Selector};
use strfmt::{strfmt, Format};
use strfmt::Format;
use tokio::{
sync::{oneshot, watch, Mutex},
task::{spawn_local, JoinHandle, JoinSet, LocalSet},
task::{spawn_local, LocalSet},
};

use crate::{config::{self, Config}, Kind, Query};
use crate::{config::Config, Query};

impl LuaUserData for Query {
fn add_fields<F: LuaUserDataFields<Self>>(fields: &mut F) {
@@ -293,10 +291,9 @@ pub struct PluginEnginePool {
queue: Arc<Mutex<VecDeque<(crate::Query, oneshot::Sender<Vec<crate::Result>>)>>>,
}
impl PluginEnginePool {
pub async fn new() -> (Self, JoinSet<()>) {
pub async fn new() -> Self {
let queue: Arc<Mutex<VecDeque<(crate::Query, oneshot::Sender<Vec<crate::Result>>)>>> =
Arc::new(Mutex::const_new(VecDeque::new()));
let mut joinset = JoinSet::new();

for _ in 0..4 {
let queue = queue.clone();
@@ -323,7 +320,7 @@ impl PluginEnginePool {
});
}

(Self { queue }, joinset)
Self { queue }
}
pub async fn search(&self, query: Query) -> Vec<crate::Result> {
let (res_tx, rx) = oneshot::channel::<Vec<crate::Result>>();
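For context, `PluginEnginePool::new` now returns plain `Self` instead of `(Self, JoinSet<()>)`: the worker tasks are no longer collected into a `JoinSet`, while requests still flow through a shared `VecDeque` of `(Query, oneshot::Sender<Vec<Result>>)` pairs that `search` pushes onto and then awaits. A self-contained sketch of that queue pattern, with `String`/`Vec<String>` standing in for the crate's `Query` and `Result` types:

```rust
use std::{collections::VecDeque, sync::Arc};
use tokio::sync::{oneshot, Mutex};

#[tokio::main]
async fn main() {
    // Shared queue of (request, reply-channel) pairs, as in PluginEnginePool,
    // but with plain String/Vec<String> instead of the crate's types.
    let queue: Arc<Mutex<VecDeque<(String, oneshot::Sender<Vec<String>>)>>> =
        Arc::new(Mutex::new(VecDeque::new()));

    // Detached worker: pop a request, do the "search", answer on its channel.
    let worker_queue = queue.clone();
    tokio::spawn(async move {
        loop {
            let job = worker_queue.lock().await.pop_front();
            if let Some((query, reply)) = job {
                let _ = reply.send(vec![format!("result for {query}")]);
            } else {
                tokio::task::yield_now().await;
            }
        }
    });

    // Caller side, roughly what `search` does: enqueue a request, await the reply.
    let (tx, rx) = oneshot::channel();
    queue.lock().await.push_back(("rust".to_owned(), tx));
    println!("{:?}", rx.await.unwrap());
}
```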
120 changes: 6 additions & 114 deletions src/main.rs
@@ -12,48 +12,30 @@ extern crate searched;

mod web;

use std::{sync::Arc, time::Instant};
use std::time::Instant;

use axum::{
http::{HeaderMap, HeaderValue},
routing::get,
Router,
};
use log::LevelFilter;
//use lru::LruCache;
//use searched::page::Page;
//use reqwest::Client;
//use scraper::Selector;
use searched::{config::Config, lua_api::PluginEnginePool};
//use sled::Db;
//use tantivy::{
// doc,
// query::QueryParser,
// schema::{Field, Schema, FAST, STORED, TEXT},
// store::{Compressor, ZstdCompressor},
// Index, IndexReader, IndexSettings,
//};
use tokio::net::TcpListener;

#[derive(Clone)]
pub struct AppState {
//index: Index,
//count_cache: Arc<Mutex<LruCache<String, usize>>>,
//reader: IndexReader,
//query_parser: QueryParser,
//client: Client,
//db: Db,
//eng: Arc<Mutex<PluginEngine>>,
pool: PluginEnginePool,
config: Config,
//query_tx: mpsc::Sender<searched::Query>,
//result_rx: Arc<broadcast::Receiver<(searched::Query, Vec<searched::Result>)>>,
//url: Field,
//title: Field,
//body: Field,
}

#[tokio::main(worker_threads = 12)]
// Need more worker threads if we do our own search index again:
// #[tokio::main(worker_threads = 12)]
#[tokio::main]
async fn main() {
let mut headers = HeaderMap::new();
for (key, val) in [
@@ -84,91 +66,14 @@ async fn main() {
// .build()
// .unwrap();

let st = Instant::now();
//let res = scrapers::stackexchange::StackExchange::search(client.clone(), Query { query: String::from("rust"), page: 2 }).await;
//println!("{res:?}");
println!("{:?}", st.elapsed());

//println!("{res:?}");

env_logger::builder()
.filter_level(LevelFilter::Info)
.parse_default_env()
.init();

info!("Starting up...");

//let (tx, mut rx): (mpsc::UnboundedSender<Page>, mpsc::UnboundedReceiver<Page>) =
// mpsc::unbounded_channel();

//let mut schema = Schema::builder();

//let url = schema.add_text_field("url", TEXT | FAST | STORED);
//let title = schema.add_text_field("title", TEXT | FAST | STORED);
//let body = schema.add_text_field("body", TEXT);

//let schema = schema.build();

//let mut index = match Index::open_in_dir("data/index") {
// Ok(index) => index,
// Err(_) => {
// warn!("no existing index found, creating one");

// fs::create_dir_all("data/index").unwrap();

// Index::builder()
// .schema(schema.clone())
// .settings(IndexSettings {
// docstore_compression: Compressor::Zstd(ZstdCompressor {
// compression_level: Some(10),
// }),
// ..Default::default()
// })
// .create_in_dir("data/index")
// .unwrap()
// }
//};
//index.set_default_multithread_executor().unwrap();

//let mut wr = index.writer(100_000_000).unwrap();

//tokio::spawn(async move {
// let body_sel = Selector::parse("body").unwrap();

// loop {
// if let Some(page) = rx.recv().await {
// wr.add_document(doc! {
// url => page.url().to_string(),
// title => page.title(),
// body => page.dom().select(&body_sel).next().map(|element| element.text().collect()).unwrap_or_else(|| "".to_string()),
// }).unwrap();

// println!("{} ({})", page.title(), page.url());

// wr.commit().unwrap();
// }
// }
//});

//info!("initializing crawler");
//let cr = Crawler::new(tx).await;

//info!("starting crawler");
//tokio::spawn(async move {
// cr.run().await.unwrap();
//});

//let query_parser = QueryParser::for_index(&index, vec![title, body]);
//let searcher = index.reader().unwrap().searcher();
//let res = searcher.search(&query_parser.parse_query("").unwrap(), &TopDocs::with_limit(20)).unwrap();
//println!("{} {}", searcher.num_docs(), res.len());
//let reader = index.reader().unwrap();
//let count_cache = Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(500).unwrap())));

//let db = sled::open("data/db").unwrap();

//let (engine, local) = PluginEngine::new().await.unwrap();
let (pool, joinset) = PluginEnginePool::new().await;
let pool = PluginEnginePool::new().await;

let config = Config::load("plugins/providers.toml");

@@ -180,18 +85,8 @@ async fn main() {
.route("/assets/logo.png", get(web::logo))
.route("/favicon.ico", get(web::icon))
.with_state(AppState {
//index,
//count_cache,
//reader,
//query_parser,
//client,
//db,
//eng: Arc::new(Mutex::new(engine)),
config,
pool,
//url,
//title,
//body,
});

tokio::spawn(async {
Expand All @@ -203,9 +98,6 @@ async fn main() {
.unwrap();
});

tokio::select! {
// _ = joinset.join_all() => {}
_ = tokio::signal::ctrl_c() => {}
};
tokio::signal::ctrl_c().await.unwrap();
info!("shutting down");
}
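For context, `main` now simply spawns the server task and awaits Ctrl-C instead of selecting over the worker `JoinSet`. A minimal sketch of that spawn-then-wait shape, using a placeholder route and bind address rather than the project's real ones:

```rust
use axum::{routing::get, Router};
use tokio::net::TcpListener;

#[tokio::main]
async fn main() {
    // Placeholder router; the real app wires its own routes and state.
    let app = Router::new().route("/", get(|| async { "ok" }));

    // Serve in a background task...
    tokio::spawn(async move {
        let listener = TcpListener::bind("0.0.0.0:3000").await.unwrap();
        axum::serve(listener, app).await.unwrap();
    });

    // ...and keep the main task parked until Ctrl-C, then fall through to shut down.
    tokio::signal::ctrl_c().await.unwrap();
    println!("shutting down");
}
```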
