From 7db6f8cdf40d8cf6530a7ad677ab81a32b58a0b5 Mon Sep 17 00:00:00 2001
From: kr45732 <52721908+kr45732@users.noreply.github.com>
Date: Wed, 10 Aug 2022 22:36:30 -0400
Subject: [PATCH] 2.0.0
---
Cargo.lock | 2 +-
Cargo.toml | 2 +-
README.md | 24 ++++++++++++------------
src/api_handler.rs | 39 ++++++++++++++++++---------------------
src/config.rs | 2 +-
src/main.rs | 2 +-
src/structs.rs | 7 +++++++
src/utils.rs | 7 +++----
8 files changed, 44 insertions(+), 41 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 7ec3580..b831680 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1350,7 +1350,7 @@ dependencies = [
[[package]]
name = "query_api"
-version = "1.8.0"
+version = "2.0.0"
dependencies = [
"base64",
"dashmap",
diff --git a/Cargo.toml b/Cargo.toml
index cba28aa..a67265d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "query_api"
-version = "1.8.0"
+version = "2.0.0"
edition = "2021"
[dependencies]
diff --git a/README.md b/README.md
index 37ba19e..4716e77 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@
-A versatile API facade for the Hypixel Auction API written in Rust. The entire auction house is fetched with NBT parsing and inserted into a PostgreSQL database in about 3-7 seconds every minute with low memory usage (can vary depending on enabled features, network speed, and latency of the Hypixel API)! You can query by auction UUID, auctioneer, end time, item name, item tier, item id, price, enchants, bin and bids. You can sort by the item's bin / starting price. You can track the average price of each unique pet-level-rarity combination. You can track the lowest prices of all bins. It also can track new bins that are at least one million lower than previous bins. Lastly, it can track the average auction prices and sales up to five days with custom 'averaging methods'.
+A versatile API facade for the Hypixel Auction API written in Rust. The entire auction house is fetched with NBT parsing and inserted into a PostgreSQL database in about 2-5 seconds every minute with low memory usage (varies depending on enabled features, network speed, hardware, and latency of the Hypixel API)! You can query by auction UUID, auctioneer, end time, item name, item tier, item id, price, enchants, bin, and bids. You can sort by the item's bin / starting price. You can track the average price of each unique pet-level-rarity combination. You can track the lowest prices of all bins. It can also track new bins that are at least one million lower than previous bins. Lastly, it can track the average auction prices and sales for up to five days with custom 'averaging methods'.
## Set Up
### Prerequisites
@@ -20,19 +20,19 @@ A versatile API facade for the Hypixel Auction API written in Rust. The entire a
### Steps
- Clone the repository
-- Rename the `.example_env` file to `.env` and fill out all fields **OR** set all fields using environment variables
-- Run `cargo run --release` (may take some time to build)
+- Rename the `.example_env` file to `.env` and fill out required fields **OR** set required fields using environment variables (an example `.env` is shown below)
+- Run `cargo run --release` (may take time to build)
- Use it!
### Configuration Fields or Environment Variables
-- `BASE_URL`: The base URL of the domain such as 127.0.0.1
-- `PORT`: The port such as 8080
-- `API_KEY`: Key needed to access this API (NOT a Hypixel API key)
-- `ADMIN_API_KEY`: Admin key required to use raw SQL parameters. Will default to the API_KEY if not provided
-- `POSTGRES_URL`: Full URL of a PostgreSQL database
-- `WEBHOOK_URL`: Discord webhook URL for logging
-- `FEATURES`: The features (QUERY, PETS, LOWESTBIN, UNDERBIN, AVERAGE_AUCTION, AVERAGE_BIN) you want to be enabled separated with a '+'
-- `DEBUG`: If the API should log to files and stdout (true or false)
+- `BASE_URL`: Base address of the host (e.g. 0.0.0.0)
+- `PORT`: The port to listen on (e.g. 8000)
+- `API_KEY`: Optional key needed to access this API (NOT a Hypixel API key)
+- `ADMIN_API_KEY`: Optional admin key required to use raw SQL parameters (defaults to the API_KEY)
+- `POSTGRES_URL`: Full URL of a PostgreSQL database (should look like `postgres://[user]:[password]@[host]:[port]/[dbname]`)
+- `WEBHOOK_URL`: Optional Discord webhook URL for logging
+- `FEATURES`: Features (QUERY, PETS, LOWESTBIN, UNDERBIN, AVERAGE_AUCTION, AVERAGE_BIN) you want enabled, separated with a '+'
+- `DEBUG`: If the API should log to files and stdout (defaults to false)
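+
+For example, a minimal `.env` might look like this (all values below are placeholders):
+
+```
+BASE_URL=0.0.0.0
+PORT=8000
+API_KEY=YOUR_API_KEY
+POSTGRES_URL=postgres://user:password@localhost:5432/query_api
+FEATURES=QUERY+LOWESTBIN+AVERAGE_AUCTION+AVERAGE_BIN
+DEBUG=false
+```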
## Usage
### Endpoints
@@ -51,7 +51,7 @@ A versatile API facade for the Hypixel Auction API written in Rust. The entire a
[![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy)
### Free PostgreSQL Database
-For a free PostgreSQL database host, [Supabase](https://supabase.com/) is a really good choice and offers two free databases with plenty of space and performance.
+For a free cloud PostgreSQL database, the free tier of [Supabase](https://supabase.com/) is a good choice, with plenty of storage and performance.
### Deploy To Railway
[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/new/template?template=https://github.com/kr45732/rust-query-api&plugins=postgresql&envs=BASE_URL,API_KEY,ADMIN_API_KEY,POSTGRES_URL,WEBHOOK_URL,FEATURES&optionalEnvs=WEBHOOK_URL,ADMIN_API_KEY&BASE_URLDesc=The+base+URL+of+the+domain.+Do+not+modify+this&API_KEYDesc=Key+needed+to+access+this+API+(NOT+a+Hypixel+API+key)&ADMIN_API_KEYDesc=Admin+key+required+to+use+raw+SQL+parameters.+Will+default+to+the+API_KEY+if+not+provided&POSTGRES_URLDesc=Full+URL+of+a+PostgreSQL+database.+No+need+to+modify+this+unless+you+are+using+your+own+database+since+Railway+already+provides+this+for+you.&WEBHOOK_URLDesc=Discord+webhook+URL+for+logging&FEATURESDesc=The+features+(QUERY,+PETS,+LOWESTBIN,+UNDERBIN,+AVERAGE_AUCTION,+AVERAGE_BIN)+you+want+enabled+separated+with+commas&BASE_URLDefault=0.0.0.0&POSTGRES_URLDefault=$%7B%7BDATABASE_URL%7D%7D&FEATURESDefault=QUERY,LOWESTBIN,AVERAGE_AUCTION,AVERAGE_BIN&referralCode=WrEybV)
diff --git a/src/api_handler.rs b/src/api_handler.rs
index 73fddfb..2ffd657 100644
--- a/src/api_handler.rs
+++ b/src/api_handler.rs
@@ -156,8 +156,8 @@ pub async fn update_auctions(config: Arc<Config>) {
.await;
}
- let fetch_sec = started.elapsed().as_secs();
- info!("Total fetch time: {}s", started.elapsed().as_secs());
+ let fetch_sec = started.elapsed().as_secs_f32();
+ info!("Total fetch time: {:.2}s", fetch_sec);
debug!("Inserting into database");
let insert_started = Instant::now();
@@ -190,7 +190,7 @@ pub async fn update_auctions(config: Arc<Config>) {
if update_query {
let query_started = Instant::now();
- update_query_items_local(query_prices.iter().map(|o| o.item_name.clone()).collect()).await;
+ update_query_items_local(query_prices.iter().map(|o| o.item_name.as_str()).collect()).await;
let _ = match update_query_database(query_prices).await {
Ok(rows) => write!(
ok_logs,
@@ -256,11 +256,11 @@ pub async fn update_auctions(config: Arc<Config>) {
}
info(format!(
- "Fetch time: {}s ({} failed) | Insert time: {}s | Total time: {}s",
+ "Fetch time: {:.2}s ({} failed) | Insert time: {:.2}s | Total time: {:.2}s",
fetch_sec,
num_failed,
- insert_started.elapsed().as_secs(),
- started.elapsed().as_secs()
+ insert_started.elapsed().as_secs_f32(),
+ started.elapsed().as_secs_f32()
));
*IS_UPDATING.lock().await = false;
@@ -283,8 +283,7 @@ fn parse_auctions(
for auction in auctions {
// Prevent duplicate auctions (returns false if already exists)
if inserted_uuids.insert(auction.uuid.to_string()) {
- let mut tier = auction.tier.as_str();
- let pet_info;
+ let mut tier = auction.tier;
let nbt = &parse_nbt(&auction.item_bytes).unwrap().i[0];
let item_id = nbt.tag.extra_attributes.id.to_owned();
@@ -301,17 +300,17 @@ fn parse_auctions(
bin_prices,
);
}
+
if update_query {
enchants.push(format!("{};{}", entry.key().to_uppercase(), entry.value()));
}
}
} else if item_id == "PET" {
- pet_info =
- serde_json::from_str::<serde_json::Value>(nbt.tag.extra_attributes.pet.as_ref().unwrap())
- .unwrap();
-
// If the pet is tier boosted, the tier field in the auction shows the rarity after boosting
- tier = pet_info.get("tier").unwrap().as_str().unwrap();
+ tier =
+ serde_json::from_str::<PetInfo>(nbt.tag.extra_attributes.pet.as_ref().unwrap())
+ .unwrap()
+ .tier;
if auction.bin && update_lowestbin {
let mut split = auction.item_name.split("] ");
@@ -321,7 +320,7 @@ fn parse_auctions(
internal_id = format!(
"{};{}",
pet_name.replace(' ', "_").replace("_✦", "").to_uppercase(),
- match tier {
+ match tier.as_str() {
"COMMON" => 0,
"UNCOMMON" => 1,
"RARE" => 2,
@@ -440,7 +439,7 @@ async fn parse_ended_auctions(
_ => continue,
}
} else if id == "PET" {
- let pet_info = serde_json::from_str::<serde_json::Value>(
+ let pet_info = serde_json::from_str::<PetInfo>(
nbt.tag.extra_attributes.pet.as_ref().unwrap(),
)
.unwrap();
@@ -453,11 +452,9 @@ async fn parse_ended_auctions(
let pet_id = format!(
"{}_{}{}",
item_name.replace(' ', "_").replace("_✦", ""),
- pet_info.get("tier").unwrap().as_str().unwrap(),
- if let Some(held_item) =
- pet_info.get("heldItem").and_then(|v| v.as_str())
- {
- match held_item {
+ pet_info.tier,
+ if let Some(held_item) = pet_info.held_item {
+ match held_item.as_str() {
"PET_ITEM_TIER_BOOST"
| "PET_ITEM_VAMPIRE_FANG"
| "PET_ITEM_TOY_JERRY" => "_TB",
@@ -493,7 +490,7 @@ async fn parse_ended_auctions(
.replace(' ', "_")
.replace("_✦", "")
.to_uppercase(),
- match pet_info.get("tier").unwrap().as_str().unwrap() {
+ match pet_info.tier.as_str() {
"COMMON" => 0,
"UNCOMMON" => 1,
"RARE" => 2,
diff --git a/src/config.rs b/src/config.rs
index 3ea59aa..dee1e5e 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -48,7 +48,7 @@ impl Config {
pub fn load_or_panic() -> Self {
let base_url = get_env("BASE_URL");
let port = get_env("PORT").parse::().expect("PORT not valid");
- let api_key = get_env("API_KEY");
+ let api_key = env::var("API_KEY").unwrap_or_default();
let webhook_url = env::var("WEBHOOK_URL").unwrap_or_default();
let admin_api_key = env::var("ADMIN_API_KEY").unwrap_or_else(|_| api_key.clone());
let debug = env::var("DEBUG")
diff --git a/src/main.rs b/src/main.rs
index 46ab6da..772f2db 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -16,7 +16,7 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
-#![warn(clippy::all, clippy::pedantic)]
+#![warn(clippy::all)]
use std::sync::Arc;
use std::{
diff --git a/src/structs.rs b/src/structs.rs
index 94e16f8..5e797c5 100644
--- a/src/structs.rs
+++ b/src/structs.rs
@@ -181,6 +181,13 @@ pub struct DisplayInfo {
// pub lore: Vec<String>,
}
+#[derive(Deserialize)]
+pub struct PetInfo {
+ pub tier: String,
+ #[serde(rename = "heldItem")]
+ pub held_item: Option<String>,
+}
+
#[derive(Deserialize)]
pub struct Auctions {
pub page: i64,
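For reference, a minimal self-contained sketch of how the new `PetInfo` struct is meant to be used with the pet JSON parsed in `api_handler.rs` (the payload below is a hypothetical example, not real auction data):

    use serde::Deserialize;

    // Mirrors the struct added to src/structs.rs above
    #[derive(Deserialize)]
    pub struct PetInfo {
        pub tier: String,
        #[serde(rename = "heldItem")]
        pub held_item: Option<String>,
    }

    fn main() -> Result<(), serde_json::Error> {
        // Hypothetical value of nbt.tag.extra_attributes.pet
        let raw = r#"{"type":"BLUE_WHALE","tier":"LEGENDARY","heldItem":"PET_ITEM_TIER_BOOST"}"#;
        let pet_info: PetInfo = serde_json::from_str(raw)?;
        assert_eq!(pet_info.tier, "LEGENDARY");
        assert_eq!(pet_info.held_item.as_deref(), Some("PET_ITEM_TIER_BOOST"));
        Ok(())
    }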
diff --git a/src/utils.rs b/src/utils.rs
index fa460fc..e0c4a7e 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -176,10 +176,9 @@ pub fn update_lower_else_insert(id: &String, starting_bid: i64, prices: &mut Das
if starting_bid < *ele {
*ele = starting_bid;
}
- return;
+ } else {
+ prices.insert(id.clone(), starting_bid);
}
-
- prices.insert(id.clone(), starting_bid);
}
pub async fn update_query_database(auctions: Vec) -> Result {
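As a quick behavioral sketch of the restructured helper above (the item id and prices are hypothetical):

    use dashmap::DashMap;

    // Mirrors the restructured update_lower_else_insert: keep the lower of the
    // stored and incoming price, or insert if the id is not present yet.
    pub fn update_lower_else_insert(id: &String, starting_bid: i64, prices: &mut DashMap<String, i64>) {
        if let Some(mut ele) = prices.get_mut(id) {
            if starting_bid < *ele {
                *ele = starting_bid;
            }
        } else {
            prices.insert(id.clone(), starting_bid);
        }
    }

    fn main() {
        let mut prices: DashMap<String, i64> = DashMap::new();
        update_lower_else_insert(&"HYPERION".to_string(), 900_000_000, &mut prices);
        update_lower_else_insert(&"HYPERION".to_string(), 850_000_000, &mut prices);
        // The lower of the two prices is kept
        assert_eq!(*prices.get("HYPERION").unwrap(), 850_000_000);
    }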
@@ -350,7 +349,7 @@ pub async fn update_under_bins_local(bin_prices: &Vec) -> Result<(), serd
serde_json::to_writer(file, bin_prices)
}
-pub async fn update_query_items_local(query_items: DashSet<String>) {
+pub async fn update_query_items_local(query_items: DashSet<&str>) {
let file = OpenOptions::new()
.create(true)
.write(true)