Redis (#106)
* redis

* all model

* somewhat faster

* config changed

* app

* router:

* user model

* user router, bots connected

* bug fixes

* Admin

* Revert config

* Squashed commit of the following:

commit 61382915c33287f9b5916f2aafc5bca69a34b1d4
Author: rangoiv <rangoiv@gmail.com>
Date:   Tue Apr 9 23:48:52 2024 +0200

    Logging

* part of the tests

* fixing routers

* Router tests work!

* energy market tests

* resource market test fix

* orderbook tests

* fixed all tests

* reverted some configs

* small fix

---------

Co-authored-by: rangoiv <rangoiv@gmail.com>
nitko12 and rangoiv authored Apr 10, 2024
1 parent 78caccb commit c9fe508
Showing 83 changed files with 2,043 additions and 3,860 deletions.
1 change: 1 addition & 0 deletions backend/.gitignore
@@ -1,2 +1,3 @@
logfile.log
.venv
scores
3 changes: 3 additions & 0 deletions backend/config.py
@@ -21,3 +21,6 @@ def path_constructor(loader, node):
with open("config.yaml", "r") as file:
    config = yaml.load(file, Loader=yaml.FullLoader)
config["in_tests"] = False

if config['log_networth_delay'] is None:
    config['log_networth_delay'] = 1
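
Note: the ${...} placeholders used in config.yaml below are resolved by the path_constructor hook shown in the context above. The following is only a minimal sketch of such a constructor, assuming unset environment variables resolve to None (which is why log_networth_delay needs the default above); the repository's actual implementation may differ.

import os
import re

import yaml

ENV_PATTERN = re.compile(r"\$\{([^}{]+)\}")


def path_constructor(loader, node):
    # Substitute a ${VAR} scalar with the environment variable's value,
    # returning None when the variable is unset so callers can apply defaults.
    match = ENV_PATTERN.fullmatch(node.value)
    if match:
        return os.environ.get(match.group(1))
    # Mixed scalars like "prefix-${VAR}" fall back to empty-string substitution.
    return ENV_PATTERN.sub(lambda m: os.environ.get(m.group(1), ""), node.value)


yaml.add_implicit_resolver("!env", ENV_PATTERN, Loader=yaml.FullLoader)
yaml.add_constructor("!env", path_constructor, Loader=yaml.FullLoader)
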
19 changes: 10 additions & 9 deletions backend/config.yaml
@@ -4,18 +4,18 @@ server:

redis:
  port: 6379

database:
  url: postgresql://postgres:postgres@localhost:5432/mydatabase

test_database:
  url: postgresql://postgres:postgres@localhost:5432/test_database
  host: localhost

admin:
  secret: mojkljuc

testing: ${TESTING}
debug: ${DEBUG}
drop_tables: ${DROP_TABLES}
fill_datasets: ${FILL_DATASETS}
fill_tables: ${FILL_TABLES}
log_level: ${LOG_LEVEL}
log_networth_delay: ${NETWORTH_TICKS}

dataset:
  datasets_path: ./data
@@ -47,6 +47,7 @@ player:
  starting_money: 50_000_000
  max_orders: 20
  max_energy_per_player: 0.4
  log_top_players: 10

bots:
  team_name: bots
@@ -59,9 +60,9 @@ bots:
  max_price: 100
  expiration_ticks: 3
  price_change_coeff: 0.25
  extra_orders: 5
  extra_orders_price_diff: 0.02
  extra_orders_volume_diff: 0.12
  extra_orders: 4
  extra_orders_price_diff: 0.01
  extra_orders_volume_diff: 0.1
  final_volume_multiplier: 1
  final_price_multiplier: 1
  log_when_no_orders: ${LOG_BOTS}
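
Since every ${...} value above is read from the environment when config.yaml is parsed, a hypothetical local run only needs those variables exported (or defaulted) beforehand; the variable names match the YAML above, the values are illustrative only.

import os

for name, value in {
    "TESTING": "false",
    "DEBUG": "true",
    "DROP_TABLES": "false",
    "FILL_DATASETS": "true",
    "FILL_TABLES": "true",
    "LOG_LEVEL": "INFO",
    "NETWORTH_TICKS": "1",
    "LOG_BOTS": "false",
}.items():
    os.environ.setdefault(name, value)

from config import config  # noqa: E402 -- config.yaml is parsed at import time

print(config["log_networth_delay"])
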
2 changes: 0 additions & 2 deletions backend/db/__init__.py
@@ -1,3 +1 @@
from .table import Table
from .db import database
from .rate_limit import limiter
11 changes: 6 additions & 5 deletions backend/db/db.py
@@ -1,8 +1,9 @@
from databases import Database
from redis_om import get_redis_connection
from config import config


if config['testing']:
    database = Database(config['test_database']['url'])
else:
    database = Database(config['database']['url'])
redis_port = config["redis"]["port"]


def get_my_redis_connection():
    return get_redis_connection(port=redis_port)
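
A hypothetical sketch of how a redis_om model binds to this helper; the commit's real models (e.g. Datasets, DatasetData) are defined elsewhere and may differ.

from redis_om import Field, HashModel, Migrator

from db.db import get_my_redis_connection  # hypothetical import path


class Team(HashModel):
    team_name: str = Field(index=True)
    money: int = 0

    class Meta:
        database = get_my_redis_connection()


if __name__ == "__main__":
    Migrator().run()  # build RediSearch indexes for indexed fields
    Team(team_name="bots", money=50_000_000).save()
    print(Team.find(Team.team_name == "bots").count())
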
143 changes: 62 additions & 81 deletions backend/db/fill_datasets.py
@@ -4,99 +4,80 @@
from datetime import datetime
from config import config
from logger import logger
from redlock.lock import RedLock

from model.power_plant_model import PowerPlantsModel, ResourcesModel
from model.power_plant_type import PowerPlantType
from model.resource import Resource


datasets_path = config["dataset"]["datasets_path"]
price_multipliers = config["dataset"]["price_multiplier"]
energy_output_multipliers = config["dataset"]["energy_output_multiplier"]
energy_demand_multiplier = config["dataset"]["energy_demand_multiplier"]


async def fill_datasets():
def fill_datasets():
    logger.info("Filling datasets")
    for dataset in os.listdir(datasets_path):
        if not dataset.endswith(".csv"):
    pipe = DatasetData.db().pipeline()
    for dataset_name in os.listdir(datasets_path):
        if not dataset_name.endswith(".csv"):
            continue
        try:
            await Datasets.get(dataset_name=dataset)
            logger.debug(f"Dataset {dataset} already created")
        if Datasets.find(Datasets.dataset_name == dataset_name).count() > 0:
            logger.info(f"Dataset {dataset_name} already created")
            continue
        except Exception:
            pass

        df = pd.read_csv(f"{datasets_path}/{dataset}")

        # TODO: asserts, async transaction - we don't want the dataset to be created if row creation fails
        dataset_id = await Datasets.create(dataset_name=dataset, dataset_description="Opis")
        df = pd.read_csv(f"{datasets_path}/{dataset_name}")

        price_multipliers = config["dataset"]["price_multiplier"]
        energy_output_multipliers = config["dataset"]["energy_output_multiplier"]
        energy_demand_multiplier = config["dataset"]["energy_demand_multiplier"]
        dataset = Datasets(dataset_name=dataset_name, dataset_description="Opis")
        dataset.save()

        # date,COAL,URANIUM,BIOMASS,GAS,OIL,GEOTHERMAL,WIND,SOLAR,HYDRO,ENERGY_DEMAND,MAX_ENERGY_PRICE
        tick = 0
        dataset_data = []
        for index, row in df.iterrows():
            dataset_data.append(DatasetData(
                dataset_data_id=0,
                dataset_id=dataset_id,
                tick=tick,
                date=datetime.strptime(
                    row["date"], "%Y-%m-%d %H:%M:%S"),
                coal=(
                    energy_output_multipliers["coal"] *
                    row["COAL"] // 1_000_000),
                uranium=(
                    energy_output_multipliers["uranium"] *
                    row["URANIUM"] // 1_000_000),
                biomass=(
                    energy_output_multipliers["biomass"] *
                    row["BIOMASS"] // 1_000_000),
                gas=(
                    energy_output_multipliers["gas"] *
                    row["GAS"] // 1_000_000),
                oil=(
                    energy_output_multipliers["oil"] *
                    row["OIL"] // 1_000_000),
                geothermal=(
                    energy_output_multipliers["geothermal"] *
                    row["GEOTHERMAL"] // 1_000_000),
                wind=(
                    energy_output_multipliers["wind"] *
                    row["WIND"] // 1_000_000),
                solar=(
                    energy_output_multipliers["solar"] *
                    row["SOLAR"] // 1_000_000),
                hydro=(
                    energy_output_multipliers["hydro"] *
                    row["HYDRO"] // 1_000_000),
                energy_demand=(
                    energy_demand_multiplier *
                    row["ENERGY_DEMAND"] // 1_000_000),
                max_energy_price=(
                    price_multipliers["energy"] *
                    row["MAX_ENERGY_PRICE"] // 1_000_000),
                coal_price=(
                    price_multipliers["coal"] *
                    row["COAL_PRICE"] // 1_000_000),
                uranium_price=(
                    price_multipliers["uranium"] *
                    row["URANIUM_PRICE"] // 1_000_000),
                biomass_price=(
                    price_multipliers["biomass"] *
                    row["BIOMASS_PRICE"] // 1_000_000),
                gas_price=(
                    price_multipliers["gas"] *
                    row["GAS_PRICE"] // 1_000_000),
                oil_price=(
                    price_multipliers["oil"] *
                    row["OIL_PRICE"] // 1_000_000),
            ))
        for _, row in df.iterrows():
            from_row(dataset, tick, row).save(pipe)
            tick += 1
        logger.info(f"Added dataset {dataset_name}")
    pipe.execute()

        for x in dataset_data:
            assert x.coal_price > -config["bots"]["min_price"]
            assert x.uranium_price > -config["bots"]["min_price"]
            assert x.biomass_price > -config["bots"]["min_price"]
            assert x.gas_price > -config["bots"]["min_price"]
            assert x.oil_price > -config["bots"]["min_price"]

        await DatasetData.create_many(dataset_data)
        logger.info(f"Added dataset {dataset}")
def from_row(dataset: Datasets, tick: int, row: pd.Series) -> DatasetData:
    power_plants_output = PowerPlantsModel(
        coal=(energy_output_multipliers["coal"] * row["COAL"] // 1_000_000),
        uranium=(energy_output_multipliers["uranium"] * row["URANIUM"] // 1_000_000),
        biomass=(energy_output_multipliers["biomass"] * row["BIOMASS"] // 1_000_000),
        gas=(energy_output_multipliers["gas"] * row["GAS"] // 1_000_000),
        oil=(energy_output_multipliers["oil"] * row["OIL"] // 1_000_000),
        geothermal=(
            energy_output_multipliers["geothermal"] * row["GEOTHERMAL"] // 1_000_000
        ),
        wind=(energy_output_multipliers["wind"] * row["WIND"] // 1_000_000),
        solar=(energy_output_multipliers["solar"] * row["SOLAR"] // 1_000_000),
        hydro=(energy_output_multipliers["hydro"] * row["HYDRO"] // 1_000_000),
    )
    resource_prices = ResourcesModel(
        coal=(price_multipliers["coal"] * row["COAL_PRICE"] // 1_000_000),
        uranium=(
            price_multipliers["uranium"] * row["URANIUM_PRICE"] // 1_000_000
        ),
        biomass=(
            price_multipliers["biomass"] * row["BIOMASS_PRICE"] // 1_000_000
        ),
        gas=(price_multipliers["gas"] * row["GAS_PRICE"] // 1_000_000),
        oil=(price_multipliers["oil"] * row["OIL_PRICE"] // 1_000_000),
    )
    for type in PowerPlantType:
        assert power_plants_output[type] >= 0
    for resource in Resource:
        assert resource_prices[resource] > 0
    return DatasetData(
        dataset_id=dataset.pk,
        tick=tick,
        date=datetime.strptime(row["date"], "%Y-%m-%d %H:%M:%S"),

        energy_demand=(energy_demand_multiplier * row["ENERGY_DEMAND"] // 1_000_000),
        max_energy_price=(
            price_multipliers["energy"] * row["MAX_ENERGY_PRICE"] // 1_000_000
        ),
        power_plants_output=power_plants_output,
        resource_prices=resource_prices
    )
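
The pipelined save above is where the speed-up comes from: redis_om's save() accepts a redis pipeline, so each DatasetData row is queued client-side and the whole dataset is flushed with a single pipe.execute(). An isolated sketch of the same pattern, using a placeholder model rather than the project's real schema:

from redis_om import HashModel


class Row(HashModel):
    # Placeholder model; uses redis_om's default connection in this sketch.
    tick: int
    value: int


def bulk_insert(rows: list[Row]) -> None:
    pipe = Row.db().pipeline()  # one shared pipeline instead of one round trip per save
    for row in rows:
        row.save(pipe)          # queue the write on the pipeline
    pipe.execute()              # flush everything to Redis at once


bulk_insert([Row(tick=i, value=i * 10) for i in range(100)])
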
64 changes: 0 additions & 64 deletions backend/db/fill_teams.py

This file was deleted.
