Skip to content

Commit

Permalink
Fix CRT queries (#348)
Browse files Browse the repository at this point in the history
* Fix CRT queries

* GitHub workflows docker compose fix

* More docker compose CI fixes
  • Loading branch information
Lezek123 authored Nov 7, 2024
1 parent 5eeeb68 commit 41bf490
Show file tree
Hide file tree
Showing 14 changed files with 234 additions and 174 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,10 @@ jobs:
- name: create joystream_default network
run: docker network create joystream_default
- name: Start db
run: docker-compose up -d orion_db
run: docker compose up -d orion_db
- name: Wait for db
run: |
while ! docker-compose exec -T orion_db pg_isready -U postgres -p 23798; do
while ! docker compose exec -T orion_db pg_isready -U postgres -p 23798; do
sleep 1
done
- name: Run migrations
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@ jobs:
- name: Prepare workspace
run: make prepare
- name: Start db
run: docker-compose up -d orion_db
run: docker compose up -d orion_db
- name: Wait for db
run: |
while ! docker-compose exec -T orion_db pg_isready -U postgres -p 23798; do
while ! docker compose exec -T orion_db pg_isready -U postgres -p 23798; do
sleep 1
done
- name: Run migrations
Expand Down
10 changes: 10 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,13 @@
# 4.1.0

## Misc
- `tokensWithPriceChange`, `marketplaceTokens` and `topSellingToken` queries now only take the currently active AMM sale into account when calculating values such as volume / liquidity / price change.
- added `minVolume` argument to `tokensWithPriceChange` query to allow filtering out tokens with negligible volume
- updated docker setup to support the latest docker version

## Bug Fixes:
- `tokensWithPriceChange` and `marketplaceTokens` queries returned incorrect price changes when no transactions were made in the given period. At least one transaction within the provided period is now required for a price change to be calculated.

# 4.0.6

## Bug Fixes:
Expand Down
8 changes: 4 additions & 4 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -59,19 +59,19 @@ prepare: install typegen codegen build

up-squid:
@docker network create joystream_default || true
@docker-compose up -d
@docker compose up -d

up-archive:
@docker network create joystream_default || true
@docker-compose -f archive/docker-compose.yml up -d
@docker compose -f archive/docker-compose.yml up -d

up: up-archive up-squid

down-squid:
@docker-compose down -v
@docker compose down -v

down-archive:
@docker-compose -f archive/docker-compose.yml down -v
@docker compose -f archive/docker-compose.yml down -v

down: down-squid down-archive

Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@

const { getViewDefinitions } = require('../viewDefinitions')

module.exports = class Views1721141313757 {
name = 'Views1721141313757'
module.exports = class Views1730895049782 {
name = 'Views1730895049782'

async up(db) {
// these two queries will be invoked and the cleaned up by the squid itself
Expand Down
148 changes: 53 additions & 95 deletions db/viewDefinitions.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
const { withPriceChange } = require('../lib/server-extension/resolvers/CreatorToken/utils')

const noCategoryVideosSupportedByDefault =
process.env.SUPPORT_NO_CATEGORY_VIDEOS === 'true' ||
process.env.SUPPORT_NO_CATEGORY_VIDEOS === '1'
Expand Down Expand Up @@ -90,101 +92,57 @@ function getViewDefinitions(db) {
// TODO (notifications v2): make this part of the admin schema with appropriate resolver for queries
// notification: ['FALSE'],
marketplace_token: `
WITH trading_volumes AS
(SELECT ac.token_id,
SUM(tr.price_paid) as amm_volume
FROM amm_transaction tr
JOIN amm_curve ac ON ac.id = tr.amm_id
GROUP BY token_id),
base_price_transaction AS (
WITH oldest_transactions AS (
SELECT DISTINCT ON (ac.token_id)
tr.amm_id,
ac.token_id,
tr.price_per_unit AS oldest_price_paid,
tr.created_in
FROM amm_transaction tr
JOIN amm_curve ac ON tr.amm_id = ac.id
WHERE tr.created_in < (SELECT height FROM squid_processor.status) - ${
BLOCKS_PER_DAY * 30
}
ORDER BY ac.token_id, tr.created_in DESC
),
fallback_transactions AS (
SELECT DISTINCT ON (ac.token_id)
tr.amm_id,
ac.token_id,
tr.price_per_unit AS oldest_price_paid,
tr.created_in
FROM amm_transaction tr
JOIN amm_curve ac ON tr.amm_id = ac.id
WHERE tr.created_in > (SELECT height FROM squid_processor.status) - ${
BLOCKS_PER_DAY * 30
}
ORDER BY ac.token_id, tr.created_in ASC
)
SELECT * FROM oldest_transactions
UNION ALL
SELECT * FROM fallback_transactions
WHERE NOT EXISTS (SELECT 1 FROM oldest_transactions)
)
SELECT
COALESCE(ac.total_liq, 0) as liquidity,
COALESCE((ct.last_price * ct.total_supply), 0) as market_cap,
c.cumulative_revenue,
c.id as channel_id,
COALESCE(tv.amm_volume, 0) as amm_volume,
CASE
WHEN ldt_o.oldest_price_paid = 0
OR ldt_o.oldest_price_paid IS NULL THEN 0
ELSE ((ct.last_price - ldt_o.oldest_price_paid) * 100.0 / ldt_o.oldest_price_paid)
END AS price_change,
CASE
WHEN liq_until.quantity IS NULL THEN 0
ELSE ((ac.total_liq - liq_until.quantity) * 100 / GREATEST(liq_until.quantity, 1))
END as liquidity_change,
ct.*
FROM creator_token ct
LEFT JOIN token_channel tc ON tc.token_id = ct.id
LEFT JOIN channel c ON c.id = tc.channel_id
LEFT JOIN base_price_transaction ldt_o ON ldt_o.token_id = ct.id
LEFT JOIN
(SELECT token_id,
SUM(CASE
WHEN transaction_type = 'BUY' THEN quantity
ELSE quantity * -1
END) AS total_liq
FROM
(SELECT ac.token_id,
tr.transaction_type,
tr.quantity
FROM amm_transaction tr
JOIN amm_curve ac ON tr.amm_id = ac.id) as tr
GROUP BY token_id) as ac ON ac.token_id = ct.id
LEFT JOIN
(SELECT token_id,
SUM(CASE
WHEN transaction_type = 'BUY' THEN quantity
ELSE quantity * -1
END) AS quantity
FROM
(SELECT ac.token_id,
tr.transaction_type,
tr.quantity
FROM amm_transaction tr
JOIN amm_curve ac ON tr.amm_id = ac.id
WHERE tr.created_in <
(SELECT height
FROM squid_processor.status) - ${BLOCKS_PER_DAY * 30}) as tr
GROUP BY token_id) as liq_until ON liq_until.token_id = ct.id
LEFT JOIN trading_volumes tv ON tv.token_id = ct.id
`,
WITH
last_block AS (
SELECT height FROM squid_processor.status
),
tokens_with_stats AS (
SELECT
ac.token_id,
SUM(CASE
WHEN (
transaction_type = 'BUY'
AND tr.created_in < last_block.height - ${BLOCKS_PER_DAY * 30}
) THEN quantity
WHEN (
transaction_type = 'SELL'
AND tr.created_in < last_block.height - ${BLOCKS_PER_DAY * 30}
) THEN quantity * -1
ELSE 0
END) AS total_liquidity_30d_ago,
SUM (CASE
WHEN transaction_type = 'BUY' THEN quantity
ELSE quantity * -1
END) AS total_liquidity,
SUM(tr.price_paid) as amm_volume
FROM amm_transaction tr
JOIN amm_curve ac ON ac.id = tr.amm_id
JOIN creator_token ct ON ct.current_amm_sale_id = ac.id
JOIN last_block ON 1=1
GROUP BY token_id
),
${withPriceChange({ periodDays: 30, currentBlock: 'last_block' })}
SELECT
COALESCE(tws.total_liquidity, 0) as liquidity,
(ct.last_price * ct.total_supply) as market_cap,
c.cumulative_revenue,
c.id as channel_id,
COALESCE(tws.amm_volume, 0) as amm_volume,
COALESCE(twpc.percentage_change, 0) price_change,
CASE
WHEN (tws.total_liquidity_30d_ago IS NULL OR tws.total_liquidity_30d_ago = 0) THEN 0
ELSE (
(tws.total_liquidity - tws.total_liquidity_30d_ago)
* 100
/ tws.total_liquidity_30d_ago
)
END as liquidity_change,
ct.*
FROM creator_token ct
LEFT JOIN token_channel tc ON tc.token_id = ct.id
LEFT JOIN channel c ON c.id = tc.channel_id
LEFT JOIN tokens_with_price_change twpc ON twpc.token_id = ct.id
LEFT JOIN tokens_with_stats tws ON tws.token_id = ct.id`,
}
}

Expand Down
15 changes: 10 additions & 5 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
version: '3'

services:
orion_db:
container_name: orion_db
Expand All @@ -9,6 +7,8 @@ services:
environment:
POSTGRES_DB: squid
POSTGRES_PASSWORD: squid
networks:
- joystream_default
ports:
- '127.0.0.1:${DB_PORT}:${DB_PORT}'
- '[::1]:${DB_PORT}:${DB_PORT}'
Expand All @@ -23,6 +23,8 @@ services:
hostname: orion_processor
image: node:18
restart: unless-stopped
networks:
- joystream_default
env_file:
- .env
- docker.env
Expand All @@ -43,6 +45,8 @@ services:
hostname: orion_graphql-server
image: node:18
restart: unless-stopped
networks:
- joystream_default
env_file:
- .env
- docker.env
Expand All @@ -65,6 +69,8 @@ services:
hostname: orion_auth-api
image: node:18
restart: unless-stopped
networks:
- joystream_default
env_file:
- .env
- docker.env
Expand All @@ -86,6 +92,5 @@ volumes:
orion_db_data:

networks:
default:
external:
name: joystream_default
joystream_default:
external: true
7 changes: 5 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "orion",
"version": "4.0.6",
"version": "4.1.0",
"engines": {
"node": ">=16"
},
Expand Down Expand Up @@ -142,5 +142,8 @@
},
"workspaces": [
"network-tests"
]
],
"volta": {
"node": "16.20.2"
}
}
6 changes: 3 additions & 3 deletions src/auth-server/tests/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@ cd $SCRIPT_PATH/../../..

if ! [[ "$(docker container inspect -f '{{.State.Running}}' orion_db)" = "true" ]]; then
docker network create joystream_default || true
docker-compose up -d orion_db
until docker-compose logs orion_db | grep "database system is ready to accept connections"; do
docker-compose logs --tail 10 orion_db
docker compose up -d orion_db
until docker compose logs orion_db | grep "database system is ready to accept connections"; do
docker compose logs --tail 10 orion_db
echo "Waiting for the db to be ready..."
sleep 1
done
Expand Down
8 changes: 4 additions & 4 deletions src/mail-scheduler/tests/run-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,17 @@ cd $SCRIPT_PATH/../../..

if ! [[ "$(docker container inspect -f '{{.State.Running}}' orion_db)" = "true" ]]; then
docker network create joystream_default || true
docker-compose up -d orion_db
docker-compose up -d orion_processor # We need the processor to run the migrations
until docker-compose logs orion_db | grep "database system is ready to accept connections" >/dev/null; do
docker compose up -d orion_db
docker compose up -d orion_processor # We need the processor to run the migrations
until docker compose logs orion_db | grep "database system is ready to accept connections" >/dev/null; do
sleep 1
done
fi

sleep 10

cleanup() {
docker-compose down -v
docker compose down -v
}

# Run the tests
Expand Down
Loading

0 comments on commit 41bf490

Please sign in to comment.