Commit

fix: Resolve errors in pre-commit hooks (ruff, mypy)
talhahussain7 committed Jun 25, 2024
1 parent b1591d1 commit d745deb
Showing 24 changed files with 1,451 additions and 736 deletions.
34 changes: 17 additions & 17 deletions .pre-commit-config.yaml
@@ -31,21 +31,21 @@ repos:
    hooks:
      - id: black

  # - repo: https://github.com/charliermarsh/ruff-pre-commit
  #   # Ruff version.
  #   rev: 'v0.1.8'
  #   hooks:
  #     - id: ruff
  #       args: [--fix,--exclude=example]
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    # Ruff version.
    rev: 'v0.1.8'
    hooks:
      - id: ruff
        args: [--fix,--exclude=example]

  # - repo: https://github.com/pre-commit/mirrors-mypy
  #   rev: 'v1.7.1'
  #   hooks:
  #     - id: mypy
  #       exclude: |
  #         (?x)(
  #             tests|
  #             examples
  #         )
  #       disable_error_codes: ["attr-defined"]
  #       additional_dependencies: [types-requests==2.31.0.1]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: 'v1.7.1'
    hooks:
      - id: mypy
        exclude: |
          (?x)(
              tests|
              examples
          )
        disable_error_codes: ["attr-defined"]
        additional_dependencies: [types-requests==2.31.0.1]
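
For context, the attr-defined error code disabled above is mypy's check for attribute access it cannot verify on a type. A minimal sketch of code that trips it (illustrative only, not from this repository):

    class Config:
        """Illustrative class with a single declared attribute."""

        name: str = "cardex"

    cfg = Config()
    # Uncommenting the next line makes mypy report:
    #   error: "Config" has no attribute "version"  [attr-defined]
    # cfg.version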
2 changes: 1 addition & 1 deletion ruff.toml
@@ -30,7 +30,7 @@ select = [
"NPY", # NumPy-specific rules
"RUF", # Ruff-specific rules
]
ignore = ["ANN101", "ANN102", "UP006"]
ignore = ["ANN101", "ANN102", "UP006" , "E501"]
unfixable = ["B"] # Avoid trying to fix flake8-bugbear violations.
target-version = "py39" # Assume Python 3.9.
extend-exclude = ["tests", "examples"]
4 changes: 4 additions & 0 deletions src/cardex/backend/__init__.py
@@ -0,0 +1,4 @@
"""This module provides the backend functionality for Cardex.
It includes interactions with the blockchain and other external services.
"""
1 change: 1 addition & 0 deletions src/cardex/backend/blockfrost.py
@@ -0,0 +1 @@
"""This module handles interactions with the Blockfrost API."""
129 changes: 78 additions & 51 deletions src/cardex/backend/dbsync.py
@@ -2,6 +2,7 @@
import os
from datetime import datetime
from threading import Lock
from typing import Any

import psycopg_pool
from dotenv import load_dotenv
@@ -49,7 +50,7 @@ def get_dbsync_pool() -> psycopg_pool.ConnectionPool:
return POOL


def db_query(query: str, args: tuple | None = None) -> list[tuple]:
def db_query(query: str, args: tuple | None = None) -> list[dict[str, Any]]:
"""Fetch results from a query."""
with get_dbsync_pool().connection() as conn: # noqa: SIM117
with conn.cursor(row_factory=dict_row) as cursor:
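
The corrected return type matches the dict_row row factory used here: each fetched row comes back as a mapping keyed by column name rather than a positional tuple. A standalone sketch of the same pattern (the connection string is a placeholder, not the project's real configuration):

    from __future__ import annotations

    from typing import Any

    import psycopg
    from psycopg.rows import dict_row

    def fetch_rows(query: str, args: dict[str, Any] | None = None) -> list[dict[str, Any]]:
        """Return query results as dicts keyed by column name."""
        with psycopg.connect("dbname=cexplorer") as conn:  # placeholder DSN
            with conn.cursor(row_factory=dict_row) as cursor:
                cursor.execute(query, args)
                return cursor.fetchall()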
@@ -145,17 +146,21 @@ def get_pool_utxos(
OFFSET %(offset)s
"""

values = {"limit": limit, "offset": page * limit}
values: dict[str, Any] = {"limit": limit, "offset": page * limit}
if assets is not None:
values.update({"policies": [bytes.fromhex(p[:56]) for p in assets]})
values.update({"names": [bytes.fromhex(p[56:]) for p in assets]})
values.update(
{
"policies": [bytes.fromhex(p[:56]) for p in assets],
"names": [bytes.fromhex(p[56:]) for p in assets],
},
)

elif addresses is not None:
values.update(
{"addresses": [Address.decode(a).payment_part.payload for a in addresses]},
)

r = db_query(datum_selector, values)
r = db_query(datum_selector, tuple(values))

return PoolStateList.model_validate(r)

@@ -202,7 +207,7 @@ def get_pool_in_tx(
WHERE datum.hash IS NOT NULL AND tx.hash = DECODE(%(tx_hash)s, 'hex')
"""

values = {"tx_hash": tx_hash}
values: dict[str, Any] = {"tx_hash": tx_hash}
if assets is not None:
values.update({"policies": [bytes.fromhex(p[:56]) for p in assets]})
values.update({"names": [bytes.fromhex(p[56:]) for p in assets]})
@@ -212,7 +217,7 @@
{"addresses": [Address.decode(a).payment_part.payload for a in addresses]},
)

r = db_query(datum_selector, values)
r = db_query(datum_selector, tuple(values))

return PoolStateList.model_validate(r)

@@ -232,7 +237,7 @@ def last_block(last_n_blocks: int = 2) -> BlockList:
WHERE block_no IS NOT null
ORDER BY block_no DESC
LIMIT %(last_n_blocks)s""",
{"last_n_blocks": last_n_blocks},
tuple({"last_n_blocks": last_n_blocks}),
)
return BlockList.model_validate(r)

@@ -250,13 +255,14 @@ def get_pool_utxos_in_block(block_no: int) -> PoolStateList:
WHERE block.block_no = %(block_no)s AND datum.hash IS NOT NULL
"""
)
r = db_query(datum_selector, {"block_no": block_no})
r = db_query(datum_selector, tuple({"block_no": block_no}))

return PoolStateList.model_validate(r)


def get_script_from_address(address: Address) -> ScriptReference:
SCRIPT_SELECTOR = """
"""Get script reference from address."""
script_selector = """
SELECT ENCODE(tx.hash, 'hex') as "tx_hash",
tx_out.index as "tx_index",
tx_out.address,
@@ -281,20 +287,32 @@ def get_script_from_address(address: Address) -> ScriptReference:
WHERE s.hash = %(address)b
LIMIT 1
"""
r = db_query(SCRIPT_SELECTOR, {"address": address.payment_part.payload})
r = db_query(script_selector, (address.payment_part.payload,))
result = r[0]

if r[0]["assets"] is not None and r[0]["assets"][0]["lovelace"] is None:
r[0]["assets"] = None
if result["assets"] is not None and result["assets"][0].get("lovelace") is None:
result["assets"] = None

return ScriptReference.model_validate(r[0])
return ScriptReference.model_validate(result)
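
The move from indexing to .get above guards the case where the first asset row has no lovelace key at all: indexing would raise KeyError, while .get returns None and keeps the comparison safe. In isolation:

    row: dict[str, int] = {"policy_count": 1}  # an asset row without "lovelace"
    # row["lovelace"] would raise KeyError; .get degrades to None instead.
    assert row.get("lovelace") is None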


def get_historical_order_utxos(
stake_addresses: list[str],
after_time: datetime | int | None = None,
limit: int = 1000,
page: int = 0,
):
) -> SwapTransactionList:
"""Retrieves historical order UTXOs for the given stake addresses.
Args:
stake_addresses: A list of stake addresses to filter by.
after_time: An optional datetime or timestamp to filter UTXOs created after a specific time.
limit: The maximum number of UTXOs to return.
page: The page number for pagination.
Returns:
A SwapTransactionList containing the matching UTXOs.
"""
if isinstance(after_time, int):
after_time = datetime.fromtimestamp(after_time)

@@ -406,29 +424,32 @@ def get_historical_order_utxos(

r = db_query(
utxo_selector,
{
"addresses": [
Address.decode(a).payment_part.payload for a in stake_addresses
],
"limit": limit,
"offset": page * limit,
"after_time": None
if after_time is None
else after_time.strftime("%Y-%m-%d %H:%M:%S"),
},
tuple(
{
"addresses": [
Address.decode(a).payment_part.payload for a in stake_addresses
],
"limit": limit,
"offset": page * limit,
"after_time": None
if after_time is None
else after_time.strftime("%Y-%m-%d %H:%M:%S"),
},
),
)

return SwapTransactionList.model_validate(r)
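
With the new return annotation, a call site looks like the following sketch (the stake address is a placeholder, and a configured db-sync connection is assumed):

    from datetime import datetime, timedelta

    stake_addresses = ["<bech32 stake address>"]  # placeholder, not a real address
    since = datetime.now() - timedelta(days=1)

    # after_time also accepts a unix timestamp (int); it is converted internally.
    orders = get_historical_order_utxos(stake_addresses, after_time=since, limit=100, page=0)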


def get_order_utxos_by_block_or_tx(
def get_order_utxos_by_block_or_tx( # noqa: PLR0913
stake_addresses: list[str],
out_tx_hash: list[str] | None = None,
block_no: int | None = None,
after_block: int | None = None,
limit: int = 1000,
page: int = 0,
) -> SwapTransactionList:
"""Get order UTXOs by block or transaction."""
utxo_selector = """
SELECT (
SELECT array_agg(DISTINCT txo.address)
@@ -557,18 +578,20 @@ def get_order_utxos_by_block_or_tx(

r = db_query(
utxo_selector,
{
"addresses": [
Address.decode(a).payment_part.payload for a in stake_addresses
],
"limit": limit,
"offset": page * limit,
"block_no": block_no,
"after_block": after_block,
"out_tx_hash": None
if out_tx_hash is None
else [bytes.fromhex(h) for h in out_tx_hash],
},
tuple(
{
"addresses": [
Address.decode(a).payment_part.payload for a in stake_addresses
],
"limit": limit,
"offset": page * limit,
"block_no": block_no,
"after_block": after_block,
"out_tx_hash": None
if out_tx_hash is None
else [bytes.fromhex(h) for h in out_tx_hash],
},
),
)

return SwapTransactionList.model_validate(r)
@@ -580,7 +603,8 @@ def get_cancel_utxos(
after_time: datetime | int | None = None,
limit: int = 1000,
page: int = 0,
):
) -> SwapTransactionList:
"""Retrieve cancel UTXOs for given stake addresses."""
if isinstance(after_time, int):
after_time = datetime.fromtimestamp(after_time)

@@ -678,7 +702,8 @@ def get_cancel_utxos(
utxo_selector += """
WHERE block.block_no = %(block_no)s"""
else:
raise ValueError("Either after_time or block_no should be defined.")
error_msg = "Either after_time or block_no should be defined."
raise ValueError(error_msg)

utxo_selector += """
GROUP BY tx.hash, txo.value, txo.id, block.hash, block.time, block.block_no,
@@ -702,17 +727,19 @@

r = db_query(
utxo_selector,
{
"addresses": [
Address.decode(a).payment_part.payload for a in stake_addresses
],
"limit": limit,
"offset": page * limit,
"after_time": None
if after_time is None
else after_time.strftime("%Y-%m-%d %H:%M:%S"),
"block_no": block_no,
},
tuple(
{
"addresses": [
Address.decode(a).payment_part.payload for a in stake_addresses
],
"limit": limit,
"offset": page * limit,
"after_time": None
if after_time is None
else after_time.strftime("%Y-%m-%d %H:%M:%S"),
"block_no": block_no,
},
),
)

return SwapTransactionList.model_validate(r)
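
Each selector above ends in a pydantic model_validate call, which turns the list of row dicts returned by db_query into typed models. A self-contained sketch of that pattern (field names are assumed for illustration, not taken from the project's actual models):

    from typing import Any

    from pydantic import BaseModel, RootModel

    class Block(BaseModel):
        block_no: int
        block_time: int

    class BlockList(RootModel[list[Block]]):
        """A list of blocks validated as a single root model."""

    rows: list[dict[str, Any]] = [{"block_no": 10432117, "block_time": 1719273600}]
    blocks = BlockList.model_validate(rows)  # raises ValidationError on bad rows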
5 changes: 3 additions & 2 deletions src/cardex/dataclasses/datums.py
@@ -69,13 +69,14 @@ def from_address(cls, address: Address) -> "PlutusFullAddress":
),
)
else:
stake = PlutusNone
stake = PlutusNone()
return PlutusFullAddress(
PlutusPartAddress(bytes.fromhex(str(address.payment_part))),
stake=stake,
)

def to_address(self) -> Address:
"""Convert PlutusFullAddress to an Address object."""
payment_part = VerificationKeyHash(self.payment.address[:28])
if isinstance(self.stake, PlutusNone):
stake_part = None
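
The PlutusNone to PlutusNone() fix above matters precisely because of this isinstance check: assigning the class object instead of an instance makes the check evaluate to False. A simplified stand-in (not the real datum class) demonstrates the difference:

    class PlutusNone:
        """Simplified stand-in for the real datum class."""

    stake = PlutusNone        # the class object itself, as the old bug assigned
    print(isinstance(stake, PlutusNone))   # False

    stake = PlutusNone()      # an instance, as the fixed line assigns
    print(isinstance(stake, PlutusNone))   # True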
@@ -117,7 +118,7 @@ def from_assets(cls, asset: Assets) -> "AssetClass":
return AssetClass(policy=policy, asset_name=asset_name)

@property
def assets(self):
def assets(self) -> Assets:
"""Convert back to assets."""
if self.policy.hex() == "":
asset = "lovelace"