Skip to content

Commit c7a93a5

Browse files
committed
Good stopping point until bt-decode is updated.
1 parent c4fcaca commit c7a93a5

File tree

2 files changed

+62
-30
lines changed

2 files changed

+62
-30
lines changed

async_substrate_interface/async_substrate.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2402,7 +2402,6 @@ async def get_block_metadata(
24022402
"MetadataVersioned", data=ScaleBytes(result)
24032403
)
24042404
metadata_decoder.decode()
2405-
24062405
return metadata_decoder
24072406
else:
24082407
return result

async_substrate_interface/utils/cache.py

Lines changed: 62 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -38,11 +38,13 @@ def __new__(cls, chain_endpoint: str):
3838
cls._instances[chain_endpoint] = instance
3939
return instance
4040

41-
async def _create_if_not_exists(self, chain: str, table_name: str):
41+
async def close(self):
    """Close the cached aiosqlite connection, if one was ever opened.

    Safe to call when no connection exists (``self._db`` is falsy) and
    safe to call more than once. Serialized against all other cache
    operations via ``self._lock``.
    """
    async with self._lock:
        if self._db:
            # Removed leftover debug output (`print(44)`).
            await self._db.close()
            # Drop the handle so the lazy-connect check (`if not self._db`)
            # reopens a fresh connection instead of reusing a closed one.
            self._db = None
46+
47+
async def _create_if_not_exists(self, chain: str, table_name: str):
4648
if not (local_chain := _check_if_local(chain)) or not USE_CACHE:
4749
await self._db.execute(
4850
f"""
@@ -73,6 +75,10 @@ async def _create_if_not_exists(self, chain: str, table_name: str):
7375
return local_chain
7476

7577
async def __call__(self, chain, other_self, func, args, kwargs) -> Optional[Any]:
78+
async with self._lock:
79+
if not self._db:
80+
_ensure_dir()
81+
self._db = await aiosqlite.connect(CACHE_LOCATION)
7682
table_name = _get_table_name(func)
7783
local_chain = await self._create_if_not_exists(chain, table_name)
7884
key = pickle.dumps((args, kwargs or None))
@@ -99,26 +105,32 @@ async def __call__(self, chain, other_self, func, args, kwargs) -> Optional[Any]
99105
return result
100106

101107
async def load_runtime_cache(self, chain: str) -> tuple[dict, dict, dict]:
108+
async with self._lock:
109+
if not self._db:
110+
_ensure_dir()
111+
self._db = await aiosqlite.connect(CACHE_LOCATION)
102112
block_mapping = {}
103113
block_hash_mapping = {}
104114
version_mapping = {}
105115
tables = {
106-
"rt_cache_block": block_mapping,
107-
"rt_cache_block_hash": block_hash_mapping,
108-
"rt_cache_version": version_mapping
116+
"RuntimeCache_blocks": block_mapping,
117+
"RuntimeCache_block_hashes": block_hash_mapping,
118+
"RuntimeCache_versions": version_mapping,
109119
}
110120
for table in tables.keys():
111-
local_chain = await self._create_if_not_exists(chain, table)
121+
async with self._lock:
122+
local_chain = await self._create_if_not_exists(chain, table)
112123
if local_chain:
113124
return {}, {}, {}
114125
for table_name, mapping in tables.items():
115126
try:
116-
cursor: aiosqlite.Cursor = await self._db.execute(
117-
f"SELECT key, value FROM {table_name} WHERE chain=?",
118-
(chain,),
119-
)
120-
results = await cursor.fetchall()
121-
await cursor.close()
127+
async with self._lock:
128+
cursor: aiosqlite.Cursor = await self._db.execute(
129+
f"SELECT key, value FROM {table_name} WHERE chain=?",
130+
(chain,),
131+
)
132+
results = await cursor.fetchall()
133+
await cursor.close()
122134
if results is None:
123135
continue
124136
for row in results:
@@ -130,26 +142,47 @@ async def load_runtime_cache(self, chain: str) -> tuple[dict, dict, dict]:
130142
return {}, {}, {}
131143
return block_mapping, block_hash_mapping, version_mapping
132144

133-
async def dump_runtime_cache(self, chain: str, block_mapping: dict, block_hash_mapping: dict, version_mapping: dict) -> None:
145+
async def dump_runtime_cache(
146+
self,
147+
chain: str,
148+
block_mapping: dict,
149+
block_hash_mapping: dict,
150+
version_mapping: dict,
151+
) -> None:
134152
async with self._lock:
135153
if not self._db:
136154
_ensure_dir()
137155
self._db = await aiosqlite.connect(CACHE_LOCATION)
138-
tables = {
139-
"rt_cache_block": block_mapping,
140-
"rt_cache_block_hash": block_hash_mapping,
141-
"rt_cache_version": version_mapping
142-
}
143-
for table, mapping in tables.items():
144-
local_chain = await self._create_if_not_exists(chain, table)
145-
if local_chain:
146-
return None
147-
await self._db.executemany(
148-
f"INSERT OR REPLACE INTO {table} (key, value, chain) VALUES (?,?,?)",
149-
[(key, pickle.dumps(runtime.serialize()), chain) for key, runtime in mapping.items()],
150-
)
156+
157+
tables = {
158+
"RuntimeCache_blocks": block_mapping,
159+
"RuntimeCache_block_hashes": block_hash_mapping,
160+
"RuntimeCache_versions": version_mapping,
161+
}
162+
for table, mapping in tables.items():
163+
local_chain = await self._create_if_not_exists(chain, table)
164+
if local_chain:
165+
return None
166+
167+
for key, value in mapping.items():
168+
if not isinstance(value, (str, int)):
169+
serialized_runtime = pickle.dumps(value.serialize())
170+
else:
171+
serialized_runtime = pickle.dumps(value)
172+
173+
await self._db.execute(
174+
f"INSERT OR REPLACE INTO {table} (key, value, chain) VALUES (?,?,?)",
175+
(key, serialized_runtime, chain),
176+
)
177+
178+
# await self._db.executemany(
179+
# f"INSERT OR REPLACE INTO {table} (key, value, chain) VALUES (?,?,?)",
180+
# [(key, pickle.dumps(runtime.serialize()), chain) for key, runtime in mapping.items()],
181+
# )
182+
151183
await self._db.commit()
152-
return None
184+
185+
return None
153186

154187

155188
def _ensure_dir():

0 commit comments

Comments
 (0)