Skip to content

Commit b5d7b1a

Browse files
committed
[WIP]
1 parent d2c3d6a commit b5d7b1a

File tree

3 files changed

+103
-12
lines changed

3 files changed

+103
-12
lines changed

async_substrate_interface/async_substrate.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121
TYPE_CHECKING,
2222
)
2323

24-
import asyncstdlib as a
2524
from bittensor_wallet.keypair import Keypair
2625
from bittensor_wallet.utils import SS58_FORMAT
2726
from bt_decode import MetadataV15, PortableRegistry, decode as decode_by_type_string
@@ -49,6 +48,7 @@
4948
Preprocessed,
5049
)
5150
from async_substrate_interface.utils import hex_to_bytes, json, get_next_id
51+
from async_substrate_interface.utils.cache import async_sql_lru_cache
5252
from async_substrate_interface.utils.decoding import (
5353
_determine_if_old_runtime_call,
5454
_bt_decode_to_dict_or_list,
@@ -1659,7 +1659,7 @@ def convert_event_data(data):
16591659
events.append(convert_event_data(item))
16601660
return events
16611661

1662-
@a.lru_cache(maxsize=512) # large cache with small items
1662+
@async_sql_lru_cache(max_size=512)
16631663
async def get_parent_block_hash(self, block_hash):
16641664
block_header = await self.rpc_request("chain_getHeader", [block_hash])
16651665

@@ -1672,15 +1672,15 @@ async def get_parent_block_hash(self, block_hash):
16721672
return block_hash
16731673
return parent_block_hash
16741674

1675-
@a.lru_cache(maxsize=16) # small cache with large items
1675+
@async_sql_lru_cache(max_size=16)
16761676
async def get_block_runtime_info(self, block_hash: str) -> dict:
16771677
"""
16781678
Retrieve the runtime info of given block_hash
16791679
"""
16801680
response = await self.rpc_request("state_getRuntimeVersion", [block_hash])
16811681
return response.get("result")
16821682

1683-
@a.lru_cache(maxsize=512) # large cache with small items
1683+
@async_sql_lru_cache(max_size=512)
16841684
async def get_block_runtime_version_for(self, block_hash: str):
16851685
"""
16861686
Retrieve the runtime version of the parent of a given block_hash
@@ -1914,7 +1914,7 @@ async def _make_rpc_request(
19141914

19151915
return request_manager.get_results()
19161916

1917-
@a.lru_cache(maxsize=512) # RPC methods are unlikely to change often
1917+
@async_sql_lru_cache(max_size=512)
19181918
async def supports_rpc_method(self, name: str) -> bool:
19191919
"""
19201920
Check if substrate RPC supports given method
@@ -1985,7 +1985,7 @@ async def rpc_request(
19851985
else:
19861986
raise SubstrateRequestException(result[payload_id][0])
19871987

1988-
@a.lru_cache(maxsize=512) # block_id->block_hash does not change
1988+
@async_sql_lru_cache(max_size=512)
19891989
async def get_block_hash(self, block_id: int) -> str:
19901990
return (await self.rpc_request("chain_getBlockHash", [block_id]))["result"]
19911991

async_substrate_interface/sync_substrate.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import logging
22
import random
3-
from functools import lru_cache
43
from hashlib import blake2b
54
from typing import Optional, Union, Callable, Any
65

@@ -31,6 +30,7 @@
3130
ScaleObj,
3231
)
3332
from async_substrate_interface.utils import hex_to_bytes, json, get_next_id
33+
from async_substrate_interface.utils.cache import sql_lru_cache
3434
from async_substrate_interface.utils.decoding import (
3535
_determine_if_old_runtime_call,
3636
_bt_decode_to_dict_or_list,
@@ -1406,7 +1406,7 @@ def convert_event_data(data):
14061406
events.append(convert_event_data(item))
14071407
return events
14081408

1409-
@lru_cache(maxsize=512) # large cache with small items
1409+
@sql_lru_cache(max_size=512)
14101410
def get_parent_block_hash(self, block_hash):
14111411
block_header = self.rpc_request("chain_getHeader", [block_hash])
14121412

@@ -1419,15 +1419,15 @@ def get_parent_block_hash(self, block_hash):
14191419
return block_hash
14201420
return parent_block_hash
14211421

1422-
@lru_cache(maxsize=16) # small cache with large items
1422+
@sql_lru_cache(max_size=16)
14231423
def get_block_runtime_info(self, block_hash: str) -> dict:
14241424
"""
14251425
Retrieve the runtime info of given block_hash
14261426
"""
14271427
response = self.rpc_request("state_getRuntimeVersion", [block_hash])
14281428
return response.get("result")
14291429

1430-
@lru_cache(maxsize=512) # large cache with small items
1430+
@sql_lru_cache(max_size=512)
14311431
def get_block_runtime_version_for(self, block_hash: str):
14321432
"""
14331433
Retrieve the runtime version of the parent of a given block_hash
@@ -1656,7 +1656,7 @@ def _make_rpc_request(
16561656
return request_manager.get_results()
16571657

16581658
# TODO change this logic
1659-
@lru_cache(maxsize=512) # RPC methods are unlikely to change often
1659+
@sql_lru_cache(max_size=512)
16601660
def supports_rpc_method(self, name: str) -> bool:
16611661
"""
16621662
Check if substrate RPC supports given method
@@ -1727,7 +1727,7 @@ def rpc_request(
17271727
else:
17281728
raise SubstrateRequestException(result[payload_id][0])
17291729

1730-
@lru_cache(maxsize=512) # block_id->block_hash does not change
1730+
@sql_lru_cache(max_size=512)
17311731
def get_block_hash(self, block_id: int) -> str:
17321732
return self.rpc_request("chain_getBlockHash", [block_id])["result"]
17331733

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
import functools
2+
import pickle
3+
import sqlite3
4+
import asyncstdlib as a
5+
6+
7+
def _get_table_name(func):
8+
"""Convert "ClassName.method_name" to "ClassName_method_name"""
9+
return func.__qualname__.replace(".", "_")
10+
11+
12+
def _create_table(conn, table_name):
13+
c = conn.cursor()
14+
c.execute(
15+
f"CREATE TABLE IF NOT EXISTS {table_name} (key BLOB PRIMARY KEY, value BLOB, chain TEXT)"
16+
)
17+
conn.commit()
18+
19+
20+
def _retrieve_from_cache(c, table_name, key, chain):
21+
try:
22+
c.execute(
23+
f"SELECT value FROM {table_name} WHERE key=? AND chain=?", (key, chain)
24+
)
25+
result = c.fetchone()
26+
if result is not None:
27+
return pickle.loads(result[0])
28+
except (pickle.PickleError, sqlite3.Error) as e:
29+
print(f"Cache error: {str(e)}")
30+
pass
31+
32+
33+
def _insert_into_cache(c, conn, table_name, key, result, chain):
34+
try:
35+
c.execute(
36+
f"INSERT OR REPLACE INTO {table_name} VALUES (?,?,?)",
37+
(key, pickle.dumps(result), chain),
38+
)
39+
conn.commit()
40+
except (pickle.PickleError, sqlite3.Error) as e:
41+
print(f"Cache error: {str(e)}")
42+
pass
43+
44+
45+
def sql_lru_cache(max_size=None):
    """Decorator factory: in-memory LRU cache backed by a persistent
    SQLite cache in ``/tmp/cache.db``.

    Every call site in this commit uses ``@sql_lru_cache(max_size=512)``,
    but the previous signature ``sql_lru_cache(func, max_size=None)``
    made that a ``TypeError`` (``func`` was a required positional arg).
    Rewritten as a proper factory: calling it with ``max_size`` returns
    the real decorator.

    Results are keyed on the pickled ``(args, kwargs)`` plus the
    instance's ``self._chain``, so different chain endpoints do not
    share cache entries.

    NOTE(review): the LRU layer keys on ``self`` as well, which keeps
    every decorated instance alive for the cache's lifetime (ruff B019)
    — preserved from the original design; revisit if instances are
    short-lived.
    """

    def decorator(func):
        conn = sqlite3.connect("/tmp/cache.db")
        table_name = _get_table_name(func)
        _create_table(conn, table_name)

        @functools.lru_cache(maxsize=max_size)
        @functools.wraps(func)  # preserve name/docstring of the wrapped method
        def inner(self, *args, **kwargs):
            c = conn.cursor()
            key = pickle.dumps((args, kwargs))
            chain = self._chain

            result = _retrieve_from_cache(c, table_name, key, chain)
            if result is not None:
                return result

            # Not in the DB (or the stored value was None): call through
            # and persist the fresh result.
            result = func(self, *args, **kwargs)
            _insert_into_cache(c, conn, table_name, key, result, chain)

            return result

        return inner

    return decorator
68+
69+
70+
def async_sql_lru_cache(max_size=None):
    """Async decorator factory: ``asyncstdlib`` LRU cache backed by a
    persistent SQLite cache in ``/tmp/cache.db``.

    Call sites use ``@async_sql_lru_cache(max_size=512)``; the previous
    ``async_sql_lru_cache(func, max_size=None)`` signature made that a
    ``TypeError``. Rewritten as a proper factory, mirroring
    ``sql_lru_cache``.

    Results are keyed on the pickled ``(args, kwargs)`` plus
    ``self._chain`` so different chain endpoints stay separate.
    """

    def decorator(func):
        conn = sqlite3.connect("/tmp/cache.db")
        table_name = _get_table_name(func)
        _create_table(conn, table_name)

        @a.lru_cache(maxsize=max_size)
        async def inner(self, *args, **kwargs):
            c = conn.cursor()
            key = pickle.dumps((args, kwargs))
            chain = self._chain

            result = _retrieve_from_cache(c, table_name, key, chain)
            if result is not None:
                return result

            # Miss (or a stored None): await the real call and persist it.
            result = await func(self, *args, **kwargs)
            _insert_into_cache(c, conn, table_name, key, result, chain)

            return result

        return inner

    return decorator

0 commit comments

Comments
 (0)