
Commit 2b0fac6

Merge pull request #93 from opentensor/release/1.1.0
Release/1.1.0
2 parents ed2b10a + 682e266

7 files changed: +245 -127 lines changed


CHANGELOG.md

+9
@@ -1,5 +1,14 @@
 # Changelog
 
+## 1.1.0 /2025-04-07
+
+## What's Changed
+* Fix: response is still missing for callback by @zyzniewski-reef in https://github.com/opentensor/async-substrate-interface/pull/90
+* Expose websockets exceptions by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/91
+* Improved Query Map Decodes by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/84
+
+**Full Changelog**: https://github.com/opentensor/async-substrate-interface/compare/v1.0.9...v1.1.0
+
 ## 1.0.9 /2025-03-26
 
 ## What's Changed

async_substrate_interface/async_substrate.py

+18 -62
@@ -56,6 +56,9 @@
 )
 from async_substrate_interface.utils.storage import StorageKey
 from async_substrate_interface.type_registry import _TYPE_REGISTRY
+from async_substrate_interface.utils.decoding import (
+    decode_query_map,
+)
 
 if TYPE_CHECKING:
     from websockets.asyncio.client import ClientConnection
@@ -898,7 +901,7 @@ async def decode_scale(
         else:
             return obj
 
-    async def load_runtime(self, runtime):
+    def load_runtime(self, runtime):
         self.runtime = runtime
 
         # Update type registry
@@ -954,7 +957,7 @@ async def init_runtime(
         )
 
         if self.runtime and runtime_version == self.runtime.runtime_version:
-            return
+            return self.runtime
 
         runtime = self.runtime_cache.retrieve(runtime_version=runtime_version)
         if not runtime:
@@ -990,7 +993,7 @@ async def init_runtime(
                 runtime_version=runtime_version, runtime=runtime
             )
 
-        await self.load_runtime(runtime)
+        self.load_runtime(runtime)
 
         if self.ss58_format is None:
             # Check and apply runtime constants
@@ -1000,6 +1003,7 @@ async def init_runtime(
 
             if ss58_prefix_constant:
                 self.ss58_format = ss58_prefix_constant
+        return runtime
 
     async def create_storage_key(
         self,
@@ -2892,12 +2896,11 @@ async def query_map(
         Returns:
             AsyncQueryMapResult object
         """
-        hex_to_bytes_ = hex_to_bytes
         params = params or []
         block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash)
         if block_hash:
             self.last_block_hash = block_hash
-        await self.init_runtime(block_hash=block_hash)
+        runtime = await self.init_runtime(block_hash=block_hash)
 
         metadata_pallet = self.runtime.metadata.get_metadata_pallet(module)
         if not metadata_pallet:
@@ -2952,19 +2955,6 @@ async def query_map(
         result = []
         last_key = None
 
-        def concat_hash_len(key_hasher: str) -> int:
-            """
-            Helper function to avoid if statements
-            """
-            if key_hasher == "Blake2_128Concat":
-                return 16
-            elif key_hasher == "Twox64Concat":
-                return 8
-            elif key_hasher == "Identity":
-                return 0
-            else:
-                raise ValueError("Unsupported hash type")
-
         if len(result_keys) > 0:
             last_key = result_keys[-1]
 
@@ -2975,51 +2965,17 @@ def concat_hash_len(key_hasher: str) -> int:
 
         if "error" in response:
             raise SubstrateRequestException(response["error"]["message"])
-
         for result_group in response["result"]:
-            for item in result_group["changes"]:
-                try:
-                    # Determine type string
-                    key_type_string = []
-                    for n in range(len(params), len(param_types)):
-                        key_type_string.append(
-                            f"[u8; {concat_hash_len(key_hashers[n])}]"
-                        )
-                        key_type_string.append(param_types[n])
-
-                    item_key_obj = await self.decode_scale(
-                        type_string=f"({', '.join(key_type_string)})",
-                        scale_bytes=bytes.fromhex(item[0][len(prefix) :]),
-                        return_scale_obj=True,
-                    )
-
-                    # strip key_hashers to use as item key
-                    if len(param_types) - len(params) == 1:
-                        item_key = item_key_obj[1]
-                    else:
-                        item_key = tuple(
-                            item_key_obj[key + 1]
-                            for key in range(len(params), len(param_types) + 1, 2)
-                        )
-
-                except Exception as _:
-                    if not ignore_decoding_errors:
-                        raise
-                    item_key = None
-
-                try:
-                    item_bytes = hex_to_bytes_(item[1])
-
-                    item_value = await self.decode_scale(
-                        type_string=value_type,
-                        scale_bytes=item_bytes,
-                        return_scale_obj=True,
-                    )
-                except Exception as _:
-                    if not ignore_decoding_errors:
-                        raise
-                    item_value = None
-                result.append([item_key, item_value])
+            result = decode_query_map(
+                result_group["changes"],
+                prefix,
+                runtime,
+                param_types,
+                params,
+                value_type,
+                key_hashers,
+                ignore_decoding_errors,
+            )
         return AsyncQueryMapResult(
             records=result,
             page_size=page_size,

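The hunks above route query_map decoding through the new decode_query_map helper, using the runtime object that init_runtime now returns instead of decoding each change inline. For context, a minimal sketch of how this path is exercised through the public query_map API (illustrative node URL and storage names; assumes AsyncSubstrateInterface is used as an async context manager and that AsyncQueryMapResult supports async iteration):

import asyncio

from async_substrate_interface.async_substrate import AsyncSubstrateInterface


async def main():
    # Illustrative endpoint; substitute any Substrate RPC node.
    async with AsyncSubstrateInterface("wss://node.example:443") as substrate:
        # init_runtime now returns the runtime, which query_map hands to decode_query_map.
        accounts = await substrate.query_map("System", "Account")
        async for account_id, account_info in accounts:
            print(account_id, account_info)


asyncio.run(main())
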
async_substrate_interface/errors.py

+6
@@ -1,3 +1,9 @@
+from websockets.exceptions import ConnectionClosed, InvalidHandshake
+
+ConnectionClosed = ConnectionClosed
+InvalidHandshake = InvalidHandshake
+
+
 class SubstrateRequestException(Exception):
     pass
 

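The errors module above now re-exports the websockets exceptions, so callers can catch connection-level failures without importing websockets themselves. A minimal sketch of the intended usage (hypothetical endpoint; assumes the sync SubstrateInterface entry point from sync_substrate.py shown below):

from async_substrate_interface.errors import (
    ConnectionClosed,
    InvalidHandshake,
    SubstrateRequestException,
)
from async_substrate_interface.sync_substrate import SubstrateInterface

try:
    # Hypothetical endpoint; initialize() is the explicit setup method from sync_substrate.py.
    substrate = SubstrateInterface("wss://node.example:443")
    substrate.initialize()
except (ConnectionClosed, InvalidHandshake) as e:
    print(f"websocket-level failure: {e}")
except SubstrateRequestException as e:
    print(f"substrate RPC error: {e}")
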
async_substrate_interface/sync_substrate.py

+18 -60
@@ -34,6 +34,7 @@
 from async_substrate_interface.utils.decoding import (
     _determine_if_old_runtime_call,
     _bt_decode_to_dict_or_list,
+    decode_query_map,
 )
 from async_substrate_interface.utils.storage import StorageKey
 from async_substrate_interface.type_registry import _TYPE_REGISTRY
@@ -525,7 +526,9 @@ def __enter__(self):
         return self
 
     def __del__(self):
-        self.close()
+        self.ws.close()
+        print("DELETING SUBSTATE")
+        # self.ws.protocol.fail(code=1006)  # ABNORMAL_CLOSURE
 
     def initialize(self):
         """
@@ -703,7 +706,7 @@ def init_runtime(
         )
 
         if self.runtime and runtime_version == self.runtime.runtime_version:
-            return
+            return self.runtime
 
         runtime = self.runtime_cache.retrieve(runtime_version=runtime_version)
         if not runtime:
@@ -757,6 +760,7 @@ def init_runtime(
 
             if ss58_prefix_constant:
                 self.ss58_format = ss58_prefix_constant
+        return runtime
 
     def create_storage_key(
         self,
@@ -1626,7 +1630,7 @@ def _make_rpc_request(
                 if item_id not in request_manager.responses or isinstance(
                     result_handler, Callable
                 ):
-                    if response := _received.pop(item_id):
+                    if response := _received.pop(item_id, None):
                         if (
                             isinstance(result_handler, Callable)
                             and not subscription_added
@@ -2598,7 +2602,7 @@ def query_map(
         block_hash = self._get_current_block_hash(block_hash, reuse_block_hash)
         if block_hash:
             self.last_block_hash = block_hash
-        self.init_runtime(block_hash=block_hash)
+        runtime = self.init_runtime(block_hash=block_hash)
 
         metadata_pallet = self.runtime.metadata.get_metadata_pallet(module)
         if not metadata_pallet:
@@ -2654,19 +2658,6 @@ def query_map(
         result = []
         last_key = None
 
-        def concat_hash_len(key_hasher: str) -> int:
-            """
-            Helper function to avoid if statements
-            """
-            if key_hasher == "Blake2_128Concat":
-                return 16
-            elif key_hasher == "Twox64Concat":
-                return 8
-            elif key_hasher == "Identity":
-                return 0
-            else:
-                raise ValueError("Unsupported hash type")
-
         if len(result_keys) > 0:
             last_key = result_keys[-1]
 
@@ -2679,49 +2670,16 @@ def concat_hash_len(key_hasher: str) -> int:
             raise SubstrateRequestException(response["error"]["message"])
 
         for result_group in response["result"]:
-            for item in result_group["changes"]:
-                try:
-                    # Determine type string
-                    key_type_string = []
-                    for n in range(len(params), len(param_types)):
-                        key_type_string.append(
-                            f"[u8; {concat_hash_len(key_hashers[n])}]"
-                        )
-                        key_type_string.append(param_types[n])
-
-                    item_key_obj = self.decode_scale(
-                        type_string=f"({', '.join(key_type_string)})",
-                        scale_bytes=bytes.fromhex(item[0][len(prefix) :]),
-                        return_scale_obj=True,
-                    )
-
-                    # strip key_hashers to use as item key
-                    if len(param_types) - len(params) == 1:
-                        item_key = item_key_obj[1]
-                    else:
-                        item_key = tuple(
-                            item_key_obj[key + 1]
-                            for key in range(len(params), len(param_types) + 1, 2)
-                        )
-
-                except Exception as _:
-                    if not ignore_decoding_errors:
-                        raise
-                    item_key = None
-
-                try:
-                    item_bytes = hex_to_bytes_(item[1])
-
-                    item_value = self.decode_scale(
-                        type_string=value_type,
-                        scale_bytes=item_bytes,
-                        return_scale_obj=True,
-                    )
-                except Exception as _:
-                    if not ignore_decoding_errors:
-                        raise
-                    item_value = None
-                result.append([item_key, item_value])
+            result = decode_query_map(
+                result_group["changes"],
+                prefix,
+                runtime,
+                param_types,
+                params,
+                value_type,
+                key_hashers,
+                ignore_decoding_errors,
+            )
         return QueryMapResult(
             records=result,
             page_size=page_size,

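One small fix worth calling out from the _make_rpc_request hunk above: dict.pop raises KeyError when the key is missing unless a default is supplied, so adding the None default lets the walrus expression simply skip request ids whose responses have not arrived yet. A minimal standalone illustration (not library code):

_received = {"a": {"result": 1}}

# Old form: `_received.pop("b")` raises KeyError if no response for "b" has arrived.
# New form: the None default makes the walrus test falsy, so the branch is skipped.
if response := _received.pop("b", None):
    print("handle", response)
else:
    print("no response yet for this id")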