From 81b9f3da17a8e2006f71c09abdcb1029ef4ef577 Mon Sep 17 00:00:00 2001 From: QuarkChain Dev Date: Fri, 13 Mar 2026 04:09:34 +0100 Subject: [PATCH 01/11] replace jsonrpcserver/jsonrpcclient packages with custom implementation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The jsonrpcserver (3.x) and jsonrpcclient (2.x) packages are incompatible with Python 3.13 and are no longer maintained. Changes: - Add quarkchain/cluster/jsonrpcserver.py: lightweight custom JSON-RPC 2.0 server built on aiohttp (RpcMethods, JsonRpcError hierarchy, request dispatch with positional/named params) - Add quarkchain/jsonrpc_client.py: synchronous JsonRpcClient (uses httpx) and asynchronous AsyncJsonRpcClient (uses aiohttp) replacing jsonrpcclient.HTTPClient / aiohttpClient - Update quarkchain/cluster/jsonrpc.py to use the new RpcMethods class; make start_*_server() classmethod async; use get_running_loop() - Replace jsonrpcclient calls in all tools with JsonRpcClient.call() - Rename quarkchain/tools/stats → quarkchain/tools/stats.py - Remove jsonrpcserver config log suppression (no longer needed) - Update test_jsonrpc.py to use AsyncJsonRpcClient Co-Authored-By: Claude Sonnet 4.6 --- quarkchain/cluster/jsonrpc.py | 74 +++++----- quarkchain/cluster/jsonrpcserver.py | 169 ++++++++++++++++++++++ quarkchain/cluster/prom.py | 9 +- quarkchain/cluster/tests/test_jsonrpc.py | 42 ++---- quarkchain/jsonrpc_client.py | 58 ++++++++ quarkchain/tools/balance_watcher.py | 8 +- quarkchain/tools/batch_deploy_contract.py | 20 +-- quarkchain/tools/bootnode_health_check.py | 24 +-- quarkchain/tools/check_syncing_state.py | 28 +--- quarkchain/tools/count_total_balance.py | 25 +--- quarkchain/tools/erc20_balance_watcher.py | 9 +- quarkchain/tools/external_miner.py | 32 ++-- quarkchain/tools/fund_testnet.py | 22 +-- quarkchain/tools/monitoring.py | 11 +- quarkchain/tools/reorg_detector.py | 7 +- quarkchain/tools/{stats => stats.py} | 30 ++-- 16 files changed, 338 
insertions(+), 230 deletions(-) create mode 100644 quarkchain/cluster/jsonrpcserver.py create mode 100644 quarkchain/jsonrpc_client.py rename quarkchain/tools/{stats => stats.py} (84%) mode change 100755 => 100644 diff --git a/quarkchain/cluster/jsonrpc.py b/quarkchain/cluster/jsonrpc.py index 8106fed1f..e0cc39ba4 100644 --- a/quarkchain/cluster/jsonrpc.py +++ b/quarkchain/cluster/jsonrpc.py @@ -7,11 +7,7 @@ import websockets import rlp from aiohttp import web -from async_armor import armor from decorator import decorator -from jsonrpcserver import config -from jsonrpcserver.async_methods import AsyncMethods -from jsonrpcserver.exceptions import InvalidParams, InvalidRequest, ServerError from quarkchain.cluster.master import MasterServer from quarkchain.cluster.rpc import AccountBranchData @@ -38,6 +34,7 @@ import uuid from quarkchain.cluster.log_filter import LogFilter from quarkchain.cluster.subscription import SUB_LOGS +from quarkchain.cluster.jsonrpcserver import RpcMethods, InvalidParams # defaults DEFAULT_STARTGAS = 100 * 1000 @@ -47,13 +44,9 @@ # TODO: revisit this parameter JSON_RPC_CLIENT_REQUEST_MAX_SIZE = 16 * 1024 * 1024 -# Disable jsonrpcserver logging -config.log_requests = False -config.log_responses = False EMPTY_TX_ID = "0x" + "0" * Constant.TX_ID_HEX_LENGTH - def quantity_decoder(hex_str, allow_optional=False): """Decode `hexStr` representing a quantity.""" if allow_optional and hex_str is None: @@ -463,14 +456,14 @@ def _parse_log_request( return addresses, topics -public_methods = AsyncMethods() -private_methods = AsyncMethods() +public_methods = RpcMethods() +private_methods = RpcMethods() # noinspection PyPep8Naming class JSONRPCHttpServer: @classmethod - def start_public_server(cls, env, master_server): + async def start_public_server(cls, env, master_server): server = cls( env, master_server, @@ -478,11 +471,11 @@ def start_public_server(cls, env, master_server): env.cluster_config.JSON_RPC_HOST, public_methods, ) - server.start() + await 
server.start() return server @classmethod - def start_private_server(cls, env, master_server): + async def start_private_server(cls, env, master_server): server = cls( env, master_server, @@ -490,12 +483,12 @@ def start_private_server(cls, env, master_server): env.cluster_config.PRIVATE_JSON_RPC_HOST, private_methods, ) - server.start() + await server.start() return server @classmethod - def start_test_server(cls, env, master_server): - methods = AsyncMethods() + async def start_test_server(cls, env, master_server): + methods = RpcMethods() for method in public_methods.values(): methods.add(method) for method in private_methods.values(): @@ -507,13 +500,13 @@ def start_test_server(cls, env, master_server): env.cluster_config.JSON_RPC_HOST, methods, ) - server.start() + await server.start() return server def __init__( - self, env, master_server: MasterServer, port, host, methods: AsyncMethods + self, env, master_server: MasterServer, port, host, methods: RpcMethods ): - self.loop = asyncio.get_event_loop() + self.loop = asyncio.get_running_loop() self.port = port self.host = host self.env = env @@ -521,7 +514,7 @@ def __init__( self.counters = dict() # Bind RPC handler functions to this instance - self.handlers = AsyncMethods() + self.handlers = RpcMethods() for rpc_name in methods: func = methods[rpc_name] self.handlers[rpc_name] = func.__get__(self, self.__class__) @@ -542,14 +535,14 @@ async def __handle(self, request): self.counters[method] = 1 # Use armor to prevent the handler from being cancelled when # aiohttp server loses connection to client - response = await armor(self.handlers.dispatch(request)) + response = await self.handlers.dispatch(d) + if response is None: + return web.Response() if "error" in response: Logger.error(response) - if response.is_notification: - return web.Response() - return web.json_response(response, status=response.http_status) + return web.json_response(response) - def start(self): + async def start(self): app = 
web.Application(client_max_size=JSON_RPC_CLIENT_REQUEST_MAX_SIZE) cors = aiohttp_cors.setup(app) route = app.router.add_post("/", self.__handle) @@ -565,12 +558,12 @@ def start(self): }, ) self.runner = web.AppRunner(app, access_log=None) - self.loop.run_until_complete(self.runner.setup()) + await self.runner.setup() site = web.TCPSite(self.runner, self.host, self.port) - self.loop.run_until_complete(site.start()) + await site.start() - def shutdown(self): - self.loop.run_until_complete(self.runner.cleanup()) + async def shutdown(self): + await self.runner.cleanup() # JSON RPC handlers @public_methods.add @@ -1452,7 +1445,7 @@ def get_data_default(key, decoder, default=None): class JSONRPCWebsocketServer: @classmethod - def start_websocket_server(cls, env, slave_server): + async def start_websocket_server(cls, env, slave_server): server = cls( env, slave_server, @@ -1460,13 +1453,13 @@ def start_websocket_server(cls, env, slave_server): env.slave_config.HOST, public_methods, ) - server.start() + await server.start() return server def __init__( - self, env, slave_server: SlaveServer, port, host, methods: AsyncMethods + self, env, slave_server: SlaveServer, port, host, methods: RpcMethods ): - self.loop = asyncio.get_event_loop() + self.loop = asyncio.get_running_loop() self.port = port self.host = host self.env = env @@ -1475,14 +1468,14 @@ def __init__( self.pending_tx_cache = LRUCache(maxsize=1024) # Bind RPC handler functions to this instance - self.handlers = AsyncMethods() + self.handlers = RpcMethods() for rpc_name in methods: func = methods[rpc_name] self.handlers[rpc_name] = func.__get__(self, self.__class__) self.shard_subscription_managers = self.slave.shard_subscription_managers - async def __handle(self, websocket, path): + async def __handle(self, websocket): sub_ids = dict() # per-websocket var, Dict[sub_id, full_shard_id] try: async for message in websocket: @@ -1501,7 +1494,7 @@ async def __handle(self, websocket, path): msg_id = d.get("id", 0) 
response = await self.handlers.dispatch( - message, + d, context={ "websocket": websocket, "msg_id": msg_id, @@ -1509,6 +1502,8 @@ async def __handle(self, websocket, path): }, ) + if response is None: + continue if "error" in response: Logger.error(response) else: @@ -1519,8 +1514,7 @@ async def __handle(self, websocket, path): elif method == "unsubscribe": sub_id = d.get("params")[0] del sub_ids[sub_id] - if not response.is_notification: - await websocket.send(json.dumps(response)) + await websocket.send(json.dumps(response)) finally: # current websocket connection terminates, remove subscribers in this connection for sub_id, full_shard_id in sub_ids.items(): try: @@ -1531,9 +1525,9 @@ async def __handle(self, websocket, path): except: pass - def start(self): + async def start(self): start_server = websockets.serve(self.__handle, self.host, self.port) - self.loop.run_until_complete(start_server) + await start_server def shutdown(self): pass # TODO diff --git a/quarkchain/cluster/jsonrpcserver.py b/quarkchain/cluster/jsonrpcserver.py new file mode 100644 index 000000000..139657a42 --- /dev/null +++ b/quarkchain/cluster/jsonrpcserver.py @@ -0,0 +1,169 @@ +import inspect +from typing import Any, Callable, Dict, Optional, Awaitable + +from aiohttp import web + + +class JsonRpcError(Exception): + code = -32000 + message = "Server error" + + def __init__(self, message=None, data=None): + super().__init__(message or self.message) + self.message = message or self.message + self.data = data + + def to_dict(self): + error = { + "code": self.code, + "message": self.message, + } + if self.data is not None: + error["data"] = self.data + return error + +class InvalidRequest(JsonRpcError): + code = -32600 + message = "Invalid Request" + +class MethodNotFound(JsonRpcError): + code = -32601 + message = "Method not found" + +class InvalidParams(JsonRpcError): + code = -32602 + message = "Invalid params" + + +class ServerError(JsonRpcError): + code = -32000 + message = "Server 
error" + +class RpcMethods: + def __init__(self): + self._methods: Dict[str, Callable[..., Awaitable[Any]]] = {} + + # ========== dict ========== + def __iter__(self): + return iter(self._methods) + + def __getitem__(self, key): + return self._methods[key] + + def __setitem__(self, key, value): + self._methods[key] = value + + def items(self): + return self._methods.items() + + def keys(self): + return self._methods.keys() + + def values(self): + return self._methods.values() + + # ========== decorator ========== + def add(self, func: Callable[..., Awaitable[Any]] = None, *, name: str = None): + """ + Usage: + + @methods.add + async def foo(...): + + or: + + @methods.add(name="customName") + async def foo(...): + """ + if func is None: + def wrapper(f): + method_name = name or f.__name__ + self._methods[method_name] = f + return f + return wrapper + + method_name = name or func.__name__ + self._methods[method_name] = func + return func + + async def dispatch(self, request_json: Dict[str, Any], context=None) -> Optional[Dict[str, Any]]: + req_id = None + + try: + if not isinstance(request_json, dict): + raise InvalidRequest("Request must be object") + + req_id = request_json.get("id") + + if request_json.get("jsonrpc") != "2.0": + raise InvalidRequest("Invalid JSON-RPC version") + + method = request_json.get("method") + if not isinstance(method, str): + raise InvalidRequest("Method must be string") + + is_notification = "id" not in request_json + + if method not in self._methods: + raise MethodNotFound() + + handler = self._methods[method] + params = request_json.get("params", []) + + # Check if handler accepts a context parameter + sig = inspect.signature(handler) + pass_context = context is not None and "context" in sig.parameters + + if isinstance(params, list): + result = await handler(*params, context=context) if pass_context else await handler(*params) + elif isinstance(params, dict): + result = await handler(**params, context=context) if pass_context else 
await handler(**params) + else: + raise InvalidParams() + + if is_notification: + return None + + return { + "jsonrpc": "2.0", + "result": result, + "id": req_id, + } + + except JsonRpcError as e: + return { + "jsonrpc": "2.0", + "error": e.to_dict(), + "id": req_id, + } + + except TypeError as e: + # Could be missing/extra arguments → treat as invalid params + return { + "jsonrpc": "2.0", + "error": { + "code": -32602, + "message": str(e), + }, + "id": req_id, + } + except Exception: + return { + "jsonrpc": "2.0", + "error": { + "code": -32603, + "message": "Internal error", + }, + "id": req_id, + } + + async def aiohttp_handler(self, request: web.Request) -> web.Response: + body = await request.json() + + # 支持 batch + if isinstance(body, list): + responses = [await self.dispatch(item) for item in body] + return web.json_response(responses) + + response = await self.dispatch(body) + return web.json_response(response) diff --git a/quarkchain/cluster/prom.py b/quarkchain/cluster/prom.py index 51e58ce87..d70d7a4de 100644 --- a/quarkchain/cluster/prom.py +++ b/quarkchain/cluster/prom.py @@ -15,11 +15,6 @@ print("======") raise e -import jsonrpcclient - -# Disable jsonrpcclient verbose logging. 
-logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) TIMEOUT = 10 fetcher = None @@ -54,9 +49,7 @@ def get_highest() -> int: global fetcher assert isinstance(fetcher, Fetcher) - res = fetcher.cli.send( - jsonrpcclient.Request("getRootBlockByHeight"), timeout=TIMEOUT - ) + res = fetcher.cli.call("getRootBlockByHeight") if not res: raise RuntimeError("Failed to get latest block height") return int(res["height"], 16) diff --git a/quarkchain/cluster/tests/test_jsonrpc.py b/quarkchain/cluster/tests/test_jsonrpc.py index 5a4050e98..663cc7cee 100644 --- a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py @@ -1,12 +1,6 @@ -import asyncio import json -import logging import unittest from contextlib import contextmanager - -import aiohttp -from jsonrpcclient.aiohttp_client import aiohttpClient -from jsonrpcclient.exceptions import ReceivedErrorResponse import websockets from quarkchain.cluster.cluster_config import ClusterConfig @@ -36,11 +30,7 @@ from quarkchain.evm.messages import mk_contract_address from quarkchain.evm.transactions import Transaction as EvmTransaction from quarkchain.utils import call_async, sha3_256, token_id_encode - - -# disable jsonrpcclient verbose logging -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) +from quarkchain.jsonrpc_client import AsyncJsonRpcClient, JsonRpcError @contextmanager @@ -50,21 +40,17 @@ def jrpc_http_server_context(master): env.cluster_config.JSON_RPC_PORT = 38391 # to pass the circleCi env.cluster_config.JSON_RPC_HOST = "127.0.0.1" - server = JSONRPCHttpServer.start_test_server(env, master) + server = call_async(JSONRPCHttpServer.start_test_server(env, master)) try: yield server finally: - server.shutdown() + call_async(server.shutdown()) -def send_request(*args): - async def 
__send_request(*args): - async with aiohttp.ClientSession(loop=asyncio.get_event_loop()) as session: - client = aiohttpClient(session, "http://localhost:38391") - response = await client.request(*args) - return response +rpc_client = AsyncJsonRpcClient("http://localhost:38391") - return call_async(__send_request(*args)) +def send_request(method, *args): + return call_async(rpc_client.call(method, *args)) class TestJSONRPCHttp(unittest.TestCase): @@ -852,12 +838,12 @@ def test_getLogs(self): # no filter object as wild cards resp = req({}) self.assertEqual(1, len(resp)) - self.assertDictContainsSubset(expected_log_parts, resp[0]) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) # filter with from/to blocks resp = req({"fromBlock": "0x0", "toBlock": "0x1"}) self.assertEqual(1, len(resp)) - self.assertDictContainsSubset(expected_log_parts, resp[0]) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) resp = req({"fromBlock": "0x0", "toBlock": "0x0"}) self.assertEqual(0, len(resp)) @@ -893,7 +879,7 @@ def test_getLogs(self): for f in (filter_obj, filter_obj_nested): resp = req(f) self.assertEqual(1, len(resp)) - self.assertDictContainsSubset(expected_log_parts, resp[0]) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) self.assertEqual( "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", resp[0]["topics"][0], @@ -927,13 +913,13 @@ def test_getLogs(self): expected_log_parts["transactionIndex"] = "0x3" # after root block coinbase expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() expected_log_parts["blockHash"] = "0x" + block.header.get_hash().hex() - self.assertDictContainsSubset(expected_log_parts, resp[0]) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) self.assertEqual(2, len(resp[0]["topics"])) # missing shard ID should fail for endpoint in ("getLogs", "eth_getLogs"): - with self.assertRaises(ReceivedErrorResponse): + with self.assertRaises(JsonRpcError): 
send_request(endpoint, [{}]) - with self.assertRaises(ReceivedErrorResponse): + with self.assertRaises(JsonRpcError): send_request(endpoint, [{}, None]) def test_estimateGas(self): @@ -1222,7 +1208,7 @@ def jrpc_websocket_server_context(slave_server, port=38590): env.slave_config = env.cluster_config.get_slave_config("S0") env.slave_config.HOST = "0.0.0.0" env.slave_config.WEBSOCKET_JSON_RPC_PORT = port - server = JSONRPCWebsocketServer.start_websocket_server(env, slave_server) + server = call_async(JSONRPCWebsocketServer.start_websocket_server(env, slave_server)) try: yield server finally: @@ -1632,7 +1618,7 @@ def test_logs(self): response = call_async(websocket.recv()) count += 1 d = json.loads(response) - self.assertDictContainsSubset(expected_log_parts, d["params"]["result"]) + self.assertTrue(expected_log_parts.items() <= d["params"]["result"].items()) self.assertEqual( "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", d["params"]["result"]["topics"][0], diff --git a/quarkchain/jsonrpc_client.py b/quarkchain/jsonrpc_client.py new file mode 100644 index 000000000..3634b0ce2 --- /dev/null +++ b/quarkchain/jsonrpc_client.py @@ -0,0 +1,58 @@ +import httpx +import uuid + +class JsonRpcError(Exception): + def __init__(self, error): + self.code = error.get("code") + self.message = error.get("message") + self.data = error.get("data") + super().__init__(f"JSON-RPC Error {self.code}: {self.message}") + +class JsonRpcClient: + def __init__(self, url, timeout=10): + self.client = httpx.Client(base_url=url, timeout=timeout) + + def call(self, method, *params): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": list(params), + "id": str(uuid.uuid4()), + } + + resp = self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise RuntimeError(data["error"]) + + return data.get("result") + + def close(self): + self.client.close() + + +class AsyncJsonRpcClient: + def __init__(self, url, 
timeout=10): + self.client = httpx.AsyncClient(base_url=url, timeout=timeout) + + async def call(self, method, params=None): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": params if params is not None else [], + "id": str(uuid.uuid4()), + } + + resp = await self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise JsonRpcError(data["error"]) + + return data.get("result") + + async def close(self): + await self.client.aclose() \ No newline at end of file diff --git a/quarkchain/tools/balance_watcher.py b/quarkchain/tools/balance_watcher.py index 3fc103517..a8314f305 100644 --- a/quarkchain/tools/balance_watcher.py +++ b/quarkchain/tools/balance_watcher.py @@ -1,18 +1,15 @@ -import jsonrpcclient import time import logging import argparse import smtplib from email.message import EmailMessage - +from quarkchain.jsonrpc_client import JsonRpcClient HOST = "http://jrpc.mainnet.quarkchain.io" PORT = "38391" FORMAT = "%(asctime)-15s %(message)s" logging.basicConfig(format=FORMAT) -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) logger = logging.getLogger() logger.setLevel(logging.INFO) @@ -21,7 +18,8 @@ def query(endpoint, *args): retry, resp = 0, None while retry <= 5: try: - resp = jsonrpcclient.request(HOST + ":" + PORT, endpoint, *args) + cli = JsonRpcClient(HOST + ":" + PORT) + resp = cli.call(endpoint, *args) break except Exception: retry += 1 diff --git a/quarkchain/tools/batch_deploy_contract.py b/quarkchain/tools/batch_deploy_contract.py index f1fd1cfcf..74a7c5543 100644 --- a/quarkchain/tools/batch_deploy_contract.py +++ b/quarkchain/tools/batch_deploy_contract.py @@ -1,26 +1,20 @@ import argparse -import aiohttp import asyncio import logging import rlp -from jsonrpcclient.aiohttp_client import aiohttpClient from quarkchain.env import DEFAULT_ENV from quarkchain.core import Address, 
Identity from quarkchain.evm.transactions import Transaction as EvmTransaction +from quarkchain.jsonrpc_client import AsyncJsonRpcClient class Endpoint: def __init__(self, url): - self.url = url - asyncio.get_event_loop().run_until_complete(self.__create_session()) + self.client = AsyncJsonRpcClient(url) - async def __create_session(self): - self.session = aiohttp.ClientSession() - - async def __send_request(self, *args): - client = aiohttpClient(self.session, self.url) - response = await client.request(*args) + async def __send_request(self, method, *args): + response = await self.client.call(method, *args) return response async def send_transaction(self, tx): @@ -110,15 +104,11 @@ def main(): parser.add_argument("--log_jrpc", default=False, type=bool) args = parser.parse_args() - if not args.log_jrpc: - logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) - logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) - data = bytes.fromhex(args.data) genesisId = Identity.create_from_key(DEFAULT_ENV.config.GENESIS_KEY) endpoint = Endpoint("http://" + args.jrpc_endpoint) - asyncio.get_event_loop().run_until_complete(deploy(endpoint, genesisId, data)) + asyncio.run(deploy(endpoint, genesisId, data)) if __name__ == "__main__": diff --git a/quarkchain/tools/bootnode_health_check.py b/quarkchain/tools/bootnode_health_check.py index 4b4138c3e..20dfc48b9 100644 --- a/quarkchain/tools/bootnode_health_check.py +++ b/quarkchain/tools/bootnode_health_check.py @@ -8,28 +8,18 @@ """ import argparse import asyncio -import logging +import os +import smtplib +import tempfile import time from datetime import datetime -import jsonrpcclient -import psutil -import numpy -from decimal import Decimal -import smtplib from quarkchain.cluster.cluster_config import ClusterConfig -from quarkchain.cluster.master import MasterServer from quarkchain.cluster.cluster import Cluster -import jsonrpcclient -import logging -import time -from datetime import 
datetime -import smtplib -import os -import tempfile +from quarkchain.jsonrpc_client import JsonRpcClient TIMEOUT = 10 PRIVATE_ENDPOINT = "http://{}:38491".format("localhost") -PRIVATE_CLIENT = jsonrpcclient.HTTPClient(PRIVATE_ENDPOINT) +PRIVATE_CLIENT = JsonRpcClient(PRIVATE_ENDPOINT, TIMEOUT) def now(): @@ -45,9 +35,7 @@ async def run(self): def check_routing_table(timeout=TIMEOUT): - result = PRIVATE_CLIENT.send( - jsonrpcclient.Request("getKadRoutingTable"), timeout=timeout - ) + result = PRIVATE_CLIENT.call("getKadRoutingTable") if len(result) == 0: print("Bootstrap node can not provide the routing table for a while!") subject = "Boostrap Node Alert!" diff --git a/quarkchain/tools/check_syncing_state.py b/quarkchain/tools/check_syncing_state.py index bd1ce861a..2763cf6f8 100644 --- a/quarkchain/tools/check_syncing_state.py +++ b/quarkchain/tools/check_syncing_state.py @@ -1,39 +1,25 @@ #! /usr/bin/env pypy3 import argparse -import logging import time from datetime import datetime -import jsonrpcclient -import psutil -import numpy -from decimal import Decimal +from quarkchain.jsonrpc_client import JsonRpcClient TIMEOUT=10 -# disable jsonrpcclient verbose logging -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) - - def now(): return datetime.now().strftime("%Y-%m-%d %H:%M:%S") -def checkHeight(private_client, public_client, timeout=TIMEOUT): - result_private = private_client.send( - jsonrpcclient.Request("getRootBlockByHeight"), - timeout=timeout,) - result_public = public_client.send( - jsonrpcclient.Request("getRootBlockByHeight"), - timeout=timeout,) +def checkHeight(private_client: JsonRpcClient, public_client: JsonRpcClient): + result_private = private_client.call("getRootBlockByHeight") + result_public = public_client.call("getRootBlockByHeight") return { "height": int(result_private["height"], 16), "currentHeight": int(result_public["height"], 16), } 
- -def query_height(private_client, public_client, args): +def query_height(private_client: JsonRpcClient, public_client: JsonRpcClient, args): format = "{time:20} {syncing:>15}{height:>30}{currentHeight:>30}" print( format.format( @@ -75,10 +61,10 @@ def main(): args = parser.parse_args() private_endpoint = "http://{}:38391".format(args.ip) - private_client = jsonrpcclient.HTTPClient(private_endpoint) + private_client = JsonRpcClient(private_endpoint, TIMEOUT) public_endpoint = "http://{}:38391".format(args.bootstrapip) - public_client = jsonrpcclient.HTTPClient(public_endpoint) + public_client = JsonRpcClient(public_endpoint, TIMEOUT) diff --git a/quarkchain/tools/count_total_balance.py b/quarkchain/tools/count_total_balance.py index 0cd83d685..d29673bdf 100644 --- a/quarkchain/tools/count_total_balance.py +++ b/quarkchain/tools/count_total_balance.py @@ -2,37 +2,29 @@ import functools import logging from typing import List, Tuple, Dict, Any - -import jsonrpcclient +from quarkchain.jsonrpc_client import JsonRpcClient logging.root.setLevel(logging.INFO) log_format = "%(asctime)s: %(message)s" logging.basicConfig(format=log_format, datefmt="%Y-%m-%d %H:%M:%S") -# disable jsonrpcclient verbose logging -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) - TIMEOUT = 10 TOTAL_SHARD = 8 @functools.lru_cache(maxsize=5) -def get_jsonrpc_cli(jrpc_url): - return jsonrpcclient.HTTPClient(jrpc_url) +def get_jsonrpc_cli(jrpc_url, timeout=10): + return JsonRpcClient(jrpc_url, timeout) class Fetcher(object): def __init__(self, host: str, timeout: int): - self.cli = get_jsonrpc_cli(host) + self.cli = get_jsonrpc_cli(host, timeout) self.timeout = timeout self.shard_to_latest_id = {} def _get_root_block(self, root_block_height: int) -> Dict[str, Any]: - res = self.cli.send( - jsonrpcclient.Request("getRootBlockByHeight", hex(root_block_height)), - timeout=self.timeout, - ) + res = 
self.cli.call("getRootBlockByHeight", hex(root_block_height)) if not res: raise RuntimeError( "Failed to query root block at height" % root_block_height @@ -62,12 +54,7 @@ def get_latest_minor_block_id_from_root_block( def count_total_balance( self, block_id: str, root_block_id: str, token_id: int, start: str ) -> Tuple[int, str]: - res = self.cli.send( - jsonrpcclient.Request( - "getTotalBalance", block_id, root_block_id, hex(token_id), start - ), - timeout=self.timeout, - ) + res = self.cli.call("getTotalBalance", block_id, root_block_id, hex(token_id), start) if not res: raise RuntimeError("Failed to count total balance") return int(res["totalBalance"], 16), res["next"] diff --git a/quarkchain/tools/erc20_balance_watcher.py b/quarkchain/tools/erc20_balance_watcher.py index 4456c0ad1..14f959040 100644 --- a/quarkchain/tools/erc20_balance_watcher.py +++ b/quarkchain/tools/erc20_balance_watcher.py @@ -1,11 +1,9 @@ -# jsonrpcclient==3.* -# requests==2.* -import jsonrpcclient import time import logging import argparse import smtplib from email.message import EmailMessage +from quarkchain.jsonrpc_client import JsonRpcClient HOST = "https://eth.llamarpc.com" @@ -13,8 +11,6 @@ FORMAT = "%(asctime)-15s %(message)s" logging.basicConfig(format=FORMAT) -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) logger = logging.getLogger() logger.setLevel(logging.INFO) @@ -23,7 +19,8 @@ def query(endpoint, args): retry, resp = 0, None while retry <= 5: try: - resp = jsonrpcclient.request(HOST + ":" + PORT, endpoint, *args) + cli = JsonRpcClient(HOST + ":" + PORT) + resp = cli.call(endpoint, *args) break except Exception: retry += 1 diff --git a/quarkchain/tools/external_miner.py b/quarkchain/tools/external_miner.py index 481fc66bb..4b14a29cb 100644 --- a/quarkchain/tools/external_miner.py +++ b/quarkchain/tools/external_miner.py @@ -1,7 +1,6 @@ import argparse import copy import 
functools -import logging import random import signal import threading @@ -9,17 +8,12 @@ from itertools import cycle from typing import Dict, Optional, List, Tuple -import jsonrpcclient from queue import LifoQueue from quarkchain.cluster.miner import Miner, MiningWork, MiningResult from quarkchain.cluster.cluster_config import ClusterConfig from quarkchain.utils import int_left_most_bit - -# disable jsonrpcclient verbose logging - -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) +from quarkchain.jsonrpc_client import JsonRpcClient TIMEOUT = 10 @@ -29,7 +23,7 @@ @functools.lru_cache(maxsize=5) def get_jsonrpc_cli(jrpc_url): - return jsonrpcclient.HTTPClient(jrpc_url) + return JsonRpcClient(jrpc_url, TIMEOUT) def get_work_rpc( @@ -40,12 +34,7 @@ def get_work_rpc( ) -> MiningWork: jrpc_url = "http://{}:{}".format(host, jrpc_port) cli = get_jsonrpc_cli(jrpc_url) - header_hash, height, diff = cli.send( - jsonrpcclient.Request( - "getWork", hex(full_shard_id) if full_shard_id is not None else None - ), - timeout=timeout, - ) + header_hash, height, diff = cli.call("getWork", hex(full_shard_id) if full_shard_id is not None else None) return MiningWork(bytes.fromhex(header_hash[2:]), int(height, 16), int(diff, 16)) @@ -58,15 +47,12 @@ def submit_work_rpc( ) -> bool: jrpc_url = "http://{}:{}".format(host, jrpc_port) cli = get_jsonrpc_cli(jrpc_url) - success = cli.send( - jsonrpcclient.Request( - "submitWork", - hex(full_shard_id) if full_shard_id is not None else None, - "0x" + res.header_hash.hex(), - hex(res.nonce), - "0x" + res.mixhash.hex(), - ), - timeout=timeout, + success = cli.call( + "submitWork", + hex(full_shard_id) if full_shard_id is not None else None, + "0x" + res.header_hash.hex(), + hex(res.nonce), + "0x" + res.mixhash.hex(), ) return success diff --git a/quarkchain/tools/fund_testnet.py b/quarkchain/tools/fund_testnet.py index b550fd5a3..016ad4077 
100644 --- a/quarkchain/tools/fund_testnet.py +++ b/quarkchain/tools/fund_testnet.py @@ -1,33 +1,25 @@ import argparse -import aiohttp import asyncio import logging -import pickle import random import rlp from collections import defaultdict -from jsonrpcclient.aiohttp_client import aiohttpClient from typing import Dict, List from quarkchain.env import DEFAULT_ENV from quarkchain.core import Address, Identity from quarkchain.evm.transactions import Transaction as EvmTransaction - +from quarkchain.jsonrpc_client import AsyncJsonRpcClient class Endpoint: def __init__(self, url): - self.url = url - asyncio.get_event_loop().run_until_complete(self.__create_session()) - - async def __create_session(self): - self.session = aiohttp.ClientSession() + self.client = AsyncJsonRpcClient(url) - async def __send_request(self, *args): - client = aiohttpClient(self.session, self.url) + async def __send_request(self, method, *args): # manual retry since the library has hard-coded timeouts while True: try: - response = await client.request(*args) + response = await self.client.call(method, *args) break except Exception as e: print("{} !timeout! 
retrying {}".format(self.url, e)) @@ -167,15 +159,11 @@ def main(): parser.add_argument("--tqkc_file", required=True, type=str) args = parser.parse_args() - if not args.log_jrpc: - logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) - logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) - genesisId = Identity.create_from_key(DEFAULT_ENV.config.GENESIS_KEY) endpoint = Endpoint("http://" + args.jrpc_endpoint) addrByAmount = read_addr(args.tqkc_file) - asyncio.get_event_loop().run_until_complete(fund(endpoint, genesisId, addrByAmount)) + asyncio.run(fund(endpoint, genesisId, addrByAmount)) if __name__ == "__main__": diff --git a/quarkchain/tools/monitoring.py b/quarkchain/tools/monitoring.py index 3ce3139a6..c8dbd24d1 100644 --- a/quarkchain/tools/monitoring.py +++ b/quarkchain/tools/monitoring.py @@ -1,4 +1,3 @@ -import jsonrpcclient import ipaddress import argparse @@ -8,6 +7,7 @@ import asyncio from jsonrpc_async import Server +from quarkchain.jsonrpc_client import JsonRpcClient """ @@ -19,7 +19,8 @@ def fetch_peers(ip, jrpc_port): json_rpc_url = "http://{}:{}".format(ip, jrpc_port) print("calling {}".format(json_rpc_url)) - peers = jsonrpcclient.request(json_rpc_url, "getPeers") + cli = JsonRpcClient(json_rpc_url) + peers = cli.call("getPeers") return [ "{}:{}".format(ipaddress.ip_address(int(p["ip"], 16)), int(p["port"], 16)) for p in peers["peers"] @@ -67,9 +68,7 @@ async def crawl_async(ip, p2p_port, jrpc_port): def crawl_bfs(ip, p2p_port, jrpc_port): - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - cache = loop.run_until_complete(crawl_async(ip, p2p_port, jrpc_port)) + cache = asyncio.run(crawl_async(ip, p2p_port, jrpc_port)) res = {} # we can avoid the loop, but it will look crazy @@ -181,7 +180,7 @@ def watch_nodes_stats(ip, p2p_port, jrpc_port, ip_lookup={}): for idx, cluster in enumerate(clusters) ] ) - asyncio.get_event_loop().run_until_complete(async_watch(clusters)) + 
asyncio.run(async_watch(clusters)) def main(): diff --git a/quarkchain/tools/reorg_detector.py b/quarkchain/tools/reorg_detector.py index 774ade891..dea2f2c97 100644 --- a/quarkchain/tools/reorg_detector.py +++ b/quarkchain/tools/reorg_detector.py @@ -1,9 +1,9 @@ -import jsonrpcclient import time import logging import argparse import smtplib from email.message import EmailMessage +from quarkchain.jsonrpc_client import JsonRpcClient HOST = "http://jrpc.mainnet.quarkchain.io" @@ -11,8 +11,6 @@ FORMAT = "%(asctime)-15s %(message)s" logging.basicConfig(format=FORMAT) -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) logger = logging.getLogger() logger.setLevel(logging.INFO) @@ -21,7 +19,8 @@ def query(endpoint, *args): retry, resp = 0, None while retry <= 5: try: - resp = jsonrpcclient.request(HOST + ":" + PORT, endpoint, *args) + cli = JsonRpcClient(HOST + ":" + PORT) + resp = cli.call(endpoint, *args) break except Exception: retry += 1 diff --git a/quarkchain/tools/stats b/quarkchain/tools/stats.py old mode 100755 new mode 100644 similarity index 84% rename from quarkchain/tools/stats rename to quarkchain/tools/stats.py index fe637d8e2..f3403ab58 --- a/quarkchain/tools/stats +++ b/quarkchain/tools/stats.py @@ -1,18 +1,12 @@ #! 
/usr/bin/env python3 import argparse -import logging import time from datetime import datetime -import jsonrpcclient import psutil import numpy from decimal import Decimal - - -# disable jsonrpcclient verbose logging -logging.getLogger("jsonrpcclient.client.request").setLevel(logging.WARNING) -logging.getLogger("jsonrpcclient.client.response").setLevel(logging.WARNING) +from quarkchain.jsonrpc_client import JsonRpcClient def now(): @@ -23,8 +17,8 @@ def fstr(v: float): return "{:.2f}".format(v) -def basic(client, ip): - s = client.send(jsonrpcclient.Request("getStats")) +def basic(client: JsonRpcClient, ip): + s = client.call("getStats") msg = "QuarkChain Cluster Stats\n\n" msg += "CPU: {}\n".format(psutil.cpu_count()) msg += "Memory: {} GB\n".format( @@ -37,8 +31,8 @@ def basic(client, ip): return msg -def stats(client): - s = client.send(jsonrpcclient.Request("getStats")) +def stats(client: JsonRpcClient): + s = client.call("getStats") return { "time": now(), "syncing": str(s["syncing"]), @@ -60,7 +54,7 @@ def stats(client): } -def query_stats(client, args): +def query_stats(client: JsonRpcClient, args): if args.verbose: format = "{time:20} {syncing:>8} {tps:>5} {pendingTx:>10} {confirmedTx:>10} {bps:>9} {sbps:>9} {cpu:>9} {root:>7} {shards}" else: @@ -91,7 +85,7 @@ def format_qkc(qkc: Decimal): return "{:.18f}".format(qkc).rstrip("0").rstrip(".") -def query_address(client, args): +def query_address(client: JsonRpcClient, args): address_hex = args.address.lower().lstrip("0").lstrip("x") token_str = args.token.upper() assert len(address_hex) == 48 @@ -103,11 +97,7 @@ def query_address(client, args): while True: - data = client.send(
parser.parse_args() private_endpoint = "http://{}:38491".format(args.ip) - private_client = jsonrpcclient.HTTPClient(private_endpoint) + private_client = JsonRpcClient(private_endpoint) public_endpoint = "http://{}:38391".format(args.ip) - public_client = jsonrpcclient.HTTPClient(public_endpoint) + public_client = JsonRpcClient(public_endpoint) print(basic(private_client, args.ip)) From 8fbf9609086cb059603c8c91847c2ac0172785e1 Mon Sep 17 00:00:00 2001 From: QuarkChain Dev Date: Fri, 13 Mar 2026 05:07:17 +0100 Subject: [PATCH 02/11] fix remaining jsonrpc_async usages and import issues in tools - adjust_difficulty.py: replace bare `import monitoring` with `from quarkchain.tools import monitoring` (breaks when run from outside the tools directory) - adjust_difficulty.py: replace jsonrpc_async.Server with AsyncJsonRpcClient and use .call() method - monitoring.py: replace jsonrpc_async.Server with AsyncJsonRpcClient and use .call() method; close session via .close() - jsonrpc_client.py: fix AsyncJsonRpcClient.call signature to use *params (variadic) to match JsonRpcClient.call, fixing callers that pass positional arguments Co-Authored-By: Claude Sonnet 4.6 --- quarkchain/jsonrpc_client.py | 4 ++-- quarkchain/tools/adjust_difficulty.py | 16 ++++++++-------- quarkchain/tools/monitoring.py | 13 ++++++------- 3 files changed, 16 insertions(+), 17 deletions(-) diff --git a/quarkchain/jsonrpc_client.py b/quarkchain/jsonrpc_client.py index 3634b0ce2..f69a5953b 100644 --- a/quarkchain/jsonrpc_client.py +++ b/quarkchain/jsonrpc_client.py @@ -37,11 +37,11 @@ class AsyncJsonRpcClient: def __init__(self, url, timeout=10): self.client = httpx.AsyncClient(base_url=url, timeout=timeout) - async def call(self, method, params=None): + async def call(self, method, *params): payload = { "jsonrpc": "2.0", "method": method, - "params": params if params is not None else [], + "params": list(params), "id": str(uuid.uuid4()), } diff --git a/quarkchain/tools/adjust_difficulty.py 
b/quarkchain/tools/adjust_difficulty.py index 26782ff24..7a65b0519 100644 --- a/quarkchain/tools/adjust_difficulty.py +++ b/quarkchain/tools/adjust_difficulty.py @@ -1,10 +1,10 @@ -import monitoring - import argparse import asyncio import json from datetime import datetime -from jsonrpc_async import Server + +from quarkchain.tools import monitoring +from quarkchain.jsonrpc_client import AsyncJsonRpcClient """ this is a centralized place that sets mining difficulty @@ -15,9 +15,9 @@ async def async_adjust(idx, server, root, minor, mining): - response = await server.setTargetBlockTime(root, minor) + response = await server.call("setTargetBlockTime", root, minor) print("idx={};response={}".format(idx, response)) - await server.setMining(mining) + await server.call("setMining", mining) async def async_adjust_difficulty(args): @@ -35,7 +35,7 @@ async def async_adjust_difficulty(args): if count == num_nodes: raise Exception("no change") servers = [ - (idx, Server("http://{}".format(cluster))) + (idx, AsyncJsonRpcClient("http://{}".format(cluster))) for idx, cluster in enumerate(clusters) ] await asyncio.gather( @@ -89,11 +89,11 @@ async def adjust_imbalanced_hashpower(args): clusters_rich = clusters[:num_rich] clusters_poor = clusters[num_rich:] servers_rich = [ - (idx, Server("http://{}".format(cluster))) + (idx, AsyncJsonRpcClient("http://{}".format(cluster))) for idx, cluster in enumerate(clusters_rich) ] servers_poor = [ - (idx, Server("http://{}".format(cluster))) + (idx, AsyncJsonRpcClient("http://{}".format(cluster))) for idx, cluster in enumerate(clusters_poor) ] rich_root = int(num_nodes * args.base_root / 9) diff --git a/quarkchain/tools/monitoring.py b/quarkchain/tools/monitoring.py index c8dbd24d1..407cefd8e 100644 --- a/quarkchain/tools/monitoring.py +++ b/quarkchain/tools/monitoring.py @@ -6,8 +6,7 @@ from datetime import datetime import asyncio -from jsonrpc_async import Server -from quarkchain.jsonrpc_client import JsonRpcClient +from 
quarkchain.jsonrpc_client import JsonRpcClient, AsyncJsonRpcClient """ @@ -33,13 +32,13 @@ async def fetch_peers_async(node): :return: list of tuple(ip, p2p_port, jrpc_port) """ json_rpc_url = "http://{}:{}".format(node[0], node[2]) - server = Server(json_rpc_url) + server = AsyncJsonRpcClient(json_rpc_url, timeout=5) try: - peers = await asyncio.wait_for(server.get_peers(), 5) + peers = await server.call("getPeers") except Exception: print("Failed to get peers from {}".format(json_rpc_url)) peers = {"peers": []} - await server.session.close() + await server.close() return [ ( str(ipaddress.ip_address(int(p["ip"], 16))), @@ -148,13 +147,13 @@ def print_all_clusters(ip, p2p_port, jrpc_port, ip_lookup={}): async def async_stats(idx, server): - response = await server.get_stats() + response = await server.call("getStats") print("idx={};{}={}".format(idx, CONST_METRIC, response[CONST_METRIC])) async def async_watch(clusters): servers = [ - (idx, Server("http://{}".format(cluster))) + (idx, AsyncJsonRpcClient("http://{}".format(cluster))) for idx, cluster in enumerate(clusters) ] while True: From 9f603e699ce9a325d4e3399f9bb6213b17fb0457 Mon Sep 17 00:00:00 2001 From: ping-ke Date: Mon, 16 Mar 2026 21:15:20 +0800 Subject: [PATCH 03/11] fix jsonrpc test failures: params passing and websocket server shutdown MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fix send_request in test_jsonrpc.py to unpack list params correctly instead of double-wrapping them (e.g. 
[["0x..."]] → ["0x..."]) - Add call_with_dict_params to AsyncJsonRpcClient for named params - Implement JSONRPCWebsocketServer.shutdown() to actually close the server, fixing test isolation hangs Co-Authored-By: Claude Opus 4.6 --- quarkchain/cluster/jsonrpc.py | 3160 +++++++++---------- quarkchain/cluster/tests/test_jsonrpc.py | 3650 +++++++++++----------- quarkchain/jsonrpc_client.py | 131 +- 3 files changed, 3481 insertions(+), 3460 deletions(-) diff --git a/quarkchain/cluster/jsonrpc.py b/quarkchain/cluster/jsonrpc.py index e0cc39ba4..019d399b2 100644 --- a/quarkchain/cluster/jsonrpc.py +++ b/quarkchain/cluster/jsonrpc.py @@ -1,1580 +1,1580 @@ -import asyncio -import inspect -import json -from typing import Callable, Dict, List, Optional - -import aiohttp_cors -import websockets -import rlp -from aiohttp import web -from decorator import decorator - -from quarkchain.cluster.master import MasterServer -from quarkchain.cluster.rpc import AccountBranchData -from quarkchain.cluster.slave import SlaveServer -from quarkchain.core import ( - Address, - Branch, - Log, - MinorBlock, - RootBlock, - SerializedEvmTransaction, - TokenBalanceMap, - TransactionReceipt, - TypedTransaction, - Constant, - MinorBlockHeader, - PoSWInfo, -) -from quarkchain.evm.transactions import Transaction as EvmTransaction -from quarkchain.evm.utils import denoms, is_numeric -from quarkchain.p2p.p2p_manager import P2PManager -from quarkchain.utils import Logger, token_id_decode, token_id_encode -from cachetools import LRUCache -import uuid -from quarkchain.cluster.log_filter import LogFilter -from quarkchain.cluster.subscription import SUB_LOGS -from quarkchain.cluster.jsonrpcserver import RpcMethods, InvalidParams - -# defaults -DEFAULT_STARTGAS = 100 * 1000 -DEFAULT_GASPRICE = 10 * denoms.gwei - -# Allow 16 MB request for submitting big blocks -# TODO: revisit this parameter -JSON_RPC_CLIENT_REQUEST_MAX_SIZE = 16 * 1024 * 1024 - - -EMPTY_TX_ID = "0x" + "0" * Constant.TX_ID_HEX_LENGTH - -def 
quantity_decoder(hex_str, allow_optional=False): - """Decode `hexStr` representing a quantity.""" - if allow_optional and hex_str is None: - return None - # must start with "0x" - if not hex_str.startswith("0x") or len(hex_str) < 3: - raise InvalidParams("Invalid quantity encoding") - - try: - return int(hex_str, 16) - except ValueError: - raise InvalidParams("Invalid quantity encoding") - - -def quantity_encoder(i): - """Encode integer quantity `data`.""" - assert is_numeric(i) - return hex(i) - - -def data_decoder(hex_str, allow_optional=False): - """Decode `hexStr` representing unformatted hex_str.""" - if allow_optional and hex_str is None: - return None - if not hex_str.startswith("0x"): - raise InvalidParams("Invalid hex_str encoding") - try: - return bytes.fromhex(hex_str[2:]) - except Exception: - raise InvalidParams("Invalid hex_str hex encoding") - - -def data_encoder(data_bytes): - """Encode unformatted binary `dataBytes`.""" - return "0x" + data_bytes.hex() - - -def address_decoder(hex_str): - """Decode an address from hex with 0x prefix to 24 bytes.""" - addr_bytes = data_decoder(hex_str) - if len(addr_bytes) not in (24, 0): - raise InvalidParams("Addresses must be 24 or 0 bytes long") - return addr_bytes - - -def address_encoder(addr_bytes): - assert len(addr_bytes) == 24 - return data_encoder(addr_bytes) - - -def recipient_decoder(hex_str, allow_optional=False): - """Decode an recipient from hex with 0x prefix to 20 bytes.""" - if allow_optional and hex_str is None: - return None - recipient_bytes = data_decoder(hex_str) - if len(recipient_bytes) not in (20, 0): - raise InvalidParams("Addresses must be 20 or 0 bytes long") - return recipient_bytes - - -def recipient_encoder(recipient_bytes): - assert len(recipient_bytes) == 20 - return data_encoder(recipient_bytes) - - -def full_shard_key_decoder(hex_str): - b = data_decoder(hex_str) - if len(b) != 4: - raise InvalidParams("Full shard id must be 4 bytes") - return int.from_bytes(b, byteorder="big") - 
- -def full_shard_key_encoder(full_shard_key): - return data_encoder(full_shard_key.to_bytes(4, byteorder="big")) - - -def id_encoder(hash_bytes, full_shard_key): - """Encode hash and full_shard_key into hex""" - return data_encoder(hash_bytes + full_shard_key.to_bytes(4, byteorder="big")) - - -def id_decoder(hex_str): - """Decode an id to (hash, full_shard_key)""" - data_bytes = data_decoder(hex_str) - if len(data_bytes) != 36: - raise InvalidParams("Invalid id encoding") - return data_bytes[:32], int.from_bytes(data_bytes[32:], byteorder="big") - - -def hash_decoder(hex_str): - """Decode a block hash.""" - decoded = data_decoder(hex_str) - if len(decoded) != 32: - raise InvalidParams("Hashes must be 32 bytes long") - return decoded - - -def signature_decoder(hex_str): - """Decode a block signature.""" - if not hex_str: - return None - decoded = data_decoder(hex_str) - if len(decoded) != 65: - raise InvalidParams("Signature must be 65 bytes long") - return decoded - - -def bool_decoder(data): - if not isinstance(data, bool): - raise InvalidParams("Parameter must be boolean") - return data - - -def _add_posw_info_to_resp(d: Dict, diff: int, posw_info: PoSWInfo): - d["effectiveDifficulty"] = quantity_encoder(posw_info.effective_difficulty) - d["poswMineableBlocks"] = quantity_encoder(posw_info.posw_mineable_blocks) - d["poswMinedBlocks"] = quantity_encoder(posw_info.posw_mined_blocks) - d["stakingApplied"] = posw_info.effective_difficulty < diff - - -def root_block_encoder(block, extra_info): - header = block.header - - d = { - "id": data_encoder(header.get_hash()), - "height": quantity_encoder(header.height), - "hash": data_encoder(header.get_hash()), - "sealHash": data_encoder(header.get_hash_for_mining()), - "hashPrevBlock": data_encoder(header.hash_prev_block), - "idPrevBlock": data_encoder(header.hash_prev_block), - "nonce": quantity_encoder(header.nonce), - "hashMerkleRoot": data_encoder(header.hash_merkle_root), - "miner": 
address_encoder(header.coinbase_address.serialize()), - "coinbase": balances_encoder(header.coinbase_amount_map), - "difficulty": quantity_encoder(header.difficulty), - "timestamp": quantity_encoder(header.create_time), - "size": quantity_encoder(len(block.serialize())), - "minorBlockHeaders": [], - "signature": data_encoder(header.signature), - } - if extra_info: - _add_posw_info_to_resp(d, header.difficulty, extra_info) - - for header in block.minor_block_header_list: - h = minor_block_header_encoder(header) - d["minorBlockHeaders"].append(h) - return d - - -def minor_block_encoder(block, include_transactions=False, extra_info=None): - """Encode a block as JSON object. - - :param block: a :class:`ethereum.block.Block` - :param include_transactions: if true transaction details are included, otherwise - only their hashes - :param extra_info: MinorBlockExtraInfo - :returns: a json encodable dictionary - """ - header = block.header - meta = block.meta - - header_info = minor_block_header_encoder(header) - d = { - **header_info, - "hashMerkleRoot": data_encoder(meta.hash_merkle_root), - "hashEvmStateRoot": data_encoder(meta.hash_evm_state_root), - "gasUsed": quantity_encoder(meta.evm_gas_used), - "size": quantity_encoder(len(block.serialize())), - } - if include_transactions: - d["transactions"] = [] - for i, _ in enumerate(block.tx_list): - d["transactions"].append(tx_encoder(block, i)) - else: - d["transactions"] = [ - id_encoder(tx.get_hash(), block.header.branch.get_full_shard_id()) - for tx in block.tx_list - ] - if extra_info: - _add_posw_info_to_resp(d, header.difficulty, extra_info) - return d - - -def minor_block_header_encoder(header: MinorBlockHeader) -> Dict: - d = { - "id": id_encoder(header.get_hash(), header.branch.get_full_shard_id()), - "height": quantity_encoder(header.height), - "hash": data_encoder(header.get_hash()), - "fullShardId": quantity_encoder(header.branch.get_full_shard_id()), - "chainId": quantity_encoder(header.branch.get_chain_id()), - 
"shardId": quantity_encoder(header.branch.get_shard_id()), - "hashPrevMinorBlock": data_encoder(header.hash_prev_minor_block), - "idPrevMinorBlock": id_encoder( - header.hash_prev_minor_block, header.branch.get_full_shard_id() - ), - "hashPrevRootBlock": data_encoder(header.hash_prev_root_block), - "nonce": quantity_encoder(header.nonce), - "miner": address_encoder(header.coinbase_address.serialize()), - "coinbase": balances_encoder(header.coinbase_amount_map), - "difficulty": quantity_encoder(header.difficulty), - "extraData": data_encoder(header.extra_data), - "gasLimit": quantity_encoder(header.evm_gas_limit), - "timestamp": quantity_encoder(header.create_time), - } - return d - - -def tx_encoder(block, i): - """Encode a transaction as JSON object. - - `transaction` is the `i`th transaction in `block`. - """ - tx = block.tx_list[i] - evm_tx = tx.tx.to_evm_tx() - branch = block.header.branch - return { - "id": id_encoder(tx.get_hash(), evm_tx.from_full_shard_key), - "hash": data_encoder(tx.get_hash()), - "nonce": quantity_encoder(evm_tx.nonce), - "timestamp": quantity_encoder(block.header.create_time), - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "blockId": id_encoder(block.header.get_hash(), branch.get_full_shard_id()), - "blockHeight": quantity_encoder(block.header.height), - "transactionIndex": quantity_encoder(i), - "from": data_encoder(evm_tx.sender), - "to": data_encoder(evm_tx.to), - "fromFullShardKey": full_shard_key_encoder(evm_tx.from_full_shard_key), - "toFullShardKey": full_shard_key_encoder(evm_tx.to_full_shard_key), - "value": quantity_encoder(evm_tx.value), - "gasPrice": quantity_encoder(evm_tx.gasprice), - "gas": quantity_encoder(evm_tx.startgas), - "data": data_encoder(evm_tx.data), - "networkId": quantity_encoder(evm_tx.network_id), - "transferTokenId": quantity_encoder(evm_tx.transfer_token_id), - "gasTokenId": 
quantity_encoder(evm_tx.gas_token_id), - "transferTokenStr": token_id_decode(evm_tx.transfer_token_id), - "gasTokenStr": token_id_decode(evm_tx.gas_token_id), - "version": quantity_encoder(evm_tx.version), - "r": quantity_encoder(evm_tx.r), - "s": quantity_encoder(evm_tx.s), - "v": quantity_encoder(evm_tx.v), - } - - -def tx_detail_encoder(tx): - """Encode a transaction detail object as JSON object. Used for indexing server.""" - return { - "txId": id_encoder(tx.tx_hash, tx.from_address.full_shard_key), - "fromAddress": address_encoder(tx.from_address.serialize()), - "toAddress": address_encoder(tx.to_address.serialize()) - if tx.to_address - else "0x", - "value": quantity_encoder(tx.value), - "transferTokenId": quantity_encoder(tx.transfer_token_id), - "transferTokenStr": token_id_decode(tx.transfer_token_id), - "gasTokenId": quantity_encoder(tx.gas_token_id), - "gasTokenStr": token_id_decode(tx.gas_token_id), - "blockHeight": quantity_encoder(tx.block_height), - "timestamp": quantity_encoder(tx.timestamp), - "success": tx.success, - "isFromRootChain": tx.is_from_root_chain, - "nonce": quantity_encoder(tx.nonce), - } - - -def loglist_encoder(loglist: List[Log], is_removed: bool = False): - """Encode a list of log""" - result = [] - for l in loglist: - result.append( - { - "logIndex": quantity_encoder(l.log_idx), - "transactionIndex": quantity_encoder(l.tx_idx), - "transactionHash": data_encoder(l.tx_hash), - "blockHash": data_encoder(l.block_hash), - "blockNumber": quantity_encoder(l.block_number), - "blockHeight": quantity_encoder(l.block_number), - "address": data_encoder(l.recipient), - "recipient": data_encoder(l.recipient), - "data": data_encoder(l.data), - "topics": [data_encoder(topic) for topic in l.topics], - "removed": is_removed, - } - ) - return result - - -def receipt_encoder(block: MinorBlock, i: int, receipt: TransactionReceipt): - tx_id, tx_hash = None, None # if empty, will be populated at call site - if i < len(block.tx_list): - tx = 
block.tx_list[i] - evm_tx = tx.tx.to_evm_tx() - tx_id = id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) - tx_hash = data_encoder(tx.get_hash()) - resp = { - "transactionId": tx_id, - "transactionHash": tx_hash, - "transactionIndex": quantity_encoder(i), - "blockId": id_encoder( - block.header.get_hash(), block.header.branch.get_full_shard_id() - ), - "blockHash": data_encoder(block.header.get_hash()), - "blockHeight": quantity_encoder(block.header.height), - "blockNumber": quantity_encoder(block.header.height), - "cumulativeGasUsed": quantity_encoder(receipt.gas_used), - "gasUsed": quantity_encoder(receipt.gas_used - receipt.prev_gas_used), - "status": quantity_encoder(1 if receipt.success == b"\x01" else 0), - "contractAddress": ( - address_encoder(receipt.contract_address.serialize()) - if not receipt.contract_address.is_empty() - else None - ), - "logs": loglist_encoder(receipt.logs), - "timestamp": quantity_encoder(block.header.create_time), - } - - return resp - - -def balances_encoder(balances: TokenBalanceMap) -> List[Dict]: - balance_list = [] - for k, v in balances.balance_map.items(): - balance_list.append( - { - "tokenId": quantity_encoder(k), - "tokenStr": token_id_decode(k), - "balance": quantity_encoder(v), - } - ) - return balance_list - - -def decode_arg(name, decoder, allow_optional=False): - """Create a decorator that applies `decoder` to argument `name`.""" - - @decorator - def new_f(f, *args, **kwargs): - call_args = inspect.getcallargs(f, *args, **kwargs) - call_args[name] = ( - decoder(call_args[name], allow_optional=True) - if allow_optional - else decoder(call_args[name]) - ) - return f(**call_args) - - return new_f - - -def encode_res(encoder): - """Create a decorator that applies `encoder` to the return value of the - decorated function. 
- """ - - @decorator - async def new_f(f, *args, **kwargs): - res = await f(*args, **kwargs) - return encoder(res) - - return new_f - - -def block_height_decoder(data): - """Decode block height string, which can either be None, 'latest', 'earliest' or a hex number - of minor block height""" - if data is None or data == "latest": - return None - if data == "earliest": - return 0 - # TODO: support pending - return quantity_decoder(data) - - -def shard_id_decoder(data): - try: - return quantity_decoder(data) - except Exception: - return None - - -def eth_address_to_quarkchain_address_decoder(hex_str): - eth_hex = hex_str[2:] - if len(eth_hex) != 40: - raise InvalidParams("Addresses must be 40 or 0 bytes long") - return address_decoder("0x" + eth_hex + "00000001") - - -def _parse_log_request( - params: Dict, addr_decoder: Callable[[str], bytes] -) -> (bytes, bytes): - """Returns addresses and topics from a EVM log request.""" - addresses, topics = [], [] - if "address" in params: - if isinstance(params["address"], str): - addresses = [Address.deserialize(addr_decoder(params["address"]))] - elif isinstance(params["address"], list): - addresses = [ - Address.deserialize(addr_decoder(a)) for a in params["address"] - ] - if "topics" in params: - for topic_item in params["topics"]: - if isinstance(topic_item, str): - topics.append([data_decoder(topic_item)]) - elif isinstance(topic_item, list): - topics.append([data_decoder(tp) for tp in topic_item]) - return addresses, topics - - -public_methods = RpcMethods() -private_methods = RpcMethods() - - -# noinspection PyPep8Naming -class JSONRPCHttpServer: - @classmethod - async def start_public_server(cls, env, master_server): - server = cls( - env, - master_server, - env.cluster_config.JSON_RPC_PORT, - env.cluster_config.JSON_RPC_HOST, - public_methods, - ) - await server.start() - return server - - @classmethod - async def start_private_server(cls, env, master_server): - server = cls( - env, - master_server, - 
env.cluster_config.PRIVATE_JSON_RPC_PORT, - env.cluster_config.PRIVATE_JSON_RPC_HOST, - private_methods, - ) - await server.start() - return server - - @classmethod - async def start_test_server(cls, env, master_server): - methods = RpcMethods() - for method in public_methods.values(): - methods.add(method) - for method in private_methods.values(): - methods.add(method) - server = cls( - env, - master_server, - env.cluster_config.JSON_RPC_PORT, - env.cluster_config.JSON_RPC_HOST, - methods, - ) - await server.start() - return server - - def __init__( - self, env, master_server: MasterServer, port, host, methods: RpcMethods - ): - self.loop = asyncio.get_running_loop() - self.port = port - self.host = host - self.env = env - self.master = master_server - self.counters = dict() - - # Bind RPC handler functions to this instance - self.handlers = RpcMethods() - for rpc_name in methods: - func = methods[rpc_name] - self.handlers[rpc_name] = func.__get__(self, self.__class__) - - async def __handle(self, request): - request = await request.text() - Logger.info(request) - - d = dict() - try: - d = json.loads(request) - except Exception: - pass - method = d.get("method", "null") - if method in self.counters: - self.counters[method] += 1 - else: - self.counters[method] = 1 - # Use armor to prevent the handler from being cancelled when - # aiohttp server loses connection to client - response = await self.handlers.dispatch(d) - if response is None: - return web.Response() - if "error" in response: - Logger.error(response) - return web.json_response(response) - - async def start(self): - app = web.Application(client_max_size=JSON_RPC_CLIENT_REQUEST_MAX_SIZE) - cors = aiohttp_cors.setup(app) - route = app.router.add_post("/", self.__handle) - cors.add( - route, - { - "*": aiohttp_cors.ResourceOptions( - allow_credentials=True, - expose_headers=("X-Custom-Server-Header",), - allow_methods=["POST", "PUT"], - allow_headers=("X-Requested-With", "Content-Type"), - ) - }, - ) - 
self.runner = web.AppRunner(app, access_log=None) - await self.runner.setup() - site = web.TCPSite(self.runner, self.host, self.port) - await site.start() - - async def shutdown(self): - await self.runner.cleanup() - - # JSON RPC handlers - @public_methods.add - @decode_arg("quantity", quantity_decoder) - @encode_res(quantity_encoder) - async def echoQuantity(self, quantity): - return quantity - - @public_methods.add - @decode_arg("data", data_decoder) - @encode_res(data_encoder) - async def echoData(self, data): - return data - - @public_methods.add - async def networkInfo(self): - return { - "networkId": quantity_encoder( - self.master.env.quark_chain_config.NETWORK_ID - ), - "chainSize": quantity_encoder( - self.master.env.quark_chain_config.CHAIN_SIZE - ), - "shardSizes": [ - quantity_encoder(c.SHARD_SIZE) - for c in self.master.env.quark_chain_config.CHAINS - ], - "syncing": self.master.is_syncing(), - "mining": self.master.is_mining(), - "shardServerCount": len(self.master.slave_pool), - } - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - @encode_res(quantity_encoder) - async def getTransactionCount(self, address, block_height=None): - account_branch_data = await self.master.get_primary_account_data( - Address.deserialize(address), block_height - ) - return account_branch_data.transaction_count - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - async def getBalances(self, address, block_height=None): - account_branch_data = await self.master.get_primary_account_data( - Address.deserialize(address), block_height - ) - branch = account_branch_data.branch - balances = account_branch_data.token_balances - return { - "branch": quantity_encoder(branch.value), - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - 
"balances": balances_encoder(balances), - } - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - async def getAccountData(self, address, block_height=None, include_shards=False): - # do not allow specify height if client wants info on all shards - if include_shards and block_height is not None: - return None - - primary = None - address = Address.deserialize(address) - if not include_shards: - account_branch_data = await self.master.get_primary_account_data( - address, block_height - ) # type: AccountBranchData - branch = account_branch_data.branch - count = account_branch_data.transaction_count - - balances = account_branch_data.token_balances - primary = { - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - "balances": balances_encoder(balances), - "transactionCount": quantity_encoder(count), - "isContract": account_branch_data.is_contract, - "minedBlocks": quantity_encoder(account_branch_data.mined_blocks), - "poswMineableBlocks": quantity_encoder( - account_branch_data.posw_mineable_blocks - ), - } - return {"primary": primary} - - branch_to_account_branch_data = await self.master.get_account_data(address) - - shards = [] - for branch, account_branch_data in branch_to_account_branch_data.items(): - balances = account_branch_data.token_balances - data = { - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - "balances": balances_encoder(balances), - "transactionCount": quantity_encoder( - account_branch_data.transaction_count - ), - "isContract": account_branch_data.is_contract, - } - shards.append(data) - - if branch.get_full_shard_id() == self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - address.full_shard_key - ): - primary = 
data.copy() - primary["minedBlocks"] = quantity_encoder( - account_branch_data.mined_blocks - ) - primary["poswMineableBlocks"] = quantity_encoder( - account_branch_data.posw_mineable_blocks - ) - - return {"primary": primary, "shards": shards} - - @public_methods.add - async def sendTransaction(self, data): - def get_data_default(key, decoder, default=None): - if key in data: - return decoder(data[key]) - return default - - to = get_data_default("to", recipient_decoder, b"") - startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) - gasprice = get_data_default("gasPrice", quantity_decoder, DEFAULT_GASPRICE) - value = get_data_default("value", quantity_decoder, 0) - data_ = get_data_default("data", data_decoder, b"") - v = get_data_default("v", quantity_decoder, 0) - r = get_data_default("r", quantity_decoder, 0) - s = get_data_default("s", quantity_decoder, 0) - nonce = get_data_default("nonce", quantity_decoder, None) - - to_full_shard_key = get_data_default( - "toFullShardKey", full_shard_key_decoder, None - ) - from_full_shard_key = get_data_default( - "fromFullShardKey", full_shard_key_decoder, None - ) - network_id = get_data_default( - "networkId", quantity_decoder, self.master.env.quark_chain_config.NETWORK_ID - ) - - gas_token_id = get_data_default( - "gasTokenId", quantity_decoder, self.env.quark_chain_config.genesis_token - ) - transfer_token_id = get_data_default( - "transferTokenId", - quantity_decoder, - self.env.quark_chain_config.genesis_token, - ) - - if nonce is None: - raise InvalidParams("Missing nonce") - if not (v and r and s): - raise InvalidParams("Missing v, r, s") - if from_full_shard_key is None: - raise InvalidParams("Missing fromFullShardKey") - - if to_full_shard_key is None: - to_full_shard_key = from_full_shard_key - - evm_tx = EvmTransaction( - nonce, - gasprice, - startgas, - to, - value, - data_, - v=v, - r=r, - s=s, - from_full_shard_key=from_full_shard_key, - to_full_shard_key=to_full_shard_key, - 
network_id=network_id, - gas_token_id=gas_token_id, - transfer_token_id=transfer_token_id, - ) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - success = await self.master.add_transaction(tx) - if not success: - return EMPTY_TX_ID - return id_encoder(tx.get_hash(), from_full_shard_key) - - @public_methods.add - @decode_arg("tx_data", data_decoder) - async def sendRawTransaction(self, tx_data): - evm_tx = rlp.decode(tx_data, EvmTransaction) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - success = await self.master.add_transaction(tx) - if not success: - return EMPTY_TX_ID - return id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) - - @public_methods.add - @decode_arg("block_id", data_decoder) - @decode_arg("need_extra_info", bool_decoder) - async def getRootBlockById(self, block_id, need_extra_info=True): - block, extra_info = await self.master.get_root_block_by_height_or_hash( - None, block_id, need_extra_info - ) - if not block: - return None - return root_block_encoder(block, extra_info) - - @public_methods.add - @decode_arg("need_extra_info", bool_decoder) - async def getRootBlockByHeight(self, height=None, need_extra_info=True): - if height is not None: - height = quantity_decoder(height) - block, extra_info = await self.master.get_root_block_by_height_or_hash( - height, None, need_extra_info - ) - if not block: - return None - return root_block_encoder(block, extra_info) - - @public_methods.add - @decode_arg("block_id", id_decoder) - @decode_arg("include_transactions", bool_decoder) - @decode_arg("need_extra_info", bool_decoder) - async def getMinorBlockById( - self, block_id, include_transactions=False, need_extra_info=True - ): - block_hash, full_shard_key = block_id - try: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - except Exception: - return None - block, extra_info = await self.master.get_minor_block_by_hash( - block_hash, 
branch, need_extra_info - ) - if not block: - return None - return minor_block_encoder(block, include_transactions, extra_info) - - @public_methods.add - @decode_arg("full_shard_key", quantity_decoder) - @decode_arg("include_transactions", bool_decoder) - @decode_arg("need_extra_info", bool_decoder) - async def getMinorBlockByHeight( - self, - full_shard_key: int, - height=None, - include_transactions=False, - need_extra_info=True, - ): - if height is not None: - height = quantity_decoder(height) - try: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - except Exception: - return None - block, extra_info = await self.master.get_minor_block_by_height( - height, branch, need_extra_info - ) - if not block: - return None - return minor_block_encoder(block, include_transactions, extra_info) - - @public_methods.add - @decode_arg("tx_id", id_decoder) - async def getTransactionById(self, tx_id): - tx_hash, full_shard_key = tx_id - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) - if not minor_block: - return None - if len(minor_block.tx_list) <= i: - return None - return tx_encoder(minor_block, i) - - @public_methods.add - @decode_arg("block_height", block_height_decoder) - async def call(self, data, block_height=None): - return await self._call_or_estimate_gas( - is_call=True, block_height=block_height, **data - ) - - @public_methods.add - async def estimateGas(self, data): - return await self._call_or_estimate_gas(is_call=False, **data) - - @public_methods.add - async def getTransactionReceipt(self, tx_id): - id_bytes = data_decoder(tx_id) - if len(id_bytes) != 36: - raise InvalidParams("Invalid id encoding") - tx_hash, full_shard_key = ( - id_bytes[:32], - int.from_bytes(id_bytes[32:], byteorder="big"), - ) - branch = Branch( - 
self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - resp = await self.master.get_transaction_receipt(tx_hash, branch) - if not resp: - return None - minor_block, i, receipt = resp - - ret = receipt_encoder(minor_block, i, receipt) - if ret["transactionId"] is None: - ret["transactionId"] = tx_id - ret["transactionHash"] = data_encoder(tx_hash) - return ret - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - async def getLogs(self, data, full_shard_key): - return await self._get_logs(data, full_shard_key, decoder=address_decoder) - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("key", quantity_decoder) - @decode_arg("block_height", block_height_decoder) - # TODO: add block number - async def getStorageAt(self, address, key, block_height=None): - res = await self.master.get_storage_at( - Address.deserialize(address), key, block_height - ) - return data_encoder(res) if res is not None else None - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - async def getCode(self, address, block_height=None): - res = await self.master.get_code(Address.deserialize(address), block_height) - return data_encoder(res) if res is not None else None - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - @decode_arg("start", data_decoder) - @decode_arg("limit", quantity_decoder) - async def getAllTransactions(self, full_shard_key, start="0x", limit="0xa"): - """ "start" should be the "next" in the response for fetching next page. - "start" can also be "0x" to fetch from the beginning (i.e., latest). 
- """ - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - if limit > 20: - limit = 20 - result = await self.master.get_all_transactions(branch, start, limit) - if not result: - return None - tx_list, next = result - return { - "txList": [tx_detail_encoder(tx) for tx in tx_list], - "next": data_encoder(next), - } - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("start", data_decoder) - @decode_arg("limit", quantity_decoder) - @decode_arg("transfer_token_id", quantity_decoder, allow_optional=True) - async def getTransactionsByAddress( - self, address, start="0x", limit="0xa", transfer_token_id=None - ): - """ "start" should be the "next" in the response for fetching next page. - "start" can also be "0x" to fetch from the beginning (i.e., latest). - "start" can be "0x00" to fetch the pending outgoing transactions. - """ - address = Address.create_from(address) - if limit > 20: - limit = 20 - result = await self.master.get_transactions_by_address( - address, transfer_token_id, start, limit - ) - if not result: - return None - tx_list, next = result - return { - "txList": [tx_detail_encoder(tx) for tx in tx_list], - "next": data_encoder(next), - } - - @public_methods.add - async def getJrpcCalls(self): - return self.counters - - @public_methods.add - async def gasPrice(self, full_shard_key: str, token_id: Optional[str] = None): - full_shard_key = shard_id_decoder(full_shard_key) - if full_shard_key is None: - return None - parsed_token_id = ( - quantity_decoder(token_id) if token_id else token_id_encode("QKC") - ) - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - ret = await self.master.gas_price(branch, parsed_token_id) - if ret is None: - return None - return quantity_encoder(ret) - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - @decode_arg("header_hash", hash_decoder) - 
@decode_arg("nonce", quantity_decoder) - @decode_arg("mixhash", hash_decoder) - @decode_arg("signature", signature_decoder) - async def submitWork( - self, full_shard_key, header_hash, nonce, mixhash, signature=None - ): - branch = None # `None` means getting work from root chain - if full_shard_key is not None: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - return await self.master.submit_work( - branch, header_hash, nonce, mixhash, signature - ) - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - @decode_arg("coinbase_addr", recipient_decoder, allow_optional=True) - async def getWork(self, full_shard_key, coinbase_addr=None): - branch = None # `None` means getting work from root chain - if full_shard_key is not None: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - work, optional_divider = await self.master.get_work(branch, coinbase_addr) - if work is None: - return None - ret = [ - data_encoder(work.hash), - quantity_encoder(work.height), - quantity_encoder(work.difficulty), - ] - if optional_divider is not None: - ret.append(quantity_encoder(optional_divider)) - return ret - - @public_methods.add - @decode_arg("block_id", data_decoder) - async def getRootHashConfirmingMinorBlockById(self, block_id): - retv = self.master.root_state.db.get_root_block_confirming_minor_block(block_id) - return data_encoder(retv) if retv else None - - @public_methods.add - @decode_arg("tx_id", id_decoder) - async def getTransactionConfirmedByNumberRootBlocks(self, tx_id): - tx_hash, full_shard_key = tx_id - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) - if not minor_block: - return None - confirming_hash = ( - 
self.master.root_state.db.get_root_block_confirming_minor_block( - minor_block.header.get_hash() - + minor_block.header.branch.get_full_shard_id().to_bytes( - 4, byteorder="big" - ) - ) - ) - if confirming_hash is None: - return quantity_encoder(0) - confirming_header = self.master.root_state.db.get_root_block_header_by_hash( - confirming_hash - ) - canonical_hash = self.master.root_state.db.get_root_block_hash_by_height( - confirming_header.height - ) - if canonical_hash != confirming_hash: - return quantity_encoder(0) - tip = self.master.root_state.tip - return quantity_encoder(tip.height - confirming_header.height + 1) - - ######################## Ethereum JSON RPC ######################## - - @public_methods.add - async def net_version(self): - return quantity_encoder(self.master.env.quark_chain_config.NETWORK_ID) - - @public_methods.add - async def eth_gasPrice(self, shard): - return await self.gasPrice(shard, quantity_encoder(token_id_encode("QKC"))) - - @public_methods.add - @decode_arg("block_height", block_height_decoder) - @decode_arg("include_transactions", bool_decoder) - async def eth_getBlockByNumber(self, block_height, include_transactions): - """ - NOTE: only support block_id "latest" or hex - """ - - def block_transcoder(block): - """ - QuarkChain Block => ETH Block - """ - return { - **block, - "number": block["height"], - "parentHash": block["hashPrevMinorBlock"], - "sha3Uncles": "", - "logsBloom": "", - "transactionsRoot": block["hashMerkleRoot"], # ? - "stateRoot": block["hashEvmStateRoot"], # ? 
- } - - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key(0) - ) - block, _ = await self.master.get_minor_block_by_height( - block_height, branch, need_extra_info=False - ) - if block is None: - return None - return block_transcoder(minor_block_encoder(block)) - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("shard", shard_id_decoder) - @encode_res(quantity_encoder) - async def eth_getBalance(self, address, shard=None): - address = Address.deserialize(address) - if shard is not None: - address = Address(address.recipient, shard) - account_branch_data = await self.master.get_primary_account_data(address) - balance = account_branch_data.token_balances.balance_map.get( - token_id_encode("QKC"), 0 - ) - return balance - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("shard", shard_id_decoder) - @encode_res(quantity_encoder) - async def eth_getTransactionCount(self, address, shard=None): - address = Address.deserialize(address) - if shard is not None: - address = Address(address.recipient, shard) - account_branch_data = await self.master.get_primary_account_data(address) - return account_branch_data.transaction_count - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("shard", shard_id_decoder) - async def eth_getCode(self, address, shard=None): - addr = Address.deserialize(address) - if shard is not None: - addr = Address(addr.recipient, shard) - res = await self.master.get_code(addr, None) - return data_encoder(res) if res is not None else None - - @public_methods.add - @decode_arg("shard", shard_id_decoder) - async def eth_call(self, data, shard=None): - """Returns the result of the transaction application without putting in block chain""" - data = self._convert_eth_call_data(data, shard) - return await self.call(data) - - @public_methods.add - async def 
eth_sendRawTransaction(self, tx_data): - return await self.sendRawTransaction(tx_data) - - @public_methods.add - async def eth_getTransactionReceipt(self, tx_id): - return await self.getTransactionReceipt(tx_id) - - @public_methods.add - @decode_arg("shard", shard_id_decoder) - async def eth_estimateGas(self, data, shard): - data = self._convert_eth_call_data(data, shard) - return await self.estimateGas(**data) - - @public_methods.add - @decode_arg("shard", shard_id_decoder) - async def eth_getLogs(self, data, shard): - return await self._get_logs( - data, shard, decoder=eth_address_to_quarkchain_address_decoder - ) - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("key", quantity_decoder) - @decode_arg("shard", shard_id_decoder) - async def eth_getStorageAt(self, address, key, shard=None): - addr = Address.deserialize(address) - if shard is not None: - addr = Address(addr.recipient, shard) - res = await self.master.get_storage_at(addr, key, None) - return data_encoder(res) if res is not None else None - - ######################## Private Methods ######################## - - @private_methods.add - @decode_arg("branch", quantity_decoder) - @decode_arg("block_data", data_decoder) - async def addBlock(self, branch, block_data): - if branch == 0: - block = RootBlock.deserialize(block_data) - return await self.master.add_root_block_from_miner(block) - return await self.master.add_raw_minor_block(Branch(branch), block_data) - - @private_methods.add - async def getPeers(self): - peer_list = [] - for peer_id, peer in self.master.network.active_peer_pool.items(): - peer_list.append( - { - "id": data_encoder(peer_id), - "ip": quantity_encoder(int(peer.ip)), - "port": quantity_encoder(peer.port), - } - ) - return {"peers": peer_list} - - @private_methods.add - async def getSyncStats(self): - return self.master.synchronizer.get_stats() - - @private_methods.add - async def getStats(self): - # This JRPC doesn't follow the 
standard encoding - return await self.master.get_stats() - - @private_methods.add - async def getBlockCount(self): - # This JRPC doesn't follow the standard encoding - return self.master.get_block_count() - - @private_methods.add - async def createTransactions(self, **load_test_data): - """Create transactions for load testing""" - - def get_data_default(key, decoder, default=None): - if key in load_test_data: - return decoder(load_test_data[key]) - return default - - num_tx_per_shard = load_test_data["numTxPerShard"] - x_shard_percent = load_test_data["xShardPercent"] - to = get_data_default("to", recipient_decoder, b"") - startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) - gasprice = get_data_default( - "gasPrice", quantity_decoder, int(DEFAULT_GASPRICE / 10) - ) - value = get_data_default("value", quantity_decoder, 0) - data = get_data_default("data", data_decoder, b"") - # FIXME: can't support specifying full shard ID to 0. currently is regarded as not set - from_full_shard_key = get_data_default( - "fromFullShardKey", full_shard_key_decoder, 0 - ) - gas_token_id = get_data_default( - "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token - ) - transfer_token_id = get_data_default( - "transfer_token_id", - quantity_decoder, - self.env.quark_chain_config.genesis_token, - ) - # build sample tx - evm_tx_sample = EvmTransaction( - 0, - gasprice, - startgas, - to, - value, - data, - from_full_shard_key=from_full_shard_key, - gas_token_id=gas_token_id, - transfer_token_id=transfer_token_id, - ) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx_sample)) - return await self.master.create_transactions( - num_tx_per_shard, x_shard_percent, tx - ) - - @private_methods.add - async def setTargetBlockTime(self, root_block_time=0, minor_block_time=0): - """0 will not update existing value""" - return await self.master.set_target_block_time( - root_block_time, minor_block_time - ) - - @public_methods.add - 
@decode_arg("block_id", id_decoder) - @decode_arg("root_block_id", data_decoder, allow_optional=True) - @decode_arg("token_id", quantity_decoder) # default: QKC - @decode_arg("start", data_decoder, allow_optional=True) - @decode_arg("limit", quantity_decoder) - async def getTotalBalance( - self, block_id, root_block_id=None, token_id="0x8bb0", start=None, limit="0x64" - ): - if limit > 10000: - limit = 10000 - block_hash, full_shard_key = block_id - full_shard_id = ( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - try: - result = await self.master.get_total_balance( - Branch(full_shard_id), block_hash, root_block_id, token_id, start, limit - ) - except: - raise ServerError - if not result: - raise InvalidRequest - total_balance, next_start = result - return { - "totalBalance": quantity_encoder(total_balance), - "next": data_encoder(next_start), - } - - @private_methods.add - async def setMining(self, mining): - """Turn on / off mining""" - return await self.master.set_mining(mining) - - @private_methods.add - async def getJrpcCalls(self): - return self.counters - - @private_methods.add - async def getKadRoutingTableSize(self): - """Returns number of nodes in the p2p discovery routing table""" - if not isinstance(self.master.network, P2PManager): - raise InvalidRequest("network is not P2P") - return len(self.master.network.server.discovery.proto.routing) - - @private_methods.add - async def getKadRoutingTable(self): - """returns a list of nodes in the p2p discovery routing table, in the enode format - eg. 
"enode://PUBKEY@IP:PORT" - """ - if not isinstance(self.master.network, P2PManager): - raise InvalidRequest("network is not P2P") - return [n.to_uri() for n in self.master.network.server.discovery.proto.routing] - - @public_methods.add - async def getTotalSupply(self): - total_supply = self.master.get_total_supply() - return quantity_encoder(total_supply) if total_supply else None - - @staticmethod - def _convert_eth_call_data(data, shard): - to_address = Address.create_from( - eth_address_to_quarkchain_address_decoder(data["to"]) - ) - if shard: - to_address = Address(to_address.recipient, shard) - data["to"] = "0x" + to_address.serialize().hex() - if "from" in data: - from_address = Address.create_from( - eth_address_to_quarkchain_address_decoder(data["from"]) - ) - if shard: - from_address = Address(from_address.recipient, shard) - data["from"] = "0x" + from_address.serialize().hex() - return data - - async def _get_logs(self, data, full_shard_key, decoder: Callable[[str], bytes]): - start_block = block_height_decoder(data.get("fromBlock", "latest")) - end_block = block_height_decoder(data.get("toBlock", "latest")) - addresses, topics = _parse_log_request(data, decoder) - if full_shard_key is None: - raise InvalidParams("Full shard key is required to get logs") - addresses = [Address(a.recipient, full_shard_key) for a in addresses] - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - logs = await self.master.get_logs( - addresses, topics, start_block, end_block, branch - ) - if logs is None: - return None - return loglist_encoder(logs) - - async def _call_or_estimate_gas(self, is_call: bool, **data): - """Returns the result of the transaction application without putting in block chain""" - if not isinstance(data, dict): - raise InvalidParams("Transaction must be an object") - - def get_data_default(key, decoder, default=None): - if key in data: - return decoder(data[key]) - return default - - to 
= get_data_default("to", address_decoder, None) - if to is None: - to_full_shard_byte = b"\x00" * 4 - to = b"" - else: - to_full_shard_byte = to[20:] - to = to[:20] - - to_full_shard_key = int.from_bytes(to_full_shard_byte, "big") - gas = get_data_default("gas", quantity_decoder, 0) - gas_price = get_data_default("gasPrice", quantity_decoder, 0) - value = get_data_default("value", quantity_decoder, 0) - data_ = get_data_default("data", data_decoder, b"") - sender = get_data_default( - "from", address_decoder, b"\x00" * 20 + to_full_shard_byte - ) - sender_address = Address.create_from(sender) - from_full_shard_key = sender_address.full_shard_key - - gas_token_id = get_data_default( - "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token - ) - transfer_token_id = get_data_default( - "transfer_token_id", - quantity_decoder, - self.env.quark_chain_config.genesis_token, - ) - - network_id = self.master.env.quark_chain_config.NETWORK_ID - - nonce = 0 # slave will fill in the real nonce - evm_tx = EvmTransaction( - nonce, - gas_price, - gas, - to, - value, - data_, - from_full_shard_key=from_full_shard_key, - to_full_shard_key=to_full_shard_key, - network_id=network_id, - gas_token_id=gas_token_id, - transfer_token_id=transfer_token_id, - ) - - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - if is_call: - # xshard not supported for now - is_same_shard = self.master.env.quark_chain_config.is_same_full_shard( - to_full_shard_key, from_full_shard_key - ) - if not is_same_shard: - raise InvalidParams("Call cross-shard tx not supported yet") - res = await self.master.execute_transaction( - tx, sender_address, data["block_height"] - ) - return data_encoder(res) if res is not None else None - else: # estimate gas - res = await self.master.estimate_gas(tx, sender_address) - return quantity_encoder(res) if res is not None else None - - -class JSONRPCWebsocketServer: - @classmethod - async def start_websocket_server(cls, env, 
slave_server): - server = cls( - env, - slave_server, - env.slave_config.WEBSOCKET_JSON_RPC_PORT, - env.slave_config.HOST, - public_methods, - ) - await server.start() - return server - - def __init__( - self, env, slave_server: SlaveServer, port, host, methods: RpcMethods - ): - self.loop = asyncio.get_running_loop() - self.port = port - self.host = host - self.env = env - self.slave = slave_server - self.counters = dict() - self.pending_tx_cache = LRUCache(maxsize=1024) - - # Bind RPC handler functions to this instance - self.handlers = RpcMethods() - for rpc_name in methods: - func = methods[rpc_name] - self.handlers[rpc_name] = func.__get__(self, self.__class__) - - self.shard_subscription_managers = self.slave.shard_subscription_managers - - async def __handle(self, websocket): - sub_ids = dict() # per-websocket var, Dict[sub_id, full_shard_id] - try: - async for message in websocket: - Logger.info(message) - - d = dict() - try: - d = json.loads(message) - except Exception: - raise InvalidParams("Cannot parse message as JSON") - method = d.get("method", "null") - if method in self.counters: - self.counters[method] += 1 - else: - self.counters[method] = 1 - msg_id = d.get("id", 0) - - response = await self.handlers.dispatch( - d, - context={ - "websocket": websocket, - "msg_id": msg_id, - "sub_ids": sub_ids, - }, - ) - - if response is None: - continue - if "error" in response: - Logger.error(response) - else: - if method == "subscribe": - sub_id = response["result"] - full_shard_id = shard_id_decoder(d.get("params")[1]) - sub_ids[sub_id] = full_shard_id - elif method == "unsubscribe": - sub_id = d.get("params")[0] - del sub_ids[sub_id] - await websocket.send(json.dumps(response)) - finally: # current websocket connection terminates, remove subscribers in this connection - for sub_id, full_shard_id in sub_ids.items(): - try: - shard_subscription_manager = self.shard_subscription_managers[ - full_shard_id - ] - 
shard_subscription_manager.remove_subscriber(sub_id) - except: - pass - - async def start(self): - start_server = websockets.serve(self.__handle, self.host, self.port) - await start_server - - def shutdown(self): - pass # TODO - - @staticmethod - def response_transcoder(sub_id, result): - return { - "jsonrpc": "2.0", - "method": "subscription", - "params": {"subscription": sub_id, "result": result}, - } - - @public_methods.add - async def subscribe(self, sub_type, full_shard_id, params=None, context=None): - assert context is not None - full_shard_id = shard_id_decoder(full_shard_id) - if full_shard_id is None: - raise InvalidParams("Invalid full shard ID") - branch = Branch(full_shard_id) - shard = self.slave.shards.get(branch, None) - if not shard: - raise InvalidParams("Full shard ID not found") - - websocket = context["websocket"] - sub_id = "0x" + uuid.uuid4().hex - shard_subscription_manager = self.shard_subscription_managers[full_shard_id] - - extra = None - if sub_type == SUB_LOGS: - addresses, topics = _parse_log_request(params, address_decoder) - addresses = [Address(a.recipient, full_shard_id) for a in addresses] - extra = lambda candidate_blocks: LogFilter.create_from_block_candidates( - shard.state.db, addresses, topics, candidate_blocks - ) - - shard_subscription_manager.add_subscriber(sub_type, sub_id, websocket, extra) - return sub_id - - @public_methods.add - async def unsubscribe(self, sub_id, context=None): - sub_ids = context["sub_ids"] - assert context is not None - if sub_id not in sub_ids: - raise InvalidParams("Subscription ID not found") - - full_shard_id = sub_ids[sub_id] - shard_subscription_manager = self.shard_subscription_managers[full_shard_id] - shard_subscription_manager.remove_subscriber(sub_id) - - return True +import asyncio +import inspect +import json +from typing import Callable, Dict, List, Optional + +import aiohttp_cors +import websockets +import rlp +from aiohttp import web +from decorator import decorator + +from 
quarkchain.cluster.master import MasterServer +from quarkchain.cluster.rpc import AccountBranchData +from quarkchain.cluster.slave import SlaveServer +from quarkchain.core import ( + Address, + Branch, + Log, + MinorBlock, + RootBlock, + SerializedEvmTransaction, + TokenBalanceMap, + TransactionReceipt, + TypedTransaction, + Constant, + MinorBlockHeader, + PoSWInfo, +) +from quarkchain.evm.transactions import Transaction as EvmTransaction +from quarkchain.evm.utils import denoms, is_numeric +from quarkchain.p2p.p2p_manager import P2PManager +from quarkchain.utils import Logger, token_id_decode, token_id_encode +from cachetools import LRUCache +import uuid +from quarkchain.cluster.log_filter import LogFilter +from quarkchain.cluster.subscription import SUB_LOGS +from quarkchain.cluster.jsonrpcserver import RpcMethods, InvalidParams + +# defaults +DEFAULT_STARTGAS = 100 * 1000 +DEFAULT_GASPRICE = 10 * denoms.gwei + +# Allow 16 MB request for submitting big blocks +# TODO: revisit this parameter +JSON_RPC_CLIENT_REQUEST_MAX_SIZE = 16 * 1024 * 1024 + + +EMPTY_TX_ID = "0x" + "0" * Constant.TX_ID_HEX_LENGTH + +def quantity_decoder(hex_str, allow_optional=False): + """Decode `hexStr` representing a quantity.""" + if allow_optional and hex_str is None: + return None + # must start with "0x" + if not hex_str.startswith("0x") or len(hex_str) < 3: + raise InvalidParams("Invalid quantity encoding") + + try: + return int(hex_str, 16) + except ValueError: + raise InvalidParams("Invalid quantity encoding") + + +def quantity_encoder(i): + """Encode integer quantity `data`.""" + assert is_numeric(i) + return hex(i) + + +def data_decoder(hex_str, allow_optional=False): + """Decode `hexStr` representing unformatted hex_str.""" + if allow_optional and hex_str is None: + return None + if not hex_str.startswith("0x"): + raise InvalidParams("Invalid hex_str encoding") + try: + return bytes.fromhex(hex_str[2:]) + except Exception: + raise InvalidParams("Invalid hex_str hex encoding") + + 
+def data_encoder(data_bytes): + """Encode unformatted binary `dataBytes`.""" + return "0x" + data_bytes.hex() + + +def address_decoder(hex_str): + """Decode an address from hex with 0x prefix to 24 bytes.""" + addr_bytes = data_decoder(hex_str) + if len(addr_bytes) not in (24, 0): + raise InvalidParams("Addresses must be 24 or 0 bytes long") + return addr_bytes + + +def address_encoder(addr_bytes): + assert len(addr_bytes) == 24 + return data_encoder(addr_bytes) + + +def recipient_decoder(hex_str, allow_optional=False): + """Decode an recipient from hex with 0x prefix to 20 bytes.""" + if allow_optional and hex_str is None: + return None + recipient_bytes = data_decoder(hex_str) + if len(recipient_bytes) not in (20, 0): + raise InvalidParams("Addresses must be 20 or 0 bytes long") + return recipient_bytes + + +def recipient_encoder(recipient_bytes): + assert len(recipient_bytes) == 20 + return data_encoder(recipient_bytes) + + +def full_shard_key_decoder(hex_str): + b = data_decoder(hex_str) + if len(b) != 4: + raise InvalidParams("Full shard id must be 4 bytes") + return int.from_bytes(b, byteorder="big") + + +def full_shard_key_encoder(full_shard_key): + return data_encoder(full_shard_key.to_bytes(4, byteorder="big")) + + +def id_encoder(hash_bytes, full_shard_key): + """Encode hash and full_shard_key into hex""" + return data_encoder(hash_bytes + full_shard_key.to_bytes(4, byteorder="big")) + + +def id_decoder(hex_str): + """Decode an id to (hash, full_shard_key)""" + data_bytes = data_decoder(hex_str) + if len(data_bytes) != 36: + raise InvalidParams("Invalid id encoding") + return data_bytes[:32], int.from_bytes(data_bytes[32:], byteorder="big") + + +def hash_decoder(hex_str): + """Decode a block hash.""" + decoded = data_decoder(hex_str) + if len(decoded) != 32: + raise InvalidParams("Hashes must be 32 bytes long") + return decoded + + +def signature_decoder(hex_str): + """Decode a block signature.""" + if not hex_str: + return None + decoded = 
data_decoder(hex_str) + if len(decoded) != 65: + raise InvalidParams("Signature must be 65 bytes long") + return decoded + + +def bool_decoder(data): + if not isinstance(data, bool): + raise InvalidParams("Parameter must be boolean") + return data + + +def _add_posw_info_to_resp(d: Dict, diff: int, posw_info: PoSWInfo): + d["effectiveDifficulty"] = quantity_encoder(posw_info.effective_difficulty) + d["poswMineableBlocks"] = quantity_encoder(posw_info.posw_mineable_blocks) + d["poswMinedBlocks"] = quantity_encoder(posw_info.posw_mined_blocks) + d["stakingApplied"] = posw_info.effective_difficulty < diff + + +def root_block_encoder(block, extra_info): + header = block.header + + d = { + "id": data_encoder(header.get_hash()), + "height": quantity_encoder(header.height), + "hash": data_encoder(header.get_hash()), + "sealHash": data_encoder(header.get_hash_for_mining()), + "hashPrevBlock": data_encoder(header.hash_prev_block), + "idPrevBlock": data_encoder(header.hash_prev_block), + "nonce": quantity_encoder(header.nonce), + "hashMerkleRoot": data_encoder(header.hash_merkle_root), + "miner": address_encoder(header.coinbase_address.serialize()), + "coinbase": balances_encoder(header.coinbase_amount_map), + "difficulty": quantity_encoder(header.difficulty), + "timestamp": quantity_encoder(header.create_time), + "size": quantity_encoder(len(block.serialize())), + "minorBlockHeaders": [], + "signature": data_encoder(header.signature), + } + if extra_info: + _add_posw_info_to_resp(d, header.difficulty, extra_info) + + for header in block.minor_block_header_list: + h = minor_block_header_encoder(header) + d["minorBlockHeaders"].append(h) + return d + + +def minor_block_encoder(block, include_transactions=False, extra_info=None): + """Encode a block as JSON object. 
+ + :param block: a :class:`ethereum.block.Block` + :param include_transactions: if true transaction details are included, otherwise + only their hashes + :param extra_info: MinorBlockExtraInfo + :returns: a json encodable dictionary + """ + header = block.header + meta = block.meta + + header_info = minor_block_header_encoder(header) + d = { + **header_info, + "hashMerkleRoot": data_encoder(meta.hash_merkle_root), + "hashEvmStateRoot": data_encoder(meta.hash_evm_state_root), + "gasUsed": quantity_encoder(meta.evm_gas_used), + "size": quantity_encoder(len(block.serialize())), + } + if include_transactions: + d["transactions"] = [] + for i, _ in enumerate(block.tx_list): + d["transactions"].append(tx_encoder(block, i)) + else: + d["transactions"] = [ + id_encoder(tx.get_hash(), block.header.branch.get_full_shard_id()) + for tx in block.tx_list + ] + if extra_info: + _add_posw_info_to_resp(d, header.difficulty, extra_info) + return d + + +def minor_block_header_encoder(header: MinorBlockHeader) -> Dict: + d = { + "id": id_encoder(header.get_hash(), header.branch.get_full_shard_id()), + "height": quantity_encoder(header.height), + "hash": data_encoder(header.get_hash()), + "fullShardId": quantity_encoder(header.branch.get_full_shard_id()), + "chainId": quantity_encoder(header.branch.get_chain_id()), + "shardId": quantity_encoder(header.branch.get_shard_id()), + "hashPrevMinorBlock": data_encoder(header.hash_prev_minor_block), + "idPrevMinorBlock": id_encoder( + header.hash_prev_minor_block, header.branch.get_full_shard_id() + ), + "hashPrevRootBlock": data_encoder(header.hash_prev_root_block), + "nonce": quantity_encoder(header.nonce), + "miner": address_encoder(header.coinbase_address.serialize()), + "coinbase": balances_encoder(header.coinbase_amount_map), + "difficulty": quantity_encoder(header.difficulty), + "extraData": data_encoder(header.extra_data), + "gasLimit": quantity_encoder(header.evm_gas_limit), + "timestamp": quantity_encoder(header.create_time), + } + 
return d + + +def tx_encoder(block, i): + """Encode a transaction as JSON object. + + `transaction` is the `i`th transaction in `block`. + """ + tx = block.tx_list[i] + evm_tx = tx.tx.to_evm_tx() + branch = block.header.branch + return { + "id": id_encoder(tx.get_hash(), evm_tx.from_full_shard_key), + "hash": data_encoder(tx.get_hash()), + "nonce": quantity_encoder(evm_tx.nonce), + "timestamp": quantity_encoder(block.header.create_time), + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "blockId": id_encoder(block.header.get_hash(), branch.get_full_shard_id()), + "blockHeight": quantity_encoder(block.header.height), + "transactionIndex": quantity_encoder(i), + "from": data_encoder(evm_tx.sender), + "to": data_encoder(evm_tx.to), + "fromFullShardKey": full_shard_key_encoder(evm_tx.from_full_shard_key), + "toFullShardKey": full_shard_key_encoder(evm_tx.to_full_shard_key), + "value": quantity_encoder(evm_tx.value), + "gasPrice": quantity_encoder(evm_tx.gasprice), + "gas": quantity_encoder(evm_tx.startgas), + "data": data_encoder(evm_tx.data), + "networkId": quantity_encoder(evm_tx.network_id), + "transferTokenId": quantity_encoder(evm_tx.transfer_token_id), + "gasTokenId": quantity_encoder(evm_tx.gas_token_id), + "transferTokenStr": token_id_decode(evm_tx.transfer_token_id), + "gasTokenStr": token_id_decode(evm_tx.gas_token_id), + "version": quantity_encoder(evm_tx.version), + "r": quantity_encoder(evm_tx.r), + "s": quantity_encoder(evm_tx.s), + "v": quantity_encoder(evm_tx.v), + } + + +def tx_detail_encoder(tx): + """Encode a transaction detail object as JSON object. 
Used for indexing server.""" + return { + "txId": id_encoder(tx.tx_hash, tx.from_address.full_shard_key), + "fromAddress": address_encoder(tx.from_address.serialize()), + "toAddress": address_encoder(tx.to_address.serialize()) + if tx.to_address + else "0x", + "value": quantity_encoder(tx.value), + "transferTokenId": quantity_encoder(tx.transfer_token_id), + "transferTokenStr": token_id_decode(tx.transfer_token_id), + "gasTokenId": quantity_encoder(tx.gas_token_id), + "gasTokenStr": token_id_decode(tx.gas_token_id), + "blockHeight": quantity_encoder(tx.block_height), + "timestamp": quantity_encoder(tx.timestamp), + "success": tx.success, + "isFromRootChain": tx.is_from_root_chain, + "nonce": quantity_encoder(tx.nonce), + } + + +def loglist_encoder(loglist: List[Log], is_removed: bool = False): + """Encode a list of log""" + result = [] + for l in loglist: + result.append( + { + "logIndex": quantity_encoder(l.log_idx), + "transactionIndex": quantity_encoder(l.tx_idx), + "transactionHash": data_encoder(l.tx_hash), + "blockHash": data_encoder(l.block_hash), + "blockNumber": quantity_encoder(l.block_number), + "blockHeight": quantity_encoder(l.block_number), + "address": data_encoder(l.recipient), + "recipient": data_encoder(l.recipient), + "data": data_encoder(l.data), + "topics": [data_encoder(topic) for topic in l.topics], + "removed": is_removed, + } + ) + return result + + +def receipt_encoder(block: MinorBlock, i: int, receipt: TransactionReceipt): + tx_id, tx_hash = None, None # if empty, will be populated at call site + if i < len(block.tx_list): + tx = block.tx_list[i] + evm_tx = tx.tx.to_evm_tx() + tx_id = id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) + tx_hash = data_encoder(tx.get_hash()) + resp = { + "transactionId": tx_id, + "transactionHash": tx_hash, + "transactionIndex": quantity_encoder(i), + "blockId": id_encoder( + block.header.get_hash(), block.header.branch.get_full_shard_id() + ), + "blockHash": data_encoder(block.header.get_hash()), + 
"blockHeight": quantity_encoder(block.header.height), + "blockNumber": quantity_encoder(block.header.height), + "cumulativeGasUsed": quantity_encoder(receipt.gas_used), + "gasUsed": quantity_encoder(receipt.gas_used - receipt.prev_gas_used), + "status": quantity_encoder(1 if receipt.success == b"\x01" else 0), + "contractAddress": ( + address_encoder(receipt.contract_address.serialize()) + if not receipt.contract_address.is_empty() + else None + ), + "logs": loglist_encoder(receipt.logs), + "timestamp": quantity_encoder(block.header.create_time), + } + + return resp + + +def balances_encoder(balances: TokenBalanceMap) -> List[Dict]: + balance_list = [] + for k, v in balances.balance_map.items(): + balance_list.append( + { + "tokenId": quantity_encoder(k), + "tokenStr": token_id_decode(k), + "balance": quantity_encoder(v), + } + ) + return balance_list + + +def decode_arg(name, decoder, allow_optional=False): + """Create a decorator that applies `decoder` to argument `name`.""" + + @decorator + def new_f(f, *args, **kwargs): + call_args = inspect.getcallargs(f, *args, **kwargs) + call_args[name] = ( + decoder(call_args[name], allow_optional=True) + if allow_optional + else decoder(call_args[name]) + ) + return f(**call_args) + + return new_f + + +def encode_res(encoder): + """Create a decorator that applies `encoder` to the return value of the + decorated function. 
+ """ + + @decorator + async def new_f(f, *args, **kwargs): + res = await f(*args, **kwargs) + return encoder(res) + + return new_f + + +def block_height_decoder(data): + """Decode block height string, which can either be None, 'latest', 'earliest' or a hex number + of minor block height""" + if data is None or data == "latest": + return None + if data == "earliest": + return 0 + # TODO: support pending + return quantity_decoder(data) + + +def shard_id_decoder(data): + try: + return quantity_decoder(data) + except Exception: + return None + + +def eth_address_to_quarkchain_address_decoder(hex_str): + eth_hex = hex_str[2:] + if len(eth_hex) != 40: + raise InvalidParams("Addresses must be 40 or 0 bytes long") + return address_decoder("0x" + eth_hex + "00000001") + + +def _parse_log_request( + params: Dict, addr_decoder: Callable[[str], bytes] +) -> (bytes, bytes): + """Returns addresses and topics from a EVM log request.""" + addresses, topics = [], [] + if "address" in params: + if isinstance(params["address"], str): + addresses = [Address.deserialize(addr_decoder(params["address"]))] + elif isinstance(params["address"], list): + addresses = [ + Address.deserialize(addr_decoder(a)) for a in params["address"] + ] + if "topics" in params: + for topic_item in params["topics"]: + if isinstance(topic_item, str): + topics.append([data_decoder(topic_item)]) + elif isinstance(topic_item, list): + topics.append([data_decoder(tp) for tp in topic_item]) + return addresses, topics + + +public_methods = RpcMethods() +private_methods = RpcMethods() + + +# noinspection PyPep8Naming +class JSONRPCHttpServer: + @classmethod + async def start_public_server(cls, env, master_server): + server = cls( + env, + master_server, + env.cluster_config.JSON_RPC_PORT, + env.cluster_config.JSON_RPC_HOST, + public_methods, + ) + await server.start() + return server + + @classmethod + async def start_private_server(cls, env, master_server): + server = cls( + env, + master_server, + 
env.cluster_config.PRIVATE_JSON_RPC_PORT, + env.cluster_config.PRIVATE_JSON_RPC_HOST, + private_methods, + ) + await server.start() + return server + + @classmethod + async def start_test_server(cls, env, master_server): + methods = RpcMethods() + for method in public_methods.values(): + methods.add(method) + for method in private_methods.values(): + methods.add(method) + server = cls( + env, + master_server, + env.cluster_config.JSON_RPC_PORT, + env.cluster_config.JSON_RPC_HOST, + methods, + ) + await server.start() + return server + + def __init__( + self, env, master_server: MasterServer, port, host, methods: RpcMethods + ): + self.loop = asyncio.get_running_loop() + self.port = port + self.host = host + self.env = env + self.master = master_server + self.counters = dict() + + # Bind RPC handler functions to this instance + self.handlers = RpcMethods() + for rpc_name in methods: + func = methods[rpc_name] + self.handlers[rpc_name] = func.__get__(self, self.__class__) + + async def __handle(self, request): + request = await request.text() + Logger.info(request) + + d = dict() + try: + d = json.loads(request) + except Exception: + pass + method = d.get("method", "null") + if method in self.counters: + self.counters[method] += 1 + else: + self.counters[method] = 1 + # Use armor to prevent the handler from being cancelled when + # aiohttp server loses connection to client + response = await self.handlers.dispatch(d) + if response is None: + return web.Response() + if "error" in response: + Logger.error(response) + return web.json_response(response) + + async def start(self): + app = web.Application(client_max_size=JSON_RPC_CLIENT_REQUEST_MAX_SIZE) + cors = aiohttp_cors.setup(app) + route = app.router.add_post("/", self.__handle) + cors.add( + route, + { + "*": aiohttp_cors.ResourceOptions( + allow_credentials=True, + expose_headers=("X-Custom-Server-Header",), + allow_methods=["POST", "PUT"], + allow_headers=("X-Requested-With", "Content-Type"), + ) + }, + ) + 
self.runner = web.AppRunner(app, access_log=None) + await self.runner.setup() + site = web.TCPSite(self.runner, self.host, self.port) + await site.start() + + async def shutdown(self): + await self.runner.cleanup() + + # JSON RPC handlers + @public_methods.add + @decode_arg("quantity", quantity_decoder) + @encode_res(quantity_encoder) + async def echoQuantity(self, quantity): + return quantity + + @public_methods.add + @decode_arg("data", data_decoder) + @encode_res(data_encoder) + async def echoData(self, data): + return data + + @public_methods.add + async def networkInfo(self): + return { + "networkId": quantity_encoder( + self.master.env.quark_chain_config.NETWORK_ID + ), + "chainSize": quantity_encoder( + self.master.env.quark_chain_config.CHAIN_SIZE + ), + "shardSizes": [ + quantity_encoder(c.SHARD_SIZE) + for c in self.master.env.quark_chain_config.CHAINS + ], + "syncing": self.master.is_syncing(), + "mining": self.master.is_mining(), + "shardServerCount": len(self.master.slave_pool), + } + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + @encode_res(quantity_encoder) + async def getTransactionCount(self, address, block_height=None): + account_branch_data = await self.master.get_primary_account_data( + Address.deserialize(address), block_height + ) + return account_branch_data.transaction_count + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + async def getBalances(self, address, block_height=None): + account_branch_data = await self.master.get_primary_account_data( + Address.deserialize(address), block_height + ) + branch = account_branch_data.branch + balances = account_branch_data.token_balances + return { + "branch": quantity_encoder(branch.value), + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + 
"balances": balances_encoder(balances), + } + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + async def getAccountData(self, address, block_height=None, include_shards=False): + # do not allow specify height if client wants info on all shards + if include_shards and block_height is not None: + return None + + primary = None + address = Address.deserialize(address) + if not include_shards: + account_branch_data = await self.master.get_primary_account_data( + address, block_height + ) # type: AccountBranchData + branch = account_branch_data.branch + count = account_branch_data.transaction_count + + balances = account_branch_data.token_balances + primary = { + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + "balances": balances_encoder(balances), + "transactionCount": quantity_encoder(count), + "isContract": account_branch_data.is_contract, + "minedBlocks": quantity_encoder(account_branch_data.mined_blocks), + "poswMineableBlocks": quantity_encoder( + account_branch_data.posw_mineable_blocks + ), + } + return {"primary": primary} + + branch_to_account_branch_data = await self.master.get_account_data(address) + + shards = [] + for branch, account_branch_data in branch_to_account_branch_data.items(): + balances = account_branch_data.token_balances + data = { + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + "balances": balances_encoder(balances), + "transactionCount": quantity_encoder( + account_branch_data.transaction_count + ), + "isContract": account_branch_data.is_contract, + } + shards.append(data) + + if branch.get_full_shard_id() == self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + address.full_shard_key + ): + primary = 
data.copy() + primary["minedBlocks"] = quantity_encoder( + account_branch_data.mined_blocks + ) + primary["poswMineableBlocks"] = quantity_encoder( + account_branch_data.posw_mineable_blocks + ) + + return {"primary": primary, "shards": shards} + + @public_methods.add + async def sendTransaction(self, data): + def get_data_default(key, decoder, default=None): + if key in data: + return decoder(data[key]) + return default + + to = get_data_default("to", recipient_decoder, b"") + startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) + gasprice = get_data_default("gasPrice", quantity_decoder, DEFAULT_GASPRICE) + value = get_data_default("value", quantity_decoder, 0) + data_ = get_data_default("data", data_decoder, b"") + v = get_data_default("v", quantity_decoder, 0) + r = get_data_default("r", quantity_decoder, 0) + s = get_data_default("s", quantity_decoder, 0) + nonce = get_data_default("nonce", quantity_decoder, None) + + to_full_shard_key = get_data_default( + "toFullShardKey", full_shard_key_decoder, None + ) + from_full_shard_key = get_data_default( + "fromFullShardKey", full_shard_key_decoder, None + ) + network_id = get_data_default( + "networkId", quantity_decoder, self.master.env.quark_chain_config.NETWORK_ID + ) + + gas_token_id = get_data_default( + "gasTokenId", quantity_decoder, self.env.quark_chain_config.genesis_token + ) + transfer_token_id = get_data_default( + "transferTokenId", + quantity_decoder, + self.env.quark_chain_config.genesis_token, + ) + + if nonce is None: + raise InvalidParams("Missing nonce") + if not (v and r and s): + raise InvalidParams("Missing v, r, s") + if from_full_shard_key is None: + raise InvalidParams("Missing fromFullShardKey") + + if to_full_shard_key is None: + to_full_shard_key = from_full_shard_key + + evm_tx = EvmTransaction( + nonce, + gasprice, + startgas, + to, + value, + data_, + v=v, + r=r, + s=s, + from_full_shard_key=from_full_shard_key, + to_full_shard_key=to_full_shard_key, + 
network_id=network_id, + gas_token_id=gas_token_id, + transfer_token_id=transfer_token_id, + ) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + success = await self.master.add_transaction(tx) + if not success: + return EMPTY_TX_ID + return id_encoder(tx.get_hash(), from_full_shard_key) + + @public_methods.add + @decode_arg("tx_data", data_decoder) + async def sendRawTransaction(self, tx_data): + evm_tx = rlp.decode(tx_data, EvmTransaction) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + success = await self.master.add_transaction(tx) + if not success: + return EMPTY_TX_ID + return id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) + + @public_methods.add + @decode_arg("block_id", data_decoder) + @decode_arg("need_extra_info", bool_decoder) + async def getRootBlockById(self, block_id, need_extra_info=True): + block, extra_info = await self.master.get_root_block_by_height_or_hash( + None, block_id, need_extra_info + ) + if not block: + return None + return root_block_encoder(block, extra_info) + + @public_methods.add + @decode_arg("need_extra_info", bool_decoder) + async def getRootBlockByHeight(self, height=None, need_extra_info=True): + if height is not None: + height = quantity_decoder(height) + block, extra_info = await self.master.get_root_block_by_height_or_hash( + height, None, need_extra_info + ) + if not block: + return None + return root_block_encoder(block, extra_info) + + @public_methods.add + @decode_arg("block_id", id_decoder) + @decode_arg("include_transactions", bool_decoder) + @decode_arg("need_extra_info", bool_decoder) + async def getMinorBlockById( + self, block_id, include_transactions=False, need_extra_info=True + ): + block_hash, full_shard_key = block_id + try: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + except Exception: + return None + block, extra_info = await self.master.get_minor_block_by_hash( + block_hash, 
branch, need_extra_info + ) + if not block: + return None + return minor_block_encoder(block, include_transactions, extra_info) + + @public_methods.add + @decode_arg("full_shard_key", quantity_decoder) + @decode_arg("include_transactions", bool_decoder) + @decode_arg("need_extra_info", bool_decoder) + async def getMinorBlockByHeight( + self, + full_shard_key: int, + height=None, + include_transactions=False, + need_extra_info=True, + ): + if height is not None: + height = quantity_decoder(height) + try: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + except Exception: + return None + block, extra_info = await self.master.get_minor_block_by_height( + height, branch, need_extra_info + ) + if not block: + return None + return minor_block_encoder(block, include_transactions, extra_info) + + @public_methods.add + @decode_arg("tx_id", id_decoder) + async def getTransactionById(self, tx_id): + tx_hash, full_shard_key = tx_id + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) + if not minor_block: + return None + if len(minor_block.tx_list) <= i: + return None + return tx_encoder(minor_block, i) + + @public_methods.add + @decode_arg("block_height", block_height_decoder) + async def call(self, data, block_height=None): + return await self._call_or_estimate_gas( + is_call=True, block_height=block_height, **data + ) + + @public_methods.add + async def estimateGas(self, data): + return await self._call_or_estimate_gas(is_call=False, **data) + + @public_methods.add + async def getTransactionReceipt(self, tx_id): + id_bytes = data_decoder(tx_id) + if len(id_bytes) != 36: + raise InvalidParams("Invalid id encoding") + tx_hash, full_shard_key = ( + id_bytes[:32], + int.from_bytes(id_bytes[32:], byteorder="big"), + ) + branch = Branch( + 
self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + resp = await self.master.get_transaction_receipt(tx_hash, branch) + if not resp: + return None + minor_block, i, receipt = resp + + ret = receipt_encoder(minor_block, i, receipt) + if ret["transactionId"] is None: + ret["transactionId"] = tx_id + ret["transactionHash"] = data_encoder(tx_hash) + return ret + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + async def getLogs(self, data, full_shard_key): + return await self._get_logs(data, full_shard_key, decoder=address_decoder) + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("key", quantity_decoder) + @decode_arg("block_height", block_height_decoder) + # TODO: add block number + async def getStorageAt(self, address, key, block_height=None): + res = await self.master.get_storage_at( + Address.deserialize(address), key, block_height + ) + return data_encoder(res) if res is not None else None + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + async def getCode(self, address, block_height=None): + res = await self.master.get_code(Address.deserialize(address), block_height) + return data_encoder(res) if res is not None else None + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + @decode_arg("start", data_decoder) + @decode_arg("limit", quantity_decoder) + async def getAllTransactions(self, full_shard_key, start="0x", limit="0xa"): + """ "start" should be the "next" in the response for fetching next page. + "start" can also be "0x" to fetch from the beginning (i.e., latest). 
+ """ + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + if limit > 20: + limit = 20 + result = await self.master.get_all_transactions(branch, start, limit) + if not result: + return None + tx_list, next = result + return { + "txList": [tx_detail_encoder(tx) for tx in tx_list], + "next": data_encoder(next), + } + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("start", data_decoder) + @decode_arg("limit", quantity_decoder) + @decode_arg("transfer_token_id", quantity_decoder, allow_optional=True) + async def getTransactionsByAddress( + self, address, start="0x", limit="0xa", transfer_token_id=None + ): + """ "start" should be the "next" in the response for fetching next page. + "start" can also be "0x" to fetch from the beginning (i.e., latest). + "start" can be "0x00" to fetch the pending outgoing transactions. + """ + address = Address.create_from(address) + if limit > 20: + limit = 20 + result = await self.master.get_transactions_by_address( + address, transfer_token_id, start, limit + ) + if not result: + return None + tx_list, next = result + return { + "txList": [tx_detail_encoder(tx) for tx in tx_list], + "next": data_encoder(next), + } + + @public_methods.add + async def getJrpcCalls(self): + return self.counters + + @public_methods.add + async def gasPrice(self, full_shard_key: str, token_id: Optional[str] = None): + full_shard_key = shard_id_decoder(full_shard_key) + if full_shard_key is None: + return None + parsed_token_id = ( + quantity_decoder(token_id) if token_id else token_id_encode("QKC") + ) + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + ret = await self.master.gas_price(branch, parsed_token_id) + if ret is None: + return None + return quantity_encoder(ret) + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + @decode_arg("header_hash", hash_decoder) + 
@decode_arg("nonce", quantity_decoder) + @decode_arg("mixhash", hash_decoder) + @decode_arg("signature", signature_decoder) + async def submitWork( + self, full_shard_key, header_hash, nonce, mixhash, signature=None + ): + branch = None # `None` means getting work from root chain + if full_shard_key is not None: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + return await self.master.submit_work( + branch, header_hash, nonce, mixhash, signature + ) + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + @decode_arg("coinbase_addr", recipient_decoder, allow_optional=True) + async def getWork(self, full_shard_key, coinbase_addr=None): + branch = None # `None` means getting work from root chain + if full_shard_key is not None: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + work, optional_divider = await self.master.get_work(branch, coinbase_addr) + if work is None: + return None + ret = [ + data_encoder(work.hash), + quantity_encoder(work.height), + quantity_encoder(work.difficulty), + ] + if optional_divider is not None: + ret.append(quantity_encoder(optional_divider)) + return ret + + @public_methods.add + @decode_arg("block_id", data_decoder) + async def getRootHashConfirmingMinorBlockById(self, block_id): + retv = self.master.root_state.db.get_root_block_confirming_minor_block(block_id) + return data_encoder(retv) if retv else None + + @public_methods.add + @decode_arg("tx_id", id_decoder) + async def getTransactionConfirmedByNumberRootBlocks(self, tx_id): + tx_hash, full_shard_key = tx_id + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) + if not minor_block: + return None + confirming_hash = ( + 
self.master.root_state.db.get_root_block_confirming_minor_block( + minor_block.header.get_hash() + + minor_block.header.branch.get_full_shard_id().to_bytes( + 4, byteorder="big" + ) + ) + ) + if confirming_hash is None: + return quantity_encoder(0) + confirming_header = self.master.root_state.db.get_root_block_header_by_hash( + confirming_hash + ) + canonical_hash = self.master.root_state.db.get_root_block_hash_by_height( + confirming_header.height + ) + if canonical_hash != confirming_hash: + return quantity_encoder(0) + tip = self.master.root_state.tip + return quantity_encoder(tip.height - confirming_header.height + 1) + + ######################## Ethereum JSON RPC ######################## + + @public_methods.add + async def net_version(self): + return quantity_encoder(self.master.env.quark_chain_config.NETWORK_ID) + + @public_methods.add + async def eth_gasPrice(self, shard): + return await self.gasPrice(shard, quantity_encoder(token_id_encode("QKC"))) + + @public_methods.add + @decode_arg("block_height", block_height_decoder) + @decode_arg("include_transactions", bool_decoder) + async def eth_getBlockByNumber(self, block_height, include_transactions): + """ + NOTE: only support block_id "latest" or hex + """ + + def block_transcoder(block): + """ + QuarkChain Block => ETH Block + """ + return { + **block, + "number": block["height"], + "parentHash": block["hashPrevMinorBlock"], + "sha3Uncles": "", + "logsBloom": "", + "transactionsRoot": block["hashMerkleRoot"], # ? + "stateRoot": block["hashEvmStateRoot"], # ? 
+ } + + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key(0) + ) + block, _ = await self.master.get_minor_block_by_height( + block_height, branch, need_extra_info=False + ) + if block is None: + return None + return block_transcoder(minor_block_encoder(block)) + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("shard", shard_id_decoder) + @encode_res(quantity_encoder) + async def eth_getBalance(self, address, shard=None): + address = Address.deserialize(address) + if shard is not None: + address = Address(address.recipient, shard) + account_branch_data = await self.master.get_primary_account_data(address) + balance = account_branch_data.token_balances.balance_map.get( + token_id_encode("QKC"), 0 + ) + return balance + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("shard", shard_id_decoder) + @encode_res(quantity_encoder) + async def eth_getTransactionCount(self, address, shard=None): + address = Address.deserialize(address) + if shard is not None: + address = Address(address.recipient, shard) + account_branch_data = await self.master.get_primary_account_data(address) + return account_branch_data.transaction_count + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("shard", shard_id_decoder) + async def eth_getCode(self, address, shard=None): + addr = Address.deserialize(address) + if shard is not None: + addr = Address(addr.recipient, shard) + res = await self.master.get_code(addr, None) + return data_encoder(res) if res is not None else None + + @public_methods.add + @decode_arg("shard", shard_id_decoder) + async def eth_call(self, data, shard=None): + """Returns the result of the transaction application without putting in block chain""" + data = self._convert_eth_call_data(data, shard) + return await self.call(data) + + @public_methods.add + async def 
eth_sendRawTransaction(self, tx_data): + return await self.sendRawTransaction(tx_data) + + @public_methods.add + async def eth_getTransactionReceipt(self, tx_id): + return await self.getTransactionReceipt(tx_id) + + @public_methods.add + @decode_arg("shard", shard_id_decoder) + async def eth_estimateGas(self, data, shard): + data = self._convert_eth_call_data(data, shard) + return await self.estimateGas(**data) + + @public_methods.add + @decode_arg("shard", shard_id_decoder) + async def eth_getLogs(self, data, shard): + return await self._get_logs( + data, shard, decoder=eth_address_to_quarkchain_address_decoder + ) + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("key", quantity_decoder) + @decode_arg("shard", shard_id_decoder) + async def eth_getStorageAt(self, address, key, shard=None): + addr = Address.deserialize(address) + if shard is not None: + addr = Address(addr.recipient, shard) + res = await self.master.get_storage_at(addr, key, None) + return data_encoder(res) if res is not None else None + + ######################## Private Methods ######################## + + @private_methods.add + @decode_arg("branch", quantity_decoder) + @decode_arg("block_data", data_decoder) + async def addBlock(self, branch, block_data): + if branch == 0: + block = RootBlock.deserialize(block_data) + return await self.master.add_root_block_from_miner(block) + return await self.master.add_raw_minor_block(Branch(branch), block_data) + + @private_methods.add + async def getPeers(self): + peer_list = [] + for peer_id, peer in self.master.network.active_peer_pool.items(): + peer_list.append( + { + "id": data_encoder(peer_id), + "ip": quantity_encoder(int(peer.ip)), + "port": quantity_encoder(peer.port), + } + ) + return {"peers": peer_list} + + @private_methods.add + async def getSyncStats(self): + return self.master.synchronizer.get_stats() + + @private_methods.add + async def getStats(self): + # This JRPC doesn't follow the 
standard encoding + return await self.master.get_stats() + + @private_methods.add + async def getBlockCount(self): + # This JRPC doesn't follow the standard encoding + return self.master.get_block_count() + + @private_methods.add + async def createTransactions(self, **load_test_data): + """Create transactions for load testing""" + + def get_data_default(key, decoder, default=None): + if key in load_test_data: + return decoder(load_test_data[key]) + return default + + num_tx_per_shard = load_test_data["numTxPerShard"] + x_shard_percent = load_test_data["xShardPercent"] + to = get_data_default("to", recipient_decoder, b"") + startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) + gasprice = get_data_default( + "gasPrice", quantity_decoder, int(DEFAULT_GASPRICE / 10) + ) + value = get_data_default("value", quantity_decoder, 0) + data = get_data_default("data", data_decoder, b"") + # FIXME: can't support specifying full shard ID to 0. currently is regarded as not set + from_full_shard_key = get_data_default( + "fromFullShardKey", full_shard_key_decoder, 0 + ) + gas_token_id = get_data_default( + "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token + ) + transfer_token_id = get_data_default( + "transfer_token_id", + quantity_decoder, + self.env.quark_chain_config.genesis_token, + ) + # build sample tx + evm_tx_sample = EvmTransaction( + 0, + gasprice, + startgas, + to, + value, + data, + from_full_shard_key=from_full_shard_key, + gas_token_id=gas_token_id, + transfer_token_id=transfer_token_id, + ) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx_sample)) + return await self.master.create_transactions( + num_tx_per_shard, x_shard_percent, tx + ) + + @private_methods.add + async def setTargetBlockTime(self, root_block_time=0, minor_block_time=0): + """0 will not update existing value""" + return await self.master.set_target_block_time( + root_block_time, minor_block_time + ) + + @public_methods.add + 
@decode_arg("block_id", id_decoder)
+    @decode_arg("root_block_id", data_decoder, allow_optional=True)
+    @decode_arg("token_id", quantity_decoder)  # default: QKC
+    @decode_arg("start", data_decoder, allow_optional=True)
+    @decode_arg("limit", quantity_decoder)
+    async def getTotalBalance(
+        self, block_id, root_block_id=None, token_id="0x8bb0", start=None, limit="0x64"
+    ):
+        if limit > 10000:
+            limit = 10000
+        block_hash, full_shard_key = block_id
+        full_shard_id = (
+            self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key(
+                full_shard_key
+            )
+        )
+        try:
+            result = await self.master.get_total_balance(
+                Branch(full_shard_id), block_hash, root_block_id, token_id, start, limit
+            )
+        except Exception:
+            raise ServerError  # FIXME(review): ServerError no longer imported — add it to the jsonrpcserver import
+        if not result:
+            raise InvalidRequest  # FIXME(review): InvalidRequest no longer imported — add it to the jsonrpcserver import
+        total_balance, next_start = result
+        return {
+            "totalBalance": quantity_encoder(total_balance),
+            "next": data_encoder(next_start),
+        }
+
+    @private_methods.add
+    async def setMining(self, mining):
+        """Turn on / off mining"""
+        return await self.master.set_mining(mining)
+
+    @private_methods.add
+    async def getJrpcCalls(self):
+        return self.counters
+
+    @private_methods.add
+    async def getKadRoutingTableSize(self):
+        """Returns number of nodes in the p2p discovery routing table"""
+        if not isinstance(self.master.network, P2PManager):
+            raise InvalidRequest("network is not P2P")  # FIXME(review): InvalidRequest not imported
+        return len(self.master.network.server.discovery.proto.routing)
+
+    @private_methods.add
+    async def getKadRoutingTable(self):
+        """returns a list of nodes in the p2p discovery routing table, in the enode format
+        eg. 
"enode://PUBKEY@IP:PORT" + """ + if not isinstance(self.master.network, P2PManager): + raise InvalidRequest("network is not P2P") + return [n.to_uri() for n in self.master.network.server.discovery.proto.routing] + + @public_methods.add + async def getTotalSupply(self): + total_supply = self.master.get_total_supply() + return quantity_encoder(total_supply) if total_supply else None + + @staticmethod + def _convert_eth_call_data(data, shard): + to_address = Address.create_from( + eth_address_to_quarkchain_address_decoder(data["to"]) + ) + if shard: + to_address = Address(to_address.recipient, shard) + data["to"] = "0x" + to_address.serialize().hex() + if "from" in data: + from_address = Address.create_from( + eth_address_to_quarkchain_address_decoder(data["from"]) + ) + if shard: + from_address = Address(from_address.recipient, shard) + data["from"] = "0x" + from_address.serialize().hex() + return data + + async def _get_logs(self, data, full_shard_key, decoder: Callable[[str], bytes]): + start_block = block_height_decoder(data.get("fromBlock", "latest")) + end_block = block_height_decoder(data.get("toBlock", "latest")) + addresses, topics = _parse_log_request(data, decoder) + if full_shard_key is None: + raise InvalidParams("Full shard key is required to get logs") + addresses = [Address(a.recipient, full_shard_key) for a in addresses] + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + logs = await self.master.get_logs( + addresses, topics, start_block, end_block, branch + ) + if logs is None: + return None + return loglist_encoder(logs) + + async def _call_or_estimate_gas(self, is_call: bool, **data): + """Returns the result of the transaction application without putting in block chain""" + if not isinstance(data, dict): + raise InvalidParams("Transaction must be an object") + + def get_data_default(key, decoder, default=None): + if key in data: + return decoder(data[key]) + return default + + to 
= get_data_default("to", address_decoder, None) + if to is None: + to_full_shard_byte = b"\x00" * 4 + to = b"" + else: + to_full_shard_byte = to[20:] + to = to[:20] + + to_full_shard_key = int.from_bytes(to_full_shard_byte, "big") + gas = get_data_default("gas", quantity_decoder, 0) + gas_price = get_data_default("gasPrice", quantity_decoder, 0) + value = get_data_default("value", quantity_decoder, 0) + data_ = get_data_default("data", data_decoder, b"") + sender = get_data_default( + "from", address_decoder, b"\x00" * 20 + to_full_shard_byte + ) + sender_address = Address.create_from(sender) + from_full_shard_key = sender_address.full_shard_key + + gas_token_id = get_data_default( + "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token + ) + transfer_token_id = get_data_default( + "transfer_token_id", + quantity_decoder, + self.env.quark_chain_config.genesis_token, + ) + + network_id = self.master.env.quark_chain_config.NETWORK_ID + + nonce = 0 # slave will fill in the real nonce + evm_tx = EvmTransaction( + nonce, + gas_price, + gas, + to, + value, + data_, + from_full_shard_key=from_full_shard_key, + to_full_shard_key=to_full_shard_key, + network_id=network_id, + gas_token_id=gas_token_id, + transfer_token_id=transfer_token_id, + ) + + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + if is_call: + # xshard not supported for now + is_same_shard = self.master.env.quark_chain_config.is_same_full_shard( + to_full_shard_key, from_full_shard_key + ) + if not is_same_shard: + raise InvalidParams("Call cross-shard tx not supported yet") + res = await self.master.execute_transaction( + tx, sender_address, data["block_height"] + ) + return data_encoder(res) if res is not None else None + else: # estimate gas + res = await self.master.estimate_gas(tx, sender_address) + return quantity_encoder(res) if res is not None else None + + +class JSONRPCWebsocketServer: + @classmethod + async def start_websocket_server(cls, env, 
slave_server): + server = cls( + env, + slave_server, + env.slave_config.WEBSOCKET_JSON_RPC_PORT, + env.slave_config.HOST, + public_methods, + ) + await server.start() + return server + + def __init__( + self, env, slave_server: SlaveServer, port, host, methods: RpcMethods + ): + self.loop = asyncio.get_running_loop() + self.port = port + self.host = host + self.env = env + self.slave = slave_server + self.counters = dict() + self.pending_tx_cache = LRUCache(maxsize=1024) + + # Bind RPC handler functions to this instance + self.handlers = RpcMethods() + for rpc_name in methods: + func = methods[rpc_name] + self.handlers[rpc_name] = func.__get__(self, self.__class__) + + self.shard_subscription_managers = self.slave.shard_subscription_managers + + async def __handle(self, websocket): + sub_ids = dict() # per-websocket var, Dict[sub_id, full_shard_id] + try: + async for message in websocket: + Logger.info(message) + + d = dict() + try: + d = json.loads(message) + except Exception: + raise InvalidParams("Cannot parse message as JSON") + method = d.get("method", "null") + if method in self.counters: + self.counters[method] += 1 + else: + self.counters[method] = 1 + msg_id = d.get("id", 0) + + response = await self.handlers.dispatch( + d, + context={ + "websocket": websocket, + "msg_id": msg_id, + "sub_ids": sub_ids, + }, + ) + + if response is None: + continue + if "error" in response: + Logger.error(response) + else: + if method == "subscribe": + sub_id = response["result"] + full_shard_id = shard_id_decoder(d.get("params")[1]) + sub_ids[sub_id] = full_shard_id + elif method == "unsubscribe": + sub_id = d.get("params")[0] + del sub_ids[sub_id] + await websocket.send(json.dumps(response)) + finally: # current websocket connection terminates, remove subscribers in this connection + for sub_id, full_shard_id in sub_ids.items(): + try: + shard_subscription_manager = self.shard_subscription_managers[ + full_shard_id + ] + 
shard_subscription_manager.remove_subscriber(sub_id)
+                except Exception:
+                    pass
+
+    async def start(self):
+        self._server = await websockets.serve(self.__handle, self.host, self.port)
+
+    def shutdown(self):
+        if hasattr(self, '_server') and self._server is not None:
+            self._server.close()
+
+    @staticmethod
+    def response_transcoder(sub_id, result):
+        return {
+            "jsonrpc": "2.0",
+            "method": "subscription",
+            "params": {"subscription": sub_id, "result": result},
+        }
+
+    @public_methods.add
+    async def subscribe(self, sub_type, full_shard_id, params=None, context=None):
+        assert context is not None
+        full_shard_id = shard_id_decoder(full_shard_id)
+        if full_shard_id is None:
+            raise InvalidParams("Invalid full shard ID")
+        branch = Branch(full_shard_id)
+        shard = self.slave.shards.get(branch, None)
+        if not shard:
+            raise InvalidParams("Full shard ID not found")
+
+        websocket = context["websocket"]
+        sub_id = "0x" + uuid.uuid4().hex
+        shard_subscription_manager = self.shard_subscription_managers[full_shard_id]
+
+        extra = None
+        if sub_type == SUB_LOGS:
+            addresses, topics = _parse_log_request(params, address_decoder)
+            addresses = [Address(a.recipient, full_shard_id) for a in addresses]
+            extra = lambda candidate_blocks: LogFilter.create_from_block_candidates(
+                shard.state.db, addresses, topics, candidate_blocks
+            )
+
+        shard_subscription_manager.add_subscriber(sub_type, sub_id, websocket, extra)
+        return sub_id
+
+    @public_methods.add
+    async def unsubscribe(self, sub_id, context=None):
+        assert context is not None
+        sub_ids = context["sub_ids"]
+        if sub_id not in sub_ids:
+            raise InvalidParams("Subscription ID not found")
+
+        full_shard_id = sub_ids[sub_id]
+        shard_subscription_manager = self.shard_subscription_managers[full_shard_id]
+        shard_subscription_manager.remove_subscriber(sub_id)
+
+        return True
diff --git a/quarkchain/cluster/tests/test_jsonrpc.py b/quarkchain/cluster/tests/test_jsonrpc.py
index 663cc7cee..3d4e6844b 100644
--- 
a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py @@ -1,1823 +1,1827 @@ -import json -import unittest -from contextlib import contextmanager -import websockets - -from quarkchain.cluster.cluster_config import ClusterConfig -from quarkchain.cluster.jsonrpc import ( - EMPTY_TX_ID, - JSONRPCHttpServer, - JSONRPCWebsocketServer, - quantity_encoder, - data_encoder, -) -from quarkchain.cluster.miner import DoubleSHA256, MiningWork -from quarkchain.cluster.tests.test_utils import ( - create_transfer_transaction, - ClusterContext, - create_contract_creation_transaction, - create_contract_creation_with_event_transaction, - create_contract_with_storage_transaction, -) -from quarkchain.config import ConsensusType -from quarkchain.core import ( - Address, - Identity, - SerializedEvmTransaction, - TypedTransaction, -) -from quarkchain.env import DEFAULT_ENV -from quarkchain.evm.messages import mk_contract_address -from quarkchain.evm.transactions import Transaction as EvmTransaction -from quarkchain.utils import call_async, sha3_256, token_id_encode -from quarkchain.jsonrpc_client import AsyncJsonRpcClient, JsonRpcError - - -@contextmanager -def jrpc_http_server_context(master): - env = DEFAULT_ENV.copy() - env.cluster_config = ClusterConfig() - env.cluster_config.JSON_RPC_PORT = 38391 - # to pass the circleCi - env.cluster_config.JSON_RPC_HOST = "127.0.0.1" - server = call_async(JSONRPCHttpServer.start_test_server(env, master)) - try: - yield server - finally: - call_async(server.shutdown()) - - -rpc_client = AsyncJsonRpcClient("http://localhost:38391") - -def send_request(method, *args): - return call_async(rpc_client.call(method, *args)) - - -class TestJSONRPCHttp(unittest.TestCase): - def test_getTransactionCount(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - with ClusterContext( - 1, acc1, small_coinbase=True - 
) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - stats = call_async(master.get_stats()) - self.assertTrue("posw" in json.dumps(stats)) - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - for i in range(3): - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertEqual(i + 1, block.header.height) - self.assertTrue( - call_async(clusters[0].get_shard(2 | 0).add_block(block)) - ) - - response = send_request( - "getTransactionCount", ["0x" + acc2.serialize().hex()] - ) - self.assertEqual(response, "0x0") - - response = send_request( - "getTransactionCount", ["0x" + acc1.serialize().hex()] - ) - self.assertEqual(response, "0x3") - response = send_request( - "getTransactionCount", ["0x" + acc1.serialize().hex(), "latest"] - ) - self.assertEqual(response, "0x3") - - for i in range(3): - response = send_request( - "getTransactionCount", ["0x" + acc1.serialize().hex(), hex(i + 1)] - ) - self.assertEqual(response, hex(i + 1)) - - def test_getBalance(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - response = send_request("getBalances", ["0x" + acc1.serialize().hex()]) - self.assertListEqual( - response["balances"], - [{"tokenId": "0x8bb0", "tokenStr": "QKC", "balance": "0xf4240"}], - ) - - response = send_request("eth_getBalance", ["0x" + acc1.recipient.hex()]) - self.assertEqual(response, "0xf4240") - - def test_sendTransaction(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, 
full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - master = clusters[0].master - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - evm_tx = EvmTransaction( - nonce=0, - gasprice=6, - startgas=30000, - to=acc2.recipient, - value=15, - data=b"", - from_full_shard_key=acc1.full_shard_key, - to_full_shard_key=acc2.full_shard_key, - network_id=slaves[0].env.quark_chain_config.NETWORK_ID, - gas_token_id=master.env.quark_chain_config.genesis_token, - transfer_token_id=master.env.quark_chain_config.genesis_token, - ) - evm_tx.sign(id1.get_key()) - request = dict( - to="0x" + acc2.recipient.hex(), - gasPrice="0x6", - gas=hex(30000), - value="0xf", # 15 - v=quantity_encoder(evm_tx.v), - r=quantity_encoder(evm_tx.r), - s=quantity_encoder(evm_tx.s), - nonce="0x0", - fromFullShardKey="0x00000000", - toFullShardKey="0x00000001", - network_id=hex(slaves[0].env.quark_chain_config.NETWORK_ID), - ) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - response = send_request("sendTransaction", [request]) - - self.assertEqual(response, "0x" + tx.get_hash().hex() + "00000000") - state = clusters[0].get_shard_state(2 | 0) - self.assertEqual(len(state.tx_queue), 1) - self.assertEqual( - state.tx_queue.pop_transaction( - state.get_transaction_count - ).tx.to_evm_tx(), - evm_tx, - ) - - def test_sendTransaction_with_bad_signature(self): - """ sendTransaction validates signature """ - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - - block = call_async( 
- master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = dict( - to="0x" + acc2.recipient.hex(), - gasPrice="0x6", - gas=hex(30000), - value="0xf", - v="0x1", - r="0x2", - s="0x3", - nonce="0x0", - fromFullShardKey="0x00000000", - toFullShardKey="0x00000001", - ) - self.assertEqual(send_request("sendTransaction", [request]), EMPTY_TX_ID) - self.assertEqual(len(clusters[0].get_shard_state(2 | 0).tx_queue), 0) - - def test_sendTransaction_missing_from_full_shard_key(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = dict( - to="0x" + acc1.recipient.hex(), - gasPrice="0x6", - gas=hex(30000), - value="0xf", - v="0x1", - r="0x2", - s="0x3", - nonce="0x0", - ) - - with self.assertRaises(Exception): - send_request("sendTransaction", [request]) - - def test_getMinorBlock(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - # By id - for 
need_extra_info in [True, False]: - resp = send_request( - "getMinorBlockById", - [ - "0x" + block1.header.get_hash().hex() + "0" * 8, - False, - need_extra_info, - ], - ) - self.assertEqual( - resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" - ) - - resp = send_request( - "getMinorBlockById", - ["0x" + block1.header.get_hash().hex() + "0" * 8, True], - ) - self.assertEqual( - resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() - ) - - resp = send_request("getMinorBlockById", ["0x" + "ff" * 36, True]) - self.assertIsNone(resp) - - # By height - for need_extra_info in [True, False]: - resp = send_request( - "getMinorBlockByHeight", ["0x0", "0x1", False, need_extra_info] - ) - self.assertEqual( - resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" - ) - - resp = send_request("getMinorBlockByHeight", ["0x0", "0x1", True]) - self.assertEqual( - resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() - ) - - resp = send_request("getMinorBlockByHeight", ["0x1", "0x2", False]) - self.assertIsNone(resp) - resp = send_request("getMinorBlockByHeight", ["0x0", "0x4", False]) - self.assertIsNone(resp) - - def test_getRootblockConfirmationIdAndCount(self): - # TODO test root chain forks - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - 
master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - tx_id = ( - "0x" - + tx.get_hash().hex() - + acc1.full_shard_key.to_bytes(4, "big").hex() - ) - resp = send_request("getTransactionById", [tx_id]) - self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) - self.assertEqual( - resp["blockId"], - "0x" - + block1.header.get_hash().hex() - + block1.header.branch.get_full_shard_id() - .to_bytes(4, byteorder="big") - .hex(), - ) - minor_hash = resp["blockId"] - - # zero root block confirmation - resp_hash = send_request( - "getRootHashConfirmingMinorBlockById", [minor_hash] - ) - self.assertIsNone( - resp_hash, "should return None for unconfirmed minor blocks" - ) - resp_count = send_request( - "getTransactionConfirmedByNumberRootBlocks", [tx_id] - ) - self.assertEqual(resp_count, "0x0") - - # 1 root block confirmation - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - resp_hash = send_request( - "getRootHashConfirmingMinorBlockById", [minor_hash] - ) - self.assertIsNotNone(resp_hash, "confirmed by root block") - self.assertEqual(resp_hash, "0x" + block.header.get_hash().hex()) - resp_count = send_request( - "getTransactionConfirmedByNumberRootBlocks", [tx_id] - ) - self.assertEqual(resp_count, "0x1") - - # 2 root block confirmation - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - resp_hash = send_request( - "getRootHashConfirmingMinorBlockById", [minor_hash] - ) - self.assertIsNotNone(resp_hash, "confirmed by root block") - self.assertNotEqual(resp_hash, "0x" + block.header.get_hash().hex()) - resp_count = send_request( - "getTransactionConfirmedByNumberRootBlocks", [tx_id] - ) - self.assertEqual(resp_count, "0x2") - - def test_getTransactionById(self): - id1 = Identity.create_random_identity() - acc1 
= Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - resp = send_request( - "getTransactionById", - [ - "0x" - + tx.get_hash().hex() - + acc1.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) - - def test_call_success(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - - response = send_request( - "call", [{"to": "0x" + acc1.serialize().hex(), "gas": hex(21000)}] - ) - - self.assertEqual(response, "0x") - self.assertEqual( - len(clusters[0].get_shard_state(2 | 0).tx_queue), - 0, - "should not affect tx queue", - ) - - def test_call_success_default_gas(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - - # gas is not specified in the request - response = send_request( - "call", [{"to": "0x" + acc1.serialize().hex()}, "latest"] - ) - - self.assertEqual(response, "0x") - self.assertEqual( - len(clusters[0].get_shard_state(2 | 
0).tx_queue), - 0, - "should not affect tx queue", - ) - - def test_call_failure(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - - # insufficient gas - response = send_request( - "call", [{"to": "0x" + acc1.serialize().hex(), "gas": "0x1"}, None] - ) - - self.assertIsNone(response, "failed tx should return None") - self.assertEqual( - len(clusters[0].get_shard_state(2 | 0).tx_queue), - 0, - "should not affect tx queue", - ) - - def test_getTransactionReceipt_not_exist(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + bytes(36).hex()]) - self.assertIsNone(resp) - - def test_getTransactionReceipt_on_transfer(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request( - endpoint, - [ - "0x" - + tx.get_hash().hex() - + acc1.full_shard_key.to_bytes(4, "big").hex() - ], - 
) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0x5208") - self.assertIsNone(resp["contractAddress"]) - - def test_getTransactionReceipt_on_xshard_transfer_before_enabling_EVM(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_from_identity(id1, full_shard_key=0x00010000) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - # disable EVM to have fake xshard receipts - master.env.quark_chain_config.ENABLE_EVM_TIMESTAMP = 2 ** 64 - 1 - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - s1, s2 = ( - clusters[0].get_shard_state(2 | 0), - clusters[0].get_shard_state(0x00010002), - ) - tx_gen = lambda s, f, t: create_transfer_transaction( - shard_state=s, - key=id1.get_key(), - from_address=f, - to_address=t, - gas=21000 if f == t else 30000, - value=12345, - ) - tx1 = tx_gen(s1, acc1, acc2) - self.assertTrue(slaves[0].add_tx(tx1)) - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) - - root_block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - - call_async(master.add_root_block(root_block)) - - tx2 = tx_gen(s2, acc2, acc2) - self.assertTrue(slaves[0].add_tx(tx2)) - b3 = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=0x00010002) - ) - self.assertTrue(call_async(clusters[0].get_shard(0x00010002).add_block(b3))) - - # in-shard tx 21000 + receiving x-shard tx 9000 - self.assertEqual(s2.evm_state.gas_used, 30000) - self.assertEqual(s2.evm_state.xshard_receive_gas_used, 9000) - - for endpoint in 
("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request( - endpoint, - [ - "0x" - + tx2.get_hash().hex() - + acc2.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["transactionHash"], "0x" + tx2.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], hex(30000)) - self.assertEqual(resp["gasUsed"], hex(21000)) - self.assertIsNone(resp["contractAddress"]) - - # query xshard tx receipt on the target shard - resp = send_request( - endpoint, - [ - "0x" - + tx1.get_hash().hex() - + acc2.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["status"], "0x1") - # other fields are fake - self.assertEqual(resp["cumulativeGasUsed"], hex(0)) - self.assertEqual(resp["gasUsed"], hex(0)) - - def test_getTransactionReceipt_on_xshard_transfer_after_enabling_EVM(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_from_identity(id1, full_shard_key=1) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - s1, s2 = ( - clusters[0].get_shard_state(2 | 0), - clusters[0].get_shard_state(2 | 1), - ) - tx = create_transfer_transaction( - shard_state=s1, - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - # source shard - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) - # root chain - root_block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(root_block)) - # target shard - 
b3 = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=0b11) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(b3))) - - # query xshard tx receipt on the target shard - resp = send_request( - "getTransactionReceipt", - [ - "0x" - + tx.get_hash().hex() - + acc2.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["transactionIndex"], "0x3") - self.assertEqual(resp["cumulativeGasUsed"], hex(9000)) - self.assertEqual(resp["gasUsed"], hex(9000)) - - def test_getTransactionReceipt_on_contract_creation(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - to_full_shard_key = acc1.full_shard_key + 2 - tx = create_contract_creation_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=to_full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0x213eb") - - contract_address = mk_contract_address( - acc1.recipient, 0, to_full_shard_key - ) - self.assertEqual( - resp["contractAddress"], - "0x" - + contract_address.hex() - + to_full_shard_key.to_bytes(4, "big").hex(), - ) - - def test_getTransactionReceipt_on_xshard_contract_creation(self): - id1 = Identity.create_random_identity() - 
acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - # Add a root block to update block gas limit for xshard tx throttling - # so that the following tx can be processed - root_block = call_async( - master.get_next_block_to_mine(acc1, branch_value=None) - ) - call_async(master.add_root_block(root_block)) - - to_full_shard_key = acc1.full_shard_key + 1 - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=to_full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0x11374") - self.assertIsNone(resp["contractAddress"]) - - # x-shard contract creation should succeed. 
check target shard - root_block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) # root chain - call_async(master.add_root_block(root_block)) - block2 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b11) - ) # target shard - self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block2))) - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000003"]) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0xc515") - self.assertIsNotNone(resp["contractAddress"]) - - def test_getLogs(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - expected_log_parts = { - "logIndex": "0x0", - "transactionIndex": "0x0", - "blockNumber": "0x1", - "blockHeight": "0x1", - "data": "0x", - } - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - # Add a root block to update block gas limit for xshard tx throttling - # so that the following tx can be processed - root_block = call_async( - master.get_next_block_to_mine(acc1, branch_value=None) - ) - call_async(master.add_root_block(root_block)) - - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for using_eth_endpoint in (True, False): - 
shard_id = hex(acc1.full_shard_key) - if using_eth_endpoint: - req = lambda o: send_request("eth_getLogs", [o, shard_id]) - else: - # `None` needed to bypass some request modification - req = lambda o: send_request("getLogs", [o, shard_id]) - - # no filter object as wild cards - resp = req({}) - self.assertEqual(1, len(resp)) - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - - # filter with from/to blocks - resp = req({"fromBlock": "0x0", "toBlock": "0x1"}) - self.assertEqual(1, len(resp)) - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - resp = req({"fromBlock": "0x0", "toBlock": "0x0"}) - self.assertEqual(0, len(resp)) - - # filter by contract address - contract_addr = mk_contract_address( - acc1.recipient, 0, acc1.full_shard_key - ) - filter_obj = { - "address": "0x" - + contract_addr.hex() - + ( - "" - if using_eth_endpoint - else hex(acc1.full_shard_key)[2:].zfill(8) - ) - } - resp = req(filter_obj) - self.assertEqual(1, len(resp)) - - # filter by topics - filter_obj = { - "topics": [ - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" - ] - } - filter_obj_nested = { - "topics": [ - [ - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" - ] - ] - } - for f in (filter_obj, filter_obj_nested): - resp = req(f) - self.assertEqual(1, len(resp)) - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - self.assertEqual( - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", - resp[0]["topics"][0], - ) - - # xshard creation and check logs: shard 0 -> shard 1 - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key + 1, - ) - self.assertTrue(slaves[0].add_tx(tx)) - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) # source shard - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - 
root_block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) # root chain - call_async(master.add_root_block(root_block)) - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b11) - ) # target shard - self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block))) - - req = lambda o: send_request("getLogs", [o, hex(0b11)]) - # no filter object as wild cards - resp = req({}) - self.assertEqual(1, len(resp)) - expected_log_parts["transactionIndex"] = "0x3" # after root block coinbase - expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() - expected_log_parts["blockHash"] = "0x" + block.header.get_hash().hex() - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - self.assertEqual(2, len(resp[0]["topics"])) - # missing shard ID should fail - for endpoint in ("getLogs", "eth_getLogs"): - with self.assertRaises(JsonRpcError): - send_request(endpoint, [{}]) - with self.assertRaises(JsonRpcError): - send_request(endpoint, [{}, None]) - - def test_estimateGas(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - payload = {"to": "0x" + acc1.serialize().hex()} - response = send_request("estimateGas", [payload]) - self.assertEqual(response, "0x5208") # 21000 - # cross-shard - from_addr = "0x" + acc1.address_in_shard(1).serialize().hex() - payload["from"] = from_addr - response = send_request("estimateGas", [payload]) - self.assertEqual(response, "0x7530") # 30000 - - def test_getStorageAt(self): - key = bytes.fromhex( - "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" - ) - id1 = Identity.create_from_key(key) - acc1 = Address.create_from_identity(id1, full_shard_key=0) - created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" - - with ClusterContext( - 1, acc1, 
small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = create_contract_with_storage_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for using_eth_endpoint in (True, False): - if using_eth_endpoint: - req = lambda k: send_request( - "eth_getStorageAt", [created_addr[:-8], k, "0x0"] - ) - else: - req = lambda k: send_request("getStorageAt", [created_addr, k]) - - # first storage - response = req("0x0") - # equals 1234 - self.assertEqual( - response, - "0x00000000000000000000000000000000000000000000000000000000000004d2", - ) - - # mapping storage - k = sha3_256( - bytes.fromhex(acc1.recipient.hex().zfill(64) + "1".zfill(64)) - ) - response = req("0x" + k.hex()) - self.assertEqual( - response, - "0x000000000000000000000000000000000000000000000000000000000000162e", - ) - - # doesn't exist - response = req("0x3") - self.assertEqual( - response, - "0x0000000000000000000000000000000000000000000000000000000000000000", - ) - - def test_getCode(self): - key = bytes.fromhex( - "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" - ) - id1 = Identity.create_from_key(key) - acc1 = Address.create_from_identity(id1, full_shard_key=0) - created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = create_contract_with_storage_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - 
to_full_shard_key=acc1.full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for using_eth_endpoint in (True, False): - if using_eth_endpoint: - resp = send_request("eth_getCode", [created_addr[:-8], "0x0"]) - else: - resp = send_request("getCode", [created_addr]) - - self.assertEqual( - resp, - "0x6080604052600080fd00a165627a7a72305820a6ef942c101f06333ac35072a8ff40332c71d0e11cd0e6d86de8cae7b42696550029", - ) - - def test_gasPrice(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - # run for multiple times - for _ in range(3): - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=0, - gas_price=12, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue( - call_async(clusters[0].get_shard(2 | 0).add_block(block)) - ) - - for using_eth_endpoint in (True, False): - if using_eth_endpoint: - resp = send_request("eth_gasPrice", ["0x0"]) - else: - resp = send_request( - "gasPrice", ["0x0", quantity_encoder(token_id_encode("QKC"))] - ) - - self.assertEqual(resp, "0xc") - - def test_getWork_and_submitWork(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = 
create_transfer_transaction( - shard_state=clusters[0].get_shard_state(1 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=0, - gas_price=12, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - for shard_id in ["0x0", None]: # shard, then root - resp = send_request("getWork", [shard_id]) - self.assertEqual(resp[1:], ["0x1", "0xa"]) # height and diff - - header_hash_hex = resp[0] - if shard_id is not None: # shard 0 - miner_address = Address.create_from( - master.env.quark_chain_config.shards[1].COINBASE_ADDRESS - ) - else: # root - miner_address = Address.create_from( - master.env.quark_chain_config.ROOT.COINBASE_ADDRESS - ) - block = call_async( - master.get_next_block_to_mine( - address=miner_address, branch_value=shard_id and 0b01 - ) - ) - # solve it and submit - work = MiningWork(bytes.fromhex(header_hash_hex[2:]), 1, 10) - solver = DoubleSHA256(work) - nonce = solver.mine(0, 10000).nonce - mixhash = "0x" + sha3_256(b"").hex() - resp = send_request( - "submitWork", - [ - shard_id, - header_hash_hex, - hex(nonce), - mixhash, - "0x" + bytes(65).hex(), - ], - ) - self.assertTrue(resp) - - # show progress on shard 0 - self.assertEqual( - clusters[0].get_shard_state(1 | 0).get_tip().header.height, 1 - ) - - def test_getWork_with_optional_diff_divider(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - shard = next(iter(slaves[0].shards.values())) - qkc_config = master.env.quark_chain_config - qkc_config.ROOT.CONSENSUS_TYPE = ConsensusType.POW_SIMULATE - - # add a root block first to init shard chains - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - qkc_config.ROOT.POSW_CONFIG.ENABLED = 
True - qkc_config.ROOT.POSW_CONFIG.ENABLE_TIMESTAMP = 0 - qkc_config.ROOT.POSW_CONFIG.WINDOW_SIZE = 2 - - shard.state.get_root_chain_stakes = lambda _1, _2: ( - qkc_config.ROOT.POSW_CONFIG.TOTAL_STAKE_PER_BLOCK, - acc1.recipient, - ) - - resp = send_request("getWork", [None]) - # height and diff, and returns the diff divider since it's PoSW mineable - self.assertEqual(resp[1:], ["0x2", "0xa", hex(1000)]) - - def test_createTransactions(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - loadtest_accounts = [ - { - "address": "b067ac9ebeeecb10bbcd1088317959d58d1e38f6b0ee10d5", - "key": "ca0143c9aa51c3013f08e83f3b6368a4f3ba5b52c4841c6e0c22c300f7ee6827", - }, - { - "address": "9f2b984937ff8e3f20d2a2592f342f47257870909fffa247", - "key": "40efdb8528de149c35fb43a572fc821d8fbdf2469dcc7fe1a9e847ef29e3c941", - }, - ] - - with ClusterContext( - 1, acc1, small_coinbase=True, loadtest_accounts=loadtest_accounts - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - master = clusters[0].master - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - send_request("createTransactions", {"numTxPerShard": 1, "xShardPercent": 0}) - - -# ------------------------------- Test for JSONRPCWebsocketServer ------------------------------- -@contextmanager -def jrpc_websocket_server_context(slave_server, port=38590): - env = DEFAULT_ENV.copy() - env.cluster_config = ClusterConfig() - env.cluster_config.JSON_RPC_PORT = 38391 - env.cluster_config.JSON_RPC_HOST = "127.0.0.1" - - env.slave_config = env.cluster_config.get_slave_config("S0") - env.slave_config.HOST = "0.0.0.0" - env.slave_config.WEBSOCKET_JSON_RPC_PORT = port - server = call_async(JSONRPCWebsocketServer.start_websocket_server(env, slave_server)) - try: - yield server - finally: - 
server.shutdown() - - -def send_websocket_request(request, num_response=1, port=38590): - responses = [] - - async def __send_request(request, port): - uri = "ws://0.0.0.0:" + str(port) - async with websockets.connect(uri) as websocket: - await websocket.send(request) - while True: - response = await websocket.recv() - responses.append(response) - if len(responses) == num_response: - return responses - - return call_async(__send_request(request, port)) - - -async def get_websocket(port=38590): - uri = "ws://0.0.0.0:" + str(port) - return await websockets.connect(uri) - - -class TestJSONRPCWebsocket(unittest.TestCase): - def test_new_heads(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context(clusters[0].slave_list[0]): - # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 - master = clusters[0].master - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00000002"], - "id": 3, - } - websocket = call_async(get_websocket()) - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 3) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - block_hash = block.header.get_hash() - block_height = block.header.height - - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual( - response["params"]["result"]["hash"], data_encoder(block_hash) - ) - self.assertEqual( - response["params"]["result"]["height"], quantity_encoder(block_height) - ) - - def test_new_heads_with_chain_reorg(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 
1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38591 - ): - websocket = call_async(get_websocket(port=38591)) - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00000002"], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 3) - - state = clusters[0].get_shard_state(2 | 0) - tip = state.get_tip() - - # no chain reorg at this point - b0 = state.create_block_to_mine(address=acc1) - state.finalize_and_add_block(b0) - self.assertEqual(state.header_tip, b0.header) - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["hash"], data_encoder(b0.header.get_hash()) - ) - - # fork happens - b1 = tip.create_block_to_append(address=acc1) - state.finalize_and_add_block(b1) - b2 = b1.create_block_to_append(address=acc1) - state.finalize_and_add_block(b2) - self.assertEqual(state.header_tip, b2.header) - - # new heads b1, b2 emitted from new chain - blocks = [b1, b2] - for b in blocks: - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["hash"], data_encoder(b.header.get_hash()) - ) - - def test_new_pending_xshard_tx_sender(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0x0) - acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38592 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - 
"params": ["newPendingTransactions", "0x00000002"], - "id": 6, - } - - websocket = call_async(get_websocket(38592)) - call_async(websocket.send(json.dumps(request))) - - sub_response = json.loads(call_async(websocket.recv())) - self.assertEqual(sub_response["id"], 6) - self.assertEqual(len(sub_response["result"]), 34) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - tx_response = json.loads(call_async(websocket.recv())) - self.assertEqual( - tx_response["params"]["subscription"], sub_response["result"] - ) - self.assertTrue(tx_response["params"]["result"], tx.get_hash()) - - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) - - def test_new_pending_xshard_tx_target(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0x10001) - acc2 = Address.create_from_identity(id1, full_shard_key=0x0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38593 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": 6, - } - websocket = call_async(get_websocket(38593)) - call_async(websocket.send(json.dumps(request))) - - sub_response = json.loads(call_async(websocket.recv())) - self.assertEqual(sub_response["id"], 6) - self.assertEqual(len(sub_response["result"]), 34) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(0x10003), - key=id1.get_key(), - from_address=acc1, 
- to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[1].add_tx(tx)) - - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0x10003) - ) - self.assertTrue(call_async(clusters[0].get_shard(0x10003).add_block(b1))) - - tx_response = json.loads(call_async(websocket.recv())) - self.assertEqual( - tx_response["params"]["subscription"], sub_response["result"] - ) - self.assertTrue(tx_response["params"]["result"], tx.get_hash()) - - def test_new_pending_tx_same_acc_multi_subscriptions(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0x0) - acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38594 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - requests = [] - REQ_NUM = 5 - for i in range(REQ_NUM): - req = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": i, - } - requests.append(req) - - websocket = call_async(get_websocket(38594)) - [call_async(websocket.send(json.dumps(req))) for req in requests] - sub_responses = [json.loads(call_async(websocket.recv())) for _ in requests] - - for i, resp in enumerate(sub_responses): - self.assertEqual(resp["id"], i) - self.assertEqual(len(resp["result"]), 34) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - tx_responses = [json.loads(call_async(websocket.recv())) for _ in requests] - for i, resp in enumerate(tx_responses): - self.assertEqual( - resp["params"]["subscription"], 
sub_responses[i]["result"] - ) - self.assertTrue(resp["params"]["result"], tx.get_hash()) - - def test_new_pending_tx_with_reorg(self): - id1 = Identity.create_random_identity() - id2 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_from_identity(id2, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38595 - ): - websocket = call_async(get_websocket(port=38595)) - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - - sub_response = json.loads(call_async(websocket.recv())) - self.assertEqual(sub_response["id"], 3) - self.assertEqual(len(sub_response["result"]), 34) - - state = clusters[0].get_shard_state(2 | 0) - tip = state.get_tip() - - tx = create_transfer_transaction( - shard_state=state, - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(state.add_tx(tx)) - tx_response1 = json.loads(call_async(websocket.recv())) - self.assertEqual( - tx_response1["params"]["subscription"], sub_response["result"] - ) - self.assertTrue(tx_response1["params"]["result"], tx.get_hash()) - - b0 = state.create_block_to_mine() - state.finalize_and_add_block(b0) - b1 = tip.create_block_to_append() - state.finalize_and_add_block(b1) - b2 = b1.create_block_to_append() - state.finalize_and_add_block(b2) # fork should happen, b0-b2 is picked up - - tx_response2 = json.loads(call_async(websocket.recv())) - self.assertEqual(state.header_tip, b2.header) - self.assertEqual(tx_response2, tx_response1) - - def test_logs(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - expected_log_parts = { - "logIndex": "0x0", - "transactionIndex": "0x0", - 
"blockNumber": "0x1", - "blockHeight": "0x1", - "data": "0x", - } - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38596 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - websocket = call_async(get_websocket(port=38596)) - - # filter by contract address - contract_addr = mk_contract_address(acc1.recipient, 0, acc1.full_shard_key) - filter_req = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": [ - "logs", - "0x00000002", - { - "address": "0x" - + contract_addr.hex() - + hex(acc1.full_shard_key)[2:].zfill(8) - }, - ], - "id": 4, - } - call_async(websocket.send(json.dumps(filter_req))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 4) - - # filter by topics - filter_req = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": [ - "logs", - "0x00000002", - { - "topics": [ - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" - ] - }, - ], - "id": 5, - } - call_async(websocket.send(json.dumps(filter_req))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 5) - - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine( - address=acc1, branch_value=0b10 - ) # branch_value = 2 - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - count = 0 - while count < 2: - response = call_async(websocket.recv()) - count += 1 - d = json.loads(response) - self.assertTrue(expected_log_parts.items() <= d["params"]["result"].items()) - self.assertEqual( 
- "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", - d["params"]["result"]["topics"][0], - ) - self.assertEqual(count, 2) - - def test_log_removed_flag_with_chain_reorg(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38597 - ): - websocket = call_async(get_websocket(port=38597)) - - # a log subscriber with no-filter request - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["logs", "0x00000002", {}], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 3) - - state = clusters[0].get_shard_state(2 | 0) - tip = state.get_tip() - b0 = state.create_block_to_mine(address=acc1) - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - b0.add_tx(tx) - state.finalize_and_add_block(b0) - self.assertEqual(state.header_tip, b0.header) - tx_hash = tx.get_hash() - - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["transactionHash"], data_encoder(tx_hash) - ) - self.assertEqual(d["params"]["result"]["removed"], False) - - # fork happens - b1 = tip.create_block_to_append(address=acc1) - b1.add_tx(tx) - state.finalize_and_add_block(b1) - b2 = b1.create_block_to_append(address=acc1) - state.finalize_and_add_block(b2) - self.assertEqual(state.header_tip, b2.header) - - # log emitted from old chain, flag is set to True - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["transactionHash"], data_encoder(tx_hash) - ) - 
self.assertEqual(d["params"]["result"]["removed"], True) - - # log emitted from new chain - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["transactionHash"], data_encoder(tx_hash) - ) - self.assertEqual(d["params"]["result"]["removed"], False) - - def test_invalid_subscription(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38598 - ): - # Invalid subscription type - request1 = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newBlocks", "0x00000002"], - "id": 3, - } - # Invalid full shard id - request2 = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00040002"], - "id": 3, - } - - websocket = call_async(get_websocket(port=38598)) - [ - call_async(websocket.send(json.dumps(req))) - for req in [request1, request2] - ] - responses = [json.loads(call_async(websocket.recv())) for _ in range(2)] - [self.assertTrue(resp["error"]) for resp in responses] # emit error message - - def test_multi_subs_with_some_unsubs_in_one_ws_conn(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38599 - ): - # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 - master = clusters[0].master - websocket = call_async(get_websocket(port=38599)) - - # make 3 subscriptions on new heads - ids = [3, 4, 5] - sub_ids = [] - for id in ids: - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00000002"], - "id": id, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - 
sub_ids.append(response["result"]) - self.assertEqual(response["id"], id) - - # cancel the first subscription - request = { - "jsonrpc": "2.0", - "method": "unsubscribe", - "params": [sub_ids[0]], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["result"], True) # unsubscribed successfully - - # add a new block, should expect only 2 responses - root_block = call_async( - master.get_next_block_to_mine(acc1, branch_value=None) - ) - call_async(master.add_root_block(root_block)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for sub_id in sub_ids[1:]: - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["params"]["subscription"], sub_id) - - def test_unsubscribe(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38600 - ): - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": 6, - } - websocket = call_async(get_websocket(port=38600)) - call_async(websocket.send(json.dumps(request))) - sub_response = json.loads(call_async(websocket.recv())) - - # Check subscription response - self.assertEqual(sub_response["id"], 6) - self.assertEqual(len(sub_response["result"]), 34) - - unsubscribe = { - "jsonrpc": "2.0", - "method": "unsubscribe", - "params": [sub_response["result"]], - "id": 3, - } - - # Unsubscribe successfully - call_async(websocket.send(json.dumps(unsubscribe))) - response = json.loads(call_async(websocket.recv())) - self.assertTrue(response["result"]) - self.assertEqual(response["id"], 3) - - # Invalid 
unsubscription if sub_id does not exist - call_async(websocket.send(json.dumps(unsubscribe))) - response = json.loads(call_async(websocket.recv())) - self.assertTrue(response["error"]) +import json +import unittest +from contextlib import contextmanager +import websockets + +from quarkchain.cluster.cluster_config import ClusterConfig +from quarkchain.cluster.jsonrpc import ( + EMPTY_TX_ID, + JSONRPCHttpServer, + JSONRPCWebsocketServer, + quantity_encoder, + data_encoder, +) +from quarkchain.cluster.miner import DoubleSHA256, MiningWork +from quarkchain.cluster.tests.test_utils import ( + create_transfer_transaction, + ClusterContext, + create_contract_creation_transaction, + create_contract_creation_with_event_transaction, + create_contract_with_storage_transaction, +) +from quarkchain.config import ConsensusType +from quarkchain.core import ( + Address, + Identity, + SerializedEvmTransaction, + TypedTransaction, +) +from quarkchain.env import DEFAULT_ENV +from quarkchain.evm.messages import mk_contract_address +from quarkchain.evm.transactions import Transaction as EvmTransaction +from quarkchain.utils import call_async, sha3_256, token_id_encode +from quarkchain.jsonrpc_client import AsyncJsonRpcClient, JsonRpcError + + +@contextmanager +def jrpc_http_server_context(master): + env = DEFAULT_ENV.copy() + env.cluster_config = ClusterConfig() + env.cluster_config.JSON_RPC_PORT = 38391 + # to pass the circleCi + env.cluster_config.JSON_RPC_HOST = "127.0.0.1" + server = call_async(JSONRPCHttpServer.start_test_server(env, master)) + try: + yield server + finally: + call_async(server.shutdown()) + + +rpc_client = AsyncJsonRpcClient("http://localhost:38391") + +def send_request(method, params=None): + if params is None: + params = [] + if isinstance(params, dict): + return call_async(rpc_client.call_with_dict_params(method, params)) + return call_async(rpc_client.call(method, *params)) + + +class TestJSONRPCHttp(unittest.TestCase): + def test_getTransactionCount(self): + 
id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_random_account(full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + stats = call_async(master.get_stats()) + self.assertTrue("posw" in json.dumps(stats)) + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + for i in range(3): + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertEqual(i + 1, block.header.height) + self.assertTrue( + call_async(clusters[0].get_shard(2 | 0).add_block(block)) + ) + + response = send_request( + "getTransactionCount", ["0x" + acc2.serialize().hex()] + ) + self.assertEqual(response, "0x0") + + response = send_request( + "getTransactionCount", ["0x" + acc1.serialize().hex()] + ) + self.assertEqual(response, "0x3") + response = send_request( + "getTransactionCount", ["0x" + acc1.serialize().hex(), "latest"] + ) + self.assertEqual(response, "0x3") + + for i in range(3): + response = send_request( + "getTransactionCount", ["0x" + acc1.serialize().hex(), hex(i + 1)] + ) + self.assertEqual(response, hex(i + 1)) + + def test_getBalance(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + response = send_request("getBalances", ["0x" + acc1.serialize().hex()]) + self.assertListEqual( + response["balances"], + [{"tokenId": "0x8bb0", "tokenStr": "QKC", "balance": "0xf4240"}], + ) + + response = 
send_request("eth_getBalance", ["0x" + acc1.recipient.hex()]) + self.assertEqual(response, "0xf4240") + + def test_sendTransaction(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_random_account(full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + evm_tx = EvmTransaction( + nonce=0, + gasprice=6, + startgas=30000, + to=acc2.recipient, + value=15, + data=b"", + from_full_shard_key=acc1.full_shard_key, + to_full_shard_key=acc2.full_shard_key, + network_id=slaves[0].env.quark_chain_config.NETWORK_ID, + gas_token_id=master.env.quark_chain_config.genesis_token, + transfer_token_id=master.env.quark_chain_config.genesis_token, + ) + evm_tx.sign(id1.get_key()) + request = dict( + to="0x" + acc2.recipient.hex(), + gasPrice="0x6", + gas=hex(30000), + value="0xf", # 15 + v=quantity_encoder(evm_tx.v), + r=quantity_encoder(evm_tx.r), + s=quantity_encoder(evm_tx.s), + nonce="0x0", + fromFullShardKey="0x00000000", + toFullShardKey="0x00000001", + network_id=hex(slaves[0].env.quark_chain_config.NETWORK_ID), + ) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + response = send_request("sendTransaction", [request]) + + self.assertEqual(response, "0x" + tx.get_hash().hex() + "00000000") + state = clusters[0].get_shard_state(2 | 0) + self.assertEqual(len(state.tx_queue), 1) + self.assertEqual( + state.tx_queue.pop_transaction( + state.get_transaction_count + ).tx.to_evm_tx(), + evm_tx, + ) + + def test_sendTransaction_with_bad_signature(self): + """ sendTransaction validates signature """ + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 
= Address.create_random_account(full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = dict( + to="0x" + acc2.recipient.hex(), + gasPrice="0x6", + gas=hex(30000), + value="0xf", + v="0x1", + r="0x2", + s="0x3", + nonce="0x0", + fromFullShardKey="0x00000000", + toFullShardKey="0x00000001", + ) + self.assertEqual(send_request("sendTransaction", [request]), EMPTY_TX_ID) + self.assertEqual(len(clusters[0].get_shard_state(2 | 0).tx_queue), 0) + + def test_sendTransaction_missing_from_full_shard_key(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = dict( + to="0x" + acc1.recipient.hex(), + gasPrice="0x6", + gas=hex(30000), + value="0xf", + v="0x1", + r="0x2", + s="0x3", + nonce="0x0", + ) + + with self.assertRaises(Exception): + send_request("sendTransaction", [request]) + + def test_getMinorBlock(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + 
self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + # By id + for need_extra_info in [True, False]: + resp = send_request( + "getMinorBlockById", + [ + "0x" + block1.header.get_hash().hex() + "0" * 8, + False, + need_extra_info, + ], + ) + self.assertEqual( + resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" + ) + + resp = send_request( + "getMinorBlockById", + ["0x" + block1.header.get_hash().hex() + "0" * 8, True], + ) + self.assertEqual( + resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() + ) + + resp = send_request("getMinorBlockById", ["0x" + "ff" * 36, True]) + self.assertIsNone(resp) + + # By height + for need_extra_info in [True, False]: + resp = send_request( + "getMinorBlockByHeight", ["0x0", "0x1", False, need_extra_info] + ) + self.assertEqual( + resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" + ) + + resp = send_request("getMinorBlockByHeight", ["0x0", "0x1", True]) + self.assertEqual( + resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() + ) + + resp = send_request("getMinorBlockByHeight", ["0x1", "0x2", False]) + self.assertIsNone(resp) + resp = send_request("getMinorBlockByHeight", ["0x0", "0x4", False]) + self.assertIsNone(resp) + + def test_getRootblockConfirmationIdAndCount(self): + # TODO test root chain forks + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + tx = 
create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + tx_id = ( + "0x" + + tx.get_hash().hex() + + acc1.full_shard_key.to_bytes(4, "big").hex() + ) + resp = send_request("getTransactionById", [tx_id]) + self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) + self.assertEqual( + resp["blockId"], + "0x" + + block1.header.get_hash().hex() + + block1.header.branch.get_full_shard_id() + .to_bytes(4, byteorder="big") + .hex(), + ) + minor_hash = resp["blockId"] + + # zero root block confirmation + resp_hash = send_request( + "getRootHashConfirmingMinorBlockById", [minor_hash] + ) + self.assertIsNone( + resp_hash, "should return None for unconfirmed minor blocks" + ) + resp_count = send_request( + "getTransactionConfirmedByNumberRootBlocks", [tx_id] + ) + self.assertEqual(resp_count, "0x0") + + # 1 root block confirmation + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + resp_hash = send_request( + "getRootHashConfirmingMinorBlockById", [minor_hash] + ) + self.assertIsNotNone(resp_hash, "confirmed by root block") + self.assertEqual(resp_hash, "0x" + block.header.get_hash().hex()) + resp_count = send_request( + "getTransactionConfirmedByNumberRootBlocks", [tx_id] + ) + self.assertEqual(resp_count, "0x1") + + # 2 root block confirmation + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + resp_hash = send_request( + "getRootHashConfirmingMinorBlockById", [minor_hash] + ) + self.assertIsNotNone(resp_hash, "confirmed by root block") + self.assertNotEqual(resp_hash, "0x" + 
block.header.get_hash().hex()) + resp_count = send_request( + "getTransactionConfirmedByNumberRootBlocks", [tx_id] + ) + self.assertEqual(resp_count, "0x2") + + def test_getTransactionById(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + resp = send_request( + "getTransactionById", + [ + "0x" + + tx.get_hash().hex() + + acc1.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) + + def test_call_success(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + + response = send_request( + "call", [{"to": "0x" + acc1.serialize().hex(), "gas": hex(21000)}] + ) + + self.assertEqual(response, "0x") + self.assertEqual( + len(clusters[0].get_shard_state(2 | 0).tx_queue), + 0, + "should not affect tx queue", + ) + + def test_call_success_default_gas(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = 
clusters[0].slave_list + + # gas is not specified in the request + response = send_request( + "call", [{"to": "0x" + acc1.serialize().hex()}, "latest"] + ) + + self.assertEqual(response, "0x") + self.assertEqual( + len(clusters[0].get_shard_state(2 | 0).tx_queue), + 0, + "should not affect tx queue", + ) + + def test_call_failure(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + + # insufficient gas + response = send_request( + "call", [{"to": "0x" + acc1.serialize().hex(), "gas": "0x1"}, None] + ) + + self.assertIsNone(response, "failed tx should return None") + self.assertEqual( + len(clusters[0].get_shard_state(2 | 0).tx_queue), + 0, + "should not affect tx queue", + ) + + def test_getTransactionReceipt_not_exist(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + bytes(36).hex()]) + self.assertIsNone(resp) + + def test_getTransactionReceipt_on_transfer(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + 
self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request( + endpoint, + [ + "0x" + + tx.get_hash().hex() + + acc1.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0x5208") + self.assertIsNone(resp["contractAddress"]) + + def test_getTransactionReceipt_on_xshard_transfer_before_enabling_EVM(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_from_identity(id1, full_shard_key=0x00010000) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + # disable EVM to have fake xshard receipts + master.env.quark_chain_config.ENABLE_EVM_TIMESTAMP = 2 ** 64 - 1 + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + s1, s2 = ( + clusters[0].get_shard_state(2 | 0), + clusters[0].get_shard_state(0x00010002), + ) + tx_gen = lambda s, f, t: create_transfer_transaction( + shard_state=s, + key=id1.get_key(), + from_address=f, + to_address=t, + gas=21000 if f == t else 30000, + value=12345, + ) + tx1 = tx_gen(s1, acc1, acc2) + self.assertTrue(slaves[0].add_tx(tx1)) + b1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) + + root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + + call_async(master.add_root_block(root_block)) + + tx2 = tx_gen(s2, acc2, acc2) + self.assertTrue(slaves[0].add_tx(tx2)) + b3 = call_async( + master.get_next_block_to_mine(address=acc2, 
branch_value=0x00010002) + ) + self.assertTrue(call_async(clusters[0].get_shard(0x00010002).add_block(b3))) + + # in-shard tx 21000 + receiving x-shard tx 9000 + self.assertEqual(s2.evm_state.gas_used, 30000) + self.assertEqual(s2.evm_state.xshard_receive_gas_used, 9000) + + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request( + endpoint, + [ + "0x" + + tx2.get_hash().hex() + + acc2.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["transactionHash"], "0x" + tx2.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], hex(30000)) + self.assertEqual(resp["gasUsed"], hex(21000)) + self.assertIsNone(resp["contractAddress"]) + + # query xshard tx receipt on the target shard + resp = send_request( + endpoint, + [ + "0x" + + tx1.get_hash().hex() + + acc2.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["status"], "0x1") + # other fields are fake + self.assertEqual(resp["cumulativeGasUsed"], hex(0)) + self.assertEqual(resp["gasUsed"], hex(0)) + + def test_getTransactionReceipt_on_xshard_transfer_after_enabling_EVM(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_from_identity(id1, full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + s1, s2 = ( + clusters[0].get_shard_state(2 | 0), + clusters[0].get_shard_state(2 | 1), + ) + tx = create_transfer_transaction( + shard_state=s1, + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + # source shard + b1 = call_async( + 
master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) + # root chain + root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + # target shard + b3 = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=0b11) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(b3))) + + # query xshard tx receipt on the target shard + resp = send_request( + "getTransactionReceipt", + [ + "0x" + + tx.get_hash().hex() + + acc2.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["transactionIndex"], "0x3") + self.assertEqual(resp["cumulativeGasUsed"], hex(9000)) + self.assertEqual(resp["gasUsed"], hex(9000)) + + def test_getTransactionReceipt_on_contract_creation(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + to_full_shard_key = acc1.full_shard_key + 2 + tx = create_contract_creation_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=to_full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0x213eb") + + 
contract_address = mk_contract_address( + acc1.recipient, 0, to_full_shard_key + ) + self.assertEqual( + resp["contractAddress"], + "0x" + + contract_address.hex() + + to_full_shard_key.to_bytes(4, "big").hex(), + ) + + def test_getTransactionReceipt_on_xshard_contract_creation(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # Add a root block to update block gas limit for xshard tx throttling + # so that the following tx can be processed + root_block = call_async( + master.get_next_block_to_mine(acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + + to_full_shard_key = acc1.full_shard_key + 1 + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=to_full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0x11374") + self.assertIsNone(resp["contractAddress"]) + + # x-shard contract creation should succeed. 
check target shard + root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) # root chain + call_async(master.add_root_block(root_block)) + block2 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b11) + ) # target shard + self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block2))) + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000003"]) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0xc515") + self.assertIsNotNone(resp["contractAddress"]) + + def test_getLogs(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + expected_log_parts = { + "logIndex": "0x0", + "transactionIndex": "0x0", + "blockNumber": "0x1", + "blockHeight": "0x1", + "data": "0x", + } + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # Add a root block to update block gas limit for xshard tx throttling + # so that the following tx can be processed + root_block = call_async( + master.get_next_block_to_mine(acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for using_eth_endpoint in (True, False): + 
shard_id = hex(acc1.full_shard_key) + if using_eth_endpoint: + req = lambda o: send_request("eth_getLogs", [o, shard_id]) + else: + # `None` needed to bypass some request modification + req = lambda o: send_request("getLogs", [o, shard_id]) + + # no filter object as wild cards + resp = req({}) + self.assertEqual(1, len(resp)) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + + # filter with from/to blocks + resp = req({"fromBlock": "0x0", "toBlock": "0x1"}) + self.assertEqual(1, len(resp)) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + resp = req({"fromBlock": "0x0", "toBlock": "0x0"}) + self.assertEqual(0, len(resp)) + + # filter by contract address + contract_addr = mk_contract_address( + acc1.recipient, 0, acc1.full_shard_key + ) + filter_obj = { + "address": "0x" + + contract_addr.hex() + + ( + "" + if using_eth_endpoint + else hex(acc1.full_shard_key)[2:].zfill(8) + ) + } + resp = req(filter_obj) + self.assertEqual(1, len(resp)) + + # filter by topics + filter_obj = { + "topics": [ + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" + ] + } + filter_obj_nested = { + "topics": [ + [ + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" + ] + ] + } + for f in (filter_obj, filter_obj_nested): + resp = req(f) + self.assertEqual(1, len(resp)) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + self.assertEqual( + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", + resp[0]["topics"][0], + ) + + # xshard creation and check logs: shard 0 -> shard 1 + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key + 1, + ) + self.assertTrue(slaves[0].add_tx(tx)) + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) # source shard + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + 
root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) # root chain + call_async(master.add_root_block(root_block)) + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b11) + ) # target shard + self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block))) + + req = lambda o: send_request("getLogs", [o, hex(0b11)]) + # no filter object as wild cards + resp = req({}) + self.assertEqual(1, len(resp)) + expected_log_parts["transactionIndex"] = "0x3" # after root block coinbase + expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() + expected_log_parts["blockHash"] = "0x" + block.header.get_hash().hex() + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + self.assertEqual(2, len(resp[0]["topics"])) + # missing shard ID should fail + for endpoint in ("getLogs", "eth_getLogs"): + with self.assertRaises(JsonRpcError): + send_request(endpoint, [{}]) + with self.assertRaises(JsonRpcError): + send_request(endpoint, [{}, None]) + + def test_estimateGas(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + payload = {"to": "0x" + acc1.serialize().hex()} + response = send_request("estimateGas", [payload]) + self.assertEqual(response, "0x5208") # 21000 + # cross-shard + from_addr = "0x" + acc1.address_in_shard(1).serialize().hex() + payload["from"] = from_addr + response = send_request("estimateGas", [payload]) + self.assertEqual(response, "0x7530") # 30000 + + def test_getStorageAt(self): + key = bytes.fromhex( + "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" + ) + id1 = Identity.create_from_key(key) + acc1 = Address.create_from_identity(id1, full_shard_key=0) + created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" + + with ClusterContext( + 1, acc1, 
small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = create_contract_with_storage_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for using_eth_endpoint in (True, False): + if using_eth_endpoint: + req = lambda k: send_request( + "eth_getStorageAt", [created_addr[:-8], k, "0x0"] + ) + else: + req = lambda k: send_request("getStorageAt", [created_addr, k]) + + # first storage + response = req("0x0") + # equals 1234 + self.assertEqual( + response, + "0x00000000000000000000000000000000000000000000000000000000000004d2", + ) + + # mapping storage + k = sha3_256( + bytes.fromhex(acc1.recipient.hex().zfill(64) + "1".zfill(64)) + ) + response = req("0x" + k.hex()) + self.assertEqual( + response, + "0x000000000000000000000000000000000000000000000000000000000000162e", + ) + + # doesn't exist + response = req("0x3") + self.assertEqual( + response, + "0x0000000000000000000000000000000000000000000000000000000000000000", + ) + + def test_getCode(self): + key = bytes.fromhex( + "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" + ) + id1 = Identity.create_from_key(key) + acc1 = Address.create_from_identity(id1, full_shard_key=0) + created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = create_contract_with_storage_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + 
to_full_shard_key=acc1.full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for using_eth_endpoint in (True, False): + if using_eth_endpoint: + resp = send_request("eth_getCode", [created_addr[:-8], "0x0"]) + else: + resp = send_request("getCode", [created_addr]) + + self.assertEqual( + resp, + "0x6080604052600080fd00a165627a7a72305820a6ef942c101f06333ac35072a8ff40332c71d0e11cd0e6d86de8cae7b42696550029", + ) + + def test_gasPrice(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # run for multiple times + for _ in range(3): + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=0, + gas_price=12, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue( + call_async(clusters[0].get_shard(2 | 0).add_block(block)) + ) + + for using_eth_endpoint in (True, False): + if using_eth_endpoint: + resp = send_request("eth_gasPrice", ["0x0"]) + else: + resp = send_request( + "gasPrice", ["0x0", quantity_encoder(token_id_encode("QKC"))] + ) + + self.assertEqual(resp, "0xc") + + def test_getWork_and_submitWork(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = 
create_transfer_transaction( + shard_state=clusters[0].get_shard_state(1 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=0, + gas_price=12, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + for shard_id in ["0x0", None]: # shard, then root + resp = send_request("getWork", [shard_id]) + self.assertEqual(resp[1:], ["0x1", "0xa"]) # height and diff + + header_hash_hex = resp[0] + if shard_id is not None: # shard 0 + miner_address = Address.create_from( + master.env.quark_chain_config.shards[1].COINBASE_ADDRESS + ) + else: # root + miner_address = Address.create_from( + master.env.quark_chain_config.ROOT.COINBASE_ADDRESS + ) + block = call_async( + master.get_next_block_to_mine( + address=miner_address, branch_value=shard_id and 0b01 + ) + ) + # solve it and submit + work = MiningWork(bytes.fromhex(header_hash_hex[2:]), 1, 10) + solver = DoubleSHA256(work) + nonce = solver.mine(0, 10000).nonce + mixhash = "0x" + sha3_256(b"").hex() + resp = send_request( + "submitWork", + [ + shard_id, + header_hash_hex, + hex(nonce), + mixhash, + "0x" + bytes(65).hex(), + ], + ) + self.assertTrue(resp) + + # show progress on shard 0 + self.assertEqual( + clusters[0].get_shard_state(1 | 0).get_tip().header.height, 1 + ) + + def test_getWork_with_optional_diff_divider(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + shard = next(iter(slaves[0].shards.values())) + qkc_config = master.env.quark_chain_config + qkc_config.ROOT.CONSENSUS_TYPE = ConsensusType.POW_SIMULATE + + # add a root block first to init shard chains + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + qkc_config.ROOT.POSW_CONFIG.ENABLED = 
True + qkc_config.ROOT.POSW_CONFIG.ENABLE_TIMESTAMP = 0 + qkc_config.ROOT.POSW_CONFIG.WINDOW_SIZE = 2 + + shard.state.get_root_chain_stakes = lambda _1, _2: ( + qkc_config.ROOT.POSW_CONFIG.TOTAL_STAKE_PER_BLOCK, + acc1.recipient, + ) + + resp = send_request("getWork", [None]) + # height and diff, and returns the diff divider since it's PoSW mineable + self.assertEqual(resp[1:], ["0x2", "0xa", hex(1000)]) + + def test_createTransactions(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_random_account(full_shard_key=1) + + loadtest_accounts = [ + { + "address": "b067ac9ebeeecb10bbcd1088317959d58d1e38f6b0ee10d5", + "key": "ca0143c9aa51c3013f08e83f3b6368a4f3ba5b52c4841c6e0c22c300f7ee6827", + }, + { + "address": "9f2b984937ff8e3f20d2a2592f342f47257870909fffa247", + "key": "40efdb8528de149c35fb43a572fc821d8fbdf2469dcc7fe1a9e847ef29e3c941", + }, + ] + + with ClusterContext( + 1, acc1, small_coinbase=True, loadtest_accounts=loadtest_accounts + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + send_request("createTransactions", {"numTxPerShard": 1, "xShardPercent": 0}) + + +# ------------------------------- Test for JSONRPCWebsocketServer ------------------------------- +@contextmanager +def jrpc_websocket_server_context(slave_server, port=38590): + env = DEFAULT_ENV.copy() + env.cluster_config = ClusterConfig() + env.cluster_config.JSON_RPC_PORT = 38391 + env.cluster_config.JSON_RPC_HOST = "127.0.0.1" + + env.slave_config = env.cluster_config.get_slave_config("S0") + env.slave_config.HOST = "0.0.0.0" + env.slave_config.WEBSOCKET_JSON_RPC_PORT = port + server = call_async(JSONRPCWebsocketServer.start_websocket_server(env, slave_server)) + try: + yield server + finally: + 
server.shutdown() + + +def send_websocket_request(request, num_response=1, port=38590): + responses = [] + + async def __send_request(request, port): + uri = "ws://0.0.0.0:" + str(port) + async with websockets.connect(uri) as websocket: + await websocket.send(request) + while True: + response = await websocket.recv() + responses.append(response) + if len(responses) == num_response: + return responses + + return call_async(__send_request(request, port)) + + +async def get_websocket(port=38590): + uri = "ws://0.0.0.0:" + str(port) + return await websockets.connect(uri) + + +class TestJSONRPCWebsocket(unittest.TestCase): + def test_new_heads(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context(clusters[0].slave_list[0]): + # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 + master = clusters[0].master + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00000002"], + "id": 3, + } + websocket = call_async(get_websocket()) + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 3) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + block_hash = block.header.get_hash() + block_height = block.header.height + + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual( + response["params"]["result"]["hash"], data_encoder(block_hash) + ) + self.assertEqual( + response["params"]["result"]["height"], quantity_encoder(block_height) + ) + + def test_new_heads_with_chain_reorg(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 
1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38591 + ): + websocket = call_async(get_websocket(port=38591)) + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00000002"], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 3) + + state = clusters[0].get_shard_state(2 | 0) + tip = state.get_tip() + + # no chain reorg at this point + b0 = state.create_block_to_mine(address=acc1) + state.finalize_and_add_block(b0) + self.assertEqual(state.header_tip, b0.header) + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["hash"], data_encoder(b0.header.get_hash()) + ) + + # fork happens + b1 = tip.create_block_to_append(address=acc1) + state.finalize_and_add_block(b1) + b2 = b1.create_block_to_append(address=acc1) + state.finalize_and_add_block(b2) + self.assertEqual(state.header_tip, b2.header) + + # new heads b1, b2 emitted from new chain + blocks = [b1, b2] + for b in blocks: + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["hash"], data_encoder(b.header.get_hash()) + ) + + def test_new_pending_xshard_tx_sender(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0x0) + acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38592 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + 
"params": ["newPendingTransactions", "0x00000002"], + "id": 6, + } + + websocket = call_async(get_websocket(38592)) + call_async(websocket.send(json.dumps(request))) + + sub_response = json.loads(call_async(websocket.recv())) + self.assertEqual(sub_response["id"], 6) + self.assertEqual(len(sub_response["result"]), 34) + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + tx_response = json.loads(call_async(websocket.recv())) + self.assertEqual( + tx_response["params"]["subscription"], sub_response["result"] + ) + self.assertTrue(tx_response["params"]["result"], tx.get_hash()) + + b1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) + + def test_new_pending_xshard_tx_target(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0x10001) + acc2 = Address.create_from_identity(id1, full_shard_key=0x0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38593 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": 6, + } + websocket = call_async(get_websocket(38593)) + call_async(websocket.send(json.dumps(request))) + + sub_response = json.loads(call_async(websocket.recv())) + self.assertEqual(sub_response["id"], 6) + self.assertEqual(len(sub_response["result"]), 34) + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(0x10003), + key=id1.get_key(), + from_address=acc1, 
+ to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[1].add_tx(tx)) + + b1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0x10003) + ) + self.assertTrue(call_async(clusters[0].get_shard(0x10003).add_block(b1))) + + tx_response = json.loads(call_async(websocket.recv())) + self.assertEqual( + tx_response["params"]["subscription"], sub_response["result"] + ) + self.assertTrue(tx_response["params"]["result"], tx.get_hash()) + + def test_new_pending_tx_same_acc_multi_subscriptions(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0x0) + acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38594 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + requests = [] + REQ_NUM = 5 + for i in range(REQ_NUM): + req = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": i, + } + requests.append(req) + + websocket = call_async(get_websocket(38594)) + [call_async(websocket.send(json.dumps(req))) for req in requests] + sub_responses = [json.loads(call_async(websocket.recv())) for _ in requests] + + for i, resp in enumerate(sub_responses): + self.assertEqual(resp["id"], i) + self.assertEqual(len(resp["result"]), 34) + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + tx_responses = [json.loads(call_async(websocket.recv())) for _ in requests] + for i, resp in enumerate(tx_responses): + self.assertEqual( + resp["params"]["subscription"], 
sub_responses[i]["result"] + ) + self.assertTrue(resp["params"]["result"], tx.get_hash()) + + def test_new_pending_tx_with_reorg(self): + id1 = Identity.create_random_identity() + id2 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_from_identity(id2, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38595 + ): + websocket = call_async(get_websocket(port=38595)) + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + + sub_response = json.loads(call_async(websocket.recv())) + self.assertEqual(sub_response["id"], 3) + self.assertEqual(len(sub_response["result"]), 34) + + state = clusters[0].get_shard_state(2 | 0) + tip = state.get_tip() + + tx = create_transfer_transaction( + shard_state=state, + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(state.add_tx(tx)) + tx_response1 = json.loads(call_async(websocket.recv())) + self.assertEqual( + tx_response1["params"]["subscription"], sub_response["result"] + ) + self.assertTrue(tx_response1["params"]["result"], tx.get_hash()) + + b0 = state.create_block_to_mine() + state.finalize_and_add_block(b0) + b1 = tip.create_block_to_append() + state.finalize_and_add_block(b1) + b2 = b1.create_block_to_append() + state.finalize_and_add_block(b2) # fork should happen, b0-b2 is picked up + + tx_response2 = json.loads(call_async(websocket.recv())) + self.assertEqual(state.header_tip, b2.header) + self.assertEqual(tx_response2, tx_response1) + + def test_logs(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + expected_log_parts = { + "logIndex": "0x0", + "transactionIndex": "0x0", + 
"blockNumber": "0x1", + "blockHeight": "0x1", + "data": "0x", + } + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38596 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + websocket = call_async(get_websocket(port=38596)) + + # filter by contract address + contract_addr = mk_contract_address(acc1.recipient, 0, acc1.full_shard_key) + filter_req = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": [ + "logs", + "0x00000002", + { + "address": "0x" + + contract_addr.hex() + + hex(acc1.full_shard_key)[2:].zfill(8) + }, + ], + "id": 4, + } + call_async(websocket.send(json.dumps(filter_req))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 4) + + # filter by topics + filter_req = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": [ + "logs", + "0x00000002", + { + "topics": [ + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" + ] + }, + ], + "id": 5, + } + call_async(websocket.send(json.dumps(filter_req))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 5) + + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine( + address=acc1, branch_value=0b10 + ) # branch_value = 2 + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + count = 0 + while count < 2: + response = call_async(websocket.recv()) + count += 1 + d = json.loads(response) + self.assertTrue(expected_log_parts.items() <= d["params"]["result"].items()) + self.assertEqual( 
+ "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", + d["params"]["result"]["topics"][0], + ) + self.assertEqual(count, 2) + + def test_log_removed_flag_with_chain_reorg(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38597 + ): + websocket = call_async(get_websocket(port=38597)) + + # a log subscriber with no-filter request + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["logs", "0x00000002", {}], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 3) + + state = clusters[0].get_shard_state(2 | 0) + tip = state.get_tip() + b0 = state.create_block_to_mine(address=acc1) + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + b0.add_tx(tx) + state.finalize_and_add_block(b0) + self.assertEqual(state.header_tip, b0.header) + tx_hash = tx.get_hash() + + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["transactionHash"], data_encoder(tx_hash) + ) + self.assertEqual(d["params"]["result"]["removed"], False) + + # fork happens + b1 = tip.create_block_to_append(address=acc1) + b1.add_tx(tx) + state.finalize_and_add_block(b1) + b2 = b1.create_block_to_append(address=acc1) + state.finalize_and_add_block(b2) + self.assertEqual(state.header_tip, b2.header) + + # log emitted from old chain, flag is set to True + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["transactionHash"], data_encoder(tx_hash) + ) + 
self.assertEqual(d["params"]["result"]["removed"], True) + + # log emitted from new chain + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["transactionHash"], data_encoder(tx_hash) + ) + self.assertEqual(d["params"]["result"]["removed"], False) + + def test_invalid_subscription(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38598 + ): + # Invalid subscription type + request1 = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newBlocks", "0x00000002"], + "id": 3, + } + # Invalid full shard id + request2 = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00040002"], + "id": 3, + } + + websocket = call_async(get_websocket(port=38598)) + [ + call_async(websocket.send(json.dumps(req))) + for req in [request1, request2] + ] + responses = [json.loads(call_async(websocket.recv())) for _ in range(2)] + [self.assertTrue(resp["error"]) for resp in responses] # emit error message + + def test_multi_subs_with_some_unsubs_in_one_ws_conn(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38599 + ): + # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 + master = clusters[0].master + websocket = call_async(get_websocket(port=38599)) + + # make 3 subscriptions on new heads + ids = [3, 4, 5] + sub_ids = [] + for id in ids: + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00000002"], + "id": id, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + 
sub_ids.append(response["result"]) + self.assertEqual(response["id"], id) + + # cancel the first subscription + request = { + "jsonrpc": "2.0", + "method": "unsubscribe", + "params": [sub_ids[0]], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["result"], True) # unsubscribed successfully + + # add a new block, should expect only 2 responses + root_block = call_async( + master.get_next_block_to_mine(acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for sub_id in sub_ids[1:]: + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["params"]["subscription"], sub_id) + + def test_unsubscribe(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38600 + ): + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": 6, + } + websocket = call_async(get_websocket(port=38600)) + call_async(websocket.send(json.dumps(request))) + sub_response = json.loads(call_async(websocket.recv())) + + # Check subscription response + self.assertEqual(sub_response["id"], 6) + self.assertEqual(len(sub_response["result"]), 34) + + unsubscribe = { + "jsonrpc": "2.0", + "method": "unsubscribe", + "params": [sub_response["result"]], + "id": 3, + } + + # Unsubscribe successfully + call_async(websocket.send(json.dumps(unsubscribe))) + response = json.loads(call_async(websocket.recv())) + self.assertTrue(response["result"]) + self.assertEqual(response["id"], 3) + + # Invalid 
unsubscription if sub_id does not exist + call_async(websocket.send(json.dumps(unsubscribe))) + response = json.loads(call_async(websocket.recv())) + self.assertTrue(response["error"]) diff --git a/quarkchain/jsonrpc_client.py b/quarkchain/jsonrpc_client.py index f69a5953b..111256700 100644 --- a/quarkchain/jsonrpc_client.py +++ b/quarkchain/jsonrpc_client.py @@ -1,58 +1,75 @@ -import httpx -import uuid - -class JsonRpcError(Exception): - def __init__(self, error): - self.code = error.get("code") - self.message = error.get("message") - self.data = error.get("data") - super().__init__(f"JSON-RPC Error {self.code}: {self.message}") - -class JsonRpcClient: - def __init__(self, url, timeout=10): - self.client = httpx.Client(base_url=url, timeout=timeout) - - def call(self, method, *params): - payload = { - "jsonrpc": "2.0", - "method": method, - "params": list(params), - "id": str(uuid.uuid4()), - } - - resp = self.client.post("", json=payload) - resp.raise_for_status() - data = resp.json() - - if "error" in data: - raise RuntimeError(data["error"]) - - return data.get("result") - - def close(self): - self.client.close() - - -class AsyncJsonRpcClient: - def __init__(self, url, timeout=10): - self.client = httpx.AsyncClient(base_url=url, timeout=timeout) - - async def call(self, method, *params): - payload = { - "jsonrpc": "2.0", - "method": method, - "params": list(params), - "id": str(uuid.uuid4()), - } - - resp = await self.client.post("", json=payload) - resp.raise_for_status() - data = resp.json() - - if "error" in data: - raise JsonRpcError(data["error"]) - - return data.get("result") - - async def close(self): +import httpx +import uuid + +class JsonRpcError(Exception): + def __init__(self, error): + self.code = error.get("code") + self.message = error.get("message") + self.data = error.get("data") + super().__init__(f"JSON-RPC Error {self.code}: {self.message}") + +class JsonRpcClient: + def __init__(self, url, timeout=10): + self.client = 
httpx.Client(base_url=url, timeout=timeout) + + def call(self, method, *params): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": list(params), + "id": str(uuid.uuid4()), + } + + resp = self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise RuntimeError(data["error"]) + + return data.get("result") + + def close(self): + self.client.close() + + +class AsyncJsonRpcClient: + def __init__(self, url, timeout=10): + self.client = httpx.AsyncClient(base_url=url, timeout=timeout) + + async def call(self, method, *params): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": list(params), + "id": str(uuid.uuid4()), + } + + resp = await self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise JsonRpcError(data["error"]) + + return data.get("result") + + async def call_with_dict_params(self, method, params): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": params, + "id": str(uuid.uuid4()), + } + + resp = await self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise JsonRpcError(data["error"]) + + return data.get("result") + + async def close(self): await self.client.aclose() \ No newline at end of file From 4ffeaa8c5462062777ef1b364aa0b2bf943992ee Mon Sep 17 00:00:00 2001 From: ping-ke Date: Mon, 16 Mar 2026 23:32:47 +0800 Subject: [PATCH 04/11] Revert "fix jsonrpc test failures: params passing and websocket server shutdown" This reverts commit 9f603e699ce9a325d4e3399f9bb6213b17fb0457. 
--- quarkchain/cluster/jsonrpc.py | 3160 +++++++++---------- quarkchain/cluster/tests/test_jsonrpc.py | 3650 +++++++++++----------- quarkchain/jsonrpc_client.py | 131 +- 3 files changed, 3460 insertions(+), 3481 deletions(-) diff --git a/quarkchain/cluster/jsonrpc.py b/quarkchain/cluster/jsonrpc.py index 019d399b2..e0cc39ba4 100644 --- a/quarkchain/cluster/jsonrpc.py +++ b/quarkchain/cluster/jsonrpc.py @@ -1,1580 +1,1580 @@ -import asyncio -import inspect -import json -from typing import Callable, Dict, List, Optional - -import aiohttp_cors -import websockets -import rlp -from aiohttp import web -from decorator import decorator - -from quarkchain.cluster.master import MasterServer -from quarkchain.cluster.rpc import AccountBranchData -from quarkchain.cluster.slave import SlaveServer -from quarkchain.core import ( - Address, - Branch, - Log, - MinorBlock, - RootBlock, - SerializedEvmTransaction, - TokenBalanceMap, - TransactionReceipt, - TypedTransaction, - Constant, - MinorBlockHeader, - PoSWInfo, -) -from quarkchain.evm.transactions import Transaction as EvmTransaction -from quarkchain.evm.utils import denoms, is_numeric -from quarkchain.p2p.p2p_manager import P2PManager -from quarkchain.utils import Logger, token_id_decode, token_id_encode -from cachetools import LRUCache -import uuid -from quarkchain.cluster.log_filter import LogFilter -from quarkchain.cluster.subscription import SUB_LOGS -from quarkchain.cluster.jsonrpcserver import RpcMethods, InvalidParams - -# defaults -DEFAULT_STARTGAS = 100 * 1000 -DEFAULT_GASPRICE = 10 * denoms.gwei - -# Allow 16 MB request for submitting big blocks -# TODO: revisit this parameter -JSON_RPC_CLIENT_REQUEST_MAX_SIZE = 16 * 1024 * 1024 - - -EMPTY_TX_ID = "0x" + "0" * Constant.TX_ID_HEX_LENGTH - -def quantity_decoder(hex_str, allow_optional=False): - """Decode `hexStr` representing a quantity.""" - if allow_optional and hex_str is None: - return None - # must start with "0x" - if not hex_str.startswith("0x") or len(hex_str) < 
3: - raise InvalidParams("Invalid quantity encoding") - - try: - return int(hex_str, 16) - except ValueError: - raise InvalidParams("Invalid quantity encoding") - - -def quantity_encoder(i): - """Encode integer quantity `data`.""" - assert is_numeric(i) - return hex(i) - - -def data_decoder(hex_str, allow_optional=False): - """Decode `hexStr` representing unformatted hex_str.""" - if allow_optional and hex_str is None: - return None - if not hex_str.startswith("0x"): - raise InvalidParams("Invalid hex_str encoding") - try: - return bytes.fromhex(hex_str[2:]) - except Exception: - raise InvalidParams("Invalid hex_str hex encoding") - - -def data_encoder(data_bytes): - """Encode unformatted binary `dataBytes`.""" - return "0x" + data_bytes.hex() - - -def address_decoder(hex_str): - """Decode an address from hex with 0x prefix to 24 bytes.""" - addr_bytes = data_decoder(hex_str) - if len(addr_bytes) not in (24, 0): - raise InvalidParams("Addresses must be 24 or 0 bytes long") - return addr_bytes - - -def address_encoder(addr_bytes): - assert len(addr_bytes) == 24 - return data_encoder(addr_bytes) - - -def recipient_decoder(hex_str, allow_optional=False): - """Decode an recipient from hex with 0x prefix to 20 bytes.""" - if allow_optional and hex_str is None: - return None - recipient_bytes = data_decoder(hex_str) - if len(recipient_bytes) not in (20, 0): - raise InvalidParams("Addresses must be 20 or 0 bytes long") - return recipient_bytes - - -def recipient_encoder(recipient_bytes): - assert len(recipient_bytes) == 20 - return data_encoder(recipient_bytes) - - -def full_shard_key_decoder(hex_str): - b = data_decoder(hex_str) - if len(b) != 4: - raise InvalidParams("Full shard id must be 4 bytes") - return int.from_bytes(b, byteorder="big") - - -def full_shard_key_encoder(full_shard_key): - return data_encoder(full_shard_key.to_bytes(4, byteorder="big")) - - -def id_encoder(hash_bytes, full_shard_key): - """Encode hash and full_shard_key into hex""" - return 
data_encoder(hash_bytes + full_shard_key.to_bytes(4, byteorder="big")) - - -def id_decoder(hex_str): - """Decode an id to (hash, full_shard_key)""" - data_bytes = data_decoder(hex_str) - if len(data_bytes) != 36: - raise InvalidParams("Invalid id encoding") - return data_bytes[:32], int.from_bytes(data_bytes[32:], byteorder="big") - - -def hash_decoder(hex_str): - """Decode a block hash.""" - decoded = data_decoder(hex_str) - if len(decoded) != 32: - raise InvalidParams("Hashes must be 32 bytes long") - return decoded - - -def signature_decoder(hex_str): - """Decode a block signature.""" - if not hex_str: - return None - decoded = data_decoder(hex_str) - if len(decoded) != 65: - raise InvalidParams("Signature must be 65 bytes long") - return decoded - - -def bool_decoder(data): - if not isinstance(data, bool): - raise InvalidParams("Parameter must be boolean") - return data - - -def _add_posw_info_to_resp(d: Dict, diff: int, posw_info: PoSWInfo): - d["effectiveDifficulty"] = quantity_encoder(posw_info.effective_difficulty) - d["poswMineableBlocks"] = quantity_encoder(posw_info.posw_mineable_blocks) - d["poswMinedBlocks"] = quantity_encoder(posw_info.posw_mined_blocks) - d["stakingApplied"] = posw_info.effective_difficulty < diff - - -def root_block_encoder(block, extra_info): - header = block.header - - d = { - "id": data_encoder(header.get_hash()), - "height": quantity_encoder(header.height), - "hash": data_encoder(header.get_hash()), - "sealHash": data_encoder(header.get_hash_for_mining()), - "hashPrevBlock": data_encoder(header.hash_prev_block), - "idPrevBlock": data_encoder(header.hash_prev_block), - "nonce": quantity_encoder(header.nonce), - "hashMerkleRoot": data_encoder(header.hash_merkle_root), - "miner": address_encoder(header.coinbase_address.serialize()), - "coinbase": balances_encoder(header.coinbase_amount_map), - "difficulty": quantity_encoder(header.difficulty), - "timestamp": quantity_encoder(header.create_time), - "size": 
quantity_encoder(len(block.serialize())), - "minorBlockHeaders": [], - "signature": data_encoder(header.signature), - } - if extra_info: - _add_posw_info_to_resp(d, header.difficulty, extra_info) - - for header in block.minor_block_header_list: - h = minor_block_header_encoder(header) - d["minorBlockHeaders"].append(h) - return d - - -def minor_block_encoder(block, include_transactions=False, extra_info=None): - """Encode a block as JSON object. - - :param block: a :class:`ethereum.block.Block` - :param include_transactions: if true transaction details are included, otherwise - only their hashes - :param extra_info: MinorBlockExtraInfo - :returns: a json encodable dictionary - """ - header = block.header - meta = block.meta - - header_info = minor_block_header_encoder(header) - d = { - **header_info, - "hashMerkleRoot": data_encoder(meta.hash_merkle_root), - "hashEvmStateRoot": data_encoder(meta.hash_evm_state_root), - "gasUsed": quantity_encoder(meta.evm_gas_used), - "size": quantity_encoder(len(block.serialize())), - } - if include_transactions: - d["transactions"] = [] - for i, _ in enumerate(block.tx_list): - d["transactions"].append(tx_encoder(block, i)) - else: - d["transactions"] = [ - id_encoder(tx.get_hash(), block.header.branch.get_full_shard_id()) - for tx in block.tx_list - ] - if extra_info: - _add_posw_info_to_resp(d, header.difficulty, extra_info) - return d - - -def minor_block_header_encoder(header: MinorBlockHeader) -> Dict: - d = { - "id": id_encoder(header.get_hash(), header.branch.get_full_shard_id()), - "height": quantity_encoder(header.height), - "hash": data_encoder(header.get_hash()), - "fullShardId": quantity_encoder(header.branch.get_full_shard_id()), - "chainId": quantity_encoder(header.branch.get_chain_id()), - "shardId": quantity_encoder(header.branch.get_shard_id()), - "hashPrevMinorBlock": data_encoder(header.hash_prev_minor_block), - "idPrevMinorBlock": id_encoder( - header.hash_prev_minor_block, header.branch.get_full_shard_id() - 
), - "hashPrevRootBlock": data_encoder(header.hash_prev_root_block), - "nonce": quantity_encoder(header.nonce), - "miner": address_encoder(header.coinbase_address.serialize()), - "coinbase": balances_encoder(header.coinbase_amount_map), - "difficulty": quantity_encoder(header.difficulty), - "extraData": data_encoder(header.extra_data), - "gasLimit": quantity_encoder(header.evm_gas_limit), - "timestamp": quantity_encoder(header.create_time), - } - return d - - -def tx_encoder(block, i): - """Encode a transaction as JSON object. - - `transaction` is the `i`th transaction in `block`. - """ - tx = block.tx_list[i] - evm_tx = tx.tx.to_evm_tx() - branch = block.header.branch - return { - "id": id_encoder(tx.get_hash(), evm_tx.from_full_shard_key), - "hash": data_encoder(tx.get_hash()), - "nonce": quantity_encoder(evm_tx.nonce), - "timestamp": quantity_encoder(block.header.create_time), - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "blockId": id_encoder(block.header.get_hash(), branch.get_full_shard_id()), - "blockHeight": quantity_encoder(block.header.height), - "transactionIndex": quantity_encoder(i), - "from": data_encoder(evm_tx.sender), - "to": data_encoder(evm_tx.to), - "fromFullShardKey": full_shard_key_encoder(evm_tx.from_full_shard_key), - "toFullShardKey": full_shard_key_encoder(evm_tx.to_full_shard_key), - "value": quantity_encoder(evm_tx.value), - "gasPrice": quantity_encoder(evm_tx.gasprice), - "gas": quantity_encoder(evm_tx.startgas), - "data": data_encoder(evm_tx.data), - "networkId": quantity_encoder(evm_tx.network_id), - "transferTokenId": quantity_encoder(evm_tx.transfer_token_id), - "gasTokenId": quantity_encoder(evm_tx.gas_token_id), - "transferTokenStr": token_id_decode(evm_tx.transfer_token_id), - "gasTokenStr": token_id_decode(evm_tx.gas_token_id), - "version": quantity_encoder(evm_tx.version), - "r": 
quantity_encoder(evm_tx.r), - "s": quantity_encoder(evm_tx.s), - "v": quantity_encoder(evm_tx.v), - } - - -def tx_detail_encoder(tx): - """Encode a transaction detail object as JSON object. Used for indexing server.""" - return { - "txId": id_encoder(tx.tx_hash, tx.from_address.full_shard_key), - "fromAddress": address_encoder(tx.from_address.serialize()), - "toAddress": address_encoder(tx.to_address.serialize()) - if tx.to_address - else "0x", - "value": quantity_encoder(tx.value), - "transferTokenId": quantity_encoder(tx.transfer_token_id), - "transferTokenStr": token_id_decode(tx.transfer_token_id), - "gasTokenId": quantity_encoder(tx.gas_token_id), - "gasTokenStr": token_id_decode(tx.gas_token_id), - "blockHeight": quantity_encoder(tx.block_height), - "timestamp": quantity_encoder(tx.timestamp), - "success": tx.success, - "isFromRootChain": tx.is_from_root_chain, - "nonce": quantity_encoder(tx.nonce), - } - - -def loglist_encoder(loglist: List[Log], is_removed: bool = False): - """Encode a list of log""" - result = [] - for l in loglist: - result.append( - { - "logIndex": quantity_encoder(l.log_idx), - "transactionIndex": quantity_encoder(l.tx_idx), - "transactionHash": data_encoder(l.tx_hash), - "blockHash": data_encoder(l.block_hash), - "blockNumber": quantity_encoder(l.block_number), - "blockHeight": quantity_encoder(l.block_number), - "address": data_encoder(l.recipient), - "recipient": data_encoder(l.recipient), - "data": data_encoder(l.data), - "topics": [data_encoder(topic) for topic in l.topics], - "removed": is_removed, - } - ) - return result - - -def receipt_encoder(block: MinorBlock, i: int, receipt: TransactionReceipt): - tx_id, tx_hash = None, None # if empty, will be populated at call site - if i < len(block.tx_list): - tx = block.tx_list[i] - evm_tx = tx.tx.to_evm_tx() - tx_id = id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) - tx_hash = data_encoder(tx.get_hash()) - resp = { - "transactionId": tx_id, - "transactionHash": tx_hash, - 
"transactionIndex": quantity_encoder(i), - "blockId": id_encoder( - block.header.get_hash(), block.header.branch.get_full_shard_id() - ), - "blockHash": data_encoder(block.header.get_hash()), - "blockHeight": quantity_encoder(block.header.height), - "blockNumber": quantity_encoder(block.header.height), - "cumulativeGasUsed": quantity_encoder(receipt.gas_used), - "gasUsed": quantity_encoder(receipt.gas_used - receipt.prev_gas_used), - "status": quantity_encoder(1 if receipt.success == b"\x01" else 0), - "contractAddress": ( - address_encoder(receipt.contract_address.serialize()) - if not receipt.contract_address.is_empty() - else None - ), - "logs": loglist_encoder(receipt.logs), - "timestamp": quantity_encoder(block.header.create_time), - } - - return resp - - -def balances_encoder(balances: TokenBalanceMap) -> List[Dict]: - balance_list = [] - for k, v in balances.balance_map.items(): - balance_list.append( - { - "tokenId": quantity_encoder(k), - "tokenStr": token_id_decode(k), - "balance": quantity_encoder(v), - } - ) - return balance_list - - -def decode_arg(name, decoder, allow_optional=False): - """Create a decorator that applies `decoder` to argument `name`.""" - - @decorator - def new_f(f, *args, **kwargs): - call_args = inspect.getcallargs(f, *args, **kwargs) - call_args[name] = ( - decoder(call_args[name], allow_optional=True) - if allow_optional - else decoder(call_args[name]) - ) - return f(**call_args) - - return new_f - - -def encode_res(encoder): - """Create a decorator that applies `encoder` to the return value of the - decorated function. 
- """ - - @decorator - async def new_f(f, *args, **kwargs): - res = await f(*args, **kwargs) - return encoder(res) - - return new_f - - -def block_height_decoder(data): - """Decode block height string, which can either be None, 'latest', 'earliest' or a hex number - of minor block height""" - if data is None or data == "latest": - return None - if data == "earliest": - return 0 - # TODO: support pending - return quantity_decoder(data) - - -def shard_id_decoder(data): - try: - return quantity_decoder(data) - except Exception: - return None - - -def eth_address_to_quarkchain_address_decoder(hex_str): - eth_hex = hex_str[2:] - if len(eth_hex) != 40: - raise InvalidParams("Addresses must be 40 or 0 bytes long") - return address_decoder("0x" + eth_hex + "00000001") - - -def _parse_log_request( - params: Dict, addr_decoder: Callable[[str], bytes] -) -> (bytes, bytes): - """Returns addresses and topics from a EVM log request.""" - addresses, topics = [], [] - if "address" in params: - if isinstance(params["address"], str): - addresses = [Address.deserialize(addr_decoder(params["address"]))] - elif isinstance(params["address"], list): - addresses = [ - Address.deserialize(addr_decoder(a)) for a in params["address"] - ] - if "topics" in params: - for topic_item in params["topics"]: - if isinstance(topic_item, str): - topics.append([data_decoder(topic_item)]) - elif isinstance(topic_item, list): - topics.append([data_decoder(tp) for tp in topic_item]) - return addresses, topics - - -public_methods = RpcMethods() -private_methods = RpcMethods() - - -# noinspection PyPep8Naming -class JSONRPCHttpServer: - @classmethod - async def start_public_server(cls, env, master_server): - server = cls( - env, - master_server, - env.cluster_config.JSON_RPC_PORT, - env.cluster_config.JSON_RPC_HOST, - public_methods, - ) - await server.start() - return server - - @classmethod - async def start_private_server(cls, env, master_server): - server = cls( - env, - master_server, - 
env.cluster_config.PRIVATE_JSON_RPC_PORT, - env.cluster_config.PRIVATE_JSON_RPC_HOST, - private_methods, - ) - await server.start() - return server - - @classmethod - async def start_test_server(cls, env, master_server): - methods = RpcMethods() - for method in public_methods.values(): - methods.add(method) - for method in private_methods.values(): - methods.add(method) - server = cls( - env, - master_server, - env.cluster_config.JSON_RPC_PORT, - env.cluster_config.JSON_RPC_HOST, - methods, - ) - await server.start() - return server - - def __init__( - self, env, master_server: MasterServer, port, host, methods: RpcMethods - ): - self.loop = asyncio.get_running_loop() - self.port = port - self.host = host - self.env = env - self.master = master_server - self.counters = dict() - - # Bind RPC handler functions to this instance - self.handlers = RpcMethods() - for rpc_name in methods: - func = methods[rpc_name] - self.handlers[rpc_name] = func.__get__(self, self.__class__) - - async def __handle(self, request): - request = await request.text() - Logger.info(request) - - d = dict() - try: - d = json.loads(request) - except Exception: - pass - method = d.get("method", "null") - if method in self.counters: - self.counters[method] += 1 - else: - self.counters[method] = 1 - # Use armor to prevent the handler from being cancelled when - # aiohttp server loses connection to client - response = await self.handlers.dispatch(d) - if response is None: - return web.Response() - if "error" in response: - Logger.error(response) - return web.json_response(response) - - async def start(self): - app = web.Application(client_max_size=JSON_RPC_CLIENT_REQUEST_MAX_SIZE) - cors = aiohttp_cors.setup(app) - route = app.router.add_post("/", self.__handle) - cors.add( - route, - { - "*": aiohttp_cors.ResourceOptions( - allow_credentials=True, - expose_headers=("X-Custom-Server-Header",), - allow_methods=["POST", "PUT"], - allow_headers=("X-Requested-With", "Content-Type"), - ) - }, - ) - 
self.runner = web.AppRunner(app, access_log=None) - await self.runner.setup() - site = web.TCPSite(self.runner, self.host, self.port) - await site.start() - - async def shutdown(self): - await self.runner.cleanup() - - # JSON RPC handlers - @public_methods.add - @decode_arg("quantity", quantity_decoder) - @encode_res(quantity_encoder) - async def echoQuantity(self, quantity): - return quantity - - @public_methods.add - @decode_arg("data", data_decoder) - @encode_res(data_encoder) - async def echoData(self, data): - return data - - @public_methods.add - async def networkInfo(self): - return { - "networkId": quantity_encoder( - self.master.env.quark_chain_config.NETWORK_ID - ), - "chainSize": quantity_encoder( - self.master.env.quark_chain_config.CHAIN_SIZE - ), - "shardSizes": [ - quantity_encoder(c.SHARD_SIZE) - for c in self.master.env.quark_chain_config.CHAINS - ], - "syncing": self.master.is_syncing(), - "mining": self.master.is_mining(), - "shardServerCount": len(self.master.slave_pool), - } - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - @encode_res(quantity_encoder) - async def getTransactionCount(self, address, block_height=None): - account_branch_data = await self.master.get_primary_account_data( - Address.deserialize(address), block_height - ) - return account_branch_data.transaction_count - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - async def getBalances(self, address, block_height=None): - account_branch_data = await self.master.get_primary_account_data( - Address.deserialize(address), block_height - ) - branch = account_branch_data.branch - balances = account_branch_data.token_balances - return { - "branch": quantity_encoder(branch.value), - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - 
"balances": balances_encoder(balances), - } - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - async def getAccountData(self, address, block_height=None, include_shards=False): - # do not allow specify height if client wants info on all shards - if include_shards and block_height is not None: - return None - - primary = None - address = Address.deserialize(address) - if not include_shards: - account_branch_data = await self.master.get_primary_account_data( - address, block_height - ) # type: AccountBranchData - branch = account_branch_data.branch - count = account_branch_data.transaction_count - - balances = account_branch_data.token_balances - primary = { - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - "balances": balances_encoder(balances), - "transactionCount": quantity_encoder(count), - "isContract": account_branch_data.is_contract, - "minedBlocks": quantity_encoder(account_branch_data.mined_blocks), - "poswMineableBlocks": quantity_encoder( - account_branch_data.posw_mineable_blocks - ), - } - return {"primary": primary} - - branch_to_account_branch_data = await self.master.get_account_data(address) - - shards = [] - for branch, account_branch_data in branch_to_account_branch_data.items(): - balances = account_branch_data.token_balances - data = { - "fullShardId": quantity_encoder(branch.get_full_shard_id()), - "shardId": quantity_encoder(branch.get_shard_id()), - "chainId": quantity_encoder(branch.get_chain_id()), - "balances": balances_encoder(balances), - "transactionCount": quantity_encoder( - account_branch_data.transaction_count - ), - "isContract": account_branch_data.is_contract, - } - shards.append(data) - - if branch.get_full_shard_id() == self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - address.full_shard_key - ): - primary = 
data.copy() - primary["minedBlocks"] = quantity_encoder( - account_branch_data.mined_blocks - ) - primary["poswMineableBlocks"] = quantity_encoder( - account_branch_data.posw_mineable_blocks - ) - - return {"primary": primary, "shards": shards} - - @public_methods.add - async def sendTransaction(self, data): - def get_data_default(key, decoder, default=None): - if key in data: - return decoder(data[key]) - return default - - to = get_data_default("to", recipient_decoder, b"") - startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) - gasprice = get_data_default("gasPrice", quantity_decoder, DEFAULT_GASPRICE) - value = get_data_default("value", quantity_decoder, 0) - data_ = get_data_default("data", data_decoder, b"") - v = get_data_default("v", quantity_decoder, 0) - r = get_data_default("r", quantity_decoder, 0) - s = get_data_default("s", quantity_decoder, 0) - nonce = get_data_default("nonce", quantity_decoder, None) - - to_full_shard_key = get_data_default( - "toFullShardKey", full_shard_key_decoder, None - ) - from_full_shard_key = get_data_default( - "fromFullShardKey", full_shard_key_decoder, None - ) - network_id = get_data_default( - "networkId", quantity_decoder, self.master.env.quark_chain_config.NETWORK_ID - ) - - gas_token_id = get_data_default( - "gasTokenId", quantity_decoder, self.env.quark_chain_config.genesis_token - ) - transfer_token_id = get_data_default( - "transferTokenId", - quantity_decoder, - self.env.quark_chain_config.genesis_token, - ) - - if nonce is None: - raise InvalidParams("Missing nonce") - if not (v and r and s): - raise InvalidParams("Missing v, r, s") - if from_full_shard_key is None: - raise InvalidParams("Missing fromFullShardKey") - - if to_full_shard_key is None: - to_full_shard_key = from_full_shard_key - - evm_tx = EvmTransaction( - nonce, - gasprice, - startgas, - to, - value, - data_, - v=v, - r=r, - s=s, - from_full_shard_key=from_full_shard_key, - to_full_shard_key=to_full_shard_key, - 
network_id=network_id, - gas_token_id=gas_token_id, - transfer_token_id=transfer_token_id, - ) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - success = await self.master.add_transaction(tx) - if not success: - return EMPTY_TX_ID - return id_encoder(tx.get_hash(), from_full_shard_key) - - @public_methods.add - @decode_arg("tx_data", data_decoder) - async def sendRawTransaction(self, tx_data): - evm_tx = rlp.decode(tx_data, EvmTransaction) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - success = await self.master.add_transaction(tx) - if not success: - return EMPTY_TX_ID - return id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) - - @public_methods.add - @decode_arg("block_id", data_decoder) - @decode_arg("need_extra_info", bool_decoder) - async def getRootBlockById(self, block_id, need_extra_info=True): - block, extra_info = await self.master.get_root_block_by_height_or_hash( - None, block_id, need_extra_info - ) - if not block: - return None - return root_block_encoder(block, extra_info) - - @public_methods.add - @decode_arg("need_extra_info", bool_decoder) - async def getRootBlockByHeight(self, height=None, need_extra_info=True): - if height is not None: - height = quantity_decoder(height) - block, extra_info = await self.master.get_root_block_by_height_or_hash( - height, None, need_extra_info - ) - if not block: - return None - return root_block_encoder(block, extra_info) - - @public_methods.add - @decode_arg("block_id", id_decoder) - @decode_arg("include_transactions", bool_decoder) - @decode_arg("need_extra_info", bool_decoder) - async def getMinorBlockById( - self, block_id, include_transactions=False, need_extra_info=True - ): - block_hash, full_shard_key = block_id - try: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - except Exception: - return None - block, extra_info = await self.master.get_minor_block_by_hash( - block_hash, 
branch, need_extra_info - ) - if not block: - return None - return minor_block_encoder(block, include_transactions, extra_info) - - @public_methods.add - @decode_arg("full_shard_key", quantity_decoder) - @decode_arg("include_transactions", bool_decoder) - @decode_arg("need_extra_info", bool_decoder) - async def getMinorBlockByHeight( - self, - full_shard_key: int, - height=None, - include_transactions=False, - need_extra_info=True, - ): - if height is not None: - height = quantity_decoder(height) - try: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - except Exception: - return None - block, extra_info = await self.master.get_minor_block_by_height( - height, branch, need_extra_info - ) - if not block: - return None - return minor_block_encoder(block, include_transactions, extra_info) - - @public_methods.add - @decode_arg("tx_id", id_decoder) - async def getTransactionById(self, tx_id): - tx_hash, full_shard_key = tx_id - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) - if not minor_block: - return None - if len(minor_block.tx_list) <= i: - return None - return tx_encoder(minor_block, i) - - @public_methods.add - @decode_arg("block_height", block_height_decoder) - async def call(self, data, block_height=None): - return await self._call_or_estimate_gas( - is_call=True, block_height=block_height, **data - ) - - @public_methods.add - async def estimateGas(self, data): - return await self._call_or_estimate_gas(is_call=False, **data) - - @public_methods.add - async def getTransactionReceipt(self, tx_id): - id_bytes = data_decoder(tx_id) - if len(id_bytes) != 36: - raise InvalidParams("Invalid id encoding") - tx_hash, full_shard_key = ( - id_bytes[:32], - int.from_bytes(id_bytes[32:], byteorder="big"), - ) - branch = Branch( - 
self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - resp = await self.master.get_transaction_receipt(tx_hash, branch) - if not resp: - return None - minor_block, i, receipt = resp - - ret = receipt_encoder(minor_block, i, receipt) - if ret["transactionId"] is None: - ret["transactionId"] = tx_id - ret["transactionHash"] = data_encoder(tx_hash) - return ret - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - async def getLogs(self, data, full_shard_key): - return await self._get_logs(data, full_shard_key, decoder=address_decoder) - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("key", quantity_decoder) - @decode_arg("block_height", block_height_decoder) - # TODO: add block number - async def getStorageAt(self, address, key, block_height=None): - res = await self.master.get_storage_at( - Address.deserialize(address), key, block_height - ) - return data_encoder(res) if res is not None else None - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("block_height", block_height_decoder) - async def getCode(self, address, block_height=None): - res = await self.master.get_code(Address.deserialize(address), block_height) - return data_encoder(res) if res is not None else None - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - @decode_arg("start", data_decoder) - @decode_arg("limit", quantity_decoder) - async def getAllTransactions(self, full_shard_key, start="0x", limit="0xa"): - """ "start" should be the "next" in the response for fetching next page. - "start" can also be "0x" to fetch from the beginning (i.e., latest). 
- """ - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - if limit > 20: - limit = 20 - result = await self.master.get_all_transactions(branch, start, limit) - if not result: - return None - tx_list, next = result - return { - "txList": [tx_detail_encoder(tx) for tx in tx_list], - "next": data_encoder(next), - } - - @public_methods.add - @decode_arg("address", address_decoder) - @decode_arg("start", data_decoder) - @decode_arg("limit", quantity_decoder) - @decode_arg("transfer_token_id", quantity_decoder, allow_optional=True) - async def getTransactionsByAddress( - self, address, start="0x", limit="0xa", transfer_token_id=None - ): - """ "start" should be the "next" in the response for fetching next page. - "start" can also be "0x" to fetch from the beginning (i.e., latest). - "start" can be "0x00" to fetch the pending outgoing transactions. - """ - address = Address.create_from(address) - if limit > 20: - limit = 20 - result = await self.master.get_transactions_by_address( - address, transfer_token_id, start, limit - ) - if not result: - return None - tx_list, next = result - return { - "txList": [tx_detail_encoder(tx) for tx in tx_list], - "next": data_encoder(next), - } - - @public_methods.add - async def getJrpcCalls(self): - return self.counters - - @public_methods.add - async def gasPrice(self, full_shard_key: str, token_id: Optional[str] = None): - full_shard_key = shard_id_decoder(full_shard_key) - if full_shard_key is None: - return None - parsed_token_id = ( - quantity_decoder(token_id) if token_id else token_id_encode("QKC") - ) - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - ret = await self.master.gas_price(branch, parsed_token_id) - if ret is None: - return None - return quantity_encoder(ret) - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - @decode_arg("header_hash", hash_decoder) - 
@decode_arg("nonce", quantity_decoder) - @decode_arg("mixhash", hash_decoder) - @decode_arg("signature", signature_decoder) - async def submitWork( - self, full_shard_key, header_hash, nonce, mixhash, signature=None - ): - branch = None # `None` means getting work from root chain - if full_shard_key is not None: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - return await self.master.submit_work( - branch, header_hash, nonce, mixhash, signature - ) - - @public_methods.add - @decode_arg("full_shard_key", shard_id_decoder) - @decode_arg("coinbase_addr", recipient_decoder, allow_optional=True) - async def getWork(self, full_shard_key, coinbase_addr=None): - branch = None # `None` means getting work from root chain - if full_shard_key is not None: - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - work, optional_divider = await self.master.get_work(branch, coinbase_addr) - if work is None: - return None - ret = [ - data_encoder(work.hash), - quantity_encoder(work.height), - quantity_encoder(work.difficulty), - ] - if optional_divider is not None: - ret.append(quantity_encoder(optional_divider)) - return ret - - @public_methods.add - @decode_arg("block_id", data_decoder) - async def getRootHashConfirmingMinorBlockById(self, block_id): - retv = self.master.root_state.db.get_root_block_confirming_minor_block(block_id) - return data_encoder(retv) if retv else None - - @public_methods.add - @decode_arg("tx_id", id_decoder) - async def getTransactionConfirmedByNumberRootBlocks(self, tx_id): - tx_hash, full_shard_key = tx_id - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) - if not minor_block: - return None - confirming_hash = ( - 
self.master.root_state.db.get_root_block_confirming_minor_block( - minor_block.header.get_hash() - + minor_block.header.branch.get_full_shard_id().to_bytes( - 4, byteorder="big" - ) - ) - ) - if confirming_hash is None: - return quantity_encoder(0) - confirming_header = self.master.root_state.db.get_root_block_header_by_hash( - confirming_hash - ) - canonical_hash = self.master.root_state.db.get_root_block_hash_by_height( - confirming_header.height - ) - if canonical_hash != confirming_hash: - return quantity_encoder(0) - tip = self.master.root_state.tip - return quantity_encoder(tip.height - confirming_header.height + 1) - - ######################## Ethereum JSON RPC ######################## - - @public_methods.add - async def net_version(self): - return quantity_encoder(self.master.env.quark_chain_config.NETWORK_ID) - - @public_methods.add - async def eth_gasPrice(self, shard): - return await self.gasPrice(shard, quantity_encoder(token_id_encode("QKC"))) - - @public_methods.add - @decode_arg("block_height", block_height_decoder) - @decode_arg("include_transactions", bool_decoder) - async def eth_getBlockByNumber(self, block_height, include_transactions): - """ - NOTE: only support block_id "latest" or hex - """ - - def block_transcoder(block): - """ - QuarkChain Block => ETH Block - """ - return { - **block, - "number": block["height"], - "parentHash": block["hashPrevMinorBlock"], - "sha3Uncles": "", - "logsBloom": "", - "transactionsRoot": block["hashMerkleRoot"], # ? - "stateRoot": block["hashEvmStateRoot"], # ? 
- } - - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key(0) - ) - block, _ = await self.master.get_minor_block_by_height( - block_height, branch, need_extra_info=False - ) - if block is None: - return None - return block_transcoder(minor_block_encoder(block)) - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("shard", shard_id_decoder) - @encode_res(quantity_encoder) - async def eth_getBalance(self, address, shard=None): - address = Address.deserialize(address) - if shard is not None: - address = Address(address.recipient, shard) - account_branch_data = await self.master.get_primary_account_data(address) - balance = account_branch_data.token_balances.balance_map.get( - token_id_encode("QKC"), 0 - ) - return balance - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("shard", shard_id_decoder) - @encode_res(quantity_encoder) - async def eth_getTransactionCount(self, address, shard=None): - address = Address.deserialize(address) - if shard is not None: - address = Address(address.recipient, shard) - account_branch_data = await self.master.get_primary_account_data(address) - return account_branch_data.transaction_count - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("shard", shard_id_decoder) - async def eth_getCode(self, address, shard=None): - addr = Address.deserialize(address) - if shard is not None: - addr = Address(addr.recipient, shard) - res = await self.master.get_code(addr, None) - return data_encoder(res) if res is not None else None - - @public_methods.add - @decode_arg("shard", shard_id_decoder) - async def eth_call(self, data, shard=None): - """Returns the result of the transaction application without putting in block chain""" - data = self._convert_eth_call_data(data, shard) - return await self.call(data) - - @public_methods.add - async def 
eth_sendRawTransaction(self, tx_data): - return await self.sendRawTransaction(tx_data) - - @public_methods.add - async def eth_getTransactionReceipt(self, tx_id): - return await self.getTransactionReceipt(tx_id) - - @public_methods.add - @decode_arg("shard", shard_id_decoder) - async def eth_estimateGas(self, data, shard): - data = self._convert_eth_call_data(data, shard) - return await self.estimateGas(**data) - - @public_methods.add - @decode_arg("shard", shard_id_decoder) - async def eth_getLogs(self, data, shard): - return await self._get_logs( - data, shard, decoder=eth_address_to_quarkchain_address_decoder - ) - - @public_methods.add - @decode_arg("address", eth_address_to_quarkchain_address_decoder) - @decode_arg("key", quantity_decoder) - @decode_arg("shard", shard_id_decoder) - async def eth_getStorageAt(self, address, key, shard=None): - addr = Address.deserialize(address) - if shard is not None: - addr = Address(addr.recipient, shard) - res = await self.master.get_storage_at(addr, key, None) - return data_encoder(res) if res is not None else None - - ######################## Private Methods ######################## - - @private_methods.add - @decode_arg("branch", quantity_decoder) - @decode_arg("block_data", data_decoder) - async def addBlock(self, branch, block_data): - if branch == 0: - block = RootBlock.deserialize(block_data) - return await self.master.add_root_block_from_miner(block) - return await self.master.add_raw_minor_block(Branch(branch), block_data) - - @private_methods.add - async def getPeers(self): - peer_list = [] - for peer_id, peer in self.master.network.active_peer_pool.items(): - peer_list.append( - { - "id": data_encoder(peer_id), - "ip": quantity_encoder(int(peer.ip)), - "port": quantity_encoder(peer.port), - } - ) - return {"peers": peer_list} - - @private_methods.add - async def getSyncStats(self): - return self.master.synchronizer.get_stats() - - @private_methods.add - async def getStats(self): - # This JRPC doesn't follow the 
standard encoding - return await self.master.get_stats() - - @private_methods.add - async def getBlockCount(self): - # This JRPC doesn't follow the standard encoding - return self.master.get_block_count() - - @private_methods.add - async def createTransactions(self, **load_test_data): - """Create transactions for load testing""" - - def get_data_default(key, decoder, default=None): - if key in load_test_data: - return decoder(load_test_data[key]) - return default - - num_tx_per_shard = load_test_data["numTxPerShard"] - x_shard_percent = load_test_data["xShardPercent"] - to = get_data_default("to", recipient_decoder, b"") - startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) - gasprice = get_data_default( - "gasPrice", quantity_decoder, int(DEFAULT_GASPRICE / 10) - ) - value = get_data_default("value", quantity_decoder, 0) - data = get_data_default("data", data_decoder, b"") - # FIXME: can't support specifying full shard ID to 0. currently is regarded as not set - from_full_shard_key = get_data_default( - "fromFullShardKey", full_shard_key_decoder, 0 - ) - gas_token_id = get_data_default( - "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token - ) - transfer_token_id = get_data_default( - "transfer_token_id", - quantity_decoder, - self.env.quark_chain_config.genesis_token, - ) - # build sample tx - evm_tx_sample = EvmTransaction( - 0, - gasprice, - startgas, - to, - value, - data, - from_full_shard_key=from_full_shard_key, - gas_token_id=gas_token_id, - transfer_token_id=transfer_token_id, - ) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx_sample)) - return await self.master.create_transactions( - num_tx_per_shard, x_shard_percent, tx - ) - - @private_methods.add - async def setTargetBlockTime(self, root_block_time=0, minor_block_time=0): - """0 will not update existing value""" - return await self.master.set_target_block_time( - root_block_time, minor_block_time - ) - - @public_methods.add - 
@decode_arg("block_id", id_decoder) - @decode_arg("root_block_id", data_decoder, allow_optional=True) - @decode_arg("token_id", quantity_decoder) # default: QKC - @decode_arg("start", data_decoder, allow_optional=True) - @decode_arg("limit", quantity_decoder) - async def getTotalBalance( - self, block_id, root_block_id=None, token_id="0x8bb0", start=None, limit="0x64" - ): - if limit > 10000: - limit = 10000 - block_hash, full_shard_key = block_id - full_shard_id = ( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - try: - result = await self.master.get_total_balance( - Branch(full_shard_id), block_hash, root_block_id, token_id, start, limit - ) - except: - raise ServerError - if not result: - raise InvalidRequest - total_balance, next_start = result - return { - "totalBalance": quantity_encoder(total_balance), - "next": data_encoder(next_start), - } - - @private_methods.add - async def setMining(self, mining): - """Turn on / off mining""" - return await self.master.set_mining(mining) - - @private_methods.add - async def getJrpcCalls(self): - return self.counters - - @private_methods.add - async def getKadRoutingTableSize(self): - """Returns number of nodes in the p2p discovery routing table""" - if not isinstance(self.master.network, P2PManager): - raise InvalidRequest("network is not P2P") - return len(self.master.network.server.discovery.proto.routing) - - @private_methods.add - async def getKadRoutingTable(self): - """returns a list of nodes in the p2p discovery routing table, in the enode format - eg. 
"enode://PUBKEY@IP:PORT" - """ - if not isinstance(self.master.network, P2PManager): - raise InvalidRequest("network is not P2P") - return [n.to_uri() for n in self.master.network.server.discovery.proto.routing] - - @public_methods.add - async def getTotalSupply(self): - total_supply = self.master.get_total_supply() - return quantity_encoder(total_supply) if total_supply else None - - @staticmethod - def _convert_eth_call_data(data, shard): - to_address = Address.create_from( - eth_address_to_quarkchain_address_decoder(data["to"]) - ) - if shard: - to_address = Address(to_address.recipient, shard) - data["to"] = "0x" + to_address.serialize().hex() - if "from" in data: - from_address = Address.create_from( - eth_address_to_quarkchain_address_decoder(data["from"]) - ) - if shard: - from_address = Address(from_address.recipient, shard) - data["from"] = "0x" + from_address.serialize().hex() - return data - - async def _get_logs(self, data, full_shard_key, decoder: Callable[[str], bytes]): - start_block = block_height_decoder(data.get("fromBlock", "latest")) - end_block = block_height_decoder(data.get("toBlock", "latest")) - addresses, topics = _parse_log_request(data, decoder) - if full_shard_key is None: - raise InvalidParams("Full shard key is required to get logs") - addresses = [Address(a.recipient, full_shard_key) for a in addresses] - branch = Branch( - self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( - full_shard_key - ) - ) - logs = await self.master.get_logs( - addresses, topics, start_block, end_block, branch - ) - if logs is None: - return None - return loglist_encoder(logs) - - async def _call_or_estimate_gas(self, is_call: bool, **data): - """Returns the result of the transaction application without putting in block chain""" - if not isinstance(data, dict): - raise InvalidParams("Transaction must be an object") - - def get_data_default(key, decoder, default=None): - if key in data: - return decoder(data[key]) - return default - - to 
= get_data_default("to", address_decoder, None) - if to is None: - to_full_shard_byte = b"\x00" * 4 - to = b"" - else: - to_full_shard_byte = to[20:] - to = to[:20] - - to_full_shard_key = int.from_bytes(to_full_shard_byte, "big") - gas = get_data_default("gas", quantity_decoder, 0) - gas_price = get_data_default("gasPrice", quantity_decoder, 0) - value = get_data_default("value", quantity_decoder, 0) - data_ = get_data_default("data", data_decoder, b"") - sender = get_data_default( - "from", address_decoder, b"\x00" * 20 + to_full_shard_byte - ) - sender_address = Address.create_from(sender) - from_full_shard_key = sender_address.full_shard_key - - gas_token_id = get_data_default( - "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token - ) - transfer_token_id = get_data_default( - "transfer_token_id", - quantity_decoder, - self.env.quark_chain_config.genesis_token, - ) - - network_id = self.master.env.quark_chain_config.NETWORK_ID - - nonce = 0 # slave will fill in the real nonce - evm_tx = EvmTransaction( - nonce, - gas_price, - gas, - to, - value, - data_, - from_full_shard_key=from_full_shard_key, - to_full_shard_key=to_full_shard_key, - network_id=network_id, - gas_token_id=gas_token_id, - transfer_token_id=transfer_token_id, - ) - - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - if is_call: - # xshard not supported for now - is_same_shard = self.master.env.quark_chain_config.is_same_full_shard( - to_full_shard_key, from_full_shard_key - ) - if not is_same_shard: - raise InvalidParams("Call cross-shard tx not supported yet") - res = await self.master.execute_transaction( - tx, sender_address, data["block_height"] - ) - return data_encoder(res) if res is not None else None - else: # estimate gas - res = await self.master.estimate_gas(tx, sender_address) - return quantity_encoder(res) if res is not None else None - - -class JSONRPCWebsocketServer: - @classmethod - async def start_websocket_server(cls, env, 
slave_server): - server = cls( - env, - slave_server, - env.slave_config.WEBSOCKET_JSON_RPC_PORT, - env.slave_config.HOST, - public_methods, - ) - await server.start() - return server - - def __init__( - self, env, slave_server: SlaveServer, port, host, methods: RpcMethods - ): - self.loop = asyncio.get_running_loop() - self.port = port - self.host = host - self.env = env - self.slave = slave_server - self.counters = dict() - self.pending_tx_cache = LRUCache(maxsize=1024) - - # Bind RPC handler functions to this instance - self.handlers = RpcMethods() - for rpc_name in methods: - func = methods[rpc_name] - self.handlers[rpc_name] = func.__get__(self, self.__class__) - - self.shard_subscription_managers = self.slave.shard_subscription_managers - - async def __handle(self, websocket): - sub_ids = dict() # per-websocket var, Dict[sub_id, full_shard_id] - try: - async for message in websocket: - Logger.info(message) - - d = dict() - try: - d = json.loads(message) - except Exception: - raise InvalidParams("Cannot parse message as JSON") - method = d.get("method", "null") - if method in self.counters: - self.counters[method] += 1 - else: - self.counters[method] = 1 - msg_id = d.get("id", 0) - - response = await self.handlers.dispatch( - d, - context={ - "websocket": websocket, - "msg_id": msg_id, - "sub_ids": sub_ids, - }, - ) - - if response is None: - continue - if "error" in response: - Logger.error(response) - else: - if method == "subscribe": - sub_id = response["result"] - full_shard_id = shard_id_decoder(d.get("params")[1]) - sub_ids[sub_id] = full_shard_id - elif method == "unsubscribe": - sub_id = d.get("params")[0] - del sub_ids[sub_id] - await websocket.send(json.dumps(response)) - finally: # current websocket connection terminates, remove subscribers in this connection - for sub_id, full_shard_id in sub_ids.items(): - try: - shard_subscription_manager = self.shard_subscription_managers[ - full_shard_id - ] - 
shard_subscription_manager.remove_subscriber(sub_id) - except: - pass - - async def start(self): - self._server = await websockets.serve(self.__handle, self.host, self.port) - - def shutdown(self): - if hasattr(self, '_server') and self._server is not None: - self._server.close() - - @staticmethod - def response_transcoder(sub_id, result): - return { - "jsonrpc": "2.0", - "method": "subscription", - "params": {"subscription": sub_id, "result": result}, - } - - @public_methods.add - async def subscribe(self, sub_type, full_shard_id, params=None, context=None): - assert context is not None - full_shard_id = shard_id_decoder(full_shard_id) - if full_shard_id is None: - raise InvalidParams("Invalid full shard ID") - branch = Branch(full_shard_id) - shard = self.slave.shards.get(branch, None) - if not shard: - raise InvalidParams("Full shard ID not found") - - websocket = context["websocket"] - sub_id = "0x" + uuid.uuid4().hex - shard_subscription_manager = self.shard_subscription_managers[full_shard_id] - - extra = None - if sub_type == SUB_LOGS: - addresses, topics = _parse_log_request(params, address_decoder) - addresses = [Address(a.recipient, full_shard_id) for a in addresses] - extra = lambda candidate_blocks: LogFilter.create_from_block_candidates( - shard.state.db, addresses, topics, candidate_blocks - ) - - shard_subscription_manager.add_subscriber(sub_type, sub_id, websocket, extra) - return sub_id - - @public_methods.add - async def unsubscribe(self, sub_id, context=None): - sub_ids = context["sub_ids"] - assert context is not None - if sub_id not in sub_ids: - raise InvalidParams("Subscription ID not found") - - full_shard_id = sub_ids[sub_id] - shard_subscription_manager = self.shard_subscription_managers[full_shard_id] - shard_subscription_manager.remove_subscriber(sub_id) - - return True +import asyncio +import inspect +import json +from typing import Callable, Dict, List, Optional + +import aiohttp_cors +import websockets +import rlp +from aiohttp import 
web +from decorator import decorator + +from quarkchain.cluster.master import MasterServer +from quarkchain.cluster.rpc import AccountBranchData +from quarkchain.cluster.slave import SlaveServer +from quarkchain.core import ( + Address, + Branch, + Log, + MinorBlock, + RootBlock, + SerializedEvmTransaction, + TokenBalanceMap, + TransactionReceipt, + TypedTransaction, + Constant, + MinorBlockHeader, + PoSWInfo, +) +from quarkchain.evm.transactions import Transaction as EvmTransaction +from quarkchain.evm.utils import denoms, is_numeric +from quarkchain.p2p.p2p_manager import P2PManager +from quarkchain.utils import Logger, token_id_decode, token_id_encode +from cachetools import LRUCache +import uuid +from quarkchain.cluster.log_filter import LogFilter +from quarkchain.cluster.subscription import SUB_LOGS +from quarkchain.cluster.jsonrpcserver import RpcMethods, InvalidParams + +# defaults +DEFAULT_STARTGAS = 100 * 1000 +DEFAULT_GASPRICE = 10 * denoms.gwei + +# Allow 16 MB request for submitting big blocks +# TODO: revisit this parameter +JSON_RPC_CLIENT_REQUEST_MAX_SIZE = 16 * 1024 * 1024 + + +EMPTY_TX_ID = "0x" + "0" * Constant.TX_ID_HEX_LENGTH + +def quantity_decoder(hex_str, allow_optional=False): + """Decode `hexStr` representing a quantity.""" + if allow_optional and hex_str is None: + return None + # must start with "0x" + if not hex_str.startswith("0x") or len(hex_str) < 3: + raise InvalidParams("Invalid quantity encoding") + + try: + return int(hex_str, 16) + except ValueError: + raise InvalidParams("Invalid quantity encoding") + + +def quantity_encoder(i): + """Encode integer quantity `data`.""" + assert is_numeric(i) + return hex(i) + + +def data_decoder(hex_str, allow_optional=False): + """Decode `hexStr` representing unformatted hex_str.""" + if allow_optional and hex_str is None: + return None + if not hex_str.startswith("0x"): + raise InvalidParams("Invalid hex_str encoding") + try: + return bytes.fromhex(hex_str[2:]) + except Exception: + raise 
InvalidParams("Invalid hex_str hex encoding") + + +def data_encoder(data_bytes): + """Encode unformatted binary `dataBytes`.""" + return "0x" + data_bytes.hex() + + +def address_decoder(hex_str): + """Decode an address from hex with 0x prefix to 24 bytes.""" + addr_bytes = data_decoder(hex_str) + if len(addr_bytes) not in (24, 0): + raise InvalidParams("Addresses must be 24 or 0 bytes long") + return addr_bytes + + +def address_encoder(addr_bytes): + assert len(addr_bytes) == 24 + return data_encoder(addr_bytes) + + +def recipient_decoder(hex_str, allow_optional=False): + """Decode an recipient from hex with 0x prefix to 20 bytes.""" + if allow_optional and hex_str is None: + return None + recipient_bytes = data_decoder(hex_str) + if len(recipient_bytes) not in (20, 0): + raise InvalidParams("Addresses must be 20 or 0 bytes long") + return recipient_bytes + + +def recipient_encoder(recipient_bytes): + assert len(recipient_bytes) == 20 + return data_encoder(recipient_bytes) + + +def full_shard_key_decoder(hex_str): + b = data_decoder(hex_str) + if len(b) != 4: + raise InvalidParams("Full shard id must be 4 bytes") + return int.from_bytes(b, byteorder="big") + + +def full_shard_key_encoder(full_shard_key): + return data_encoder(full_shard_key.to_bytes(4, byteorder="big")) + + +def id_encoder(hash_bytes, full_shard_key): + """Encode hash and full_shard_key into hex""" + return data_encoder(hash_bytes + full_shard_key.to_bytes(4, byteorder="big")) + + +def id_decoder(hex_str): + """Decode an id to (hash, full_shard_key)""" + data_bytes = data_decoder(hex_str) + if len(data_bytes) != 36: + raise InvalidParams("Invalid id encoding") + return data_bytes[:32], int.from_bytes(data_bytes[32:], byteorder="big") + + +def hash_decoder(hex_str): + """Decode a block hash.""" + decoded = data_decoder(hex_str) + if len(decoded) != 32: + raise InvalidParams("Hashes must be 32 bytes long") + return decoded + + +def signature_decoder(hex_str): + """Decode a block signature.""" + if 
not hex_str: + return None + decoded = data_decoder(hex_str) + if len(decoded) != 65: + raise InvalidParams("Signature must be 65 bytes long") + return decoded + + +def bool_decoder(data): + if not isinstance(data, bool): + raise InvalidParams("Parameter must be boolean") + return data + + +def _add_posw_info_to_resp(d: Dict, diff: int, posw_info: PoSWInfo): + d["effectiveDifficulty"] = quantity_encoder(posw_info.effective_difficulty) + d["poswMineableBlocks"] = quantity_encoder(posw_info.posw_mineable_blocks) + d["poswMinedBlocks"] = quantity_encoder(posw_info.posw_mined_blocks) + d["stakingApplied"] = posw_info.effective_difficulty < diff + + +def root_block_encoder(block, extra_info): + header = block.header + + d = { + "id": data_encoder(header.get_hash()), + "height": quantity_encoder(header.height), + "hash": data_encoder(header.get_hash()), + "sealHash": data_encoder(header.get_hash_for_mining()), + "hashPrevBlock": data_encoder(header.hash_prev_block), + "idPrevBlock": data_encoder(header.hash_prev_block), + "nonce": quantity_encoder(header.nonce), + "hashMerkleRoot": data_encoder(header.hash_merkle_root), + "miner": address_encoder(header.coinbase_address.serialize()), + "coinbase": balances_encoder(header.coinbase_amount_map), + "difficulty": quantity_encoder(header.difficulty), + "timestamp": quantity_encoder(header.create_time), + "size": quantity_encoder(len(block.serialize())), + "minorBlockHeaders": [], + "signature": data_encoder(header.signature), + } + if extra_info: + _add_posw_info_to_resp(d, header.difficulty, extra_info) + + for header in block.minor_block_header_list: + h = minor_block_header_encoder(header) + d["minorBlockHeaders"].append(h) + return d + + +def minor_block_encoder(block, include_transactions=False, extra_info=None): + """Encode a block as JSON object. 
+ + :param block: a :class:`ethereum.block.Block` + :param include_transactions: if true transaction details are included, otherwise + only their hashes + :param extra_info: MinorBlockExtraInfo + :returns: a json encodable dictionary + """ + header = block.header + meta = block.meta + + header_info = minor_block_header_encoder(header) + d = { + **header_info, + "hashMerkleRoot": data_encoder(meta.hash_merkle_root), + "hashEvmStateRoot": data_encoder(meta.hash_evm_state_root), + "gasUsed": quantity_encoder(meta.evm_gas_used), + "size": quantity_encoder(len(block.serialize())), + } + if include_transactions: + d["transactions"] = [] + for i, _ in enumerate(block.tx_list): + d["transactions"].append(tx_encoder(block, i)) + else: + d["transactions"] = [ + id_encoder(tx.get_hash(), block.header.branch.get_full_shard_id()) + for tx in block.tx_list + ] + if extra_info: + _add_posw_info_to_resp(d, header.difficulty, extra_info) + return d + + +def minor_block_header_encoder(header: MinorBlockHeader) -> Dict: + d = { + "id": id_encoder(header.get_hash(), header.branch.get_full_shard_id()), + "height": quantity_encoder(header.height), + "hash": data_encoder(header.get_hash()), + "fullShardId": quantity_encoder(header.branch.get_full_shard_id()), + "chainId": quantity_encoder(header.branch.get_chain_id()), + "shardId": quantity_encoder(header.branch.get_shard_id()), + "hashPrevMinorBlock": data_encoder(header.hash_prev_minor_block), + "idPrevMinorBlock": id_encoder( + header.hash_prev_minor_block, header.branch.get_full_shard_id() + ), + "hashPrevRootBlock": data_encoder(header.hash_prev_root_block), + "nonce": quantity_encoder(header.nonce), + "miner": address_encoder(header.coinbase_address.serialize()), + "coinbase": balances_encoder(header.coinbase_amount_map), + "difficulty": quantity_encoder(header.difficulty), + "extraData": data_encoder(header.extra_data), + "gasLimit": quantity_encoder(header.evm_gas_limit), + "timestamp": quantity_encoder(header.create_time), + } + 
return d + + +def tx_encoder(block, i): + """Encode a transaction as JSON object. + + `transaction` is the `i`th transaction in `block`. + """ + tx = block.tx_list[i] + evm_tx = tx.tx.to_evm_tx() + branch = block.header.branch + return { + "id": id_encoder(tx.get_hash(), evm_tx.from_full_shard_key), + "hash": data_encoder(tx.get_hash()), + "nonce": quantity_encoder(evm_tx.nonce), + "timestamp": quantity_encoder(block.header.create_time), + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "blockId": id_encoder(block.header.get_hash(), branch.get_full_shard_id()), + "blockHeight": quantity_encoder(block.header.height), + "transactionIndex": quantity_encoder(i), + "from": data_encoder(evm_tx.sender), + "to": data_encoder(evm_tx.to), + "fromFullShardKey": full_shard_key_encoder(evm_tx.from_full_shard_key), + "toFullShardKey": full_shard_key_encoder(evm_tx.to_full_shard_key), + "value": quantity_encoder(evm_tx.value), + "gasPrice": quantity_encoder(evm_tx.gasprice), + "gas": quantity_encoder(evm_tx.startgas), + "data": data_encoder(evm_tx.data), + "networkId": quantity_encoder(evm_tx.network_id), + "transferTokenId": quantity_encoder(evm_tx.transfer_token_id), + "gasTokenId": quantity_encoder(evm_tx.gas_token_id), + "transferTokenStr": token_id_decode(evm_tx.transfer_token_id), + "gasTokenStr": token_id_decode(evm_tx.gas_token_id), + "version": quantity_encoder(evm_tx.version), + "r": quantity_encoder(evm_tx.r), + "s": quantity_encoder(evm_tx.s), + "v": quantity_encoder(evm_tx.v), + } + + +def tx_detail_encoder(tx): + """Encode a transaction detail object as JSON object. 
Used for indexing server.""" + return { + "txId": id_encoder(tx.tx_hash, tx.from_address.full_shard_key), + "fromAddress": address_encoder(tx.from_address.serialize()), + "toAddress": address_encoder(tx.to_address.serialize()) + if tx.to_address + else "0x", + "value": quantity_encoder(tx.value), + "transferTokenId": quantity_encoder(tx.transfer_token_id), + "transferTokenStr": token_id_decode(tx.transfer_token_id), + "gasTokenId": quantity_encoder(tx.gas_token_id), + "gasTokenStr": token_id_decode(tx.gas_token_id), + "blockHeight": quantity_encoder(tx.block_height), + "timestamp": quantity_encoder(tx.timestamp), + "success": tx.success, + "isFromRootChain": tx.is_from_root_chain, + "nonce": quantity_encoder(tx.nonce), + } + + +def loglist_encoder(loglist: List[Log], is_removed: bool = False): + """Encode a list of log""" + result = [] + for l in loglist: + result.append( + { + "logIndex": quantity_encoder(l.log_idx), + "transactionIndex": quantity_encoder(l.tx_idx), + "transactionHash": data_encoder(l.tx_hash), + "blockHash": data_encoder(l.block_hash), + "blockNumber": quantity_encoder(l.block_number), + "blockHeight": quantity_encoder(l.block_number), + "address": data_encoder(l.recipient), + "recipient": data_encoder(l.recipient), + "data": data_encoder(l.data), + "topics": [data_encoder(topic) for topic in l.topics], + "removed": is_removed, + } + ) + return result + + +def receipt_encoder(block: MinorBlock, i: int, receipt: TransactionReceipt): + tx_id, tx_hash = None, None # if empty, will be populated at call site + if i < len(block.tx_list): + tx = block.tx_list[i] + evm_tx = tx.tx.to_evm_tx() + tx_id = id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) + tx_hash = data_encoder(tx.get_hash()) + resp = { + "transactionId": tx_id, + "transactionHash": tx_hash, + "transactionIndex": quantity_encoder(i), + "blockId": id_encoder( + block.header.get_hash(), block.header.branch.get_full_shard_id() + ), + "blockHash": data_encoder(block.header.get_hash()), + 
"blockHeight": quantity_encoder(block.header.height), + "blockNumber": quantity_encoder(block.header.height), + "cumulativeGasUsed": quantity_encoder(receipt.gas_used), + "gasUsed": quantity_encoder(receipt.gas_used - receipt.prev_gas_used), + "status": quantity_encoder(1 if receipt.success == b"\x01" else 0), + "contractAddress": ( + address_encoder(receipt.contract_address.serialize()) + if not receipt.contract_address.is_empty() + else None + ), + "logs": loglist_encoder(receipt.logs), + "timestamp": quantity_encoder(block.header.create_time), + } + + return resp + + +def balances_encoder(balances: TokenBalanceMap) -> List[Dict]: + balance_list = [] + for k, v in balances.balance_map.items(): + balance_list.append( + { + "tokenId": quantity_encoder(k), + "tokenStr": token_id_decode(k), + "balance": quantity_encoder(v), + } + ) + return balance_list + + +def decode_arg(name, decoder, allow_optional=False): + """Create a decorator that applies `decoder` to argument `name`.""" + + @decorator + def new_f(f, *args, **kwargs): + call_args = inspect.getcallargs(f, *args, **kwargs) + call_args[name] = ( + decoder(call_args[name], allow_optional=True) + if allow_optional + else decoder(call_args[name]) + ) + return f(**call_args) + + return new_f + + +def encode_res(encoder): + """Create a decorator that applies `encoder` to the return value of the + decorated function. 
+ """ + + @decorator + async def new_f(f, *args, **kwargs): + res = await f(*args, **kwargs) + return encoder(res) + + return new_f + + +def block_height_decoder(data): + """Decode block height string, which can either be None, 'latest', 'earliest' or a hex number + of minor block height""" + if data is None or data == "latest": + return None + if data == "earliest": + return 0 + # TODO: support pending + return quantity_decoder(data) + + +def shard_id_decoder(data): + try: + return quantity_decoder(data) + except Exception: + return None + + +def eth_address_to_quarkchain_address_decoder(hex_str): + eth_hex = hex_str[2:] + if len(eth_hex) != 40: + raise InvalidParams("Addresses must be 40 or 0 bytes long") + return address_decoder("0x" + eth_hex + "00000001") + + +def _parse_log_request( + params: Dict, addr_decoder: Callable[[str], bytes] +) -> (bytes, bytes): + """Returns addresses and topics from a EVM log request.""" + addresses, topics = [], [] + if "address" in params: + if isinstance(params["address"], str): + addresses = [Address.deserialize(addr_decoder(params["address"]))] + elif isinstance(params["address"], list): + addresses = [ + Address.deserialize(addr_decoder(a)) for a in params["address"] + ] + if "topics" in params: + for topic_item in params["topics"]: + if isinstance(topic_item, str): + topics.append([data_decoder(topic_item)]) + elif isinstance(topic_item, list): + topics.append([data_decoder(tp) for tp in topic_item]) + return addresses, topics + + +public_methods = RpcMethods() +private_methods = RpcMethods() + + +# noinspection PyPep8Naming +class JSONRPCHttpServer: + @classmethod + async def start_public_server(cls, env, master_server): + server = cls( + env, + master_server, + env.cluster_config.JSON_RPC_PORT, + env.cluster_config.JSON_RPC_HOST, + public_methods, + ) + await server.start() + return server + + @classmethod + async def start_private_server(cls, env, master_server): + server = cls( + env, + master_server, + 
env.cluster_config.PRIVATE_JSON_RPC_PORT, + env.cluster_config.PRIVATE_JSON_RPC_HOST, + private_methods, + ) + await server.start() + return server + + @classmethod + async def start_test_server(cls, env, master_server): + methods = RpcMethods() + for method in public_methods.values(): + methods.add(method) + for method in private_methods.values(): + methods.add(method) + server = cls( + env, + master_server, + env.cluster_config.JSON_RPC_PORT, + env.cluster_config.JSON_RPC_HOST, + methods, + ) + await server.start() + return server + + def __init__( + self, env, master_server: MasterServer, port, host, methods: RpcMethods + ): + self.loop = asyncio.get_running_loop() + self.port = port + self.host = host + self.env = env + self.master = master_server + self.counters = dict() + + # Bind RPC handler functions to this instance + self.handlers = RpcMethods() + for rpc_name in methods: + func = methods[rpc_name] + self.handlers[rpc_name] = func.__get__(self, self.__class__) + + async def __handle(self, request): + request = await request.text() + Logger.info(request) + + d = dict() + try: + d = json.loads(request) + except Exception: + pass + method = d.get("method", "null") + if method in self.counters: + self.counters[method] += 1 + else: + self.counters[method] = 1 + # Use armor to prevent the handler from being cancelled when + # aiohttp server loses connection to client + response = await self.handlers.dispatch(d) + if response is None: + return web.Response() + if "error" in response: + Logger.error(response) + return web.json_response(response) + + async def start(self): + app = web.Application(client_max_size=JSON_RPC_CLIENT_REQUEST_MAX_SIZE) + cors = aiohttp_cors.setup(app) + route = app.router.add_post("/", self.__handle) + cors.add( + route, + { + "*": aiohttp_cors.ResourceOptions( + allow_credentials=True, + expose_headers=("X-Custom-Server-Header",), + allow_methods=["POST", "PUT"], + allow_headers=("X-Requested-With", "Content-Type"), + ) + }, + ) + 
self.runner = web.AppRunner(app, access_log=None) + await self.runner.setup() + site = web.TCPSite(self.runner, self.host, self.port) + await site.start() + + async def shutdown(self): + await self.runner.cleanup() + + # JSON RPC handlers + @public_methods.add + @decode_arg("quantity", quantity_decoder) + @encode_res(quantity_encoder) + async def echoQuantity(self, quantity): + return quantity + + @public_methods.add + @decode_arg("data", data_decoder) + @encode_res(data_encoder) + async def echoData(self, data): + return data + + @public_methods.add + async def networkInfo(self): + return { + "networkId": quantity_encoder( + self.master.env.quark_chain_config.NETWORK_ID + ), + "chainSize": quantity_encoder( + self.master.env.quark_chain_config.CHAIN_SIZE + ), + "shardSizes": [ + quantity_encoder(c.SHARD_SIZE) + for c in self.master.env.quark_chain_config.CHAINS + ], + "syncing": self.master.is_syncing(), + "mining": self.master.is_mining(), + "shardServerCount": len(self.master.slave_pool), + } + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + @encode_res(quantity_encoder) + async def getTransactionCount(self, address, block_height=None): + account_branch_data = await self.master.get_primary_account_data( + Address.deserialize(address), block_height + ) + return account_branch_data.transaction_count + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + async def getBalances(self, address, block_height=None): + account_branch_data = await self.master.get_primary_account_data( + Address.deserialize(address), block_height + ) + branch = account_branch_data.branch + balances = account_branch_data.token_balances + return { + "branch": quantity_encoder(branch.value), + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + 
"balances": balances_encoder(balances), + } + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + async def getAccountData(self, address, block_height=None, include_shards=False): + # do not allow specify height if client wants info on all shards + if include_shards and block_height is not None: + return None + + primary = None + address = Address.deserialize(address) + if not include_shards: + account_branch_data = await self.master.get_primary_account_data( + address, block_height + ) # type: AccountBranchData + branch = account_branch_data.branch + count = account_branch_data.transaction_count + + balances = account_branch_data.token_balances + primary = { + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + "balances": balances_encoder(balances), + "transactionCount": quantity_encoder(count), + "isContract": account_branch_data.is_contract, + "minedBlocks": quantity_encoder(account_branch_data.mined_blocks), + "poswMineableBlocks": quantity_encoder( + account_branch_data.posw_mineable_blocks + ), + } + return {"primary": primary} + + branch_to_account_branch_data = await self.master.get_account_data(address) + + shards = [] + for branch, account_branch_data in branch_to_account_branch_data.items(): + balances = account_branch_data.token_balances + data = { + "fullShardId": quantity_encoder(branch.get_full_shard_id()), + "shardId": quantity_encoder(branch.get_shard_id()), + "chainId": quantity_encoder(branch.get_chain_id()), + "balances": balances_encoder(balances), + "transactionCount": quantity_encoder( + account_branch_data.transaction_count + ), + "isContract": account_branch_data.is_contract, + } + shards.append(data) + + if branch.get_full_shard_id() == self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + address.full_shard_key + ): + primary = 
data.copy() + primary["minedBlocks"] = quantity_encoder( + account_branch_data.mined_blocks + ) + primary["poswMineableBlocks"] = quantity_encoder( + account_branch_data.posw_mineable_blocks + ) + + return {"primary": primary, "shards": shards} + + @public_methods.add + async def sendTransaction(self, data): + def get_data_default(key, decoder, default=None): + if key in data: + return decoder(data[key]) + return default + + to = get_data_default("to", recipient_decoder, b"") + startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) + gasprice = get_data_default("gasPrice", quantity_decoder, DEFAULT_GASPRICE) + value = get_data_default("value", quantity_decoder, 0) + data_ = get_data_default("data", data_decoder, b"") + v = get_data_default("v", quantity_decoder, 0) + r = get_data_default("r", quantity_decoder, 0) + s = get_data_default("s", quantity_decoder, 0) + nonce = get_data_default("nonce", quantity_decoder, None) + + to_full_shard_key = get_data_default( + "toFullShardKey", full_shard_key_decoder, None + ) + from_full_shard_key = get_data_default( + "fromFullShardKey", full_shard_key_decoder, None + ) + network_id = get_data_default( + "networkId", quantity_decoder, self.master.env.quark_chain_config.NETWORK_ID + ) + + gas_token_id = get_data_default( + "gasTokenId", quantity_decoder, self.env.quark_chain_config.genesis_token + ) + transfer_token_id = get_data_default( + "transferTokenId", + quantity_decoder, + self.env.quark_chain_config.genesis_token, + ) + + if nonce is None: + raise InvalidParams("Missing nonce") + if not (v and r and s): + raise InvalidParams("Missing v, r, s") + if from_full_shard_key is None: + raise InvalidParams("Missing fromFullShardKey") + + if to_full_shard_key is None: + to_full_shard_key = from_full_shard_key + + evm_tx = EvmTransaction( + nonce, + gasprice, + startgas, + to, + value, + data_, + v=v, + r=r, + s=s, + from_full_shard_key=from_full_shard_key, + to_full_shard_key=to_full_shard_key, + 
network_id=network_id, + gas_token_id=gas_token_id, + transfer_token_id=transfer_token_id, + ) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + success = await self.master.add_transaction(tx) + if not success: + return EMPTY_TX_ID + return id_encoder(tx.get_hash(), from_full_shard_key) + + @public_methods.add + @decode_arg("tx_data", data_decoder) + async def sendRawTransaction(self, tx_data): + evm_tx = rlp.decode(tx_data, EvmTransaction) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + success = await self.master.add_transaction(tx) + if not success: + return EMPTY_TX_ID + return id_encoder(tx.get_hash(), evm_tx.from_full_shard_key) + + @public_methods.add + @decode_arg("block_id", data_decoder) + @decode_arg("need_extra_info", bool_decoder) + async def getRootBlockById(self, block_id, need_extra_info=True): + block, extra_info = await self.master.get_root_block_by_height_or_hash( + None, block_id, need_extra_info + ) + if not block: + return None + return root_block_encoder(block, extra_info) + + @public_methods.add + @decode_arg("need_extra_info", bool_decoder) + async def getRootBlockByHeight(self, height=None, need_extra_info=True): + if height is not None: + height = quantity_decoder(height) + block, extra_info = await self.master.get_root_block_by_height_or_hash( + height, None, need_extra_info + ) + if not block: + return None + return root_block_encoder(block, extra_info) + + @public_methods.add + @decode_arg("block_id", id_decoder) + @decode_arg("include_transactions", bool_decoder) + @decode_arg("need_extra_info", bool_decoder) + async def getMinorBlockById( + self, block_id, include_transactions=False, need_extra_info=True + ): + block_hash, full_shard_key = block_id + try: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + except Exception: + return None + block, extra_info = await self.master.get_minor_block_by_hash( + block_hash, 
branch, need_extra_info + ) + if not block: + return None + return minor_block_encoder(block, include_transactions, extra_info) + + @public_methods.add + @decode_arg("full_shard_key", quantity_decoder) + @decode_arg("include_transactions", bool_decoder) + @decode_arg("need_extra_info", bool_decoder) + async def getMinorBlockByHeight( + self, + full_shard_key: int, + height=None, + include_transactions=False, + need_extra_info=True, + ): + if height is not None: + height = quantity_decoder(height) + try: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + except Exception: + return None + block, extra_info = await self.master.get_minor_block_by_height( + height, branch, need_extra_info + ) + if not block: + return None + return minor_block_encoder(block, include_transactions, extra_info) + + @public_methods.add + @decode_arg("tx_id", id_decoder) + async def getTransactionById(self, tx_id): + tx_hash, full_shard_key = tx_id + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) + if not minor_block: + return None + if len(minor_block.tx_list) <= i: + return None + return tx_encoder(minor_block, i) + + @public_methods.add + @decode_arg("block_height", block_height_decoder) + async def call(self, data, block_height=None): + return await self._call_or_estimate_gas( + is_call=True, block_height=block_height, **data + ) + + @public_methods.add + async def estimateGas(self, data): + return await self._call_or_estimate_gas(is_call=False, **data) + + @public_methods.add + async def getTransactionReceipt(self, tx_id): + id_bytes = data_decoder(tx_id) + if len(id_bytes) != 36: + raise InvalidParams("Invalid id encoding") + tx_hash, full_shard_key = ( + id_bytes[:32], + int.from_bytes(id_bytes[32:], byteorder="big"), + ) + branch = Branch( + 
self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + resp = await self.master.get_transaction_receipt(tx_hash, branch) + if not resp: + return None + minor_block, i, receipt = resp + + ret = receipt_encoder(minor_block, i, receipt) + if ret["transactionId"] is None: + ret["transactionId"] = tx_id + ret["transactionHash"] = data_encoder(tx_hash) + return ret + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + async def getLogs(self, data, full_shard_key): + return await self._get_logs(data, full_shard_key, decoder=address_decoder) + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("key", quantity_decoder) + @decode_arg("block_height", block_height_decoder) + # TODO: add block number + async def getStorageAt(self, address, key, block_height=None): + res = await self.master.get_storage_at( + Address.deserialize(address), key, block_height + ) + return data_encoder(res) if res is not None else None + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("block_height", block_height_decoder) + async def getCode(self, address, block_height=None): + res = await self.master.get_code(Address.deserialize(address), block_height) + return data_encoder(res) if res is not None else None + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + @decode_arg("start", data_decoder) + @decode_arg("limit", quantity_decoder) + async def getAllTransactions(self, full_shard_key, start="0x", limit="0xa"): + """ "start" should be the "next" in the response for fetching next page. + "start" can also be "0x" to fetch from the beginning (i.e., latest). 
+ """ + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + if limit > 20: + limit = 20 + result = await self.master.get_all_transactions(branch, start, limit) + if not result: + return None + tx_list, next = result + return { + "txList": [tx_detail_encoder(tx) for tx in tx_list], + "next": data_encoder(next), + } + + @public_methods.add + @decode_arg("address", address_decoder) + @decode_arg("start", data_decoder) + @decode_arg("limit", quantity_decoder) + @decode_arg("transfer_token_id", quantity_decoder, allow_optional=True) + async def getTransactionsByAddress( + self, address, start="0x", limit="0xa", transfer_token_id=None + ): + """ "start" should be the "next" in the response for fetching next page. + "start" can also be "0x" to fetch from the beginning (i.e., latest). + "start" can be "0x00" to fetch the pending outgoing transactions. + """ + address = Address.create_from(address) + if limit > 20: + limit = 20 + result = await self.master.get_transactions_by_address( + address, transfer_token_id, start, limit + ) + if not result: + return None + tx_list, next = result + return { + "txList": [tx_detail_encoder(tx) for tx in tx_list], + "next": data_encoder(next), + } + + @public_methods.add + async def getJrpcCalls(self): + return self.counters + + @public_methods.add + async def gasPrice(self, full_shard_key: str, token_id: Optional[str] = None): + full_shard_key = shard_id_decoder(full_shard_key) + if full_shard_key is None: + return None + parsed_token_id = ( + quantity_decoder(token_id) if token_id else token_id_encode("QKC") + ) + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + ret = await self.master.gas_price(branch, parsed_token_id) + if ret is None: + return None + return quantity_encoder(ret) + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + @decode_arg("header_hash", hash_decoder) + 
@decode_arg("nonce", quantity_decoder) + @decode_arg("mixhash", hash_decoder) + @decode_arg("signature", signature_decoder) + async def submitWork( + self, full_shard_key, header_hash, nonce, mixhash, signature=None + ): + branch = None # `None` means getting work from root chain + if full_shard_key is not None: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + return await self.master.submit_work( + branch, header_hash, nonce, mixhash, signature + ) + + @public_methods.add + @decode_arg("full_shard_key", shard_id_decoder) + @decode_arg("coinbase_addr", recipient_decoder, allow_optional=True) + async def getWork(self, full_shard_key, coinbase_addr=None): + branch = None # `None` means getting work from root chain + if full_shard_key is not None: + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + work, optional_divider = await self.master.get_work(branch, coinbase_addr) + if work is None: + return None + ret = [ + data_encoder(work.hash), + quantity_encoder(work.height), + quantity_encoder(work.difficulty), + ] + if optional_divider is not None: + ret.append(quantity_encoder(optional_divider)) + return ret + + @public_methods.add + @decode_arg("block_id", data_decoder) + async def getRootHashConfirmingMinorBlockById(self, block_id): + retv = self.master.root_state.db.get_root_block_confirming_minor_block(block_id) + return data_encoder(retv) if retv else None + + @public_methods.add + @decode_arg("tx_id", id_decoder) + async def getTransactionConfirmedByNumberRootBlocks(self, tx_id): + tx_hash, full_shard_key = tx_id + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + minor_block, i = await self.master.get_transaction_by_hash(tx_hash, branch) + if not minor_block: + return None + confirming_hash = ( + 
self.master.root_state.db.get_root_block_confirming_minor_block( + minor_block.header.get_hash() + + minor_block.header.branch.get_full_shard_id().to_bytes( + 4, byteorder="big" + ) + ) + ) + if confirming_hash is None: + return quantity_encoder(0) + confirming_header = self.master.root_state.db.get_root_block_header_by_hash( + confirming_hash + ) + canonical_hash = self.master.root_state.db.get_root_block_hash_by_height( + confirming_header.height + ) + if canonical_hash != confirming_hash: + return quantity_encoder(0) + tip = self.master.root_state.tip + return quantity_encoder(tip.height - confirming_header.height + 1) + + ######################## Ethereum JSON RPC ######################## + + @public_methods.add + async def net_version(self): + return quantity_encoder(self.master.env.quark_chain_config.NETWORK_ID) + + @public_methods.add + async def eth_gasPrice(self, shard): + return await self.gasPrice(shard, quantity_encoder(token_id_encode("QKC"))) + + @public_methods.add + @decode_arg("block_height", block_height_decoder) + @decode_arg("include_transactions", bool_decoder) + async def eth_getBlockByNumber(self, block_height, include_transactions): + """ + NOTE: only support block_id "latest" or hex + """ + + def block_transcoder(block): + """ + QuarkChain Block => ETH Block + """ + return { + **block, + "number": block["height"], + "parentHash": block["hashPrevMinorBlock"], + "sha3Uncles": "", + "logsBloom": "", + "transactionsRoot": block["hashMerkleRoot"], # ? + "stateRoot": block["hashEvmStateRoot"], # ? 
+ } + + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key(0) + ) + block, _ = await self.master.get_minor_block_by_height( + block_height, branch, need_extra_info=False + ) + if block is None: + return None + return block_transcoder(minor_block_encoder(block)) + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("shard", shard_id_decoder) + @encode_res(quantity_encoder) + async def eth_getBalance(self, address, shard=None): + address = Address.deserialize(address) + if shard is not None: + address = Address(address.recipient, shard) + account_branch_data = await self.master.get_primary_account_data(address) + balance = account_branch_data.token_balances.balance_map.get( + token_id_encode("QKC"), 0 + ) + return balance + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("shard", shard_id_decoder) + @encode_res(quantity_encoder) + async def eth_getTransactionCount(self, address, shard=None): + address = Address.deserialize(address) + if shard is not None: + address = Address(address.recipient, shard) + account_branch_data = await self.master.get_primary_account_data(address) + return account_branch_data.transaction_count + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("shard", shard_id_decoder) + async def eth_getCode(self, address, shard=None): + addr = Address.deserialize(address) + if shard is not None: + addr = Address(addr.recipient, shard) + res = await self.master.get_code(addr, None) + return data_encoder(res) if res is not None else None + + @public_methods.add + @decode_arg("shard", shard_id_decoder) + async def eth_call(self, data, shard=None): + """Returns the result of the transaction application without putting in block chain""" + data = self._convert_eth_call_data(data, shard) + return await self.call(data) + + @public_methods.add + async def 
eth_sendRawTransaction(self, tx_data): + return await self.sendRawTransaction(tx_data) + + @public_methods.add + async def eth_getTransactionReceipt(self, tx_id): + return await self.getTransactionReceipt(tx_id) + + @public_methods.add + @decode_arg("shard", shard_id_decoder) + async def eth_estimateGas(self, data, shard): + data = self._convert_eth_call_data(data, shard) + return await self.estimateGas(**data) + + @public_methods.add + @decode_arg("shard", shard_id_decoder) + async def eth_getLogs(self, data, shard): + return await self._get_logs( + data, shard, decoder=eth_address_to_quarkchain_address_decoder + ) + + @public_methods.add + @decode_arg("address", eth_address_to_quarkchain_address_decoder) + @decode_arg("key", quantity_decoder) + @decode_arg("shard", shard_id_decoder) + async def eth_getStorageAt(self, address, key, shard=None): + addr = Address.deserialize(address) + if shard is not None: + addr = Address(addr.recipient, shard) + res = await self.master.get_storage_at(addr, key, None) + return data_encoder(res) if res is not None else None + + ######################## Private Methods ######################## + + @private_methods.add + @decode_arg("branch", quantity_decoder) + @decode_arg("block_data", data_decoder) + async def addBlock(self, branch, block_data): + if branch == 0: + block = RootBlock.deserialize(block_data) + return await self.master.add_root_block_from_miner(block) + return await self.master.add_raw_minor_block(Branch(branch), block_data) + + @private_methods.add + async def getPeers(self): + peer_list = [] + for peer_id, peer in self.master.network.active_peer_pool.items(): + peer_list.append( + { + "id": data_encoder(peer_id), + "ip": quantity_encoder(int(peer.ip)), + "port": quantity_encoder(peer.port), + } + ) + return {"peers": peer_list} + + @private_methods.add + async def getSyncStats(self): + return self.master.synchronizer.get_stats() + + @private_methods.add + async def getStats(self): + # This JRPC doesn't follow the 
standard encoding + return await self.master.get_stats() + + @private_methods.add + async def getBlockCount(self): + # This JRPC doesn't follow the standard encoding + return self.master.get_block_count() + + @private_methods.add + async def createTransactions(self, **load_test_data): + """Create transactions for load testing""" + + def get_data_default(key, decoder, default=None): + if key in load_test_data: + return decoder(load_test_data[key]) + return default + + num_tx_per_shard = load_test_data["numTxPerShard"] + x_shard_percent = load_test_data["xShardPercent"] + to = get_data_default("to", recipient_decoder, b"") + startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) + gasprice = get_data_default( + "gasPrice", quantity_decoder, int(DEFAULT_GASPRICE / 10) + ) + value = get_data_default("value", quantity_decoder, 0) + data = get_data_default("data", data_decoder, b"") + # FIXME: can't support specifying full shard ID to 0. currently is regarded as not set + from_full_shard_key = get_data_default( + "fromFullShardKey", full_shard_key_decoder, 0 + ) + gas_token_id = get_data_default( + "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token + ) + transfer_token_id = get_data_default( + "transfer_token_id", + quantity_decoder, + self.env.quark_chain_config.genesis_token, + ) + # build sample tx + evm_tx_sample = EvmTransaction( + 0, + gasprice, + startgas, + to, + value, + data, + from_full_shard_key=from_full_shard_key, + gas_token_id=gas_token_id, + transfer_token_id=transfer_token_id, + ) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx_sample)) + return await self.master.create_transactions( + num_tx_per_shard, x_shard_percent, tx + ) + + @private_methods.add + async def setTargetBlockTime(self, root_block_time=0, minor_block_time=0): + """0 will not update existing value""" + return await self.master.set_target_block_time( + root_block_time, minor_block_time + ) + + @public_methods.add + 
@decode_arg("block_id", id_decoder) + @decode_arg("root_block_id", data_decoder, allow_optional=True) + @decode_arg("token_id", quantity_decoder) # default: QKC + @decode_arg("start", data_decoder, allow_optional=True) + @decode_arg("limit", quantity_decoder) + async def getTotalBalance( + self, block_id, root_block_id=None, token_id="0x8bb0", start=None, limit="0x64" + ): + if limit > 10000: + limit = 10000 + block_hash, full_shard_key = block_id + full_shard_id = ( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + try: + result = await self.master.get_total_balance( + Branch(full_shard_id), block_hash, root_block_id, token_id, start, limit + ) + except Exception: + raise ServerError + if not result: + raise InvalidRequest + total_balance, next_start = result + return { + "totalBalance": quantity_encoder(total_balance), + "next": data_encoder(next_start), + } + + @private_methods.add + async def setMining(self, mining): + """Turn on / off mining""" + return await self.master.set_mining(mining) + + @private_methods.add + async def getJrpcCalls(self): + return self.counters + + @private_methods.add + async def getKadRoutingTableSize(self): + """Returns number of nodes in the p2p discovery routing table""" + if not isinstance(self.master.network, P2PManager): + raise InvalidRequest("network is not P2P") + return len(self.master.network.server.discovery.proto.routing) + + @private_methods.add + async def getKadRoutingTable(self): + """returns a list of nodes in the p2p discovery routing table, in the enode format + eg. 
"enode://PUBKEY@IP:PORT" + """ + if not isinstance(self.master.network, P2PManager): + raise InvalidRequest("network is not P2P") + return [n.to_uri() for n in self.master.network.server.discovery.proto.routing] + + @public_methods.add + async def getTotalSupply(self): + total_supply = self.master.get_total_supply() + return quantity_encoder(total_supply) if total_supply else None + + @staticmethod + def _convert_eth_call_data(data, shard): + to_address = Address.create_from( + eth_address_to_quarkchain_address_decoder(data["to"]) + ) + if shard: + to_address = Address(to_address.recipient, shard) + data["to"] = "0x" + to_address.serialize().hex() + if "from" in data: + from_address = Address.create_from( + eth_address_to_quarkchain_address_decoder(data["from"]) + ) + if shard: + from_address = Address(from_address.recipient, shard) + data["from"] = "0x" + from_address.serialize().hex() + return data + + async def _get_logs(self, data, full_shard_key, decoder: Callable[[str], bytes]): + start_block = block_height_decoder(data.get("fromBlock", "latest")) + end_block = block_height_decoder(data.get("toBlock", "latest")) + addresses, topics = _parse_log_request(data, decoder) + if full_shard_key is None: + raise InvalidParams("Full shard key is required to get logs") + addresses = [Address(a.recipient, full_shard_key) for a in addresses] + branch = Branch( + self.master.env.quark_chain_config.get_full_shard_id_by_full_shard_key( + full_shard_key + ) + ) + logs = await self.master.get_logs( + addresses, topics, start_block, end_block, branch + ) + if logs is None: + return None + return loglist_encoder(logs) + + async def _call_or_estimate_gas(self, is_call: bool, **data): + """Returns the result of the transaction application without putting in block chain""" + if not isinstance(data, dict): + raise InvalidParams("Transaction must be an object") + + def get_data_default(key, decoder, default=None): + if key in data: + return decoder(data[key]) + return default + + to 
= get_data_default("to", address_decoder, None) + if to is None: + to_full_shard_byte = b"\x00" * 4 + to = b"" + else: + to_full_shard_byte = to[20:] + to = to[:20] + + to_full_shard_key = int.from_bytes(to_full_shard_byte, "big") + gas = get_data_default("gas", quantity_decoder, 0) + gas_price = get_data_default("gasPrice", quantity_decoder, 0) + value = get_data_default("value", quantity_decoder, 0) + data_ = get_data_default("data", data_decoder, b"") + sender = get_data_default( + "from", address_decoder, b"\x00" * 20 + to_full_shard_byte + ) + sender_address = Address.create_from(sender) + from_full_shard_key = sender_address.full_shard_key + + gas_token_id = get_data_default( + "gas_token_id", quantity_decoder, self.env.quark_chain_config.genesis_token + ) + transfer_token_id = get_data_default( + "transfer_token_id", + quantity_decoder, + self.env.quark_chain_config.genesis_token, + ) + + network_id = self.master.env.quark_chain_config.NETWORK_ID + + nonce = 0 # slave will fill in the real nonce + evm_tx = EvmTransaction( + nonce, + gas_price, + gas, + to, + value, + data_, + from_full_shard_key=from_full_shard_key, + to_full_shard_key=to_full_shard_key, + network_id=network_id, + gas_token_id=gas_token_id, + transfer_token_id=transfer_token_id, + ) + + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + if is_call: + # xshard not supported for now + is_same_shard = self.master.env.quark_chain_config.is_same_full_shard( + to_full_shard_key, from_full_shard_key + ) + if not is_same_shard: + raise InvalidParams("Call cross-shard tx not supported yet") + res = await self.master.execute_transaction( + tx, sender_address, data["block_height"] + ) + return data_encoder(res) if res is not None else None + else: # estimate gas + res = await self.master.estimate_gas(tx, sender_address) + return quantity_encoder(res) if res is not None else None + + +class JSONRPCWebsocketServer: + @classmethod + async def start_websocket_server(cls, env, 
slave_server): + server = cls( + env, + slave_server, + env.slave_config.WEBSOCKET_JSON_RPC_PORT, + env.slave_config.HOST, + public_methods, + ) + await server.start() + return server + + def __init__( + self, env, slave_server: SlaveServer, port, host, methods: RpcMethods + ): + self.loop = asyncio.get_running_loop() + self.port = port + self.host = host + self.env = env + self.slave = slave_server + self.counters = dict() + self.pending_tx_cache = LRUCache(maxsize=1024) + + # Bind RPC handler functions to this instance + self.handlers = RpcMethods() + for rpc_name in methods: + func = methods[rpc_name] + self.handlers[rpc_name] = func.__get__(self, self.__class__) + + self.shard_subscription_managers = self.slave.shard_subscription_managers + + async def __handle(self, websocket): + sub_ids = dict() # per-websocket var, Dict[sub_id, full_shard_id] + try: + async for message in websocket: + Logger.info(message) + + d = dict() + try: + d = json.loads(message) + except Exception: + raise InvalidParams("Cannot parse message as JSON") + method = d.get("method", "null") + if method in self.counters: + self.counters[method] += 1 + else: + self.counters[method] = 1 + msg_id = d.get("id", 0) + + response = await self.handlers.dispatch( + d, + context={ + "websocket": websocket, + "msg_id": msg_id, + "sub_ids": sub_ids, + }, + ) + + if response is None: + continue + if "error" in response: + Logger.error(response) + else: + if method == "subscribe": + sub_id = response["result"] + full_shard_id = shard_id_decoder(d.get("params")[1]) + sub_ids[sub_id] = full_shard_id + elif method == "unsubscribe": + sub_id = d.get("params")[0] + del sub_ids[sub_id] + await websocket.send(json.dumps(response)) + finally: # current websocket connection terminates, remove subscribers in this connection + for sub_id, full_shard_id in sub_ids.items(): + try: + shard_subscription_manager = self.shard_subscription_managers[ + full_shard_id + ] + 
shard_subscription_manager.remove_subscriber(sub_id) + except Exception: + pass + + async def start(self): + start_server = websockets.serve(self.__handle, self.host, self.port) + await start_server + + def shutdown(self): + pass # TODO + + @staticmethod + def response_transcoder(sub_id, result): + return { + "jsonrpc": "2.0", + "method": "subscription", + "params": {"subscription": sub_id, "result": result}, + } + + @public_methods.add + async def subscribe(self, sub_type, full_shard_id, params=None, context=None): + assert context is not None + full_shard_id = shard_id_decoder(full_shard_id) + if full_shard_id is None: + raise InvalidParams("Invalid full shard ID") + branch = Branch(full_shard_id) + shard = self.slave.shards.get(branch, None) + if not shard: + raise InvalidParams("Full shard ID not found") + + websocket = context["websocket"] + sub_id = "0x" + uuid.uuid4().hex + shard_subscription_manager = self.shard_subscription_managers[full_shard_id] + + extra = None + if sub_type == SUB_LOGS: + addresses, topics = _parse_log_request(params, address_decoder) + addresses = [Address(a.recipient, full_shard_id) for a in addresses] + extra = lambda candidate_blocks: LogFilter.create_from_block_candidates( + shard.state.db, addresses, topics, candidate_blocks + ) + + shard_subscription_manager.add_subscriber(sub_type, sub_id, websocket, extra) + return sub_id + + @public_methods.add + async def unsubscribe(self, sub_id, context=None): + assert context is not None + sub_ids = context["sub_ids"] + if sub_id not in sub_ids: + raise InvalidParams("Subscription ID not found") + + full_shard_id = sub_ids[sub_id] + shard_subscription_manager = self.shard_subscription_managers[full_shard_id] + shard_subscription_manager.remove_subscriber(sub_id) + + return True diff --git a/quarkchain/cluster/tests/test_jsonrpc.py index 3d4e6844b..663cc7cee 100644 --- a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py 
@@ -1,1827 +1,1823 @@ -import json -import unittest -from contextlib import contextmanager -import websockets - -from quarkchain.cluster.cluster_config import ClusterConfig -from quarkchain.cluster.jsonrpc import ( - EMPTY_TX_ID, - JSONRPCHttpServer, - JSONRPCWebsocketServer, - quantity_encoder, - data_encoder, -) -from quarkchain.cluster.miner import DoubleSHA256, MiningWork -from quarkchain.cluster.tests.test_utils import ( - create_transfer_transaction, - ClusterContext, - create_contract_creation_transaction, - create_contract_creation_with_event_transaction, - create_contract_with_storage_transaction, -) -from quarkchain.config import ConsensusType -from quarkchain.core import ( - Address, - Identity, - SerializedEvmTransaction, - TypedTransaction, -) -from quarkchain.env import DEFAULT_ENV -from quarkchain.evm.messages import mk_contract_address -from quarkchain.evm.transactions import Transaction as EvmTransaction -from quarkchain.utils import call_async, sha3_256, token_id_encode -from quarkchain.jsonrpc_client import AsyncJsonRpcClient, JsonRpcError - - -@contextmanager -def jrpc_http_server_context(master): - env = DEFAULT_ENV.copy() - env.cluster_config = ClusterConfig() - env.cluster_config.JSON_RPC_PORT = 38391 - # to pass the circleCi - env.cluster_config.JSON_RPC_HOST = "127.0.0.1" - server = call_async(JSONRPCHttpServer.start_test_server(env, master)) - try: - yield server - finally: - call_async(server.shutdown()) - - -rpc_client = AsyncJsonRpcClient("http://localhost:38391") - -def send_request(method, params=None): - if params is None: - params = [] - if isinstance(params, dict): - return call_async(rpc_client.call_with_dict_params(method, params)) - return call_async(rpc_client.call(method, *params)) - - -class TestJSONRPCHttp(unittest.TestCase): - def test_getTransactionCount(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - 
with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - stats = call_async(master.get_stats()) - self.assertTrue("posw" in json.dumps(stats)) - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - for i in range(3): - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertEqual(i + 1, block.header.height) - self.assertTrue( - call_async(clusters[0].get_shard(2 | 0).add_block(block)) - ) - - response = send_request( - "getTransactionCount", ["0x" + acc2.serialize().hex()] - ) - self.assertEqual(response, "0x0") - - response = send_request( - "getTransactionCount", ["0x" + acc1.serialize().hex()] - ) - self.assertEqual(response, "0x3") - response = send_request( - "getTransactionCount", ["0x" + acc1.serialize().hex(), "latest"] - ) - self.assertEqual(response, "0x3") - - for i in range(3): - response = send_request( - "getTransactionCount", ["0x" + acc1.serialize().hex(), hex(i + 1)] - ) - self.assertEqual(response, hex(i + 1)) - - def test_getBalance(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - response = send_request("getBalances", ["0x" + acc1.serialize().hex()]) - self.assertListEqual( - response["balances"], - [{"tokenId": "0x8bb0", "tokenStr": "QKC", "balance": "0xf4240"}], - ) - - response = send_request("eth_getBalance", ["0x" + acc1.recipient.hex()]) - self.assertEqual(response, "0xf4240") - - def test_sendTransaction(self): - id1 = 
Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - master = clusters[0].master - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - evm_tx = EvmTransaction( - nonce=0, - gasprice=6, - startgas=30000, - to=acc2.recipient, - value=15, - data=b"", - from_full_shard_key=acc1.full_shard_key, - to_full_shard_key=acc2.full_shard_key, - network_id=slaves[0].env.quark_chain_config.NETWORK_ID, - gas_token_id=master.env.quark_chain_config.genesis_token, - transfer_token_id=master.env.quark_chain_config.genesis_token, - ) - evm_tx.sign(id1.get_key()) - request = dict( - to="0x" + acc2.recipient.hex(), - gasPrice="0x6", - gas=hex(30000), - value="0xf", # 15 - v=quantity_encoder(evm_tx.v), - r=quantity_encoder(evm_tx.r), - s=quantity_encoder(evm_tx.s), - nonce="0x0", - fromFullShardKey="0x00000000", - toFullShardKey="0x00000001", - network_id=hex(slaves[0].env.quark_chain_config.NETWORK_ID), - ) - tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) - response = send_request("sendTransaction", [request]) - - self.assertEqual(response, "0x" + tx.get_hash().hex() + "00000000") - state = clusters[0].get_shard_state(2 | 0) - self.assertEqual(len(state.tx_queue), 1) - self.assertEqual( - state.tx_queue.pop_transaction( - state.get_transaction_count - ).tx.to_evm_tx(), - evm_tx, - ) - - def test_sendTransaction_with_bad_signature(self): - """ sendTransaction validates signature """ - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, 
jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = dict( - to="0x" + acc2.recipient.hex(), - gasPrice="0x6", - gas=hex(30000), - value="0xf", - v="0x1", - r="0x2", - s="0x3", - nonce="0x0", - fromFullShardKey="0x00000000", - toFullShardKey="0x00000001", - ) - self.assertEqual(send_request("sendTransaction", [request]), EMPTY_TX_ID) - self.assertEqual(len(clusters[0].get_shard_state(2 | 0).tx_queue), 0) - - def test_sendTransaction_missing_from_full_shard_key(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = dict( - to="0x" + acc1.recipient.hex(), - gasPrice="0x6", - gas=hex(30000), - value="0xf", - v="0x1", - r="0x2", - s="0x3", - nonce="0x0", - ) - - with self.assertRaises(Exception): - send_request("sendTransaction", [request]) - - def test_getMinorBlock(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - 
self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - # By id - for need_extra_info in [True, False]: - resp = send_request( - "getMinorBlockById", - [ - "0x" + block1.header.get_hash().hex() + "0" * 8, - False, - need_extra_info, - ], - ) - self.assertEqual( - resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" - ) - - resp = send_request( - "getMinorBlockById", - ["0x" + block1.header.get_hash().hex() + "0" * 8, True], - ) - self.assertEqual( - resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() - ) - - resp = send_request("getMinorBlockById", ["0x" + "ff" * 36, True]) - self.assertIsNone(resp) - - # By height - for need_extra_info in [True, False]: - resp = send_request( - "getMinorBlockByHeight", ["0x0", "0x1", False, need_extra_info] - ) - self.assertEqual( - resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" - ) - - resp = send_request("getMinorBlockByHeight", ["0x0", "0x1", True]) - self.assertEqual( - resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() - ) - - resp = send_request("getMinorBlockByHeight", ["0x1", "0x2", False]) - self.assertIsNone(resp) - resp = send_request("getMinorBlockByHeight", ["0x0", "0x4", False]) - self.assertIsNone(resp) - - def test_getRootblockConfirmationIdAndCount(self): - # TODO test root chain forks - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - 
value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - tx_id = ( - "0x" - + tx.get_hash().hex() - + acc1.full_shard_key.to_bytes(4, "big").hex() - ) - resp = send_request("getTransactionById", [tx_id]) - self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) - self.assertEqual( - resp["blockId"], - "0x" - + block1.header.get_hash().hex() - + block1.header.branch.get_full_shard_id() - .to_bytes(4, byteorder="big") - .hex(), - ) - minor_hash = resp["blockId"] - - # zero root block confirmation - resp_hash = send_request( - "getRootHashConfirmingMinorBlockById", [minor_hash] - ) - self.assertIsNone( - resp_hash, "should return None for unconfirmed minor blocks" - ) - resp_count = send_request( - "getTransactionConfirmedByNumberRootBlocks", [tx_id] - ) - self.assertEqual(resp_count, "0x0") - - # 1 root block confirmation - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - resp_hash = send_request( - "getRootHashConfirmingMinorBlockById", [minor_hash] - ) - self.assertIsNotNone(resp_hash, "confirmed by root block") - self.assertEqual(resp_hash, "0x" + block.header.get_hash().hex()) - resp_count = send_request( - "getTransactionConfirmedByNumberRootBlocks", [tx_id] - ) - self.assertEqual(resp_count, "0x1") - - # 2 root block confirmation - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - resp_hash = send_request( - "getRootHashConfirmingMinorBlockById", [minor_hash] - ) - self.assertIsNotNone(resp_hash, "confirmed by root block") - self.assertNotEqual(resp_hash, "0x" + block.header.get_hash().hex()) - resp_count = send_request( - "getTransactionConfirmedByNumberRootBlocks", [tx_id] - ) - self.assertEqual(resp_count, "0x2") - - 
def test_getTransactionById(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - self.assertEqual( - call_async(master.get_primary_account_data(acc1)).transaction_count, 0 - ) - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - resp = send_request( - "getTransactionById", - [ - "0x" - + tx.get_hash().hex() - + acc1.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) - - def test_call_success(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - - response = send_request( - "call", [{"to": "0x" + acc1.serialize().hex(), "gas": hex(21000)}] - ) - - self.assertEqual(response, "0x") - self.assertEqual( - len(clusters[0].get_shard_state(2 | 0).tx_queue), - 0, - "should not affect tx queue", - ) - - def test_call_success_default_gas(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - - # gas is not specified in the request - response = send_request( - "call", [{"to": "0x" + acc1.serialize().hex()}, "latest"] - ) - - 
self.assertEqual(response, "0x") - self.assertEqual( - len(clusters[0].get_shard_state(2 | 0).tx_queue), - 0, - "should not affect tx queue", - ) - - def test_call_failure(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - - # insufficient gas - response = send_request( - "call", [{"to": "0x" + acc1.serialize().hex(), "gas": "0x1"}, None] - ) - - self.assertIsNone(response, "failed tx should return None") - self.assertEqual( - len(clusters[0].get_shard_state(2 | 0).tx_queue), - 0, - "should not affect tx queue", - ) - - def test_getTransactionReceipt_not_exist(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + bytes(36).hex()]) - self.assertIsNone(resp) - - def test_getTransactionReceipt_on_transfer(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request( - endpoint, 
- [ - "0x" - + tx.get_hash().hex() - + acc1.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0x5208") - self.assertIsNone(resp["contractAddress"]) - - def test_getTransactionReceipt_on_xshard_transfer_before_enabling_EVM(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_from_identity(id1, full_shard_key=0x00010000) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - # disable EVM to have fake xshard receipts - master.env.quark_chain_config.ENABLE_EVM_TIMESTAMP = 2 ** 64 - 1 - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - s1, s2 = ( - clusters[0].get_shard_state(2 | 0), - clusters[0].get_shard_state(0x00010002), - ) - tx_gen = lambda s, f, t: create_transfer_transaction( - shard_state=s, - key=id1.get_key(), - from_address=f, - to_address=t, - gas=21000 if f == t else 30000, - value=12345, - ) - tx1 = tx_gen(s1, acc1, acc2) - self.assertTrue(slaves[0].add_tx(tx1)) - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) - - root_block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - - call_async(master.add_root_block(root_block)) - - tx2 = tx_gen(s2, acc2, acc2) - self.assertTrue(slaves[0].add_tx(tx2)) - b3 = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=0x00010002) - ) - self.assertTrue(call_async(clusters[0].get_shard(0x00010002).add_block(b3))) - - # in-shard tx 21000 + receiving x-shard tx 9000 - self.assertEqual(s2.evm_state.gas_used, 
30000) - self.assertEqual(s2.evm_state.xshard_receive_gas_used, 9000) - - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request( - endpoint, - [ - "0x" - + tx2.get_hash().hex() - + acc2.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["transactionHash"], "0x" + tx2.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], hex(30000)) - self.assertEqual(resp["gasUsed"], hex(21000)) - self.assertIsNone(resp["contractAddress"]) - - # query xshard tx receipt on the target shard - resp = send_request( - endpoint, - [ - "0x" - + tx1.get_hash().hex() - + acc2.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["status"], "0x1") - # other fields are fake - self.assertEqual(resp["cumulativeGasUsed"], hex(0)) - self.assertEqual(resp["gasUsed"], hex(0)) - - def test_getTransactionReceipt_on_xshard_transfer_after_enabling_EVM(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_from_identity(id1, full_shard_key=1) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - s1, s2 = ( - clusters[0].get_shard_state(2 | 0), - clusters[0].get_shard_state(2 | 1), - ) - tx = create_transfer_transaction( - shard_state=s1, - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - # source shard - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) - # root chain - root_block = call_async( - master.get_next_block_to_mine(address=acc1, 
branch_value=None) - ) - call_async(master.add_root_block(root_block)) - # target shard - b3 = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=0b11) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(b3))) - - # query xshard tx receipt on the target shard - resp = send_request( - "getTransactionReceipt", - [ - "0x" - + tx.get_hash().hex() - + acc2.full_shard_key.to_bytes(4, "big").hex() - ], - ) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["transactionIndex"], "0x3") - self.assertEqual(resp["cumulativeGasUsed"], hex(9000)) - self.assertEqual(resp["gasUsed"], hex(9000)) - - def test_getTransactionReceipt_on_contract_creation(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - to_full_shard_key = acc1.full_shard_key + 2 - tx = create_contract_creation_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=to_full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0x213eb") - - contract_address = mk_contract_address( - acc1.recipient, 0, to_full_shard_key - ) - self.assertEqual( - resp["contractAddress"], - "0x" - + contract_address.hex() - + to_full_shard_key.to_bytes(4, "big").hex(), - ) - - def 
test_getTransactionReceipt_on_xshard_contract_creation(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - # Add a root block to update block gas limit for xshard tx throttling - # so that the following tx can be processed - root_block = call_async( - master.get_next_block_to_mine(acc1, branch_value=None) - ) - call_async(master.add_root_block(root_block)) - - to_full_shard_key = acc1.full_shard_key + 1 - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=to_full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) - - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0x11374") - self.assertIsNone(resp["contractAddress"]) - - # x-shard contract creation should succeed. 
check target shard - root_block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) # root chain - call_async(master.add_root_block(root_block)) - block2 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b11) - ) # target shard - self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block2))) - for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): - resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000003"]) - self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) - self.assertEqual(resp["status"], "0x1") - self.assertEqual(resp["cumulativeGasUsed"], "0xc515") - self.assertIsNotNone(resp["contractAddress"]) - - def test_getLogs(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - expected_log_parts = { - "logIndex": "0x0", - "transactionIndex": "0x0", - "blockNumber": "0x1", - "blockHeight": "0x1", - "data": "0x", - } - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - # Add a root block to update block gas limit for xshard tx throttling - # so that the following tx can be processed - root_block = call_async( - master.get_next_block_to_mine(acc1, branch_value=None) - ) - call_async(master.add_root_block(root_block)) - - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for using_eth_endpoint in (True, False): - 
shard_id = hex(acc1.full_shard_key) - if using_eth_endpoint: - req = lambda o: send_request("eth_getLogs", [o, shard_id]) - else: - # `None` needed to bypass some request modification - req = lambda o: send_request("getLogs", [o, shard_id]) - - # no filter object as wild cards - resp = req({}) - self.assertEqual(1, len(resp)) - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - - # filter with from/to blocks - resp = req({"fromBlock": "0x0", "toBlock": "0x1"}) - self.assertEqual(1, len(resp)) - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - resp = req({"fromBlock": "0x0", "toBlock": "0x0"}) - self.assertEqual(0, len(resp)) - - # filter by contract address - contract_addr = mk_contract_address( - acc1.recipient, 0, acc1.full_shard_key - ) - filter_obj = { - "address": "0x" - + contract_addr.hex() - + ( - "" - if using_eth_endpoint - else hex(acc1.full_shard_key)[2:].zfill(8) - ) - } - resp = req(filter_obj) - self.assertEqual(1, len(resp)) - - # filter by topics - filter_obj = { - "topics": [ - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" - ] - } - filter_obj_nested = { - "topics": [ - [ - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" - ] - ] - } - for f in (filter_obj, filter_obj_nested): - resp = req(f) - self.assertEqual(1, len(resp)) - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - self.assertEqual( - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", - resp[0]["topics"][0], - ) - - # xshard creation and check logs: shard 0 -> shard 1 - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key + 1, - ) - self.assertTrue(slaves[0].add_tx(tx)) - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) # source shard - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - 
root_block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) # root chain - call_async(master.add_root_block(root_block)) - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b11) - ) # target shard - self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block))) - - req = lambda o: send_request("getLogs", [o, hex(0b11)]) - # no filter object as wild cards - resp = req({}) - self.assertEqual(1, len(resp)) - expected_log_parts["transactionIndex"] = "0x3" # after root block coinbase - expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() - expected_log_parts["blockHash"] = "0x" + block.header.get_hash().hex() - self.assertTrue(expected_log_parts.items() <= resp[0].items()) - self.assertEqual(2, len(resp[0]["topics"])) - # missing shard ID should fail - for endpoint in ("getLogs", "eth_getLogs"): - with self.assertRaises(JsonRpcError): - send_request(endpoint, [{}]) - with self.assertRaises(JsonRpcError): - send_request(endpoint, [{}, None]) - - def test_estimateGas(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - payload = {"to": "0x" + acc1.serialize().hex()} - response = send_request("estimateGas", [payload]) - self.assertEqual(response, "0x5208") # 21000 - # cross-shard - from_addr = "0x" + acc1.address_in_shard(1).serialize().hex() - payload["from"] = from_addr - response = send_request("estimateGas", [payload]) - self.assertEqual(response, "0x7530") # 30000 - - def test_getStorageAt(self): - key = bytes.fromhex( - "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" - ) - id1 = Identity.create_from_key(key) - acc1 = Address.create_from_identity(id1, full_shard_key=0) - created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" - - with ClusterContext( - 1, acc1, 
small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = create_contract_with_storage_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for using_eth_endpoint in (True, False): - if using_eth_endpoint: - req = lambda k: send_request( - "eth_getStorageAt", [created_addr[:-8], k, "0x0"] - ) - else: - req = lambda k: send_request("getStorageAt", [created_addr, k]) - - # first storage - response = req("0x0") - # equals 1234 - self.assertEqual( - response, - "0x00000000000000000000000000000000000000000000000000000000000004d2", - ) - - # mapping storage - k = sha3_256( - bytes.fromhex(acc1.recipient.hex().zfill(64) + "1".zfill(64)) - ) - response = req("0x" + k.hex()) - self.assertEqual( - response, - "0x000000000000000000000000000000000000000000000000000000000000162e", - ) - - # doesn't exist - response = req("0x3") - self.assertEqual( - response, - "0x0000000000000000000000000000000000000000000000000000000000000000", - ) - - def test_getCode(self): - key = bytes.fromhex( - "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" - ) - id1 = Identity.create_from_key(key) - acc1 = Address.create_from_identity(id1, full_shard_key=0) - created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = create_contract_with_storage_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - 
to_full_shard_key=acc1.full_shard_key, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for using_eth_endpoint in (True, False): - if using_eth_endpoint: - resp = send_request("eth_getCode", [created_addr[:-8], "0x0"]) - else: - resp = send_request("getCode", [created_addr]) - - self.assertEqual( - resp, - "0x6080604052600080fd00a165627a7a72305820a6ef942c101f06333ac35072a8ff40332c71d0e11cd0e6d86de8cae7b42696550029", - ) - - def test_gasPrice(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - # run for multiple times - for _ in range(3): - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=0, - gas_price=12, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue( - call_async(clusters[0].get_shard(2 | 0).add_block(block)) - ) - - for using_eth_endpoint in (True, False): - if using_eth_endpoint: - resp = send_request("eth_gasPrice", ["0x0"]) - else: - resp = send_request( - "gasPrice", ["0x0", quantity_encoder(token_id_encode("QKC"))] - ) - - self.assertEqual(resp, "0xc") - - def test_getWork_and_submitWork(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - - tx = 
create_transfer_transaction( - shard_state=clusters[0].get_shard_state(1 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc1, - value=0, - gas_price=12, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - for shard_id in ["0x0", None]: # shard, then root - resp = send_request("getWork", [shard_id]) - self.assertEqual(resp[1:], ["0x1", "0xa"]) # height and diff - - header_hash_hex = resp[0] - if shard_id is not None: # shard 0 - miner_address = Address.create_from( - master.env.quark_chain_config.shards[1].COINBASE_ADDRESS - ) - else: # root - miner_address = Address.create_from( - master.env.quark_chain_config.ROOT.COINBASE_ADDRESS - ) - block = call_async( - master.get_next_block_to_mine( - address=miner_address, branch_value=shard_id and 0b01 - ) - ) - # solve it and submit - work = MiningWork(bytes.fromhex(header_hash_hex[2:]), 1, 10) - solver = DoubleSHA256(work) - nonce = solver.mine(0, 10000).nonce - mixhash = "0x" + sha3_256(b"").hex() - resp = send_request( - "submitWork", - [ - shard_id, - header_hash_hex, - hex(nonce), - mixhash, - "0x" + bytes(65).hex(), - ], - ) - self.assertTrue(resp) - - # show progress on shard 0 - self.assertEqual( - clusters[0].get_shard_state(1 | 0).get_tip().header.height, 1 - ) - - def test_getWork_with_optional_diff_divider(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True - ) as clusters, jrpc_http_server_context(clusters[0].master): - master = clusters[0].master - slaves = clusters[0].slave_list - shard = next(iter(slaves[0].shards.values())) - qkc_config = master.env.quark_chain_config - qkc_config.ROOT.CONSENSUS_TYPE = ConsensusType.POW_SIMULATE - - # add a root block first to init shard chains - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - qkc_config.ROOT.POSW_CONFIG.ENABLED = 
True - qkc_config.ROOT.POSW_CONFIG.ENABLE_TIMESTAMP = 0 - qkc_config.ROOT.POSW_CONFIG.WINDOW_SIZE = 2 - - shard.state.get_root_chain_stakes = lambda _1, _2: ( - qkc_config.ROOT.POSW_CONFIG.TOTAL_STAKE_PER_BLOCK, - acc1.recipient, - ) - - resp = send_request("getWork", [None]) - # height and diff, and returns the diff divider since it's PoSW mineable - self.assertEqual(resp[1:], ["0x2", "0xa", hex(1000)]) - - def test_createTransactions(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_random_account(full_shard_key=1) - - loadtest_accounts = [ - { - "address": "b067ac9ebeeecb10bbcd1088317959d58d1e38f6b0ee10d5", - "key": "ca0143c9aa51c3013f08e83f3b6368a4f3ba5b52c4841c6e0c22c300f7ee6827", - }, - { - "address": "9f2b984937ff8e3f20d2a2592f342f47257870909fffa247", - "key": "40efdb8528de149c35fb43a572fc821d8fbdf2469dcc7fe1a9e847ef29e3c941", - }, - ] - - with ClusterContext( - 1, acc1, small_coinbase=True, loadtest_accounts=loadtest_accounts - ) as clusters, jrpc_http_server_context(clusters[0].master): - slaves = clusters[0].slave_list - master = clusters[0].master - - block = call_async( - master.get_next_block_to_mine(address=acc2, branch_value=None) - ) - call_async(master.add_root_block(block)) - - send_request("createTransactions", {"numTxPerShard": 1, "xShardPercent": 0}) - - -# ------------------------------- Test for JSONRPCWebsocketServer ------------------------------- -@contextmanager -def jrpc_websocket_server_context(slave_server, port=38590): - env = DEFAULT_ENV.copy() - env.cluster_config = ClusterConfig() - env.cluster_config.JSON_RPC_PORT = 38391 - env.cluster_config.JSON_RPC_HOST = "127.0.0.1" - - env.slave_config = env.cluster_config.get_slave_config("S0") - env.slave_config.HOST = "0.0.0.0" - env.slave_config.WEBSOCKET_JSON_RPC_PORT = port - server = call_async(JSONRPCWebsocketServer.start_websocket_server(env, slave_server)) - try: - yield server - finally: - 
server.shutdown() - - -def send_websocket_request(request, num_response=1, port=38590): - responses = [] - - async def __send_request(request, port): - uri = "ws://0.0.0.0:" + str(port) - async with websockets.connect(uri) as websocket: - await websocket.send(request) - while True: - response = await websocket.recv() - responses.append(response) - if len(responses) == num_response: - return responses - - return call_async(__send_request(request, port)) - - -async def get_websocket(port=38590): - uri = "ws://0.0.0.0:" + str(port) - return await websockets.connect(uri) - - -class TestJSONRPCWebsocket(unittest.TestCase): - def test_new_heads(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context(clusters[0].slave_list[0]): - # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 - master = clusters[0].master - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00000002"], - "id": 3, - } - websocket = call_async(get_websocket()) - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 3) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - block_hash = block.header.get_hash() - block_height = block.header.height - - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual( - response["params"]["result"]["hash"], data_encoder(block_hash) - ) - self.assertEqual( - response["params"]["result"]["height"], quantity_encoder(block_height) - ) - - def test_new_heads_with_chain_reorg(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 
1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38591 - ): - websocket = call_async(get_websocket(port=38591)) - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00000002"], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 3) - - state = clusters[0].get_shard_state(2 | 0) - tip = state.get_tip() - - # no chain reorg at this point - b0 = state.create_block_to_mine(address=acc1) - state.finalize_and_add_block(b0) - self.assertEqual(state.header_tip, b0.header) - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["hash"], data_encoder(b0.header.get_hash()) - ) - - # fork happens - b1 = tip.create_block_to_append(address=acc1) - state.finalize_and_add_block(b1) - b2 = b1.create_block_to_append(address=acc1) - state.finalize_and_add_block(b2) - self.assertEqual(state.header_tip, b2.header) - - # new heads b1, b2 emitted from new chain - blocks = [b1, b2] - for b in blocks: - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["hash"], data_encoder(b.header.get_hash()) - ) - - def test_new_pending_xshard_tx_sender(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0x0) - acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38592 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - 
"params": ["newPendingTransactions", "0x00000002"], - "id": 6, - } - - websocket = call_async(get_websocket(38592)) - call_async(websocket.send(json.dumps(request))) - - sub_response = json.loads(call_async(websocket.recv())) - self.assertEqual(sub_response["id"], 6) - self.assertEqual(len(sub_response["result"]), 34) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - tx_response = json.loads(call_async(websocket.recv())) - self.assertEqual( - tx_response["params"]["subscription"], sub_response["result"] - ) - self.assertTrue(tx_response["params"]["result"], tx.get_hash()) - - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) - - def test_new_pending_xshard_tx_target(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0x10001) - acc2 = Address.create_from_identity(id1, full_shard_key=0x0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38593 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": 6, - } - websocket = call_async(get_websocket(38593)) - call_async(websocket.send(json.dumps(request))) - - sub_response = json.loads(call_async(websocket.recv())) - self.assertEqual(sub_response["id"], 6) - self.assertEqual(len(sub_response["result"]), 34) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(0x10003), - key=id1.get_key(), - from_address=acc1, 
- to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[1].add_tx(tx)) - - b1 = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0x10003) - ) - self.assertTrue(call_async(clusters[0].get_shard(0x10003).add_block(b1))) - - tx_response = json.loads(call_async(websocket.recv())) - self.assertEqual( - tx_response["params"]["subscription"], sub_response["result"] - ) - self.assertTrue(tx_response["params"]["result"], tx.get_hash()) - - def test_new_pending_tx_same_acc_multi_subscriptions(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0x0) - acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38594 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=None) - ) - call_async(master.add_root_block(block)) - - requests = [] - REQ_NUM = 5 - for i in range(REQ_NUM): - req = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": i, - } - requests.append(req) - - websocket = call_async(get_websocket(38594)) - [call_async(websocket.send(json.dumps(req))) for req in requests] - sub_responses = [json.loads(call_async(websocket.recv())) for _ in requests] - - for i, resp in enumerate(sub_responses): - self.assertEqual(resp["id"], i) - self.assertEqual(len(resp["result"]), 34) - - tx = create_transfer_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(slaves[0].add_tx(tx)) - - tx_responses = [json.loads(call_async(websocket.recv())) for _ in requests] - for i, resp in enumerate(tx_responses): - self.assertEqual( - resp["params"]["subscription"], 
sub_responses[i]["result"] - ) - self.assertTrue(resp["params"]["result"], tx.get_hash()) - - def test_new_pending_tx_with_reorg(self): - id1 = Identity.create_random_identity() - id2 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - acc2 = Address.create_from_identity(id2, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38595 - ): - websocket = call_async(get_websocket(port=38595)) - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - - sub_response = json.loads(call_async(websocket.recv())) - self.assertEqual(sub_response["id"], 3) - self.assertEqual(len(sub_response["result"]), 34) - - state = clusters[0].get_shard_state(2 | 0) - tip = state.get_tip() - - tx = create_transfer_transaction( - shard_state=state, - key=id1.get_key(), - from_address=acc1, - to_address=acc2, - gas=30000, - value=12345, - ) - self.assertTrue(state.add_tx(tx)) - tx_response1 = json.loads(call_async(websocket.recv())) - self.assertEqual( - tx_response1["params"]["subscription"], sub_response["result"] - ) - self.assertTrue(tx_response1["params"]["result"], tx.get_hash()) - - b0 = state.create_block_to_mine() - state.finalize_and_add_block(b0) - b1 = tip.create_block_to_append() - state.finalize_and_add_block(b1) - b2 = b1.create_block_to_append() - state.finalize_and_add_block(b2) # fork should happen, b0-b2 is picked up - - tx_response2 = json.loads(call_async(websocket.recv())) - self.assertEqual(state.header_tip, b2.header) - self.assertEqual(tx_response2, tx_response1) - - def test_logs(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - expected_log_parts = { - "logIndex": "0x0", - "transactionIndex": "0x0", - 
"blockNumber": "0x1", - "blockHeight": "0x1", - "data": "0x", - } - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38596 - ): - master = clusters[0].master - slaves = clusters[0].slave_list - websocket = call_async(get_websocket(port=38596)) - - # filter by contract address - contract_addr = mk_contract_address(acc1.recipient, 0, acc1.full_shard_key) - filter_req = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": [ - "logs", - "0x00000002", - { - "address": "0x" - + contract_addr.hex() - + hex(acc1.full_shard_key)[2:].zfill(8) - }, - ], - "id": 4, - } - call_async(websocket.send(json.dumps(filter_req))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 4) - - # filter by topics - filter_req = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": [ - "logs", - "0x00000002", - { - "topics": [ - "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" - ] - }, - ], - "id": 5, - } - call_async(websocket.send(json.dumps(filter_req))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 5) - - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() - self.assertTrue(slaves[0].add_tx(tx)) - - block = call_async( - master.get_next_block_to_mine( - address=acc1, branch_value=0b10 - ) # branch_value = 2 - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - count = 0 - while count < 2: - response = call_async(websocket.recv()) - count += 1 - d = json.loads(response) - self.assertTrue(expected_log_parts.items() <= d["params"]["result"].items()) - self.assertEqual( 
- "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", - d["params"]["result"]["topics"][0], - ) - self.assertEqual(count, 2) - - def test_log_removed_flag_with_chain_reorg(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38597 - ): - websocket = call_async(get_websocket(port=38597)) - - # a log subscriber with no-filter request - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["logs", "0x00000002", {}], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["id"], 3) - - state = clusters[0].get_shard_state(2 | 0) - tip = state.get_tip() - b0 = state.create_block_to_mine(address=acc1) - tx = create_contract_creation_with_event_transaction( - shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 - key=id1.get_key(), - from_address=acc1, - to_full_shard_key=acc1.full_shard_key, - ) - b0.add_tx(tx) - state.finalize_and_add_block(b0) - self.assertEqual(state.header_tip, b0.header) - tx_hash = tx.get_hash() - - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["transactionHash"], data_encoder(tx_hash) - ) - self.assertEqual(d["params"]["result"]["removed"], False) - - # fork happens - b1 = tip.create_block_to_append(address=acc1) - b1.add_tx(tx) - state.finalize_and_add_block(b1) - b2 = b1.create_block_to_append(address=acc1) - state.finalize_and_add_block(b2) - self.assertEqual(state.header_tip, b2.header) - - # log emitted from old chain, flag is set to True - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["transactionHash"], data_encoder(tx_hash) - ) - 
self.assertEqual(d["params"]["result"]["removed"], True) - - # log emitted from new chain - response = call_async(websocket.recv()) - d = json.loads(response) - self.assertEqual( - d["params"]["result"]["transactionHash"], data_encoder(tx_hash) - ) - self.assertEqual(d["params"]["result"]["removed"], False) - - def test_invalid_subscription(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38598 - ): - # Invalid subscription type - request1 = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newBlocks", "0x00000002"], - "id": 3, - } - # Invalid full shard id - request2 = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00040002"], - "id": 3, - } - - websocket = call_async(get_websocket(port=38598)) - [ - call_async(websocket.send(json.dumps(req))) - for req in [request1, request2] - ] - responses = [json.loads(call_async(websocket.recv())) for _ in range(2)] - [self.assertTrue(resp["error"]) for resp in responses] # emit error message - - def test_multi_subs_with_some_unsubs_in_one_ws_conn(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38599 - ): - # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 - master = clusters[0].master - websocket = call_async(get_websocket(port=38599)) - - # make 3 subscriptions on new heads - ids = [3, 4, 5] - sub_ids = [] - for id in ids: - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newHeads", "0x00000002"], - "id": id, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - 
sub_ids.append(response["result"]) - self.assertEqual(response["id"], id) - - # cancel the first subscription - request = { - "jsonrpc": "2.0", - "method": "unsubscribe", - "params": [sub_ids[0]], - "id": 3, - } - call_async(websocket.send(json.dumps(request))) - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["result"], True) # unsubscribed successfully - - # add a new block, should expect only 2 responses - root_block = call_async( - master.get_next_block_to_mine(acc1, branch_value=None) - ) - call_async(master.add_root_block(root_block)) - - block = call_async( - master.get_next_block_to_mine(address=acc1, branch_value=0b10) - ) - self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) - - for sub_id in sub_ids[1:]: - response = call_async(websocket.recv()) - response = json.loads(response) - self.assertEqual(response["params"]["subscription"], sub_id) - - def test_unsubscribe(self): - id1 = Identity.create_random_identity() - acc1 = Address.create_from_identity(id1, full_shard_key=0) - - with ClusterContext( - 1, acc1, small_coinbase=True - ) as clusters, jrpc_websocket_server_context( - clusters[0].slave_list[0], port=38600 - ): - request = { - "jsonrpc": "2.0", - "method": "subscribe", - "params": ["newPendingTransactions", "0x00000002"], - "id": 6, - } - websocket = call_async(get_websocket(port=38600)) - call_async(websocket.send(json.dumps(request))) - sub_response = json.loads(call_async(websocket.recv())) - - # Check subscription response - self.assertEqual(sub_response["id"], 6) - self.assertEqual(len(sub_response["result"]), 34) - - unsubscribe = { - "jsonrpc": "2.0", - "method": "unsubscribe", - "params": [sub_response["result"]], - "id": 3, - } - - # Unsubscribe successfully - call_async(websocket.send(json.dumps(unsubscribe))) - response = json.loads(call_async(websocket.recv())) - self.assertTrue(response["result"]) - self.assertEqual(response["id"], 3) - - # Invalid 
unsubscription if sub_id does not exist - call_async(websocket.send(json.dumps(unsubscribe))) - response = json.loads(call_async(websocket.recv())) - self.assertTrue(response["error"]) +import json +import unittest +from contextlib import contextmanager +import websockets + +from quarkchain.cluster.cluster_config import ClusterConfig +from quarkchain.cluster.jsonrpc import ( + EMPTY_TX_ID, + JSONRPCHttpServer, + JSONRPCWebsocketServer, + quantity_encoder, + data_encoder, +) +from quarkchain.cluster.miner import DoubleSHA256, MiningWork +from quarkchain.cluster.tests.test_utils import ( + create_transfer_transaction, + ClusterContext, + create_contract_creation_transaction, + create_contract_creation_with_event_transaction, + create_contract_with_storage_transaction, +) +from quarkchain.config import ConsensusType +from quarkchain.core import ( + Address, + Identity, + SerializedEvmTransaction, + TypedTransaction, +) +from quarkchain.env import DEFAULT_ENV +from quarkchain.evm.messages import mk_contract_address +from quarkchain.evm.transactions import Transaction as EvmTransaction +from quarkchain.utils import call_async, sha3_256, token_id_encode +from quarkchain.jsonrpc_client import AsyncJsonRpcClient, JsonRpcError + + +@contextmanager +def jrpc_http_server_context(master): + env = DEFAULT_ENV.copy() + env.cluster_config = ClusterConfig() + env.cluster_config.JSON_RPC_PORT = 38391 + # to pass the circleCi + env.cluster_config.JSON_RPC_HOST = "127.0.0.1" + server = call_async(JSONRPCHttpServer.start_test_server(env, master)) + try: + yield server + finally: + call_async(server.shutdown()) + + +rpc_client = AsyncJsonRpcClient("http://localhost:38391") + +def send_request(method, *args): + return call_async(rpc_client.call(method, *args)) + + +class TestJSONRPCHttp(unittest.TestCase): + def test_getTransactionCount(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = 
Address.create_random_account(full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + stats = call_async(master.get_stats()) + self.assertTrue("posw" in json.dumps(stats)) + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + for i in range(3): + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertEqual(i + 1, block.header.height) + self.assertTrue( + call_async(clusters[0].get_shard(2 | 0).add_block(block)) + ) + + response = send_request( + "getTransactionCount", ["0x" + acc2.serialize().hex()] + ) + self.assertEqual(response, "0x0") + + response = send_request( + "getTransactionCount", ["0x" + acc1.serialize().hex()] + ) + self.assertEqual(response, "0x3") + response = send_request( + "getTransactionCount", ["0x" + acc1.serialize().hex(), "latest"] + ) + self.assertEqual(response, "0x3") + + for i in range(3): + response = send_request( + "getTransactionCount", ["0x" + acc1.serialize().hex(), hex(i + 1)] + ) + self.assertEqual(response, hex(i + 1)) + + def test_getBalance(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + response = send_request("getBalances", ["0x" + acc1.serialize().hex()]) + self.assertListEqual( + response["balances"], + [{"tokenId": "0x8bb0", "tokenStr": "QKC", "balance": "0xf4240"}], + ) + + response = send_request("eth_getBalance", ["0x" + acc1.recipient.hex()]) + self.assertEqual(response, "0xf4240") + + def 
test_sendTransaction(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_random_account(full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + evm_tx = EvmTransaction( + nonce=0, + gasprice=6, + startgas=30000, + to=acc2.recipient, + value=15, + data=b"", + from_full_shard_key=acc1.full_shard_key, + to_full_shard_key=acc2.full_shard_key, + network_id=slaves[0].env.quark_chain_config.NETWORK_ID, + gas_token_id=master.env.quark_chain_config.genesis_token, + transfer_token_id=master.env.quark_chain_config.genesis_token, + ) + evm_tx.sign(id1.get_key()) + request = dict( + to="0x" + acc2.recipient.hex(), + gasPrice="0x6", + gas=hex(30000), + value="0xf", # 15 + v=quantity_encoder(evm_tx.v), + r=quantity_encoder(evm_tx.r), + s=quantity_encoder(evm_tx.s), + nonce="0x0", + fromFullShardKey="0x00000000", + toFullShardKey="0x00000001", + network_id=hex(slaves[0].env.quark_chain_config.NETWORK_ID), + ) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + response = send_request("sendTransaction", [request]) + + self.assertEqual(response, "0x" + tx.get_hash().hex() + "00000000") + state = clusters[0].get_shard_state(2 | 0) + self.assertEqual(len(state.tx_queue), 1) + self.assertEqual( + state.tx_queue.pop_transaction( + state.get_transaction_count + ).tx.to_evm_tx(), + evm_tx, + ) + + def test_sendTransaction_with_bad_signature(self): + """ sendTransaction validates signature """ + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_random_account(full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) 
as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = dict( + to="0x" + acc2.recipient.hex(), + gasPrice="0x6", + gas=hex(30000), + value="0xf", + v="0x1", + r="0x2", + s="0x3", + nonce="0x0", + fromFullShardKey="0x00000000", + toFullShardKey="0x00000001", + ) + self.assertEqual(send_request("sendTransaction", [request]), EMPTY_TX_ID) + self.assertEqual(len(clusters[0].get_shard_state(2 | 0).tx_queue), 0) + + def test_sendTransaction_missing_from_full_shard_key(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = dict( + to="0x" + acc1.recipient.hex(), + gasPrice="0x6", + gas=hex(30000), + value="0xf", + v="0x1", + r="0x2", + s="0x3", + nonce="0x0", + ) + + with self.assertRaises(Exception): + send_request("sendTransaction", [request]) + + def test_getMinorBlock(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, 
branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + # By id + for need_extra_info in [True, False]: + resp = send_request( + "getMinorBlockById", + [ + "0x" + block1.header.get_hash().hex() + "0" * 8, + False, + need_extra_info, + ], + ) + self.assertEqual( + resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" + ) + + resp = send_request( + "getMinorBlockById", + ["0x" + block1.header.get_hash().hex() + "0" * 8, True], + ) + self.assertEqual( + resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() + ) + + resp = send_request("getMinorBlockById", ["0x" + "ff" * 36, True]) + self.assertIsNone(resp) + + # By height + for need_extra_info in [True, False]: + resp = send_request( + "getMinorBlockByHeight", ["0x0", "0x1", False, need_extra_info] + ) + self.assertEqual( + resp["transactions"][0], "0x" + tx.get_hash().hex() + "00000002" + ) + + resp = send_request("getMinorBlockByHeight", ["0x0", "0x1", True]) + self.assertEqual( + resp["transactions"][0]["hash"], "0x" + tx.get_hash().hex() + ) + + resp = send_request("getMinorBlockByHeight", ["0x1", "0x2", False]) + self.assertIsNone(resp) + resp = send_request("getMinorBlockByHeight", ["0x0", "0x4", False]) + self.assertIsNone(resp) + + def test_getRootblockConfirmationIdAndCount(self): + # TODO test root chain forks + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + 
from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + tx_id = ( + "0x" + + tx.get_hash().hex() + + acc1.full_shard_key.to_bytes(4, "big").hex() + ) + resp = send_request("getTransactionById", [tx_id]) + self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) + self.assertEqual( + resp["blockId"], + "0x" + + block1.header.get_hash().hex() + + block1.header.branch.get_full_shard_id() + .to_bytes(4, byteorder="big") + .hex(), + ) + minor_hash = resp["blockId"] + + # zero root block confirmation + resp_hash = send_request( + "getRootHashConfirmingMinorBlockById", [minor_hash] + ) + self.assertIsNone( + resp_hash, "should return None for unconfirmed minor blocks" + ) + resp_count = send_request( + "getTransactionConfirmedByNumberRootBlocks", [tx_id] + ) + self.assertEqual(resp_count, "0x0") + + # 1 root block confirmation + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + resp_hash = send_request( + "getRootHashConfirmingMinorBlockById", [minor_hash] + ) + self.assertIsNotNone(resp_hash, "confirmed by root block") + self.assertEqual(resp_hash, "0x" + block.header.get_hash().hex()) + resp_count = send_request( + "getTransactionConfirmedByNumberRootBlocks", [tx_id] + ) + self.assertEqual(resp_count, "0x1") + + # 2 root block confirmation + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + resp_hash = send_request( + "getRootHashConfirmingMinorBlockById", [minor_hash] + ) + self.assertIsNotNone(resp_hash, "confirmed by root block") + self.assertNotEqual(resp_hash, "0x" + block.header.get_hash().hex()) + resp_count = send_request( + "getTransactionConfirmedByNumberRootBlocks", [tx_id] + ) + 
self.assertEqual(resp_count, "0x2") + + def test_getTransactionById(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + self.assertEqual( + call_async(master.get_primary_account_data(acc1)).transaction_count, 0 + ) + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + resp = send_request( + "getTransactionById", + [ + "0x" + + tx.get_hash().hex() + + acc1.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["hash"], "0x" + tx.get_hash().hex()) + + def test_call_success(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + + response = send_request( + "call", [{"to": "0x" + acc1.serialize().hex(), "gas": hex(21000)}] + ) + + self.assertEqual(response, "0x") + self.assertEqual( + len(clusters[0].get_shard_state(2 | 0).tx_queue), + 0, + "should not affect tx queue", + ) + + def test_call_success_default_gas(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + + # gas is not specified in the request + response = send_request( + "call", [{"to": "0x" + 
acc1.serialize().hex()}, "latest"] + ) + + self.assertEqual(response, "0x") + self.assertEqual( + len(clusters[0].get_shard_state(2 | 0).tx_queue), + 0, + "should not affect tx queue", + ) + + def test_call_failure(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + + # insufficient gas + response = send_request( + "call", [{"to": "0x" + acc1.serialize().hex(), "gas": "0x1"}, None] + ) + + self.assertIsNone(response, "failed tx should return None") + self.assertEqual( + len(clusters[0].get_shard_state(2 | 0).tx_queue), + 0, + "should not affect tx queue", + ) + + def test_getTransactionReceipt_not_exist(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + bytes(36).hex()]) + self.assertIsNone(resp) + + def test_getTransactionReceipt_on_transfer(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + for endpoint in ("getTransactionReceipt", 
"eth_getTransactionReceipt"): + resp = send_request( + endpoint, + [ + "0x" + + tx.get_hash().hex() + + acc1.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0x5208") + self.assertIsNone(resp["contractAddress"]) + + def test_getTransactionReceipt_on_xshard_transfer_before_enabling_EVM(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_from_identity(id1, full_shard_key=0x00010000) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + # disable EVM to have fake xshard receipts + master.env.quark_chain_config.ENABLE_EVM_TIMESTAMP = 2 ** 64 - 1 + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + s1, s2 = ( + clusters[0].get_shard_state(2 | 0), + clusters[0].get_shard_state(0x00010002), + ) + tx_gen = lambda s, f, t: create_transfer_transaction( + shard_state=s, + key=id1.get_key(), + from_address=f, + to_address=t, + gas=21000 if f == t else 30000, + value=12345, + ) + tx1 = tx_gen(s1, acc1, acc2) + self.assertTrue(slaves[0].add_tx(tx1)) + b1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) + + root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + + call_async(master.add_root_block(root_block)) + + tx2 = tx_gen(s2, acc2, acc2) + self.assertTrue(slaves[0].add_tx(tx2)) + b3 = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=0x00010002) + ) + self.assertTrue(call_async(clusters[0].get_shard(0x00010002).add_block(b3))) + + # in-shard tx 21000 + 
receiving x-shard tx 9000 + self.assertEqual(s2.evm_state.gas_used, 30000) + self.assertEqual(s2.evm_state.xshard_receive_gas_used, 9000) + + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request( + endpoint, + [ + "0x" + + tx2.get_hash().hex() + + acc2.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["transactionHash"], "0x" + tx2.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], hex(30000)) + self.assertEqual(resp["gasUsed"], hex(21000)) + self.assertIsNone(resp["contractAddress"]) + + # query xshard tx receipt on the target shard + resp = send_request( + endpoint, + [ + "0x" + + tx1.get_hash().hex() + + acc2.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["status"], "0x1") + # other fields are fake + self.assertEqual(resp["cumulativeGasUsed"], hex(0)) + self.assertEqual(resp["gasUsed"], hex(0)) + + def test_getTransactionReceipt_on_xshard_transfer_after_enabling_EVM(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_from_identity(id1, full_shard_key=1) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + s1, s2 = ( + clusters[0].get_shard_state(2 | 0), + clusters[0].get_shard_state(2 | 1), + ) + tx = create_transfer_transaction( + shard_state=s1, + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + # source shard + b1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) + # root chain + 
root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + # target shard + b3 = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=0b11) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(b3))) + + # query xshard tx receipt on the target shard + resp = send_request( + "getTransactionReceipt", + [ + "0x" + + tx.get_hash().hex() + + acc2.full_shard_key.to_bytes(4, "big").hex() + ], + ) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["transactionIndex"], "0x3") + self.assertEqual(resp["cumulativeGasUsed"], hex(9000)) + self.assertEqual(resp["gasUsed"], hex(9000)) + + def test_getTransactionReceipt_on_contract_creation(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + to_full_shard_key = acc1.full_shard_key + 2 + tx = create_contract_creation_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=to_full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0x213eb") + + contract_address = mk_contract_address( + acc1.recipient, 0, to_full_shard_key + ) + self.assertEqual( + resp["contractAddress"], + "0x" + + contract_address.hex() + + 
to_full_shard_key.to_bytes(4, "big").hex(), + ) + + def test_getTransactionReceipt_on_xshard_contract_creation(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # Add a root block to update block gas limit for xshard tx throttling + # so that the following tx can be processed + root_block = call_async( + master.get_next_block_to_mine(acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + + to_full_shard_key = acc1.full_shard_key + 1 + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=to_full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block1))) + + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000002"]) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0x11374") + self.assertIsNone(resp["contractAddress"]) + + # x-shard contract creation should succeed. 
check target shard + root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) # root chain + call_async(master.add_root_block(root_block)) + block2 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b11) + ) # target shard + self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block2))) + for endpoint in ("getTransactionReceipt", "eth_getTransactionReceipt"): + resp = send_request(endpoint, ["0x" + tx.get_hash().hex() + "00000003"]) + self.assertEqual(resp["transactionHash"], "0x" + tx.get_hash().hex()) + self.assertEqual(resp["status"], "0x1") + self.assertEqual(resp["cumulativeGasUsed"], "0xc515") + self.assertIsNotNone(resp["contractAddress"]) + + def test_getLogs(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + expected_log_parts = { + "logIndex": "0x0", + "transactionIndex": "0x0", + "blockNumber": "0x1", + "blockHeight": "0x1", + "data": "0x", + } + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # Add a root block to update block gas limit for xshard tx throttling + # so that the following tx can be processed + root_block = call_async( + master.get_next_block_to_mine(acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for using_eth_endpoint in (True, False): + 
shard_id = hex(acc1.full_shard_key) + if using_eth_endpoint: + req = lambda o: send_request("eth_getLogs", [o, shard_id]) + else: + # `None` needed to bypass some request modification + req = lambda o: send_request("getLogs", [o, shard_id]) + + # no filter object as wild cards + resp = req({}) + self.assertEqual(1, len(resp)) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + + # filter with from/to blocks + resp = req({"fromBlock": "0x0", "toBlock": "0x1"}) + self.assertEqual(1, len(resp)) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + resp = req({"fromBlock": "0x0", "toBlock": "0x0"}) + self.assertEqual(0, len(resp)) + + # filter by contract address + contract_addr = mk_contract_address( + acc1.recipient, 0, acc1.full_shard_key + ) + filter_obj = { + "address": "0x" + + contract_addr.hex() + + ( + "" + if using_eth_endpoint + else hex(acc1.full_shard_key)[2:].zfill(8) + ) + } + resp = req(filter_obj) + self.assertEqual(1, len(resp)) + + # filter by topics + filter_obj = { + "topics": [ + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" + ] + } + filter_obj_nested = { + "topics": [ + [ + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" + ] + ] + } + for f in (filter_obj, filter_obj_nested): + resp = req(f) + self.assertEqual(1, len(resp)) + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + self.assertEqual( + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", + resp[0]["topics"][0], + ) + + # xshard creation and check logs: shard 0 -> shard 1 + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key + 1, + ) + self.assertTrue(slaves[0].add_tx(tx)) + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) # source shard + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + 
root_block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) # root chain + call_async(master.add_root_block(root_block)) + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b11) + ) # target shard + self.assertTrue(call_async(clusters[0].get_shard(2 | 1).add_block(block))) + + req = lambda o: send_request("getLogs", [o, hex(0b11)]) + # no filter object as wild cards + resp = req({}) + self.assertEqual(1, len(resp)) + expected_log_parts["transactionIndex"] = "0x3" # after root block coinbase + expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() + expected_log_parts["blockHash"] = "0x" + block.header.get_hash().hex() + self.assertTrue(expected_log_parts.items() <= resp[0].items()) + self.assertEqual(2, len(resp[0]["topics"])) + # missing shard ID should fail + for endpoint in ("getLogs", "eth_getLogs"): + with self.assertRaises(JsonRpcError): + send_request(endpoint, [{}]) + with self.assertRaises(JsonRpcError): + send_request(endpoint, [{}, None]) + + def test_estimateGas(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + payload = {"to": "0x" + acc1.serialize().hex()} + response = send_request("estimateGas", [payload]) + self.assertEqual(response, "0x5208") # 21000 + # cross-shard + from_addr = "0x" + acc1.address_in_shard(1).serialize().hex() + payload["from"] = from_addr + response = send_request("estimateGas", [payload]) + self.assertEqual(response, "0x7530") # 30000 + + def test_getStorageAt(self): + key = bytes.fromhex( + "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" + ) + id1 = Identity.create_from_key(key) + acc1 = Address.create_from_identity(id1, full_shard_key=0) + created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" + + with ClusterContext( + 1, acc1, 
small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = create_contract_with_storage_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for using_eth_endpoint in (True, False): + if using_eth_endpoint: + req = lambda k: send_request( + "eth_getStorageAt", [created_addr[:-8], k, "0x0"] + ) + else: + req = lambda k: send_request("getStorageAt", [created_addr, k]) + + # first storage + response = req("0x0") + # equals 1234 + self.assertEqual( + response, + "0x00000000000000000000000000000000000000000000000000000000000004d2", + ) + + # mapping storage + k = sha3_256( + bytes.fromhex(acc1.recipient.hex().zfill(64) + "1".zfill(64)) + ) + response = req("0x" + k.hex()) + self.assertEqual( + response, + "0x000000000000000000000000000000000000000000000000000000000000162e", + ) + + # doesn't exist + response = req("0x3") + self.assertEqual( + response, + "0x0000000000000000000000000000000000000000000000000000000000000000", + ) + + def test_getCode(self): + key = bytes.fromhex( + "c987d4506fb6824639f9a9e3b8834584f5165e94680501d1b0044071cd36c3b3" + ) + id1 = Identity.create_from_key(key) + acc1 = Address.create_from_identity(id1, full_shard_key=0) + created_addr = "0x8531eb33bba796115f56ffa1b7df1ea3acdd8cdd00000000" + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = create_contract_with_storage_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + 
to_full_shard_key=acc1.full_shard_key, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for using_eth_endpoint in (True, False): + if using_eth_endpoint: + resp = send_request("eth_getCode", [created_addr[:-8], "0x0"]) + else: + resp = send_request("getCode", [created_addr]) + + self.assertEqual( + resp, + "0x6080604052600080fd00a165627a7a72305820a6ef942c101f06333ac35072a8ff40332c71d0e11cd0e6d86de8cae7b42696550029", + ) + + def test_gasPrice(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # run for multiple times + for _ in range(3): + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=0, + gas_price=12, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue( + call_async(clusters[0].get_shard(2 | 0).add_block(block)) + ) + + for using_eth_endpoint in (True, False): + if using_eth_endpoint: + resp = send_request("eth_gasPrice", ["0x0"]) + else: + resp = send_request( + "gasPrice", ["0x0", quantity_encoder(token_id_encode("QKC"))] + ) + + self.assertEqual(resp, "0xc") + + def test_getWork_and_submitWork(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + tx = 
create_transfer_transaction( + shard_state=clusters[0].get_shard_state(1 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=0, + gas_price=12, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + for shard_id in ["0x0", None]: # shard, then root + resp = send_request("getWork", [shard_id]) + self.assertEqual(resp[1:], ["0x1", "0xa"]) # height and diff + + header_hash_hex = resp[0] + if shard_id is not None: # shard 0 + miner_address = Address.create_from( + master.env.quark_chain_config.shards[1].COINBASE_ADDRESS + ) + else: # root + miner_address = Address.create_from( + master.env.quark_chain_config.ROOT.COINBASE_ADDRESS + ) + block = call_async( + master.get_next_block_to_mine( + address=miner_address, branch_value=shard_id and 0b01 + ) + ) + # solve it and submit + work = MiningWork(bytes.fromhex(header_hash_hex[2:]), 1, 10) + solver = DoubleSHA256(work) + nonce = solver.mine(0, 10000).nonce + mixhash = "0x" + sha3_256(b"").hex() + resp = send_request( + "submitWork", + [ + shard_id, + header_hash_hex, + hex(nonce), + mixhash, + "0x" + bytes(65).hex(), + ], + ) + self.assertTrue(resp) + + # show progress on shard 0 + self.assertEqual( + clusters[0].get_shard_state(1 | 0).get_tip().header.height, 1 + ) + + def test_getWork_with_optional_diff_divider(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, remote_mining=True, shard_size=1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + shard = next(iter(slaves[0].shards.values())) + qkc_config = master.env.quark_chain_config + qkc_config.ROOT.CONSENSUS_TYPE = ConsensusType.POW_SIMULATE + + # add a root block first to init shard chains + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + qkc_config.ROOT.POSW_CONFIG.ENABLED = 
True + qkc_config.ROOT.POSW_CONFIG.ENABLE_TIMESTAMP = 0 + qkc_config.ROOT.POSW_CONFIG.WINDOW_SIZE = 2 + + shard.state.get_root_chain_stakes = lambda _1, _2: ( + qkc_config.ROOT.POSW_CONFIG.TOTAL_STAKE_PER_BLOCK, + acc1.recipient, + ) + + resp = send_request("getWork", [None]) + # height and diff, and returns the diff divider since it's PoSW mineable + self.assertEqual(resp[1:], ["0x2", "0xa", hex(1000)]) + + def test_createTransactions(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_random_account(full_shard_key=1) + + loadtest_accounts = [ + { + "address": "b067ac9ebeeecb10bbcd1088317959d58d1e38f6b0ee10d5", + "key": "ca0143c9aa51c3013f08e83f3b6368a4f3ba5b52c4841c6e0c22c300f7ee6827", + }, + { + "address": "9f2b984937ff8e3f20d2a2592f342f47257870909fffa247", + "key": "40efdb8528de149c35fb43a572fc821d8fbdf2469dcc7fe1a9e847ef29e3c941", + }, + ] + + with ClusterContext( + 1, acc1, small_coinbase=True, loadtest_accounts=loadtest_accounts + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + master = clusters[0].master + + block = call_async( + master.get_next_block_to_mine(address=acc2, branch_value=None) + ) + call_async(master.add_root_block(block)) + + send_request("createTransactions", {"numTxPerShard": 1, "xShardPercent": 0}) + + +# ------------------------------- Test for JSONRPCWebsocketServer ------------------------------- +@contextmanager +def jrpc_websocket_server_context(slave_server, port=38590): + env = DEFAULT_ENV.copy() + env.cluster_config = ClusterConfig() + env.cluster_config.JSON_RPC_PORT = 38391 + env.cluster_config.JSON_RPC_HOST = "127.0.0.1" + + env.slave_config = env.cluster_config.get_slave_config("S0") + env.slave_config.HOST = "0.0.0.0" + env.slave_config.WEBSOCKET_JSON_RPC_PORT = port + server = call_async(JSONRPCWebsocketServer.start_websocket_server(env, slave_server)) + try: + yield server + finally: + 
server.shutdown() + + +def send_websocket_request(request, num_response=1, port=38590): + responses = [] + + async def __send_request(request, port): + uri = "ws://0.0.0.0:" + str(port) + async with websockets.connect(uri) as websocket: + await websocket.send(request) + while True: + response = await websocket.recv() + responses.append(response) + if len(responses) == num_response: + return responses + + return call_async(__send_request(request, port)) + + +async def get_websocket(port=38590): + uri = "ws://0.0.0.0:" + str(port) + return await websockets.connect(uri) + + +class TestJSONRPCWebsocket(unittest.TestCase): + def test_new_heads(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context(clusters[0].slave_list[0]): + # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 + master = clusters[0].master + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00000002"], + "id": 3, + } + websocket = call_async(get_websocket()) + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 3) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + block_hash = block.header.get_hash() + block_height = block.header.height + + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual( + response["params"]["result"]["hash"], data_encoder(block_hash) + ) + self.assertEqual( + response["params"]["result"]["height"], quantity_encoder(block_height) + ) + + def test_new_heads_with_chain_reorg(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 
1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38591 + ): + websocket = call_async(get_websocket(port=38591)) + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00000002"], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 3) + + state = clusters[0].get_shard_state(2 | 0) + tip = state.get_tip() + + # no chain reorg at this point + b0 = state.create_block_to_mine(address=acc1) + state.finalize_and_add_block(b0) + self.assertEqual(state.header_tip, b0.header) + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["hash"], data_encoder(b0.header.get_hash()) + ) + + # fork happens + b1 = tip.create_block_to_append(address=acc1) + state.finalize_and_add_block(b1) + b2 = b1.create_block_to_append(address=acc1) + state.finalize_and_add_block(b2) + self.assertEqual(state.header_tip, b2.header) + + # new heads b1, b2 emitted from new chain + blocks = [b1, b2] + for b in blocks: + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["hash"], data_encoder(b.header.get_hash()) + ) + + def test_new_pending_xshard_tx_sender(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0x0) + acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38592 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + 
"params": ["newPendingTransactions", "0x00000002"], + "id": 6, + } + + websocket = call_async(get_websocket(38592)) + call_async(websocket.send(json.dumps(request))) + + sub_response = json.loads(call_async(websocket.recv())) + self.assertEqual(sub_response["id"], 6) + self.assertEqual(len(sub_response["result"]), 34) + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + tx_response = json.loads(call_async(websocket.recv())) + self.assertEqual( + tx_response["params"]["subscription"], sub_response["result"] + ) + self.assertTrue(tx_response["params"]["result"], tx.get_hash()) + + b1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(b1))) + + def test_new_pending_xshard_tx_target(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0x10001) + acc2 = Address.create_from_identity(id1, full_shard_key=0x0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38593 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": 6, + } + websocket = call_async(get_websocket(38593)) + call_async(websocket.send(json.dumps(request))) + + sub_response = json.loads(call_async(websocket.recv())) + self.assertEqual(sub_response["id"], 6) + self.assertEqual(len(sub_response["result"]), 34) + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(0x10003), + key=id1.get_key(), + from_address=acc1, 
+ to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[1].add_tx(tx)) + + b1 = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0x10003) + ) + self.assertTrue(call_async(clusters[0].get_shard(0x10003).add_block(b1))) + + tx_response = json.loads(call_async(websocket.recv())) + self.assertEqual( + tx_response["params"]["subscription"], sub_response["result"] + ) + self.assertTrue(tx_response["params"]["result"], tx.get_hash()) + + def test_new_pending_tx_same_acc_multi_subscriptions(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0x0) + acc2 = Address.create_from_identity(id1, full_shard_key=0x10001) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38594 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=None) + ) + call_async(master.add_root_block(block)) + + requests = [] + REQ_NUM = 5 + for i in range(REQ_NUM): + req = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": i, + } + requests.append(req) + + websocket = call_async(get_websocket(38594)) + [call_async(websocket.send(json.dumps(req))) for req in requests] + sub_responses = [json.loads(call_async(websocket.recv())) for _ in requests] + + for i, resp in enumerate(sub_responses): + self.assertEqual(resp["id"], i) + self.assertEqual(len(resp["result"]), 34) + + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx)) + + tx_responses = [json.loads(call_async(websocket.recv())) for _ in requests] + for i, resp in enumerate(tx_responses): + self.assertEqual( + resp["params"]["subscription"], 
sub_responses[i]["result"] + ) + self.assertTrue(resp["params"]["result"], tx.get_hash()) + + def test_new_pending_tx_with_reorg(self): + id1 = Identity.create_random_identity() + id2 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_from_identity(id2, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38595 + ): + websocket = call_async(get_websocket(port=38595)) + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + + sub_response = json.loads(call_async(websocket.recv())) + self.assertEqual(sub_response["id"], 3) + self.assertEqual(len(sub_response["result"]), 34) + + state = clusters[0].get_shard_state(2 | 0) + tip = state.get_tip() + + tx = create_transfer_transaction( + shard_state=state, + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + gas=30000, + value=12345, + ) + self.assertTrue(state.add_tx(tx)) + tx_response1 = json.loads(call_async(websocket.recv())) + self.assertEqual( + tx_response1["params"]["subscription"], sub_response["result"] + ) + self.assertTrue(tx_response1["params"]["result"], tx.get_hash()) + + b0 = state.create_block_to_mine() + state.finalize_and_add_block(b0) + b1 = tip.create_block_to_append() + state.finalize_and_add_block(b1) + b2 = b1.create_block_to_append() + state.finalize_and_add_block(b2) # fork should happen, b0-b2 is picked up + + tx_response2 = json.loads(call_async(websocket.recv())) + self.assertEqual(state.header_tip, b2.header) + self.assertEqual(tx_response2, tx_response1) + + def test_logs(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + expected_log_parts = { + "logIndex": "0x0", + "transactionIndex": "0x0", + 
"blockNumber": "0x1", + "blockHeight": "0x1", + "data": "0x", + } + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38596 + ): + master = clusters[0].master + slaves = clusters[0].slave_list + websocket = call_async(get_websocket(port=38596)) + + # filter by contract address + contract_addr = mk_contract_address(acc1.recipient, 0, acc1.full_shard_key) + filter_req = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": [ + "logs", + "0x00000002", + { + "address": "0x" + + contract_addr.hex() + + hex(acc1.full_shard_key)[2:].zfill(8) + }, + ], + "id": 4, + } + call_async(websocket.send(json.dumps(filter_req))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 4) + + # filter by topics + filter_req = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": [ + "logs", + "0x00000002", + { + "topics": [ + "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa" + ] + }, + ], + "id": 5, + } + call_async(websocket.send(json.dumps(filter_req))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 5) + + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + expected_log_parts["transactionHash"] = "0x" + tx.get_hash().hex() + self.assertTrue(slaves[0].add_tx(tx)) + + block = call_async( + master.get_next_block_to_mine( + address=acc1, branch_value=0b10 + ) # branch_value = 2 + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + count = 0 + while count < 2: + response = call_async(websocket.recv()) + count += 1 + d = json.loads(response) + self.assertTrue(expected_log_parts.items() <= d["params"]["result"].items()) + self.assertEqual( 
+ "0xa9378d5bd800fae4d5b8d4c6712b2b64e8ecc86fdc831cb51944000fc7c8ecfa", + d["params"]["result"]["topics"][0], + ) + self.assertEqual(count, 2) + + def test_log_removed_flag_with_chain_reorg(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True, genesis_minor_quarkash=10000000 + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38597 + ): + websocket = call_async(get_websocket(port=38597)) + + # a log subscriber with no-filter request + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["logs", "0x00000002", {}], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["id"], 3) + + state = clusters[0].get_shard_state(2 | 0) + tip = state.get_tip() + b0 = state.create_block_to_mine(address=acc1) + tx = create_contract_creation_with_event_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), # full_shard_id = 2 + key=id1.get_key(), + from_address=acc1, + to_full_shard_key=acc1.full_shard_key, + ) + b0.add_tx(tx) + state.finalize_and_add_block(b0) + self.assertEqual(state.header_tip, b0.header) + tx_hash = tx.get_hash() + + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["transactionHash"], data_encoder(tx_hash) + ) + self.assertEqual(d["params"]["result"]["removed"], False) + + # fork happens + b1 = tip.create_block_to_append(address=acc1) + b1.add_tx(tx) + state.finalize_and_add_block(b1) + b2 = b1.create_block_to_append(address=acc1) + state.finalize_and_add_block(b2) + self.assertEqual(state.header_tip, b2.header) + + # log emitted from old chain, flag is set to True + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["transactionHash"], data_encoder(tx_hash) + ) + 
self.assertEqual(d["params"]["result"]["removed"], True) + + # log emitted from new chain + response = call_async(websocket.recv()) + d = json.loads(response) + self.assertEqual( + d["params"]["result"]["transactionHash"], data_encoder(tx_hash) + ) + self.assertEqual(d["params"]["result"]["removed"], False) + + def test_invalid_subscription(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38598 + ): + # Invalid subscription type + request1 = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newBlocks", "0x00000002"], + "id": 3, + } + # Invalid full shard id + request2 = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00040002"], + "id": 3, + } + + websocket = call_async(get_websocket(port=38598)) + [ + call_async(websocket.send(json.dumps(req))) + for req in [request1, request2] + ] + responses = [json.loads(call_async(websocket.recv())) for _ in range(2)] + [self.assertTrue(resp["error"]) for resp in responses] # emit error message + + def test_multi_subs_with_some_unsubs_in_one_ws_conn(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38599 + ): + # clusters[0].slave_list[0] has two shards with full_shard_id 2 and 3 + master = clusters[0].master + websocket = call_async(get_websocket(port=38599)) + + # make 3 subscriptions on new heads + ids = [3, 4, 5] + sub_ids = [] + for id in ids: + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newHeads", "0x00000002"], + "id": id, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + 
sub_ids.append(response["result"]) + self.assertEqual(response["id"], id) + + # cancel the first subscription + request = { + "jsonrpc": "2.0", + "method": "unsubscribe", + "params": [sub_ids[0]], + "id": 3, + } + call_async(websocket.send(json.dumps(request))) + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["result"], True) # unsubscribed successfully + + # add a new block, should expect only 2 responses + root_block = call_async( + master.get_next_block_to_mine(acc1, branch_value=None) + ) + call_async(master.add_root_block(root_block)) + + block = call_async( + master.get_next_block_to_mine(address=acc1, branch_value=0b10) + ) + self.assertTrue(call_async(clusters[0].get_shard(2 | 0).add_block(block))) + + for sub_id in sub_ids[1:]: + response = call_async(websocket.recv()) + response = json.loads(response) + self.assertEqual(response["params"]["subscription"], sub_id) + + def test_unsubscribe(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_websocket_server_context( + clusters[0].slave_list[0], port=38600 + ): + request = { + "jsonrpc": "2.0", + "method": "subscribe", + "params": ["newPendingTransactions", "0x00000002"], + "id": 6, + } + websocket = call_async(get_websocket(port=38600)) + call_async(websocket.send(json.dumps(request))) + sub_response = json.loads(call_async(websocket.recv())) + + # Check subscription response + self.assertEqual(sub_response["id"], 6) + self.assertEqual(len(sub_response["result"]), 34) + + unsubscribe = { + "jsonrpc": "2.0", + "method": "unsubscribe", + "params": [sub_response["result"]], + "id": 3, + } + + # Unsubscribe successfully + call_async(websocket.send(json.dumps(unsubscribe))) + response = json.loads(call_async(websocket.recv())) + self.assertTrue(response["result"]) + self.assertEqual(response["id"], 3) + + # Invalid 
unsubscription if sub_id does not exist + call_async(websocket.send(json.dumps(unsubscribe))) + response = json.loads(call_async(websocket.recv())) + self.assertTrue(response["error"]) diff --git a/quarkchain/jsonrpc_client.py b/quarkchain/jsonrpc_client.py index 111256700..f69a5953b 100644 --- a/quarkchain/jsonrpc_client.py +++ b/quarkchain/jsonrpc_client.py @@ -1,75 +1,58 @@ -import httpx -import uuid - -class JsonRpcError(Exception): - def __init__(self, error): - self.code = error.get("code") - self.message = error.get("message") - self.data = error.get("data") - super().__init__(f"JSON-RPC Error {self.code}: {self.message}") - -class JsonRpcClient: - def __init__(self, url, timeout=10): - self.client = httpx.Client(base_url=url, timeout=timeout) - - def call(self, method, *params): - payload = { - "jsonrpc": "2.0", - "method": method, - "params": list(params), - "id": str(uuid.uuid4()), - } - - resp = self.client.post("", json=payload) - resp.raise_for_status() - data = resp.json() - - if "error" in data: - raise RuntimeError(data["error"]) - - return data.get("result") - - def close(self): - self.client.close() - - -class AsyncJsonRpcClient: - def __init__(self, url, timeout=10): - self.client = httpx.AsyncClient(base_url=url, timeout=timeout) - - async def call(self, method, *params): - payload = { - "jsonrpc": "2.0", - "method": method, - "params": list(params), - "id": str(uuid.uuid4()), - } - - resp = await self.client.post("", json=payload) - resp.raise_for_status() - data = resp.json() - - if "error" in data: - raise JsonRpcError(data["error"]) - - return data.get("result") - - async def call_with_dict_params(self, method, params): - payload = { - "jsonrpc": "2.0", - "method": method, - "params": params, - "id": str(uuid.uuid4()), - } - - resp = await self.client.post("", json=payload) - resp.raise_for_status() - data = resp.json() - - if "error" in data: - raise JsonRpcError(data["error"]) - - return data.get("result") - - async def close(self): 
+import httpx +import uuid + +class JsonRpcError(Exception): + def __init__(self, error): + self.code = error.get("code") + self.message = error.get("message") + self.data = error.get("data") + super().__init__(f"JSON-RPC Error {self.code}: {self.message}") + +class JsonRpcClient: + def __init__(self, url, timeout=10): + self.client = httpx.Client(base_url=url, timeout=timeout) + + def call(self, method, *params): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": list(params), + "id": str(uuid.uuid4()), + } + + resp = self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise RuntimeError(data["error"]) + + return data.get("result") + + def close(self): + self.client.close() + + +class AsyncJsonRpcClient: + def __init__(self, url, timeout=10): + self.client = httpx.AsyncClient(base_url=url, timeout=timeout) + + async def call(self, method, *params): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": list(params), + "id": str(uuid.uuid4()), + } + + resp = await self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise JsonRpcError(data["error"]) + + return data.get("result") + + async def close(self): await self.client.aclose() \ No newline at end of file From d67a44c8eb2c36db6b2348f8d2df3735c2cec69b Mon Sep 17 00:00:00 2001 From: ping-ke Date: Mon, 16 Mar 2026 23:58:54 +0800 Subject: [PATCH 05/11] fix jsonrpc test failures: params passing and websocket server shutdown --- quarkchain/cluster/jsonrpc.py | 6 +++--- quarkchain/cluster/tests/test_jsonrpc.py | 2 +- quarkchain/jsonrpc_client.py | 17 +++++++++++++++++ quarkchain/tools/fund_testnet.py | 2 +- 4 files changed, 22 insertions(+), 5 deletions(-) diff --git a/quarkchain/cluster/jsonrpc.py b/quarkchain/cluster/jsonrpc.py index e0cc39ba4..5af317a3f 100644 --- a/quarkchain/cluster/jsonrpc.py +++ b/quarkchain/cluster/jsonrpc.py @@ -1526,11 +1526,11 @@ async def __handle(self, 
websocket): pass async def start(self): - start_server = websockets.serve(self.__handle, self.host, self.port) - await start_server + self._server = await websockets.serve(self.__handle, self.host, self.port) def shutdown(self): - pass # TODO + if hasattr(self, '_server') and self._server is not None: + self._server.close() @staticmethod def response_transcoder(sub_id, result): diff --git a/quarkchain/cluster/tests/test_jsonrpc.py b/quarkchain/cluster/tests/test_jsonrpc.py index 663cc7cee..0194984d8 100644 --- a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py @@ -50,7 +50,7 @@ def jrpc_http_server_context(master): rpc_client = AsyncJsonRpcClient("http://localhost:38391") def send_request(method, *args): - return call_async(rpc_client.call(method, *args)) + return call_async(rpc_client.call_with_dict_params(method, *args)) class TestJSONRPCHttp(unittest.TestCase): diff --git a/quarkchain/jsonrpc_client.py b/quarkchain/jsonrpc_client.py index f69a5953b..45cd4c981 100644 --- a/quarkchain/jsonrpc_client.py +++ b/quarkchain/jsonrpc_client.py @@ -53,6 +53,23 @@ async def call(self, method, *params): raise JsonRpcError(data["error"]) return data.get("result") + + async def call_with_dict_params(self, method, params): + payload = { + "jsonrpc": "2.0", + "method": method, + "params": params, + "id": str(uuid.uuid4()), + } + resp = await self.client.post("", json=payload) + resp.raise_for_status() + data = resp.json() + + if "error" in data: + raise JsonRpcError(data["error"]) + + return data.get("result") + async def close(self): await self.client.aclose() \ No newline at end of file diff --git a/quarkchain/tools/fund_testnet.py b/quarkchain/tools/fund_testnet.py index 016ad4077..8fb9b170a 100644 --- a/quarkchain/tools/fund_testnet.py +++ b/quarkchain/tools/fund_testnet.py @@ -15,7 +15,7 @@ class Endpoint: def __init__(self, url): self.client = AsyncJsonRpcClient(url) - async def __send_request(self, method, *args): + async def 
__send_request(self, method, *args): # manual retry since the library has hard-coded timeouts while True: try: From 0abbb3054e3b4909e09f72716b2627cfbdb174ef Mon Sep 17 00:00:00 2001 From: ping-ke Date: Thu, 26 Mar 2026 11:46:27 +0800 Subject: [PATCH 06/11] move async related changes to update/asyncio branch --- quarkchain/cluster/jsonrpc.py | 30 +++++++++++------------ quarkchain/cluster/tests/test_jsonrpc.py | 6 ++--- quarkchain/tools/batch_deploy_contract.py | 2 +- quarkchain/tools/fund_testnet.py | 2 +- quarkchain/tools/monitoring.py | 6 +++-- 5 files changed, 24 insertions(+), 22 deletions(-) diff --git a/quarkchain/cluster/jsonrpc.py b/quarkchain/cluster/jsonrpc.py index e0cc39ba4..defdc1550 100644 --- a/quarkchain/cluster/jsonrpc.py +++ b/quarkchain/cluster/jsonrpc.py @@ -463,7 +463,7 @@ def _parse_log_request( # noinspection PyPep8Naming class JSONRPCHttpServer: @classmethod - async def start_public_server(cls, env, master_server): + def start_public_server(cls, env, master_server): server = cls( env, master_server, @@ -471,11 +471,11 @@ async def start_public_server(cls, env, master_server): env.cluster_config.JSON_RPC_HOST, public_methods, ) - await server.start() + server.start() return server @classmethod - async def start_private_server(cls, env, master_server): + def start_private_server(cls, env, master_server): server = cls( env, master_server, @@ -483,11 +483,11 @@ async def start_private_server(cls, env, master_server): env.cluster_config.PRIVATE_JSON_RPC_HOST, private_methods, ) - await server.start() + server.start() return server @classmethod - async def start_test_server(cls, env, master_server): + def start_test_server(cls, env, master_server): methods = RpcMethods() for method in public_methods.values(): methods.add(method) @@ -500,7 +500,7 @@ async def start_test_server(cls, env, master_server): env.cluster_config.JSON_RPC_HOST, methods, ) - await server.start() + server.start() return server def __init__( @@ -542,7 +542,7 @@ async def 
__handle(self, request): Logger.error(response) return web.json_response(response) - async def start(self): + def start(self): app = web.Application(client_max_size=JSON_RPC_CLIENT_REQUEST_MAX_SIZE) cors = aiohttp_cors.setup(app) route = app.router.add_post("/", self.__handle) @@ -558,12 +558,12 @@ async def start(self): }, ) self.runner = web.AppRunner(app, access_log=None) - await self.runner.setup() + self.loop.run_until_complete(self.runner.setup()) site = web.TCPSite(self.runner, self.host, self.port) - await site.start() + self.loop.run_until_complete(site.start()) - async def shutdown(self): - await self.runner.cleanup() + def shutdown(self): + self.loop.run_until_complete(self.runner.cleanup()) # JSON RPC handlers @public_methods.add @@ -1445,7 +1445,7 @@ def get_data_default(key, decoder, default=None): class JSONRPCWebsocketServer: @classmethod - async def start_websocket_server(cls, env, slave_server): + def start_websocket_server(cls, env, slave_server): server = cls( env, slave_server, @@ -1453,7 +1453,7 @@ async def start_websocket_server(cls, env, slave_server): env.slave_config.HOST, public_methods, ) - await server.start() + server.start() return server def __init__( @@ -1525,9 +1525,9 @@ async def __handle(self, websocket): except: pass - async def start(self): + def start(self): start_server = websockets.serve(self.__handle, self.host, self.port) - await start_server + self.loop.run_until_complete(start_server) def shutdown(self): pass # TODO diff --git a/quarkchain/cluster/tests/test_jsonrpc.py b/quarkchain/cluster/tests/test_jsonrpc.py index 663cc7cee..63b4ebec5 100644 --- a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py @@ -40,11 +40,11 @@ def jrpc_http_server_context(master): env.cluster_config.JSON_RPC_PORT = 38391 # to pass the circleCi env.cluster_config.JSON_RPC_HOST = "127.0.0.1" - server = call_async(JSONRPCHttpServer.start_test_server(env, master)) + server = 
JSONRPCHttpServer.start_test_server(env, master) try: yield server finally: - call_async(server.shutdown()) + server.shutdown() rpc_client = AsyncJsonRpcClient("http://localhost:38391") @@ -1208,7 +1208,7 @@ def jrpc_websocket_server_context(slave_server, port=38590): env.slave_config = env.cluster_config.get_slave_config("S0") env.slave_config.HOST = "0.0.0.0" env.slave_config.WEBSOCKET_JSON_RPC_PORT = port - server = call_async(JSONRPCWebsocketServer.start_websocket_server(env, slave_server)) + server = JSONRPCWebsocketServer.start_websocket_server(env, slave_server) try: yield server finally: diff --git a/quarkchain/tools/batch_deploy_contract.py b/quarkchain/tools/batch_deploy_contract.py index 74a7c5543..24fab4113 100644 --- a/quarkchain/tools/batch_deploy_contract.py +++ b/quarkchain/tools/batch_deploy_contract.py @@ -108,7 +108,7 @@ def main(): genesisId = Identity.create_from_key(DEFAULT_ENV.config.GENESIS_KEY) endpoint = Endpoint("http://" + args.jrpc_endpoint) - asyncio.run(deploy(endpoint, genesisId, data)) + asyncio.get_event_loop().run_until_complete(deploy(endpoint, genesisId, data)) if __name__ == "__main__": diff --git a/quarkchain/tools/fund_testnet.py b/quarkchain/tools/fund_testnet.py index 016ad4077..537840e4c 100644 --- a/quarkchain/tools/fund_testnet.py +++ b/quarkchain/tools/fund_testnet.py @@ -163,7 +163,7 @@ def main(): endpoint = Endpoint("http://" + args.jrpc_endpoint) addrByAmount = read_addr(args.tqkc_file) - asyncio.run(fund(endpoint, genesisId, addrByAmount)) + asyncio.get_event_loop().run_until_complete(fund(endpoint, genesisId, addrByAmount)) if __name__ == "__main__": diff --git a/quarkchain/tools/monitoring.py b/quarkchain/tools/monitoring.py index 407cefd8e..fc15baa0a 100644 --- a/quarkchain/tools/monitoring.py +++ b/quarkchain/tools/monitoring.py @@ -67,7 +67,9 @@ async def crawl_async(ip, p2p_port, jrpc_port): def crawl_bfs(ip, p2p_port, jrpc_port): - cache = asyncio.run(crawl_async(ip, p2p_port, jrpc_port)) + loop = 
asyncio.new_event_loop() + asyncio.set_event_loop(loop) + cache = loop.run_until_complete(crawl_async(ip, p2p_port, jrpc_port)) res = {} # we can avoid the loop, but it will look crazy @@ -179,7 +181,7 @@ def watch_nodes_stats(ip, p2p_port, jrpc_port, ip_lookup={}): for idx, cluster in enumerate(clusters) ] ) - asyncio.run(async_watch(clusters)) + asyncio.get_event_loop().run_until_complete(async_watch(clusters)) def main(): From f867cb332b6ff81912b53e8c0041353bebd67d1f Mon Sep 17 00:00:00 2001 From: ping-ke Date: Tue, 31 Mar 2026 10:39:53 +0800 Subject: [PATCH 07/11] add rationale comment for httpx dependency choice in jsonrpc_client --- quarkchain/jsonrpc_client.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/quarkchain/jsonrpc_client.py b/quarkchain/jsonrpc_client.py index 45cd4c981..7d9961dff 100644 --- a/quarkchain/jsonrpc_client.py +++ b/quarkchain/jsonrpc_client.py @@ -1,3 +1,8 @@ +# httpx is chosen over aiohttp because it provides both sync and async clients +# with a unified API, keeping this module simple and consistent. aiohttp is +# async-only, so the sync client (JsonRpcClient) would need a separate HTTP +# implementation (e.g. urllib.request). httpx as a pure client library is +# lightweight (~200KB with deps) and doesn't overlap with aiohttp's server role. 
import httpx import uuid From cd2843dbb82cb95cb6d4e105c9304aab10ff0c42 Mon Sep 17 00:00:00 2001 From: ping-ke Date: Tue, 31 Mar 2026 16:59:25 +0800 Subject: [PATCH 08/11] rename jsonrpcserver to jsonrpc_server and fix related imports and issues - Rename jsonrpcserver.py to jsonrpc_server.py for consistent naming - Replace armor with asyncio.shield for cancellation protection - Fix get_running_loop to get_event_loop for compatibility - Fix subscription.py import from old jsonrpcserver package - Fix indentation in test_jsonrpc.py and method name in stats.py --- quarkchain/cluster/jsonrpc.py | 10 +++++----- .../cluster/{jsonrpcserver.py => jsonrpc_server.py} | 2 +- quarkchain/cluster/subscription.py | 7 +++---- quarkchain/cluster/tests/test_jsonrpc.py | 2 +- quarkchain/tools/stats.py | 2 +- 5 files changed, 11 insertions(+), 12 deletions(-) rename quarkchain/cluster/{jsonrpcserver.py => jsonrpc_server.py} (100%) diff --git a/quarkchain/cluster/jsonrpc.py b/quarkchain/cluster/jsonrpc.py index 51123f014..477b4d8fb 100644 --- a/quarkchain/cluster/jsonrpc.py +++ b/quarkchain/cluster/jsonrpc.py @@ -34,7 +34,7 @@ import uuid from quarkchain.cluster.log_filter import LogFilter from quarkchain.cluster.subscription import SUB_LOGS -from quarkchain.cluster.jsonrpcserver import RpcMethods, InvalidParams +from quarkchain.cluster.jsonrpc_server import RpcMethods, InvalidParams # defaults DEFAULT_STARTGAS = 100 * 1000 @@ -506,7 +506,7 @@ def start_test_server(cls, env, master_server): def __init__( self, env, master_server: MasterServer, port, host, methods: RpcMethods ): - self.loop = asyncio.get_running_loop() + self.loop = asyncio.get_event_loop() self.port = port self.host = host self.env = env @@ -533,9 +533,9 @@ async def __handle(self, request): self.counters[method] += 1 else: self.counters[method] = 1 - # Use armor to prevent the handler from being cancelled when + # Use asyncio.shield to prevent the handler from being cancelled when # aiohttp server loses connection to 
client - response = await self.handlers.dispatch(d) + response = await asyncio.shield(self.handlers.dispatch(d)) if response is None: return web.Response() if "error" in response: @@ -1459,7 +1459,7 @@ def start_websocket_server(cls, env, slave_server): def __init__( self, env, slave_server: SlaveServer, port, host, methods: RpcMethods ): - self.loop = asyncio.get_running_loop() + self.loop = asyncio.get_event_loop() self.port = port self.host = host self.env = env diff --git a/quarkchain/cluster/jsonrpcserver.py b/quarkchain/cluster/jsonrpc_server.py similarity index 100% rename from quarkchain/cluster/jsonrpcserver.py rename to quarkchain/cluster/jsonrpc_server.py index 139657a42..9219e6df8 100644 --- a/quarkchain/cluster/jsonrpcserver.py +++ b/quarkchain/cluster/jsonrpc_server.py @@ -34,11 +34,11 @@ class InvalidParams(JsonRpcError): code = -32602 message = "Invalid params" - class ServerError(JsonRpcError): code = -32000 message = "Server error" + class RpcMethods: def __init__(self): self._methods: Dict[str, Callable[..., Awaitable[Any]]] = {} diff --git a/quarkchain/cluster/subscription.py b/quarkchain/cluster/subscription.py index 11b6c84a7..eba13e644 100644 --- a/quarkchain/cluster/subscription.py +++ b/quarkchain/cluster/subscription.py @@ -1,9 +1,8 @@ import asyncio import json -from typing import List, Dict, Tuple, Optional, Callable +from typing import Any, List, Dict, Tuple, Optional, Callable -from jsonrpcserver.exceptions import InvalidParams -from websockets import WebSocketServerProtocol +from quarkchain.cluster.jsonrpc_server import InvalidParams from quarkchain.core import MinorBlock @@ -20,7 +19,7 @@ def __init__(self): SUB_NEW_PENDING_TX: {}, SUB_LOGS: {}, SUB_SYNC: {}, - } # type: Dict[str, Dict[str, WebSocketServerProtocol]] + } # type: Dict[str, Dict[str, Any]] self.log_filter_gen = {} # type: Dict[str, Callable] def add_subscriber(self, sub_type, sub_id, conn, extra=None): diff --git a/quarkchain/cluster/tests/test_jsonrpc.py 
b/quarkchain/cluster/tests/test_jsonrpc.py index 3970a1981..b934bfd60 100644 --- a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py @@ -50,7 +50,7 @@ def jrpc_http_server_context(master): rpc_client = AsyncJsonRpcClient("http://localhost:38391") def send_request(method, *args): - return call_async(rpc_client.call_with_dict_params(method, *args)) + return call_async(rpc_client.call_with_dict_params(method, *args)) class TestJSONRPCHttp(unittest.TestCase): diff --git a/quarkchain/tools/stats.py b/quarkchain/tools/stats.py index f3403ab58..d2688432a 100644 --- a/quarkchain/tools/stats.py +++ b/quarkchain/tools/stats.py @@ -18,7 +18,7 @@ def fstr(v: float): def basic(client: JsonRpcClient, ip): - s = client.client("getStats") + s = client.call("getStats") msg = "QuarkChain Cluster Stats\n\n" msg += "CPU: {}\n".format(psutil.cpu_count()) msg += "Memory: {} GB\n".format( From 50ffeb2fe5ae02e3c0e36b6018961e5bfbfccbfe Mon Sep 17 00:00:00 2001 From: ping-ke Date: Tue, 31 Mar 2026 23:54:56 +0800 Subject: [PATCH 09/11] fix tools compatibility with new JsonRpcClient - Fix snake_case field names to camelCase for JSON-RPC responses (network_id->networkId, shard_size->chainSize, block_height->blockHeight, contract_address->contractAddress) - Fix resp.data.result access pattern to direct dict access - Add exception handling and cli.close() to prevent connection leaks - Add 0x prefix for address in balance_watcher query_balance --- quarkchain/tools/balance_watcher.py | 8 +++++--- quarkchain/tools/batch_deploy_contract.py | 7 +++---- quarkchain/tools/erc20_balance_watcher.py | 6 ++++-- quarkchain/tools/fund_testnet.py | 6 +++--- quarkchain/tools/monitoring.py | 9 ++++++++- quarkchain/tools/reorg_detector.py | 8 +++++--- 6 files changed, 28 insertions(+), 16 deletions(-) diff --git a/quarkchain/tools/balance_watcher.py b/quarkchain/tools/balance_watcher.py index a8314f305..972ef35bb 100644 --- a/quarkchain/tools/balance_watcher.py +++ 
b/quarkchain/tools/balance_watcher.py @@ -17,22 +17,24 @@ def query(endpoint, *args): retry, resp = 0, None while retry <= 5: + cli = JsonRpcClient(HOST + ":" + PORT) try: - cli = JsonRpcClient(HOST + ":" + PORT) resp = cli.call(endpoint, *args) break except Exception: retry += 1 time.sleep(0.5) + finally: + cli.close() return resp def query_balance(recipient, chain_id, token_str): resp = query( "getBalances", - recipient.lower() + chain_id.to_bytes(2, byteorder="big").hex() + "0000", + "0x" + recipient.lower().lstrip("0x") + chain_id.to_bytes(2, byteorder="big").hex() + "0000", ) - for balance in resp.data.result["balances"]: + for balance in resp["balances"]: if balance["tokenStr"] == token_str: return int(balance["balance"], 16) return 0 diff --git a/quarkchain/tools/batch_deploy_contract.py b/quarkchain/tools/batch_deploy_contract.py index 24fab4113..a91014276 100644 --- a/quarkchain/tools/batch_deploy_contract.py +++ b/quarkchain/tools/batch_deploy_contract.py @@ -1,6 +1,5 @@ import argparse import asyncio -import logging import rlp from quarkchain.env import DEFAULT_ENV @@ -27,7 +26,7 @@ async def get_contract_address(self, tx_id): resp = await self.__send_request("getTransactionReceipt", tx_id) if not resp: return None - return resp["contract_address"] + return resp["contractAddress"] async def get_nonce(self, account): addressHex = "0x" + account.serialize().hex() @@ -36,11 +35,11 @@ async def get_nonce(self, account): async def get_shard_size(self): resp = await self.__send_request("networkInfo") - return int(resp["shard_size"], 16) + return int(resp["chainSize"], 16) async def get_network_id(self): resp = await self.__send_request("networkInfo") - return int(resp["network_id"], 16) + return int(resp["networkId"], 16) def create_transaction(address, key, nonce, data, network_id) -> EvmTransaction: diff --git a/quarkchain/tools/erc20_balance_watcher.py b/quarkchain/tools/erc20_balance_watcher.py index 14f959040..110636422 100644 --- 
a/quarkchain/tools/erc20_balance_watcher.py +++ b/quarkchain/tools/erc20_balance_watcher.py @@ -18,13 +18,15 @@ def query(endpoint, args): retry, resp = 0, None while retry <= 5: + cli = JsonRpcClient(HOST + ":" + PORT) try: - cli = JsonRpcClient(HOST + ":" + PORT) resp = cli.call(endpoint, *args) break except Exception: retry += 1 time.sleep(0.5) + finally: + cli.close() return resp @@ -33,7 +35,7 @@ def query_balance(recipient): "eth_call", [{"from": None, "to": "0xea26c4ac16d4a5a106820bc8aee85fd0b7b2b664", "data": "0x70a08231"+int(recipient, 0).to_bytes(32, byteorder="big").hex()}, "latest"] ) - return int(resp.data.result, 0) + return int(resp, 0) def main(): diff --git a/quarkchain/tools/fund_testnet.py b/quarkchain/tools/fund_testnet.py index 926290f9c..088316f32 100644 --- a/quarkchain/tools/fund_testnet.py +++ b/quarkchain/tools/fund_testnet.py @@ -43,11 +43,11 @@ async def get_nonce(self, account): async def get_shard_size(self): resp = await self.__send_request("networkInfo") - return int(resp["shard_size"], 16) + return int(resp["chainSize"], 16) async def get_network_id(self): resp = await self.__send_request("networkInfo") - return int(resp["network_id"], 16) + return int(resp["networkId"], 16) def create_transaction(address, key, nonce, to, network_id, amount) -> EvmTransaction: @@ -85,7 +85,7 @@ async def fund_shard(endpoint, genesisId, to, network_id, shard, amount): print("retry tx={}".format(tx_id)) await endpoint.send_transaction(tx) - height = int(resp["block_height"], 16) + height = int(resp["blockHeight"], 16) status = int(resp["status"], 16) print( "shard={} tx={} block={} status={} amount={}".format( diff --git a/quarkchain/tools/monitoring.py b/quarkchain/tools/monitoring.py index fc15baa0a..0ccf6b28f 100644 --- a/quarkchain/tools/monitoring.py +++ b/quarkchain/tools/monitoring.py @@ -19,7 +19,14 @@ def fetch_peers(ip, jrpc_port): json_rpc_url = "http://{}:{}".format(ip, jrpc_port) print("calling {}".format(json_rpc_url)) cli = 
JsonRpcClient(json_rpc_url) - peers = cli.call("getPeers") + try: + peers = cli.call("getPeers") + print("success {}".format(json_rpc_url)) + except Exception: + print("Failed to get peers from {}".format(json_rpc_url)) + return [] + finally: + cli.close() return [ "{}:{}".format(ipaddress.ip_address(int(p["ip"], 16)), int(p["port"], 16)) for p in peers["peers"] diff --git a/quarkchain/tools/reorg_detector.py b/quarkchain/tools/reorg_detector.py index dea2f2c97..a2d0fc72d 100644 --- a/quarkchain/tools/reorg_detector.py +++ b/quarkchain/tools/reorg_detector.py @@ -18,26 +18,28 @@ def query(endpoint, *args): retry, resp = 0, None while retry <= 5: + cli = JsonRpcClient(HOST + ":" + PORT) try: - cli = JsonRpcClient(HOST + ":" + PORT) resp = cli.call(endpoint, *args) break except Exception: retry += 1 time.sleep(0.5) + finally: + cli.close() return resp def query_tip(): resp = query("getRootBlockByHeight", None) - return int(resp.data.result["height"], 16), resp.data.result["hash"] + return int(resp["height"], 16), resp["hash"] def query_rblock_with_height(height): if isinstance(height, int): height = hex(height) resp = query("getRootBlockByHeight", height) - return int(resp.data.result["height"], 16), resp.data.result["hash"] + return int(resp["height"], 16), resp["hash"] def main(): From d0ea0a47b42a1aa1d35903adf9b9d2238114cebd Mon Sep 17 00:00:00 2001 From: ping-ke Date: Fri, 3 Apr 2026 17:20:38 +0800 Subject: [PATCH 10/11] add unit test for jsonrpc --- quarkchain/cluster/tests/test_jsonrpc.py | 391 +++++++++++++++++++++++ 1 file changed, 391 insertions(+) diff --git a/quarkchain/cluster/tests/test_jsonrpc.py b/quarkchain/cluster/tests/test_jsonrpc.py index b934bfd60..97874e2d8 100644 --- a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py @@ -1,6 +1,8 @@ import json import unittest from contextlib import contextmanager + +import rlp import websockets from quarkchain.cluster.cluster_config import ClusterConfig @@ -1196,6 
+1198,395 @@ def test_createTransactions(self): send_request("createTransactions", {"numTxPerShard": 1, "xShardPercent": 0}) + async def test_echoQuantity(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + resp = await send_request("echoQuantity", ["0x1234"]) + self.assertEqual(resp, "0x1234") + + resp = await send_request("echoQuantity", ["0x0"]) + self.assertEqual(resp, "0x0") + + async def test_echoData(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + resp = await send_request("echoData", ["0xdeadbeef"]) + self.assertEqual(resp, "0xdeadbeef") + + async def test_networkInfo(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + resp = await send_request("networkInfo") + self.assertEqual( + resp["networkId"], + quantity_encoder(master.env.quark_chain_config.NETWORK_ID), + ) + self.assertEqual( + resp["chainSize"], + quantity_encoder(master.env.quark_chain_config.CHAIN_SIZE), + ) + self.assertEqual(len(resp["shardSizes"]), master.env.quark_chain_config.CHAIN_SIZE) + self.assertFalse(resp["syncing"]) + self.assertFalse(resp["mining"]) + self.assertEqual(resp["shardServerCount"], len(master.slave_pool)) + + async def test_getAccountData(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = 
clusters[0].master + + # without include_shards + resp = await send_request( + "getAccountData", ["0x" + acc1.serialize().hex()] + ) + primary = resp["primary"] + self.assertEqual(primary["transactionCount"], "0x0") + self.assertFalse(primary["isContract"]) + self.assertEqual( + primary["balances"], + [{"tokenId": "0x8bb0", "tokenStr": "QKC", "balance": "0xf4240"}], + ) + + # with include_shards + resp = await send_request( + "getAccountData", ["0x" + acc1.serialize().hex(), None, True] + ) + self.assertIsNotNone(resp["primary"]) + # should have one entry per shard + self.assertEqual( + len(resp["shards"]), + len(master.env.quark_chain_config.shards), + ) + + async def test_sendRawTransaction(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_random_account(full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + slaves = clusters[0].slave_list + master = clusters[0].master + + evm_tx = EvmTransaction( + nonce=0, + gasprice=6, + startgas=30000, + to=acc2.recipient, + value=15, + data=b"", + from_full_shard_key=acc1.full_shard_key, + to_full_shard_key=acc2.full_shard_key, + network_id=slaves[0].env.quark_chain_config.NETWORK_ID, + gas_token_id=master.env.quark_chain_config.genesis_token, + transfer_token_id=master.env.quark_chain_config.genesis_token, + ) + evm_tx.sign(id1.get_key()) + + raw_tx_data = "0x" + rlp.encode(evm_tx).hex() + response = await send_request("sendRawTransaction", [raw_tx_data]) + tx = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx)) + self.assertEqual(response, "0x" + tx.get_hash().hex() + "00000000") + + state = clusters[0].get_shard_state(2 | 0) + self.assertEqual(len(state.tx_queue), 1) + + # eth_sendRawTransaction should also work + evm_tx2 = EvmTransaction( + nonce=1, + gasprice=6, + startgas=30000, + to=acc2.recipient, + value=10, + data=b"", + 
from_full_shard_key=acc1.full_shard_key, + to_full_shard_key=acc2.full_shard_key, + network_id=slaves[0].env.quark_chain_config.NETWORK_ID, + gas_token_id=master.env.quark_chain_config.genesis_token, + transfer_token_id=master.env.quark_chain_config.genesis_token, + ) + evm_tx2.sign(id1.get_key()) + + raw_tx_data2 = "0x" + rlp.encode(evm_tx2).hex() + response2 = await send_request("eth_sendRawTransaction", [raw_tx_data2]) + tx2 = TypedTransaction(SerializedEvmTransaction.from_evm_tx(evm_tx2)) + self.assertEqual(response2, "0x" + tx2.get_hash().hex() + "00000000") + self.assertEqual(len(state.tx_queue), 2) + + async def test_getRootBlockById(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + + block = await master.get_next_block_to_mine( + address=acc1, branch_value=None + ) + await master.add_root_block(block) + + resp = await send_request( + "getRootBlockById", ["0x" + block.header.get_hash().hex(), True] + ) + self.assertEqual(resp["hash"], data_encoder(block.header.get_hash())) + self.assertEqual(resp["height"], quantity_encoder(block.header.height)) + self.assertEqual( + resp["miner"], + "0x" + block.header.coinbase_address.serialize().hex(), + ) + self.assertEqual( + resp["difficulty"], quantity_encoder(block.header.difficulty) + ) + + # non-existent block + resp = await send_request( + "getRootBlockById", ["0x" + "ff" * 32, True] + ) + self.assertIsNone(resp) + + async def test_getRootBlockByHeight(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + + block = await master.get_next_block_to_mine( + address=acc1, branch_value=None + ) + 
await master.add_root_block(block) + + # by specific height + resp = await send_request( + "getRootBlockByHeight", [quantity_encoder(block.header.height), True] + ) + self.assertEqual(resp["hash"], data_encoder(block.header.get_hash())) + self.assertEqual(resp["height"], quantity_encoder(block.header.height)) + + # latest (no height) should return the same block + resp = await send_request("getRootBlockByHeight", [None, True]) + self.assertEqual(resp["hash"], data_encoder(block.header.get_hash())) + self.assertEqual(resp["height"], quantity_encoder(block.header.height)) + + async def test_getAllTransactions(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # send 3 transactions, each in its own block + txs = [] + values = [12345, 67890, 99999] + for v in values: + tx = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=v, + ) + self.assertTrue(slaves[0].add_tx(tx)) + txs.append(tx) + + block = await master.get_next_block_to_mine( + address=acc1, branch_value=0b10 + ) + self.assertTrue( + (await clusters[0].get_shard(2 | 0).add_block(block)) + ) + + # fetch all + resp = await send_request("getAllTransactions", ["0x0", "0x", "0xa"]) + self.assertEqual(len(resp["txList"]), 3) + tx_ids = {item["txId"] for item in resp["txList"]} + for tx in txs: + expected_id = ( + "0x" + + tx.get_hash().hex() + + acc1.full_shard_key.to_bytes(4, "big").hex() + ) + self.assertIn(expected_id, tx_ids) + returned_values = {item["value"] for item in resp["txList"]} + self.assertEqual(returned_values, {hex(v) for v in values}) + + # test limit: only fetch 2 + resp = await send_request("getAllTransactions", ["0x0", "0x", "0x2"]) + 
self.assertEqual(len(resp["txList"]), 2) + + # use "next" to fetch the remaining 1 + resp2 = await send_request( + "getAllTransactions", ["0x0", resp["next"], "0xa"] + ) + self.assertEqual(len(resp2["txList"]), 1) + + async def test_getTransactionsByAddress(self): + id1 = Identity.create_random_identity() + id2 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + acc2 = Address.create_from_identity(id2, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + slaves = clusters[0].slave_list + + # tx1: acc1 -> acc1, value=12345 + tx1 = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc1, + value=12345, + ) + self.assertTrue(slaves[0].add_tx(tx1)) + + block = await master.get_next_block_to_mine( + address=acc1, branch_value=0b10 + ) + self.assertTrue((await clusters[0].get_shard(2 | 0).add_block(block))) + + # tx2: acc1 -> acc2, value=67890 + tx2 = create_transfer_transaction( + shard_state=clusters[0].get_shard_state(2 | 0), + key=id1.get_key(), + from_address=acc1, + to_address=acc2, + value=67890, + ) + self.assertTrue(slaves[0].add_tx(tx2)) + + block = await master.get_next_block_to_mine( + address=acc1, branch_value=0b10 + ) + self.assertTrue((await clusters[0].get_shard(2 | 0).add_block(block))) + + # query by acc1: should see both txs (as sender) + resp = await send_request( + "getTransactionsByAddress", + ["0x" + acc1.serialize().hex(), "0x", "0xa"], + ) + self.assertEqual(len(resp["txList"]), 2) + for item in resp["txList"]: + self.assertEqual( + item["fromAddress"], "0x" + acc1.serialize().hex() + ) + returned_values = {item["value"] for item in resp["txList"]} + self.assertEqual(returned_values, {hex(12345), hex(67890)}) + + # query by acc2: should see 1 tx (as receiver) + resp = await send_request( + 
"getTransactionsByAddress", + ["0x" + acc2.serialize().hex(), "0x", "0xa"], + ) + self.assertEqual(len(resp["txList"]), 1) + self.assertEqual(resp["txList"][0]["value"], hex(67890)) + + async def test_getTotalSupply(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + resp = await send_request("getTotalSupply") + total_supply = master.get_total_supply() + self.assertEqual(resp, quantity_encoder(total_supply)) + + async def test_net_version(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + resp = await send_request("net_version") + self.assertEqual( + resp, + quantity_encoder(master.env.quark_chain_config.NETWORK_ID), + ) + + async def test_getJrpcCalls(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + # call networkInfo twice to register counters + await send_request("networkInfo") + await send_request("networkInfo") + + resp = await send_request("getJrpcCalls") + self.assertEqual(resp["networkInfo"], 2) + # getJrpcCalls itself should also be counted + self.assertEqual(resp["getJrpcCalls"], 1) + + async def test_eth_getBlockByNumber(self): + id1 = Identity.create_random_identity() + acc1 = Address.create_from_identity(id1, full_shard_key=0) + + async with ClusterContext( + 1, acc1, small_coinbase=True + ) as clusters, jrpc_http_server_context(clusters[0].master): + master = clusters[0].master + + block = await master.get_next_block_to_mine( + address=acc1, 
branch_value=0b10 + ) + self.assertTrue((await clusters[0].get_shard(2 | 0).add_block(block))) + + # by height + resp = await send_request("eth_getBlockByNumber", ["0x1", False]) + self.assertEqual(resp["number"], "0x1") + self.assertEqual(resp["hash"], data_encoder(block.header.get_hash())) + self.assertEqual( + resp["parentHash"], data_encoder(block.header.hash_prev_minor_block) + ) + self.assertEqual( + resp["miner"], + "0x" + block.header.coinbase_address.serialize().hex(), + ) + + # latest should return the same block + resp = await send_request("eth_getBlockByNumber", ["latest", False]) + self.assertEqual(resp["number"], "0x1") + self.assertEqual(resp["hash"], data_encoder(block.header.get_hash())) + + # non-existent + resp = await send_request("eth_getBlockByNumber", ["0xff", False]) + self.assertIsNone(resp) + + + # ------------------------------- Test for JSONRPCWebsocketServer ------------------------------- @contextmanager From 4496def1ea69a2ac3e426f1a7b13cef0a3fb2f0d Mon Sep 17 00:00:00 2001 From: ping-ke Date: Mon, 6 Apr 2026 20:12:21 +0800 Subject: [PATCH 11/11] remove call_with_dict_params, unify call() for both param types, log internal RPC errors --- quarkchain/cluster/jsonrpc_server.py | 6 ++++- quarkchain/cluster/tests/test_jsonrpc.py | 2 +- quarkchain/jsonrpc_client.py | 30 ++++++++++-------------- 3 files changed, 18 insertions(+), 20 deletions(-) diff --git a/quarkchain/cluster/jsonrpc_server.py b/quarkchain/cluster/jsonrpc_server.py index 9219e6df8..b56cda0dc 100644 --- a/quarkchain/cluster/jsonrpc_server.py +++ b/quarkchain/cluster/jsonrpc_server.py @@ -1,8 +1,11 @@ import inspect +import logging from typing import Any, Callable, Dict, Optional, Awaitable from aiohttp import web +logger = logging.getLogger(__name__) + class JsonRpcError(Exception): code = -32000 @@ -148,6 +151,7 @@ async def dispatch(self, request_json: Dict[str, Any], context=None) -> Optional "id": req_id, } except Exception: + logger.exception("Internal JSON-RPC error for 
method %s", method) return { "jsonrpc": "2.0", "error": { @@ -160,7 +164,7 @@ async def dispatch(self, request_json: Dict[str, Any], context=None) -> Optional async def aiohttp_handler(self, request: web.Request) -> web.Response: body = await request.json() - # 支持 batch + # support batch if isinstance(body, list): responses = [await self.dispatch(item) for item in body] return web.json_response(responses) diff --git a/quarkchain/cluster/tests/test_jsonrpc.py b/quarkchain/cluster/tests/test_jsonrpc.py index 97874e2d8..7b93ba682 100644 --- a/quarkchain/cluster/tests/test_jsonrpc.py +++ b/quarkchain/cluster/tests/test_jsonrpc.py @@ -52,7 +52,7 @@ def jrpc_http_server_context(master): rpc_client = AsyncJsonRpcClient("http://localhost:38391") def send_request(method, *args): - return call_async(rpc_client.call_with_dict_params(method, *args)) + return call_async(rpc_client.call(method, *args)) class TestJSONRPCHttp(unittest.TestCase): diff --git a/quarkchain/jsonrpc_client.py b/quarkchain/jsonrpc_client.py index 7d9961dff..16d978abb 100644 --- a/quarkchain/jsonrpc_client.py +++ b/quarkchain/jsonrpc_client.py @@ -30,7 +30,7 @@ def call(self, method, *params): data = resp.json() if "error" in data: - raise RuntimeError(data["error"]) + raise JsonRpcError(data["error"]) return data.get("result") @@ -43,27 +43,21 @@ def __init__(self, url, timeout=10): self.client = httpx.AsyncClient(base_url=url, timeout=timeout) async def call(self, method, *params): - payload = { - "jsonrpc": "2.0", - "method": method, - "params": list(params), - "id": str(uuid.uuid4()), - } - - resp = await self.client.post("", json=payload) - resp.raise_for_status() - data = resp.json() - - if "error" in data: - raise JsonRpcError(data["error"]) + # JSON-RPC "params" can be a list (positional) or dict (named). 
+ # The old jsonrpcclient library handled this internally; since we + # replaced it with a hand-rolled client we replicate the logic here: + # call("method", [a, b]) -> params = [a, b] (positional) + # call("method", {k: v}) -> params = {k: v} (named) + # call("method", a, b) -> params = [a, b] (positional) + if len(params) == 1 and isinstance(params[0], (dict, list)): + rpc_params = params[0] + else: + rpc_params = list(params) - return data.get("result") - - async def call_with_dict_params(self, method, params): payload = { "jsonrpc": "2.0", "method": method, - "params": params, + "params": rpc_params, "id": str(uuid.uuid4()), }