From 91888358a1e9979fd85235d72d09c596867bf944 Mon Sep 17 00:00:00 2001
From: yungwine
Date: Mon, 4 Dec 2023 18:19:21 +0800
Subject: [PATCH] add getLibraries

---
 ton-http-api/pyTON/main.py    | 103 +++++++++++++++++++---------
 ton-http-api/pyTON/manager.py |  30 +++++-----
 2 files changed, 74 insertions(+), 59 deletions(-)

diff --git a/ton-http-api/pyTON/main.py b/ton-http-api/pyTON/main.py
index 654fcf2..2a74ac6 100644
--- a/ton-http-api/pyTON/main.py
+++ b/ton-http-api/pyTON/main.py
@@ -78,7 +78,7 @@ def main_config(binder):
         "name": "transactions",
         "description": "Fetching and locating transactions.",
     },
-    { 
+    {
         "name": "get config",
         "description": "Get blockchain config"
     },
@@ -112,7 +112,7 @@ def main_config(binder):
 )
 
 
-tonlib = None
+tonlib: TonlibManager = None
 
 @app.on_event("startup")
 async def startup():
@@ -205,7 +205,7 @@ def f(**kwargs):
             # Add function's default value parameters to kwargs.
             if k not in kwargs and v.default is not inspect._empty:
                 default_val = v.default
-                
+
                 if isinstance(default_val, Param) or isinstance(default_val, Body):
                     if default_val.default == ...:
                         raise TypeError("Non-optional argument expected")
@@ -296,11 +296,11 @@ async def get_wallet_information(
 @json_rpc('getTransactions')
 @wrap_result
 async def get_transactions(
-    address: str = Query(..., description="Identifier of target TON account in any form."), 
-    limit: Optional[int] = Query(default=10, description="Maximum number of transactions in response.", gt=0, le=100), 
-    lt: Optional[int] = Query(default=None, description="Logical time of transaction to start with, must be sent with *hash*."), 
-    hash: Optional[str] = Query(default=None, description="Hash of transaction to start with, in *base64* or *hex* encoding , must be sent with *lt*."), 
-    to_lt: Optional[int] = Query(default=0, description="Logical time of transaction to finish with (to get tx from *lt* to *to_lt*)."), 
+    address: str = Query(..., description="Identifier of target TON account in any form."),
+    limit: Optional[int] = Query(default=10, description="Maximum number of transactions in response.", gt=0, le=100),
+    lt: Optional[int] = Query(default=None, description="Logical time of transaction to start with, must be sent with *hash*."),
+    hash: Optional[str] = Query(default=None, description="Hash of transaction to start with, in *base64* or *hex* encoding , must be sent with *lt*."),
+    to_lt: Optional[int] = Query(default=0, description="Logical time of transaction to finish with (to get tx from *lt* to *to_lt*)."),
     archival: bool = Query(default=False, description="By default getTransaction request is processed by any available liteserver. If *archival=true* only liteservers with full history are used.")
     ):
     """
@@ -384,7 +384,7 @@ async def get_masterchain_block_signatures(
 @wrap_result
 async def get_shard_block_proof(
     workchain: int = Query(..., description="Block workchain id"),
-    shard: int = Query(..., description="Block shard id"), 
+    shard: int = Query(..., description="Block shard id"),
     seqno: int = Query(..., description="Block seqno"),
     from_seqno: Optional[int] = Query(None, description="Seqno of masterchain block starting from which proof is required. If not specified latest masterchain block is used."),
     ):
@@ -406,10 +406,10 @@ async def get_consensus_block():
 @json_rpc('lookupBlock')
 @wrap_result
 async def lookup_block(
-    workchain: int = Query(..., description="Workchain id to look up block in"), 
+    workchain: int = Query(..., description="Workchain id to look up block in"),
     shard: int = Query(..., description="Shard id to look up block in"),
     seqno: Optional[int] = Query(None, description="Block's height"),
-    lt: Optional[int] = Query(None, description="Block's logical time"), 
+    lt: Optional[int] = Query(None, description="Block's logical time"),
     unixtime: Optional[int] = Query(None, description="Block's unixtime")
     ):
     """
@@ -432,13 +432,13 @@ async def shards(
 @json_rpc('getBlockTransactions')
 @wrap_result
 async def get_block_transactions(
-    workchain: int, 
-    shard: int, 
-    seqno: int, 
-    root_hash: Optional[str] = None, 
-    file_hash: Optional[str] = None, 
-    after_lt: Optional[int] = None, 
-    after_hash: Optional[str] = None, 
+    workchain: int,
+    shard: int,
+    seqno: int,
+    root_hash: Optional[str] = None,
+    file_hash: Optional[str] = None,
+    after_lt: Optional[int] = None,
+    after_hash: Optional[str] = None,
     count: int = 40
     ):
     """
@@ -450,10 +450,10 @@ async def get_block_transactions(
 @json_rpc('getBlockHeader')
 @wrap_result
 async def get_block_header(
-    workchain: int, 
-    shard: int, 
-    seqno: int, 
-    root_hash: Optional[str] = None, 
+    workchain: int,
+    shard: int,
+    seqno: int,
+    root_hash: Optional[str] = None,
     file_hash: Optional[str] = None
     ):
     """
@@ -485,12 +485,23 @@ async def get_token_data(
     address = prepare_address(address)
     return await tonlib.get_token_data(address)
 
+@app.get('/getLibraries', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
+@json_rpc('getLibraries')
+@wrap_result
+async def get_libraries(
+    libraries: list = Query(..., description="List of base64 encoded libraries hashes")
+    ):
+    """
+    Get libraries codes.
+    """
+    return await tonlib.getLibraries(libraries)
+
 @app.get('/tryLocateTx', response_model=TonResponse, response_model_exclude_none=True, tags=['transactions'])
 @json_rpc('tryLocateTx')
 @wrap_result
 async def get_try_locate_tx(
-    source: str, 
-    destination: str, 
+    source: str,
+    destination: str,
     created_lt: int
     ):
     """
@@ -502,8 +513,8 @@ async def get_try_locate_tx(
 @json_rpc('tryLocateResultTx')
 @wrap_result
 async def get_try_locate_result_tx(
-    source: str, 
-    destination: str, 
+    source: str,
+    destination: str,
     created_lt: int
     ):
     """
@@ -515,8 +526,8 @@ async def get_try_locate_result_tx(
 @json_rpc('tryLocateSourceTx')
 @wrap_result
 async def get_try_locate_source_tx(
-    source: str, 
-    destination: str, 
+    source: str,
+    destination: str,
     created_lt: int
     ):
     """
@@ -604,9 +615,9 @@ async def send_cell(
 @json_rpc('sendQuery')
 @wrap_result
 async def send_query(
-    address: str = Body(..., description="Address in any format"), 
-    body: str = Body(..., description="b64-encoded boc-serialized cell with message body"), 
-    init_code: str = Body(default='', description="b64-encoded boc-serialized cell with init-code"), 
+    address: str = Body(..., description="Address in any format"),
+    body: str = Body(..., description="b64-encoded boc-serialized cell with message body"),
+    init_code: str = Body(default='', description="b64-encoded boc-serialized cell with init-code"),
     init_data: str = Body(default='', description="b64-encoded boc-serialized cell with init-data")
     ):
     """
@@ -622,9 +633,9 @@ async def send_query(
 @json_rpc('sendQuerySimple')
 @wrap_result
 async def send_query_cell(
-    address: str = Body(..., description="Address in any format"), 
-    body: str = Body(..., description='Body cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'), 
-    init_code: Optional[Dict[str, Any]] = Body(default=None, description='init-code cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'), 
+    address: str = Body(..., description="Address in any format"),
+    body: str = Body(..., description='Body cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
+    init_code: Optional[Dict[str, Any]] = Body(default=None, description='init-code cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
     init_data: Optional[Dict[str, Any]] = Body(default=None, description='init-data cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`')
     ):
     """
@@ -646,10 +657,10 @@ async def send_query_cell(
 @json_rpc('estimateFee')
 @wrap_result
 async def estimate_fee(
-    address: str = Body(..., description='Address in any format'), 
-    body: str = Body(..., description='b64-encoded cell with message body'), 
-    init_code: str = Body(default='', description='b64-encoded cell with init-code'), 
-    init_data: str = Body(default='', description='b64-encoded cell with init-data'), 
+    address: str = Body(..., description='Address in any format'),
+    body: str = Body(..., description='b64-encoded cell with message body'),
+    init_code: str = Body(default='', description='b64-encoded cell with init-code'),
+    init_data: str = Body(default='', description='b64-encoded cell with init-data'),
     ignore_chksig: bool = Body(default=True, description='If true during test query processing assume that all chksig operations return True')
     ):
     """
@@ -665,10 +676,10 @@ async def estimate_fee(
 @json_rpc('estimateFeeSimple')
 @wrap_result
 async def estimate_fee_cell(
-    address: str = Body(..., description='Address in any format'), 
-    body: Dict[str, Any] = Body(..., description='Body cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'), 
-    init_code: Optional[Dict[str, Any]] = Body(default=None, description='init-code cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'), 
-    init_data: Optional[Dict[str, Any]] = Body(default=None, description='init-data cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'), 
+    address: str = Body(..., description='Address in any format'),
+    body: Dict[str, Any] = Body(..., description='Body cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
+    init_code: Optional[Dict[str, Any]] = Body(default=None, description='init-code cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
+    init_data: Optional[Dict[str, Any]] = Body(default=None, description='init-data cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
     ignore_chksig: bool = Body(default=True, description='If true during test query processing assume that all chksig operations return True')
     ):
     """
@@ -692,8 +703,8 @@ async def estimate_fee_cell(
 @json_rpc('runGetMethod')
 @wrap_result
 async def run_get_method(
-    address: str = Body(..., description='Contract address'), 
-    method: Union[str, int] = Body(..., description='Method name or method id'), 
+    address: str = Body(..., description='Contract address'),
+    method: Union[str, int] = Body(..., description='Method name or method id'),
     stack: List[List[Any]] = Body(..., description="Array of stack elements: `[['num',3], ['cell', cell_object], ['slice', slice_object]]`")
     ):
     """
@@ -707,7 +718,7 @@ async def run_get_method(
 @app.post('/jsonRPC', response_model=TonResponseJsonRPC, response_model_exclude_none=True, tags=['json rpc'])
 async def jsonrpc_handler(json_rpc: TonRequestJsonRPC, request: Request, response: Response, background_tasks: BackgroundTasks):
     """
-    All methods in the API are available through JSON-RPC protocol ([spec](https://www.jsonrpc.org/specification)). 
+    All methods in the API are available through JSON-RPC protocol ([spec](https://www.jsonrpc.org/specification)).
     """
     params = json_rpc.params
     method = json_rpc.method
@@ -728,5 +739,5 @@ async def jsonrpc_handler(json_rpc: TonRequestJsonRPC, request: Request, respons
     except TypeError as e:
         response.status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
         return TonResponseJsonRPC(ok=False, error=f'TypeError: {e}', id=_id)
-    
+
     return TonResponseJsonRPC(ok=result.ok, result=result.result, error=result.error, code=result.code, id=_id)
diff --git a/ton-http-api/pyTON/manager.py b/ton-http-api/pyTON/manager.py
index fbf72ce..1e77f37 100644
--- a/ton-http-api/pyTON/manager.py
+++ b/ton-http-api/pyTON/manager.py
@@ -55,10 +55,10 @@ def __init__(self,
     async def shutdown(self):
         for i in self.futures:
             self.futures[i].cancel()
-        
+
         self.tasks['check_working'].cancel()
         await self.tasks['check_working']
-        
+
         self.tasks['check_children_alive'].cancel()
         await self.tasks['check_children_alive']
 
@@ -80,6 +80,7 @@ def setup_cache(self):
         self.getShards = self.cache_manager.cached(expire=600)(self.getShards)
         self.raw_getBlockTransactions = self.cache_manager.cached(expire=600)(self.raw_getBlockTransactions)
         self.getBlockTransactions = self.cache_manager.cached(expire=600)(self.getBlockTransactions)
+        self.getLibraries = self.cache_manager.cached(expire=600)(self.getLibraries)
         self.getBlockHeader = self.cache_manager.cached(expire=600)(self.getBlockHeader)
         self.get_config_param = self.cache_manager.cached(expire=5)(self.get_config_param)
         self.get_token_data = self.cache_manager.cached(expire=15)(self.get_token_data)
@@ -93,7 +94,7 @@ def spawn_worker(self, ls_index, force_restart=False):
                 logger.warning('Worker for liteserver #{ls_index} already exists', ls_index=ls_index)
                 return
             try:
-                worker_info['reader'].cancel() 
+                worker_info['reader'].cancel()
                 worker_info['worker'].exit_event.set()
                 worker_info['worker'].output_queue.cancel_join_thread()
                 worker_info['worker'].input_queue.cancel_join_thread()
@@ -110,7 +111,7 @@ def spawn_worker(self, ls_index, force_restart=False):
                 'restart_count': -1,
                 'tasks_count': 0
             }
-        
+
         tonlib_settings = deepcopy(self.tonlib_settings)
         tonlib_settings.keystore += f'worker_{ls_index}'
         self.workers[ls_index]['worker'] = TonlibWorker(ls_index, tonlib_settings)
@@ -129,7 +130,7 @@ async def worker_control(self, ls_index, enabled):
             self.workers[ls_index]['worker'].input_queue.close()
             self.workers[ls_index]['worker'].join()
-            
+
             await self.workers[ls_index]['reader']
 
         self.workers[ls_index]['is_enabled'] = enabled
@@ -141,7 +142,7 @@ def log_liteserver_task(self, task_result: TonlibClientResult):
         else:
             result_type = type(task_result.result).__name__
             details = {}
-        
+
         rec = {
             'timestamp': datetime.utcnow(),
             'elapsed': task_result.elapsed_time,
@@ -149,7 +150,7 @@ def log_liteserver_task(self, task_result: TonlibClientResult):
             'method': task_result.method,
             'liteserver_info': task_result.liteserver_info,
             'result_type': result_type,
-            'exception': task_result.exception 
+            'exception': task_result.exception
         }
 
         logger.info("Received result of type: {result_type}, method: {method}, task_id: {task_id}", **rec)
@@ -168,7 +169,7 @@ async def read_results(self, ls_index):
                     if task_id in self.futures and not self.futures[task_id].done():
                         if msg_content.exception is not None:
                             self.futures[task_id].set_exception(msg_content.exception)
-                        if msg_content.result is not None: 
+                        if msg_content.result is not None:
                             self.futures[task_id].set_result(msg_content.result)
                     else:
                         logger.warning("TonlibManager received result from TonlibWorker #{ls_index:03d} whose task '{task_id}' doesn't exist or is done.", ls_index=ls_index, task_id=task_id)
@@ -185,7 +186,7 @@ async def read_results(self, ls_index):
                 return
             except:
                 logger.error("read_results exception {format_exc}", format_exc=traceback.format_exc())
-    
+
     async def check_working(self):
         while True:
             try:
@@ -255,9 +256,9 @@ def get_workers_state(self):
 
     def select_worker(self, ls_index=None, archival=None, count=1):
         if count == 1 and ls_index is not None and self.workers[ls_index]['is_working']:
-            return ls_index 
+            return ls_index
 
-        suitable = [ls_index for ls_index, worker_info in self.workers.items() if worker_info['is_working'] and 
+        suitable = [ls_index for ls_index, worker_info in self.workers.items() if worker_info['is_working'] and
                     (archival is None or worker_info['worker'].is_archival == archival)]
         random.shuffle(suitable)
         if len(suitable) < count:
@@ -271,7 +272,7 @@ async def dispatch_request_to_worker(self, method, ls_index, *args, **kwargs):
         timeout = time.time() + self.tonlib_settings.request_timeout
 
         self.workers[ls_index]['tasks_count'] += 1
-        logger.info("Sending request method: {method}, task_id: {task_id}, ls_index: {ls_index}", 
+        logger.info("Sending request method: {method}, task_id: {task_id}, ls_index: {ls_index}",
                     method=method, task_id=task_id, ls_index=ls_index)
 
         await self.loop.run_in_executor(self.threadpool_executor, self.workers[ls_index]['worker'].input_queue.put, (task_id, timeout, method, args, kwargs))
@@ -376,9 +377,12 @@ async def raw_estimate_fees(self, destination, body, init_code=b'', init_data=b'
     async def getMasterchainInfo(self):
         return await self.dispatch_request('get_masterchain_info')
 
+    async def getLibraries(self, lib_hashes: list):
+        return await self.dispatch_request('get_libraries', lib_hashes)
+
     async def getMasterchainBlockSignatures(self, seqno):
         return await self.dispatch_request('get_masterchain_block_signatures', seqno)
-    
+
     async def getShardBlockProof(self, workchain, shard, seqno, from_seqno):
         return await self.dispatch_request('get_shard_block_proof', workchain, shard, seqno, from_seqno)
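
For reference, a minimal client-side sketch of how the new endpoint could be called once this patch is applied. The base URL, port, and library hash are placeholder assumptions (not part of the patch); FastAPI expands the `list` query parameter into repeated query-string keys, and the same handler is reachable through the existing /jsonRPC route because of the @json_rpc('getLibraries') registration.

    # Hypothetical usage sketch for the new /getLibraries endpoint (placeholder URL and hash).
    import requests

    BASE_URL = "http://localhost:8081"       # assumption: locally running ton-http-api instance
    lib_hashes = ["<base64 library hash>"]   # placeholder: real hashes come from contract code

    # REST form: the list is sent as repeated keys, i.e. ?libraries=<h1>&libraries=<h2>
    resp = requests.get(f"{BASE_URL}/getLibraries", params={"libraries": lib_hashes})
    print(resp.json())  # TonResponse wrapper: {"ok": true, "result": {...}} on success

    # JSON-RPC form, dispatched by the /jsonRPC handler to the same registered method
    payload = {"method": "getLibraries", "params": {"libraries": lib_hashes}, "id": "1", "jsonrpc": "2.0"}
    print(requests.post(f"{BASE_URL}/jsonRPC", json=payload).json())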