From 752cb4dfc28654e28181122881e454108541afec Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 3 Oct 2024 21:05:30 +0000 Subject: [PATCH 01/46] Add balance logs to log_route. --- src/contracts/route.py | 2 ++ src/scheduler.py | 39 +++++++++++++++++++++++++++++++++++++-- 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/src/contracts/route.py b/src/contracts/route.py index c52cc8bb4..899a8bbc3 100644 --- a/src/contracts/route.py +++ b/src/contracts/route.py @@ -53,6 +53,7 @@ class Route: uid: int route: list[LegRepr] + legs: list[Leg] theoretical_profit: int expected_profit: int realized_profit: Optional[int] @@ -104,6 +105,7 @@ def load_route(s: str) -> Route: return Route( loaded["uid"], [load_leg_repr(json_leg) for json_leg in loaded["route"]], + [], loaded["theoretical_profit"], loaded["expected_profit"], loaded["realized_profit"], diff --git a/src/scheduler.py b/src/scheduler.py index 298cbd428..49fca7b26 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -8,10 +8,12 @@ from typing import Callable, List, Self, Optional, Awaitable, Any, TypeVar, Generic from dataclasses import dataclass from cosmpy.aerial.client import LedgerClient +from cosmpy.crypto.address import Address from cosmpy.aerial.wallet import LocalWallet from src.contracts.auction import AuctionDirectory, AuctionProvider from src.contracts.route import Route, load_route, LegRepr, Status, Leg from src.contracts.pool.provider import PoolProvider +from src.util import try_multiple_clients import aiohttp import grpc @@ -21,6 +23,10 @@ MAX_ROUTE_HISTORY_LEN = 200000 +# Length to truncate denoms in balance logs to +DENOM_BALANCE_PREFIX_MAX_DENOM_LEN = 12 + + TState = TypeVar("TState") @@ -104,6 +110,7 @@ def queue_route( LegRepr(leg.in_asset(), leg.out_asset(), leg.backend.kind, False) for leg in route ], + route, theoretical_profit, expected_profit, None, @@ -133,14 +140,42 @@ def log_route( Writes a log to the standard logger and to the log file of a route. 
""" - route.logs.append(f"{log_level.upper()} {fmt_string % tuple(args)}") + prefix = "" + + balance_resp_in = try_multiple_clients( + self.clients[route.legs[0].backend.chain_id], + lambda client: client.query_bank_balance( + Address( + self.wallet.public_key(), prefix=route.legs[0].backend.chain_prefix + ), + route.legs[0].in_asset(), + ), + ) + + if balance_resp_in: + prefix += f"BALANCE[{route.legs[0].in_asset()[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " + + balance_resp_base_denom = try_multiple_clients( + self.clients[route.legs[0].backend.chain_id], + lambda client: client.query_bank_balance( + Address( + self.wallet.public_key(), prefix=route.legs[0].backend.chain_prefix + ), + self.cli_args["base_denom"], + ), + ) + + if balance_resp_base_denom: + prefix += f"BALANCE[{self.cli_args['base_denom'][:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " + + route.logs.append(f"{log_level.upper()} {prefix}{fmt_string % tuple(args)}") if route.uid >= len(self.order_history) or route.uid < 0: return self.order_history[route.uid] = route - fmt_string = f"%s- {fmt_string}" + fmt_string = f"{prefix}%s- {fmt_string}" if log_level == "info": logger.info(fmt_string, str(route), *args) From 95a5eae3f5e02eb329f8a09741c01ba1b9a5eaff Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 3 Oct 2024 21:06:16 +0000 Subject: [PATCH 02/46] Make log balance prefix lowercase. 
--- src/scheduler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/scheduler.py b/src/scheduler.py index 49fca7b26..d3b10b594 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -153,7 +153,7 @@ def log_route( ) if balance_resp_in: - prefix += f"BALANCE[{route.legs[0].in_asset()[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " + prefix += f"balance[{route.legs[0].in_asset()[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " balance_resp_base_denom = try_multiple_clients( self.clients[route.legs[0].backend.chain_id], @@ -166,7 +166,7 @@ def log_route( ) if balance_resp_base_denom: - prefix += f"BALANCE[{self.cli_args['base_denom'][:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " + prefix += f"balance[{self.cli_args['base_denom'][:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " route.logs.append(f"{log_level.upper()} {prefix}{fmt_string % tuple(args)}") From 8ebba6c56017835c41f9223c27d258e7e75d7f46 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 3 Oct 2024 21:07:12 +0000 Subject: [PATCH 03/46] Fix balance log prefix base denom log. --- src/scheduler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scheduler.py b/src/scheduler.py index d3b10b594..bbdbc833e 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -166,7 +166,7 @@ def log_route( ) if balance_resp_base_denom: - prefix += f"balance[{self.cli_args['base_denom'][:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " + prefix += f"balance[{self.cli_args['base_denom'][:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_base_denom} " route.logs.append(f"{log_level.upper()} {prefix}{fmt_string % tuple(args)}") From b1de2fbf20ab216a26c346ebe31f2fe9c7136f4f Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 3 Oct 2024 21:22:37 +0000 Subject: [PATCH 04/46] Log next queued leg-related balances in route logs. 
--- src/scheduler.py | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/src/scheduler.py b/src/scheduler.py index bbdbc833e..da306810f 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -142,25 +142,34 @@ def log_route( prefix = "" - balance_resp_in = try_multiple_clients( - self.clients[route.legs[0].backend.chain_id], - lambda client: client.query_bank_balance( - Address( - self.wallet.public_key(), prefix=route.legs[0].backend.chain_prefix - ), - route.legs[0].in_asset(), + curr_leg = next( + ( + leg + for (leg_repr, leg) in zip(route.route, route.legs) + if not leg_repr.executed ), + default=None, ) - if balance_resp_in: - prefix += f"balance[{route.legs[0].in_asset()[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " + if curr_leg: + balance_resp_in = try_multiple_clients( + self.clients[curr_leg.backend.chain_id], + lambda client: client.query_bank_balance( + Address( + self.wallet.public_key(), + prefix=route.legs[0].backend.chain_prefix, + ), + curr_leg.in_asset(), + ), + ) + + if balance_resp_in: + prefix += f"balance[{curr_leg.in_asset()[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " balance_resp_base_denom = try_multiple_clients( - self.clients[route.legs[0].backend.chain_id], + self.clients[curr_leg.backend.chain_id], lambda client: client.query_bank_balance( - Address( - self.wallet.public_key(), prefix=route.legs[0].backend.chain_prefix - ), + Address(self.wallet.public_key(), prefix=curr_leg.backend.chain_prefix), self.cli_args["base_denom"], ), ) From 2a174eb2da72e8c16d5155d1d8b2de2acab44b07 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Fri, 4 Oct 2024 15:19:09 +0000 Subject: [PATCH 05/46] Display balance information in log prefixes for all in/out per prov. 
--- src/scheduler.py | 53 ++++++++++++++++++++++-------------------------- 1 file changed, 24 insertions(+), 29 deletions(-) diff --git a/src/scheduler.py b/src/scheduler.py index da306810f..efc18547d 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -140,51 +140,46 @@ def log_route( Writes a log to the standard logger and to the log file of a route. """ - prefix = "" - - curr_leg = next( - ( - leg - for (leg_repr, leg) in zip(route.route, route.legs) - if not leg_repr.executed - ), - default=None, - ) - - if curr_leg: - balance_resp_in = try_multiple_clients( - self.clients[curr_leg.backend.chain_id], + def asset_balance_prefix(leg: Leg, asset: str) -> Optional[str]: + balance_resp_asset = try_multiple_clients( + self.clients[leg.backend.chain_id], lambda client: client.query_bank_balance( Address( self.wallet.public_key(), - prefix=route.legs[0].backend.chain_prefix, + prefix=leg.backend.chain_prefix, ), - curr_leg.in_asset(), + asset, ), ) - if balance_resp_in: - prefix += f"balance[{curr_leg.in_asset()[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_in} " + if not balance_resp_asset: + return None - balance_resp_base_denom = try_multiple_clients( - self.clients[curr_leg.backend.chain_id], - lambda client: client.query_bank_balance( - Address(self.wallet.public_key(), prefix=curr_leg.backend.chain_prefix), - self.cli_args["base_denom"], - ), - ) + return f"balance[{leg.backend.chain_id}]({asset[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}): {balance_resp_asset}" - if balance_resp_base_denom: - prefix += f"balance[{self.cli_args['base_denom'][:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}]: {balance_resp_base_denom} " + def leg_balance_prefixes(leg: Leg) -> list[str]: + assets = [leg.in_asset(), leg.out_asset()] + + return [ + x for x in (asset_balance_prefix(leg, asset) for asset in assets) if x + ] + + prefix = " ".join( + { + prefix + for leg_prefixes in [leg_balance_prefixes(leg) for leg in route.legs] + for prefix in leg_prefixes + } + ) - 
route.logs.append(f"{log_level.upper()} {prefix}{fmt_string % tuple(args)}") + route.logs.append(f"{log_level.upper()} {prefix} {fmt_string % tuple(args)}") if route.uid >= len(self.order_history) or route.uid < 0: return self.order_history[route.uid] = route - fmt_string = f"{prefix}%s- {fmt_string}" + fmt_string = f"{prefix} %s- {fmt_string}" if log_level == "info": logger.info(fmt_string, str(route), *args) From 82ad22675c626f208dde966325113b1d9b3a39d1 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Fri, 4 Oct 2024 15:54:39 +0000 Subject: [PATCH 06/46] Update local-ic docker image. --- local-interchaintest/chains/neutron_osmosis_gaia.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/local-interchaintest/chains/neutron_osmosis_gaia.json b/local-interchaintest/chains/neutron_osmosis_gaia.json index e3aebf1b9..6b5402560 100644 --- a/local-interchaintest/chains/neutron_osmosis_gaia.json +++ b/local-interchaintest/chains/neutron_osmosis_gaia.json @@ -167,7 +167,7 @@ "binary": "osmosisd", "bech32_prefix": "osmo", "docker_image": { - "version": "v25.0.4", + "version": "v26.0.2", "repository": "ghcr.io/strangelove-ventures/heighliner/osmosis" }, "gas_prices": "0.0025%DENOM%", From dc80f961136779b30fbedc7a22d6840395ed0363 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Fri, 4 Oct 2024 16:52:39 +0000 Subject: [PATCH 07/46] Fix failing tests. 
--- tests/test_auction.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_auction.py b/tests/test_auction.py index 5325ed4c6..bad0c4997 100644 --- a/tests/test_auction.py +++ b/tests/test_auction.py @@ -84,7 +84,7 @@ async def test_auction_provider() -> None: * price ) assert liq_estimate - liquidity < 5 - assert liquidity - liq_estimate < 100 + assert liquidity - liq_estimate < 1000 @pytest.mark.asyncio From caf00b8a153bd460fee56e0bb5e00c5017861324 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Fri, 4 Oct 2024 18:26:54 +0000 Subject: [PATCH 08/46] Reactive osmosis local-ic integration tests. --- local-interchaintest/src/main.rs | 13 ++++++------- local-interchaintest/src/setup.rs | 2 +- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/local-interchaintest/src/main.rs b/local-interchaintest/src/main.rs index 20ea2497a..df7bce67d 100644 --- a/local-interchaintest/src/main.rs +++ b/local-interchaintest/src/main.rs @@ -183,7 +183,6 @@ fn main() -> Result<(), Box> { .with_test(Box::new(tests::test_unprofitable_arb) as TestFn) .build()?, )? - /* // Test case (astro -> osmo arb): // // - Astro: untrn-bruhtoken @ 1.5 bruhtoken/untrn @@ -213,7 +212,7 @@ fn main() -> Result<(), Box> { bruhtoken_osmo.clone(), Pool::Osmosis( OsmosisPoolBuilder::default() - .with_funds(bruhtoken_osmo.clone(), 10000000u128) + .with_funds(bruhtoken_osmo.clone(), 100000000u128) .with_funds(uosmo.clone(), 10000000u128) .with_weight(bruhtoken_osmo.clone(), 1u128) .with_weight(uosmo.clone(), 1u128) @@ -223,16 +222,16 @@ fn main() -> Result<(), Box> { .with_pool( untrn.clone(), bruhtoken.clone(), - Pool::Astroport( - AstroportPoolBuilder::default() - .with_balance_asset_a(10000000u128) - .with_balance_asset_b(10000000u128) + Pool::Auction( + AuctionPoolBuilder::default() + .with_balance_offer_asset(10000000u128) + .with_price(Decimal::percent(10)) .build()?, ), ) .with_arbbot() .with_test(Box::new(tests::test_osmo_arb) as TestFn) .build()?, - )?*/ + )? 
.join() } diff --git a/local-interchaintest/src/setup.rs b/local-interchaintest/src/setup.rs index a7e5ed36d..cd862d700 100644 --- a/local-interchaintest/src/setup.rs +++ b/local-interchaintest/src/setup.rs @@ -641,7 +641,7 @@ pub fn with_arb_bot_output(test: OwnedTestFn) -> TestResult { let proc_handle = Arc::new(proc); let proc_handle_watcher = proc_handle.clone(); let (tx_res, rx_res) = mpsc::channel(); - let mut finished = AtomicBool::new(false); + let finished = AtomicBool::new(false); let test_handle = test.clone(); From e2ea39238fb0b0134b9f9c6eec45f7fcbfbb0bd1 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 7 Oct 2024 21:21:31 +0000 Subject: [PATCH 09/46] Centralize all skip queries in context with caching. --- .../tests/transfer_neutron.py | 2 + .../tests/transfer_osmosis.py | 2 + main.py | 14 +- src/scheduler.py | 169 +++++++++++++- src/strategies/bellman_ford.py | 35 ++- src/strategies/util.py | 89 +++----- src/util.py | 216 ++---------------- tests/test_naive_strategy.py | 47 +--- tests/test_scheduler.py | 141 +++--------- tests/test_strategy_util.py | 2 +- tests/test_util.py | 70 +++--- tests/util.py | 102 ++++++++- 12 files changed, 406 insertions(+), 483 deletions(-) diff --git a/local-interchaintest/tests/transfer_neutron.py b/local-interchaintest/tests/transfer_neutron.py index 78fbda183..ab077a248 100644 --- a/local-interchaintest/tests/transfer_neutron.py +++ b/local-interchaintest/tests/transfer_neutron.py @@ -46,6 +46,8 @@ async def main() -> None: [], {}, denoms, + {}, + {}, ) await transfer_raw( diff --git a/local-interchaintest/tests/transfer_osmosis.py b/local-interchaintest/tests/transfer_osmosis.py index 7e609781c..8d2c04b6b 100644 --- a/local-interchaintest/tests/transfer_osmosis.py +++ b/local-interchaintest/tests/transfer_osmosis.py @@ -46,6 +46,8 @@ async def main() -> None: [], {}, denoms, + {}, + {}, ) await transfer_raw( diff --git a/main.py b/main.py index c0c9195ed..d58c66f02 100644 --- a/main.py +++ b/main.py @@ -13,7 
+13,7 @@ import sys from os import path import os -from typing import Any, cast, Optional +from typing import Any, cast from cosmpy.aerial.client import LedgerClient from cosmpy.aerial.wallet import LocalWallet from src.scheduler import Scheduler, Ctx @@ -105,12 +105,16 @@ async def main() -> None: f, ) - denom_map: Optional[dict[str, list[dict[str, str]]]] = None + denom_file: dict[str, Any] = { + "denom_map": {}, + "denom_routes": {}, + "chain_info": {}, + } # If the user has specified a denom map, use that instead of skip if args.denom_file is not None and path.isfile(args.denom_file): with open(args.denom_file, "r", encoding="utf-8") as f: - denom_map = json.load(f) + denom_file = json.load(f) # If the user specified a poolfile, create the poolfile if it is empty if args.pool_file is not None and not path.isfile(args.pool_file): @@ -188,7 +192,9 @@ async def main() -> None: session, [], cast(dict[str, Any], json.load(f)), - denom_map, + denom_file["denom_map"], + denom_file["denom_routes"], + denom_file["chain_info"], ).recover_history() sched = Scheduler(ctx, strategy) diff --git a/src/scheduler.py b/src/scheduler.py index efc18547d..0f956c2e3 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -13,7 +13,13 @@ from src.contracts.auction import AuctionDirectory, AuctionProvider from src.contracts.route import Route, load_route, LegRepr, Status, Leg from src.contracts.pool.provider import PoolProvider -from src.util import try_multiple_clients +from src.util import ( + try_multiple_clients, + DenomRouteLeg, + DenomRouteQuery, + ChainInfo, + DenomChainInfo, +) import aiohttp import grpc @@ -47,7 +53,9 @@ class Ctx(Generic[TState]): http_session: aiohttp.ClientSession order_history: list[Route] deployments: dict[str, Any] - denom_map: Optional[dict[str, list[dict[str, str]]]] + denom_map: dict[str, list[DenomChainInfo]] + denom_routes: dict[DenomRouteQuery, list[DenomRouteLeg]] + chain_info: dict[str, ChainInfo] def with_state(self, state: Any) -> Self: """ @@ 
-194,6 +202,163 @@ def leg_balance_prefixes(leg: Leg) -> list[str]: if log_level == "debug": logger.debug(fmt_string, str(route), *args) + async def query_denom_route( + self, query: DenomRouteQuery + ) -> Optional[list[DenomRouteLeg]]: + if self.denom_routes and query in self.denom_routes: + return self.denom_routes[query] + + head = {"accept": "application/json", "content-type": "application/json"} + + async with self.http_session.post( + "https://api.skip.money/v2/fungible/route", + headers=head, + json={ + "amount_in": "1", + "source_asset_denom": query.src_denom, + "source_asset_chain_id": query.src_chain, + "dest_asset_denom": query.dest_denom, + "dest_asset_chain_id": query.dest_chain, + "allow_multi_tx": True, + "allow_unsafe": False, + "bridges": ["IBC"], + }, + ) as resp: + if resp.status != 200: + return None + + ops = (await resp.json())["operations"] + + # The transfer includes a swap or some other operation + # we can't handle + if any(("transfer" not in op for op in ops)): + return None + + transfer_info = ops[0]["transfer"] + + from_chain_info = await self.query_chain_info( + transfer_info["from_chain_id"] + ) + to_chain_info = await self.query_chain_info(transfer_info["to_chain_id"]) + + if not from_chain_info or not to_chain_info: + return None + + route = [ + DenomRouteLeg( + src_chain=query.src_chain, + dest_chain=query.dest_chain, + src_denom=query.src_denom, + dest_denom=query.dest_denom, + from_chain=from_chain_info, + to_chain=to_chain_info, + denom_in=transfer_info["denom_in"], + denom_out=transfer_info["denom_out"], + port=transfer_info["port"], + channel=transfer_info["channel"], + ) + for op in ops + ] + + self.denom_routes[query] = route + + return route + + async def query_chain_info( + self, + chain_id: str, + ) -> Optional[ChainInfo]: + """ + Gets basic information about a cosmos chain. 
+ """ + + if chain_id in self.chain_info: + return self.chain_info[chain_id] + + head = {"accept": "application/json", "content-type": "application/json"} + + async with self.http_session.get( + f"https://api.skip.money/v2/info/chains?chain_ids={chain_id}", + headers=head, + ) as resp: + if resp.status != 200: + return None + + chains = (await resp.json())["chains"] + + if len(chains) == 0: + return None + + chain = chains[0] + + chain_info = ChainInfo( + chain_name=chain["chain_name"], + chain_id=chain["chain_id"], + pfm_enabled=chain["pfm_enabled"], + supports_memo=chain["supports_memo"], + bech32_prefix=chain["bech32_prefix"], + fee_asset=chain["fee_assets"][0]["denom"], + chain_type=chain["chain_type"], + pretty_name=chain["pretty_name"], + ) + + self.chain_info[chain_id] = chain_info + + return chain_info + + async def query_denom_info_on_chain( + self, + src_chain: str, + src_denom: str, + dest_chain: str, + ) -> Optional[DenomChainInfo]: + """ + Gets a neutron denom's denom and channel on/to another chain. + """ + + infos = await self.query_denom_info(src_chain, src_denom) + + return next((info for info in infos if info.dest_chain_id == dest_chain)) + + async def query_denom_info( + self, + src_chain: str, + src_denom: str, + ) -> list[DenomChainInfo]: + """ + Gets a denom's denom and channel on/to other chains. 
+ """ + + if src_denom in self.denom_map: + return self.denom_map[src_denom] + + head = {"accept": "application/json", "content-type": "application/json"} + + async with self.http_session.post( + "https://api.skip.money/v1/fungible/assets_from_source", + headers=head, + json={ + "allow_multi_tx": False, + "include_cw20_assets": True, + "source_asset_denom": src_denom, + "source_asset_chain_id": src_chain, + "client_id": "timewave-arb-bot", + }, + ) as resp: + if resp.status != 200: + return [] + + dests = (await resp.json())["dest_assets"] + + def chain_info(chain_id: str, info: dict[str, Any]) -> DenomChainInfo: + info = info["assets"][0] + + return DenomChainInfo( + src_chain_id=src_chain, denom=info["denom"], dest_chain_id=chain_id + ) + + return [chain_info(chain_id, info) for chain_id, info in dests.items()] + class Scheduler(Generic[TState]): """ diff --git a/src/strategies/bellman_ford.py b/src/strategies/bellman_ford.py index d70ed918e..4c5e145a4 100644 --- a/src/strategies/bellman_ford.py +++ b/src/strategies/bellman_ford.py @@ -21,7 +21,6 @@ quantities_for_route_profit, ) from src.util import ( - denom_info_on_chain, try_multiple_clients, ) from cosmpy.crypto.address import Address @@ -409,17 +408,15 @@ async def route_bellman_ford( for edge_a, edge_b in ctx.state.weights.values(): async def relax_edge(edge: Edge) -> None: - in_asset_infos = await denom_info_on_chain( + in_asset_infos = await ctx.query_denom_info_on_chain( edge.backend.backend.chain_id, edge.backend.in_asset(), "neutron-1", - ctx.http_session, ) - out_asset_infos = await denom_info_on_chain( + out_asset_infos = await ctx.query_denom_info_on_chain( edge.backend.backend.chain_id, edge.backend.out_asset(), "neutron-1", - ctx.http_session, ) if not in_asset_infos: @@ -428,8 +425,8 @@ async def relax_edge(edge: Edge) -> None: if not out_asset_infos: return - in_asset = in_asset_infos[0].denom - out_asset = out_asset_infos[0].denom + in_asset = in_asset_infos.denom + out_asset = 
out_asset_infos.denom if ( ( @@ -507,40 +504,36 @@ def check_cycle(edge: Edge) -> Optional[list[str]]: # If this trade doesn't start and end with USDC # construct it to do so if legs[0].in_asset() != src or legs[-1].out_asset() != src: - in_denom = await denom_info_on_chain( + in_denom = await ctx.query_denom_info_on_chain( "neutron-1", src, legs[0].backend.chain_id, - ctx.http_session, ) if not in_denom: return None - out_denom = await denom_info_on_chain( + out_denom = await ctx.query_denom_info_on_chain( "neutron-1", src, legs[-1].backend.chain_id, - ctx.http_session, ) if not out_denom: return None in_legs: list[Union[PoolProvider, AuctionProvider]] = list( - pools.get(in_denom[0].denom, {}).get(legs[0].in_asset(), []) + pools.get(in_denom.denom, {}).get(legs[0].in_asset(), []) ) - in_auction = auctions.get(in_denom[0].denom, {}).get(legs[0].in_asset(), None) + in_auction = auctions.get(in_denom.denom, {}).get(legs[0].in_asset(), None) if in_auction: in_legs.append(in_auction) out_legs: list[Union[PoolProvider, AuctionProvider]] = list( - pools.get(legs[-1].out_asset(), {}).get(out_denom[0].denom, []) - ) - out_auction = auctions.get(legs[-1].out_asset(), {}).get( - out_denom[0].denom, None + pools.get(legs[-1].out_asset(), {}).get(out_denom.denom, []) ) + out_auction = auctions.get(legs[-1].out_asset(), {}).get(out_denom.denom, None) if out_auction: out_legs.append(out_auction) @@ -556,12 +549,12 @@ def check_cycle(edge: Edge) -> Optional[list[str]]: Leg( ( in_leg.asset_a - if in_leg.asset_a() == in_denom[0].denom + if in_leg.asset_a() == in_denom.denom else in_leg.asset_b ), ( in_leg.asset_b - if in_leg.asset_a() == in_denom[0].denom + if in_leg.asset_a() == in_denom.denom else in_leg.asset_a ), in_leg, @@ -572,12 +565,12 @@ def check_cycle(edge: Edge) -> Optional[list[str]]: Leg( ( out_leg.asset_b - if out_leg.asset_a() == out_denom[0].denom + if out_leg.asset_a() == out_denom.denom else out_leg.asset_a ), ( out_leg.asset_a - if out_leg.asset_a() == 
out_denom[0].denom + if out_leg.asset_a() == out_denom.denom else out_leg.asset_b ), out_leg, diff --git a/src/strategies/util.py b/src/strategies/util.py index efae0dfe5..7354c8418 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -21,17 +21,13 @@ NeutronAstroportPoolProvider, ) from src.util import ( - chain_info, IBC_TRANSFER_TIMEOUT_SEC, IBC_TRANSFER_POLL_INTERVAL_SEC, try_multiple_rest_endpoints, try_multiple_clients_fatal, try_multiple_clients, DENOM_QUANTITY_ABORT_ARB, - denom_route, - denom_info_on_chain, - denom_info, - DenomChainInfo, + DenomRouteQuery, ) from src.scheduler import Ctx from cosmos.base.v1beta1 import coin_pb2 @@ -458,7 +454,7 @@ async def rebalance_portfolio( for chain_id in ctx.endpoints.keys(): logger.info("Rebalancing portfolio for chain %s", chain_id) - chain_meta = await chain_info(chain_id, ctx.http_session) + chain_meta = await ctx.query_chain_info(chain_id) if not chain_meta: continue @@ -585,8 +581,6 @@ async def listen_routes_with_depth_dfs( ] ] = None, ) -> AsyncGenerator[tuple[Route, list[Leg]], None]: - denom_cache: dict[str, dict[str, list[str]]] = {} - start_pools: list[Union[AuctionProvider, PoolProvider]] = [ *auctions.get(src, {}).values(), *(pool for pool_set in pools.get(src, {}).values() for pool in pool_set), @@ -604,17 +598,19 @@ async def listen_routes_with_depth_dfs( async def next_legs( path: list[Leg], ) -> AsyncGenerator[tuple[Route, list[Leg]], None]: - nonlocal denom_cache nonlocal eval_profit + matching_denoms = [ + info.denom + for info in await ctx.query_denom_info( + path[-2].backend.chain_id, + path[-2].out_asset(), + ) + ] + if len(path) >= 2 and not ( path[-1].in_asset() == path[-2].out_asset() - or path[-1].in_asset() - in [ - denom - for denom_list in denom_cache[path[-2].out_asset()].values() - for denom in denom_list - ] + or path[-1].in_asset() in matching_denoms ): return @@ -662,29 +658,7 @@ async def next_legs( # no more work to do end = prev_pool.out_asset() - if end not in 
denom_cache: - try: - denom_infos = await denom_info( - prev_pool.backend.chain_id, end, ctx.http_session, ctx.denom_map - ) - - denom_cache[end] = { - info[0].chain_id: [i.denom for i in info] - for info in ( - denom_infos - + [ - [ - DenomChainInfo( - denom=end, - chain_id=prev_pool.backend.chain_id, - ) - ] - ] - ) - if len(info) > 0 and info[0].chain_id - } - except asyncio.TimeoutError: - return + denom_infos = await ctx.query_denom_info(prev_pool.backend.chain_id, end) # A pool is a candidate to be a next pool if it has a denom # contained in denom_cache[end] or one of its denoms *is* end @@ -721,38 +695,40 @@ async def next_legs( Leg( ( auction.asset_a - if auction.asset_a() == src or auction.asset_a() == denom + if auction.asset_a() == src + or auction.asset_a() == denom_info.denom else auction.asset_b ), ( auction.asset_a - if auction.asset_a() != src and auction.asset_a() != denom + if auction.asset_a() != src + and auction.asset_a() != denom_info.denom else auction.asset_b ), auction, ) - for denom_set in denom_cache[end].values() - for denom in denom_set - for auction in auctions.get(denom, {}).values() + for denom_info in denom_infos + for auction in auctions.get(denom_info.denom, {}).values() if auction.chain_id != prev_pool.backend.chain_id ), *( Leg( ( pool.asset_a - if pool.asset_a() == src or pool.asset_a() == denom + if pool.asset_a() == src + or pool.asset_a() == denom_info.denom else pool.asset_b ), ( pool.asset_a - if pool.asset_a() != src and pool.asset_a() != denom + if pool.asset_a() != src + and pool.asset_a() != denom_info.denom else pool.asset_b ), pool, ) - for denom_set in denom_cache[end].values() - for denom in denom_set - for pool_set in pools.get(denom, {}).values() + for denom_info in denom_infos + for pool_set in pools.get(denom_info.denom, {}).values() for pool in pool_set if pool.chain_id != prev_pool.backend.chain_id ), @@ -862,12 +838,10 @@ async def transfer( succeeded. 
""" - denom_infos_on_dest = await denom_info_on_chain( + denom_infos_on_dest = await ctx.query_denom_info_on_chain( prev_leg.backend.chain_id, denom, leg.backend.chain_id, - ctx.http_session, - ctx.denom_map, ) if not denom_infos_on_dest: @@ -875,12 +849,13 @@ async def transfer( f"Missing denom info for transfer {denom} ({prev_leg.backend.chain_id}) -> {leg.backend.chain_id}" ) - ibc_route = await denom_route( - prev_leg.backend.chain_id, - denom, - leg.backend.chain_id, - denom_infos_on_dest[0].denom, - ctx.http_session, + ibc_route = await ctx.query_denom_route( + DenomRouteQuery( + src_chain=prev_leg.backend.chain_id, + src_denom=denom, + dest_chain=leg.backend.chain_id, + dest_denom=denom_infos_on_dest.denom, + ) ) if not ibc_route or len(ibc_route) == 0: diff --git a/src/util.py b/src/util.py index d36124d90..87276631d 100644 --- a/src/util.py +++ b/src/util.py @@ -255,6 +255,21 @@ class ChainInfo: pretty_name: str +@dataclass +class DenomRouteQuery: + """ + Information identifying a request for a denom route. + """ + + src_chain: str + src_denom: str + dest_chain: str + dest_denom: str + + def __hash__(self) -> int: + return hash((self.src_chain, self.src_denom, self.dest_chain, self.dest_denom)) + + @dataclass class DenomRouteLeg: """ @@ -290,205 +305,8 @@ class DenomChainInfo: """ denom: str - chain_id: Optional[str] - - -async def denom_info( - src_chain: str, - src_denom: str, - session: aiohttp.ClientSession, - denom_map: Optional[dict[str, list[dict[str, str]]]] = None, -) -> list[list[DenomChainInfo]]: - """ - Gets a denom's denom and channel on/to other chains. 
- """ - - if denom_map: - return [ - [ - DenomChainInfo( - denom_info["denom"], - denom_info["chain_id"], - ) - ] - for denom_info in denom_map.get(src_denom, []) - ] - - head = {"accept": "application/json", "content-type": "application/json"} - - async with session.post( - "https://api.skip.money/v1/fungible/assets_from_source", - headers=head, - json={ - "allow_multi_tx": False, - "include_cw20_assets": True, - "source_asset_denom": src_denom, - "source_asset_chain_id": src_chain, - "client_id": "timewave-arb-bot", - }, - ) as resp: - if resp.status != 200: - return [] - - dests = (await resp.json())["dest_assets"] - - def chain_info(chain_id: str, info: dict[str, Any]) -> DenomChainInfo: - info = info["assets"][0] - - return DenomChainInfo(denom=info["denom"], chain_id=chain_id) - - return [[chain_info(chain_id, info) for chain_id, info in dests.items()]] - - -async def denom_info_on_chain( - src_chain: str, - src_denom: str, - dest_chain: str, - session: aiohttp.ClientSession, - denom_map: Optional[dict[str, list[dict[str, str]]]] = None, -) -> Optional[list[DenomChainInfo]]: - """ - Gets a neutron denom's denom and channel on/to another chain. 
- """ - - if denom_map: - return [ - DenomChainInfo( - denom_info["denom"], - denom_info["chain_id"], - ) - for denom_info in denom_map.get(src_denom, []) - if denom_info["chain_id"] == dest_chain - ][:1] - - head = {"accept": "application/json", "content-type": "application/json"} - - async with session.post( - "https://api.skip.money/v1/fungible/assets_from_source", - headers=head, - json={ - "allow_multi_tx": False, - "include_cw20_assets": True, - "source_asset_denom": src_denom, - "source_asset_chain_id": src_chain, - "client_id": "timewave-arb-bot", - }, - ) as resp: - if resp.status != 200: - return None - - dests = (await resp.json())["dest_assets"] - - if dest_chain in dests: - info = dests[dest_chain]["assets"][0] - - return [DenomChainInfo(denom=info["denom"], chain_id=dest_chain)] - - return None - - -async def denom_route( - src_chain: str, - src_denom: str, - dest_chain: str, - dest_denom: str, - session: aiohttp.ClientSession, - denom_map: Optional[dict[str, list[dict[str, str]]]] = None, -) -> Optional[list[DenomRouteLeg]]: - """ - Gets a neutron denom's denom and channel on/to another chain. 
- """ - - head = {"accept": "application/json", "content-type": "application/json"} - - async with session.post( - "https://api.skip.money/v2/fungible/route", - headers=head, - json={ - "amount_in": "1", - "source_asset_denom": src_denom, - "source_asset_chain_id": src_chain, - "dest_asset_denom": dest_denom, - "dest_asset_chain_id": dest_chain, - "allow_multi_tx": True, - "allow_unsafe": False, - "bridges": ["IBC"], - }, - ) as resp: - if resp.status != 200: - return None - - ops = (await resp.json())["operations"] - - # The transfer includes a swap or some other operation - # we can't handle - if any(("transfer" not in op for op in ops)): - return None - - transfer_info = ops[0]["transfer"] - - from_chain_info = await chain_info( - transfer_info["from_chain_id"], session, denom_map - ) - to_chain_info = await chain_info( - transfer_info["to_chain_id"], session, denom_map - ) - - if not from_chain_info or not to_chain_info: - return None - - return [ - DenomRouteLeg( - src_chain=src_chain, - dest_chain=dest_chain, - src_denom=src_denom, - dest_denom=dest_denom, - from_chain=from_chain_info, - to_chain=to_chain_info, - denom_in=transfer_info["denom_in"], - denom_out=transfer_info["denom_out"], - port=transfer_info["port"], - channel=transfer_info["channel"], - ) - for op in ops - ] - - -async def chain_info( - chain_id: str, - session: aiohttp.ClientSession, - denom_map: Optional[dict[str, list[dict[str, str]]]] = None, -) -> Optional[ChainInfo]: - """ - Gets basic information about a cosmos chain. 
- """ - - head = {"accept": "application/json", "content-type": "application/json"} - - async with session.get( - f"https://api.skip.money/v2/info/chains?chain_ids={chain_id}", - headers=head, - ) as resp: - if resp.status != 200: - return None - - chains = (await resp.json())["chains"] - - if len(chains) == 0: - return None - - chain = chains[0] - - return ChainInfo( - chain_name=chain["chain_name"], - chain_id=chain["chain_id"], - pfm_enabled=chain["pfm_enabled"], - supports_memo=chain["supports_memo"], - bech32_prefix=chain["bech32_prefix"], - fee_asset=chain["fee_assets"][0]["denom"], - chain_type=chain["chain_type"], - pretty_name=chain["pretty_name"], - ) + src_chain_id: str + dest_chain_id: str @dataclass diff --git a/tests/test_naive_strategy.py b/tests/test_naive_strategy.py index f24a57a4b..cbf792004 100644 --- a/tests/test_naive_strategy.py +++ b/tests/test_naive_strategy.py @@ -3,11 +3,7 @@ """ import typing -from typing import Any from dataclasses import dataclass -import json -from src.util import custom_neutron_network_config -from src.scheduler import Ctx from src.strategies.util import fmt_route_leg, IBC_TRANSFER_GAS from src.strategies.naive import State, route_gas from src.contracts.pool.osmosis import OsmosisPoolDirectory @@ -15,9 +11,7 @@ from src.contracts.pool.astroport import NeutronAstroportPoolDirectory from src.contracts.auction import AuctionDirectory from src.util import DISCOVERY_CONCURRENCY_FACTOR -from tests.util import deployments -from cosmpy.aerial.client import LedgerClient -from cosmpy.aerial.wallet import LocalWallet +from tests.util import deployments, ctx import pytest import aiohttp import grpc @@ -91,44 +85,9 @@ async def test_fmt_route_leg() -> None: @pytest.mark.asyncio async def test_state_poll() -> None: - net_config, deployments = (None, None) - - with open("net_conf.json", "r", encoding="utf-8") as nf: - net_config = json.load(nf) - - with open("contracts/deployments.json", "r", encoding="utf-8") as f: - deployments = 
json.load(f) - - async with aiohttp.ClientSession( - connector=aiohttp.TCPConnector(force_close=True, limit_per_host=1), - timeout=aiohttp.ClientTimeout(total=30), - ) as session: - ctx: Ctx[Any] = Ctx( - { - chain_id: [ - LedgerClient( - custom_neutron_network_config(endpoint, chain_id=chain_id) - ) - for endpoint in endpoints["grpc"] - ] - for chain_id, endpoints in net_config.items() - }, - net_config, - LocalWallet.from_mnemonic( - "decorate bright ozone fork gallery riot bus exhaust worth way bone indoor calm squirrel merry zero scheme cotton until shop any excess stage laundry", - prefix="neutron", - ), - {"base_denom": "untrn"}, - None, - False, - session, - [], - deployments, - None, - ) - + async with ctx() as test_ctx: s = State(None) - s.poll(ctx, {}, {}) + s.poll(test_ctx, {}, {}) assert s.balance assert s.balance > 0 diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py index 9c59e56a6..62e943a87 100644 --- a/tests/test_scheduler.py +++ b/tests/test_scheduler.py @@ -2,42 +2,23 @@ Tests that the scheduler works as expected. 
""" -from dataclasses import dataclass -import json -from typing import List, cast, Any -from cosmpy.aerial.client import LedgerClient, NetworkConfig -from cosmpy.aerial.wallet import LocalWallet from src.scheduler import Scheduler, Ctx -from src.util import ( - NEUTRON_NETWORK_CONFIG, - DISCOVERY_CONCURRENCY_FACTOR, - custom_neutron_network_config, -) +from src.util import DISCOVERY_CONCURRENCY_FACTOR from src.contracts.pool.osmosis import OsmosisPoolDirectory from src.contracts.pool.astroport import NeutronAstroportPoolDirectory from src.contracts.pool.provider import PoolProvider from src.contracts.auction import AuctionProvider -from tests.util import deployments +from tests.util import deployments, ctx, State import aiohttp import pytest import grpc pytest_plugins = ("pytest_asyncio",) -# Note: this account has no funds and is not used for anything -TEST_WALLET_MNEMONIC = ( - "update armed valve web gate shiver birth exclude curtain cotton juice property" -) - - -@dataclass -class State: - balance: int - async def strategy( strat_ctx: Ctx[State], - _pools: dict[str, dict[str, List[PoolProvider]]], + _pools: dict[str, dict[str, list[PoolProvider]]], _auctions: dict[str, dict[str, AuctionProvider]], ) -> Ctx[State]: """ @@ -47,86 +28,14 @@ async def strategy( return strat_ctx -def ctx(session: aiohttp.ClientSession) -> Ctx[State]: - """ - Gets a default context for test schedulers. 
- """ - - endpoints: dict[str, dict[str, list[str]]] = { - "neutron-1": { - "http": ["https://neutron-rest.publicnode.com"], - "grpc": ["grpc+https://neutron-grpc.publicnode.com:443"], - }, - "osmosis-1": { - "http": ["https://lcd.osmosis.zone"], - "grpc": ["grpc+https://osmosis-grpc.publicnode.com:443"], - }, - } - - with open("contracts/deployments.json", encoding="utf-8") as f: - return Ctx( - { - "neutron-1": [ - LedgerClient(NEUTRON_NETWORK_CONFIG), - *[ - LedgerClient(custom_neutron_network_config(endpoint)) - for endpoint in endpoints["neutron-1"]["grpc"] - ], - ], - "osmosis-1": [ - *[ - LedgerClient( - NetworkConfig( - chain_id="osmosis-1", - url=endpoint, - fee_minimum_gas_price=0.0053, - fee_denomination="uosmo", - staking_denomination="uosmo", - ) - ) - for endpoint in endpoints["osmosis-1"]["grpc"] - ], - ], - }, - endpoints, - LocalWallet.from_mnemonic(TEST_WALLET_MNEMONIC, prefix="neutron"), - { - "pool_file": None, - "poll_interval": 120, - "hops": 3, - "pools": 100, - "require_leg_types": set(), - "base_denom": "", - "profit_margin": 100, - "wallet_mnemonic": "", - "cmd": "", - "net_config": "", - "log_file": "", - "history_file": "", - "skip_api_key": None, - }, - None, - False, - session, - [], - cast(dict[str, Any], json.load(f)), - None, - ).with_state(State(1000)) - - @pytest.mark.asyncio async def test_init() -> None: """ Test that a scheduler can be instantiated. 
""" - async with aiohttp.ClientSession( - connector=aiohttp.TCPConnector( - force_close=True, limit_per_host=DISCOVERY_CONCURRENCY_FACTOR - ), - timeout=aiohttp.ClientTimeout(total=30), - ) as session: - sched = Scheduler(ctx(session), strategy) + async with ctx() as test_ctx: + sched = Scheduler(test_ctx, strategy) assert sched is not None @@ -145,16 +54,17 @@ async def test_register_provider() -> None: osmosis = OsmosisPoolDirectory(deployments(), session) pool = list(list((await osmosis.pools()).values())[0].values())[0] - sched = Scheduler(ctx(session), strategy) + async with ctx() as test_ctx: + sched = Scheduler(test_ctx, strategy) - directory = OsmosisPoolDirectory(deployments(), session) - pools = await directory.pools() + directory = OsmosisPoolDirectory(deployments(), session) + pools = await directory.pools() - for base in pools.values(): - for pool in base.values(): - sched.register_provider(pool) + for base in pools.values(): + for pool in base.values(): + sched.register_provider(pool) - assert len(sched.providers) > 0 + assert len(sched.providers) > 0 @pytest.mark.asyncio @@ -184,7 +94,7 @@ async def test_poll() -> None: async def simple_strategy( strat_ctx: Ctx[State], - pools: dict[str, dict[str, List[PoolProvider]]], + pools: dict[str, dict[str, list[PoolProvider]]], auctions: dict[str, dict[str, AuctionProvider]], ) -> Ctx[State]: assert len(pools) > 0 @@ -192,18 +102,19 @@ async def simple_strategy( return strat_ctx - sched = Scheduler(ctx(session), simple_strategy) + async with ctx() as test_ctx: + sched = Scheduler(test_ctx, simple_strategy) - await sched.register_auctions() - osmos_pools = await osmosis.pools() - astro_pools = await astroport.pools() + await sched.register_auctions() + osmos_pools = await osmosis.pools() + astro_pools = await astroport.pools() - for base in osmos_pools.values(): - for pool in base.values(): - sched.register_provider(pool) + for base in osmos_pools.values(): + for pool in base.values(): + 
sched.register_provider(pool) - for astro_base in astro_pools.values(): - for astro_pool in astro_base.values(): - sched.register_provider(astro_pool) + for astro_base in astro_pools.values(): + for astro_pool in astro_base.values(): + sched.register_provider(astro_pool) - await sched.poll() + await sched.poll() diff --git a/tests/test_strategy_util.py b/tests/test_strategy_util.py index e1a73be8f..1f4f984b7 100644 --- a/tests/test_strategy_util.py +++ b/tests/test_strategy_util.py @@ -3,7 +3,7 @@ from typing import Any from src.contracts.route import Leg from src.strategies.util import collapse_route, build_atomic_arb -from tests.test_scheduler import TEST_WALLET_MNEMONIC +from tests.util import TEST_WALLET_MNEMONIC from cosmpy.aerial.wallet import LocalWallet diff --git a/tests/test_util.py b/tests/test_util.py index 9f2d91ae4..cf9b1addd 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -2,9 +2,9 @@ Tests that the skip util methods work as expected. """ -from src.util import denom_info, denom_info_on_chain, denom_route +from src.util import DenomRouteQuery +from tests.util import ctx import pytest -import aiohttp pytest_plugins = ("pytest_asyncio",) @@ -15,19 +15,15 @@ async def test_denom_info() -> None: Tests that skip can fetch the destination chains for untrn. 
""" - async with aiohttp.ClientSession( - connector=aiohttp.TCPConnector(force_close=True, limit_per_host=1), - timeout=aiohttp.ClientTimeout(total=30), - ) as session: - info = await denom_info("neutron-1", "untrn", session) + async with ctx() as test_ctx: + info = await test_ctx.query_denom_info("neutron-1", "untrn") assert info assert len(info) > 0 - assert len(info[0]) > 0 - assert info[0][0].chain_id == "archway-1" + assert info[0].dest_chain_id == "archway-1" assert ( - info[0][0].denom + info[0].denom == "ibc/9E3CDA65E02637E219B43802452D6B37D782F466CF76ECB9F47A2E00C07C4769" ) @@ -38,33 +34,29 @@ async def test_denom_info_on_chain() -> None: Tests that skip can fetch the osmosis destination chain info for untrn. """ - async with aiohttp.ClientSession( - connector=aiohttp.TCPConnector(force_close=True, limit_per_host=1), - timeout=aiohttp.ClientTimeout(total=30), - ) as session: - info = await denom_info_on_chain("neutron-1", "untrn", "osmosis-1", session) + async with ctx() as test_ctx: + info = await test_ctx.query_denom_info_on_chain( + "neutron-1", "untrn", "osmosis-1" + ) assert info - assert len(info) > 0 assert ( - info[0].denom + info.denom == "ibc/126DA09104B71B164883842B769C0E9EC1486C0887D27A9999E395C2C8FB5682" ) - assert info[0].chain_id == "osmosis-1" + assert info.dest_chain_id == "osmosis-1" - info = await denom_info_on_chain( + info = await test_ctx.query_denom_info_on_chain( "neutron-1", "ibc/376222D6D9DAE23092E29740E56B758580935A6D77C24C2ABD57A6A78A1F3955", "osmosis-1", - session, ) assert info - assert len(info) > 0 - assert info[0].denom == "uosmo" - assert info[0].chain_id == "osmosis-1" + assert info.denom == "uosmo" + assert info.dest_chain_id == "osmosis-1" @pytest.mark.asyncio @@ -73,16 +65,15 @@ async def test_denom_route() -> None: Tests that skip can fetch the route for USDC from neutron to osmosis. 
""" - async with aiohttp.ClientSession( - connector=aiohttp.TCPConnector(force_close=True, limit_per_host=1), - timeout=aiohttp.ClientTimeout(total=30), - ) as session: - info = await denom_route( - "neutron-1", - "ibc/B559A80D62249C8AA07A380E2A2BEA6E5CA9A6F079C912C3A9E9B494105E4F81", - "osmosis-1", - "ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4", - session, + async with ctx() as test_ctx: + + info = await test_ctx.query_denom_route( + DenomRouteQuery( + src_chain="neutron-1", + src_denom="ibc/376222D6D9DAE23092E29740E56B758580935A6D77C24C2ABD57A6A78A1F3955", + dest_chain="osmosis-1", + dest_denom="uosmo", + ) ) assert info @@ -90,12 +81,13 @@ async def test_denom_route() -> None: assert info - info = await denom_route( - "neutron-1", - "ibc/376222D6D9DAE23092E29740E56B758580935A6D77C24C2ABD57A6A78A1F3955", - "osmosis-1", - "uosmo", - session, + info = await test_ctx.query_denom_route( + DenomRouteQuery( + src_chain="neutron-1", + src_denom="ibc/376222D6D9DAE23092E29740E56B758580935A6D77C24C2ABD57A6A78A1F3955", + dest_chain="osmosis-1", + dest_denom="uosmo", + ) ) assert info diff --git a/tests/util.py b/tests/util.py index f48211edc..e45489e20 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,5 +1,27 @@ -from typing import Any, cast +from typing import Any, cast, AsyncIterator import json +import aiohttp +from dataclasses import dataclass +from contextlib import asynccontextmanager +from cosmpy.aerial.client import LedgerClient, NetworkConfig +from cosmpy.aerial.wallet import LocalWallet +from src.scheduler import Ctx +from src.util import ( + DISCOVERY_CONCURRENCY_FACTOR, + NEUTRON_NETWORK_CONFIG, + custom_neutron_network_config, +) + + +@dataclass +class State: + balance: int + + +# Note: this account has no funds and is not used for anything +TEST_WALLET_MNEMONIC = ( + "update armed valve web gate shiver birth exclude curtain cotton juice property" +) def deployments() -> dict[str, Any]: @@ -9,3 +31,81 @@ def deployments() -> 
dict[str, Any]: """ with open("contracts/deployments.json", encoding="utf-8") as f: return cast(dict[str, Any], json.load(f)) + + +@asynccontextmanager +async def ctx() -> AsyncIterator[Ctx[Any]]: + """ + Gets a default context for test schedulers. + """ + + async with aiohttp.ClientSession( + connector=aiohttp.TCPConnector( + force_close=True, limit_per_host=DISCOVERY_CONCURRENCY_FACTOR + ), + timeout=aiohttp.ClientTimeout(total=30), + ) as session: + endpoints: dict[str, dict[str, list[str]]] = { + "neutron-1": { + "http": ["https://neutron-rest.publicnode.com"], + "grpc": ["grpc+https://neutron-grpc.publicnode.com:443"], + }, + "osmosis-1": { + "http": ["https://lcd.osmosis.zone"], + "grpc": ["grpc+https://osmosis-grpc.publicnode.com:443"], + }, + } + + with open("contracts/deployments.json", encoding="utf-8") as f: + yield Ctx( + clients={ + "neutron-1": [ + LedgerClient(NEUTRON_NETWORK_CONFIG), + *[ + LedgerClient(custom_neutron_network_config(endpoint)) + for endpoint in endpoints["neutron-1"]["grpc"] + ], + ], + "osmosis-1": [ + *[ + LedgerClient( + NetworkConfig( + chain_id="osmosis-1", + url=endpoint, + fee_minimum_gas_price=0.0053, + fee_denomination="uosmo", + staking_denomination="uosmo", + ) + ) + for endpoint in endpoints["osmosis-1"]["grpc"] + ], + ], + }, + endpoints=endpoints, + wallet=LocalWallet.from_mnemonic( + TEST_WALLET_MNEMONIC, prefix="neutron" + ), + cli_args={ + "pool_file": None, + "poll_interval": 120, + "hops": 3, + "pools": 100, + "require_leg_types": set(), + "base_denom": "", + "profit_margin": 100, + "wallet_mnemonic": "", + "cmd": "", + "net_config": "", + "log_file": "", + "history_file": "", + "skip_api_key": None, + }, + state=None, + terminated=False, + http_session=session, + order_history=[], + deployments=cast(dict[str, Any], json.load(f)), + denom_map={}, + denom_routes={}, + chain_info={}, + ).with_state(State(1000)) From 6e44e5ed4dad2f8673ff79bad2832844fb6cf208 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 
Oct 2024 09:02:38 +0000 Subject: [PATCH 10/46] Get osmo tests working. --- local-interchaintest/src/setup.rs | 197 +++++++++++-------- local-interchaintest/src/tests.rs | 2 +- local-interchaintest/src/util.rs | 101 ++++++++-- main.py | 23 ++- src/contracts/pool/astroport.py | 2 +- src/scheduler.py | 32 ++-- src/strategies/bellman_ford.py | 2 +- src/strategies/naive.py | 7 +- src/strategies/util.py | 308 ++++++++++++++++++++++-------- src/util.py | 42 +++- tests/test_auction.py | 1 - 11 files changed, 516 insertions(+), 201 deletions(-) diff --git a/local-interchaintest/src/setup.rs b/local-interchaintest/src/setup.rs index cd862d700..ef4e46d2b 100644 --- a/local-interchaintest/src/setup.rs +++ b/local-interchaintest/src/setup.rs @@ -1,5 +1,5 @@ use super::{ - util::{self, DenomMapEntry}, + util::{self, BidirectionalDenomRouteLeg, ChainInfo, DenomFile, DenomRouteLeg}, ARBFILE_PATH, OSMO_OWNER_ADDR, OWNER_ADDR, TEST_MNEMONIC, }; use clap::Parser; @@ -53,10 +53,41 @@ impl Denom { &self, amount: u128, ctx: &mut TestContext, - ) -> Result<(String, Option<(DenomMapEntry, DenomMapEntry)>), Box> - { + ) -> Result> { match self { - Self::Local { base_denom, .. 
} => Ok((base_denom.to_owned(), None)), + Self::Local { + base_denom, + base_chain, + } => { + let src_chain = ctx.get_chain(&base_chain); + + let chain_info = ChainInfo { + chain_name: src_chain.chain_name.clone(), + chain_id: src_chain.rb.chain_id.clone(), + pfm_enabled: true, + supports_memo: true, + bech32_prefix: src_chain.chain_prefix.clone(), + fee_asset: src_chain.native_denom.clone(), + chain_type: String::from("cosmos"), + pretty_name: src_chain.chain_name.clone(), + }; + + let leg = DenomRouteLeg { + src_chain: base_chain.to_owned(), + dest_chain: base_chain.to_owned(), + src_denom: base_denom.to_owned(), + dest_denom: base_denom.to_owned(), + from_chain: chain_info.clone(), + to_chain: chain_info, + port: String::from("transfer"), + channel: String::new(), + }; + + Ok(BidirectionalDenomRouteLeg { + src_to_dest: leg.clone(), + dest_to_src: leg, + }) + } Self::Interchain { base_denom, base_chain, @@ -87,23 +118,49 @@ impl Denom { let src_chain = ctx.get_chain(&base_chain); let dest_chain = ctx.get_chain(&dest_chain); - Ok(( - ibc_denom_a.clone(), - Some(( - DenomMapEntry { - chain_id: dest_chain.rb.chain_id.clone(), - denom: ibc_denom_a.clone(), - channel_id: trace_a.to_owned(), - port_id: "transfer".to_owned(), - }, - DenomMapEntry { - chain_id: src_chain.rb.chain_id.clone(), - denom: base_denom.to_string(), - channel_id: trace_a_counter.to_owned(), - port_id: "transfer".to_owned(), - }, - )), - )) + let chain_a_info = ChainInfo { + chain_name: src_chain.chain_name.clone(), + chain_id: src_chain.rb.chain_id.clone(), + pfm_enabled: true, + supports_memo: true, + bech32_prefix: src_chain.chain_prefix.clone(), + fee_asset: src_chain.native_denom.clone(), + chain_type: String::from("cosmos"), + pretty_name: src_chain.chain_name.clone(), + }; + let chain_b_info = ChainInfo { + chain_name: dest_chain.chain_name.clone(), + chain_id: dest_chain.rb.chain_id.clone(), + pfm_enabled: true, + supports_memo: true, + bech32_prefix: dest_chain.chain_prefix.clone(), + 
fee_asset: dest_chain.native_denom.clone(), + chain_type: String::from("cosmos"), + pretty_name: dest_chain.chain_name.clone(), + }; + + Ok(BidirectionalDenomRouteLeg { + src_to_dest: DenomRouteLeg { + src_chain: src_chain.rb.chain_id.clone(), + dest_chain: dest_chain.rb.chain_id.clone(), + src_denom: base_denom.clone(), + dest_denom: ibc_denom_a.clone(), + channel: trace_a.to_owned(), + port: "transfer".to_owned(), + from_chain: chain_a_info.clone(), + to_chain: chain_b_info.clone(), + }, + dest_to_src: DenomRouteLeg { + src_chain: dest_chain.rb.chain_id.clone(), + dest_chain: src_chain.rb.chain_id.clone(), + src_denom: ibc_denom_a, + dest_denom: base_denom.clone(), + channel: trace_a_counter.to_owned(), + port: "transfer".to_owned(), + from_chain: chain_b_info, + to_chain: chain_a_info, + }, + }) } } } @@ -137,8 +194,7 @@ pub struct Args { pub struct TestRunner<'a> { test_statuses: Arc>>, cli_args: Args, - /// Mapping from (src_denom, dest_chain) -> dest_denom - denom_map: HashMap<(String, String), DenomMapEntry>, + denom_file: DenomFile, created_denoms: HashSet, test_ctx: &'a mut TestContext, } @@ -148,7 +204,7 @@ impl<'a> TestRunner<'a> { Self { test_statuses: Default::default(), cli_args, - denom_map: Default::default(), + denom_file: Default::default(), created_denoms: Default::default(), test_ctx: ctx, } @@ -195,7 +251,7 @@ impl<'a> TestRunner<'a> { // Perform hot start setup // Mapping of denoms to their matching denoms, chain id's, channel id's, and ports - self.denom_map = Default::default(); + self.denom_file = Default::default(); let ctx = &mut self.test_ctx; @@ -231,7 +287,7 @@ impl<'a> TestRunner<'a> { ctx.build_tx_create_price_oracle().send()?; ctx.build_tx_update_auction_oracle().send()?; - test.setup(&mut self.denom_map, ctx)?; + test.setup(&mut self.denom_file, ctx)?; let ntrn_to_osmo = ctx .transfer_channel_ids @@ -260,7 +316,7 @@ impl<'a> TestRunner<'a> { .expect("Failed to create deployments file"); util::create_arbs_file().expect("Failed to 
create arbs file"); util::create_netconfig().expect("Failed to create net config"); - util::create_denom_file(&self.denom_map).expect("Failed to create denom file"); + util::create_denom_file(&self.denom_file).expect("Failed to create denom file"); let statuses = self.test_statuses.clone(); @@ -359,7 +415,7 @@ pub struct Test { impl Test { pub fn setup( &mut self, - denom_map: &mut HashMap<(String, String), DenomMapEntry>, + denom_file: &mut DenomFile, ctx: &mut TestContext, ) -> Result<&mut Self, Box> { self.tokenfactory_token_balances_acc0.iter().try_for_each( @@ -421,32 +477,19 @@ impl Test { let funds_b = spec.balance_asset_b; // Create the osmo pool and join it - let (norm_denom_a, denom_map_ent_1) = - denom_a.normalize(funds_a, ctx).unwrap(); - let (norm_denom_b, denom_map_ent_2) = - denom_b.normalize(funds_b, ctx).unwrap(); - - if let Some((map_ent_a_1, map_ent_a_2)) = denom_map_ent_1 { - // (denom, neutron) -> denom' - // (denom', osmo) -> denom - denom_map.insert((denom_a.to_string(), "neutron".into()), map_ent_a_1); - denom_map.insert((norm_denom_a.clone(), "osmosis".into()), map_ent_a_2); - } - - if let Some((map_ent_b_1, map_ent_b_2)) = denom_map_ent_2 { - // (denom, neutron) -> denom' - // (denom', osmo) -> denom - denom_map.insert((denom_b.to_string(), "neutron".into()), map_ent_b_1); - denom_map.insert((norm_denom_b.clone(), "osmosis".into()), map_ent_b_2); - } + let route_leg_a = denom_a.normalize(funds_a, ctx).unwrap(); + let route_leg_b = denom_b.normalize(funds_b, ctx).unwrap(); + + denom_file.push_denom(route_leg_a.clone()); + denom_file.push_denom(route_leg_b.clone()); ctx.build_tx_create_pool() - .with_denom_a(&norm_denom_a) - .with_denom_b(&norm_denom_b) + .with_denom_a(&route_leg_a.src_to_dest.dest_denom) + .with_denom_b(&route_leg_b.src_to_dest.dest_denom) .send()?; ctx.build_tx_fund_pool() - .with_denom_a(&norm_denom_a) - .with_denom_b(&norm_denom_b) + .with_denom_a(&route_leg_a.src_to_dest.dest_denom) + 
.with_denom_b(&route_leg_b.src_to_dest.dest_denom) .with_amount_denom_a(spec.balance_asset_a) .with_amount_denom_b(spec.balance_asset_b) .with_liq_token_receiver(OWNER_ADDR) @@ -460,39 +503,41 @@ impl Test { let weight_b = spec.denom_weights.get(denom_b).unwrap_or(&0); // Create the osmo pool and join it - let (norm_denom_a, denom_map_ent_1) = - denom_a.normalize(*funds_a, ctx).unwrap(); - let (norm_denom_b, denom_map_ent_2) = - denom_b.normalize(*funds_b, ctx).unwrap(); - - if let Some((map_ent_a_1, map_ent_a_2)) = denom_map_ent_1 { - // (denom, neutron) -> denom' - // (denom', osmo) -> denom - denom_map.insert((denom_a.to_string(), "osmosis".into()), map_ent_a_1); - denom_map.insert((norm_denom_a.clone(), "neutron".into()), map_ent_a_2); - } - - if let Some((map_ent_b_1, map_ent_b_2)) = denom_map_ent_2 { - // (denom, neutron) -> denom' - // (denom', osmo) -> denom - denom_map.insert((denom_b.to_string(), "osmosis".into()), map_ent_b_1); - denom_map.insert((norm_denom_b.clone(), "neutron".into()), map_ent_b_2); - } + let route_leg_a = denom_a.normalize(*funds_a, ctx).unwrap(); + let route_leg_b = denom_b.normalize(*funds_b, ctx).unwrap(); + + denom_file.push_denom(route_leg_a.clone()); + denom_file.push_denom(route_leg_b.clone()); ctx.build_tx_create_osmo_pool() - .with_weight(&norm_denom_a, *weight_a as u64) - .with_weight(&norm_denom_b, *weight_b as u64) - .with_initial_deposit(&norm_denom_a, *funds_a as u64) - .with_initial_deposit(&norm_denom_b, *funds_b as u64) + .with_weight(&route_leg_a.src_to_dest.dest_denom, *weight_a as u64) + .with_weight(&route_leg_b.src_to_dest.dest_denom, *weight_b as u64) + .with_initial_deposit( + &route_leg_a.src_to_dest.dest_denom, + *funds_a as u64, + ) + .with_initial_deposit( + &route_leg_b.src_to_dest.dest_denom, + *funds_b as u64, + ) .send()?; - let pool_id = ctx.get_osmo_pool(&norm_denom_a, &norm_denom_b)?; + let pool_id = ctx.get_osmo_pool( + &route_leg_a.src_to_dest.dest_denom, + &route_leg_b.src_to_dest.dest_denom, + 
)?; // Fund the pool ctx.build_tx_fund_osmo_pool() .with_pool_id(pool_id) - .with_max_amount_in(&norm_denom_a, *funds_a as u64) - .with_max_amount_in(&norm_denom_b, *funds_b as u64) + .with_max_amount_in( + &route_leg_a.src_to_dest.dest_denom, + *funds_a as u64, + ) + .with_max_amount_in( + &route_leg_b.src_to_dest.dest_denom, + *funds_b as u64, + ) .with_share_amount_out(1000000000000) .send() } @@ -517,7 +562,7 @@ impl Test { ctx.build_tx_start_auction() .with_offer_asset(&denom_a.to_string()) .with_ask_asset(&denom_b.to_string()) - .with_end_block_delta(10000) + .with_end_block_delta(1000000000000000000) .send() } }) diff --git a/local-interchaintest/src/tests.rs b/local-interchaintest/src/tests.rs index 9c75b16fb..a663ff047 100644 --- a/local-interchaintest/src/tests.rs +++ b/local-interchaintest/src/tests.rs @@ -143,7 +143,7 @@ pub fn test_osmo_arb(arbfile: Option) -> Result<(), Box for DenomChainInfo { + fn from(v: DenomRouteLeg) -> Self { + Self { + denom: v.dest_denom, + src_chain_id: v.src_chain, + dest_chain_id: v.dest_chain, + } + } +} + +#[derive(Serialize, Default)] +pub(crate) struct DenomFile { + pub(crate) denom_map: HashMap>, + pub(crate) denom_routes: HashMap>>, + pub(crate) chain_info: HashMap, +} + +impl DenomFile { + /// Registers a denom in the denom file by adding its + /// matching denoms on all respective chains, + /// and the routes needed to get here. 
+ pub fn push_denom(&mut self, v: BidirectionalDenomRouteLeg) -> &mut Self { + self.denom_map + .entry(v.src_to_dest.src_denom.clone()) + .or_default() + .push(v.src_to_dest.clone().into()); + self.denom_routes + .entry(v.src_to_dest.src_denom.clone()) + .or_default() + .entry(v.src_to_dest.dest_denom.clone()) + .or_default() + .push(v.src_to_dest); + + self.denom_map + .entry(v.dest_to_src.src_denom.clone()) + .or_default() + .push(v.dest_to_src.clone().into()); + self.denom_routes + .entry(v.dest_to_src.src_denom.clone()) + .or_default() + .entry(v.dest_to_src.clone().dest_denom) + .or_default() + .push(v.dest_to_src); + + self + } } /// Creates an error representing a failed assertion. @@ -130,24 +207,14 @@ pub(crate) fn create_netconfig() -> Result<(), Box> { Ok(()) } -pub(crate) fn create_denom_file( - denoms: &HashMap<(String, String), DenomMapEntry>, -) -> Result<(), Box> { +pub(crate) fn create_denom_file(file: &DenomFile) -> Result<(), Box> { let mut f = OpenOptions::new() .create(true) .truncate(true) .write(true) .open("../denoms.json")?; - let denoms_for_src = - denoms - .iter() - .fold(HashMap::new(), |mut acc, ((src_denom, _), dest_denom)| { - acc.insert(src_denom, vec![dest_denom]); - acc - }); - - f.write_all(serde_json::to_string(&denoms_for_src)?.as_bytes())?; + f.write_all(serde_json::to_string(file)?.as_bytes())?; Ok(()) } diff --git a/main.py b/main.py index d58c66f02..78a3ca26a 100644 --- a/main.py +++ b/main.py @@ -20,6 +20,9 @@ from src.util import ( custom_neutron_network_config, DISCOVERY_CONCURRENCY_FACTOR, + load_denom_chain_info, + load_denom_route_leg, + load_chain_info, ) from src.contracts.pool.osmosis import OsmosisPoolDirectory from src.contracts.pool.astroport import NeutronAstroportPoolDirectory @@ -192,9 +195,23 @@ async def main() -> None: session, [], cast(dict[str, Any], json.load(f)), - denom_file["denom_map"], - denom_file["denom_routes"], - denom_file["chain_info"], + { + denom: [load_denom_chain_info(info) for info in 
infos] + for (denom, infos) in denom_file["denom_map"].items() + }, + { + src_denom: { + dest_denom: [load_denom_route_leg(route) for route in routes] + for (dest_denom, routes) in dest_denom_routes.items() + } + for (src_denom, dest_denom_routes) in denom_file[ + "denom_routes" + ].items() + }, + { + chain_id: load_chain_info(info) + for (chain_id, info) in denom_file["chain_info"].items() + }, ).recover_history() sched = Scheduler(ctx, strategy) diff --git a/src/contracts/pool/astroport.py b/src/contracts/pool/astroport.py index 0171883a8..b28599eef 100644 --- a/src/contracts/pool/astroport.py +++ b/src/contracts/pool/astroport.py @@ -254,7 +254,7 @@ async def __balance(self, asset: Token | NativeToken) -> int: balance = next(b for b in balances if b["info"] == token_to_asset_info(asset)) - if not balance: + if balance is None: return 0 return int(balance["amount"]) diff --git a/src/scheduler.py b/src/scheduler.py index 0f956c2e3..4480e602e 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -54,7 +54,7 @@ class Ctx(Generic[TState]): order_history: list[Route] deployments: dict[str, Any] denom_map: dict[str, list[DenomChainInfo]] - denom_routes: dict[DenomRouteQuery, list[DenomRouteLeg]] + denom_routes: dict[str, dict[str, list[DenomRouteLeg]]] chain_info: dict[str, ChainInfo] def with_state(self, state: Any) -> Self: @@ -160,7 +160,7 @@ def asset_balance_prefix(leg: Leg, asset: str) -> Optional[str]: ), ) - if not balance_resp_asset: + if balance_resp_asset is None or not isinstance(balance_resp_asset, int): return None return f"balance[{leg.backend.chain_id}]({asset[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}): {balance_resp_asset}" @@ -173,11 +173,17 @@ def leg_balance_prefixes(leg: Leg) -> list[str]: ] prefix = " ".join( - { - prefix - for leg_prefixes in [leg_balance_prefixes(leg) for leg in route.legs] - for prefix in leg_prefixes - } + list( + dict.fromkeys( + [ + prefix + for leg_prefixes in [ + leg_balance_prefixes(leg) for leg in route.legs + ] + for 
prefix in leg_prefixes + ] + ) + ) ) route.logs.append(f"{log_level.upper()} {prefix} {fmt_string % tuple(args)}") @@ -205,8 +211,12 @@ def leg_balance_prefixes(leg: Leg) -> list[str]: async def query_denom_route( self, query: DenomRouteQuery ) -> Optional[list[DenomRouteLeg]]: - if self.denom_routes and query in self.denom_routes: - return self.denom_routes[query] + if ( + self.denom_routes + and query.src_denom in self.denom_routes + and query.dest_denom in self.denom_routes[query.src_denom] + ): + return self.denom_routes[query.src_denom][query.dest_denom] head = {"accept": "application/json", "content-type": "application/json"} @@ -252,15 +262,13 @@ async def query_denom_route( dest_denom=query.dest_denom, from_chain=from_chain_info, to_chain=to_chain_info, - denom_in=transfer_info["denom_in"], - denom_out=transfer_info["denom_out"], port=transfer_info["port"], channel=transfer_info["channel"], ) for op in ops ] - self.denom_routes[query] = route + self.denom_routes.get(query.src_denom, {})[query.dest_denom] = route return route diff --git a/src/strategies/bellman_ford.py b/src/strategies/bellman_ford.py index 4c5e145a4..ef7dd6c51 100644 --- a/src/strategies/bellman_ford.py +++ b/src/strategies/bellman_ford.py @@ -290,7 +290,7 @@ async def strategy( ), ) - if not balance_resp: + if balance_resp is None or not isinstance(balance_resp, int): return ctx profit = await route_base_denom_profit(balance_resp, route) diff --git a/src/strategies/naive.py b/src/strategies/naive.py index 9bdecc877..e58c836a6 100644 --- a/src/strategies/naive.py +++ b/src/strategies/naive.py @@ -63,8 +63,7 @@ def poll( ), ) - if balance_resp: - self.balance = balance_resp + self.balance = balance_resp return self @@ -113,7 +112,7 @@ async def strategy( ): ctx.log_route(r, "info", "Route queued: %s", [fmt_route(route)]) - if not state.balance: + if state.balance is None: return ctx ctx.log_route( @@ -233,7 +232,7 @@ async def eval_route( if not state: return None - if not state.balance: + 
if state.balance is None or not isinstance(state.balance, int): logger.error( "Failed to fetch bot wallet balance for account %s", str(Address(ctx.wallet.public_key(), prefix="neutron")), diff --git a/src/strategies/util.py b/src/strategies/util.py index 7354c8418..ebcc49b92 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -2,6 +2,7 @@ Defines common utilities shared across arbitrage strategies. """ +from bisect import insort import random import traceback import asyncio @@ -26,7 +27,6 @@ try_multiple_rest_endpoints, try_multiple_clients_fatal, try_multiple_clients, - DENOM_QUANTITY_ABORT_ARB, DenomRouteQuery, ) from src.scheduler import Ctx @@ -108,6 +108,14 @@ def collapse_route( ] +def expand_route(route_sublegs: list[list[tuple[Leg, int]]]) -> list[tuple[Leg, int]]: + """ + Ungroups legs grouped together by consecutive elements. + """ + + return [leg for sublegs in route_sublegs for leg in sublegs] + + def build_atomic_arb( sublegs: list[tuple[Leg, int]], wallet: LocalWallet ) -> Transaction: @@ -134,6 +142,28 @@ def build_atomic_arb( return tx +def denom_balance_on_chain( + provider: Union[AuctionProvider, PoolProvider], denom: str, ctx: Ctx[Any] +) -> int: + """ + Gets the maximum order size for the provider of the leg, + given the balance in the user's wallet. + """ + + balance_resp = try_multiple_clients( + ctx.clients[provider.chain_id], + lambda client: client.query_bank_balance( + Address(ctx.wallet.public_key(), prefix=provider.chain_prefix), + denom, + ), + ) + + if isinstance(balance_resp, int): + return balance_resp + + return 0 + + async def exec_arb( route_ent: Route, profit: int, @@ -183,9 +213,73 @@ async def exec_arb( ], ) - for sublegs in to_execute: + for i, sublegs in enumerate(to_execute): + (leg, predicted_to_swap) = sublegs[0] + + # The execution plan must have a sufficient balance + # in order to execute the step. 
+ # Otherwise, the step's quantity should be set to the balance + # and the plan should be updated and reevaluated for profit. + # If the route is no longer profitable, it should be canceled + + to_swap = min( + predicted_to_swap, + denom_balance_on_chain( + prev_leg.backend if prev_leg else leg.backend, + prev_leg.out_asset() if prev_leg else leg.in_asset(), + ctx, + ), + ) + + ctx.log_route( + route_ent, + "info", + "Execution plan for leg %s requires %d, and maximum spendable for leg is %d", + [fmt_route([leg]), predicted_to_swap, to_swap], + ) + + # Recalculate execution plan and update all legs, + # or abort if the route is no longer profitable + if to_swap != predicted_to_swap: + ctx.log_route( + route_ent, + "info", + "Maximum spendable for leg %s (%d) is insufficient for execution plan (requires %d); reevaluating", + [fmt_route([leg]), predicted_to_swap, to_swap], + ) + + remaining_legs = expand_route(to_execute[i:]) + + _, new_execution_plan = await quantities_for_route_profit( + to_swap, + [leg for leg, _ in remaining_legs], + route_ent, + ctx, + seek_profit=False, + ) + + # The execution plan was aborted + if len(new_execution_plan) < len(remaining_legs): + ctx.log_route( + route_ent, + "info", + "Insufficient execution planning (%d) for remaining legs (%d); skipping", + [len(new_execution_plan), len(remaining_legs)], + ) + + continue + + # The execution plan indicates the trade is no longer profitable + if new_execution_plan[-1] < quantities[0]: + raise ValueError( + "Execution plan indicates arb is no longer profitable." 
+ ) + + # Update the remaining execution plan + to_execute[i:] = collapse_route(iter(remaining_legs)) + leg_to_swap: tuple[Leg, int] = sublegs[0] - (leg, to_swap) = leg_to_swap + (leg, _) = leg_to_swap # Log legs on the same chain if len(sublegs) > 1: @@ -600,19 +694,20 @@ async def next_legs( ) -> AsyncGenerator[tuple[Route, list[Leg]], None]: nonlocal eval_profit - matching_denoms = [ - info.denom - for info in await ctx.query_denom_info( - path[-2].backend.chain_id, - path[-2].out_asset(), - ) - ] + if len(path) >= 2: + matching_denoms = [ + info.denom + for info in await ctx.query_denom_info( + path[-2].backend.chain_id, + path[-2].out_asset(), + ) + ] - if len(path) >= 2 and not ( - path[-1].in_asset() == path[-2].out_asset() - or path[-1].in_asset() in matching_denoms - ): - return + if not ( + path[-1].in_asset() == path[-2].out_asset() + or path[-1].in_asset() in matching_denoms + ): + return # Only find `limit` pools # with a depth less than `depth @@ -790,7 +885,7 @@ async def recover_funds( ), ) - if not balance_resp: + if balance_resp is None or not isinstance(balance_resp, int): raise ValueError(f"Couldn't get balance for asset {curr_leg.in_asset()}.") if curr_leg.backend.chain_id != backtracked[0].backend.chain_id: @@ -979,7 +1074,7 @@ async def transfer_or_continue() -> bool: # Check for a package acknowledgement by querying osmosis ack_resp = await try_multiple_rest_endpoints( - ctx.endpoints[src_chain_id]["http"], + ctx.endpoints[dest_chain_id]["http"], ( f"/ibc/core/channel/v1/channels/{src_channel_id}/" f"ports/transfer/packet_acks/" @@ -1014,6 +1109,68 @@ async def transfer_or_continue() -> bool: raise ValueError("IBC transfer timed out.") +async def quantities_for_starting_amount( + starting_amount: int, route: list[Leg] +) -> list[int]: + """ + Gets the order size for each subsequent trade given a starting amount, + and the liquidity in each pool. 
+ """ + + quantities = [starting_amount] + + for leg in route: + if quantities[-1] == 0: + quantities = [starting_amount] + + break + + prev_amt = quantities[-1] + + if isinstance(leg.backend, AuctionProvider): + if leg.in_asset != leg.backend.asset_a: + return quantities + + if await leg.backend.remaining_asset_b() == 0: + return quantities + + quantities.append( + min( + int(await leg.backend.exchange_rate() * prev_amt), + await leg.backend.remaining_asset_b(), + ) + ) + + continue + + if leg.in_asset == leg.backend.asset_a: + quantities.append(int(await leg.backend.simulate_swap_asset_a(prev_amt))) + + pool_liquidity = await leg.backend.balance_asset_b() + + if ( + pool_liquidity == 0 + or Decimal(quantities[-1]) / Decimal(pool_liquidity) + > MAX_POOL_LIQUIDITY_TRADE + ): + break + + continue + + quantities.append(int(await leg.backend.simulate_swap_asset_b(prev_amt))) + + pool_liquidity = await leg.backend.balance_asset_a() + + if ( + pool_liquidity == 0 + or Decimal(quantities[-1]) / Decimal(pool_liquidity) + > MAX_POOL_LIQUIDITY_TRADE + ): + break + + return quantities + + async def quantities_for_route_profit( starting_amount: int, route: list[Leg], @@ -1029,84 +1186,81 @@ async def quantities_for_route_profit( if len(route) <= 1: return (0, []) - quantities: list[int] = [starting_amount] - - while (seek_profit and quantities[-1] - quantities[0] <= 0) or len( - quantities - ) <= len(route): - ctx.log_route(r, "info", "Route has possible execution plan: %s", [quantities]) - - if starting_amount < DENOM_QUANTITY_ABORT_ARB: - logger.debug( - "Hit investment backstop (%d) in route planning: %s (%s)", - DENOM_QUANTITY_ABORT_ARB, - starting_amount, - quantities, - ) - - return (0, []) + left = 0 + right = starting_amount + mid = starting_amount // 2 - quantities = [starting_amount] + plans: dict[int, list[int]] = {} - for leg in route: - if quantities[-1] == 0: - quantities = [starting_amount] + # Plans sorted by profit, for purposes of returning the best plan + 
plans_by_profit: list[int] = [] - break - - prev_amt = quantities[-1] + while mid > 0 and mid <= starting_amount: + quantities: list[int] = await quantities_for_starting_amount(mid, route) + plans[mid] = quantities - if isinstance(leg.backend, AuctionProvider): - if leg.in_asset != leg.backend.asset_a: - return (0, []) + ctx.log_route( + r, + "info", + "Got execution plan @ %d: [%s] (best candidates: [%s]", + [ + mid, + ", ".join((str(qty) for qty in quantities)), + ", ".join( + ( + f"[{', '.join((str(qty) for qty in plans[plan_idx]))}]" + for plan_idx in plans_by_profit[:5] + ) + ), + ], + ) - if await leg.backend.remaining_asset_b() == 0: - return (0, []) + profit = 0 if len(quantities) == 0 else quantities[-1] - quantities[0] - quantities.append( - min( - int(await leg.backend.exchange_rate() * prev_amt), - await leg.backend.remaining_asset_b(), - ) - ) + # Insert in sorted position + if len(quantities) >= len(route): + insort(plans_by_profit, mid, key=lambda idx: plans[idx][-1] - plans[idx][0]) - continue + # Continue checking plans, since this quantity was not profitable + if len(quantities) < len(route) or profit <= 0: + right = mid + mid = right // 2 - if leg.in_asset == leg.backend.asset_a: - quantities.append( - int(await leg.backend.simulate_swap_asset_a(prev_amt)) - ) + ctx.log_route(r, "info", "Probing lower execution plans", []) - pool_liquidity = await leg.backend.balance_asset_b() + continue - if ( - pool_liquidity == 0 - or Decimal(quantities[-1]) / Decimal(pool_liquidity) - > MAX_POOL_LIQUIDITY_TRADE - ): - break + higher_plan = plans.get(mid + (right - mid) // 2, []) - continue + # No more to evaluate, since greater starting amount was less profitable + if ( + len(higher_plan) > 0 + and len(higher_plan) >= len(route) + and higher_plan[-1] - higher_plan[0] <= profit + ): + ctx.log_route(r, "info", "Best execution plan identified", []) - quantities.append(int(await leg.backend.simulate_swap_asset_b(prev_amt))) + break - pool_liquidity = await 
leg.backend.balance_asset_a() + # This plan is profitable, but a bigger plan might be even more profitable + left = mid + mid += (right - left) // 2 - if ( - pool_liquidity == 0 - or Decimal(quantities[-1]) / Decimal(pool_liquidity) - > MAX_POOL_LIQUIDITY_TRADE - ): - break + ctx.log_route(r, "info", "Probing higher execution plans", []) - starting_amount = int(Decimal(starting_amount) / Decimal(2.0)) + if len(plans_by_profit) == 0: + return (0, []) - ctx.log_route(r, "info", "Got execution plan: %s", [quantities]) + best_plan = plans[plans_by_profit[0]] - if quantities[-1] - quantities[0] > 0: - ctx.log_route(r, "info", "Route is profitable: %s", [fmt_route(route)]) + ctx.log_route( + r, + "info", + "Best execution plan: [%s]", + [", ".join((str(qty) for qty in best_plan))], + ) - return (quantities[-1] - quantities[0], quantities) + return (best_plan[-1] - best_plan[0] if len(best_plan) > 0 else 0, best_plan) async def route_base_denom_profit( diff --git a/src/util.py b/src/util.py index 87276631d..9ca28bba1 100644 --- a/src/util.py +++ b/src/util.py @@ -36,11 +36,6 @@ EVALUATION_CONCURRENCY_FACTOR = 10 -# The quantity of a denom below which -# it is no longer worthwhile checking for profit -DENOM_QUANTITY_ABORT_ARB = 500 - - NEUTRON_NETWORK_CONFIG = NetworkConfig( chain_id="neutron-1", url="grpc+http://grpc-kralum.neutron-1.neutron.org:80", @@ -255,6 +250,19 @@ class ChainInfo: pretty_name: str +def load_chain_info(obj: dict[str, Any]) -> ChainInfo: + return ChainInfo( + chain_name=obj["chain_name"], + chain_id=obj["chain_id"], + pfm_enabled=obj["pfm_enabled"], + supports_memo=obj["supports_memo"], + bech32_prefix=obj["bech32_prefix"], + fee_asset=obj["fee_asset"], + chain_type=obj["chain_type"], + pretty_name=obj["pretty_name"], + ) + + @dataclass class DenomRouteQuery: """ @@ -289,13 +297,23 @@ class DenomRouteLeg: from_chain: ChainInfo to_chain: ChainInfo - denom_in: str - denom_out: str - port: str channel: str +def load_denom_route_leg(obj: dict[str, Any]) 
-> DenomRouteLeg: + return DenomRouteLeg( + src_chain=obj["src_chain"], + dest_chain=obj["dest_chain"], + src_denom=obj["src_denom"], + dest_denom=obj["dest_denom"], + from_chain=load_chain_info(obj["from_chain"]), + to_chain=load_chain_info(obj["to_chain"]), + port=obj["port"], + channel=obj["channel"], + ) + + @dataclass class DenomChainInfo: """ @@ -309,6 +327,14 @@ class DenomChainInfo: dest_chain_id: str +def load_denom_chain_info(obj: dict[str, Any]) -> DenomChainInfo: + return DenomChainInfo( + denom=obj["denom"], + src_chain_id=obj["src_chain_id"], + dest_chain_id=obj["dest_chain_id"], + ) + + @dataclass class ContractInfo: """ diff --git a/tests/test_auction.py b/tests/test_auction.py index bad0c4997..3987ca4be 100644 --- a/tests/test_auction.py +++ b/tests/test_auction.py @@ -84,7 +84,6 @@ async def test_auction_provider() -> None: * price ) assert liq_estimate - liquidity < 5 - assert liquidity - liq_estimate < 1000 @pytest.mark.asyncio From 7cce8932eed0d4c29bd38d4970ee03407ccba356 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:07:53 +0000 Subject: [PATCH 11/46] Fix a small bug in route exec planning probing conditions. 
--- local-interchaintest/src/tests.rs | 12 +++++++++++- src/strategies/util.py | 19 +++++++++---------- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/local-interchaintest/src/tests.rs b/local-interchaintest/src/tests.rs index a663ff047..1e01b9317 100644 --- a/local-interchaintest/src/tests.rs +++ b/local-interchaintest/src/tests.rs @@ -143,7 +143,17 @@ pub fn test_osmo_arb(arbfile: Option) -> Result<(), Box profit > 200000 - PROFIT_MARGIN", + 200000 + ERROR_MARGIN_PROFIT > osmo_profit && osmo_profit > 200000 - ERROR_MARGIN_PROFIT, + true, + )?; + util::assert_err( + "200000 + PROFIT_MARGIN > auction_profit > 200000 - PROFIT_MARGIN", + 200000 + ERROR_MARGIN_PROFIT > auction_profit + && auction_profit > 200000 - ERROR_MARGIN_PROFIT, + true, + )?; Ok(()) } diff --git a/src/strategies/util.py b/src/strategies/util.py index ebcc49b92..1849a110a 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -695,13 +695,12 @@ async def next_legs( nonlocal eval_profit if len(path) >= 2: - matching_denoms = [ - info.denom - for info in await ctx.query_denom_info( - path[-2].backend.chain_id, - path[-2].out_asset(), - ) - ] + denom_infos = await ctx.query_denom_info( + path[-2].backend.chain_id, + path[-2].out_asset(), + ) + + matching_denoms = [info.denom for info in denom_infos] if not ( path[-1].in_asset() == path[-2].out_asset() @@ -1209,7 +1208,7 @@ async def quantities_for_route_profit( ", ".join( ( f"[{', '.join((str(qty) for qty in plans[plan_idx]))}]" - for plan_idx in plans_by_profit[:5] + for plan_idx in plans_by_profit[:-5] ) ), ], @@ -1218,11 +1217,11 @@ async def quantities_for_route_profit( profit = 0 if len(quantities) == 0 else quantities[-1] - quantities[0] # Insert in sorted position - if len(quantities) >= len(route): + if len(quantities) > len(route): insort(plans_by_profit, mid, key=lambda idx: plans[idx][-1] - plans[idx][0]) # Continue checking plans, since this quantity was not profitable - if len(quantities) < len(route) or 
profit <= 0: + if len(quantities) <= len(route) or profit <= 0: right = mid mid = right // 2 From a57f3dd41863bedd8399449a793e03cbe5420292 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:13:45 +0000 Subject: [PATCH 12/46] Increase HTTP timeout. --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index 78a3ca26a..097e31585 100644 --- a/main.py +++ b/main.py @@ -154,7 +154,7 @@ async def main() -> None: connector=aiohttp.TCPConnector( force_close=True, limit_per_host=DISCOVERY_CONCURRENCY_FACTOR ), - timeout=aiohttp.ClientTimeout(total=30), + timeout=aiohttp.ClientTimeout(total=90), ) as session: ctx: Ctx[Any] = Ctx( { From 4c14798a4289e5d8702e078d6f2a5d3387734dd6 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:18:53 +0000 Subject: [PATCH 13/46] Cache query_denom_info. --- src/scheduler.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/scheduler.py b/src/scheduler.py index 4480e602e..312bc4a22 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -365,7 +365,11 @@ def chain_info(chain_id: str, info: dict[str, Any]) -> DenomChainInfo: src_chain_id=src_chain, denom=info["denom"], dest_chain_id=chain_id ) - return [chain_info(chain_id, info) for chain_id, info in dests.items()] + infos = [chain_info(chain_id, info) for chain_id, info in dests.items()] + + self.denom_map[src_denom] = infos + + return infos class Scheduler(Generic[TState]): From f7d577f6e86df11b9e412f28350abf26c8a6572a Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:27:50 +0000 Subject: [PATCH 14/46] Update integration tests target profit values. 
--- local-interchaintest/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/local-interchaintest/src/tests.rs b/local-interchaintest/src/tests.rs index 1e01b9317..d78317a49 100644 --- a/local-interchaintest/src/tests.rs +++ b/local-interchaintest/src/tests.rs @@ -145,7 +145,7 @@ pub fn test_osmo_arb(arbfile: Option) -> Result<(), Box profit > 200000 - PROFIT_MARGIN", - 200000 + ERROR_MARGIN_PROFIT > osmo_profit && osmo_profit > 200000 - ERROR_MARGIN_PROFIT, + 9000000 + ERROR_MARGIN_PROFIT > osmo_profit && osmo_profit > 9000000 - ERROR_MARGIN_PROFIT, true, )?; util::assert_err( From 42da5b4ed5d8d8ecc37ff70f360ddf32b669c879 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:31:44 +0000 Subject: [PATCH 15/46] Update osmo integration test target profit again. --- local-interchaintest/src/tests.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/local-interchaintest/src/tests.rs b/local-interchaintest/src/tests.rs index d78317a49..3e5f7790a 100644 --- a/local-interchaintest/src/tests.rs +++ b/local-interchaintest/src/tests.rs @@ -144,14 +144,14 @@ pub fn test_osmo_arb(arbfile: Option) -> Result<(), Box profit > 200000 - PROFIT_MARGIN", - 9000000 + ERROR_MARGIN_PROFIT > osmo_profit && osmo_profit > 9000000 - ERROR_MARGIN_PROFIT, + "9500000 + PROFIT_MARGIN > profit > 9500000 - PROFIT_MARGIN", + 9500000 + ERROR_MARGIN_PROFIT > osmo_profit && osmo_profit > 9500000 - ERROR_MARGIN_PROFIT, true, )?; util::assert_err( - "200000 + PROFIT_MARGIN > auction_profit > 200000 - PROFIT_MARGIN", - 200000 + ERROR_MARGIN_PROFIT > auction_profit - && auction_profit > 200000 - ERROR_MARGIN_PROFIT, + "9500000 + PROFIT_MARGIN > auction_profit > 9500000 - PROFIT_MARGIN", + 9500000 + ERROR_MARGIN_PROFIT > auction_profit + && auction_profit > 9500000 - ERROR_MARGIN_PROFIT, true, )?; From fccaa4c0a43475950f1b7ed4b0ae80b543956252 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:44:21 +0000 Subject: 
[PATCH 16/46] Add extra comments to one-liners in prefixing. --- src/scheduler.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/scheduler.py b/src/scheduler.py index 312bc4a22..350911ced 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -166,12 +166,18 @@ def asset_balance_prefix(leg: Leg, asset: str) -> Optional[str]: return f"balance[{leg.backend.chain_id}]({asset[:DENOM_BALANCE_PREFIX_MAX_DENOM_LEN]}): {balance_resp_asset}" def leg_balance_prefixes(leg: Leg) -> list[str]: + """ + Get the chain, denom, and denom balance for the in and out assets in the leg. + """ + assets = [leg.in_asset(), leg.out_asset()] return [ x for x in (asset_balance_prefix(leg, asset) for asset in assets) if x ] + # Log all in and out asset balances for each leg in the route, + # removing any duplicate prefixes using dict.fromkeys prefix = " ".join( list( dict.fromkeys( From ff18c7962e38a46d6a6cf35552763cb7033de978 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:50:13 +0000 Subject: [PATCH 17/46] Diable rebalancing logs by default. 
--- main.py | 2 ++ src/contracts/route.py | 2 ++ src/scheduler.py | 2 ++ src/strategies/util.py | 2 ++ 4 files changed, 8 insertions(+) diff --git a/main.py b/main.py index 097e31585..ebb477564 100644 --- a/main.py +++ b/main.py @@ -78,6 +78,7 @@ async def main() -> None: "-df", "--deployments_file", default="contracts/deployments.json" ) parser.add_argument("-rt", "--rebalance_threshold", default=1000) + parser.add_argument("-lr", "--log_rebalancing", default=False) parser.add_argument("cmd", nargs="*", default=None) args = parser.parse_args() @@ -189,6 +190,7 @@ async def main() -> None: if "SKIP_API_KEY" in os.environ else None ), + "log_rebalancing": args.log_rebalancing, }, None, False, diff --git a/src/contracts/route.py b/src/contracts/route.py index 899a8bbc3..29680c935 100644 --- a/src/contracts/route.py +++ b/src/contracts/route.py @@ -61,6 +61,7 @@ class Route: status: Status time_created: str logs: list[str] + logs_enabled: bool def __hash__(self) -> int: return hash(self.uid) @@ -113,6 +114,7 @@ def load_route(s: str) -> Route: Status[loaded["status"].split(".")[1]], loaded["time_created"], loaded["logs"], + True, ) diff --git a/src/scheduler.py b/src/scheduler.py index 350911ced..859163667 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -106,6 +106,7 @@ def queue_route( theoretical_profit: int, expected_profit: int, quantities: list[int], + enable_logs: bool = True, ) -> Route: """ Creates a new identified route, inserting it into the order history, @@ -126,6 +127,7 @@ def queue_route( Status.QUEUED, datetime.now().strftime("%Y-%m-%d @ %H:%M:%S"), [], + enable_logs, ) self.order_history.append(r) diff --git a/src/strategies/util.py b/src/strategies/util.py index 1849a110a..bdc517184 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -598,6 +598,8 @@ async def eval_sell_denom(denom: str, sell_denom: str, balance: int) -> None: auctions, ctx, ): + route_ent.logs_enabled = ctx.cli_args["log_rebalancing"] + # For logging _, 
execution_plan = await quantities_for_route_profit( balance, route, route_ent, ctx, seek_profit=False From c27a4fced82274c34d836002cff5c0fb81ffc523 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 10:50:41 +0000 Subject: [PATCH 18/46] Skip logging routes with logs disabled. --- src/scheduler.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/scheduler.py b/src/scheduler.py index 859163667..575a90ffc 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -150,6 +150,9 @@ def log_route( Writes a log to the standard logger and to the log file of a route. """ + if not route.logs_enabled: + return + def asset_balance_prefix(leg: Leg, asset: str) -> Optional[str]: balance_resp_asset = try_multiple_clients( self.clients[leg.backend.chain_id], From 8e39fa23536a7f78cf804767e8f9348ebf444978 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 11:02:57 +0000 Subject: [PATCH 19/46] Increase http timeout again. --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index ebb477564..b83bc2fbb 100644 --- a/main.py +++ b/main.py @@ -155,7 +155,7 @@ async def main() -> None: connector=aiohttp.TCPConnector( force_close=True, limit_per_host=DISCOVERY_CONCURRENCY_FACTOR ), - timeout=aiohttp.ClientTimeout(total=90), + timeout=aiohttp.ClientTimeout(total=240), ) as session: ctx: Ctx[Any] = Ctx( { From 2903e151bf2f39704ac54b427d9a7d7c032ce00b Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 11:04:56 +0000 Subject: [PATCH 20/46] Use skip API v2. 
--- src/scheduler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scheduler.py b/src/scheduler.py index 575a90ffc..0c3e689c8 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -354,7 +354,7 @@ async def query_denom_info( head = {"accept": "application/json", "content-type": "application/json"} async with self.http_session.post( - "https://api.skip.money/v1/fungible/assets_from_source", + "https://api.skip.money/v2/fungible/assets_from_source", headers=head, json={ "allow_multi_tx": False, From 40052334d3150c4a0119dd33bc5c79ba0fea7e0a Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 11:05:16 +0000 Subject: [PATCH 21/46] Lower concurrency factor. --- src/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util.py b/src/util.py index 9ca28bba1..20c9c5f02 100644 --- a/src/util.py +++ b/src/util.py @@ -28,7 +28,7 @@ # Dictates the maximum number of concurrent calls to the skip # API in searching -DISCOVERY_CONCURRENCY_FACTOR = 20 +DISCOVERY_CONCURRENCY_FACTOR = 15 # Dictates the maximum number of concurrent calls to pool providers From 6a6bb11a1e83240770eed89247e9a3d18da3a204 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 11:14:28 +0000 Subject: [PATCH 22/46] Another route planning fix. --- src/strategies/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index bdc517184..f5b75146d 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -1252,7 +1252,7 @@ async def quantities_for_route_profit( if len(plans_by_profit) == 0: return (0, []) - best_plan = plans[plans_by_profit[0]] + best_plan = plans[plans_by_profit[-1]] ctx.log_route( r, From 00e2b261d7aeac3147ba055c8566c18425ec7e6d Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 18:48:36 +0000 Subject: [PATCH 23/46] Ratelimit skip requests, fix CI failing tests. 
--- local-interchaintest/src/tests.rs | 28 +-- .../tests/transfer_neutron.py | 2 + .../tests/transfer_osmosis.py | 2 + main.py | 2 + src/contracts/pool/astroport.py | 2 +- src/scheduler.py | 212 ++++++++++-------- src/util.py | 5 - tests/util.py | 2 + 8 files changed, 126 insertions(+), 129 deletions(-) diff --git a/local-interchaintest/src/tests.rs b/local-interchaintest/src/tests.rs index 3e5f7790a..38c57eebe 100644 --- a/local-interchaintest/src/tests.rs +++ b/local-interchaintest/src/tests.rs @@ -2,8 +2,6 @@ use super::util; use serde_json::Value; use std::{error::Error, process::Command}; -const ERROR_MARGIN_PROFIT: u64 = 50000; - pub fn test_transfer_osmosis( _: Option, ) -> Result<(), Box> { @@ -56,17 +54,8 @@ pub fn test_profitable_arb( println!("ARB BOT PROFIT: {profit}"); println!("AUCTION BOT PROFIT: {auction_profit}"); - util::assert_err( - "200000 + PROFIT_MARGIN > profit > 200000 - PROFIT_MARGIN", - 200000 + ERROR_MARGIN_PROFIT > profit && profit > 200000 - ERROR_MARGIN_PROFIT, - true, - )?; - util::assert_err( - "200000 + PROFIT_MARGIN > auction_profit > 200000 - PROFIT_MARGIN", - 200000 + ERROR_MARGIN_PROFIT > auction_profit - && auction_profit > 200000 - ERROR_MARGIN_PROFIT, - true, - )?; + util::assert_err("profit > 0", profit > 0, true)?; + util::assert_err("auction_profit > 0", auction_profit > 0, true)?; Ok(()) } @@ -143,17 +132,8 @@ pub fn test_osmo_arb(arbfile: Option) -> Result<(), Box profit > 9500000 - PROFIT_MARGIN", - 9500000 + ERROR_MARGIN_PROFIT > osmo_profit && osmo_profit > 9500000 - ERROR_MARGIN_PROFIT, - true, - )?; - util::assert_err( - "9500000 + PROFIT_MARGIN > auction_profit > 9500000 - PROFIT_MARGIN", - 9500000 + ERROR_MARGIN_PROFIT > auction_profit - && auction_profit > 9500000 - ERROR_MARGIN_PROFIT, - true, - )?; + util::assert_err("osmo_profit > 0", osmo_profit > 0, true)?; + util::assert_err("auction_profit > 0", auction_profit > 0, true)?; Ok(()) } diff --git a/local-interchaintest/tests/transfer_neutron.py 
b/local-interchaintest/tests/transfer_neutron.py index ab077a248..a09c04ecf 100644 --- a/local-interchaintest/tests/transfer_neutron.py +++ b/local-interchaintest/tests/transfer_neutron.py @@ -1,5 +1,6 @@ import json import asyncio +from asyncio import Lock from typing import Any from src.strategies.util import transfer_raw from src.scheduler import Ctx @@ -48,6 +49,7 @@ async def main() -> None: denoms, {}, {}, + Lock(), ) await transfer_raw( diff --git a/local-interchaintest/tests/transfer_osmosis.py b/local-interchaintest/tests/transfer_osmosis.py index 8d2c04b6b..a74a2a01f 100644 --- a/local-interchaintest/tests/transfer_osmosis.py +++ b/local-interchaintest/tests/transfer_osmosis.py @@ -1,4 +1,5 @@ import json +from asyncio import Lock import asyncio from typing import Any from src.strategies.util import transfer_raw @@ -48,6 +49,7 @@ async def main() -> None: denoms, {}, {}, + Lock(), ) await transfer_raw( diff --git a/main.py b/main.py index b83bc2fbb..41239a865 100644 --- a/main.py +++ b/main.py @@ -4,6 +4,7 @@ Implements a command-line interface for running arbitrage strategies. """ +from asyncio import Lock import traceback import asyncio from multiprocessing import Process @@ -214,6 +215,7 @@ async def main() -> None: chain_id: load_chain_info(info) for (chain_id, info) in denom_file["chain_info"].items() }, + Lock(), ).recover_history() sched = Scheduler(ctx, strategy) diff --git a/src/contracts/pool/astroport.py b/src/contracts/pool/astroport.py index b28599eef..8754b934e 100644 --- a/src/contracts/pool/astroport.py +++ b/src/contracts/pool/astroport.py @@ -31,7 +31,7 @@ from cosmpy.aerial.tx import Transaction -MAX_SPREAD = "0.05" +MAX_SPREAD = "0.1" @dataclass diff --git a/src/scheduler.py b/src/scheduler.py index 0c3e689c8..36e0e1fac 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -2,6 +2,7 @@ Implements a strategy runner with an arbitrary provider set in an event-loop style. 
""" +from asyncio import Lock import logging from datetime import datetime import json @@ -29,6 +30,11 @@ MAX_ROUTE_HISTORY_LEN = 200000 +# The maximum number of concurrent connections +# that can be open to +MAX_SKIP_CONCURRENT_CALLS = 5 + + # Length to truncate denoms in balance logs to DENOM_BALANCE_PREFIX_MAX_DENOM_LEN = 12 @@ -56,6 +62,7 @@ class Ctx(Generic[TState]): denom_map: dict[str, list[DenomChainInfo]] denom_routes: dict[str, dict[str, list[DenomRouteLeg]]] chain_info: dict[str, ChainInfo] + http_session_lock: Lock def with_state(self, state: Any) -> Self: """ @@ -231,57 +238,60 @@ async def query_denom_route( head = {"accept": "application/json", "content-type": "application/json"} - async with self.http_session.post( - "https://api.skip.money/v2/fungible/route", - headers=head, - json={ - "amount_in": "1", - "source_asset_denom": query.src_denom, - "source_asset_chain_id": query.src_chain, - "dest_asset_denom": query.dest_denom, - "dest_asset_chain_id": query.dest_chain, - "allow_multi_tx": True, - "allow_unsafe": False, - "bridges": ["IBC"], - }, - ) as resp: - if resp.status != 200: - return None - - ops = (await resp.json())["operations"] - - # The transfer includes a swap or some other operation - # we can't handle - if any(("transfer" not in op for op in ops)): - return None - - transfer_info = ops[0]["transfer"] - - from_chain_info = await self.query_chain_info( - transfer_info["from_chain_id"] - ) - to_chain_info = await self.query_chain_info(transfer_info["to_chain_id"]) - - if not from_chain_info or not to_chain_info: - return None - - route = [ - DenomRouteLeg( - src_chain=query.src_chain, - dest_chain=query.dest_chain, - src_denom=query.src_denom, - dest_denom=query.dest_denom, - from_chain=from_chain_info, - to_chain=to_chain_info, - port=transfer_info["port"], - channel=transfer_info["channel"], + async with self.http_session_lock: + async with self.http_session.post( + "https://api.skip.money/v2/fungible/route", + headers=head, + json={ 
+ "amount_in": "1", + "source_asset_denom": query.src_denom, + "source_asset_chain_id": query.src_chain, + "dest_asset_denom": query.dest_denom, + "dest_asset_chain_id": query.dest_chain, + "allow_multi_tx": True, + "allow_unsafe": False, + "bridges": ["IBC"], + }, + ) as resp: + if resp.status != 200: + return None + + ops = (await resp.json())["operations"] + + # The transfer includes a swap or some other operation + # we can't handle + if any(("transfer" not in op for op in ops)): + return None + + transfer_info = ops[0]["transfer"] + + from_chain_info = await self.query_chain_info( + transfer_info["from_chain_id"] ) - for op in ops - ] + to_chain_info = await self.query_chain_info( + transfer_info["to_chain_id"] + ) + + if not from_chain_info or not to_chain_info: + return None + + route = [ + DenomRouteLeg( + src_chain=query.src_chain, + dest_chain=query.dest_chain, + src_denom=query.src_denom, + dest_denom=query.dest_denom, + from_chain=from_chain_info, + to_chain=to_chain_info, + port=transfer_info["port"], + channel=transfer_info["channel"], + ) + for op in ops + ] - self.denom_routes.get(query.src_denom, {})[query.dest_denom] = route + self.denom_routes.get(query.src_denom, {})[query.dest_denom] = route - return route + return route async def query_chain_info( self, @@ -296,34 +306,35 @@ async def query_chain_info( head = {"accept": "application/json", "content-type": "application/json"} - async with self.http_session.get( - f"https://api.skip.money/v2/info/chains?chain_ids={chain_id}", - headers=head, - ) as resp: - if resp.status != 200: - return None - - chains = (await resp.json())["chains"] - - if len(chains) == 0: - return None - - chain = chains[0] - - chain_info = ChainInfo( - chain_name=chain["chain_name"], - chain_id=chain["chain_id"], - pfm_enabled=chain["pfm_enabled"], - supports_memo=chain["supports_memo"], - bech32_prefix=chain["bech32_prefix"], - fee_asset=chain["fee_assets"][0]["denom"], - chain_type=chain["chain_type"], - 
pretty_name=chain["pretty_name"], - ) + async with self.http_session_lock: + async with self.http_session.get( + f"https://api.skip.money/v2/info/chains?chain_ids={chain_id}", + headers=head, + ) as resp: + if resp.status != 200: + return None + + chains = (await resp.json())["chains"] + + if len(chains) == 0: + return None + + chain = chains[0] + + chain_info = ChainInfo( + chain_name=chain["chain_name"], + chain_id=chain["chain_id"], + pfm_enabled=chain["pfm_enabled"], + supports_memo=chain["supports_memo"], + bech32_prefix=chain["bech32_prefix"], + fee_asset=chain["fee_assets"][0]["denom"], + chain_type=chain["chain_type"], + pretty_name=chain["pretty_name"], + ) - self.chain_info[chain_id] = chain_info + self.chain_info[chain_id] = chain_info - return chain_info + return chain_info async def query_denom_info_on_chain( self, @@ -353,34 +364,37 @@ async def query_denom_info( head = {"accept": "application/json", "content-type": "application/json"} - async with self.http_session.post( - "https://api.skip.money/v2/fungible/assets_from_source", - headers=head, - json={ - "allow_multi_tx": False, - "include_cw20_assets": True, - "source_asset_denom": src_denom, - "source_asset_chain_id": src_chain, - "client_id": "timewave-arb-bot", - }, - ) as resp: - if resp.status != 200: - return [] - - dests = (await resp.json())["dest_assets"] - - def chain_info(chain_id: str, info: dict[str, Any]) -> DenomChainInfo: - info = info["assets"][0] - - return DenomChainInfo( - src_chain_id=src_chain, denom=info["denom"], dest_chain_id=chain_id - ) + async with self.http_session_lock: + async with self.http_session.post( + "https://api.skip.money/v2/fungible/assets_from_source", + headers=head, + json={ + "allow_multi_tx": False, + "include_cw20_assets": True, + "source_asset_denom": src_denom, + "source_asset_chain_id": src_chain, + "client_id": "timewave-arb-bot", + }, + ) as resp: + if resp.status != 200: + return [] + + dests = (await resp.json())["dest_assets"] + + def 
chain_info(chain_id: str, info: dict[str, Any]) -> DenomChainInfo: + info = info["assets"][0] + + return DenomChainInfo( + src_chain_id=src_chain, + denom=info["denom"], + dest_chain_id=chain_id, + ) - infos = [chain_info(chain_id, info) for chain_id, info in dests.items()] + infos = [chain_info(chain_id, info) for chain_id, info in dests.items()] - self.denom_map[src_denom] = infos + self.denom_map[src_denom] = infos - return infos + return infos class Scheduler(Generic[TState]): diff --git a/src/util.py b/src/util.py index 20c9c5f02..b9805c4c3 100644 --- a/src/util.py +++ b/src/util.py @@ -21,11 +21,6 @@ DENOM_RESOLVER_TIMEOUT_SEC = 5 -# The maximum number of concurrent connections -# that can be open to -MAX_SKIP_CONCURRENT_CALLS = 5 - - # Dictates the maximum number of concurrent calls to the skip # API in searching DISCOVERY_CONCURRENCY_FACTOR = 15 diff --git a/tests/util.py b/tests/util.py index e45489e20..87aaae460 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,3 +1,4 @@ +from asyncio import Lock from typing import Any, cast, AsyncIterator import json import aiohttp @@ -108,4 +109,5 @@ async def ctx() -> AsyncIterator[Ctx[Any]]: denom_map={}, denom_routes={}, chain_info={}, + http_session_lock=Lock(), ).with_state(State(1000)) From 94a188f735736d4e1b777360bf56bbaab19f37e7 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 18:49:20 +0000 Subject: [PATCH 24/46] Lower HTTP client timeout. 
--- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index 41239a865..9bd9b1468 100644 --- a/main.py +++ b/main.py @@ -156,7 +156,7 @@ async def main() -> None: connector=aiohttp.TCPConnector( force_close=True, limit_per_host=DISCOVERY_CONCURRENCY_FACTOR ), - timeout=aiohttp.ClientTimeout(total=240), + timeout=aiohttp.ClientTimeout(total=60), ) as session: ctx: Ctx[Any] = Ctx( { From 3ad374c540f6f26761789759bd0540963a43eef9 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 19:34:22 +0000 Subject: [PATCH 25/46] Remove extraneous sources of randomness from integration tests. --- src/strategies/bellman_ford.py | 3 +-- src/strategies/util.py | 3 --- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/src/strategies/bellman_ford.py b/src/strategies/bellman_ford.py index ef7dd6c51..fdc4ff408 100644 --- a/src/strategies/bellman_ford.py +++ b/src/strategies/bellman_ford.py @@ -4,7 +4,6 @@ from functools import cache import traceback -import random import logging from decimal import Decimal import asyncio @@ -391,7 +390,7 @@ async def route_bellman_ford( } if ctx.cli_args["pools"]: - vertices = set(random.sample(list(vertices), ctx.cli_args["pools"] - 1)) + vertices = set(list(vertices)) # How far a given denom is from the `src` denom distances: dict[str, Decimal] = {} diff --git a/src/strategies/util.py b/src/strategies/util.py index f5b75146d..179703a01 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -3,7 +3,6 @@ """ from bisect import insort -import random import traceback import asyncio from itertools import groupby @@ -835,8 +834,6 @@ async def next_legs( if len(next_pools) == 0: return - random.shuffle(next_pools) - routes = stream.merge(*[next_legs(path + [pool]) for pool in next_pools]) async with routes.stream() as streamer: From 3ba9498422799d6c939892fc0151193bb46532e0 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 20:00:12 +0000 Subject: [PATCH 26/46] 
Disable exhaustive rebalancing attempts. --- src/strategies/util.py | 111 +++++++++++++++++++++-------------------- 1 file changed, 57 insertions(+), 54 deletions(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 179703a01..2037128f5 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -588,69 +588,72 @@ async def eval_sell_denom(denom: str, sell_denom: str, balance: int) -> None: logger.info("Rebalancing %d %s", balance, denom) - async for route_ent, route in listen_routes_with_depth_dfs( - ctx.cli_args["hops"], - denom, - sell_denom, - set(), - pools, - auctions, - ctx, - ): - route_ent.logs_enabled = ctx.cli_args["log_rebalancing"] - - # For logging - _, execution_plan = await quantities_for_route_profit( - balance, route, route_ent, ctx, seek_profit=False + route_ent, route = await anext( + listen_routes_with_depth_dfs( + ctx.cli_args["hops"], + denom, + sell_denom, + set(), + pools, + auctions, + ctx, ) + ) - # The execution plan was aborted - if len(execution_plan) <= len(route): - ctx.log_route( - route_ent, - "info", - "Insufficient execution planning for rebalancing for %s; skipping", - [denom], - ) + route_ent.logs_enabled = ctx.cli_args["log_rebalancing"] - continue + # For logging + _, execution_plan = await quantities_for_route_profit( + balance, route, route_ent, ctx, seek_profit=False + ) - # Check that the execution plan results in a liquidatable quantity - if execution_plan[-1] < ctx.cli_args["rebalance_threshold"]: - ctx.log_route( - route_ent, - "info", - "Not enough funds for rebalancing %s; trying a different execution plan", - [denom], - ) + # The execution plan was aborted + if len(execution_plan) <= len(route): + ctx.log_route( + route_ent, + "info", + "Insufficient execution planning for rebalancing for %s; skipping", + [denom], + ) - continue + continue + # Check that the execution plan results in a liquidatable quantity + if execution_plan[-1] < ctx.cli_args["rebalance_threshold"]: ctx.log_route( - 
route_ent, "info", "Executing rebalancing plan for %s", [denom] + route_ent, + "info", + "Not enough funds for rebalancing %s; trying a different execution plan", + [denom], ) - # Execute the plan - route_ent.quantities = execution_plan - ctx.update_route(route_ent) - - try: - await exec_arb(route_ent, 0, execution_plan, route, ctx) - - break - except Exception: - ctx.log_route( - route_ent, - "error", - "Arb failed - rebalancing of %s failed: %s", - [ - denom, - traceback.format_exc().replace( - "\n", - f"\n{route_ent.uid}- Arb failed - failed to rebalance funds: ", - ), - ], - ) + continue + + ctx.log_route( + route_ent, "info", "Executing rebalancing plan for %s", [denom] + ) + + # Execute the plan + route_ent.quantities = execution_plan + ctx.update_route(route_ent) + + try: + await exec_arb(route_ent, 0, execution_plan, route, ctx) + + break + except Exception: + ctx.log_route( + route_ent, + "error", + "Arb failed - rebalancing of %s failed: %s", + [ + denom, + traceback.format_exc().replace( + "\n", + f"\n{route_ent.uid}- Arb failed - failed to rebalance funds: ", + ), + ], + ) await asyncio.gather( *[ From b36b31959f40a7c5b8da3d5c4386160db0774e46 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 20:05:17 +0000 Subject: [PATCH 27/46] See previous. 
--- src/strategies/util.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 2037128f5..f613fcb23 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -616,7 +616,7 @@ async def eval_sell_denom(denom: str, sell_denom: str, balance: int) -> None: [denom], ) - continue + return # Check that the execution plan results in a liquidatable quantity if execution_plan[-1] < ctx.cli_args["rebalance_threshold"]: @@ -627,7 +627,7 @@ async def eval_sell_denom(denom: str, sell_denom: str, balance: int) -> None: [denom], ) - continue + return ctx.log_route( route_ent, "info", "Executing rebalancing plan for %s", [denom] @@ -639,8 +639,6 @@ async def eval_sell_denom(denom: str, sell_denom: str, balance: int) -> None: try: await exec_arb(route_ent, 0, execution_plan, route, ctx) - - break except Exception: ctx.log_route( route_ent, From 454b85844d10870fc642f771f16e1c1e23d6d8ba Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 20:37:17 +0000 Subject: [PATCH 28/46] Log rebalancing routes. 
--- src/strategies/util.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index f613fcb23..548c3361f 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -254,7 +254,6 @@ async def exec_arb( [leg for leg, _ in remaining_legs], route_ent, ctx, - seek_profit=False, ) # The execution plan was aborted @@ -600,6 +599,13 @@ async def eval_sell_denom(denom: str, sell_denom: str, balance: int) -> None: ) ) + ctx.log_route( + route_ent, + "info", + "Rebalancing route discovered: %s", + [fmt_route(route)], + ) + route_ent.logs_enabled = ctx.cli_args["log_rebalancing"] # For logging @@ -899,9 +905,7 @@ async def recover_funds( to_transfer, ) - resp = await quantities_for_route_profit( - balance_resp, backtracked, r, ctx, seek_profit=False - ) + resp = await quantities_for_route_profit(balance_resp, backtracked, r, ctx) if not resp: raise ValueError("Couldn't get execution plan.") @@ -1175,7 +1179,7 @@ async def quantities_for_route_profit( route: list[Leg], r: Route, ctx: Ctx[Any], - seek_profit: Optional[bool] = True, + seek_profit: bool = True, ) -> tuple[int, list[int]]: """ Calculates what quantities should be used to obtain @@ -1201,7 +1205,7 @@ async def quantities_for_route_profit( ctx.log_route( r, "info", - "Got execution plan @ %d: [%s] (best candidates: [%s]", + "Got execution plan @ %d: [%s] (best candidates: [%s])", [ mid, ", ".join((str(qty) for qty in quantities)), @@ -1214,6 +1218,9 @@ async def quantities_for_route_profit( ], ) + if not seek_profit and len(quantities) > len(route): + return (quantities[-1] - quantities[0], quantities) + profit = 0 if len(quantities) == 0 else quantities[-1] - quantities[0] # Insert in sorted position From 70b020cdb8c95311d7cc44a3d59ea1e9122c8a20 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 20:43:07 +0000 Subject: [PATCH 29/46] Put a hard stop on maximum iterations per route planning. 
--- src/strategies/util.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 548c3361f..602c80f57 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -42,6 +42,13 @@ MAX_POOL_LIQUIDITY_TRADE = Decimal("0.05") + +""" +Prevent routes from being evaluated excessively when binary search fails. +""" +MAX_EVAL_PROBES = 2**6 + + """ The amount of the summed gas limit that will be consumed if messages are batched together. @@ -1198,7 +1205,11 @@ async def quantities_for_route_profit( # Plans sorted by profit, for purposes of returning the best plan plans_by_profit: list[int] = [] - while mid > 0 and mid <= starting_amount: + attempts: int = 0 + + while mid > 0 and mid <= starting_amount and attempts < MAX_EVAL_PROBES: + attempts += 1 + quantities: list[int] = await quantities_for_starting_amount(mid, route) plans[mid] = quantities From f1cf81a208d803d8b88d983be5599381a6274070 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 21:00:14 +0000 Subject: [PATCH 30/46] Addm ore debug logs to IBC transfers. 
--- src/strategies/util.py | 8 ++++++++ src/util.py | 6 ++++++ 2 files changed, 14 insertions(+) diff --git a/src/strategies/util.py b/src/strategies/util.py index 602c80f57..13c6c4fc8 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -27,6 +27,7 @@ try_multiple_clients_fatal, try_multiple_clients, DenomRouteQuery, + fmt_denom_route_leg, ) from src.scheduler import Ctx from cosmos.base.v1beta1 import coin_pb2 @@ -963,6 +964,13 @@ async def transfer( ) ) + ctx.log_route( + route, + "info", + "Got potential transfer route: %s", + [fmt_denom_route_leg(leg) for leg in ibc_route], + ) + if not ibc_route or len(ibc_route) == 0: raise ValueError(f"No route from {denom} to {leg.backend.chain_id}") diff --git a/src/util.py b/src/util.py index b9805c4c3..1f6f442a9 100644 --- a/src/util.py +++ b/src/util.py @@ -309,6 +309,12 @@ def load_denom_route_leg(obj: dict[str, Any]) -> DenomRouteLeg: ) +def fmt_denom_route_leg(leg: DenomRouteLeg) -> str: + return ( + f"{src_denom} ({src_chain}) -> {dest_denom} ({dest_chain}) via {channel}/{port}" + ) + + @dataclass class DenomChainInfo: """ From 107e1b4f95d7fb6f9df2abba5a55464de8a4b081 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 21:03:17 +0000 Subject: [PATCH 31/46] See previous. 
--- src/strategies/util.py | 6 +++--- src/util.py | 4 +--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 13c6c4fc8..22f5d2456 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -964,6 +964,9 @@ async def transfer( ) ) + if not ibc_route or len(ibc_route) == 0: + raise ValueError(f"No route from {denom} to {leg.backend.chain_id}") + ctx.log_route( route, "info", @@ -971,9 +974,6 @@ async def transfer( [fmt_denom_route_leg(leg) for leg in ibc_route], ) - if not ibc_route or len(ibc_route) == 0: - raise ValueError(f"No route from {denom} to {leg.backend.chain_id}") - src_channel_id = ibc_route[0].channel sender_addr = str( Address(ctx.wallet.public_key(), prefix=ibc_route[0].from_chain.bech32_prefix) diff --git a/src/util.py b/src/util.py index 1f6f442a9..b61b45ce9 100644 --- a/src/util.py +++ b/src/util.py @@ -310,9 +310,7 @@ def load_denom_route_leg(obj: dict[str, Any]) -> DenomRouteLeg: def fmt_denom_route_leg(leg: DenomRouteLeg) -> str: - return ( - f"{src_denom} ({src_chain}) -> {dest_denom} ({dest_chain}) via {channel}/{port}" - ) + return f"{leg.src_denom} ({leg.src_chain}) -> {leg.dest_denom} ({leg.dest_chain}) via {leg.channel}/{leg.port}" @dataclass From 8f9bfef677898802b1734307306cb65f791f8178 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 21:17:27 +0000 Subject: [PATCH 32/46] Allow more concurrent skip calls. 
--- local-interchaintest/tests/transfer_neutron.py | 6 +++--- local-interchaintest/tests/transfer_osmosis.py | 6 +++--- main.py | 4 ++-- src/scheduler.py | 4 ++-- tests/util.py | 6 +++--- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/local-interchaintest/tests/transfer_neutron.py b/local-interchaintest/tests/transfer_neutron.py index a09c04ecf..3bc67c25e 100644 --- a/local-interchaintest/tests/transfer_neutron.py +++ b/local-interchaintest/tests/transfer_neutron.py @@ -1,9 +1,9 @@ import json import asyncio -from asyncio import Lock +from asyncio import Semaphore from typing import Any from src.strategies.util import transfer_raw -from src.scheduler import Ctx +from src.scheduler import Ctx, MAX_SKIP_CONCURRENT_CALLS from src.util import try_multiple_clients from src.util import custom_neutron_network_config import aiohttp @@ -49,7 +49,7 @@ async def main() -> None: denoms, {}, {}, - Lock(), + Semaphore(MAX_SKIP_CONCURRENT_CALLS), ) await transfer_raw( diff --git a/local-interchaintest/tests/transfer_osmosis.py b/local-interchaintest/tests/transfer_osmosis.py index a74a2a01f..61ab12377 100644 --- a/local-interchaintest/tests/transfer_osmosis.py +++ b/local-interchaintest/tests/transfer_osmosis.py @@ -1,9 +1,9 @@ import json -from asyncio import Lock +from asyncio import Semaphore import asyncio from typing import Any from src.strategies.util import transfer_raw -from src.scheduler import Ctx +from src.scheduler import Ctx, MAX_SKIP_CONCURRENT_CALLS from src.util import try_multiple_clients from src.util import custom_neutron_network_config import aiohttp @@ -49,7 +49,7 @@ async def main() -> None: denoms, {}, {}, - Lock(), + Semaphore(MAX_SKIP_CONCURRENT_CALLS), ) await transfer_raw( diff --git a/main.py b/main.py index 9bd9b1468..a42d382c5 100644 --- a/main.py +++ b/main.py @@ -4,7 +4,7 @@ Implements a command-line interface for running arbitrage strategies. 
""" -from asyncio import Lock +from asyncio import Semaphore import traceback import asyncio from multiprocessing import Process @@ -215,7 +215,7 @@ async def main() -> None: chain_id: load_chain_info(info) for (chain_id, info) in denom_file["chain_info"].items() }, - Lock(), + Semaphore(), ).recover_history() sched = Scheduler(ctx, strategy) diff --git a/src/scheduler.py b/src/scheduler.py index 36e0e1fac..8e6259ee7 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -2,7 +2,7 @@ Implements a strategy runner with an arbitrary provider set in an event-loop style. """ -from asyncio import Lock +from asyncio import Semaphore import logging from datetime import datetime import json @@ -62,7 +62,7 @@ class Ctx(Generic[TState]): denom_map: dict[str, list[DenomChainInfo]] denom_routes: dict[str, dict[str, list[DenomRouteLeg]]] chain_info: dict[str, ChainInfo] - http_session_lock: Lock + http_session_lock: Semaphore def with_state(self, state: Any) -> Self: """ diff --git a/tests/util.py b/tests/util.py index 87aaae460..72a2ec4c2 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,4 +1,4 @@ -from asyncio import Lock +from asyncio import Semaphore from typing import Any, cast, AsyncIterator import json import aiohttp @@ -6,7 +6,7 @@ from contextlib import asynccontextmanager from cosmpy.aerial.client import LedgerClient, NetworkConfig from cosmpy.aerial.wallet import LocalWallet -from src.scheduler import Ctx +from src.scheduler import Ctx, MAX_SKIP_CONCURRENT_CALLS from src.util import ( DISCOVERY_CONCURRENCY_FACTOR, NEUTRON_NETWORK_CONFIG, @@ -109,5 +109,5 @@ async def ctx() -> AsyncIterator[Ctx[Any]]: denom_map={}, denom_routes={}, chain_info={}, - http_session_lock=Lock(), + http_session_lock=Semaphore(MAX_SKIP_CONCURRENT_CALLS), ).with_state(State(1000)) From 260408ae5aa79bb74f60f4133a7639449b7b7870 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Mon, 14 Oct 2024 21:47:52 +0000 Subject: [PATCH 33/46] Current status: debugging a blocking skip denom chain info 
call in transfer. --- src/scheduler.py | 3 +++ src/strategies/util.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/scheduler.py b/src/scheduler.py index 8e6259ee7..09bea6345 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -268,10 +268,13 @@ async def query_denom_route( from_chain_info = await self.query_chain_info( transfer_info["from_chain_id"] ) + print("here", transfer_info["to_chain_id"]) to_chain_info = await self.query_chain_info( transfer_info["to_chain_id"] ) + print(to_chain_info) + if not from_chain_info or not to_chain_info: return None diff --git a/src/strategies/util.py b/src/strategies/util.py index 22f5d2456..064042241 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -1249,7 +1249,7 @@ async def quantities_for_route_profit( # Continue checking plans, since this quantity was not profitable if len(quantities) <= len(route) or profit <= 0: right = mid - mid = right // 2 + mid = left + (right - left) // 2 ctx.log_route(r, "info", "Probing lower execution plans", []) From efb95e3f33bc4a2131e9400b71d6c77610eaa50f Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Tue, 15 Oct 2024 14:47:50 +0000 Subject: [PATCH 34/46] Fix dealocks, add caching in binary search. 
--- main.py | 4 ++-- src/scheduler.py | 3 --- src/strategies/util.py | 34 +++++++++++++++++++--------------- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/main.py b/main.py index a42d382c5..5dc563a10 100644 --- a/main.py +++ b/main.py @@ -17,7 +17,7 @@ from typing import Any, cast from cosmpy.aerial.client import LedgerClient from cosmpy.aerial.wallet import LocalWallet -from src.scheduler import Scheduler, Ctx +from src.scheduler import Scheduler, Ctx, MAX_SKIP_CONCURRENT_CALLS from src.util import ( custom_neutron_network_config, DISCOVERY_CONCURRENCY_FACTOR, @@ -215,7 +215,7 @@ async def main() -> None: chain_id: load_chain_info(info) for (chain_id, info) in denom_file["chain_info"].items() }, - Semaphore(), + Semaphore(MAX_SKIP_CONCURRENT_CALLS), ).recover_history() sched = Scheduler(ctx, strategy) diff --git a/src/scheduler.py b/src/scheduler.py index 09bea6345..8e6259ee7 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -268,13 +268,10 @@ async def query_denom_route( from_chain_info = await self.query_chain_info( transfer_info["from_chain_id"] ) - print("here", transfer_info["to_chain_id"]) to_chain_info = await self.query_chain_info( transfer_info["to_chain_id"] ) - print(to_chain_info) - if not from_chain_info or not to_chain_info: return None diff --git a/src/strategies/util.py b/src/strategies/util.py index 064042241..3da3ae6dd 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -1209,31 +1209,33 @@ async def quantities_for_route_profit( mid = starting_amount // 2 plans: dict[int, list[int]] = {} - - # Plans sorted by profit, for purposes of returning the best plan plans_by_profit: list[int] = [] attempts: int = 0 - while mid > 0 and mid <= starting_amount and attempts < MAX_EVAL_PROBES: + while ( + left != right + and mid != right + and mid > 0 + and mid <= starting_amount + and attempts < MAX_EVAL_PROBES + ): attempts += 1 - quantities: list[int] = await quantities_for_starting_amount(mid, route) + quantities: 
list[int] = ( + plans[mid] + if mid in plans + else await quantities_for_starting_amount(mid, route) + ) plans[mid] = quantities ctx.log_route( r, "info", - "Got execution plan @ %d: [%s] (best candidates: [%s])", + "Got execution plan @ %d: [%s]", [ mid, ", ".join((str(qty) for qty in quantities)), - ", ".join( - ( - f"[{', '.join((str(qty) for qty in plans[plan_idx]))}]" - for plan_idx in plans_by_profit[:-5] - ) - ), ], ) @@ -1244,14 +1246,14 @@ async def quantities_for_route_profit( # Insert in sorted position if len(quantities) > len(route): - insort(plans_by_profit, mid, key=lambda idx: plans[idx][-1] - plans[idx][0]) + plans_by_profit.append(mid) # Continue checking plans, since this quantity was not profitable if len(quantities) <= len(route) or profit <= 0: right = mid mid = left + (right - left) // 2 - ctx.log_route(r, "info", "Probing lower execution plans", []) + ctx.log_route(r, "debug", "Probing lower execution plans", []) continue @@ -1269,9 +1271,11 @@ async def quantities_for_route_profit( # This plan is profitable, but a bigger plan might be even more profitable left = mid - mid += (right - left) // 2 + mid = (right - left) // 2 + + ctx.log_route(r, "debug", "Probing higher execution plans", []) - ctx.log_route(r, "info", "Probing higher execution plans", []) + plans_by_profit.sort(key=lambda idx: plans[idx][-1] - plans[idx][0]) if len(plans_by_profit) == 0: return (0, []) From ea0d630898a01e59de8aa593541c25ddbde9a06b Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Wed, 16 Oct 2024 03:55:09 +0000 Subject: [PATCH 35/46] Migrate order history to sqlite. 
--- local-interchaintest/Cargo.lock | 29 ++ local-interchaintest/Cargo.toml | 1 + local-interchaintest/src/main.rs | 2 +- local-interchaintest/src/setup.rs | 46 +- local-interchaintest/src/tests.rs | 185 ++++---- local-interchaintest/src/util.rs | 4 +- .../tests/transfer_neutron.py | 2 + .../tests/transfer_osmosis.py | 2 + main.py | 438 ++++++------------ src/contracts/route.py | 91 ++-- src/scheduler.py | 51 +- src/strategies/util.py | 5 +- tests/util.py | 4 +- 13 files changed, 401 insertions(+), 459 deletions(-) diff --git a/local-interchaintest/Cargo.lock b/local-interchaintest/Cargo.lock index dfe203736..90ba8fc31 100644 --- a/local-interchaintest/Cargo.lock +++ b/local-interchaintest/Cargo.lock @@ -1207,6 +1207,7 @@ dependencies = [ "serde", "serde_json", "shared_child", + "sqlite", ] [[package]] @@ -1968,6 +1969,34 @@ dependencies = [ "der", ] +[[package]] +name = "sqlite" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfe6fb16f2bee6452feeb4d12bfa404fbcd3cfc121b2950e501d1ae9cae718e" +dependencies = [ + "sqlite3-sys", +] + +[[package]] +name = "sqlite3-src" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "174d4a6df77c27db281fb23de1a6d968f3aaaa4807c2a1afa8056b971f947b4a" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "sqlite3-sys" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3901ada7090c3c3584dc92ec7ef1b7091868d13bfe6d7de9f0bcaffee7d0ade5" +dependencies = [ + "sqlite3-src", +] + [[package]] name = "static_assertions" version = "1.1.0" diff --git a/local-interchaintest/Cargo.toml b/local-interchaintest/Cargo.toml index da3bd8725..f8b0761fb 100644 --- a/local-interchaintest/Cargo.toml +++ b/local-interchaintest/Cargo.toml @@ -15,3 +15,4 @@ itertools = "0.13.0" shared_child = "1.0.0" clap = { version = "4.5.8", features = ["derive"] } derive_builder = "0.20.0" +sqlite = { version = 
"0.36.1" } \ No newline at end of file diff --git a/local-interchaintest/src/main.rs b/local-interchaintest/src/main.rs index df7bce67d..221270a75 100644 --- a/local-interchaintest/src/main.rs +++ b/local-interchaintest/src/main.rs @@ -15,7 +15,7 @@ mod util; const TEST_MNEMONIC: &str = "decorate bright ozone fork gallery riot bus exhaust worth way bone indoor calm squirrel merry zero scheme cotton until shop any excess stage laundry"; /// Path to a file where found arbs are stored -const ARBFILE_PATH: &str = "../arbs.json"; +const ARBFILE_PATH: &str = "../arbs.db"; /// The address that should principally own all contracts const OWNER_ADDR: &str = "neutron1hj5fveer5cjtn4wd6wstzugjfdxzl0xpznmsky"; diff --git a/local-interchaintest/src/setup.rs b/local-interchaintest/src/setup.rs index ef4e46d2b..f88690ff9 100644 --- a/local-interchaintest/src/setup.rs +++ b/local-interchaintest/src/setup.rs @@ -7,8 +7,8 @@ use cosmwasm_std::Decimal; use derive_builder::Builder; use localic_utils::{types::contract::MinAmount, utils::test_context::TestContext}; use notify::{Event, EventKind, RecursiveMode, Result as NotifyResult, Watcher}; -use serde_json::Value; use shared_child::SharedChild; +use sqlite::State; use std::{ borrow::BorrowMut, collections::{HashMap, HashSet}, @@ -21,11 +21,15 @@ use std::{ atomic::{AtomicBool, Ordering}, mpsc, Arc, Mutex, }, + thread, + time::Duration, }; const EXIT_STATUS_SUCCESS: i32 = 9; const EXIT_STATUS_SIGKILL: i32 = 9; +const EMPTY_ARB_DB_SIZE: u64 = 10000; + /// A lazily evaluated denom hash, /// based on an src chain, a dest chain /// and a base denom. 
If the dest chain @@ -321,10 +325,10 @@ impl<'a> TestRunner<'a> { let statuses = self.test_statuses.clone(); if test.run_arbbot { - with_arb_bot_output(Arc::new(Box::new(move |arbfile: Option| { + with_arb_bot_output(Arc::new(Box::new(move || { statuses.lock().expect("Failed to lock statuses").insert( (test.name.clone(), test.description.clone()), - (*test.test)(arbfile), + (*test.test)(), ); Ok(()) @@ -335,7 +339,7 @@ impl<'a> TestRunner<'a> { statuses.lock().expect("Failed to lock statuses").insert( (test.name.clone(), test.description.clone()), - (*test.test)(None), + (*test.test)(), ); Ok(self) @@ -377,7 +381,7 @@ impl<'a> TestRunner<'a> { } /// A test that receives arb bot executable output. -pub type TestFn = Box) -> TestResult + Send + Sync>; +pub type TestFn = Box TestResult + Send + Sync>; pub type OwnedTestFn = Arc; pub type TestResult = Result<(), Box>; @@ -690,9 +694,9 @@ pub fn with_arb_bot_output(test: OwnedTestFn) -> TestResult { let test_handle = test.clone(); - // Wait until the arbs.json file has been produced + // Wait until the arbs.db file has been produced let mut watcher = notify::recommended_watcher(move |res: NotifyResult| { - let e = res.expect("failed to watch arbs.json"); + let e = res.expect("failed to watch arbs.db"); // An arb was found if let EventKind::Modify(_) = e.kind { @@ -706,21 +710,33 @@ pub fn with_arb_bot_output(test: OwnedTestFn) -> TestResult { let f = OpenOptions::new() .read(true) .open(ARBFILE_PATH) - .expect("failed to open arbs.json"); + .expect("failed to open arbs.db"); - if f.metadata().expect("can't get arbs metadata").len() == 0 { + if f.metadata().expect("can't get arbs metadata").len() < EMPTY_ARB_DB_SIZE { return; } - let arbfile: Value = - serde_json::from_reader(&f).expect("failed to deserialize arbs.json"); - - let res = test_handle(Some(arbfile)); + thread::sleep(Duration::from_secs(1)); proc_handle_watcher.kill().expect("failed to kill arb bot"); - tx_res.send(res).expect("failed to send test results"); 
- finished.store(true, Ordering::SeqCst); + thread::sleep(Duration::from_secs(2)); + + let conn = sqlite::open(ARBFILE_PATH).expect("failed to open db"); + + let query = "SELECT COUNT(*) AS cnt FROM orders"; + let mut statement = conn.prepare(query).unwrap(); + + if let Ok(State::Row) = statement.next() { + // The db is committed, we can run the tests now + if statement.read::("cnt").unwrap() > 0 { + let res = test_handle(); + + tx_res.send(res).expect("failed to send test results"); + + finished.store(true, Ordering::SeqCst); + } + } } })?; diff --git a/local-interchaintest/src/tests.rs b/local-interchaintest/src/tests.rs index 38c57eebe..c592ebb56 100644 --- a/local-interchaintest/src/tests.rs +++ b/local-interchaintest/src/tests.rs @@ -1,10 +1,7 @@ -use super::util; -use serde_json::Value; +use super::{util, ARBFILE_PATH}; use std::{error::Error, process::Command}; -pub fn test_transfer_osmosis( - _: Option, -) -> Result<(), Box> { +pub fn test_transfer_osmosis() -> Result<(), Box> { Command::new("python") .current_dir("tests") .arg("transfer_osmosis.py") @@ -13,9 +10,7 @@ pub fn test_transfer_osmosis( Ok(()) } -pub fn test_transfer_neutron( - _: Option, -) -> Result<(), Box> { +pub fn test_transfer_neutron() -> Result<(), Box> { Command::new("python") .current_dir("tests") .arg("transfer_neutron.py") @@ -24,32 +19,31 @@ pub fn test_transfer_neutron( Ok(()) } -pub fn test_profitable_arb( - arbfile: Option, -) -> Result<(), Box> { - let arbfile = arbfile.unwrap(); - let arbs = arbfile.as_array().expect("no arbs in arbfile"); - - util::assert_err("!arbs.is_empty()", arbs.is_empty(), false)?; - - let profit: u64 = arbs - .iter() - .filter_map(|arb_str| arb_str.as_str()) - .filter_map(|arb_str| { - serde_json::from_str::(arb_str) - .ok()? - .get("realized_profit")? - .as_number()? 
- .as_u64() - }) - .sum(); - let auction_profit: u64 = arbs - .iter() - .filter_map(|arb_str| arb_str.as_str()) - .filter(|arb_str| arb_str.contains("auction")) - .filter_map(|arb_str| serde_json::from_str::(arb_str).ok()) - .filter_map(|arb| arb.get("realized_profit")?.as_number()?.as_u64()) - .sum(); +pub fn test_profitable_arb() -> Result<(), Box> { + let conn = sqlite::open(ARBFILE_PATH).expect("failed to open db"); + + let profit = { + let query = "SELECT SUM(o.realized_profit) AS total_profit FROM orders o"; + + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; + + let auction_profit = { + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o INNER JOIN legs l ON o.uid = l.order_uid GROUP BY o.uid)"; + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; println!("ARB BOT PROFIT: {profit}"); println!("AUCTION BOT PROFIT: {auction_profit}"); @@ -60,78 +54,83 @@ pub fn test_profitable_arb( Ok(()) } -pub fn test_unprofitable_arb( - arbfile: Option, -) -> Result<(), Box> { - let arbfile = arbfile.unwrap(); - let arbs = arbfile.as_array().expect("no arbs in arbfile"); - - util::assert_err("!arbs.is_empty()", arbs.is_empty(), false)?; - - let profit: u64 = arbs - .iter() - .filter_map(|arb_str| arb_str.as_str()) - .filter_map(|arb_str| { - serde_json::from_str::(arb_str) - .ok()? - .get("realized_profit")? - .as_number()? 
- .as_u64() - }) - .sum(); - let auction_profit: u64 = arbs - .iter() - .filter_map(|arb_str| arb_str.as_str()) - .filter(|arb_str| arb_str.contains("auction")) - .filter_map(|arb_str| serde_json::from_str::(arb_str).ok()) - .filter_map(|arb| arb.get("realized_profit")?.as_number()?.as_u64()) - .sum(); +pub fn test_unprofitable_arb() -> Result<(), Box> { + let conn = sqlite::open(ARBFILE_PATH).expect("failed to open db"); + + let profit = { + let query = "SELECT SUM(o.realized_profit) AS total_profit FROM orders o"; + + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; + + let auction_profit = { + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o INNER JOIN legs l ON o.uid = l.order_uid GROUP BY o.uid)"; + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; println!("ARB BOT PROFIT: {profit}"); println!("AUCTION BOT PROFIT: {auction_profit}"); - util::assert_err("profit == 0", profit, 0)?; - util::assert_err("auction_profit == 0", auction_profit, 0)?; + util::assert_err("profit == 0", profit == 0, true)?; + util::assert_err("auction_profit == 0", auction_profit == 0, true)?; Ok(()) } -pub fn test_osmo_arb(arbfile: Option) -> Result<(), Box> { - let arbfile = arbfile.unwrap(); - let arbs = arbfile.as_array().expect("no arbs in arbfile"); - - util::assert_err("!arbs.is_empty()", arbs.is_empty(), false)?; - - let profit: u64 = arbs - .iter() - .filter_map(|arb_str| arb_str.as_str()) - .filter_map(|arb_str| { - serde_json::from_str::(arb_str) - .ok()? - .get("realized_profit")? - .as_number()? 
- .as_u64() - }) - .sum(); - let auction_profit: u64 = arbs - .iter() - .filter_map(|arb_str| arb_str.as_str()) - .filter(|arb_str| arb_str.contains("auction")) - .filter_map(|arb_str| serde_json::from_str::(arb_str).ok()) - .filter_map(|arb| arb.get("realized_profit")?.as_number()?.as_u64()) - .sum(); - let osmo_profit: u64 = arbs - .iter() - .filter_map(|arb_str| arb_str.as_str()) - .filter(|arb_str| arb_str.contains("osmosis")) - .filter_map(|arb_str| serde_json::from_str::(arb_str).ok()) - .filter_map(|arb| arb.get("realized_profit")?.as_number()?.as_u64()) - .sum(); +pub fn test_osmo_arb() -> Result<(), Box> { + let conn = sqlite::open(ARBFILE_PATH).expect("failed to open db"); + + let profit = { + let query = "SELECT SUM(o.realized_profit) AS total_profit FROM orders o"; + + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; + + let auction_profit = { + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o WHERE l.kind = 'auction' INNER JOIN legs l ON o.uid == l.order_uid GROUP BY o.uid)"; + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; + + let osmo_profit = { + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o WHERE l.kind = 'osmosis' INNER JOIN legs l ON o.uid == l.order_uid GROUP BY o.uid)"; + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; println!("ARB BOT PROFIT: {profit}"); println!("AUCTION BOT PROFIT: {auction_profit}"); println!("OSMOSIS BOT PROFIT: {osmo_profit}"); + util::assert_err("profit > 0", profit > 0, true)?; util::assert_err("osmo_profit > 0", osmo_profit > 0, 
true)?; util::assert_err("auction_profit > 0", auction_profit > 0, true)?; diff --git a/local-interchaintest/src/util.rs b/local-interchaintest/src/util.rs index c29db2d04..471a21443 100644 --- a/local-interchaintest/src/util.rs +++ b/local-interchaintest/src/util.rs @@ -175,9 +175,9 @@ pub(crate) fn create_arbs_file() -> Result<(), Box> { .create(true) .truncate(true) .write(true) - .open("../arbs.json")?; + .open("../arbs.db")?; - f.write_all(serde_json::json!([]).to_string().as_bytes())?; + f.write_all(&[])?; Ok(()) } diff --git a/local-interchaintest/tests/transfer_neutron.py b/local-interchaintest/tests/transfer_neutron.py index 3bc67c25e..b9d49997d 100644 --- a/local-interchaintest/tests/transfer_neutron.py +++ b/local-interchaintest/tests/transfer_neutron.py @@ -1,3 +1,4 @@ +from sqlite3 import connect import json import asyncio from asyncio import Semaphore @@ -50,6 +51,7 @@ async def main() -> None: {}, {}, Semaphore(MAX_SKIP_CONCURRENT_CALLS), + connect("test_db.db"), ) await transfer_raw( diff --git a/local-interchaintest/tests/transfer_osmosis.py b/local-interchaintest/tests/transfer_osmosis.py index 61ab12377..d7c2bd9a8 100644 --- a/local-interchaintest/tests/transfer_osmosis.py +++ b/local-interchaintest/tests/transfer_osmosis.py @@ -1,3 +1,4 @@ +from sqlite3 import connect import json from asyncio import Semaphore import asyncio @@ -50,6 +51,7 @@ async def main() -> None: {}, {}, Semaphore(MAX_SKIP_CONCURRENT_CALLS), + connect("test_db.db"), ) await transfer_raw( diff --git a/main.py b/main.py index 5dc563a10..543d40c27 100644 --- a/main.py +++ b/main.py @@ -4,6 +4,8 @@ Implements a command-line interface for running arbitrage strategies. 
""" +from contextlib import closing +from sqlite3 import connect from asyncio import Semaphore import traceback import asyncio @@ -27,7 +29,6 @@ ) from src.contracts.pool.osmosis import OsmosisPoolDirectory from src.contracts.pool.astroport import NeutronAstroportPoolDirectory -from src.contracts.route import Status from src.strategies.naive import strategy from dotenv import load_dotenv import aiohttp @@ -73,7 +74,7 @@ async def main() -> None: "-l", "--log_file", ) - parser.add_argument("-hf", "--history_file", default="arbs.json") + parser.add_argument("-hf", "--history_db", default="arbs.db") parser.add_argument("-c", "--net_config", default="net_conf.json") parser.add_argument( "-df", "--deployments_file", default="contracts/deployments.json" @@ -100,16 +101,6 @@ async def main() -> None: level=os.environ.get("LOGLEVEL", "INFO").upper(), ) - # Always make sure the history file exists - if args.history_file is not None and not path.isfile(args.history_file): - logger.info("Creating pool file") - - with open(args.history_file, "w+", encoding="utf-8") as f: - json.dump( - [], - f, - ) - denom_file: dict[str, Any] = { "denom_map": {}, "denom_routes": {}, @@ -158,300 +149,157 @@ async def main() -> None: ), timeout=aiohttp.ClientTimeout(total=60), ) as session: - ctx: Ctx[Any] = Ctx( - { - chain_id: [ - LedgerClient( - custom_neutron_network_config(endpoint, chain_id=chain_id) - ) - for endpoint in endpoints["grpc"] - ] - for chain_id, endpoints in endpoints.items() - }, - endpoints, - LocalWallet.from_mnemonic( - os.environ.get("WALLET_MNEMONIC"), prefix="neutron" - ), - { - "pool_file": args.pool_file, - "poll_interval": int(args.poll_interval), - "hops": int(args.hops), - "pools": int(args.pools) if args.pools else None, - "require_leg_types": args.require_leg_types, - "base_denom": args.base_denom, - "profit_margin": int(args.profit_margin), - "rebalance_threshold": int(args.rebalance_threshold), - "wallet_mnemonic": os.environ.get("WALLET_MNEMONIC"), - "cmd": 
args.cmd, - "net_config": args.net_config, - "log_file": args.log_file, - "history_file": args.history_file, - "skip_api_key": ( - os.environ.get("SKIP_API_KEY") - if "SKIP_API_KEY" in os.environ - else None + with closing(connect(args.history_db, autocommit=False)) as conn: + ctx: Ctx[Any] = Ctx( + { + chain_id: [ + LedgerClient( + custom_neutron_network_config( + endpoint, chain_id=chain_id + ) + ) + for endpoint in endpoints["grpc"] + ] + for chain_id, endpoints in endpoints.items() + }, + endpoints, + LocalWallet.from_mnemonic( + os.environ.get("WALLET_MNEMONIC"), prefix="neutron" ), - "log_rebalancing": args.log_rebalancing, - }, - None, - False, - session, - [], - cast(dict[str, Any], json.load(f)), - { - denom: [load_denom_chain_info(info) for info in infos] - for (denom, infos) in denom_file["denom_map"].items() - }, - { - src_denom: { - dest_denom: [load_denom_route_leg(route) for route in routes] - for (dest_denom, routes) in dest_denom_routes.items() - } - for (src_denom, dest_denom_routes) in denom_file[ - "denom_routes" - ].items() - }, - { - chain_id: load_chain_info(info) - for (chain_id, info) in denom_file["chain_info"].items() - }, - Semaphore(MAX_SKIP_CONCURRENT_CALLS), - ).recover_history() - sched = Scheduler(ctx, strategy) - - # Register Osmosis and Astroport providers - osmosis = OsmosisPoolDirectory( - ctx.deployments, - ctx.http_session, - poolfile_path=args.pool_file, - endpoints=endpoints[ - list(ctx.deployments["pools"]["osmosis"].keys())[0] - ], - ) - astros = [ - NeutronAstroportPoolDirectory( + { + "pool_file": args.pool_file, + "poll_interval": int(args.poll_interval), + "hops": int(args.hops), + "pools": int(args.pools) if args.pools else None, + "require_leg_types": args.require_leg_types, + "base_denom": args.base_denom, + "profit_margin": int(args.profit_margin), + "rebalance_threshold": int(args.rebalance_threshold), + "wallet_mnemonic": os.environ.get("WALLET_MNEMONIC"), + "cmd": args.cmd, + "net_config": args.net_config, + 
"log_file": args.log_file, + "history_db": args.history_db, + "skip_api_key": ( + os.environ.get("SKIP_API_KEY") + if "SKIP_API_KEY" in os.environ + else None + ), + "log_rebalancing": args.log_rebalancing, + }, + None, + False, + session, + [], + cast(dict[str, Any], json.load(f)), + { + denom: [load_denom_chain_info(info) for info in infos] + for (denom, infos) in denom_file["denom_map"].items() + }, + { + src_denom: { + dest_denom: [ + load_denom_route_leg(route) for route in routes + ] + for (dest_denom, routes) in dest_denom_routes.items() + } + for (src_denom, dest_denom_routes) in denom_file[ + "denom_routes" + ].items() + }, + { + chain_id: load_chain_info(info) + for (chain_id, info) in denom_file["chain_info"].items() + }, + Semaphore(MAX_SKIP_CONCURRENT_CALLS), + conn, + ) + sched = Scheduler(ctx, strategy) + + # Register Osmosis and Astroport providers + osmosis = OsmosisPoolDirectory( ctx.deployments, - chain_id, ctx.http_session, - [ - ( - grpc.aio.secure_channel( - endpoint.split("grpc+https://")[1], - grpc.ssl_channel_credentials(), - ) - if "https" in endpoint - else grpc.aio.insecure_channel( - endpoint.split("grpc+http://")[1], - ) - ) - for endpoint in endpoints[chain_id]["grpc"] - ], poolfile_path=args.pool_file, - endpoints=endpoints[chain_id], + endpoints=endpoints[ + list(ctx.deployments["pools"]["osmosis"].keys())[0] + ], ) - for chain_id in ctx.deployments["pools"]["astroport"].keys() - if chain_id in endpoints - ] - - osmo_pools = await osmosis.pools() - astros_pools = [await astro.pools() for astro in astros] - - if args.cmd is not None and len(args.cmd) > 0 and args.cmd[0] == "hist": - # The user wnats to see a specific route - if len(args.cmd) == 3 and args.cmd[1] == "show": - order_id = int(args.cmd[2]) - - if order_id < 0 or order_id >= len(ctx.order_history): - logger.critical("Route does not exist.") - - sys.exit(1) - - logger.info("%s", ctx.order_history[order_id].fmt_pretty()) - - logger.info("Execution trace:") - - for log in 
ctx.order_history[order_id].logs: - logger.info("%s", log) - else: - for order in ctx.order_history: - logger.info( - "%s (%s) expected ROI: %d, realized P/L: %d, status: %s, is_osmo: %s, is_valence: %s", - order, - order.time_created, - order.expected_profit, - order.realized_profit if order.realized_profit else 0, - order.status, - any([leg.kind == "osmosis" for leg in order.route]), - any([leg.kind == "auction" for leg in order.route]), - ) - - # Print a profit summary - logger.info( - "Summary - total routes attepmted: %d, total routes completed: %d, min P/L: %d, max P/L: %d, total P/L: %d", - len(ctx.order_history), - len( - [ - order - for order in ctx.order_history - if order.status == Status.EXECUTED - ] - ), - min( - [ - order.realized_profit - for order in ctx.order_history - if order.realized_profit - ], - default=0, - ), - max( - [ - order.realized_profit - for order in ctx.order_history - if order.realized_profit - ], - default=0, - ), - sum( - [ - order.realized_profit - for order in ctx.order_history - if order.realized_profit - ] - ), - ) - - atomic_orders = [ - order - for order in ctx.order_history - if all( - [ - leg.kind == "astroport" or leg.kind == "auction" - for leg in order.route - ] - ) - ] - - ibc_orders = [ - order - for order in ctx.order_history - if any([leg.kind == "osmosis" for leg in order.route]) - ] - - logger.info( - "Summary - total atomic routes attepmted: %d, total atomic routes completed: %d, min P/L: %d, max P/L: %d, total atomic P/L: %d", - len(atomic_orders), - len( - [ - order - for order in atomic_orders - if order.status == Status.EXECUTED - ] - ), - min( - [ - order.realized_profit - for order in atomic_orders - if order.realized_profit - ], - default=0, - ), - max( - [ - order.realized_profit - for order in atomic_orders - if order.realized_profit - ], - default=0, - ), - sum( - [ - order.realized_profit - for order in atomic_orders - if order.realized_profit - ] - ), - ) - logger.info( - "Summary - total IBC routes 
attepmted: %d, total IBC routes completed: %d, min P/L: %d, max P/L: %d, total IBC P/L: %d", - len(ibc_orders), - len( - [ - order - for order in ibc_orders - if order.status == Status.EXECUTED - ] - ), - min( - [ - order.realized_profit - for order in atomic_orders - if order.realized_profit - ], - default=0, - ), - max( - [ - order.realized_profit - for order in atomic_orders - if order.realized_profit - ], - default=0, - ), - sum( - [ - order.realized_profit - for order in ibc_orders - if order.realized_profit - ] - ), + astros = [ + NeutronAstroportPoolDirectory( + ctx.deployments, + chain_id, + ctx.http_session, + [ + ( + grpc.aio.secure_channel( + endpoint.split("grpc+https://")[1], + grpc.ssl_channel_credentials(), + ) + if "https" in endpoint + else grpc.aio.insecure_channel( + endpoint.split("grpc+http://")[1], + ) + ) + for endpoint in endpoints[chain_id]["grpc"] + ], + poolfile_path=args.pool_file, + endpoints=endpoints[chain_id], ) + for chain_id in ctx.deployments["pools"]["astroport"].keys() + if chain_id in endpoints + ] + + osmo_pools = await osmosis.pools() + astros_pools = [await astro.pools() for astro in astros] + + for osmo_base in osmo_pools.values(): + for osmo_pool in osmo_base.values(): + sched.register_provider(osmo_pool) + + for astro_pools in astros_pools: + for astro_base in astro_pools.values(): + for astro_pool in astro_base.values(): + sched.register_provider(astro_pool) + + await sched.register_auctions() + + # Calculate the number of pools by summing up the number of pools for a particular base + # in Osmosis and Astroport + n_pools: int = sum( + map(lambda base: len(base.values()), osmo_pools.values()) + ) + sum(map(lambda base: len(base.values()), astro_pools.values())) + + logger.info("Built pool catalogue with %d pools", n_pools) + + async def event_loop() -> None: + while True: + try: + async with asyncio.timeout(args.poll_interval): + await sched.poll() + except Exception: + logger.info( + "Arbitrage round failed: %s", 
traceback.format_exc() + ) - return - - for osmo_base in osmo_pools.values(): - for osmo_pool in osmo_base.values(): - sched.register_provider(osmo_pool) - - for astro_pools in astros_pools: - for astro_base in astro_pools.values(): - for astro_pool in astro_base.values(): - sched.register_provider(astro_pool) - - await sched.register_auctions() - - # Calculate the number of pools by summing up the number of pools for a particular base - # in Osmosis and Astroport - n_pools: int = sum( - map(lambda base: len(base.values()), osmo_pools.values()) - ) + sum(map(lambda base: len(base.values()), astro_pools.values())) - - logger.info("Built pool catalogue with %d pools", n_pools) - - async def event_loop() -> None: - while True: - try: - async with asyncio.timeout(args.poll_interval): - await sched.poll() - except Exception: - logger.info( - "Arbitrage round failed: %s", traceback.format_exc() - ) - - continue + continue - def daemon() -> None: - loop = asyncio.get_event_loop() - loop.run_until_complete(event_loop()) + def daemon() -> None: + loop = asyncio.get_event_loop() + loop.run_until_complete(event_loop()) - # Save pools to the specified file if the user wants to dump pools - if args.cmd is not None and len(args.cmd) > 0 and args.cmd[0] == "daemon": - Process(target=daemon, args=[]).run() - logger.info("Spawned searcher daemon") + # Save pools to the specified file if the user wants to dump pools + if ( + args.cmd is not None + and len(args.cmd) > 0 + and args.cmd[0] == "daemon" + ): + Process(target=daemon, args=[]).run() + logger.info("Spawned searcher daemon") - return + return - await event_loop() + await event_loop() if __name__ == "__main__": diff --git a/src/contracts/route.py b/src/contracts/route.py index 29680c935..fcaeaac09 100644 --- a/src/contracts/route.py +++ b/src/contracts/route.py @@ -1,7 +1,8 @@ +from datetime import datetime import json from enum import Enum from dataclasses import dataclass -from typing import Union, Callable, Optional +from 
typing import Union, Callable, Optional, Any from src.contracts.auction import AuctionProvider from src.contracts.pool.provider import PoolProvider @@ -59,7 +60,7 @@ class Route: realized_profit: Optional[int] quantities: list[int] status: Status - time_created: str + time_created: datetime logs: list[str] logs_enabled: bool @@ -69,10 +70,62 @@ def __hash__(self) -> int: def __str__(self) -> str: return f"r{self.uid}" - def fmt_pretty(self) -> str: - route_fmt = " -> ".join(map(lambda route_leg: str(route_leg), self.route)) - - return f"{str(self)} ({self.time_created}) expected ROI: {self.expected_profit}, realized P/L: {self.realized_profit}, status: {self.status}, path: {route_fmt}, execution plan: {self.quantities}" + def db_row(self) -> list[Any]: + """ + Creates a tuple representing the route's metadata for + persistence purposes. + Expects insertioni columns (theoretical_profit, + expected_profit, + realized_profit, + status, + time_created, + logs_enabled) + """ + + return [ + str(self.theoretical_profit), + str(self.expected_profit), + str(self.realized_profit), + str(self.status), + self.time_created, + self.logs_enabled, + ] + + def legs_db_rows(self, order_uid: int) -> list[list[Any]]: + """ + Creates a db row for each leg in the route. + Expects insertion columns (in_amount, out_amount, in_asset, out_asset, kind, executed) + """ + + legs = [] + + for i, (leg, leg_repr, in_amount) in enumerate( + zip(self.legs, self.route, self.quantities) + ): + out_amount = self.quantities[i + 1] + + legs.append( + [ + i, + order_uid, + str(in_amount), + str(out_amount), + leg.in_asset(), + leg.out_asset(), + leg_repr.kind, + leg_repr.executed, + ] + ) + + return legs + + def logs_db_rows(self, order_uid: int) -> list[tuple[int, int, str]]: + """ + Creates a db row for each log in the route. 
+ Expects insertionc columns (contents) + """ + + return [(i, order_uid, log) for (i, log) in enumerate(self.logs)] def dumps(self) -> str: return json.dumps( @@ -98,29 +151,3 @@ def dumps(self) -> str: "logs": self.logs, } ) - - -def load_route(s: str) -> Route: - loaded = json.loads(s) - - return Route( - loaded["uid"], - [load_leg_repr(json_leg) for json_leg in loaded["route"]], - [], - loaded["theoretical_profit"], - loaded["expected_profit"], - loaded["realized_profit"], - loaded["quantities"], - Status[loaded["status"].split(".")[1]], - loaded["time_created"], - loaded["logs"], - True, - ) - - -def load_leg_repr(s: str) -> LegRepr: - loaded = json.loads(s) - - return LegRepr( - loaded["in_asset"], loaded["out_asset"], loaded["kind"], loaded["executed"] - ) diff --git a/src/scheduler.py b/src/scheduler.py index 8e6259ee7..4535bb917 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -2,18 +2,25 @@ Implements a strategy runner with an arbitrary provider set in an event-loop style. 
""" +from sqlite3 import Connection from asyncio import Semaphore import logging from datetime import datetime -import json from typing import Callable, List, Self, Optional, Awaitable, Any, TypeVar, Generic from dataclasses import dataclass from cosmpy.aerial.client import LedgerClient from cosmpy.crypto.address import Address from cosmpy.aerial.wallet import LocalWallet from src.contracts.auction import AuctionDirectory, AuctionProvider -from src.contracts.route import Route, load_route, LegRepr, Status, Leg +from src.contracts.route import Route, LegRepr, Status, Leg from src.contracts.pool.provider import PoolProvider +from src.db import ( + migrate, + insert_legs_rows, + insert_logs_rows, + insert_order_rows, + order_row_count, +) from src.util import ( try_multiple_clients, DenomRouteLeg, @@ -63,6 +70,7 @@ class Ctx(Generic[TState]): denom_routes: dict[str, dict[str, list[DenomRouteLeg]]] chain_info: dict[str, ChainInfo] http_session_lock: Semaphore + db_connection: Connection def with_state(self, state: Any) -> Self: """ @@ -79,22 +87,32 @@ def commit_history(self) -> Self: Commits the order history to disk. 
""" - with open(self.cli_args["history_file"], "w", encoding="utf-8") as f: - f.seek(0) - json.dump([order.dumps() for order in self.order_history], f) + cur = self.db_connection.cursor() - return self + migrate(cur) - def recover_history(self) -> Self: - """ - Retrieves the order history from disk - """ + starting_uid = order_row_count(cur) - with open(self.cli_args["history_file"], "r", encoding="utf-8") as f: - f.seek(0) - self.order_history = [ - load_route(json_route) for json_route in json.load(f) - ][:-MAX_ROUTE_HISTORY_LEN] + route_rows = [route.db_row() for route in self.order_history] + leg_rows = [ + leg_row + for (i, route) in enumerate(self.order_history) + for leg_row in route.legs_db_rows(starting_uid + i) + ] + log_rows = [ + log_row + for (i, route) in enumerate(self.order_history) + for log_row in route.logs_db_rows(starting_uid + i) + ] + + insert_order_rows(cur, route_rows) + insert_legs_rows(cur, leg_rows) + insert_logs_rows(cur, log_rows) + + cur.close() + self.db_connection.commit() + + self.order_history = [] return self @@ -132,10 +150,11 @@ def queue_route( None, quantities, Status.QUEUED, - datetime.now().strftime("%Y-%m-%d @ %H:%M:%S"), + datetime.now(), [], enable_logs, ) + self.order_history.append(r) return r diff --git a/src/strategies/util.py b/src/strategies/util.py index 3da3ae6dd..3142ee471 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -2,7 +2,6 @@ Defines common utilities shared across arbitrage strategies. 
""" -from bisect import insort import traceback import asyncio from itertools import groupby @@ -920,9 +919,7 @@ async def recover_funds( profit, quantities = resp - r = ctx.queue_route( - backtracked, -r.theoretical_profit, -r.expected_profit, quantities - ) + r = ctx.queue_route(backtracked, 0, 0, quantities) ctx.log_route(r, "info", "Executing recovery", []) diff --git a/tests/util.py b/tests/util.py index 72a2ec4c2..f3c539d82 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,3 +1,4 @@ +from sqlite3 import connect from asyncio import Semaphore from typing import Any, cast, AsyncIterator import json @@ -98,12 +99,13 @@ async def ctx() -> AsyncIterator[Ctx[Any]]: "cmd": "", "net_config": "", "log_file": "", - "history_file": "", + "history_db": "", "skip_api_key": None, }, state=None, terminated=False, http_session=session, + db_connection=connect("test_db.db"), order_history=[], deployments=cast(dict[str, Any], json.load(f)), denom_map={}, From d99c1857d3ca119ae4fdd95fd59672c4d7493350 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Wed, 16 Oct 2024 15:41:42 +0000 Subject: [PATCH 36/46] Create db.py. --- src/db.py | 109 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 109 insertions(+) create mode 100644 src/db.py diff --git a/src/db.py b/src/db.py new file mode 100644 index 000000000..9d544dcce --- /dev/null +++ b/src/db.py @@ -0,0 +1,109 @@ +from sqlite3 import Cursor +from typing import Any + + +def order_row_count(cur: Cursor) -> int: + """ + Gets the number of orders in the database. + """ + + res = cur.execute("SELECT COUNT(*) AS cnt FROM orders") + + (cnt,) = res.fetchone() + + return int(cnt) + + +def insert_order_rows(cur: Cursor, rows: list[list[Any]]) -> None: + """ + Inserts a new row into orders for each order row. + Does not commit the transaction. 
+ """ + + cur.executemany( + """INSERT INTO orders( + theoretical_profit, + expected_profit, + realized_profit, + status, + time_created, + logs_enabled + ) VALUES (?, ?, ?, ?, ?, ?)""", + rows, + ) + + +def insert_legs_rows(cur: Cursor, rows: list[list[Any]]) -> None: + """ + Inserts a new row into legs for each leg row. + Does not commit the transaction. + """ + + cur.executemany( + """INSERT INTO legs( + route_index, + order_uid, + in_amount, + out_amount, + in_asset, + out_asset, + kind, + executed + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""", + rows, + ) + + +def insert_logs_rows(cur: Cursor, rows: list[tuple[int, int, str]]) -> None: + """ + Inserts a new row for each log row. + Does not commit the transaction. + """ + + cur.executemany( + "INSERT INTO logs(log_index, order_uid, contents) VALUES (?, ?, ?)", rows + ) + + +def migrate(cur: Cursor) -> None: + """ + Creates requisite tables in the on-disk db in case they do not already exist. + Does not commit the transaction. + """ + + cur.execute( + """CREATE TABLE IF NOT EXISTS orders( + uid INTEGER NOT NULL PRIMARY KEY, + theoretical_profit TEXT NOT NULL, + expected_profit TEXT NOT NULL, + realized_profit TEXT, + status TEXT NOT NULL, + time_created DATETIME NOT NULL, + logs_enabled BOOL NOT NULL + )""" + ) + + cur.execute( + """CREATE TABLE IF NOT EXISTS legs( + route_index INTEGER NOT NULL, + order_uid INTEGER NOT NULL, + in_amount TEXT NOT NULL, + out_amount TEXT, + in_asset TEXT NOT NULL, + out_asset TEXT NOT NULL, + kind TEXT NOT NULL, + executed BOOL NOT NULL, + PRIMARY KEY (route_index, order_uid), + FOREIGN KEY(order_uid) REFERENCES orders(uid) + )""" + ) + + cur.execute( + """CREATE TABLE IF NOT EXISTS logs( + log_index INTEGER NOT NULL, + order_uid INTEGER NOT NULL, + contents TEXT NOT NULL, + PRIMARY KEY (log_index, order_uid), + FOREIGN KEY(order_uid) REFERENCES orders(uid) + )""" + ) From 2bbce5e6b327bfd2268b27f4ac2de21f68c266dc Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Wed, 16 Oct 2024 
15:45:58 +0000 Subject: [PATCH 37/46] Remove autocommit arg to sqlite connect call. --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index 543d40c27..9c77a9249 100644 --- a/main.py +++ b/main.py @@ -149,7 +149,7 @@ async def main() -> None: ), timeout=aiohttp.ClientTimeout(total=60), ) as session: - with closing(connect(args.history_db, autocommit=False)) as conn: + with closing(connect(args.history_db)) as conn: ctx: Ctx[Any] = Ctx( { chain_id: [ From 7486a56308667ad2c8343dc03d44c14fc8ce6216 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Wed, 16 Oct 2024 17:45:27 +0000 Subject: [PATCH 38/46] Restore old route planning algorithm. --- src/strategies/util.py | 186 +++++++++++++---------------------------- 1 file changed, 58 insertions(+), 128 deletions(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 3142ee471..80cdfdaf9 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -1124,74 +1124,12 @@ async def transfer_or_continue() -> bool: raise ValueError("IBC transfer timed out.") -async def quantities_for_starting_amount( - starting_amount: int, route: list[Leg] -) -> list[int]: - """ - Gets the order size for each subsequent trade given a starting amount, - and the liquidity in each pool. 
- """ - - quantities = [starting_amount] - - for leg in route: - if quantities[-1] == 0: - quantities = [starting_amount] - - break - - prev_amt = quantities[-1] - - if isinstance(leg.backend, AuctionProvider): - if leg.in_asset != leg.backend.asset_a: - return quantities - - if await leg.backend.remaining_asset_b() == 0: - return quantities - - quantities.append( - min( - int(await leg.backend.exchange_rate() * prev_amt), - await leg.backend.remaining_asset_b(), - ) - ) - - continue - - if leg.in_asset == leg.backend.asset_a: - quantities.append(int(await leg.backend.simulate_swap_asset_a(prev_amt))) - - pool_liquidity = await leg.backend.balance_asset_b() - - if ( - pool_liquidity == 0 - or Decimal(quantities[-1]) / Decimal(pool_liquidity) - > MAX_POOL_LIQUIDITY_TRADE - ): - break - - continue - - quantities.append(int(await leg.backend.simulate_swap_asset_b(prev_amt))) - - pool_liquidity = await leg.backend.balance_asset_a() - - if ( - pool_liquidity == 0 - or Decimal(quantities[-1]) / Decimal(pool_liquidity) - > MAX_POOL_LIQUIDITY_TRADE - ): - break - - return quantities - - async def quantities_for_route_profit( starting_amount: int, route: list[Leg], r: Route, ctx: Ctx[Any], - seek_profit: bool = True, + seek_profit: Optional[bool] = True, ) -> tuple[int, list[int]]: """ Calculates what quantities should be used to obtain @@ -1201,92 +1139,84 @@ async def quantities_for_route_profit( if len(route) <= 1: return (0, []) - left = 0 - right = starting_amount - mid = starting_amount // 2 + quantities: list[int] = [starting_amount] - plans: dict[int, list[int]] = {} - plans_by_profit: list[int] = [] + while (seek_profit and quantities[-1] - quantities[0] <= 0) or len( + quantities + ) <= len(route): + ctx.log_route(r, "info", "Route has possible execution plan: %s", [quantities]) - attempts: int = 0 + if starting_amount < DENOM_QUANTITY_ABORT_ARB: + logger.debug( + "Hit investment backstop (%d) in route planning: %s (%s)", + DENOM_QUANTITY_ABORT_ARB, + 
starting_amount, + quantities, + ) - while ( - left != right - and mid != right - and mid > 0 - and mid <= starting_amount - and attempts < MAX_EVAL_PROBES - ): - attempts += 1 + return (0, []) - quantities: list[int] = ( - plans[mid] - if mid in plans - else await quantities_for_starting_amount(mid, route) - ) - plans[mid] = quantities + quantities = [starting_amount] - ctx.log_route( - r, - "info", - "Got execution plan @ %d: [%s]", - [ - mid, - ", ".join((str(qty) for qty in quantities)), - ], - ) + for leg in route: + if quantities[-1] == 0: + quantities = [starting_amount] - if not seek_profit and len(quantities) > len(route): - return (quantities[-1] - quantities[0], quantities) + break - profit = 0 if len(quantities) == 0 else quantities[-1] - quantities[0] + prev_amt = quantities[-1] - # Insert in sorted position - if len(quantities) > len(route): - plans_by_profit.append(mid) + if isinstance(leg.backend, AuctionProvider): + if leg.in_asset != leg.backend.asset_a: + return (0, []) - # Continue checking plans, since this quantity was not profitable - if len(quantities) <= len(route) or profit <= 0: - right = mid - mid = left + (right - left) // 2 + if await leg.backend.remaining_asset_b() == 0: + return (0, []) - ctx.log_route(r, "debug", "Probing lower execution plans", []) + quantities.append( + min( + int(await leg.backend.exchange_rate() * prev_amt), + await leg.backend.remaining_asset_b(), + ) + ) - continue + continue - higher_plan = plans.get(mid + (right - mid) // 2, []) + if leg.in_asset == leg.backend.asset_a: + quantities.append( + int(await leg.backend.simulate_swap_asset_a(prev_amt)) + ) - # No more to evaluate, since greater starting amount was less profitable - if ( - len(higher_plan) > 0 - and len(higher_plan) >= len(route) - and higher_plan[-1] - higher_plan[0] <= profit - ): - ctx.log_route(r, "info", "Best execution plan identified", []) + pool_liquidity = await leg.backend.balance_asset_b() - break + if ( + pool_liquidity == 0 + or 
Decimal(quantities[-1]) / Decimal(pool_liquidity) + > MAX_POOL_LIQUIDITY_TRADE + ): + break - # This plan is profitable, but a bigger plan might be even more profitable - left = mid - mid = (right - left) // 2 + continue - ctx.log_route(r, "debug", "Probing higher execution plans", []) + quantities.append(int(await leg.backend.simulate_swap_asset_b(prev_amt))) - plans_by_profit.sort(key=lambda idx: plans[idx][-1] - plans[idx][0]) + pool_liquidity = await leg.backend.balance_asset_a() - if len(plans_by_profit) == 0: - return (0, []) + if ( + pool_liquidity == 0 + or Decimal(quantities[-1]) / Decimal(pool_liquidity) + > MAX_POOL_LIQUIDITY_TRADE + ): + break - best_plan = plans[plans_by_profit[-1]] + starting_amount = int(Decimal(starting_amount) / Decimal(2.0)) - ctx.log_route( - r, - "info", - "Best execution plan: [%s]", - [", ".join((str(qty) for qty in best_plan))], - ) + ctx.log_route(r, "info", "Got execution plan: %s", [quantities]) + + if quantities[-1] - quantities[0] > 0: + ctx.log_route(r, "info", "Route is profitable: %s", [fmt_route(route)]) - return (best_plan[-1] - best_plan[0] if len(best_plan) > 0 else 0, best_plan) + return (quantities[-1] - quantities[0], quantities) async def route_base_denom_profit( From ad52f28f5fd1d643967c08753b4daa6da35393b9 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Wed, 16 Oct 2024 17:49:22 +0000 Subject: [PATCH 39/46] See previous. --- src/util.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/util.py b/src/util.py index b61b45ce9..ba108daa9 100644 --- a/src/util.py +++ b/src/util.py @@ -21,6 +21,9 @@ DENOM_RESOLVER_TIMEOUT_SEC = 5 +DENOM_QUANTITY_ABORT_ARB = 500 + + # Dictates the maximum number of concurrent calls to the skip # API in searching DISCOVERY_CONCURRENCY_FACTOR = 15 From b13e9b8cdee438bea83ad958c32e31e155d55c75 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Wed, 16 Oct 2024 17:52:29 +0000 Subject: [PATCH 40/46] See previous. 
--- src/strategies/util.py | 3 +++ src/util.py | 3 --- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 80cdfdaf9..3e622a121 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -43,6 +43,9 @@ MAX_POOL_LIQUIDITY_TRADE = Decimal("0.05") +DENOM_QUANTITY_ABORT_ARB = 500 + + """ Prevent routes from being evaluated excessively when binary search fails. """ diff --git a/src/util.py b/src/util.py index ba108daa9..b61b45ce9 100644 --- a/src/util.py +++ b/src/util.py @@ -21,9 +21,6 @@ DENOM_RESOLVER_TIMEOUT_SEC = 5 -DENOM_QUANTITY_ABORT_ARB = 500 - - # Dictates the maximum number of concurrent calls to the skip # API in searching DISCOVERY_CONCURRENCY_FACTOR = 15 From 060385e80941d968aea8f0869ffb3a58bc70ad71 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 17 Oct 2024 21:11:24 +0000 Subject: [PATCH 41/46] Gate log prefixes behind debug log level. --- README.md | 2 +- .../tests/transfer_neutron.py | 5 +- .../tests/transfer_osmosis.py | 5 +- main.py | 10 ++- src/scheduler.py | 67 +++++++++++++------ 5 files changed, 64 insertions(+), 25 deletions(-) diff --git a/README.md b/README.md index 7d3e9f50d..0e1ecc6eb 100644 --- a/README.md +++ b/README.md @@ -208,7 +208,7 @@ Debug information is logged to the DEBUG level, including: * Profit levels for considered trades * Intermediate debug logs -The log level may be set via the `LOGLEVEL` environment variable. Possible values are: `INFO`, `DEBUG`, or `ERROR`. +The log level may be set via the `LOGLEVEL` environment variable. Possible values are: `info`, `debug`, or `error`. 
An example output is as follows: diff --git a/local-interchaintest/tests/transfer_neutron.py b/local-interchaintest/tests/transfer_neutron.py index b9d49997d..539ce5a44 100644 --- a/local-interchaintest/tests/transfer_neutron.py +++ b/local-interchaintest/tests/transfer_neutron.py @@ -4,7 +4,10 @@ from asyncio import Semaphore from typing import Any from src.strategies.util import transfer_raw -from src.scheduler import Ctx, MAX_SKIP_CONCURRENT_CALLS +from src.scheduler import ( + Ctx, + MAX_SKIP_CONCURRENT_CALLS, +) from src.util import try_multiple_clients from src.util import custom_neutron_network_config import aiohttp diff --git a/local-interchaintest/tests/transfer_osmosis.py b/local-interchaintest/tests/transfer_osmosis.py index d7c2bd9a8..03bc07cc1 100644 --- a/local-interchaintest/tests/transfer_osmosis.py +++ b/local-interchaintest/tests/transfer_osmosis.py @@ -4,7 +4,10 @@ import asyncio from typing import Any from src.strategies.util import transfer_raw -from src.scheduler import Ctx, MAX_SKIP_CONCURRENT_CALLS +from src.scheduler import ( + Ctx, + MAX_SKIP_CONCURRENT_CALLS, +) from src.util import try_multiple_clients from src.util import custom_neutron_network_config import aiohttp diff --git a/main.py b/main.py index 9c77a9249..507c59904 100644 --- a/main.py +++ b/main.py @@ -19,7 +19,11 @@ from typing import Any, cast from cosmpy.aerial.client import LedgerClient from cosmpy.aerial.wallet import LocalWallet -from src.scheduler import Scheduler, Ctx, MAX_SKIP_CONCURRENT_CALLS +from src.scheduler import ( + Scheduler, + Ctx, + MAX_SKIP_CONCURRENT_CALLS, +) from src.util import ( custom_neutron_network_config, DISCOVERY_CONCURRENCY_FACTOR, @@ -90,7 +94,7 @@ async def main() -> None: format="%(asctime)s %(levelname)-8s %(message)s", datefmt="%Y-%m-%d %H:%M:%S", filename=args.log_file, - level=os.environ.get("LOGLEVEL", "INFO").upper(), + level=os.environ.get("LOGLEVEL", "info").upper(), ) else: @@ -98,7 +102,7 @@ async def main() -> None: 
format="%(asctime)s %(levelname)-8s %(message)s", datefmt="%Y-%m-%d %H:%M:%S", stream=sys.stdout, - level=os.environ.get("LOGLEVEL", "INFO").upper(), + level=os.environ.get("LOGLEVEL", "info").upper(), ) denom_file: dict[str, Any] = { diff --git a/src/scheduler.py b/src/scheduler.py index 4535bb917..e904ea64a 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -116,6 +116,9 @@ def commit_history(self) -> Self: return self + def refresh_denom_balances(self) -> None: + self.denom_balance_cache.clear() + def cancel(self) -> Self: """ Marks the event loop for termination. @@ -180,15 +183,8 @@ def log_route( return def asset_balance_prefix(leg: Leg, asset: str) -> Optional[str]: - balance_resp_asset = try_multiple_clients( - self.clients[leg.backend.chain_id], - lambda client: client.query_bank_balance( - Address( - self.wallet.public_key(), - prefix=leg.backend.chain_prefix, - ), - asset, - ), + balance_resp_asset = self.query_denom_balance( + asset, leg.backend.chain_id, leg.backend.chain_prefix ) if balance_resp_asset is None or not isinstance(balance_resp_asset, int): @@ -209,19 +205,22 @@ def leg_balance_prefixes(leg: Leg) -> list[str]: # Log all in and out asset balances for each leg in the route, # removing any duplicate prefixes using dict.fromkeys - prefix = " ".join( - list( - dict.fromkeys( - [ - prefix - for leg_prefixes in [ - leg_balance_prefixes(leg) for leg in route.legs + prefix = "" + + if log_level == "debug": + prefix = " ".join( + list( + dict.fromkeys( + [ + prefix + for leg_prefixes in [ + leg_balance_prefixes(leg) for leg in route.legs + ] + for prefix in leg_prefixes ] - for prefix in leg_prefixes - ] + ) ) ) - ) route.logs.append(f"{log_level.upper()} {prefix} {fmt_string % tuple(args)}") @@ -245,6 +244,36 @@ def leg_balance_prefixes(leg: Leg) -> list[str]: if log_level == "debug": logger.debug(fmt_string, str(route), *args) + def query_denom_balance(self, denom: str, chain_id: str, chain_prefix: str) -> int: + """ + Gets the balance of the 
denom on the given chain. + """ + + denom_id = f"{denom}_{chain_id}" + + if denom_id in self.denom_balance_cache: + return self.denom_balance_cache[denom_id] + + balance_resp_asset = try_multiple_clients( + self.clients[chain_id], + lambda client: client.query_bank_balance( + Address( + self.wallet.public_key(), + prefix=chain_prefix, + ), + denom, + ), + ) + + if balance_resp_asset is None or not isinstance(balance_resp_asset, int): + self.denom_balance_cache[denom_id] = 0 + + return 0 + + self.denom_balance_cache[denom_id] = int(balance_resp_asset) + + return int(balance_resp_asset) + async def query_denom_route( self, query: DenomRouteQuery ) -> Optional[list[DenomRouteLeg]]: From 93bc2fea26e77355d6db4d0584816131db79bcf7 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 17 Oct 2024 21:13:30 +0000 Subject: [PATCH 42/46] Remove denom balance prefix caching altogether. --- src/scheduler.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/scheduler.py b/src/scheduler.py index e904ea64a..b9be864b9 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -116,9 +116,6 @@ def commit_history(self) -> Self: return self - def refresh_denom_balances(self) -> None: - self.denom_balance_cache.clear() - def cancel(self) -> Self: """ Marks the event loop for termination. @@ -249,11 +246,6 @@ def query_denom_balance(self, denom: str, chain_id: str, chain_prefix: str) -> i Gets the balance of the denom on the given chain. 
""" - denom_id = f"{denom}_{chain_id}" - - if denom_id in self.denom_balance_cache: - return self.denom_balance_cache[denom_id] - balance_resp_asset = try_multiple_clients( self.clients[chain_id], lambda client: client.query_bank_balance( @@ -266,12 +258,8 @@ def query_denom_balance(self, denom: str, chain_id: str, chain_prefix: str) -> i ) if balance_resp_asset is None or not isinstance(balance_resp_asset, int): - self.denom_balance_cache[denom_id] = 0 - return 0 - self.denom_balance_cache[denom_id] = int(balance_resp_asset) - return int(balance_resp_asset) async def query_denom_route( From 15f19a9ab0584a605e718eb80a48319fd9a9d258 Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 17 Oct 2024 21:22:54 +0000 Subject: [PATCH 43/46] Track block heights at which legs in orders are submitted. --- src/contracts/route.py | 2 ++ src/db.py | 6 ++++-- src/scheduler.py | 2 +- src/strategies/util.py | 24 ++++++++++++++++++++---- 4 files changed, 27 insertions(+), 7 deletions(-) diff --git a/src/contracts/route.py b/src/contracts/route.py index fcaeaac09..3eb6520c6 100644 --- a/src/contracts/route.py +++ b/src/contracts/route.py @@ -40,6 +40,7 @@ class LegRepr: out_asset: str kind: str executed: bool + execution_height: Optional[int] def __str__(self) -> str: return f"{self.kind}: {self.in_asset} -> {self.out_asset}" @@ -114,6 +115,7 @@ def legs_db_rows(self, order_uid: int) -> list[list[Any]]: leg.out_asset(), leg_repr.kind, leg_repr.executed, + leg_repr.execution_height, ] ) diff --git a/src/db.py b/src/db.py index 9d544dcce..d2adebcd6 100644 --- a/src/db.py +++ b/src/db.py @@ -48,8 +48,9 @@ def insert_legs_rows(cur: Cursor, rows: list[list[Any]]) -> None: in_asset, out_asset, kind, - executed - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""", + executed, + execution_height + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", rows, ) @@ -93,6 +94,7 @@ def migrate(cur: Cursor) -> None: out_asset TEXT NOT NULL, kind TEXT NOT NULL, executed BOOL NOT NULL, + execution_height INTEGER, PRIMARY 
KEY (route_index, order_uid), FOREIGN KEY(order_uid) REFERENCES orders(uid) )""" diff --git a/src/scheduler.py b/src/scheduler.py index b9be864b9..9542b7799 100644 --- a/src/scheduler.py +++ b/src/scheduler.py @@ -141,7 +141,7 @@ def queue_route( r = Route( len(self.order_history), [ - LegRepr(leg.in_asset(), leg.out_asset(), leg.backend.kind, False) + LegRepr(leg.in_asset(), leg.out_asset(), leg.backend.kind, False, None) for leg in route ], route, diff --git a/src/strategies/util.py b/src/strategies/util.py index 3e622a121..714cfb05d 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -458,13 +458,21 @@ async def exec_arb( ) for leg, _ in sublegs: - next( + executed_leg = next( ( leg_repr for leg_repr in route_ent.route if str(leg_repr) == str(leg) ) - ).executed = True + ) + + executed_leg.executed = True + + # Update the execution height if it can be found + resp = tx.response() + + if resp: + executed_leg.execution_height = resp.height prev_leg = leg @@ -528,9 +536,17 @@ async def exec_arb( ], ) - next( + executed_leg = next( (leg_repr for leg_repr in route_ent.route if str(leg_repr) == str(leg)) - ).executed = True + ) + + executed_leg.executed = True + + # Update the execution height if it can be found + resp = tx.response() + + if resp: + executed_leg.execution_height = resp.height prev_leg = leg From 6d05a26eaf4b4e69af48c66dd5e902a583baebee Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Thu, 17 Oct 2024 23:48:17 +0000 Subject: [PATCH 44/46] Linter fixes. 
--- src/strategies/util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 714cfb05d..4412b9f09 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -469,7 +469,7 @@ async def exec_arb( executed_leg.executed = True # Update the execution height if it can be found - resp = tx.response() + resp = tx.response if resp: executed_leg.execution_height = resp.height @@ -543,7 +543,7 @@ async def exec_arb( executed_leg.executed = True # Update the execution height if it can be found - resp = tx.response() + resp = tx.response if resp: executed_leg.execution_height = resp.height From fda5eac17844f4355e01939a8f1f455fcdf85a7d Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Fri, 18 Oct 2024 16:07:36 +0000 Subject: [PATCH 45/46] Add tests for unprofitable osmosis arbs. --- local-interchaintest/src/main.rs | 50 +++++++++++++++++++++++++++++ local-interchaintest/src/tests.rs | 52 +++++++++++++++++++++++++++++-- 2 files changed, 100 insertions(+), 2 deletions(-) diff --git a/local-interchaintest/src/main.rs b/local-interchaintest/src/main.rs index 221270a75..367740945 100644 --- a/local-interchaintest/src/main.rs +++ b/local-interchaintest/src/main.rs @@ -183,6 +183,56 @@ fn main() -> Result<(), Box> { .with_test(Box::new(tests::test_unprofitable_arb) as TestFn) .build()?, )? 
+ // Test case unprofitable osmo arb: + // + // - Astro: untrn-bruhtoken @ 1.5 bruhtoken/untrn + // - Osmo: bruhtoken-uosmo @ 0.001 uosmo/bruhtoken + // - Astro: uosmo-untrn @ 1 untrn/uosmo + .run( + TestBuilder::default() + .with_name("Osmosis Arb") + .with_description("The arbitrage bot should not execute an unprofitable arb on Osmosis") + .with_denom(untrn_osmo.clone(), 100000000000) + .with_denom(uosmo.clone(), 100000000000) + .with_denom(bruhtoken.clone(), 100000000000) + .with_denom(untrn.clone(), 100000000000) + .with_denom(bruhtoken_osmo.clone(), 100000000000) + .with_pool( + untrn.clone(), + uosmo_ntrn.clone(), + Pool::Astroport( + AstroportPoolBuilder::default() + .with_balance_asset_a(10000000u128) + .with_balance_asset_b(15000000u128) + .build()?, + ), + ) + .with_pool( + uosmo.clone(), + bruhtoken_osmo.clone(), + Pool::Osmosis( + OsmosisPoolBuilder::default() + .with_funds(bruhtoken_osmo.clone(), 100000000u128) + .with_funds(uosmo.clone(), 100000u128) + .with_weight(bruhtoken_osmo.clone(), 100u128) + .with_weight(uosmo.clone(), 1u128) + .build(), + ), + ) + .with_pool( + untrn.clone(), + bruhtoken.clone(), + Pool::Auction( + AuctionPoolBuilder::default() + .with_balance_offer_asset(10000000u128) + .with_price(Decimal::percent(10)) + .build()?, + ), + ) + .with_arbbot() + .with_test(Box::new(tests::test_unprofitable_osmo_arb) as TestFn) + .build()?, + )? 
// Test case (astro -> osmo arb): // // - Astro: untrn-bruhtoken @ 1.5 bruhtoken/untrn diff --git a/local-interchaintest/src/tests.rs b/local-interchaintest/src/tests.rs index c592ebb56..1cb09b300 100644 --- a/local-interchaintest/src/tests.rs +++ b/local-interchaintest/src/tests.rs @@ -89,6 +89,54 @@ pub fn test_unprofitable_arb() -> Result<(), Box Result<(), Box> { + let conn = sqlite::open(ARBFILE_PATH).expect("failed to open db"); + + let profit = { + let query = "SELECT SUM(o.realized_profit) AS total_profit FROM orders o"; + + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; + + let auction_profit = { + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o INNER JOIN legs l ON o.uid = l.order_uid WHERE l.kind = 'auction' GROUP BY o.uid)"; + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; + + let osmo_profit = { + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o INNER JOIN legs l ON o.uid = l.order_uid WHERE l.kind = 'osmosis' GROUP BY o.uid)"; + let mut statement = conn.prepare(query).unwrap(); + + statement + .next() + .ok() + .and_then(|_| statement.read::("total_profit").ok()) + .unwrap_or_default() + }; + + println!("ARB BOT PROFIT: {profit}"); + println!("AUCTION BOT PROFIT: {auction_profit}"); + println!("OSMOSIS BOT PROFIT: {osmo_profit}"); + + util::assert_err("profit == 0", profit == 0, true)?; + util::assert_err("osmo_profit == 0", osmo_profit == 0, true)?; + util::assert_err("auction_profit == 0", auction_profit == 0, true)?; + + Ok(()) +} + pub fn test_osmo_arb() -> Result<(), Box> { let conn = sqlite::open(ARBFILE_PATH).expect("failed to open db"); @@ -105,7 +153,7 @@ pub fn 
test_osmo_arb() -> Result<(), Box> { }; let auction_profit = { - let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o WHERE l.kind = 'auction' INNER JOIN legs l ON o.uid == l.order_uid GROUP BY o.uid)"; + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o INNER JOIN legs l ON o.uid = l.order_uid WHERE l.kind = 'auction' GROUP BY o.uid)"; let mut statement = conn.prepare(query).unwrap(); statement @@ -116,7 +164,7 @@ pub fn test_osmo_arb() -> Result<(), Box> { }; let osmo_profit = { - let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o WHERE l.kind = 'osmosis' INNER JOIN legs l ON o.uid == l.order_uid GROUP BY o.uid)"; + let query = "SELECT SUM(order_profit) AS total_profit FROM (SELECT MAX(o.realized_profit) AS order_profit FROM orders o INNER JOIN legs l ON o.uid = l.order_uid WHERE l.kind = 'osmosis' GROUP BY o.uid)"; let mut statement = conn.prepare(query).unwrap(); statement From b1ab9cf32a33f6148f434fd4114c5f9c963b249b Mon Sep 17 00:00:00 2001 From: Dowland Aiello Date: Fri, 18 Oct 2024 18:47:20 +0000 Subject: [PATCH 46/46] Use source channel endpoint for packet ack checks. --- src/strategies/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/strategies/util.py b/src/strategies/util.py index 4412b9f09..e53a93f9f 100644 --- a/src/strategies/util.py +++ b/src/strategies/util.py @@ -1108,7 +1108,7 @@ async def transfer_or_continue() -> bool: # Check for a package acknowledgement by querying osmosis ack_resp = await try_multiple_rest_endpoints( - ctx.endpoints[dest_chain_id]["http"], + ctx.endpoints[src_chain_id]["http"], ( f"/ibc/core/channel/v1/channels/{src_channel_id}/" f"ports/transfer/packet_acks/"