Merge #6357: backport: bitcoin#22229 test: consolidate to f-strings and related fixes

7c6c93d fix: remove missing comment to follow-up for bitcoin#15864 (Konstantin Akimov)
65226da Merge bitcoin#22229: test: consolidate to f-strings (part 1) (MarcoFalke)
ad2c5a5 refactor: unify feature_notifications.py after #5522 with bitcoin's codebase (Konstantin Akimov)

Pull request description:

  ## Issue being fixed or feature implemented
  Just one backport, bitcoin#22229, because it is large, even though there is nothing non-trivial in it.

  Note that although it is called part 1, there has not been a part 2 yet.

  ## What was done?
  Some preparation and code unification to land bitcoin#22229 with fewer conflicts, and finally the backport itself.
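
  For context, the change is mechanical: `%`-interpolation and `str.format()` calls are rewritten as f-strings. A minimal sketch of the pattern (hypothetical values, not taken from the diff):

  ```python
  filename = "my-map-file.map"  # hypothetical example value
  version = "fec61fa2"

  # Before: printf-style and str.format()
  old_a = 'Opened asmap file "{}" from disk'.format(filename)
  old_b = "Using asmap version %s for IP bucketing" % version

  # After: f-strings interpolate inline and read left-to-right
  new_a = f'Opened asmap file "{filename}" from disk'
  new_b = f"Using asmap version {version} for IP bucketing"

  assert old_a == new_a and old_b == new_b  # identical output, clearer source
  ```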

  ## How Has This Been Tested?
  Ran unit & functional tests.

  ## Breaking Changes
  N/A

  ## Checklist:
  - [x] I have performed a self-review of my own code
  - [ ] I have commented my code, particularly in hard-to-understand areas
  - [ ] I have added or updated relevant unit/integration/functional/e2e tests
  - [ ] I have made corresponding changes to the documentation
  - [x] I have assigned this pull request to a milestone

ACKs for top commit:
  UdjinM6:
    utACK 7c6c93d
  PastaPastaPasta:
    utACK 7c6c93d

Tree-SHA512: fe296e3255d45a7a1924bd1e5e21634b3cd36ea3f71cf5e8684b54336771665ea7758de7bfc78721669a928f967e7d4db7b1da0a5cd275feb1a2ec0df841ad5c
PastaPastaPasta committed Oct 25, 2024
2 parents 54bec78 + 7c6c93d commit 4b5e392
Showing 24 changed files with 159 additions and 162 deletions.
14 changes: 7 additions & 7 deletions test/functional/feature_asmap.py
@@ -33,8 +33,8 @@
 VERSION = 'fec61fa21a9f46f3b17bdcd660d7f4cd90b966aad3aec593c99b35f0aca15853'

 def expected_messages(filename):
-    return ['Opened asmap file "{}" (59 bytes) from disk'.format(filename),
-            'Using asmap version {} for IP bucketing'.format(VERSION)]
+    return [f'Opened asmap file "{filename}" (59 bytes) from disk',
+            f'Using asmap version {VERSION} for IP bucketing']

 class AsmapTest(BitcoinTestFramework):
     def set_test_params(self):
@@ -58,7 +58,7 @@ def test_asmap_with_absolute_path(self):
         filename = os.path.join(self.datadir, 'my-map-file.map')
         shutil.copyfile(self.asmap_raw, filename)
         with self.node.assert_debug_log(expected_messages(filename)):
-            self.start_node(0, ['-asmap={}'.format(filename)])
+            self.start_node(0, [f'-asmap={filename}'])
         os.remove(filename)

     def test_asmap_with_relative_path(self):
@@ -68,13 +68,13 @@
         filename = os.path.join(self.datadir, name)
         shutil.copyfile(self.asmap_raw, filename)
         with self.node.assert_debug_log(expected_messages(filename)):
-            self.start_node(0, ['-asmap={}'.format(name)])
+            self.start_node(0, [f'-asmap={name}'])
         os.remove(filename)

     def test_default_asmap(self):
         shutil.copyfile(self.asmap_raw, self.default_asmap)
         for arg in ['-asmap', '-asmap=']:
-            self.log.info('Test dashd {} (using default map file)'.format(arg))
+            self.log.info(f'Test dashd {arg} (using default map file)')
             self.stop_node(0)
             with self.node.assert_debug_log(expected_messages(self.default_asmap)):
                 self.start_node(0, [arg])
@@ -99,15 +99,15 @@ def test_asmap_interaction_with_addrman_containing_entries(self):
     def test_default_asmap_with_missing_file(self):
         self.log.info('Test dashd -asmap with missing default map file')
         self.stop_node(0)
-        msg = "Error: Could not find asmap file \"{}\"".format(self.default_asmap)
+        msg = f"Error: Could not find asmap file \"{self.default_asmap}\""
         self.node.assert_start_raises_init_error(extra_args=['-asmap'], expected_msg=msg)

     def test_empty_asmap(self):
         self.log.info('Test dashd -asmap with empty map file')
         self.stop_node(0)
         with open(self.default_asmap, "w", encoding="utf-8") as f:
             f.write("")
-        msg = "Error: Could not parse asmap file \"{}\"".format(self.default_asmap)
+        msg = f"Error: Could not parse asmap file \"{self.default_asmap}\""
         self.node.assert_start_raises_init_error(extra_args=['-asmap'], expected_msg=msg)
         os.remove(self.default_asmap)

2 changes: 1 addition & 1 deletion test/functional/feature_backwards_compatibility.py
@@ -362,7 +362,7 @@ def run_test(self):
         wallet = node_master.get_wallet_rpc("u1_v17")
         info = wallet.getaddressinfo(address)
         # TODO enable back when HD wallets are created by default
-        #descriptor = "pkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + v17_pubkey + ")"
+        #descriptor = f"pkh([{info['hdmasterfingerprint']}{hdkeypath[1:]}]{v17_pubkey})"
         #assert_equal(info["desc"], descsum_create(descriptor))
         assert_equal(info["pubkey"], v17_pubkey)

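A small wrinkle the commented-out descriptor line above illustrates: before Python 3.12, an f-string cannot reuse its own quote character inside `{...}` expressions, so inner dictionary keys must switch quote style. A minimal sketch (hypothetical value, for illustration only):

```python
info = {"hdmasterfingerprint": "d34db33f"}  # hypothetical fingerprint

# f"pkh([{info["hdmasterfingerprint"]}])" is a SyntaxError before Python 3.12;
# switching the inner quotes keeps the parser happy:
descriptor = f"pkh([{info['hdmasterfingerprint']}])"
assert descriptor == "pkh([d34db33f])"
```
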
5 changes: 2 additions & 3 deletions test/functional/feature_blocksdir.py
@@ -24,11 +24,10 @@ def run_test(self):
         initialize_datadir(self.options.tmpdir, 0, self.chain)
         self.log.info("Starting with nonexistent blocksdir ...")
         blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
-        self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "' +
-                                                     blocksdir_path + '" does not exist.')
+        self.nodes[0].assert_start_raises_init_error([f"-blocksdir={blocksdir_path}"], f'Error: Specified blocks directory "{blocksdir_path}" does not exist.')
         os.mkdir(blocksdir_path)
         self.log.info("Starting with existing blocksdir ...")
-        self.start_node(0, ["-blocksdir=" + blocksdir_path])
+        self.start_node(0, [f"-blocksdir={blocksdir_path}"])
         self.log.info("mining blocks..")
         self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address)
         assert os.path.isfile(os.path.join(blocksdir_path, self.chain, "blocks", "blk00000.dat"))
5 changes: 2 additions & 3 deletions test/functional/feature_cltv.py
@@ -143,7 +143,7 @@ def run_test(self):
         block.nVersion = 3
         block.solve()

-        with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]):
+        with self.nodes[0].assert_debug_log(expected_msgs=[f'{block.hash}, bad-version(0x00000003)']):
             peer.send_and_ping(msg_block(block))
             assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
             peer.sync_with_ping()
@@ -173,8 +173,7 @@ def run_test(self):
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()

-        with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputScripts on {} failed with {}'.format(
-                block.vtx[-1].hash, expected_cltv_reject_reason)]):
+        with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with {expected_cltv_reject_reason}']):
             peer.send_and_ping(msg_block(block))
             assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
             peer.sync_with_ping()
27 changes: 13 additions & 14 deletions test/functional/feature_config_args.py
@@ -23,7 +23,7 @@ def test_config_file_parser(self):

         inc_conf_file_path = os.path.join(self.nodes[0].datadir, 'include.conf')
         with open(os.path.join(self.nodes[0].datadir, 'dash.conf'), 'a', encoding='utf-8') as conf:
-            conf.write('includeconf={}\n'.format(inc_conf_file_path))
+            conf.write(f'includeconf={inc_conf_file_path}\n')

         self.nodes[0].assert_start_raises_init_error(
             expected_msg='Error: Error parsing command line arguments: Invalid parameter -dash_cli=1',
@@ -42,13 +42,13 @@ def test_config_file_parser(self):
         if self.is_wallet_compiled():
             with open(inc_conf_file_path, 'w', encoding='utf8') as conf:
                 conf.write("wallet=foo\n")
-            self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Config setting for -wallet only applied on %s network when in [%s] section.' % (self.chain, self.chain))
+            self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: Config setting for -wallet only applied on {self.chain} network when in [{self.chain}] section.')

         main_conf_file_path = os.path.join(self.options.tmpdir, 'node0', 'dash_main.conf')
-        util.write_config(main_conf_file_path, n=0, chain='', extra_config='includeconf={}\n'.format(inc_conf_file_path))
+        util.write_config(main_conf_file_path, n=0, chain='', extra_config=f'includeconf={inc_conf_file_path}\n')
         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('acceptnonstdtxn=1\n')
-        self.nodes[0].assert_start_raises_init_error(extra_args=["-conf={}".format(main_conf_file_path)], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')
+        self.nodes[0].assert_start_raises_init_error(extra_args=[f"-conf={main_conf_file_path}"], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')

         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('nono\n')
@@ -68,14 +68,14 @@ def test_config_file_parser(self):

         inc_conf_file2_path = os.path.join(self.nodes[0].datadir, 'include2.conf')
         with open(os.path.join(self.nodes[0].datadir, 'dash.conf'), 'a', encoding='utf-8') as conf:
-            conf.write('includeconf={}\n'.format(inc_conf_file2_path))
+            conf.write(f'includeconf={inc_conf_file2_path}\n')

         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('testnot.datadir=1\n')
         with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
             conf.write('[testnet]\n')
         self.restart_node(0)
-        self.nodes[0].stop_node(expected_stderr='Warning: ' + inc_conf_file_path + ':1 Section [testnot] is not recognized.' + os.linesep + inc_conf_file2_path + ':1 Section [testnet] is not recognized.')
+        self.nodes[0].stop_node(expected_stderr=f'Warning: {inc_conf_file_path}:1 Section [testnot] is not recognized.{os.linesep}{inc_conf_file2_path}:1 Section [testnet] is not recognized.')

         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('')  # clear
@@ -104,8 +104,8 @@ def test_args_log(self):
                 'Command-line arg: rpcpassword=****',
                 'Command-line arg: rpcuser=****',
                 'Command-line arg: torpassword=****',
-                'Config file arg: %s="1"' % self.chain,
-                'Config file arg: [%s] server="1"' % self.chain,
+                f'Config file arg: {self.chain}="1"',
+                f'Config file arg: [{self.chain}] server="1"',
             ],
             unexpected_msgs=[
                 'alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0',
@@ -263,34 +263,33 @@ def run_test(self):

         # Check that using -datadir argument on non-existent directory fails
         self.nodes[0].datadir = new_data_dir
-        self.nodes[0].assert_start_raises_init_error(['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.')
+        self.nodes[0].assert_start_raises_init_error([f'-datadir={new_data_dir}'], f'Error: Specified data directory "{new_data_dir}" does not exist.')

         # Check that using non-existent datadir in conf file fails
         conf_file = os.path.join(default_data_dir, "dash.conf")

         # datadir needs to be set before [chain] section
         conf_file_contents = open(conf_file, encoding='utf8').read()
         with open(conf_file, 'w', encoding='utf8') as f:
-            f.write("datadir=" + new_data_dir + "\n")
+            f.write(f"datadir={new_data_dir}\n")
             f.write(conf_file_contents)

-        self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error: Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
+        self.nodes[0].assert_start_raises_init_error([f'-conf={conf_file}'], f'Error: Error reading configuration file: specified data directory "{new_data_dir}" does not exist.')

         # Check that an explicitly specified config file that cannot be opened fails
         none_existent_conf_file = os.path.join(default_data_dir, "none_existent_dash.conf")
         self.nodes[0].assert_start_raises_init_error(['-conf=' + none_existent_conf_file], 'Error: Error reading configuration file: specified config file "' + none_existent_conf_file + '" could not be opened.')

         # Create the directory and ensure the config file now works
         os.mkdir(new_data_dir)
         # Temporarily disabled, because this test would access the user's home dir (~/.bitcoin)
-        self.start_node(0, ['-conf='+conf_file])
+        self.start_node(0, [f'-conf={conf_file}'])
         self.stop_node(0)
         assert os.path.exists(os.path.join(new_data_dir, self.chain, 'blocks'))

         # Ensure command line argument overrides datadir in conf
         os.mkdir(new_data_dir_2)
         self.nodes[0].datadir = new_data_dir_2
-        self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file])
+        self.start_node(0, [f'-datadir={new_data_dir_2}', f'-conf={conf_file}'])
         assert os.path.exists(os.path.join(new_data_dir_2, self.chain, 'blocks'))
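
As the hunks above show, the braces accept arbitrary expressions, not just variable names: `{os.linesep}`, `{self.chain}`, and function calls all work inline. A short sketch (hypothetical paths, for illustration):

```python
import os

inc1, inc2 = "/tmp/include.conf", "/tmp/include2.conf"  # hypothetical paths
warning = (
    f"Warning: {inc1}:1 Section [testnot] is not recognized."
    f"{os.linesep}{inc2}:1 Section [testnet] is not recognized."
)
print(warning)
```
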
2 changes: 1 addition & 1 deletion test/functional/feature_csv_activation.py
@@ -258,7 +258,7 @@ def run_test(self):
         self.send_blocks(test_blocks)

         assert_equal(self.tipheight, CSV_ACTIVATION_HEIGHT - 2)
-        self.log.info("Height = {}, CSV not yet active (will activate for block {}, not {})".format(self.tipheight, CSV_ACTIVATION_HEIGHT, CSV_ACTIVATION_HEIGHT - 1))
+        self.log.info(f"Height = {self.tipheight}, CSV not yet active (will activate for block {CSV_ACTIVATION_HEIGHT}, not {CSV_ACTIVATION_HEIGHT - 1})")
         assert not softfork_active(self.nodes[0], 'csv')

         # Test both version 1 and version 2 transactions for all tests
32 changes: 16 additions & 16 deletions test/functional/feature_dbcrash.py
@@ -102,7 +102,7 @@ def restart_node(self, node_index, expected_tip):
         # perhaps we generated a test case that blew up our cache?
         # TODO: If this happens a lot, we should try to restart without -dbcrashratio
         # and make sure that recovery happens.
-        raise AssertionError("Unable to successfully restart node %d in allotted time", node_index)
+        raise AssertionError(f"Unable to successfully restart node {node_index} in allotted time")

     def submit_block_catch_error(self, node_index, block):
         """Try submitting a block to the given node.
@@ -114,10 +114,10 @@ def submit_block_catch_error(self, node_index, block):
             self.nodes[node_index].submitblock(block)
             return True
         except (http.client.CannotSendRequest, http.client.RemoteDisconnected) as e:
-            self.log.debug("node %d submitblock raised exception: %s", node_index, e)
+            self.log.debug(f"node {node_index} submitblock raised exception: {e}")
             return False
         except OSError as e:
-            self.log.debug("node %d submitblock raised OSError exception: errno=%s", node_index, e.errno)
+            self.log.debug(f"node {node_index} submitblock raised OSError exception: errno={e.errno}")
             if e.errno in [errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET]:
                 # The node has likely crashed
                 return False
@@ -142,15 +142,15 @@ def sync_node3blocks(self, block_hashes):
         # Deliver each block to each other node
         for i in range(3):
             nodei_utxo_hash = None
-            self.log.debug("Syncing blocks to node %d", i)
+            self.log.debug(f"Syncing blocks to node {i}")
             for (block_hash, block) in blocks:
                 # Get the block from node3, and submit to node_i
-                self.log.debug("submitting block %s", block_hash)
+                self.log.debug(f"submitting block {block_hash}")
                 if not self.submit_block_catch_error(i, block):
                     # TODO: more carefully check that the crash is due to -dbcrashratio
                     # (change the exit code perhaps, and check that here?)
                     self.wait_for_node_exit(i, timeout=30)
-                    self.log.debug("Restarting node %d after block hash %s", i, block_hash)
+                    self.log.debug(f"Restarting node {i} after block hash {block_hash}")
                     nodei_utxo_hash = self.restart_node(i, block_hash)
                     assert nodei_utxo_hash is not None
                     self.restart_counts[i] += 1
@@ -167,7 +167,7 @@ def sync_node3blocks(self, block_hashes):
             # - we only update the utxo cache after a node restart, since flushing
             #   the cache is a no-op at that point
             if nodei_utxo_hash is not None:
-                self.log.debug("Checking txoutsetinfo matches for node %d", i)
+                self.log.debug(f"Checking txoutsetinfo matches for node {i}")
                 assert_equal(nodei_utxo_hash, node3_utxo_hash)

     def verify_utxo_hash(self):
@@ -218,14 +218,14 @@ def run_test(self):
         # Start by creating a lot of utxos on node3
         initial_height = self.nodes[3].getblockcount()
         utxo_list = create_confirmed_utxos(self, self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000, sync_fun=self.no_op)
-        self.log.info("Prepped %d utxo entries", len(utxo_list))
+        self.log.info(f"Prepped {len(utxo_list)} utxo entries")

         # Sync these blocks with the other nodes
         block_hashes_to_sync = []
         for height in range(initial_height + 1, self.nodes[3].getblockcount() + 1):
             block_hashes_to_sync.append(self.nodes[3].getblockhash(height))

-        self.log.debug("Syncing %d blocks with other nodes", len(block_hashes_to_sync))
+        self.log.debug(f"Syncing {len(block_hashes_to_sync)} blocks with other nodes")
         # Syncing the blocks could cause nodes to crash, so the test begins here.
         self.sync_node3blocks(block_hashes_to_sync)
@@ -235,18 +235,18 @@ def run_test(self):
         # each time through the loop, generate a bunch of transactions,
         # and then either mine a single new block on the tip, or some-sized reorg.
         for i in range(40):
-            self.log.info("Iteration %d, generating 2500 transactions %s", i, self.restart_counts)
+            self.log.info(f"Iteration {i}, generating 2500 transactions {self.restart_counts}")
             # Generate a bunch of small-ish transactions
             self.generate_small_transactions(self.nodes[3], 2500, utxo_list)
             # Pick a random block between current tip, and starting tip
             current_height = self.nodes[3].getblockcount()
             random_height = random.randint(starting_tip_height, current_height)
-            self.log.debug("At height %d, considering height %d", current_height, random_height)
+            self.log.debug(f"At height {current_height}, considering height {random_height}")
             if random_height > starting_tip_height:
                 # Randomly reorg from this point with some probability (1/4 for
                 # tip, 1/5 for tip-1, ...)
                 if random.random() < 1.0 / (current_height + 4 - random_height):
-                    self.log.debug("Invalidating block at height %d", random_height)
+                    self.log.debug(f"Invalidating block at height {random_height}")
                     self.nodes[3].invalidateblock(self.nodes[3].getblockhash(random_height))

             # Now generate new blocks until we pass the old tip height
@@ -260,18 +260,18 @@ def run_test(self):
                     address=self.nodes[3].getnewaddress(),
                     sync_fun=self.no_op,
                 ))
-            self.log.debug("Syncing %d new blocks...", len(block_hashes))
+            self.log.debug(f"Syncing {len(block_hashes)} new blocks...")
             self.sync_node3blocks(block_hashes)
             utxo_list = self.nodes[3].listunspent()
-            self.log.debug("Node3 utxo count: %d", len(utxo_list))
+            self.log.debug(f"Node3 utxo count: {len(utxo_list)}")

         # Check that the utxo hashes agree with node3
         # Useful side effect: each utxo cache gets flushed here, so that we
         # won't get crashes on shutdown at the end of the test.
         self.verify_utxo_hash()

         # Check the test coverage
-        self.log.info("Restarted nodes: %s; crashes on restart: %d", self.restart_counts, self.crashed_on_restart)
+        self.log.info(f"Restarted nodes: {self.restart_counts}; crashes on restart: {self.crashed_on_restart}")

         # If no nodes were restarted, we didn't test anything.
         assert self.restart_counts != [0, 0, 0]
@@ ... @@
         # Warn if any of the nodes escaped restart.
         for i in range(3):
             if self.restart_counts[i] == 0:
-                self.log.warning("Node %d never crashed during utxo flush!", i)
+                self.log.warning(f"Node {i} never crashed during utxo flush!")


 if __name__ == "__main__":
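
One reviewer-level caveat for the logger conversions in this file: `self.log.debug("node %d ...", i)` defers `%`-formatting until a handler actually emits the record, while an f-string is evaluated eagerly at the call site even when DEBUG is disabled. For cheap interpolations like these the cost is negligible, but the distinction is worth knowing; a minimal sketch (hypothetical logger, for illustration):

```python
import logging

log = logging.getLogger("example")  # hypothetical logger
node_index = 2

# Lazy: %-substitution happens only if a DEBUG record is actually emitted.
log.debug("node %d submitblock raised exception", node_index)

# Eager: the f-string is built before log.debug() is even called.
log.debug(f"node {node_index} submitblock raised exception")
```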