diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 5f3e3ea78e4a..89b015cc524f 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -155,6 +155,7 @@ class TestBlockHeaderValidation: @pytest.mark.anyio async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_blocks: list[FullBlock]) -> None: blocks = default_1000_blocks + fork_info = ForkInfo(blocks[0].height - 1, blocks[0].height - 1, blocks[0].prev_header_hash) for block in blocks: if ( len(block.finished_sub_slots) > 0 @@ -181,7 +182,9 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block assert error.code == Err.INVALID_NEW_SUB_SLOT_ITERS # Also fails calling the outer methods, but potentially with a different error - await _validate_and_add_block(empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK) + await _validate_and_add_block( + empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK, fork_info=fork_info + ) new_finished_ss_2 = recursive_replace( block.finished_sub_slots[0], @@ -205,7 +208,7 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # Also fails calling the outer methods, but potentially with a different error await _validate_and_add_block( - empty_blockchain, block_bad_2, expected_result=AddBlockResult.INVALID_BLOCK + empty_blockchain, block_bad_2, expected_result=AddBlockResult.INVALID_BLOCK, fork_info=fork_info ) # 3c @@ -235,7 +238,7 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # Also fails calling the outer methods, but potentially with a different error await _validate_and_add_block( - empty_blockchain, block_bad_3, expected_result=AddBlockResult.INVALID_BLOCK + empty_blockchain, block_bad_3, expected_result=AddBlockResult.INVALID_BLOCK, fork_info=fork_info ) # 3d @@ -264,9 +267,9 @@ async def test_long_chain(self, 
empty_blockchain: Blockchain, default_1000_block # Also fails calling the outer methods, but potentially with a different error await _validate_and_add_block( - empty_blockchain, block_bad_4, expected_result=AddBlockResult.INVALID_BLOCK + empty_blockchain, block_bad_4, expected_result=AddBlockResult.INVALID_BLOCK, fork_info=fork_info ) - await _validate_and_add_block(empty_blockchain, block) + await _validate_and_add_block(empty_blockchain, block, fork_info=fork_info) log.info( f"Added block {block.height} total iters {block.total_iters} " f"new slot? {len(block.finished_sub_slots)}" @@ -3041,8 +3044,13 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo await _validate_and_add_block(b, block) blocks_reorg = bt.get_consecutive_blocks(2, block_list_input=blocks[:-7], guarantee_transaction_block=True) - await _validate_and_add_block(b, blocks_reorg[-2], expected_result=AddBlockResult.ADDED_AS_ORPHAN) - await _validate_and_add_block(b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN) + fork_info = ForkInfo(blocks[-8].height, blocks[-8].height, blocks[-8].header_hash) + await _validate_and_add_block( + b, blocks_reorg[-2], expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) + await _validate_and_add_block( + b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) # Coin does not exist in reorg blocks_reorg = bt.get_consecutive_blocks( @@ -3050,7 +3058,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo ) peak = b.get_peak() assert peak is not None - fork_info = await get_fork_info(b, blocks_reorg[-1], peak) await _validate_and_add_block(b, blocks_reorg[-1], expected_error=Err.UNKNOWN_UNSPENT, fork_info=fork_info) # Finally add the block to the fork (spending both in same bundle, this is ephemeral) @@ -3061,7 +3068,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo peak = b.get_peak() assert peak 
is not None - fork_info = await get_fork_info(b, blocks_reorg[-1], peak) await _validate_and_add_block( b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info ) @@ -3071,7 +3077,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo ) peak = b.get_peak() assert peak is not None - fork_info = await get_fork_info(b, blocks_reorg[-1], peak) await _validate_and_add_block(b, blocks_reorg[-1], expected_error=Err.DOUBLE_SPEND_IN_FORK, fork_info=fork_info) rewards_ph = wt.get_new_puzzlehash() @@ -3084,7 +3089,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo peak = b.get_peak() assert peak is not None - fork_info = await get_fork_info(b, blocks_reorg[-10], peak) for block in blocks_reorg[-10:]: await _validate_and_add_block_multi_result( b, block, expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.NEW_PEAK], fork_info=fork_info @@ -3264,13 +3268,18 @@ async def test_basic_reorg(self, empty_blockchain: Blockchain, bt: BlockTools) - assert peak.height == 14 blocks_reorg_chain = bt.get_consecutive_blocks(7, blocks[:10], seed=b"2") + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for reorg_block in blocks_reorg_chain: if reorg_block.height < 10: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK, fork_info=fork_info + ) elif reorg_block.height < 15: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) elif reorg_block.height >= 15: - await _validate_and_add_block(b, reorg_block) + await _validate_and_add_block(b, reorg_block, fork_info=fork_info) peak = b.get_peak() assert peak is not None assert peak.height == 16 @@ -3463,7 +3472,7 @@ async 
def test_long_reorg( # start the fork point a few blocks back, to test that the blockchain # can catch up - fork_block = default_10000_blocks[num_blocks_chain_2_start - 200] + fork_block = default_10000_blocks[num_blocks_chain_2_start - 101] fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) await b.warmup(fork_block.height) for block in blocks: @@ -3514,22 +3523,34 @@ async def test_reorg_from_genesis(self, empty_blockchain: Blockchain, bt: BlockT # Reorg to alternate chain that is 1 height longer blocks_reorg_chain = bt.get_consecutive_blocks(16, [], seed=b"2") + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for reorg_block in blocks_reorg_chain: if reorg_block.height < 15: await _validate_and_add_block_multi_result( b, reorg_block, expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.ALREADY_HAVE_BLOCK], + fork_info=fork_info, ) elif reorg_block.height >= 15: - await _validate_and_add_block(b, reorg_block) + await _validate_and_add_block(b, reorg_block, fork_info=fork_info) # Back to original chain blocks_reorg_chain_2 = bt.get_consecutive_blocks(3, blocks, seed=b"3") - await _validate_and_add_block(b, blocks_reorg_chain_2[-3], expected_result=AddBlockResult.ADDED_AS_ORPHAN) - await _validate_and_add_block(b, blocks_reorg_chain_2[-2]) - await _validate_and_add_block(b, blocks_reorg_chain_2[-1]) + # we start from the beginning to make sure fork_info is built correctly + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) + for reorg_block in blocks_reorg_chain_2: + if reorg_block.height < 15: + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK, fork_info=fork_info + ) + elif reorg_block.height < 16: + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) + else: + await _validate_and_add_block(b, reorg_block, fork_info=fork_info) peak = b.get_peak() assert peak is not None @@ -3579,7 
+3600,7 @@ async def test_reorg_transaction(self, empty_blockchain: Blockchain, bt: BlockTo await _validate_and_add_block(b, block) fork_block = blocks[11] fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) - for block in blocks_fork: + for block in blocks_fork[12:]: await _validate_and_add_block_no_error(b, block, fork_info=fork_info) @pytest.mark.anyio @@ -3694,8 +3715,8 @@ async def test_reorg_new_ref(empty_blockchain: Blockchain, bt: BlockTools) -> No ) blocks_reorg_chain = bt.get_consecutive_blocks(4, blocks_reorg_chain, seed=b"2") + fork_info = ForkInfo(-1, -1, b.constants.GENESIS_CHALLENGE) for i, block in enumerate(blocks_reorg_chain): - fork_info: Optional[ForkInfo] = None if i < 10: expected = AddBlockResult.ALREADY_HAVE_BLOCK elif i < 19: @@ -3709,8 +3730,6 @@ async def test_reorg_new_ref(empty_blockchain: Blockchain, bt: BlockTools) -> No expected = AddBlockResult.NEW_PEAK else: expected = AddBlockResult.NEW_PEAK - if fork_info is None: - fork_info = ForkInfo(blocks[1].height, blocks[1].height, blocks[1].header_hash) await _validate_and_add_block(b, block, expected_result=expected, fork_info=fork_info) peak = b.get_peak() assert peak is not None @@ -3762,7 +3781,7 @@ async def test_reorg_stale_fork_height(empty_blockchain: Blockchain, bt: BlockTo await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK) # fake the fork_info to make every new block look like a reorg - fork_info = ForkInfo(blocks[1].height, blocks[1].height, blocks[1].header_hash) + fork_info = ForkInfo(blocks[4].height, blocks[4].height, blocks[4].header_hash) for block in blocks[5:]: await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info) peak = b.get_peak() @@ -3812,8 +3831,10 @@ async def test_chain_failed_rollback(empty_blockchain: Blockchain, bt: BlockTool guarantee_transaction_block=True, ) + fork_block = blocks_reorg_chain[9] + fork_info = ForkInfo(fork_block.height, 
fork_block.height, fork_block.header_hash) for block in blocks_reorg_chain[10:-1]: - await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # Incorrectly set the height as spent in DB to trigger an error print(f"{await b.coin_store.get_coin_record(spend_bundle.coin_spends[0].coin.name())}") @@ -3823,7 +3844,7 @@ async def test_chain_failed_rollback(empty_blockchain: Blockchain, bt: BlockTool print(f"{await b.coin_store.get_coin_record(spend_bundle.coin_spends[0].coin.name())}") fork_block = blocks_reorg_chain[10 - 1] - fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) + # fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) with pytest.raises(ValueError, match="Invalid operation to set spent"): await _validate_and_add_block(b, blocks_reorg_chain[-1], fork_info=fork_info) @@ -3924,28 +3945,36 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> block1, block2 = b1, b2 counter += 1 - future = await pre_validate_block( - b.constants, - AugmentedBlockchain(b), - block1, - b.pool, - None, - ValidationState(ssi, diff, None), + preval = await ( + await pre_validate_block( + b.constants, + AugmentedBlockchain(b), + block1, + b.pool, + None, + ValidationState(ssi, diff, None), + ) ) - preval = await future - fork_info = ForkInfo(block1.height - 1, block1.height - 1, block1.prev_header_hash) + peak = b.get_peak() + if peak is None: + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) + else: + fork_info = await get_fork_info(b, block1, peak) _, err, _ = await b.add_block(block1, preval, sub_slot_iters=ssi, fork_info=fork_info) assert err is None - future = await pre_validate_block( - b.constants, - AugmentedBlockchain(b), - block2, - b.pool, - None, - ValidationState(ssi, diff, None), + preval = await ( + await pre_validate_block( + 
b.constants, + AugmentedBlockchain(b), + block2, + b.pool, + None, + ValidationState(ssi, diff, None), + ) ) - preval = await future - fork_info = ForkInfo(block2.height - 1, block2.height - 1, block2.prev_header_hash) + peak = b.get_peak() + assert peak is not None + fork_info = await get_fork_info(b, block2, peak) _, err, _ = await b.add_block(block2, preval, sub_slot_iters=ssi, fork_info=fork_info) assert err is None @@ -4042,11 +4071,13 @@ async def test_lookup_block_generators( # 507, 516, 527, 535, 539, 543, 547 # start with adding some blocks to test lookups from the mainchain + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for block in blocks_2[:550]: - await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK) + await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info) + fork_info = ForkInfo(blocks_1[500].height - 1, blocks_1[500].height - 1, blocks_1[500].prev_header_hash) for block in blocks_1[500:550]: - await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # now we have a blockchain with two forks, the peak is at blocks_2[550] and # the leight weight peak is at blocks_1[550] diff --git a/chia/_tests/blockchain/test_blockchain_transactions.py b/chia/_tests/blockchain/test_blockchain_transactions.py index 8ac88bc03571..95d0a7b2b01a 100644 --- a/chia/_tests/blockchain/test_blockchain_transactions.py +++ b/chia/_tests/blockchain/test_blockchain_transactions.py @@ -7,6 +7,7 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block from chia._tests.util.generator_tools_testing import run_and_get_removals_and_additions +from chia.consensus.blockchain import AddBlockResult from chia.full_node.full_node_api import FullNodeAPI from chia.protocols import wallet_protocol from chia.server.server import ChiaServer @@ 
-17,7 +18,7 @@ from chia.types.condition_opcodes import ConditionOpcode from chia.types.condition_with_args import ConditionWithArgs from chia.types.spend_bundle import SpendBundle, estimate_fees -from chia.util.errors import ConsensusError, Err +from chia.util.errors import Err from chia.util.ints import uint32, uint64 from chia.wallet.conditions import AssertCoinAnnouncement, AssertPuzzleAnnouncement @@ -44,8 +45,7 @@ async def test_basic_blockchain_tx( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block, None) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] spend_coin = None @@ -110,8 +110,7 @@ async def test_validate_blockchain_with_double_spend( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] spend_coin = None @@ -150,8 +149,7 @@ async def test_validate_blockchain_duplicate_output( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] @@ -189,8 +187,7 @@ async def test_validate_blockchain_with_reorg_double_spend( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] @@ -209,8 +206,7 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, ) # Move chain to height 10, with a spend at height 10 - for block in blocks_spend: - await 
full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks_spend, full_node_api_1.full_node) # Reorg at height 5, add up to and including height 12 new_blocks = bt.get_consecutive_blocks( @@ -221,8 +217,7 @@ async def test_validate_blockchain_with_reorg_double_spend( seed=b"another seed", ) - for block in new_blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(new_blocks[-7:], full_node_api_1.full_node) # Spend the same coin in the new reorg chain at height 13 new_blocks = bt.get_consecutive_blocks( @@ -257,8 +252,9 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, seed=b"spend at 12 is ok", ) - for block in new_blocks_reorg: - await full_node_api_1.full_node.add_block(block) + await _validate_and_add_block( + full_node_api_1.full_node.blockchain, new_blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN + ) # Spend at height 13 is also OK (same height) new_blocks_reorg = bt.get_consecutive_blocks( @@ -269,8 +265,9 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, seed=b"spend at 13 is ok", ) - for block in new_blocks_reorg: - await full_node_api_1.full_node.add_block(block) + await _validate_and_add_block( + full_node_api_1.full_node.blockchain, new_blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN + ) # Spend at height 14 is not OK (already spend) new_blocks_reorg = bt.get_consecutive_blocks( @@ -281,9 +278,12 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, seed=b"spend at 14 is double spend", ) - with pytest.raises(ConsensusError): - for block in new_blocks_reorg: - await full_node_api_1.full_node.add_block(block) + await _validate_and_add_block( + full_node_api_1.full_node.blockchain, + new_blocks_reorg[-1], + expected_result=AddBlockResult.INVALID_BLOCK, + expected_error=Err.DOUBLE_SPEND, + ) @pytest.mark.anyio async def 
test_validate_blockchain_spend_reorg_coin( @@ -300,8 +300,7 @@ async def test_validate_blockchain_spend_reorg_coin( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] @@ -321,7 +320,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) + await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node) coin_2 = None for coin in run_and_get_removals_and_additions( @@ -345,7 +344,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) coin_3 = None for coin in run_and_get_removals_and_additions( @@ -369,7 +368,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_cb_coin( @@ -392,7 +391,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( guarantee_transaction_block=True, ) - await add_blocks_in_batches(new_blocks, full_node_api_1.full_node, blocks[6].prev_header_hash) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) spend_block = new_blocks[-1] spend_coin = None @@ -410,7 +409,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await 
add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[6].prev_header_hash) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_since_genesis( @@ -425,8 +424,7 @@ async def test_validate_blockchain_spend_reorg_since_genesis( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[-1] spend_coin = None @@ -439,7 +437,7 @@ async def test_validate_blockchain_spend_reorg_since_genesis( new_blocks = bt.get_consecutive_blocks( 1, blocks, seed=b"", farmer_reward_puzzle_hash=coinbase_puzzlehash, transaction_data=spend_bundle ) - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await _validate_and_add_block(full_node_api_1.full_node.blockchain, new_blocks[-1]) # Spends a coin in a genesis reorg, that was already spent new_blocks = bt.get_consecutive_blocks( @@ -450,9 +448,6 @@ async def test_validate_blockchain_spend_reorg_since_genesis( guarantee_transaction_block=True, ) - for block in new_blocks: - await full_node_api_1.full_node.add_block(block) - new_blocks = bt.get_consecutive_blocks( 1, new_blocks, @@ -461,7 +456,7 @@ async def test_validate_blockchain_spend_reorg_since_genesis( transaction_data=spend_bundle, ) - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) @pytest.mark.anyio async def test_assert_my_coin_id( @@ -478,8 +473,7 @@ async def test_assert_my_coin_id( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent @@ -550,8 +544,7 @@ async def 
test_assert_coin_announcement_consumed( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -634,8 +627,7 @@ async def test_assert_puzzle_announcement_consumed( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -718,8 +710,7 @@ async def test_assert_height_absolute( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -784,8 +775,7 @@ async def test_assert_height_relative( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -852,8 +842,7 @@ async def test_assert_seconds_relative( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -897,7 +886,7 @@ async def test_assert_seconds_relative( time_per_block=301, ) ) - await full_node_api_1.full_node.add_block(blocks[-1]) + await _validate_and_add_block(full_node_1.blockchain, blocks[-1]) valid_new_blocks = bt.get_consecutive_blocks( 1, @@ -924,8 +913,7 @@ async def 
test_assert_seconds_absolute( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -971,7 +959,7 @@ async def test_assert_seconds_absolute( time_per_block=30, ) ) - await full_node_api_1.full_node.add_block(blocks[-1]) + await _validate_and_add_block(full_node_1.blockchain, blocks[-1]) valid_new_blocks = bt.get_consecutive_blocks( 1, @@ -998,8 +986,7 @@ async def test_assert_fee_condition( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] diff --git a/chia/_tests/core/full_node/stores/test_block_store.py b/chia/_tests/core/full_node/stores/test_block_store.py index 199c852cde34..aefa8607782b 100644 --- a/chia/_tests/core/full_node/stores/test_block_store.py +++ b/chia/_tests/core/full_node/stores/test_block_store.py @@ -15,6 +15,7 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block from chia._tests.util.db_connection import DBConnection, PathDBConnection +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.blockchain import AddBlockResult, Blockchain from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.consensus.full_block_to_block_record import header_block_to_sub_block_record @@ -148,9 +149,10 @@ async def test_get_full_blocks_at( bc = await Blockchain.create(coin_store, block_store, bt.constants, tmp_dir, 2) count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, 
expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) ret = await block_store.get_full_blocks_at([uint32(count)]) assert set(ret) == set([b1, b2]) count += 1 @@ -174,9 +176,10 @@ async def test_get_block_records_in_range( bc = await Blockchain.create(coin_store, block_store, bt.constants, tmp_dir, 2) count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # the range is inclusive ret = await block_store.get_block_records_in_range(count, count) assert len(ret) == 1 @@ -202,9 +205,10 @@ async def test_get_block_bytes_in_range_in_main_chain( bc = await Blockchain.create(coin_store, block_store, bt.constants, tmp_dir, 2) count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # the range is inclusive ret = await block_store.get_block_bytes_in_range(count, count) assert ret == [bytes(b1)] @@ -261,9 +265,10 @@ async def test_rollback(bt: BlockTools, tmp_dir: Path, use_cache: bool, default_ # insert all blocks count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) count += 1 ret = await 
block_store.get_random_not_compactified(count) assert len(ret) == count diff --git a/chia/_tests/core/full_node/stores/test_coin_store.py b/chia/_tests/core/full_node/stores/test_coin_store.py index a9a5f47c9c86..440e2ce2d4d2 100644 --- a/chia/_tests/core/full_node/stores/test_coin_store.py +++ b/chia/_tests/core/full_node/stores/test_coin_store.py @@ -12,6 +12,7 @@ from chia._tests.util.db_connection import DBConnection from chia._tests.util.get_name_puzzle_conditions import get_name_puzzle_conditions from chia._tests.util.misc import Marks, datacases +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward from chia.consensus.blockchain import AddBlockResult, Blockchain from chia.consensus.coinbase import create_farmer_coin, create_pool_coin @@ -364,13 +365,20 @@ async def test_basic_reorg(tmp_dir: Path, db_version: int, bt: BlockTools) -> No blocks_reorg_chain = bt.get_consecutive_blocks(reorg_length, blocks[: initial_block_count - 10], seed=b"2") + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for reorg_block in blocks_reorg_chain: if reorg_block.height < initial_block_count - 10: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK, fork_info=fork_info + ) elif reorg_block.height < initial_block_count: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) elif reorg_block.height >= initial_block_count: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.NEW_PEAK) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info + ) if reorg_block.is_transaction_block(): 
coins = reorg_block.get_included_reward_coins() records = [await coin_store.get_coin_record(coin.name()) for coin in coins] diff --git a/chia/_tests/core/full_node/stores/test_full_node_store.py b/chia/_tests/core/full_node/stores/test_full_node_store.py index 328a1ad50649..802939c31a43 100644 --- a/chia/_tests/core/full_node/stores/test_full_node_store.py +++ b/chia/_tests/core/full_node/stores/test_full_node_store.py @@ -452,8 +452,9 @@ async def test_basic_store( normalized_to_identity_cc_sp=normalized_to_identity, ) + fork_info = ForkInfo(blocks[0].height - 1, blocks[0].height - 1, blocks[0].prev_header_hash) for block in blocks: - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) sb = blockchain.block_record(block.header_hash) next_sub_slot_iters, next_difficulty = get_next_sub_slot_iters_and_difficulty( blockchain.constants, False, sb, blockchain @@ -834,6 +835,7 @@ async def test_basic_store( # Test future EOS cache store.initialize_genesis_sub_slot() + fork_info = ForkInfo(-1, -1, blockchain.constants.GENESIS_CHALLENGE) blocks = custom_block_tools.get_consecutive_blocks( 1, normalized_to_identity_cc_eos=normalized_to_identity, @@ -841,7 +843,7 @@ async def test_basic_store( normalized_to_identity_cc_ip=normalized_to_identity, normalized_to_identity_cc_sp=normalized_to_identity, ) - await _validate_and_add_block_no_error(blockchain, blocks[-1]) + await _validate_and_add_block_no_error(blockchain, blocks[-1], fork_info=fork_info) while True: blocks = custom_block_tools.get_consecutive_blocks( 1, @@ -851,7 +853,7 @@ async def test_basic_store( normalized_to_identity_cc_ip=normalized_to_identity, normalized_to_identity_cc_sp=normalized_to_identity, ) - await _validate_and_add_block_no_error(blockchain, blocks[-1]) + await _validate_and_add_block_no_error(blockchain, blocks[-1], fork_info=fork_info) sb = blockchain.block_record(blocks[-1].header_hash) if 
sb.first_in_sub_slot: break @@ -982,6 +984,7 @@ async def test_basic_store( # i2 ......... i1 # Then do a reorg up to B2, removing all signage points after B2, but not before log.warning(f"Adding blocks up to {blocks[-1]}") + fork_info = ForkInfo(-1, -1, blockchain.constants.GENESIS_CHALLENGE) for block in blocks: await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) @@ -1042,7 +1045,7 @@ def assert_sp_none(sp_index: int, is_none: bool) -> None: assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp) # Adding a new peak clears all SPs after that peak - await _validate_and_add_block_no_error(blockchain, blocks[-2]) + await _validate_and_add_block_no_error(blockchain, blocks[-2], fork_info=fork_info) peak = blockchain.get_peak() assert peak is not None result = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash) @@ -1090,7 +1093,7 @@ def assert_sp_none(sp_index: int, is_none: bool) -> None: assert_sp_none(i1 + 1, False) assert_sp_none(i1 + 4, False) - await _validate_and_add_block_no_error(blockchain, blocks[-1]) + await _validate_and_add_block_no_error(blockchain, blocks[-1], fork_info=fork_info) peak = blockchain.get_peak() assert peak is not None result = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash) @@ -1120,7 +1123,7 @@ def assert_sp_none(sp_index: int, is_none: bool) -> None: break else: for block in blocks[-2:]: - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) @pytest.mark.limit_consensus_modes(reason="save time") diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index de6da463f30e..aad87ec8067d 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -549,8 +549,9 @@ async def test_basic_chain(self, wallet_nodes, self_hostname): assert full_node_1.full_node.blockchain.get_peak().height == 
0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for block in bt.get_consecutive_blocks(30): - await full_node_1.full_node.add_block(block, peer) + await full_node_1.full_node.add_block(block, peer, fork_info=fork_info) assert full_node_1.full_node.blockchain.get_peak().height == 29 @@ -1018,7 +1019,7 @@ async def test_new_transaction_and_mempool(self, wallet_nodes, self_hostname, se block_list_input=blocks[:-1], guarantee_transaction_block=True, ) - await add_blocks_in_batches(blocks[-2:], full_node_1.full_node, blocks[-2].prev_header_hash) + await add_blocks_in_batches(blocks[-2:], full_node_1.full_node) # Can now resubmit a transaction after the reorg status, err = await full_node_1.full_node.add_transaction( successful_bundle, successful_bundle.name(), peer, test=True @@ -2602,13 +2603,13 @@ def check_nodes_in_sync(): assert chain_b[-1].total_iters < chain_a[-1].total_iters - await add_blocks_in_batches(chain_a[-1:], full_node_1.full_node, chain[-1].header_hash) + await add_blocks_in_batches(chain_a[-1:], full_node_1.full_node) await time_out_assert(10, check_nodes_in_sync) await validate_coin_set(full_node_1.full_node.blockchain.coin_store, chain_a) await validate_coin_set(full_node_2.full_node.blockchain.coin_store, chain_a) - await add_blocks_in_batches(chain_b[-1:], full_node_1.full_node, chain[-1].header_hash) + await add_blocks_in_batches(chain_b[-1:], full_node_1.full_node) # make sure node 1 reorged onto chain B assert full_node_1.full_node.blockchain.get_peak().header_hash == chain_b[-1].header_hash @@ -2648,7 +2649,7 @@ def check_nodes_in_sync(): all_coins.append(coin) spend_bundle = wallet_a.generate_signed_transaction(uint64(1_000), receiver_puzzlehash, all_coins.pop()) - await add_blocks_in_batches(chain[-4:], full_node_1.full_node, chain[-5].header_hash) + await add_blocks_in_batches(chain[-4:], full_node_1.full_node) await time_out_assert(10, check_nodes_in_sync) await validate_coin_set(full_node_1.full_node.blockchain.coin_store, 
chain) await validate_coin_set(full_node_2.full_node.blockchain.coin_store, chain) @@ -2665,8 +2666,7 @@ async def test_eviction_from_bls_cache(one_node_one_block: tuple[FullNodeSimulat blocks = bt.get_consecutive_blocks( 3, guarantee_transaction_block=True, farmer_reward_puzzle_hash=bt.pool_ph, pool_reward_puzzle_hash=bt.pool_ph ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) wt = bt.get_pool_wallet_tool() reward_coins = blocks[-1].get_included_reward_coins() # Setup a test block with two pk msg pairs diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index 7c1580d9d70e..ebf260357db8 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -43,6 +43,7 @@ from chia.server.outbound_message import Message from chia.server.server import ChiaServer from chia.server.ws_connection import WSChiaConnection +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import BlockTools, test_constants from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import FarmNewBlockProtocol @@ -373,8 +374,7 @@ async def next_block(full_node_1: FullNodeSimulator, wallet_a: WalletTool, bt: B time_per_block=10, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 1) return blocks[-1].get_included_reward_coins()[0] @@ -569,8 +569,7 @@ async def test_double_spend( ) peer = await connect_and_get_peer(server_1, server_2, self_hostname) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) spend_bundle1 = 
generate_test_spend_bundle(wallet_a, blocks[-1].get_included_reward_coins()[0]) @@ -615,8 +614,7 @@ async def test_double_spend_with_higher_fee( ) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) coins = iter(blocks[-1].get_included_reward_coins()) @@ -698,8 +696,7 @@ async def test_invalid_signature( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) coins = iter(blocks[-1].get_included_reward_coins()) @@ -743,8 +740,7 @@ async def condition_tester( else: raise Exception("dummy peer not found") - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + num_blocks) @@ -786,8 +782,7 @@ async def condition_tester2( else: raise Exception("dummy peer not found") - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) @@ -1740,8 +1735,7 @@ async def test_stealing_fee( peer = await connect_and_get_peer(server_1, server_2, bt.config["self_hostname"]) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 5) @@ -1799,8 +1793,7 @@ async def test_double_spend_same_bundle( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + 
await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) # coin = blocks[-1].get_included_reward_coins()[0] @@ -1848,8 +1841,7 @@ async def test_agg_sig_condition( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) @@ -2854,8 +2846,7 @@ async def test_invalid_coin_spend_coin( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, blocks[-1].height) diff --git a/chia/_tests/pools/test_pool_rpc.py b/chia/_tests/pools/test_pool_rpc.py index e8be4bf8f71c..dd30f7d3a0cf 100644 --- a/chia/_tests/pools/test_pool_rpc.py +++ b/chia/_tests/pools/test_pool_rpc.py @@ -22,6 +22,7 @@ from chia.pools.pool_puzzles import SINGLETON_LAUNCHER_HASH from chia.pools.pool_wallet_info import PoolSingletonState, PoolWalletInfo from chia.rpc.wallet_rpc_client import WalletRpcClient +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import BlockTools, get_plot_dir from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import ReorgProtocol @@ -432,8 +433,7 @@ async def test_absorb_self( guarantee_transaction_block=True, ) - for block in blocks[-3:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-3:], full_node_api.full_node) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) bal = await client.get_wallet_balance(2) @@ -532,8 +532,7 @@ async def test_absorb_self_multiple_coins( ) block_count = 3 - for block in blocks[-block_count:]: - await 
full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-block_count:], full_node_api.full_node) await full_node_api.farm_blocks_to_puzzlehash(count=1, guarantee_transaction_blocks=True) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) @@ -604,8 +603,7 @@ async def farming_to_pool() -> bool: ) block_count = 3 - for block in blocks[-block_count:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-block_count:], full_node_api.full_node) await full_node_api.farm_blocks_to_puzzlehash(count=1, guarantee_transaction_blocks=True) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) # Pooled plots don't have balance @@ -664,8 +662,7 @@ async def status_updated() -> bool: block_list_input=blocks, guarantee_transaction_block=True, ) - for block in blocks[-2:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-2:], full_node_api.full_node) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) # Absorb the farmed reward @@ -1007,8 +1004,7 @@ async def status_is_leaving_no_blocks() -> bool: transaction_data=next(tx.spend_bundle for tx in join_pool_txs if tx.spend_bundle is not None), ) - for block in more_blocks[-3:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(more_blocks[-3:], full_node_api.full_node) await time_out_assert(timeout=WAIT_SECS, function=status_is_leaving_no_blocks) diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index 8e16c7d71831..10c09aa7d4f0 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -188,7 +188,7 @@ async def test_basic_sync_wallet( blocks_reorg = bt.get_consecutive_blocks(num_blocks - 1, block_list_input=default_400_blocks[:-5]) blocks_reorg = bt.get_consecutive_blocks(1, blocks_reorg, guarantee_transaction_block=True, 
current_time=True) - await add_blocks_in_batches(blocks_reorg[1:], full_node, blocks_reorg[0].header_hash) + await add_blocks_in_batches(blocks_reorg[1:], full_node) for wallet_node, wallet_server in wallets: await time_out_assert( @@ -245,9 +245,7 @@ async def test_almost_recent( blockchain_constants.WEIGHT_PROOF_RECENT_BLOCKS + 10, block_list_input=all_blocks ) - await add_blocks_in_batches( - new_blocks[base_num_blocks + 20 :], full_node, new_blocks[base_num_blocks + 19].header_hash - ) + await add_blocks_in_batches(new_blocks[base_num_blocks + 20 :], full_node) for wallet_node, wallet_server in wallets: wallet = wallet_node.wallet_state_manager.main_wallet @@ -434,7 +432,7 @@ async def test_wallet_reorg_sync( num_blocks = 30 blocks_reorg = bt.get_consecutive_blocks(num_blocks, block_list_input=default_400_blocks[:-5]) - await add_blocks_in_batches(blocks_reorg[-30:], full_node, blocks_reorg[-30].prev_header_hash) + await add_blocks_in_batches(blocks_reorg[-30:], full_node) for wallet_node, wallet_server in wallets: wallet = wallet_node.wallet_state_manager.main_wallet diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index f989bfe96344..71e5d49c58a9 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -337,6 +337,21 @@ async def add_block( header_hash: bytes32 = block.header_hash + # passing in correct fork_info is critical for performing reorgs + # correctly, so we perform some validation of it here + assert block.height - 1 == fork_info.peak_height + assert len(fork_info.block_hashes) == fork_info.peak_height - fork_info.fork_height + if fork_info.peak_height == fork_info.fork_height: + # if fork_info is saying we're not on a fork, the previous block better + # be part of the main chain + assert block.prev_header_hash == fork_info.peak_hash + if fork_info.fork_height == -1: + assert fork_info.peak_hash == self.constants.GENESIS_CHALLENGE + else: + assert self.height_to_hash(uint32(fork_info.fork_height)) == 
block.prev_header_hash + else: + assert fork_info.peak_hash == block.prev_header_hash + if extending_main_chain: fork_info.reset(block.height - 1, block.prev_header_hash) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 3cc37b37c22d..434fd7524e94 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -2006,6 +2006,8 @@ async def add_block( # Adds the block to seen, and check if it's seen before (which means header is in memory) header_hash = block.header_hash if self.blockchain.contains_block(header_hash): + if fork_info is not None: + await self.blockchain.run_single_block(block, fork_info) return None pre_validation_result: Optional[PreValidationResult] = None @@ -2078,6 +2080,8 @@ async def add_block( ): # After acquiring the lock, check again, because another asyncio thread might have added it if self.blockchain.contains_block(header_hash): + if fork_info is not None: + await self.blockchain.run_single_block(block, fork_info) return None validation_start = time.monotonic() # Tries to add the block to the blockchain, if we already validated transactions, don't do it again diff --git a/chia/simulator/add_blocks_in_batches.py b/chia/simulator/add_blocks_in_batches.py index 712fe9d804d3..dc0a1910060b 100644 --- a/chia/simulator/add_blocks_in_batches.py +++ b/chia/simulator/add_blocks_in_batches.py @@ -5,7 +5,6 @@ from chia.consensus.block_body_validation import ForkInfo from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty from chia.full_node.full_node import FullNode, PeakPostProcessingResult -from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState @@ -16,21 +15,23 @@ async def add_blocks_in_batches( blocks: list[FullBlock], full_node: FullNode, - header_hash: Optional[bytes32] = None, ) -> None: - if header_hash is None: + peak_hash = 
blocks[0].prev_header_hash + if blocks[0].height == 0: + assert peak_hash == full_node.constants.GENESIS_CHALLENGE diff = full_node.constants.DIFFICULTY_STARTING ssi = full_node.constants.SUB_SLOT_ITERS_STARTING fork_height = -1 - fork_info = ForkInfo(-1, fork_height, full_node.constants.GENESIS_CHALLENGE) else: - block_record = await full_node.blockchain.get_block_record_from_db(header_hash) + # assume the fork point is immediately before the + # batch of blocks we're about to add + block_record = await full_node.blockchain.get_block_record_from_db(peak_hash) assert block_record is not None ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) fork_height = block_record.height - fork_info = ForkInfo(block_record.height, fork_height, block_record.header_hash) + fork_info = ForkInfo(fork_height, blocks[0].height - 1, peak_hash) vs = ValidationState(ssi, diff, None) diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index a3334fab3e3a..17cec22dfd5a 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -300,7 +300,7 @@ async def reorg_from_index_to_new_index(self, request: ReorgProtocol): guarantee_transaction_block=True, seed=seed, ) - await add_blocks_in_batches(more_blocks, self.full_node, current_blocks[old_index].header_hash) + await add_blocks_in_batches(more_blocks[old_index + 1 :], self.full_node) async def farm_blocks_to_puzzlehash( self,