diff --git a/chia/_tests/util/blockchain.py b/chia/_tests/util/blockchain.py
index e5683db6df85..68ee98999a84 100644
--- a/chia/_tests/util/blockchain.py
+++ b/chia/_tests/util/blockchain.py
@@ -25,7 +25,7 @@ async def create_blockchain(
     async with DBWrapper2.managed(database=db_uri, uri=True, reader_count=1, db_version=db_version) as wrapper:
         coin_store = await CoinStore.create(wrapper)
         store = await BlockStore.create(wrapper)
-        bc1 = await Blockchain.create(coin_store, store, constants, Path("."), 2, single_threaded=True)
+        bc1 = await Blockchain.create(coin_store, store, constants, Path("."), 2, single_threaded=True, log_coins=True)
         try:
             assert bc1.get_peak() is None
             yield bc1, wrapper
diff --git a/chia/_tests/util/setup_nodes.py b/chia/_tests/util/setup_nodes.py
index de4c2cf2bbad..fb371c5dfe8a 100644
--- a/chia/_tests/util/setup_nodes.py
+++ b/chia/_tests/util/setup_nodes.py
@@ -85,7 +85,7 @@ async def setup_two_nodes(
     Setup and teardown of two full nodes, with blockchains and separate DBs.
     """
 
-    config_overrides = {"full_node.max_sync_wait": 0}
+    config_overrides = {"full_node.max_sync_wait": 0, "full_node.log_coins": True}
     with TempKeyring(populate=True) as keychain1, TempKeyring(populate=True) as keychain2:
         bt1 = await create_block_tools_async(
             constants=consensus_constants, keychain=keychain1, config_overrides=config_overrides
@@ -121,7 +121,7 @@ async def setup_n_nodes(
     """
     Setup and teardown of n full nodes, with blockchains and separate DBs.
    """
-    config_overrides = {"full_node.max_sync_wait": 0}
+    config_overrides = {"full_node.max_sync_wait": 0, "full_node.log_coins": True}
     with ExitStack() as stack:
         keychains = [stack.enter_context(TempKeyring(populate=True)) for _ in range(n)]
         async with AsyncExitStack() as async_exit_stack:
@@ -246,6 +246,7 @@ async def setup_simulators_and_wallets_inner(
 ) -> AsyncIterator[tuple[list[BlockTools], list[SimulatorFullNodeService], list[WalletService]]]:
     if config_overrides is not None and "full_node.max_sync_wait" not in config_overrides:
         config_overrides["full_node.max_sync_wait"] = 0
+        config_overrides["full_node.log_coins"] = True
     async with AsyncExitStack() as async_exit_stack:
         bt_tools: list[BlockTools] = [
             await create_block_tools_async(consensus_constants, keychain=keychain1, config_overrides=config_overrides)
@@ -360,7 +361,7 @@ async def setup_full_system_inner(
     keychain2: Keychain,
     shared_b_tools: BlockTools,
 ) -> AsyncIterator[FullSystem]:
-    config_overrides = {"full_node.max_sync_wait": 0}
+    config_overrides = {"full_node.max_sync_wait": 0, "full_node.log_coins": True}
     if b_tools is None:
         b_tools = await create_block_tools_async(
             constants=consensus_constants, keychain=keychain1, config_overrides=config_overrides
diff --git a/chia/_tests/wallet/conftest.py b/chia/_tests/wallet/conftest.py
index 43e231a731b0..f0f44eb16f5b 100644
--- a/chia/_tests/wallet/conftest.py
+++ b/chia/_tests/wallet/conftest.py
@@ -65,7 +65,7 @@ async def ignore_block_validation(
     if "standard_block_tools" in request.keywords:
         return None
 
-    async def validate_block_body(*args: Any) -> Literal[None]:
+    async def validate_block_body(*args: Any, **kwargs: Any) -> Literal[None]:
         return None
 
     def create_wrapper(original_create: Any) -> Any:
diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py
index 151b37f1d532..b75c0bb70615 100644
--- a/chia/consensus/block_body_validation.py
+++ b/chia/consensus/block_body_validation.py
@@ -187,6 +187,8 @@ async def validate_block_body(
     height: uint32,
     conds: Optional[SpendBundleConditions],
     fork_info: ForkInfo,
+    *,
+    log_coins: bool = False,
 ) -> Optional[Err]:
     """
     This assumes the header block has been completely validated.
@@ -474,6 +476,9 @@ async def validate_block_body(
         else:
             look_in_fork.append(unspent.name)
 
+    if log_coins and len(look_in_fork) > 0:
+        log.info("%d coins spent after fork", len(look_in_fork))
+
     if len(unspent_records) != len(removals_from_db):
         # some coins could not be found in the DB. We need to find out which
         # ones and look for them in additions_since_fork
@@ -483,6 +488,9 @@ async def validate_block_body(
                 continue
             look_in_fork.append(rem)
 
+    if log_coins and len(look_in_fork) > 0:
+        log.info("coins spent in fork: %s", ",".join([f"{name}"[0:6] for name in look_in_fork]))
+
     for rem in look_in_fork:
         # This coin is not in the current heaviest chain, so it must be in the fork
         if rem not in fork_info.additions_since_fork:
diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py
index 1897311812ce..f989bfe96344 100644
--- a/chia/consensus/blockchain.py
+++ b/chia/consensus/blockchain.py
@@ -115,6 +115,8 @@ class Blockchain:
     priority_mutex: PriorityMutex[BlockchainMutexPriority]
     compact_proof_lock: asyncio.Lock
 
+    _log_coins: bool
+
     @staticmethod
     async def create(
         coin_store: CoinStore,
@@ -124,6 +126,7 @@ async def create(
         reserved_cores: int,
         *,
         single_threaded: bool = False,
+        log_coins: bool = False,
     ) -> Blockchain:
         """
         Initializes a blockchain with the BlockRecords from disk, assuming they have all been
@@ -131,6 +134,7 @@ async def create(
         in the consensus constants config.
         """
         self = Blockchain()
+        self._log_coins = log_coins
         # Blocks are validated under high priority, and transactions under low priority. This guarantees blocks will
         # be validated first.
         self.priority_mutex = PriorityMutex.create(priority_type=BlockchainMutexPriority)
@@ -364,6 +368,7 @@ async def add_block(
                 block.height,
                 pre_validation_result.conds,
                 fork_info,
+                log_coins=self._log_coins,
             )
             if error_code is not None:
                 return AddBlockResult.INVALID_BLOCK, error_code, None
@@ -474,10 +479,39 @@ async def _reconsider_peak(
             if block_record.weight == peak.weight and peak.total_iters <= block_record.total_iters:
                 # this is an equal weight block but our peak has lower iterations, so we dont change the coin set
                 return [], None
+            if block_record.weight == peak.weight:
+                log.info(
+                    f"block has equal weight as our peak ({peak.weight}), but fewer "
+                    f"total iterations {block_record.total_iters} "
+                    f"peak: {peak.total_iters} "
+                    f"peak-hash: {peak.header_hash}"
+                )
 
             if block_record.prev_hash != peak.header_hash:
                 for coin_record in await self.coin_store.rollback_to_block(fork_info.fork_height):
                     rolled_back_state[coin_record.name] = coin_record
+                if self._log_coins and len(rolled_back_state) > 0:
+                    log.info(f"rolled back {len(rolled_back_state)} coins, to fork height {fork_info.fork_height}")
+                    log.info(
+                        "removed: %s",
+                        ",".join(
+                            [
+                                name.hex()[0:6]
+                                for name, state in rolled_back_state.items()
+                                if state.confirmed_block_index == 0
+                            ]
+                        ),
+                    )
+                    log.info(
+                        "unspent: %s",
+                        ",".join(
+                            [
+                                name.hex()[0:6]
+                                for name, state in rolled_back_state.items()
+                                if state.confirmed_block_index != 0
+                            ]
+                        ),
+                    )
 
         # Collects all blocks from fork point to new peak
         records_to_add: list[BlockRecord] = []
@@ -524,6 +558,15 @@ async def _reconsider_peak(
                     tx_additions,
                     tx_removals,
                 )
+                if self._log_coins and (len(tx_removals) > 0 or len(tx_additions) > 0):
+                    log.info(
+                        f"adding new block to coin_store "
+                        f"(hh: {fetched_block_record.header_hash} "
+                        f"height: {fetched_block_record.height}), {len(tx_removals)} spends"
+                    )
+                    log.info("rewards: %s", ",".join([add.name().hex()[0:6] for add in included_reward_coins]))
+                    log.info("additions: %s", ",".join([add.name().hex()[0:6] for add in tx_additions]))
+                    log.info("removals: %s", ",".join([f"{rem}"[0:6] for rem in tx_removals]))
 
         # we made it to the end successfully
         # Rollback sub_epoch_summaries
@@ -718,6 +761,7 @@ async def validate_unfinished_block(
             uint32(prev_height + 1),
             conds,
             fork_info,
+            log_coins=self._log_coins,
         )
 
         if error_code is not None:
diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py
index e0e36ad25914..fe2927895268 100644
--- a/chia/full_node/full_node.py
+++ b/chia/full_node/full_node.py
@@ -258,6 +258,7 @@ async def manage(self) -> AsyncIterator[None]:
         start_time = time.monotonic()
         reserved_cores = self.config.get("reserved_cores", 0)
         single_threaded = self.config.get("single_threaded", False)
+        log_coins = self.config.get("log_coins", False)
         multiprocessing_start_method = process_config_start_method(config=self.config, log=self.log)
         self.multiprocessing_context = multiprocessing.get_context(method=multiprocessing_start_method)
         self._blockchain = await Blockchain.create(
@@ -267,6 +268,7 @@ async def manage(self) -> AsyncIterator[None]:
             blockchain_dir=self.db_path.parent,
             reserved_cores=reserved_cores,
             single_threaded=single_threaded,
+            log_coins=log_coins,
         )
 
         self._mempool_manager = MempoolManager(
@@ -1823,6 +1825,7 @@ async def peak_post_processing(
         self.log.info(
             f"🌱 Updated peak to height {record.height}, weight {record.weight}, "
             f"hh {record.header_hash.hex()}, "
+            f"ph {record.prev_hash.hex()}, "
             f"forked at {state_change_summary.fork_height}, rh: {record.reward_infusion_new_challenge.hex()}, "
             f"total iters: {record.total_iters}, "
             f"overflow: {record.overflow}, "
diff --git a/chia/util/initial-config.yaml b/chia/util/initial-config.yaml
index 335f1be4279b..d3208f1dcd90 100644
--- a/chia/util/initial-config.yaml
+++ b/chia/util/initial-config.yaml
@@ -361,6 +361,10 @@ full_node:
   # profiled.
   single_threaded: False
 
+  # when enabled, logs coin additions, removals and reorgs at INFO level.
+  # Requires the log level to be INFO or DEBUG as well.
+  log_coins: False
+
   # How often to initiate outbound connections to other full nodes.
   peer_connect_interval: 30
   # How long to wait for a peer connection
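
Note on the logging convention: the patch threads a single `log_coins: bool` from the `full_node` section of initial-config.yaml through FullNode.manage() into Blockchain.create() and validate_block_body(), defaulting to False everywhere so production nodes are unaffected. Most of the new log lines share one shape: guard on the flag and on a non-empty coin set before doing any formatting work, pass arguments lazily via %-style formatting, and truncate each 32-byte coin id to its first 6 hex characters to keep reorg logs readable. The sketch below is a minimal standalone illustration of that pattern; log_coin_set is a hypothetical helper for this note, not part of the patch.

import logging

log = logging.getLogger(__name__)

def log_coin_set(label: str, coin_ids: list[bytes], *, log_coins: bool) -> None:
    # Same guard shape as the diff's `if self._log_coins and len(...) > 0`:
    # when the flag is off (the default), no hex/join work happens at all.
    if not log_coins or len(coin_ids) == 0:
        return
    # First 6 hex characters of each id, comma-separated, matching the
    # name.hex()[0:6] convention in the diff. %-style formatting defers
    # string assembly to the logging framework.
    log.info("%s: %s", label, ",".join(cid.hex()[0:6] for cid in coin_ids))

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    log_coin_set("removals", [bytes(32), bytes([0xAB] * 32)], log_coins=True)

The diff spells the truncation two ways, name.hex()[0:6] and f"{name}"[0:6]; these appear interchangeable on the assumption that bytes32 stringifies to its hex digest. As the new config comment notes, setting log_coins: True only produces output when the node's log level is also INFO or DEBUG.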