Skip to content

Commit

Permalink
add new configuration option to log the first 6 hex digits of coins (#…
Browse files Browse the repository at this point in the history
…18893)

Add a new configuration option to log the first 6 hex digits of all coins added, removed, and reorged (off by default)
  • Loading branch information
arvidn authored Dec 3, 2024
1 parent 9b7bb5b commit 98f7f88
Show file tree
Hide file tree
Showing 7 changed files with 65 additions and 5 deletions.
2 changes: 1 addition & 1 deletion chia/_tests/util/blockchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ async def create_blockchain(
async with DBWrapper2.managed(database=db_uri, uri=True, reader_count=1, db_version=db_version) as wrapper:
coin_store = await CoinStore.create(wrapper)
store = await BlockStore.create(wrapper)
bc1 = await Blockchain.create(coin_store, store, constants, Path("."), 2, single_threaded=True)
bc1 = await Blockchain.create(coin_store, store, constants, Path("."), 2, single_threaded=True, log_coins=True)
try:
assert bc1.get_peak() is None
yield bc1, wrapper
Expand Down
7 changes: 4 additions & 3 deletions chia/_tests/util/setup_nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ async def setup_two_nodes(
Setup and teardown of two full nodes, with blockchains and separate DBs.
"""

config_overrides = {"full_node.max_sync_wait": 0}
config_overrides = {"full_node.max_sync_wait": 0, "full_node.log_coins": True}
with TempKeyring(populate=True) as keychain1, TempKeyring(populate=True) as keychain2:
bt1 = await create_block_tools_async(
constants=consensus_constants, keychain=keychain1, config_overrides=config_overrides
Expand Down Expand Up @@ -121,7 +121,7 @@ async def setup_n_nodes(
"""
Setup and teardown of n full nodes, with blockchains and separate DBs.
"""
config_overrides = {"full_node.max_sync_wait": 0}
config_overrides = {"full_node.max_sync_wait": 0, "full_node.log_coins": True}
with ExitStack() as stack:
keychains = [stack.enter_context(TempKeyring(populate=True)) for _ in range(n)]
async with AsyncExitStack() as async_exit_stack:
Expand Down Expand Up @@ -246,6 +246,7 @@ async def setup_simulators_and_wallets_inner(
) -> AsyncIterator[tuple[list[BlockTools], list[SimulatorFullNodeService], list[WalletService]]]:
if config_overrides is not None and "full_node.max_sync_wait" not in config_overrides:
config_overrides["full_node.max_sync_wait"] = 0
config_overrides["full_node.log_coins"] = True
async with AsyncExitStack() as async_exit_stack:
bt_tools: list[BlockTools] = [
await create_block_tools_async(consensus_constants, keychain=keychain1, config_overrides=config_overrides)
Expand Down Expand Up @@ -360,7 +361,7 @@ async def setup_full_system_inner(
keychain2: Keychain,
shared_b_tools: BlockTools,
) -> AsyncIterator[FullSystem]:
config_overrides = {"full_node.max_sync_wait": 0}
config_overrides = {"full_node.max_sync_wait": 0, "full_node.log_coins": True}
if b_tools is None:
b_tools = await create_block_tools_async(
constants=consensus_constants, keychain=keychain1, config_overrides=config_overrides
Expand Down
2 changes: 1 addition & 1 deletion chia/_tests/wallet/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ async def ignore_block_validation(
if "standard_block_tools" in request.keywords:
return None

async def validate_block_body(*args: Any) -> Literal[None]:
async def validate_block_body(*args: Any, **kwargs: Any) -> Literal[None]:
return None

def create_wrapper(original_create: Any) -> Any:
Expand Down
8 changes: 8 additions & 0 deletions chia/consensus/block_body_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,6 +187,8 @@ async def validate_block_body(
height: uint32,
conds: Optional[SpendBundleConditions],
fork_info: ForkInfo,
*,
log_coins: bool = False,
) -> Optional[Err]:
"""
This assumes the header block has been completely validated.
Expand Down Expand Up @@ -474,6 +476,9 @@ async def validate_block_body(
else:
look_in_fork.append(unspent.name)

if log_coins and len(look_in_fork) > 0:
log.info("%d coins spent after fork", len(look_in_fork))

if len(unspent_records) != len(removals_from_db):
# some coins could not be found in the DB. We need to find out which
# ones and look for them in additions_since_fork
Expand All @@ -483,6 +488,9 @@ async def validate_block_body(
continue
look_in_fork.append(rem)

if log_coins and len(look_in_fork) > 0:
log.info("coins spent in fork: %s", ",".join([f"{name}"[0:6] for name in look_in_fork]))

for rem in look_in_fork:
# This coin is not in the current heaviest chain, so it must be in the fork
if rem not in fork_info.additions_since_fork:
Expand Down
44 changes: 44 additions & 0 deletions chia/consensus/blockchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,8 @@ class Blockchain:
priority_mutex: PriorityMutex[BlockchainMutexPriority]
compact_proof_lock: asyncio.Lock

_log_coins: bool

@staticmethod
async def create(
coin_store: CoinStore,
Expand All @@ -124,13 +126,15 @@ async def create(
reserved_cores: int,
*,
single_threaded: bool = False,
log_coins: bool = False,
) -> Blockchain:
"""
Initializes a blockchain with the BlockRecords from disk, assuming they have all been
validated. Uses the genesis block given in override_constants, or as a fallback,
in the consensus constants config.
"""
self = Blockchain()
self._log_coins = log_coins
# Blocks are validated under high priority, and transactions under low priority. This guarantees blocks will
# be validated first.
self.priority_mutex = PriorityMutex.create(priority_type=BlockchainMutexPriority)
Expand Down Expand Up @@ -364,6 +368,7 @@ async def add_block(
block.height,
pre_validation_result.conds,
fork_info,
log_coins=self._log_coins,
)
if error_code is not None:
return AddBlockResult.INVALID_BLOCK, error_code, None
Expand Down Expand Up @@ -474,10 +479,39 @@ async def _reconsider_peak(
if block_record.weight == peak.weight and peak.total_iters <= block_record.total_iters:
# this is an equal weight block but our peak has lower iterations, so we dont change the coin set
return [], None
if block_record.weight == peak.weight:
log.info(
f"block has equal weight as our peak ({peak.weight}), but fewer "
f"total iterations {block_record.total_iters} "
f"peak: {peak.total_iters} "
f"peak-hash: {peak.header_hash}"
)

if block_record.prev_hash != peak.header_hash:
for coin_record in await self.coin_store.rollback_to_block(fork_info.fork_height):
rolled_back_state[coin_record.name] = coin_record
if self._log_coins and len(rolled_back_state) > 0:
log.info(f"rolled back {len(rolled_back_state)} coins, to fork height {fork_info.fork_height}")
log.info(
"removed: %s",
",".join(
[
name.hex()[0:6]
for name, state in rolled_back_state.items()
if state.confirmed_block_index == 0
]
),
)
log.info(
"unspent: %s",
",".join(
[
name.hex()[0:6]
for name, state in rolled_back_state.items()
if state.confirmed_block_index != 0
]
),
)

# Collects all blocks from fork point to new peak
records_to_add: list[BlockRecord] = []
Expand Down Expand Up @@ -524,6 +558,15 @@ async def _reconsider_peak(
tx_additions,
tx_removals,
)
if self._log_coins and (len(tx_removals) > 0 or len(tx_additions) > 0):
log.info(
f"adding new block to coin_store "
f"(hh: {fetched_block_record.header_hash} "
f"height: {fetched_block_record.height}), {len(tx_removals)} spends"
)
log.info("rewards: %s", ",".join([add.name().hex()[0:6] for add in included_reward_coins]))
log.info("additions: %s", ",".join([add.name().hex()[0:6] for add in tx_additions]))
log.info("removals: %s", ",".join([f"{rem}"[0:6] for rem in tx_removals]))

# we made it to the end successfully
# Rollback sub_epoch_summaries
Expand Down Expand Up @@ -718,6 +761,7 @@ async def validate_unfinished_block(
uint32(prev_height + 1),
conds,
fork_info,
log_coins=self._log_coins,
)

if error_code is not None:
Expand Down
3 changes: 3 additions & 0 deletions chia/full_node/full_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,6 +258,7 @@ async def manage(self) -> AsyncIterator[None]:
start_time = time.monotonic()
reserved_cores = self.config.get("reserved_cores", 0)
single_threaded = self.config.get("single_threaded", False)
log_coins = self.config.get("log_coins", False)
multiprocessing_start_method = process_config_start_method(config=self.config, log=self.log)
self.multiprocessing_context = multiprocessing.get_context(method=multiprocessing_start_method)
self._blockchain = await Blockchain.create(
Expand All @@ -267,6 +268,7 @@ async def manage(self) -> AsyncIterator[None]:
blockchain_dir=self.db_path.parent,
reserved_cores=reserved_cores,
single_threaded=single_threaded,
log_coins=log_coins,
)

self._mempool_manager = MempoolManager(
Expand Down Expand Up @@ -1823,6 +1825,7 @@ async def peak_post_processing(
self.log.info(
f"🌱 Updated peak to height {record.height}, weight {record.weight}, "
f"hh {record.header_hash.hex()}, "
f"ph {record.prev_hash.hex()}, "
f"forked at {state_change_summary.fork_height}, rh: {record.reward_infusion_new_challenge.hex()}, "
f"total iters: {record.total_iters}, "
f"overflow: {record.overflow}, "
Expand Down
4 changes: 4 additions & 0 deletions chia/util/initial-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -361,6 +361,10 @@ full_node:
# profiled.
single_threaded: False

# When enabled, logs coin additions, removals, and reorgs at INFO level.
# The log level must also be set to INFO or DEBUG for these messages to appear.
log_coins: False

# How often to initiate outbound connections to other full nodes.
peer_connect_interval: 30
# How long to wait for a peer connection
Expand Down

0 comments on commit 98f7f88

Please sign in to comment.