Optimize deepcopy use
fchirica authored Nov 28, 2024
1 parent 8b659b6 commit c8a9574
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions chia/data_layer/data_store.py
@@ -318,13 +318,13 @@ async def migrate_db(self, server_files_location: Path) -> None:
                 log.error(f"Cannot recover data from {filename}: {e}")
                 break
 
-    async def get_merkle_blob(self, root_hash: Optional[bytes32]) -> MerkleBlob:
+    async def get_merkle_blob(self, root_hash: Optional[bytes32], read_only: bool = False) -> MerkleBlob:
         if root_hash is None:
             return MerkleBlob(blob=bytearray())
 
         existing_blob = self.recent_merkle_blobs.get(root_hash)
         if existing_blob is not None:
-            return copy.deepcopy(existing_blob)
+            return existing_blob if read_only else copy.deepcopy(existing_blob)
 
         async with self.db_wrapper.reader() as reader:
             cursor = await reader.execute(
@@ -533,7 +533,7 @@ async def build_blob_from_nodes(
             root_hash = row["root_hash"]
             index = row["idx"]
 
-            other_merkle_blob = await self.get_merkle_blob(root_hash)
+            other_merkle_blob = await self.get_merkle_blob(root_hash, read_only=True)
             nodes = other_merkle_blob.get_nodes_with_indexes(index=index)
             index_to_hash = {index: bytes32(node.hash) for index, node in nodes}
             for _, node in nodes:
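
The change replaces an unconditional copy.deepcopy of cached merkle blobs with an opt-in read_only path that hands back the cached object directly, saving a copy for callers that only read from it. Below is a minimal sketch of the same caching pattern, using hypothetical names rather than the chia data_store API, and assuming the read-only caller never mutates the returned object:

```python
import copy
from typing import Optional


class BlobCache:
    """Minimal sketch (hypothetical names, not the chia API): a cache that
    hands out deep copies by default, but skips the copy for read-only use."""

    def __init__(self) -> None:
        self._recent: dict[str, bytearray] = {}

    def put(self, key: str, blob: bytearray) -> None:
        self._recent[key] = blob

    def get(self, key: str, read_only: bool = False) -> Optional[bytearray]:
        existing = self._recent.get(key)
        if existing is None:
            return None
        # read_only=True returns the cached object itself; the caller must not
        # mutate it, or every later reader of this cache entry sees the change.
        return existing if read_only else copy.deepcopy(existing)


cache = BlobCache()
cache.put("root", bytearray(1024))

snapshot = cache.get("root")              # independent deep copy, safe to mutate
view = cache.get("root", read_only=True)  # shared object, treat as immutable

# Identity checks (peeking at the private dict only to illustrate the point).
assert snapshot is not cache._recent["root"]
assert view is cache._recent["root"]
```

In this sketch the default path still deep-copies so callers can mutate their result safely, while read_only=True skips the copy and relies on the caller treating the shared blob as immutable; that is the trade-off the commit makes for build_blob_from_nodes, which only reads nodes from the fetched blob.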
