Block hash or meta hash in transition mapping? #18

Merged: 3 commits, Jul 5, 2024
24 changes: 10 additions & 14 deletions packages/protocol/contracts/L1/BasedOperator.sol
@@ -108,13 +108,13 @@
}

VerifierRegistry verifierRegistry = VerifierRegistry(resolve("verifier_registry", false));
TaikoL1 taiko = TaikoL1(resolve("taiko", false));
TaikoL1 taiko = TaikoL1(resolve("taiko", false));
// Verify the proofs
uint160 prevVerifier = uint160(0);
for (uint256 i = 0; i < proofBatch.proofs.length; i++) {
IVerifier verifier = proofBatch.proofs[i].verifier;
// Make sure each verifier is unique
if(prevVerifier >= uint160(address(verifier))) {
if (prevVerifier >= uint160(address(verifier))) {
revert L1_INVALID_OR_DUPLICATE_VERIFIER();
}
// Make sure it's a valid verifier
@@ -137,21 +137,17 @@
// invalid
// Get the currently stored transition
TaikoData.TransitionState memory storedTransition = taiko.getTransition(
proofBatch.blockMetadata.l2BlockNumber, proofBatch.transition.parentHash
proofBatch.blockMetadata.l2BlockNumber, proofBatch.transition.parentBlockHash
);

console2.log("What is stored:");
console2.logBytes32(storedTransition.blockHash);

console2.log("What we are trying to prove:");
console2.logBytes32(proofBatch.transition.blockHash);

// Brecht: SO we set the blockHash in proposeBlock().
// But we need to prove it too (the same one), so somehow we need to check if this is proven already and IF NOT, then revert with "block already proven", no ? So i set the verifiableAfter in propseBlock to 0, because this is just a "proposed state".
if (storedTransition.isProven == true && storedTransition.blockHash == proofBatch.transition.blockHash) {
// Somehow we need to check if this is proven already and IF YES and transition is trying to
// prove the same, then revert with "block already proven".
if (
storedTransition.isProven == true
&& storedTransition.blockHash == proofBatch.transition.blockHash
) {
revert("block already proven");
}
else {
} else {
// TODO(Brecht): Check that one of the verifiers is now poisoned
}

@@ -181,7 +177,7 @@
) {
Block storage blk = blocks[blockId];

// TODO(Brecht): Verify that all the verifiers used to prove the block are still valid

// Find out who the prover is
TaikoData.Block memory previousBlock = taiko.getBlock(uint64(blockId) - 1);
@@ -197,7 +193,7 @@
}
}

// Additional proposer rules
function _isProposerPermitted(TaikoData.BlockMetadata memory _block) private returns (bool) {
if (_block.l2BlockNumber == 1) {
// Only proposer_one can propose the first block after genesis
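Side note on the duplicate-verifier guard in the hunk above: it works because the proof batch has to be submitted in ascending verifier-address order, so a single strictly increasing cursor rejects duplicates (and unsorted input) in one pass with no extra storage. A minimal standalone sketch of that pattern, with purely illustrative names that are not part of this PR:

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Illustrative only: restates the "sorted addresses => unique" check used in
/// BasedOperator.proveBlock(). Contract and function names are hypothetical.
contract UniqueVerifierCheckSketch {
    error L1_INVALID_OR_DUPLICATE_VERIFIER();

    /// Reverts unless `verifiers` is sorted in strictly ascending address order,
    /// which also guarantees that every entry is unique.
    function checkUniqueVerifiers(address[] calldata verifiers) external pure {
        uint160 prev = 0;
        for (uint256 i = 0; i < verifiers.length; i++) {
            uint160 current = uint160(verifiers[i]);
            // Equal to or smaller than the previous entry means a duplicate
            // or an unsorted batch; both are rejected.
            if (prev >= current) revert L1_INVALID_OR_DUPLICATE_VERIFIER();
            prev = current;
        }
    }
}
```

As in the hunk above, this also rejects address(0) as a verifier, since the cursor starts at zero.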
5 changes: 3 additions & 2 deletions packages/protocol/contracts/L1/TaikoData.sol
@@ -23,6 +23,7 @@ library TaikoData {
/// @dev Struct containing data only required for proving a block
struct BlockMetadata {
bytes32 blockHash;
bytes32 parentBlockHash;
bytes32 parentMetaHash;
bytes32 l1Hash;
uint256 difficulty;
@@ -40,13 +41,13 @@

/// @dev Struct representing transition to be proven.
struct Transition {
bytes32 parentHash;
bytes32 parentBlockHash;
bytes32 blockHash;
}

/// @dev Struct representing state transition data.
struct TransitionState {
bytes32 blockHash;
bytes32 blockHash; // Might be removed.
uint64 timestamp;
address prover;
uint64 verifiableAfter;
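The parentHash to parentBlockHash rename lines up with how transitions are keyed elsewhere in this PR: TaikoL1 indexes state.transitions by block id and by the parent's block hash, not by the parent meta hash. A reduced sketch of that layout, assuming a trimmed-down state struct (the real TaikoData.State carries more fields):

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Reduced sketch of the transition bookkeeping assumed by this PR; only the
/// lookup path is shown and the field set is trimmed.
library TransitionMappingSketch {
    struct TransitionState {
        bytes32 blockHash;
        uint64 verifiableAfter;
        bool isProven;
    }

    struct State {
        // blockId => parent block hash => transition claimed on top of that parent
        mapping(uint256 => mapping(bytes32 => TransitionState)) transitions;
    }

    /// Same lookup shape as TaikoL1.getTransition(): the key is the hash of the
    /// parent L2 block, so there is one stored transition per (block id, parent
    /// block hash) pair.
    function getTransition(
        State storage state,
        uint256 blockId,
        bytes32 parentBlockHash
    )
        internal
        view
        returns (TransitionState storage)
    {
        return state.transitions[blockId][parentBlockHash];
    }
}
```

Keyed this way, a proof is looked up purely by the parent block hash it builds on, which is what lets BasedOperator detect the "block already proven" case above.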
1 change: 1 addition & 0 deletions packages/protocol/contracts/L1/TaikoErrors.sol
@@ -7,7 +7,7 @@
pragma solidity ^0.8.20;

/// @title TaikoErrors
/// @notice This abstract contract provides custom error declarations used in
/// the Taiko protocol. Each error corresponds to specific situations where
/// exceptions might be thrown.
abstract contract TaikoErrors {
@@ -20,6 +20,7 @@
error L1_BLOB_NOT_FOUND();
error L1_BLOB_NOT_REUSEABLE();
error L1_BLOCK_MISMATCH();
error L1_INCORRECT_BLOCK();
error L1_INSUFFICIENT_TOKEN();
error L1_INVALID_ADDRESS();
error L1_INVALID_AMOUNT();
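The new L1_INCORRECT_BLOCK() entry backs the proveBlock() change further down, where a string-based require (whose condition was also inverted) becomes a custom-error revert. A minimal sketch of the declare-and-revert pattern, with a hypothetical helper standing in for the real meta-hash check:

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Minimal sketch of the custom-error pattern introduced in this PR.
/// The helper below is illustrative, not the actual TaikoL1 code.
contract IncorrectBlockErrorSketch {
    error L1_INCORRECT_BLOCK();

    /// Reverts with the custom error when the supplied encoded block does not
    /// hash to the stored meta hash.
    function _checkMetaHash(bytes32 storedMetaHash, bytes memory encodedBlock) internal pure {
        if (storedMetaHash != keccak256(encodedBlock)) {
            revert L1_INCORRECT_BLOCK();
        }
    }
}
```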
43 changes: 25 additions & 18 deletions packages/protocol/contracts/L1/TaikoL1.sol
@@ -9,7 +9,6 @@ pragma solidity ^0.8.20;
import "../common/EssentialContract.sol";
import "./TaikoErrors.sol";
import "./TaikoEvents.sol";
import "forge-std/console2.sol";

/// @title TaikoL1
contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors {
@@ -58,7 +57,7 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors {
payable
nonReentrant
whenNotPaused
onlyFromNamed("based_operator")
onlyFromNamed("operator")
returns (TaikoData.BlockMetadata memory _block)
{
TaikoData.Config memory config = getConfig();
@@ -69,6 +68,7 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors {
// Verify L1 data
// TODO(Brecht): needs to be more configurable for preconfirmations
require(_block.l1Hash == blockhash(_block.l1StateBlockNumber), "INVALID_L1_BLOCKHASH");
require(_block.blockHash != 0x0, "INVALID_L2_BLOCKHASH");
require(_block.difficulty == block.prevrandao, "INVALID_DIFFICULTY");
// Verify misc data
require(_block.gasLimit == config.blockMaxGasLimit, "INVALID_GAS_LIMIT");
@@ -95,7 +95,7 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors {

TaikoData.Block storage parentBlock = state.blocks[(state.numBlocks - 1)];

require(_block.parentMetaHash == parentBlock.blockHash, "invalid parentMetaHash");
require(_block.parentMetaHash == parentBlock.metaHash, "invalid parentHash");

// Verify the passed in L1 state block number.
// We only allow the L1 block to be 4 epochs old.
@@ -132,11 +132,10 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors {
// Store the block
state.blocks[state.numBlocks] = blk;

// Store the passed in block hash as in
state.transitions[blk.blockId][_block.parentMetaHash].blockHash = _block.blockHash;
// For now it does not matter - we are not going to prove anyways
state.transitions[blk.blockId][_block.parentMetaHash].verifiableAfter =
uint64(block.timestamp) + 365 days;
// Store the passed in block hash as is
state.transitions[blk.blockId][_block.parentBlockHash].blockHash = _block.blockHash;
// Big enough number so that we are sure we don't hit that deadline in the future.
state.transitions[blk.blockId][_block.parentBlockHash].verifiableAfter = type(uint64).max;

// Increment the counter (cursor) by 1.
state.numBlocks++;
@@ -156,7 +155,7 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors {
external
nonReentrant
whenNotPaused
onlyFromNamed("based_operator")
onlyFromNamed("operator")
{
// Check that the block has been proposed but has not yet been verified.
if (
@@ -169,14 +168,17 @@
TaikoData.Block storage blk = state.blocks[_block.l2BlockNumber];

// Make sure the correct block was proven
require(blk.metaHash != keccak256(abi.encode(_block)), "incorrect block");
if (blk.metaHash != keccak256(abi.encode(_block))) {
revert L1_INCORRECT_BLOCK();
}

// Store the transition
TaikoData.TransitionState storage storedTransition =
state.transitions[_block.l2BlockNumber][transition.parentHash];
state.transitions[_block.l2BlockNumber][transition.parentBlockHash];
storedTransition.blockHash = transition.blockHash;
storedTransition.prover = prover;
storedTransition.verifiableAfter = uint32(block.timestamp + SECURITY_DELAY_AFTER_PROVEN);
storedTransition.isProven = true;

emit TransitionProved({ blockId: _block.l2BlockNumber, tran: transition, prover: prover });
}
@@ -187,26 +189,27 @@
external
nonReentrant
whenNotPaused
onlyFromNamed("based_operator")
onlyFromNamed("operator")
{
// Get the last verified blockhash
TaikoData.Block storage blk = state.blocks[state.lastVerifiedBlockId];
bytes32 blockHash = blk.blockHash;
// Go to the first unverified block
uint256 blockId = uint256(state.lastVerifiedBlockId) + 1;
uint256 numBlocksVerified;

while (blockId < state.numBlocks && numBlocksVerified < maxBlocksToVerify) {
blk = state.blocks[blockId];

// Check if the parent block hash matches the actual block hash of the parent
// Check if the timestamp is older than required
if (
state.transitions[blockId][blockHash].blockHash == bytes32(0)
|| block.timestamp < state.transitions[blockId][blockHash].verifiableAfter
block
// Genesis is already verified with initialization so if we do not allow to set
// blockHash = bytes32(0), then we can remove the bytes32(0) check.
/*state.transitions[blockId][blockHash].blockHash == bytes32(0)
|| */
.timestamp < state.transitions[blockId][blockHash].verifiableAfter
) {
break;
}

// Copy the blockhash to the block
blk.blockHash = state.transitions[blockId][blockHash].blockHash;
// Update latest block hash
@@ -258,6 +261,10 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors {
return uint256(state.lastVerifiedBlockId);
}

function getNumOfBlocks() public view returns (uint256) {
return uint256(state.numBlocks);
}

/// @notice Gets the configuration of the TaikoL1 contract.
/// @return Config struct containing configuration parameters.
function getConfig() public view virtual returns (TaikoData.Config memory) {
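Taken together, the proposeBlock()/proveBlock()/verifyBlocks() hunks above give each transition a simple lifecycle: proposing seeds the slot with the claimed block hash but sets verifiableAfter to type(uint64).max so an unproven transition can never be verified, proving overwrites the slot and starts the SECURITY_DELAY_AFTER_PROVEN countdown, and verification only advances once that delay has elapsed. A compressed sketch of just that lifecycle, with access control, fees, and metadata checks omitted and the delay value assumed rather than taken from this PR:

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Compressed sketch of the propose/prove/verify lifecycle in this PR.
/// Access control, bonds/fees, and metadata validation are intentionally omitted.
contract TransitionLifecycleSketch {
    uint256 public constant SECURITY_DELAY_AFTER_PROVEN = 8 hours; // assumed value

    struct TransitionState {
        bytes32 blockHash;
        uint64 verifiableAfter;
        bool isProven;
    }

    // blockId => parent block hash => transition
    mapping(uint256 => mapping(bytes32 => TransitionState)) public transitions;

    /// proposeBlock(): store the claimed block hash, but push verifiableAfter to
    /// the far future so the transition cannot be verified while unproven.
    function onPropose(uint256 blockId, bytes32 parentBlockHash, bytes32 blockHash) external {
        transitions[blockId][parentBlockHash].blockHash = blockHash;
        transitions[blockId][parentBlockHash].verifiableAfter = type(uint64).max;
    }

    /// proveBlock(): overwrite the stored transition and start the security delay.
    function onProve(uint256 blockId, bytes32 parentBlockHash, bytes32 blockHash) external {
        TransitionState storage t = transitions[blockId][parentBlockHash];
        t.blockHash = blockHash;
        t.verifiableAfter = uint64(block.timestamp + SECURITY_DELAY_AFTER_PROVEN);
        t.isProven = true;
    }

    /// verifyBlocks(): a transition only counts once its delay has elapsed.
    function isVerifiable(uint256 blockId, bytes32 parentBlockHash) public view returns (bool) {
        return block.timestamp >= transitions[blockId][parentBlockHash].verifiableAfter;
    }
}
```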
10 changes: 8 additions & 2 deletions packages/protocol/contracts/L1/VerifierBattleRoyale.sol
@@ -84,7 +84,10 @@ contract VerifierBattleRoyale is EssentialContract {

TaikoData.Transition memory transitionA = proofBatch.proofs[0].transition;
TaikoData.Transition memory transitionB = proofBatch.proofs[1].transition;
require(transitionA.parentHash == transitionB.parentHash, "parentHash not the same");
require(
transitionA.parentBlockHash == transitionB.parentBlockHash,
"parentHash not the same"
);
require(transitionA.blockHash != transitionB.blockHash, "blockhash the same");
} else if (proofBatch.proofs.length == 3) {
/* Multiple verifiers in a consensus show that another verifier is faulty */
@@ -105,7 +108,10 @@
for (uint256 i = 0; i < proofBatch.proofs.length - 1; i++) {
TaikoData.Transition memory transitionA = proofBatch.proofs[i].transition;
TaikoData.Transition memory transitionB = proofBatch.proofs[i + 1].transition;
require(transitionA.parentHash == transitionB.parentHash, "parentHash not the same");
require(
transitionA.parentBlockHash == transitionB.parentBlockHash,
"parentHash not the same"
);
if (i < proofBatch.proofs.length - 2) {
require(transitionA.blockHash == transitionB.blockHash, "blockhash the same");
} else {
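Both battle-royale branches above reduce to the same pairwise rule: every adjacent pair of transitions must share a parentBlockHash, the first n-1 transitions must agree on the resulting blockHash, and only the last one may disagree (that disagreement is what singles out the faulty verifier). A simplified restatement that covers the two-proof and multi-proof cases alike, with the proof/verifier plumbing stripped out:

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Simplified restatement of the pairwise consensus check in VerifierBattleRoyale;
/// struct and names are trimmed for illustration.
contract ConsensusCheckSketch {
    struct Transition {
        bytes32 parentBlockHash;
        bytes32 blockHash;
    }

    /// Requires all transitions to build on the same parent, the first n-1 of
    /// them to agree on the block hash, and the last one to differ from the rest.
    function checkMajorityAgainstLast(Transition[] calldata transitions) external pure {
        require(transitions.length >= 2, "need at least two transitions");
        for (uint256 i = 0; i < transitions.length - 1; i++) {
            Transition calldata a = transitions[i];
            Transition calldata b = transitions[i + 1];
            require(a.parentBlockHash == b.parentBlockHash, "parentHash not the same");
            if (i < transitions.length - 2) {
                // Inside the agreeing majority: block hashes must match.
                require(a.blockHash == b.blockHash, "blockhash not the same");
            } else {
                // Last pair: the final transition must disagree with the majority.
                require(a.blockHash != b.blockHash, "blockhash the same");
            }
        }
    }
}
```

With two elements this collapses to the first branch above (same parent, different block hash); with three or more it matches the consensus branch.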
88 changes: 62 additions & 26 deletions packages/protocol/test/L1/TaikoL1.t.sol
@@ -9,45 +9,81 @@ contract TaikoL1Test is TaikoL1TestBase {
TaikoL1(payable(deployProxy({ name: "taiko", impl: address(new TaikoL1()), data: "" })));
}

function test_L1_proposeBlock() external {
function test_L1_propose_prove_and_verify_blocks_sequentially() external {
giveEthAndTko(Alice, 100 ether, 100 ether);

TaikoData.BlockMetadata memory meta;

vm.roll(block.number + 1);
vm.warp(block.timestamp + 12);

// console2.log(block.number);
// meta.blockHash = randBytes32();
// meta.parentMetaHash = GENESIS_BLOCK_HASH;
// meta.l1Hash = blockhash(block.number - 1);
// meta.difficulty = block.prevrandao;
// meta.blobHash = randBytes32();
// meta.coinbase = Alice;
// meta.l2BlockNumber = 1;
// meta.gasLimit = L1.getConfig().blockMaxGasLimit;
// meta.l1StateBlockNumber = uint32(block.number-1);
// meta.timestamp = uint64(block.timestamp - 12); // 1 block behind

// meta.txListByteOffset = 0;
// meta.txListByteSize = 0;
// meta.blobUsed = true;

for (uint64 blockId = 1; blockId <= 1; blockId++) {
printVariables("before propose");
bytes32 parentMetaHash;
for (uint64 blockId = 1; blockId <= 20; blockId++) {
printVariables("before propose & prove & verify");
// Create metadata and propose the block
meta = createBlockMetaData(Alice, blockId, 1, true);
proposeBlock(Alice, Alice, meta);
printVariables("after propose");
proposeBlock(Alice, Alice, meta, "");

// Create proofs and prove a block
BasedOperator.ProofBatch memory blockProofs = createProofs(meta, Alice, true);

proveBlock(Alice, abi.encode(blockProofs));

// bytes32 blockHash = bytes32(1e10 + blockId);
// bytes32 stateRoot = bytes32(1e9 + blockId);
// Wait enough time and verify block
vm.warp(uint32(block.timestamp + L1.SECURITY_DELAY_AFTER_PROVEN() + 1));
vm.roll(block.number + 10);
verifyBlock(1);
parentMetaHash = keccak256(abi.encode(meta));
printVariables("after verify");
}
}

function test_L1_propose_some_blocks_in_a_row_then_prove_and_verify() external {
giveEthAndTko(Alice, 100 ether, 100 ether);

TaikoData.BlockMetadata[] memory blockMetaDatas = new TaikoData.BlockMetadata[](20);

// proveBlock(Alice, meta, parentHash, blockHash, stateRoot, meta.minTier, "");
// parentHash = blockHash;
vm.roll(block.number + 1);
vm.warp(block.timestamp + 12);

bytes32 parentMetaHash;
for (uint64 blockId = 1; blockId <= 20; blockId++) {
printVariables("before propose & prove & verify");
// Create metadata and propose the block
blockMetaDatas[blockId - 1] = createBlockMetaData(Alice, blockId, 1, true);
proposeBlock(Alice, Alice, blockMetaDatas[blockId - 1], "");
vm.roll(block.number + 1);
vm.warp(block.timestamp + 12);
}

for (uint64 blockId = 1; blockId <= 20; blockId++) {
// Create proofs and prove a block
BasedOperator.ProofBatch memory blockProofs =
createProofs(blockMetaDatas[blockId - 1], Alice, true);
proveBlock(Alice, abi.encode(blockProofs));

// Wait enough time and verify block (currently we simply just "wait enough" from latest
// block and not time it perfectly)
vm.warp(uint32(block.timestamp + L1.SECURITY_DELAY_AFTER_PROVEN() + 1));
vm.roll(block.number + 10);
verifyBlock(1);
parentMetaHash = keccak256(abi.encode(blockMetaDatas[blockId - 1]));
printVariables("after verify 1");
}
}

function test_L1_propose_block_outside_the_4_epoch_window() external {
giveEthAndTko(Alice, 100 ether, 100 ether);

TaikoData.BlockMetadata memory meta;

vm.roll(block.number + 1);
vm.warp(block.timestamp + 12);

// Create metadata and propose the block 129 blocks later only
meta = createBlockMetaData(Alice, 1, 1, true);
vm.roll(block.number + 129);
vm.warp(block.timestamp + 129 * 12);

proposeBlock(Alice, Alice, meta, TaikoErrors.L1_INVALID_L1_STATE_BLOCK.selector);
}
}