diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2950cf3e7bf..f03dece5f14 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -98,8 +98,6 @@ jobs: needs: [prepare, build-amd64, build-arm64] runs-on: ubuntu-latest steps: - - name: Checkout code - uses: actions/checkout@v4 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Login to DockerHub @@ -109,8 +107,14 @@ jobs: password: ${{ env.DOCKERHUB_TOKEN }} - name: Create and push manifest run: | - docker buildx create --use - docker buildx build --push --platform linux/amd64,linux/arm64 --tag ${{ env.IMAGE_NAME }}:${{ needs.prepare.outputs.version }} --file Dockerfile . + docker buildx imagetools create \ + --tag ${{ env.IMAGE_NAME }}:${{ needs.prepare.outputs.version }} \ + ${{ env.IMAGE_NAME }}:${{ needs.prepare.outputs.version }}-amd64 \ + ${{ env.IMAGE_NAME }}:${{ needs.prepare.outputs.version }}-arm64 + if [ "${{ needs.prepare.outputs.latest_tag }}" == "true" ]; then - docker buildx build --push --platform linux/amd64,linux/arm64 --tag ${{ env.IMAGE_NAME }}:latest --file Dockerfile . 
- fi \ No newline at end of file + docker buildx imagetools create \ + --tag ${{ env.IMAGE_NAME }}:latest \ + ${{ env.IMAGE_NAME }}:${{ needs.prepare.outputs.version }}-amd64 \ + ${{ env.IMAGE_NAME }}:${{ needs.prepare.outputs.version }}-arm64 + fi diff --git a/debugToolsConfig.yaml.example b/debugToolsConfig.yaml.example index 2cff0bdc980..9597019c5e4 100644 --- a/debugToolsConfig.yaml.example +++ b/debugToolsConfig.yaml.example @@ -2,4 +2,10 @@ url: "localhost:8545" localUrl: "http://0.0.0.0:8123" block: "0x9095F" -dumpFileName: "storageDump.json" \ No newline at end of file +dumpFileName: "storageDump.json" +addressRollup: "0x" +l1Url: "http://localhost:8545" +l1ChainId: 1 +l1SyncStartBlock: 4000000 +rollupId: 1 +elderberryBachNo: 1 \ No newline at end of file diff --git a/zk/debug_tools/cast-scripts/.env.example b/zk/debug_tools/cast-scripts/.env.example new file mode 100644 index 00000000000..5b8b36fc229 --- /dev/null +++ b/zk/debug_tools/cast-scripts/.env.example @@ -0,0 +1,2 @@ +rpc_url="http://0.0.0.0:8467" +private_key="" \ No newline at end of file diff --git a/zk/debug_tools/cast-scripts/balance-after-tx.sh b/zk/debug_tools/cast-scripts/balance-after-tx.sh new file mode 100755 index 00000000000..5601c8a1391 --- /dev/null +++ b/zk/debug_tools/cast-scripts/balance-after-tx.sh @@ -0,0 +1,21 @@ +set -a # automatically export all variables +source .env +set +a + +if [ -z "$rpc_url" ]; then + echo "Please fill \"rpc_url\" in the .env file" + exit 1 +fi + +if [ -z "$private_key" ]; then + echo "Please fill \"private_key\" in the .env file" + exit 1 +fi +# balance before sending funds +echo "Balance before sending funds $(cast balance --rpc-url $rpc_url 0x5e8f0f2f8b364e2f0f3f1f1f1f1f1f1f1f1f1f1f)" + +# send some funds +cast send --rpc-url $rpc_url --legacy --value 0.1ether --private-key $private_key 0x5e8f0f2f8b364e2f0f3f1f1f1f1f1f1f1f1f1f1f + +# now get the balance of the accounts +echo "Balance after sending funds $(cast balance --rpc-url $rpc_url 
0x5e8f0f2f8b364e2f0f3f1f1f1f1f1f1f1f1f1f1f)" diff --git a/zk/debug_tools/cast-scripts/estimate-gas-infinite-loop.sh b/zk/debug_tools/cast-scripts/estimate-gas-infinite-loop.sh new file mode 100755 index 00000000000..dac01e2c150 --- /dev/null +++ b/zk/debug_tools/cast-scripts/estimate-gas-infinite-loop.sh @@ -0,0 +1,19 @@ +set -a # automatically export all variables +source .env +set +a + +if [ -z "$rpc_url" ]; then + echo "Please fill "rpc_url" in the .env file" + exit 1 +fi + +if [ -z "$private_key" ]; then + echo "Please fill "private_key" in the .env file" + exit 1 +fi + +addr=$(cast wallet address --private-key $private_key) +nonce=$(cast nonce --rpc-url $rpc_url $addr) + +echo "Test address: $addr (nonce: $nonce) balance $(cast balance --rpc-url $rpc_url $addr)" +cast send --legacy --rpc-url $rpc_url --private-key $private_key --async --create 0x5b3456 \ No newline at end of file diff --git a/zk/debug_tools/cast-scripts/nonce-with-infinite-loop-sc.sh b/zk/debug_tools/cast-scripts/nonce-with-infinite-loop-sc.sh new file mode 100755 index 00000000000..139aded5988 --- /dev/null +++ b/zk/debug_tools/cast-scripts/nonce-with-infinite-loop-sc.sh @@ -0,0 +1,25 @@ +set -a # automatically export all variables +source .env +set +a + +if [ -z "$rpc_url" ]; then + echo "Please fill "rpc_url" in the .env file" + exit 1 +fi + +if [ -z "$private_key" ]; then + echo "Please fill "private_key" in the .env file" + exit 1 +fi + +addr=$(cast wallet address --private-key $private_key) +nonce=$(cast nonce --rpc-url $rpc_url $addr) + +echo "Test address: $addr (nonce: $nonce) balance $(cast balance --rpc-url $rpc_url $addr)" +cast send --legacy --rpc-url $rpc_url --private-key $private_key --gas-limit 5000000000000000000 --gas-price 1100 --async --create 0x5b3456 +echo "Test address: $addr (nonce: $(cast nonce --rpc-url $rpc_url $addr)) balance $(cast balance --rpc-url $rpc_url $addr)" +#sleep 5 seconds +echo "Sleeping for 5 seconds..." 
+sleep 5 + +echo "Test address: $addr (nonce: $(cast nonce --rpc-url $rpc_url $addr)) balance $(cast balance --rpc-url $rpc_url $addr)" \ No newline at end of file diff --git a/zk/debug_tools/l1-sequences-downloader/acc-input-hash/main.go b/zk/debug_tools/l1-sequences-downloader/acc-input-hash/main.go new file mode 100644 index 00000000000..0cc37fac227 --- /dev/null +++ b/zk/debug_tools/l1-sequences-downloader/acc-input-hash/main.go @@ -0,0 +1,105 @@ +package main + +import ( + "encoding/json" + "fmt" + "math/big" + "os" + + "github.com/ledgerwatch/erigon-lib/common" + "github.com/ledgerwatch/erigon/zk/syncer" + "github.com/ledgerwatch/erigon/zk/types" +) + +func main() { + fileSeq, err := os.Open("sequencesMainnet.json") + if err != nil { + panic(err) + } + defer fileSeq.Close() + sequences := make([]types.L1BatchInfo, 0) + + encSeq := json.NewDecoder(fileSeq) + if err := encSeq.Decode(&sequences); err != nil { + panic(err) + } + fileSeq.Close() + fileCalldata, err := os.Open("calldataMainnet.json") + if err != nil { + panic(err) + } + defer fileCalldata.Close() + calldata := make(map[string]string) + + encCalldata := json.NewDecoder(fileCalldata) + if err := encCalldata.Decode(&calldata); err != nil { + panic(err) + } + fileCalldata.Close() + fileAccInputHash, err := os.Open("accInputHashesMainnet.json") + if err != nil { + panic(err) + } + defer fileAccInputHash.Close() + accInputHashes := make(map[uint64]string) + + encAccInputHash := json.NewDecoder(fileAccInputHash) + if err := encAccInputHash.Decode(&accInputHashes); err != nil { + panic(err) + } + fileAccInputHash.Close() + + for i := 0; i < 40000; i++ { + delete(calldata, sequences[i].L1TxHash.String()) + delete(accInputHashes, sequences[i].BatchNo) + } + + prevSeq := sequences[40001] + for i := 40002; i < len(sequences); i++ { + nextSeq := sequences[i] + nextCalldata, ok := calldata[nextSeq.L1TxHash.String()] + if !ok { + panic(fmt.Errorf("calldata for tx %s not found", nextSeq.L1TxHash.String())) + } + 
prevAccInputHash, ok := accInputHashes[prevSeq.BatchNo] + if !ok { + panic(fmt.Errorf("accInputHash for batch %d not found", prevSeq.BatchNo)) + } + lastAccInputHash, ok := accInputHashes[nextSeq.BatchNo] + if !ok { + panic(fmt.Errorf("accInputHash for batch %d not found", nextSeq.BatchNo)) + } + + decodedSequenceInterface, err := syncer.DecodeSequenceBatchesCalldata(common.FromHex(nextCalldata)) + if err != nil { + panic(fmt.Errorf("failed to decode calldata for tx %s: %w", nextSeq.L1TxHash, err)) + } + + accInputHashCalcFn, totalSequenceBatches, err := syncer.GetAccInputDataCalcFunction(nextSeq.L1InfoRoot, decodedSequenceInterface) + if err != nil { + panic(fmt.Errorf("failed to get accInputHash calculation func: %w", err)) + } + + if totalSequenceBatches == 0 || nextSeq.BatchNo-prevSeq.BatchNo > uint64(totalSequenceBatches) { + panic(fmt.Errorf("batch %d is out of range of sequence calldata: %d %d", nextSeq.BatchNo, prevSeq.BatchNo, totalSequenceBatches)) + } + + prevAccInputBigInt := new(big.Int).SetBytes(common.FromHex(prevAccInputHash)) + preVAccInputHash := common.BigToHash(prevAccInputBigInt) + accInputHash := &preVAccInputHash + // calculate acc input hash + for i := 0; i < int(nextSeq.BatchNo-prevSeq.BatchNo); i++ { + accInputHash = accInputHashCalcFn(*accInputHash, i) + } + + if accInputHash.Hex() != lastAccInputHash { + panic(fmt.Errorf("accInputHash for tx %s and batchNum %d does not match", nextSeq.L1TxHash.String(), nextSeq.BatchNo)) + } + + prevSeq = nextSeq + if i%1000 == 0 { + fmt.Println(i, " sequence checked: ", nextSeq.BatchNo) + } + } + +} diff --git a/zk/debug_tools/l1-sequences-downloader/sequence-accinputhash/main.go b/zk/debug_tools/l1-sequences-downloader/sequence-accinputhash/main.go new file mode 100644 index 00000000000..57b2a9b8a2c --- /dev/null +++ b/zk/debug_tools/l1-sequences-downloader/sequence-accinputhash/main.go @@ -0,0 +1,150 @@ +package main + +import ( + "context" + "encoding/binary" + "encoding/json" + "fmt" + "os" + "time" 
+ + ethereum "github.com/ledgerwatch/erigon" + "github.com/ledgerwatch/erigon-lib/common" + + "github.com/iden3/go-iden3-crypto/keccak256" + "github.com/ledgerwatch/erigon/ethclient" + "github.com/ledgerwatch/erigon/zk/debug_tools" + "github.com/ledgerwatch/erigon/zk/types" +) + +func main() { + ctx := context.Background() + cfg, err := debug_tools.GetConf() + if err != nil { + panic(fmt.Sprintf("RPGCOnfig: %s", err)) + } + + file, err := os.Open("sequencesMainnet.json") + if err != nil { + panic(err) + } + defer file.Close() + sequences := make([]types.L1BatchInfo, 0) + + enc := json.NewDecoder(file) + if err := enc.Decode(&sequences); err != nil { + panic(err) + } + + ethClient, err := ethclient.Dial(cfg.L1Url) + if err != nil { + panic(err) + } + + emptyHash := common.Hash{} + rollupAddr := common.HexToAddress(cfg.AddressRollup) + accInputHashes := make(map[uint64]string) + index := 0 + for { + seq := sequences[index] + // get call data for tx + + var accInputHash common.Hash + var err error + if seq.BatchNo < cfg.ElderberryBachNo { + accInputHash, err = callSequencedBatchesMap(ctx, ethClient, &rollupAddr, seq.BatchNo) + } else { + accInputHash, _, err = callGetRollupSequencedBatches(ctx, ethClient, &rollupAddr, cfg.RollupId, seq.BatchNo) + } + if err != nil { + fmt.Println("Error fetching transaction: ", err, " for batch ", seq.BatchNo) + continue + } + + if accInputHash == emptyHash { + fmt.Println("Error fetching transaction: accInputHash is empty for batch ", seq.BatchNo) + panic("Error fetching transaction: accInputHash is empty for batch") + } + accInputHashes[seq.BatchNo] = accInputHash.Hex() + + index++ + if index >= len(sequences) { + break + } + + if index%100 == 0 { + fmt.Println("Processed ", index, "sequences from ", len(sequences)) + } + + time.Sleep(2 * time.Millisecond) + } + + // write l1BatchInfos to file + file2, err := os.Create("accInputHashes.json") + if err != nil { + panic(err) + } + defer file2.Close() + + enc2 := json.NewEncoder(file2) 
+ enc2.SetIndent("", "  ") + if err := enc2.Encode(accInputHashes); err != nil { + panic(err) + } +} + +// calls the old rollup contract to get the accInputHash for a certain batch +// returns only the accInputHash (the legacy map does not expose a lastBatchNumber) +func callSequencedBatchesMap(ctx context.Context, client *ethclient.Client, addr *common.Address, batchNum uint64) (accInputHash common.Hash, err error) { + mapKeyHex := fmt.Sprintf("%064x%064x", batchNum, 114 /* _legacySequencedBatches slot*/) + mapKey := keccak256.Hash(common.FromHex(mapKeyHex)) + mkh := common.BytesToHash(mapKey) + + resp, err := client.StorageAt(ctx, *addr, mkh, nil) + if err != nil { + return + } + + // NOTE: a short storage response was previously swallowed silently, + // returning an empty hash with a nil error; report it explicitly. + + if len(resp) < 32 { + return accInputHash, errorShortResponseLT32 + } + accInputHash = common.BytesToHash(resp[:32]) + + return +} + +var ( + errorShortResponseLT32 = fmt.Errorf("response length is less than 32 bytes") + errorShortResponseLT96 = fmt.Errorf("response length is less than 96 bytes") + rollupSequencedBatchesSignature = "0x25280169" // hardcoded abi signature +) + +func callGetRollupSequencedBatches(ctx context.Context, client *ethclient.Client, addr *common.Address, rollupId, batchNum uint64) (common.Hash, uint64, error) { + rollupID := fmt.Sprintf("%064x", rollupId) + batchNumber := fmt.Sprintf("%064x", batchNum) + + resp, err := client.CallContract(ctx, ethereum.CallMsg{ + To: addr, + Data: common.FromHex(rollupSequencedBatchesSignature + rollupID + batchNumber), + }, nil) + + if err != nil { + return common.Hash{}, 0, err + } + + if len(resp) < 32 { + return common.Hash{}, 0, errorShortResponseLT32 + } + h := common.BytesToHash(resp[:32]) + + if len(resp) < 96 { + return common.Hash{}, 0, errorShortResponseLT96 + } + lastBatchNumber := binary.BigEndian.Uint64(resp[88:96]) + + return h, lastBatchNumber, nil +} diff --git a/zk/debug_tools/l1-sequences-downloader/sequence-calldata/main.go b/zk/debug_tools/l1-sequences-downloader/sequence-calldata/main.go new file mode 100644 index 00000000000..416ba59a996 --- 
/dev/null +++ b/zk/debug_tools/l1-sequences-downloader/sequence-calldata/main.go @@ -0,0 +1,77 @@ +package main + +import ( + "context" + "encoding/hex" + "encoding/json" + "fmt" + "os" + "time" + + "github.com/ledgerwatch/erigon/ethclient" + "github.com/ledgerwatch/erigon/zk/debug_tools" + "github.com/ledgerwatch/erigon/zk/types" +) + +func main() { + ctx := context.Background() + cfg, err := debug_tools.GetConf() + if err != nil { + panic(fmt.Sprintf("RPGCOnfig: %s", err)) + } + + file, err := os.Open("sequencesMainnet.json") + if err != nil { + panic(err) + } + defer file.Close() + sequences := make([]types.L1BatchInfo, 0) + + enc := json.NewDecoder(file) + if err := enc.Decode(&sequences); err != nil { + panic(err) + } + + ethClient, err := ethclient.Dial(cfg.L1Url) + if err != nil { + panic(err) + } + + calldatas := make(map[string]string) + index := 40001 + for { + seq := sequences[index] + // get call data for tx + l1Transaction, _, err := ethClient.TransactionByHash(ctx, seq.L1TxHash) + if err != nil { + fmt.Println("Error fetching transaction: ", err) + continue + } + sequenceBatchesCalldata := l1Transaction.GetData() + calldatas[seq.L1TxHash.String()] = hex.EncodeToString(sequenceBatchesCalldata) + + index++ + if index >= len(sequences) { + break + } + + if index%100 == 0 { + fmt.Println("Processed ", index, "transactions from ", len(sequences)) + } + + time.Sleep(10 * time.Millisecond) + } + + // write l1BatchInfos to file + file2, err := os.Create("calldataFinal.json") + if err != nil { + panic(err) + } + defer file2.Close() + + enc2 := json.NewEncoder(file2) + enc2.SetIndent("", " ") + if err := enc2.Encode(calldatas); err != nil { + panic(err) + } +} diff --git a/zk/debug_tools/l1-sequences-downloader/sequence-logs/main.go b/zk/debug_tools/l1-sequences-downloader/sequence-logs/main.go new file mode 100644 index 00000000000..034ff332752 --- /dev/null +++ b/zk/debug_tools/l1-sequences-downloader/sequence-logs/main.go @@ -0,0 +1,119 @@ +package main + 
+import ( + "context" + "encoding/json" + "fmt" + "math/big" + "os" + "time" + + ethTypes "github.com/ledgerwatch/erigon/core/types" + "github.com/ledgerwatch/erigon/ethclient" + + ethereum "github.com/ledgerwatch/erigon" + "github.com/ledgerwatch/erigon-lib/common" + "github.com/ledgerwatch/erigon/zk/contracts" + "github.com/ledgerwatch/erigon/zk/debug_tools" + "github.com/ledgerwatch/erigon/zk/types" +) + +func main() { + ctx := context.Background() + cfg, err := debug_tools.GetConf() + if err != nil { + panic(fmt.Sprintf("RPGCOnfig: %s", err)) + } + + seqTopics := [][]common.Hash{{ + contracts.SequencedBatchTopicPreEtrog, + contracts.SequencedBatchTopicEtrog, + }} + + seqAndVerifL1Contracts := []common.Address{common.HexToAddress(cfg.AddressRollup)} + + ethClient, err := ethclient.Dial(cfg.L1Url) + if err != nil { + panic(err) + + } + latestL1Block, err := ethClient.BlockNumber(ctx) + if err != nil { + panic(err) + } + + l1BatchInfos := make([]types.L1BatchInfo, 0) + + query := ethereum.FilterQuery{ + Addresses: seqAndVerifL1Contracts, + Topics: seqTopics, + } + from := cfg.L1SyncStartBlock + fmt.Println("Fetching logs from block ", from, "to", latestL1Block) + defer fmt.Println("Done fetching logs") + count := 0 + + for from < latestL1Block { + time.Sleep(10 * time.Millisecond) + if count%10 == 0 { + fmt.Println("[progress] Fetching logs from block ", from, "to", latestL1Block) + } + to := from + 20000 + if to > latestL1Block { + to = latestL1Block + } + + query.FromBlock = new(big.Int).SetUint64(from) + query.ToBlock = new(big.Int).SetUint64(to) + + logs, err := ethClient.FilterLogs(ctx, query) + if err != nil { + fmt.Println("Error fetching logs, repeating: ", err) + continue + } + + for _, log := range logs { + l1BatchInfos = append(l1BatchInfos, parseLogType(&log)) + } + + from += 20000 + count++ + } + + // write l1BatchInfos to file + file, err := os.Create("l1BatchInfos.json") + if err != nil { + panic(err) + } + defer file.Close() + + enc := 
json.NewEncoder(file) + enc.SetIndent("", "  ") + if err := enc.Encode(l1BatchInfos); err != nil { + panic(err) + } +} + +func parseLogType(log *ethTypes.Log) (l1BatchInfo types.L1BatchInfo) { + var ( + batchNum uint64 + l1InfoRoot common.Hash + ) + + switch log.Topics[0] { + case contracts.SequencedBatchTopicPreEtrog: + batchNum = new(big.Int).SetBytes(log.Topics[1].Bytes()).Uint64() + case contracts.SequencedBatchTopicEtrog: + batchNum = new(big.Int).SetBytes(log.Topics[1].Bytes()).Uint64() + l1InfoRoot = common.BytesToHash(log.Data[:32]) + default: + batchNum = 0 + } + + return types.L1BatchInfo{ + BatchNo: batchNum, + L1BlockNo: log.BlockNumber, + L1TxHash: common.BytesToHash(log.TxHash.Bytes()), + L1InfoRoot: l1InfoRoot, + } +} diff --git a/zk/debug_tools/test-contracts/scripts/counter.js b/zk/debug_tools/test-contracts/scripts/counter.js index 4512e52740c..6305d3dab97 100644 --- a/zk/debug_tools/test-contracts/scripts/counter.js +++ b/zk/debug_tools/test-contracts/scripts/counter.js @@ -4,13 +4,12 @@ try { const CounterContract = await hre.ethers.getContractFactory("Counter"); // Deploy the contract - const contract = await CounterContract.deploy(); + const contract = await CounterContract.deploy({gasLimit: 140000, gasPrice: 1000000000}); // Wait for the deployment transaction to be mined const deployResult = await contract.waitForDeployment(); - console.log(`Counter contract deployed to: ${await contract.getAddress()}`); - const result = await contract.increment(); + // const result = await contract.increment(); - console.log('Increment method call transaction: ', result.hash); + // console.log('Increment method call transaction: ', result.hash); } catch (error) { console.error(error); diff --git a/zk/debug_tools/utils.go b/zk/debug_tools/utils.go index 07caeb3ae98..bb3e379e66a 100644 --- a/zk/debug_tools/utils.go +++ b/zk/debug_tools/utils.go @@ -333,9 +333,15 @@ func GetConf() (RpcConfig, error) { } type RpcConfig struct { - Url string `yaml:"url"` - LocalUrl string `yaml:"localUrl"` - Datastream string `yaml:"datastream"` - 
DumpFileName string `yaml:"dumpFileName"` - Block int64 `yaml:"block"` + Url string `yaml:"url"` + LocalUrl string `yaml:"localUrl"` + Datastream string `yaml:"datastream"` + DumpFileName string `yaml:"dumpFileName"` + Block int64 `yaml:"block"` + AddressRollup string `yaml:"addressRollup"` + L1Url string `yaml:"l1Url"` + L1ChainId uint64 `yaml:"l1ChainId"` + L1SyncStartBlock uint64 `yaml:"l1SyncStartBlock"` + RollupId uint64 `yaml:"rollupId"` + ElderberryBachNo uint64 `yaml:"elderberryBachNo"` }