Commit

save work
afkbyte committed Jun 11, 2024
1 parent 051f6f8 commit f00ebfb
Showing 12 changed files with 2,520 additions and 56 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/contract-tests.yml
@@ -34,7 +34,7 @@ jobs:
run: yarn

- name: Build
run: forge test
run: forge test --fork-url https://rpc.holesky.ethpandaops.io/
tests:
name: Contract tests
runs-on: ubuntu-8
1 change: 1 addition & 0 deletions foundry.toml
@@ -8,6 +8,7 @@ optimizer = true
optimizer_runs = 100
via_ir = false
solc_version = '0.8.12'
fs_permissions = [{ access = "read", path = "./"}]

[profile.yul]
src = 'yul'
7 changes: 6 additions & 1 deletion package.json
@@ -41,14 +41,18 @@
"postinstall": "patch-package",
"deploy-factory": "hardhat run scripts/deployment.ts",
"deploy-eth-rollup": "hardhat run scripts/createEthRollup.ts",
"deploy-erc20-rollup": "hardhat run scripts/createERC20Rollup.ts"
"deploy-erc20-rollup": "hardhat run scripts/createERC20Rollup.ts",
"deploy-blob": "yarn ts-node scripts/disperseBlob.ts"
},
"dependencies": {
"@eigenda/eigenda-utils": "2.0.0",
"@grpc/grpc-js": "^1.8.22",
"@grpc/proto-loader": "^0.7.13",
"@offchainlabs/upgrade-executor": "1.1.0-beta.0",
"@openzeppelin-upgrades/contracts": "npm:@openzeppelin/contracts-upgradeable@4.7",
"@openzeppelin/contracts": "4.7",
"@openzeppelin/contracts-upgradeable": "4.7",
"google-protobuf": "^3.21.2",
"patch-package": "^6.4.7"
},
"private": false,
@@ -62,6 +66,7 @@
"@typechain/ethers-v5": "^10.0.0",
"@typechain/hardhat": "^6.0.0",
"@types/chai": "^4.3.0",
"@types/google-protobuf": "^3.15.12",
"@types/mocha": "^9.0.0",
"@types/node": "^17.0.5",
"@typescript-eslint/eslint-plugin": "^5.14.0",
140 changes: 140 additions & 0 deletions scripts/disperseBlob.ts
@@ -0,0 +1,140 @@
import { disperser } from "./eigenDAClient/proto/disperser";
import { ChannelCredentials } from "@grpc/grpc-js";
import * as fs from 'fs';

const blobInfoPath = "./test/foundry/blobInfo/blobInfo.json";
const client = new disperser.DisperserClient("disperser-holesky.eigenda.xyz:443", ChannelCredentials.createSsl());
const disperseBlobRequest = new disperser.DisperseBlobRequest({ data: new Uint8Array([0, 1, 2, 3]) });

// Returns true when a fresh dispersal is needed: either no blob info has been cached yet,
// or the disperser no longer has metadata for the cached request ID.
async function checkBlobStatus() {
if (fs.existsSync(blobInfoPath)) {
const blobInfo = JSON.parse(fs.readFileSync(blobInfoPath, "utf8"));
let request_id_bytes = new Uint8Array(Buffer.from(blobInfo.request_id, 'hex'));

const blobStatusRequest = new disperser.BlobStatusRequest({ request_id: request_id_bytes });

return new Promise<boolean>((resolve) => {
client.GetBlobStatus(blobStatusRequest, (error: Error | null, blobStatusReply?: disperser.BlobStatusReply) => {
if (error) {
switch (error.message) {
case "5 NOT_FOUND: no metadata found for the requestID":
console.log("Blob has expired, disperse again");
resolve(true);
break;
default:
console.error("Error:", error);
resolve(false);
}
} else if (blobStatusReply) {
console.log("Blob found, no need to disperse");
resolve(false);
} else {
console.error("No reply from GetBlobStatus");
resolve(false);
}
});
});
} else {
return true;
}
}

// Entry point: disperse a test blob if no valid one is cached, then poll until it is confirmed.
(async () => {
const needToDisperseBlob = await checkBlobStatus();

if (needToDisperseBlob) {
console.log("DisperseBlob");
client.DisperseBlob(disperseBlobRequest, (error: Error | null, disperseBlobReply?: disperser.DisperseBlobReply) => {
if (error) {
console.error("Error:", error);
} else if (disperseBlobReply) {
console.log("Blob ID:", Buffer.from(disperseBlobReply.request_id).toString("hex"));

const blobStatusRequest = new disperser.BlobStatusRequest({ request_id: disperseBlobReply.request_id });

// Poll GetBlobStatus every 30 seconds; stop polling once the blob is confirmed and its info is saved.
const blobStatusChecker = setInterval(() => {
client.GetBlobStatus(blobStatusRequest, (statusError: Error | null, blobStatusReply?: disperser.BlobStatusReply) => {
if (statusError) {
console.error("Status Error:", statusError);
} else if (blobStatusReply) {
switch (blobStatusReply.status) {
case disperser.BlobStatus.PROCESSING:
console.log("Blob is currently being processed.");
break;
case disperser.BlobStatus.DISPERSING:
console.log("Blob is currently being dispersed.");
break;
case disperser.BlobStatus.CONFIRMED:
console.log("Blob has been confirmed.");
let blobInfoWithRequestId = parseBlobInfo(disperseBlobReply, blobStatusReply);
fs.writeFileSync(blobInfoPath, JSON.stringify(blobInfoWithRequestId, null, 2));
clearInterval(blobStatusChecker);
break;
case disperser.BlobStatus.FAILED:
console.log("Blob has failed.");
break;
case disperser.BlobStatus.FINALIZED:
console.log("Blob has been finalized.");
break;
case disperser.BlobStatus.INSUFFICIENT_SIGNATURES:
console.log("Blob has insufficient signatures.");
break;
}
} else {
console.error("No reply from GetBlobStatus");
}
});
}, 30000);
} else {
console.error("No reply from DisperseBlob");
}
});
}
})();

// Flatten the gRPC replies into a plain object with hex-encoded byte fields so it can be
// written to blobInfo.json.
function parseBlobInfo(disperseBlobReply: disperser.DisperseBlobReply, blobStatusReply: disperser.BlobStatusReply) {
const blobQuorumParams = blobStatusReply.info.blob_header.blob_quorum_params.map(param => ({
quorum_number: param.quorum_number,
adversary_threshold_percentage: param.adversary_threshold_percentage,
confirmation_threshold_percentage: param.confirmation_threshold_percentage,
chunk_length: param.chunk_length
}));

return {
request_id: Buffer.from(disperseBlobReply.request_id).toString("hex"),
blob_info: {
blob_header: {
commitment: {
x: Buffer.from(blobStatusReply.info.blob_header.commitment.x).toString("hex"),
y: Buffer.from(blobStatusReply.info.blob_header.commitment.y).toString("hex")
},
data_length: blobStatusReply.info.blob_header.data_length,
blob_quorum_params: blobQuorumParams
},
blob_verification_proof: {
batch_id: blobStatusReply.info.blob_verification_proof.batch_id,
blob_index: blobStatusReply.info.blob_verification_proof.blob_index,
batch_metadata: {
batch_header: {
batch_root: Buffer.from(blobStatusReply.info.blob_verification_proof.batch_metadata.batch_header.batch_root).toString("hex"),
quorum_numbers: Buffer.from(blobStatusReply.info.blob_verification_proof.batch_metadata.batch_header.quorum_numbers).toString("hex"),
quorum_signed_percentages: Buffer.from(blobStatusReply.info.blob_verification_proof.batch_metadata.batch_header.quorum_signed_percentages).toString("hex"),
reference_block_number: blobStatusReply.info.blob_verification_proof.batch_metadata.batch_header.reference_block_number
},
signatory_record_hash: Buffer.from(blobStatusReply.info.blob_verification_proof.batch_metadata.signatory_record_hash).toString("hex"),
fee: Buffer.from(blobStatusReply.info.blob_verification_proof.batch_metadata.fee).toString("hex"),
confirmation_block_number: blobStatusReply.info.blob_verification_proof.batch_metadata.confirmation_block_number,
batch_header_hash: Buffer.from(blobStatusReply.info.blob_verification_proof.batch_metadata.batch_header_hash).toString("hex")
},
inclusion_proof: Buffer.from(blobStatusReply.info.blob_verification_proof.inclusion_proof).toString("hex"),
quorum_indexes: Buffer.from(blobStatusReply.info.blob_verification_proof.quorum_indexes).toString("hex")
}
}
};
}
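
For context, here is a minimal sketch (not part of this commit) of how the hex-encoded blobInfo.json written above could be loaded back into byte form; the field names mirror the object built by parseBlobInfo, and the path assumes the same location used in this script.

// Minimal sketch (assumption, not part of this commit): load the cached blob info
// written by disperseBlob.ts and decode its hex fields back into bytes.
import * as fs from "fs";

const cachedPath = "./test/foundry/blobInfo/blobInfo.json";
const cached = JSON.parse(fs.readFileSync(cachedPath, "utf8"));

// request_id is stored as hex; GetBlobStatus expects raw bytes.
const requestId = new Uint8Array(Buffer.from(cached.request_id, "hex"));

// The KZG commitment coordinates are also hex-encoded by parseBlobInfo.
const commitmentX = new Uint8Array(Buffer.from(cached.blob_info.blob_header.commitment.x, "hex"));
const commitmentY = new Uint8Array(Buffer.from(cached.blob_info.blob_header.commitment.y, "hex"));

console.log("request id bytes:", requestId.length);
console.log("commitment byte lengths:", commitmentX.length, commitmentY.length);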






9 changes: 9 additions & 0 deletions scripts/eigenDAClient/proto/common.proto
@@ -0,0 +1,9 @@
syntax = "proto3";
package common;

message G1Commitment {
// The X coordinate of the KZG commitment. This is the raw byte representation of the field element.
bytes x = 1;
// The Y coordinate of the KZG commitment. This is the raw byte representation of the field element.
bytes y = 2;
}
98 changes: 98 additions & 0 deletions scripts/eigenDAClient/proto/common.ts
@@ -0,0 +1,98 @@
/**
* Generated by the protoc-gen-ts. DO NOT EDIT!
* compiler version: 5.26.1
* source: proto/common.proto
* git: https://github.com/thesayyn/protoc-gen-ts */
import * as pb_1 from "google-protobuf";
export namespace common {
export class G1Commitment extends pb_1.Message {
#one_of_decls: number[][] = [];
constructor(data?: any[] | {
x?: Uint8Array;
y?: Uint8Array;
}) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("x" in data && data.x != undefined) {
this.x = data.x;
}
if ("y" in data && data.y != undefined) {
this.y = data.y;
}
}
}
get x() {
return pb_1.Message.getFieldWithDefault(this, 1, new Uint8Array(0)) as Uint8Array;
}
set x(value: Uint8Array) {
pb_1.Message.setField(this, 1, value);
}
get y() {
return pb_1.Message.getFieldWithDefault(this, 2, new Uint8Array(0)) as Uint8Array;
}
set y(value: Uint8Array) {
pb_1.Message.setField(this, 2, value);
}
static fromObject(data: {
x?: Uint8Array;
y?: Uint8Array;
}): G1Commitment {
const message = new G1Commitment({});
if (data.x != null) {
message.x = data.x;
}
if (data.y != null) {
message.y = data.y;
}
return message;
}
toObject() {
const data: {
x?: Uint8Array;
y?: Uint8Array;
} = {};
if (this.x != null) {
data.x = this.x;
}
if (this.y != null) {
data.y = this.y;
}
return data;
}
serialize(): Uint8Array;
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
if (this.x.length)
writer.writeBytes(1, this.x);
if (this.y.length)
writer.writeBytes(2, this.y);
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): G1Commitment {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new G1Commitment();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
switch (reader.getFieldNumber()) {
case 1:
message.x = reader.readBytes();
break;
case 2:
message.y = reader.readBytes();
break;
default: reader.skipField();
}
}
return message;
}
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): G1Commitment {
return G1Commitment.deserialize(bytes);
}
}
}
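
A short usage sketch (an assumption, not part of the generated file) showing how the generated common.G1Commitment class round-trips through the protobuf wire format; the relative import path is illustrative and depends on where the snippet lives.

// Hypothetical usage of the generated G1Commitment message (import path is illustrative).
import { common } from "./scripts/eigenDAClient/proto/common";

const commitment = new common.G1Commitment({
  x: new Uint8Array([1, 2, 3]),
  y: new Uint8Array([4, 5, 6]),
});

// serialize() with no writer argument returns the protobuf-encoded bytes.
const bytes = commitment.serialize();

// deserialize() reconstructs an equivalent message from those bytes.
const decoded = common.G1Commitment.deserialize(bytes);
console.log(decoded.toObject()); // logs the x and y byte arrays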