Bolt Builder

This is an overview of the changes made to the canonical Flashbots Builder to support inclusion preconfirmations through Bolt.


Here’s an overview of all the changes divided by module:

builder/

This is where the bulk of the API diffs are located.


We added two new Builder API endpoints to communicate with Relays:

- /relay/v1/builder/constraints: used by the builder to subscribe, via SSE, to the stream of signed constraints that proposers have submitted to the relay for upcoming slots.
- /relay/v1/builder/blocks_with_proofs: used by the builder to submit a sealed block to the relay together with the Merkle inclusion proofs for the preconfirmed transactions.
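
For orientation, here is a minimal sketch of how a builder-side client could call these two endpoints. The paths and the BOLT authorization scheme are the ones used in the diffs further down; the package, function names, and error handling are illustrative only.

package builderapi

import (
    "bytes"
    "fmt"
    "net/http"
)

// Endpoint paths as introduced by this changeset.
const (
    constraintsPath      = "/relay/v1/builder/constraints"        // GET, SSE stream of signed constraints
    blocksWithProofsPath = "/relay/v1/builder/blocks_with_proofs" // POST, block + inclusion proofs
)

// subscribeConstraints opens the SSE stream of signed constraints from a relay.
// authHeader is the "BOLT <signature>,<authDataJSON>" string the builder signs
// with its BLS key (see GenerateAuthenticationHeader in builder.go below).
func subscribeConstraints(relayEndpoint, authHeader string) (*http.Response, error) {
    req, err := http.NewRequest(http.MethodGet, relayEndpoint+constraintsPath, nil)
    if err != nil {
        return nil, err
    }
    req.Header.Set("Authorization", authHeader)
    return http.DefaultClient.Do(req)
}

// submitBlockWithProofs posts a JSON-encoded block submission that carries the
// Merkle multiproofs of the preconfirmed transactions.
func submitBlockWithProofs(relayEndpoint string, body []byte) error {
    resp, err := http.Post(relayEndpoint+blocksWithProofsPath, "application/json", bytes.NewReader(body))
    if err != nil {
        return err
    }
    defer resp.Body.Close()
    if resp.StatusCode > 299 {
        return fmt.Errorf("non-ok response code %d from relay", resp.StatusCode)
    }
    return nil
}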

The constraints cache is populated as soon as new constraints are streamed from the relay, and its contents percolate to the miner at block building time.
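
A condensed sketch of that update step, using the cache type and helper introduced in the diffs below (shardmap.FIFOMap, DecodeConstraints, types.HashToConstraintDecoded); the wrapper function itself is illustrative:

package builder

import (
    "github.com/chainbound/shardmap"
    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/core/types"
)

// cacheConstraints merges a signed constraint received over SSE into the
// slot-indexed cache, mirroring the logic of SubscribeProposerConstraints below.
func cacheConstraints(
    cache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded],
    signed *common.SignedConstraints,
) error {
    // RLP-decode every raw transaction contained in the constraint message.
    decoded, err := DecodeConstraints(signed)
    if err != nil {
        return err
    }

    slot := signed.Message.Slot
    if cached, _ := cache.Get(slot); len(cached) > 0 {
        // Constraints were already seen for this slot: merge them by tx hash.
        for hash, constraint := range decoded {
            cached[hash] = constraint
        }
        decoded = cached
    }
    cache.Put(slot, decoded)
    return nil
}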

builder/builder.go (+233, -6)
diff --git flashbots/builder/builder/builder.go chainbound/bolt/builder/builder.go +index bcdab8fc1ec93b4f85264bc7a0ec0fe25edcc4a6..913a199edf60c3a1e514442c19948f79d665d9ba 100644 +--- flashbots/builder/builder/builder.go ++++ chainbound/bolt/builder/builder.go +@@ -1,11 +1,17 @@ + package builder +  + import ( ++ "bufio" ++ "compress/gzip" + "context" ++ "encoding/json" + "errors" + "fmt" ++ "io" + "math/big" ++ "net/http" + _ "os" ++ "strings" + "sync" + "time" +  +@@ -20,6 +26,7 @@ "github.com/attestantio/go-eth2-client/spec/bellatrix" + "github.com/attestantio/go-eth2-client/spec/capella" + "github.com/attestantio/go-eth2-client/spec/deneb" + "github.com/attestantio/go-eth2-client/spec/phase0" ++ "github.com/chainbound/shardmap" + "github.com/ethereum/go-ethereum/beacon/engine" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core" +@@ -45,6 +52,10 @@ + SubmissionOffsetFromEndOfSlotSecondsDefault = 3 * time.Second + ) +  ++const ( ++ SubscribeConstraintsPath = "/relay/v1/builder/constraints" ++) ++ + type PubkeyHex string +  + type ValidatorData struct { +@@ -55,6 +66,7 @@ } +  + type IRelay interface { + SubmitBlock(msg *builderSpec.VersionedSubmitBlockRequest, vd ValidatorData) error ++ SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, vd ValidatorData) error + GetValidatorForSlot(nextSlot uint64) (ValidatorData, error) + Config() RelayConfig + Start() error +@@ -81,6 +93,9 @@ builderPublicKey phase0.BLSPubKey + builderSigningDomain phase0.Domain + builderResubmitInterval time.Duration + discardRevertibleTxOnErr bool ++ ++ // constraintsCache is a map from slot to the decoded constraints made by proposers ++ constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded] +  + limiter *rate.Limiter + submissionOffsetFromEndOfSlot time.Duration +@@ -95,6 +110,7 @@ } +  + // BuilderArgs is a struct that contains all the arguments needed to create a new Builder + type BuilderArgs struct { ++ boltCCEndpoint string + sk *bls.SecretKey + ds flashbotsextra.IDatabaseService + blockConsumer flashbotsextra.BlockConsumer +@@ -161,6 +177,9 @@ args.submissionOffsetFromEndOfSlot = SubmissionOffsetFromEndOfSlotSecondsDefault + } +  + slotCtx, slotCtxCancel := context.WithCancel(context.Background()) ++ ++ constraintsCache := shardmap.NewFIFOMap[uint64, types.HashToConstraintDecoded](64, 16, shardmap.HashUint64) ++ + return &Builder{ + ds: args.ds, + blockConsumer: args.blockConsumer, +@@ -177,6 +196,8 @@ builderResubmitInterval: args.builderBlockResubmitInterval, + discardRevertibleTxOnErr: args.discardRevertibleTxOnErr, + submissionOffsetFromEndOfSlot: args.submissionOffsetFromEndOfSlot, +  ++ constraintsCache: constraintsCache, ++ + limiter: args.limiter, + slotCtx: slotCtx, + slotCtxCancel: slotCtxCancel, +@@ -228,7 +249,170 @@ } + } + }() +  +- return b.relay.Start() ++ if err := b.relay.Start(); err != nil { ++ return err ++ } ++ ++ return b.SubscribeProposerConstraints() ++} ++ ++// GenerateAuthenticationHeader generates an authentication string for the builder ++// to subscribe to SSE constraint events emitted by relays ++func (b *Builder) GenerateAuthenticationHeader() (string, error) { ++ // NOTE: the `slot` acts similarly to a nonce for the message to sign, to avoid replay attacks. 
++ slot := b.slotAttrs.Slot ++ message, err := json.Marshal(common.ConstraintSubscriptionAuth{PublicKey: b.builderPublicKey, Slot: slot}) ++ if err != nil { ++ log.Error(fmt.Sprintf("Failed to marshal auth message: %v", err)) ++ return "", err ++ } ++ signatureEC := bls.Sign(b.builderSecretKey, message) ++ subscriptionSignatureJSON := `"` + phase0.BLSSignature(bls.SignatureToBytes(signatureEC)[:]).String() + `"` ++ authHeader := "BOLT " + subscriptionSignatureJSON + "," + string(message) ++ return authHeader, nil ++} ++ ++// SubscribeProposerConstraints subscribes to the constraints made by Bolt proposers ++// which the builder pulls from relay(s) using SSE. ++func (b *Builder) SubscribeProposerConstraints() error { ++ // Create authentication signed message ++ authHeader, err := b.GenerateAuthenticationHeader() ++ if err != nil { ++ log.Error(fmt.Sprintf("Failed to generate authentication header: %v", err)) ++ return err ++ } ++ ++ // Check if `b.relay` is a RemoteRelayAggregator, if so we need to subscribe to ++ // the constraints made available by all the relays ++ relayAggregator, ok := b.relay.(*RemoteRelayAggregator) ++ if ok { ++ for _, relay := range relayAggregator.relays { ++ go b.subscribeToRelayForConstraints(relay.Config().Endpoint, authHeader) ++ } ++ } else { ++ go b.subscribeToRelayForConstraints(b.relay.Config().Endpoint, authHeader) ++ } ++ return nil ++} ++ ++func (b *Builder) subscribeToRelayForConstraints(relayBaseEndpoint, authHeader string) error { ++ attempts := 0 ++ maxAttempts := 60 // Max 10 minutes of retries ++ retryInterval := 10 * time.Second ++ ++ var resp *http.Response ++ ++ for { ++ log.Info("Attempting to subscribe to constraints...") ++ ++ if attempts >= maxAttempts { ++ log.Error(fmt.Sprintf("Failed to subscribe to constraints after %d attempts", maxAttempts)) ++ return errors.New("failed to subscribe to constraints") ++ } ++ ++ req, err := http.NewRequest(http.MethodGet, relayBaseEndpoint+SubscribeConstraintsPath, nil) ++ if err != nil { ++ log.Error(fmt.Sprintf("Failed to create new http request: %v", err)) ++ return err ++ } ++ req.Header.Set("Authorization", authHeader) ++ ++ client := http.Client{} ++ ++ resp, err = client.Do(req) ++ if err != nil { ++ log.Error(fmt.Sprintf("Failed to connect to SSE server: %v", err)) ++ time.Sleep(retryInterval) ++ attempts++ ++ continue ++ } ++ ++ if resp.StatusCode != http.StatusOK { ++ log.Error(fmt.Sprintf("Error subscribing to constraints via SSE: %s, %v", resp.Status, err)) ++ return err ++ } ++ break ++ } ++ ++ defer resp.Body.Close() ++ log.Info(fmt.Sprintf("Connected to SSE server: %s", relayBaseEndpoint)) ++ ++ var reader io.Reader ++ ++ // Check if the response is gzipped ++ if resp.Header.Get("Content-Encoding") == "gzip" { ++ // Decompress the response body ++ gzipReader, err := gzip.NewReader(resp.Body) ++ if err != nil { ++ return fmt.Errorf("error creating gzip reader: %v", err) ++ } ++ defer gzipReader.Close() ++ reader = gzipReader ++ } else { ++ reader = resp.Body ++ } ++ ++ bufReader := bufio.NewReader(reader) ++ for { ++ line, err := bufReader.ReadString('\n') ++ if err != nil { ++ if err == io.EOF { ++ log.Info("End of stream") ++ break ++ } ++ log.Error(fmt.Sprintf("Error reading from response body: %v", err)) ++ continue ++ } ++ ++ if !strings.HasPrefix(line, "data: ") { ++ continue ++ } ++ ++ data := strings.TrimPrefix(line, "data: ") ++ ++ // We assume the data is the JSON representation of the constraints ++ log.Info(fmt.Sprintf("Received new constraint: %s", data)) ++ constraintsSigned 
:= make(common.SignedConstraintsList, 0, 8) ++ if err := json.Unmarshal([]byte(data), &constraintsSigned); err != nil { ++ log.Warn(fmt.Sprintf("Failed to unmarshal constraints: %v", err)) ++ continue ++ } ++ ++ if len(constraintsSigned) == 0 { ++ log.Warn("Received 0 length list of constraints") ++ continue ++ } ++ ++ for _, constraint := range constraintsSigned { ++ decodedConstraints, err := DecodeConstraints(constraint) ++ if err != nil { ++ log.Error("Failed to decode constraint: ", err) ++ continue ++ } ++ ++ EmitBoltDemoEvent(fmt.Sprintf("Received constraint from relay for slot %d, stored in cache (path: %s)", constraint.Message.Slot, SubscribeConstraintsPath)) ++ ++ // For every constraint, we need to check if it has already been seen for the associated slot ++ slotConstraints, _ := b.constraintsCache.Get(constraint.Message.Slot) ++ if len(slotConstraints) == 0 { ++ // New constraint for this slot, add it in the map and continue with the next constraint ++ b.constraintsCache.Put(constraint.Message.Slot, decodedConstraints) ++ continue ++ } ++ ++ for hash := range decodedConstraints { ++ // Update the slot constraints ++ slotConstraints[hash] = decodedConstraints[hash] ++ } ++ ++ // Update the slot constraints in the cache ++ b.constraintsCache.Put(constraint.Message.Slot, slotConstraints) ++ ++ } ++ ++ } ++ ++ return nil + } +  + func (b *Builder) Stop() error { +@@ -236,6 +420,7 @@ close(b.stop) + return nil + } +  ++// BOLT: modify to calculate merkle inclusion proofs for preconfirmed transactions + func (b *Builder) onSealedBlock(opts SubmitBlockOpts) error { + executableData := engine.BlockToExecutableData(opts.Block, opts.BlockValue, opts.BlobSidecars) + var dataVersion spec.DataVersion +@@ -272,6 +457,35 @@ log.Error("could not get block request", "err", err) + return err + } +  ++ var versionedBlockRequestWithPreconfsProofs *common.VersionedSubmitBlockRequestWithProofs ++ ++ // BOLT: fetch constraints from the cache, which is automatically updated by the SSE subscription ++ constraints, _ := b.constraintsCache.Get(opts.PayloadAttributes.Slot) ++ log.Info(fmt.Sprintf("[BOLT]: Found %d constraints for slot %d", len(constraints), opts.PayloadAttributes.Slot)) ++ ++ if len(constraints) > 0 { ++ message := fmt.Sprintf("sealing block %d with %d constraints", opts.Block.Number(), len(constraints)) ++ log.Info(message) ++ EmitBoltDemoEvent(message) ++ ++ timeStart := time.Now() ++ inclusionProof, _, err := CalculateMerkleMultiProofs(opts.Block.Transactions(), constraints) ++ timeForProofs := time.Since(timeStart) ++ ++ if err != nil { ++ log.Error("[BOLT]: could not calculate merkle multiproofs", "err", err) ++ return err ++ } ++ ++ // BOLT: send event to web demo ++ EmitBoltDemoEvent(fmt.Sprintf("created merkle multiproof of %d constraint(s) for block %d in %v", len(constraints), opts.Block.Number(), timeForProofs)) ++ ++ versionedBlockRequestWithPreconfsProofs = &common.VersionedSubmitBlockRequestWithProofs{ ++ Inner: versionedBlockRequest, ++ Proofs: inclusionProof, ++ } ++ } ++ + if b.dryRun { + switch dataVersion { + case spec.DataVersionBellatrix: +@@ -285,16 +499,23 @@ if err != nil { + log.Error("could not validate block", "version", dataVersion.String(), "err", err) + } + } else { ++ // NOTE: we can ignore preconfs for `processBuiltBlock` + go b.processBuiltBlock(opts.Block, opts.BlockValue, opts.OrdersClosedAt, opts.SealedAt, opts.CommitedBundles, opts.AllBundles, opts.UsedSbundles, &blockBidMsg) +- err = b.relay.SubmitBlock(versionedBlockRequest, opts.ValidatorData) ++ if 
versionedBlockRequestWithPreconfsProofs != nil { ++ log.Info(fmt.Sprintf("[BOLT]: Sending sealed block to relay %s", versionedBlockRequestWithPreconfsProofs)) ++ err = b.relay.SubmitBlockWithProofs(versionedBlockRequestWithPreconfsProofs, opts.ValidatorData) ++ } else if len(constraints) == 0 { ++ // If versionedBlockRequestWithPreconfsProofs is nil and no constraints, then we don't have proofs to send ++ err = b.relay.SubmitBlock(versionedBlockRequest, opts.ValidatorData) ++ } else { ++ log.Warn(fmt.Sprintf("[BOLT]: Could not send sealed block this time because we have %d constraints but no proofs", len(constraints))) ++ return nil ++ } + if err != nil { + log.Error("could not submit block", "err", err, "verion", dataVersion, "#commitedBundles", len(opts.CommitedBundles)) + return err + } + } +- +- log.Info("submitted block", "version", dataVersion.String(), "slot", opts.PayloadAttributes.Slot, "value", opts.BlockValue.String(), "parent", opts.Block.ParentHash().String(), +- "hash", opts.Block.Hash(), "#commitedBundles", len(opts.CommitedBundles)) +  + return nil + } +@@ -363,6 +584,7 @@ log.Info("successfully relayed block data to consumer") + } + } +  ++// Called when a new payload event is received from the beacon client SSE + func (b *Builder) OnPayloadAttribute(attrs *types.BuilderPayloadAttributes) error { + if attrs == nil { + return nil +@@ -407,6 +629,8 @@ b.slotAttrs = *attrs + b.slotCtx = slotCtx + b.slotCtxCancel = slotCtxCancel +  ++ log.Info("[BOLT]: Inside onPayloadAttribute", "slot", attrs.Slot, "parent", attrs.HeadHash, "payloadTimestamp", uint64(attrs.Timestamp)) ++ + go b.runBuildingJob(b.slotCtx, proposerPubkey, vd, attrs) + return nil + } +@@ -422,6 +646,9 @@ allBundles []types.SimulatedBundle + usedSbundles []types.UsedSBundle + } +  ++// Continuously makes a request to the miner module with the correct params and submits the best produced block. ++// on average 1 attempt per second is made. ++// - Submissions to the relay are rate limited to 2 req/s + func (b *Builder) runBuildingJob(slotCtx context.Context, proposerPubkey phase0.BLSPubKey, vd ValidatorData, attrs *types.BuilderPayloadAttributes) { + ctx, cancel := context.WithTimeout(slotCtx, 12*time.Second) + defer cancel() +@@ -515,7 +742,7 @@ log.Debug("retrying BuildBlock", + "slot", attrs.Slot, + "parent", attrs.HeadHash, + "resubmit-interval", b.builderResubmitInterval.String()) +- err := b.eth.BuildBlock(attrs, blockHook) ++ err := b.eth.BuildBlock(attrs, blockHook, b.constraintsCache) + if err != nil { + log.Warn("Failed to build block", "err", err) + }
builder/builder_test.go (+397, -0)
diff --git flashbots/builder/builder/builder_test.go chainbound/bolt/builder/builder_test.go +index d8a698c4cf7172d0710fd5010d2587206ebd9374..6c1948183e24b999788017af20df1c3a567b464f 100644 +--- flashbots/builder/builder/builder_test.go ++++ chainbound/bolt/builder/builder_test.go +@@ -1,7 +1,12 @@ + package builder +  + import ( ++ "encoding/hex" ++ "encoding/json" ++ "fmt" + "math/big" ++ "net/http" ++ "strings" + "testing" + "time" +  +@@ -14,10 +19,13 @@ "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/core" + "github.com/ethereum/go-ethereum/core/types" + "github.com/ethereum/go-ethereum/flashbotsextra" ++ "github.com/ethereum/go-ethereum/log" + "github.com/flashbots/go-boost-utils/bls" + "github.com/flashbots/go-boost-utils/ssz" + "github.com/flashbots/go-boost-utils/utils" ++ "github.com/gorilla/handlers" + "github.com/holiman/uint256" ++ "github.com/pkg/errors" + "github.com/stretchr/testify/require" + ) +  +@@ -170,3 +178,392 @@ + time.Sleep(2200 * time.Millisecond) + require.NotNil(t, testRelay.submittedMsg) + } ++ ++func TestBlockWithPreconfs(t *testing.T) { ++ const ( ++ validatorDesiredGasLimit = 30_000_000 ++ payloadAttributeGasLimit = 30_000_000 // Was zero in the other test ++ parentBlockGasLimit = 29_000_000 ++ ) ++ expectedGasLimit := core.CalcGasLimit(parentBlockGasLimit, validatorDesiredGasLimit) ++ ++ vsk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x370bb8c1a6e62b2882f6ec76762a67b39609002076b95aae5b023997cf9b2dc9")) ++ require.NoError(t, err) ++ validator := &ValidatorPrivateData{ ++ sk: vsk, ++ Pk: hexutil.MustDecode("0xb67d2c11bcab8c4394fc2faa9601d0b99c7f4b37e14911101da7d97077917862eed4563203d34b91b5cf0aa44d6cfa05"), ++ } ++ ++ testBeacon := testBeaconClient{ ++ validator: validator, ++ slot: 56, ++ } ++ ++ feeRecipient, _ := utils.HexToAddress("0xabcf8e0d4e9587369b2301d0790347320302cc00") ++ testRelay := testRelay{ ++ gvsVd: ValidatorData{ ++ Pubkey: PubkeyHex(testBeacon.validator.Pk.String()), ++ FeeRecipient: feeRecipient, ++ GasLimit: validatorDesiredGasLimit, ++ }, ++ } ++ ++ sk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x31ee185dad1220a8c88ca5275e64cf5a5cb09cb621cb30df52c9bee8fbaaf8d7")) ++ require.NoError(t, err) ++ ++ bDomain := ssz.ComputeDomain(ssz.DomainTypeAppBuilder, [4]byte{0x02, 0x0, 0x0, 0x0}, phase0.Root{}) ++ ++ // https://etherscan.io/tx/0x9d48b4a021898a605b7ae49bf93ad88fa6bd7050e9448f12dde064c10f22fe9c ++ // 0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e ++ preconfTxByte, _ := hex.DecodeString("02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e") ++ preconfTx := new(types.Transaction) ++ err = preconfTx.UnmarshalBinary(preconfTxByte) ++ require.NoError(t, err) ++ ++ // https://etherscan.io/tx/0x15bd881daa1408b33f67fa4bdeb8acfb0a2289d9b4c6f81eef9bb2bb2e52e780 - Blob Tx ++ // 
0x03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe ++ preconfTxWithBlobByte, _ := hex.DecodeString("03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe") ++ preconfTxWithBlob := new(types.Transaction) ++ err = preconfTxWithBlob.UnmarshalBinary(preconfTxWithBlobByte) ++ require.NoError(t, err) ++ ++ testExecutableData := &engine.ExecutableData{ ++ ParentHash: common.Hash{0x02, 0x03}, ++ FeeRecipient: common.Address(feeRecipient), ++ StateRoot: common.Hash{0x07, 0x16}, ++ ReceiptsRoot: common.Hash{0x08, 0x20}, ++ LogsBloom: types.Bloom{}.Bytes(), ++ Number: uint64(10), ++ GasLimit: expectedGasLimit, ++ GasUsed: uint64(100), ++ Timestamp: uint64(105), ++ ExtraData: hexutil.MustDecode("0x0042fafc"), ++ ++ BaseFeePerGas: big.NewInt(16), ++ ++ BlockHash: common.HexToHash("3cce5d0f5c9a7e188e79c35168256e91bec2d98a1140f6701da6ed3c98ea9d04"), ++ Transactions: [][]byte{preconfTxByte, preconfTxWithBlobByte}, ++ } ++ ++ testBlock, err := 
engine.ExecutableDataToBlock(*testExecutableData, preconfTxWithBlob.BlobHashes(), nil) ++ require.NoError(t, err) ++ ++ testPayloadAttributes := &types.BuilderPayloadAttributes{ ++ Timestamp: hexutil.Uint64(104), ++ Random: common.Hash{0x05, 0x10}, ++ SuggestedFeeRecipient: common.Address{0x04, 0x10}, ++ GasLimit: uint64(payloadAttributeGasLimit), ++ Slot: uint64(25), ++ } ++ ++ testEthService := &testEthereumService{synced: true, testExecutableData: testExecutableData, testBlock: testBlock, testBlockValue: big.NewInt(10)} ++ builderArgs := BuilderArgs{ ++ sk: sk, ++ ds: flashbotsextra.NilDbService{}, ++ relay: &testRelay, ++ builderSigningDomain: bDomain, ++ eth: testEthService, ++ dryRun: false, ++ ignoreLatePayloadAttributes: false, ++ validator: nil, ++ beaconClient: &testBeacon, ++ limiter: nil, ++ blockConsumer: flashbotsextra.NilDbService{}, ++ } ++ builder, err := NewBuilder(builderArgs) ++ require.NoError(t, err) ++ ++ builder.Start() ++ defer builder.Stop() ++ ++ // Add the transaction to the cache directly ++ builder.constraintsCache.Put(25, map[common.Hash]*types.ConstraintDecoded{ ++ preconfTx.Hash(): { ++ Tx: preconfTx, ++ }, ++ preconfTxWithBlob.Hash(): { ++ Tx: preconfTxWithBlob, ++ }, ++ }) ++ ++ err = builder.OnPayloadAttribute(testPayloadAttributes) ++ require.NoError(t, err) ++ time.Sleep(time.Second * 3) ++ ++ require.NotNil(t, testRelay.submittedMsgWithPreconf) ++ ++ expectedProposerPubkey, err := utils.HexToPubkey(testBeacon.validator.Pk.String()) ++ require.NoError(t, err) ++ ++ expectedMessage := builderApiV1.BidTrace{ ++ Slot: uint64(25), ++ ParentHash: phase0.Hash32{0x02, 0x03}, ++ BuilderPubkey: builder.builderPublicKey, ++ ProposerPubkey: expectedProposerPubkey, ++ ProposerFeeRecipient: feeRecipient, ++ GasLimit: expectedGasLimit, ++ GasUsed: uint64(100), ++ Value: &uint256.Int{0x0a}, ++ } ++ copy(expectedMessage.BlockHash[:], hexutil.MustDecode("0x3cce5d0f5c9a7e188e79c35168256e91bec2d98a1140f6701da6ed3c98ea9d04")[:]) ++ require.NotNil(t, testRelay.submittedMsgWithPreconf.Inner.Bellatrix) ++ require.Equal(t, expectedMessage, *testRelay.submittedMsgWithPreconf.Inner.Bellatrix.Message) ++ ++ expectedExecutionPayload := bellatrix.ExecutionPayload{ ++ ParentHash: [32]byte(testExecutableData.ParentHash), ++ FeeRecipient: feeRecipient, ++ StateRoot: [32]byte(testExecutableData.StateRoot), ++ ReceiptsRoot: [32]byte(testExecutableData.ReceiptsRoot), ++ LogsBloom: [256]byte{}, ++ PrevRandao: [32]byte(testExecutableData.Random), ++ BlockNumber: testExecutableData.Number, ++ GasLimit: testExecutableData.GasLimit, ++ GasUsed: testExecutableData.GasUsed, ++ Timestamp: testExecutableData.Timestamp, ++ ExtraData: hexutil.MustDecode("0x0042fafc"), ++ BaseFeePerGas: [32]byte{0x10}, ++ BlockHash: expectedMessage.BlockHash, ++ Transactions: []bellatrix.Transaction{preconfTxByte, preconfTxWithBlobByte}, ++ } ++ ++ require.Equal(t, expectedExecutionPayload, *testRelay.submittedMsgWithPreconf.Inner.Bellatrix.ExecutionPayload) ++ ++ expectedSignature, err := utils.HexToSignature("0x97db0496dcfd04ed444b87b6fc1c9e3339a0d35f7c01825ac353812601a72e7e35ef94899a9b03f4d23102214701255805efd0f6552073791ea1c3e10003ae435952f8305f6b89e58d4442ced149d3c33a486f5a390b4b8047e6ea4176059755") ++ ++ require.NoError(t, err) ++ require.Equal(t, expectedSignature, testRelay.submittedMsgWithPreconf.Inner.Bellatrix.Signature) ++ ++ require.Equal(t, uint64(25), testRelay.requestedSlot) ++ ++ // Clear the submitted message and check that the job will be ran again and but a new message will not be submitted 
since the hash is the same ++ testEthService.testBlockValue = big.NewInt(10) ++ ++ testRelay.submittedMsgWithPreconf = nil ++ time.Sleep(2200 * time.Millisecond) ++ require.Nil(t, testRelay.submittedMsgWithPreconf) ++ ++ // Change the hash, expect to get the block ++ testExecutableData.ExtraData = hexutil.MustDecode("0x0042fafd") ++ testExecutableData.BlockHash = common.HexToHash("0x38456f6f1f5e76cf83c89ebb8606ff2b700bf02a86a165316c6d7a0c4e6a8614") ++ testBlock, err = engine.ExecutableDataToBlock(*testExecutableData, preconfTxWithBlob.BlobHashes(), nil) ++ testEthService.testBlockValue = big.NewInt(10) ++ require.NoError(t, err) ++ testEthService.testBlock = testBlock ++ ++ time.Sleep(2200 * time.Millisecond) ++ require.NotNil(t, testRelay.submittedMsgWithPreconf) ++} ++ ++func TestSubscribeProposerConstraints(t *testing.T) { ++ // ------------ Start Builder setup ------------- // ++ const ( ++ validatorDesiredGasLimit = 30_000_000 ++ payloadAttributeGasLimit = 0 ++ parentBlockGasLimit = 29_000_000 ++ ) ++ expectedGasLimit := core.CalcGasLimit(parentBlockGasLimit, validatorDesiredGasLimit) ++ ++ vsk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x370bb8c1a6e62b2882f6ec76762a67b39609002076b95aae5b023997cf9b2dc9")) ++ require.NoError(t, err) ++ validator := &ValidatorPrivateData{ ++ sk: vsk, ++ Pk: hexutil.MustDecode("0xb67d2c11bcab8c4394fc2faa9601d0b99c7f4b37e14911101da7d97077917862eed4563203d34b91b5cf0aa44d6cfa05"), ++ } ++ ++ testBeacon := testBeaconClient{ ++ validator: validator, ++ slot: 56, ++ } ++ ++ feeRecipient, _ := utils.HexToAddress("0xabcf8e0d4e9587369b2301d0790347320302cc00") ++ ++ relayPort := "31245" ++ relay := NewRemoteRelay(RelayConfig{Endpoint: "http://localhost:" + relayPort}, nil, true) ++ ++ sk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x31ee185dad1220a8c88ca5275e64cf5a5cb09cb621cb30df52c9bee8fbaaf8d7")) ++ require.NoError(t, err) ++ ++ bDomain := ssz.ComputeDomain(ssz.DomainTypeAppBuilder, [4]byte{0x02, 0x0, 0x0, 0x0}, phase0.Root{}) ++ ++ testExecutableData := &engine.ExecutableData{ ++ ParentHash: common.Hash{0x02, 0x03}, ++ FeeRecipient: common.Address(feeRecipient), ++ StateRoot: common.Hash{0x07, 0x16}, ++ ReceiptsRoot: common.Hash{0x08, 0x20}, ++ LogsBloom: types.Bloom{}.Bytes(), ++ Number: uint64(10), ++ GasLimit: expectedGasLimit, ++ GasUsed: uint64(100), ++ Timestamp: uint64(105), ++ ExtraData: hexutil.MustDecode("0x0042fafc"), ++ ++ BaseFeePerGas: big.NewInt(16), ++ ++ BlockHash: common.HexToHash("0x68e516c8827b589fcb749a9e672aa16b9643437459508c467f66a9ed1de66a6c"), ++ Transactions: [][]byte{}, ++ } ++ ++ testBlock, err := engine.ExecutableDataToBlock(*testExecutableData, nil, nil) ++ require.NoError(t, err) ++ ++ testEthService := &testEthereumService{synced: true, testExecutableData: testExecutableData, testBlock: testBlock, testBlockValue: big.NewInt(10)} ++ ++ builderArgs := BuilderArgs{ ++ sk: sk, ++ ds: flashbotsextra.NilDbService{}, ++ relay: relay, ++ builderSigningDomain: bDomain, ++ eth: testEthService, ++ dryRun: false, ++ ignoreLatePayloadAttributes: false, ++ validator: nil, ++ beaconClient: &testBeacon, ++ limiter: nil, ++ blockConsumer: flashbotsextra.NilDbService{}, ++ } ++ ++ builder, err := NewBuilder(builderArgs) ++ require.NoError(t, err) ++ ++ // ------------ End Builder setup ------------- // ++ ++ // Attach the sseHandler to the relay port ++ mux := http.NewServeMux() ++ mux.HandleFunc(SubscribeConstraintsPath, sseConstraintsHandler) ++ ++ // Wrap the mux with the GzipHandler middleware ++ // NOTE: In this case, we don't 
need to create a gzip writer in the handlers, ++ // by default the `http.ResponseWriter` will implement gzip compression ++ gzipMux := handlers.CompressHandler(mux) ++ ++ http.HandleFunc(SubscribeConstraintsPath, sseConstraintsHandler) ++ go http.ListenAndServe(":"+relayPort, gzipMux) ++ ++ // Constraints should not be available yet ++ _, ok := builder.constraintsCache.Get(0) ++ require.Equal(t, false, ok) ++ ++ // Create authentication signed message ++ authHeader, err := builder.GenerateAuthenticationHeader() ++ require.NoError(t, err) ++ builder.subscribeToRelayForConstraints(builder.relay.Config().Endpoint, authHeader) ++ // Wait 2 seconds to save all constraints in cache ++ time.Sleep(2 * time.Second) ++ ++ slots := []uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9} ++ for _, slot := range slots { ++ cachedConstraints, ok := builder.constraintsCache.Get(slot) ++ require.Equal(t, true, ok) ++ ++ expectedConstraint := generateMockConstraintsForSlot(slot)[0] ++ decodedConstraint, err := DecodeConstraints(expectedConstraint) ++ require.NoError(t, err) ++ ++ // Compare the keys of the cachedConstraints and decodedConstraint maps ++ require.Equal(t, len(cachedConstraints), len(decodedConstraint), "The number of keys in both maps should be the same") ++ for key := range cachedConstraints { ++ _, ok := decodedConstraint[key] ++ require.True(t, ok, fmt.Sprintf("Key %s found in cachedConstraints but not in decodedConstraint", key.String())) ++ require.Equal(t, cachedConstraints[key].Tx.Data(), decodedConstraint[key].Tx.Data(), "The decodedConstraint Tx should be equal to the cachedConstraints Tx") ++ } ++ for key := range decodedConstraint { ++ _, ok := cachedConstraints[key] ++ require.True(t, ok, fmt.Sprintf("Key %s found in decodedConstraint but not in cachedConstraints", key.String())) ++ } ++ } ++} ++ ++func sseConstraintsHandler(w http.ResponseWriter, r *http.Request) { ++ w.Header().Set("Content-Type", "text/event-stream") ++ w.Header().Set("Cache-Control", "no-cache") ++ w.Header().Set("Connection", "keep-alive") ++ w.Header().Set("Content-Encoding", "gzip") ++ ++ flusher, ok := w.(http.Flusher) ++ if !ok { ++ http.Error(w, "Streaming unsupported!", http.StatusInternalServerError) ++ return ++ } ++ ++ auth := r.Header.Get("Authorization") ++ _, err := validateConstraintSubscriptionAuth(auth, 0) ++ if err != nil { ++ http.Error(w, err.Error(), http.StatusUnauthorized) ++ return ++ } ++ ++ for i := 0; i < 256; i++ { ++ // Generate some duplicated constraints ++ slot := uint64(i) % 32 ++ constraints := generateMockConstraintsForSlot(slot) ++ bytes, err := json.Marshal(constraints) ++ if err != nil { ++ log.Error(fmt.Sprintf("Error while marshaling constraints: %v", err)) ++ return ++ } ++ fmt.Fprintf(w, "data: %s\n\n", string(bytes)) ++ flusher.Flush() ++ } ++} ++ ++// generateMockConstraintsForSlot generates a list of constraints for a given slot ++func generateMockConstraintsForSlot(slot uint64) common.SignedConstraintsList { ++ rawTx := new(common.HexBytes) ++ err := rawTx.UnmarshalJSON([]byte("\"0x02f876018305da308401312d0085041f1196d2825208940c598786c88883ff5e4f461750fad64d3fae54268804b7ec32d7a2000080c080a0086f02eacec72820be3b117e1edd5bd7ed8956964b28b2d903d2cba53dd13560a06d61ec9ccce6acb31bf21878b9a844e7fdac860c5b7d684f7eb5f38a5945357c\"")) ++ if err != nil { ++ fmt.Println("Failed to unmarshal rawTx: ", err) ++ } ++ ++ return common.SignedConstraintsList{ ++ &common.SignedConstraints{ ++ Message: common.ConstraintMessage{ ++ Constraints: []*common.Constraint{{Tx: *rawTx}}, ValidatorIndex: 0, 
Slot: slot, ++ }, Signature: phase0.BLSSignature{}, ++ }, ++ } ++} ++ ++// validateConstraintSubscriptionAuth checks the authentication string data from the Builder, ++// and returns its BLS public key if the authentication is valid. ++func validateConstraintSubscriptionAuth(auth string, headSlot uint64) (phase0.BLSPubKey, error) { ++ zeroKey := phase0.BLSPubKey{} ++ if auth == "" { ++ return zeroKey, errors.New("authorization header missing") ++ } ++ // Authorization: <auth-scheme> <authorization-parameters> ++ parts := strings.Split(auth, " ") ++ if len(parts) != 2 { ++ return zeroKey, errors.New("ill-formed authorization header") ++ } ++ if parts[0] != "BOLT" { ++ return zeroKey, errors.New("not BOLT authentication scheme") ++ } ++ // <signatureJSON>,<authDataJSON> ++ parts = strings.SplitN(parts[1], ",", 2) ++ if len(parts) != 2 { ++ return zeroKey, errors.New("ill-formed authorization header") ++ } ++ ++ signature := new(phase0.BLSSignature) ++ if err := signature.UnmarshalJSON([]byte(parts[0])); err != nil { ++ fmt.Println("Failed to unmarshal authData: ", err) ++ return zeroKey, errors.New("ill-formed authorization header") ++ } ++ ++ authDataRaw := []byte(parts[1]) ++ authData := new(common.ConstraintSubscriptionAuth) ++ if err := json.Unmarshal(authDataRaw, authData); err != nil { ++ fmt.Println("Failed to unmarshal authData: ", err) ++ return zeroKey, errors.New("ill-formed authorization header") ++ } ++ ++ if headSlot != authData.Slot { ++ return zeroKey, errors.New("invalid head slot") ++ } ++ ++ ok, err := bls.VerifySignatureBytes(authDataRaw, signature[:], authData.PublicKey[:]) ++ if err != nil || !ok { ++ return zeroKey, errors.New("invalid signature") ++ } ++ return authData.PublicKey, nil ++}
builder/local_relay.go (+5, -0)
diff --git flashbots/builder/builder/local_relay.go chainbound/bolt/builder/local_relay.go +index 5a503a5c2b8c7e4751c09465b9e4cf2e4c43a44c..ea4c8c46fe88745a2747d5c56f335dbc78c1e553 100644 +--- flashbots/builder/builder/local_relay.go ++++ chainbound/bolt/builder/local_relay.go +@@ -21,6 +21,7 @@ "github.com/attestantio/go-eth2-client/spec" + "github.com/attestantio/go-eth2-client/spec/bellatrix" + "github.com/attestantio/go-eth2-client/spec/phase0" + eth2UtilBellatrix "github.com/attestantio/go-eth2-client/util/bellatrix" ++ "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/log" + "github.com/flashbots/go-boost-utils/bls" +@@ -114,6 +115,10 @@ + func (r *LocalRelay) SubmitBlock(msg *builderSpec.VersionedSubmitBlockRequest, _ ValidatorData) error { + log.Info("submitting block to local relay", "block", msg.Bellatrix.ExecutionPayload.BlockHash.String()) + return r.submitBlock(msg.Bellatrix) ++} ++ ++func (r *LocalRelay) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, _ ValidatorData) error { ++ panic("Not implemented!") + } +  + func (r *LocalRelay) Config() RelayConfig {
builder/relay.go (+44, -0)
diff --git flashbots/builder/builder/relay.go chainbound/bolt/builder/relay.go +index 579fe14d7f746aa597bdd90351f012e45372fe0d..ef002f6b1de92e0396ff42ee9c005f69cf9493f4 100644 +--- flashbots/builder/builder/relay.go ++++ chainbound/bolt/builder/relay.go +@@ -11,6 +11,7 @@ "time" +  + builderSpec "github.com/attestantio/go-builder-client/spec" + "github.com/attestantio/go-eth2-client/spec" ++ "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/log" + "github.com/flashbots/go-boost-utils/utils" + ) +@@ -178,6 +179,49 @@ return fmt.Errorf("error sending http request to relay %s. err: %w", r.config.Endpoint, err) + } + if code > 299 { + return fmt.Errorf("non-ok response code %d from relay %s", code, r.config.Endpoint) ++ } ++ ++ return nil ++} ++ ++func (r *RemoteRelay) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, _ ValidatorData) error { ++ log.Info("submitting block with proofs to remote relay", "endpoint", r.config.Endpoint) ++ endpoint := r.config.Endpoint + "/relay/v1/builder/blocks_with_proofs" ++ if r.cancellationsEnabled { ++ endpoint = endpoint + "?cancellations=1" ++ } ++ ++ var code int ++ var err error ++ if r.config.SszEnabled { ++ panic("ssz not supported for preconfs proofs yet") ++ } else { ++ ++ // BOLT: send event to web demo ++ if len(msg.Proofs.TransactionHashes) > 0 { ++ number, _ := msg.Inner.BlockNumber() ++ message := fmt.Sprintf("sending block %d with proofs to relay (path: %s)", number, "/relay/v1/builder/blocks_with_proofs") ++ log.Info(message) ++ EmitBoltDemoEvent(message) ++ } ++ ++ switch msg.Inner.Version { ++ case spec.DataVersionBellatrix: ++ code, err = SendHTTPRequest(context.TODO(), *http.DefaultClient, http.MethodPost, endpoint, msg, nil) ++ case spec.DataVersionCapella: ++ code, err = SendHTTPRequest(context.TODO(), *http.DefaultClient, http.MethodPost, endpoint, msg, nil) ++ case spec.DataVersionDeneb: ++ code, err = SendHTTPRequest(context.TODO(), *http.DefaultClient, http.MethodPost, endpoint, msg, nil) ++ default: ++ return fmt.Errorf("unknown data version %d", msg.Inner.Version) ++ } ++ } ++ ++ if err != nil { ++ return fmt.Errorf("error sending http request block with proofs to relay %s. err: %w", r.config.Endpoint, err) ++ } ++ if code > 299 { ++ return fmt.Errorf("non-ok response code %d from relay for block with proofs %s", code, r.config.Endpoint) + } +  + return nil
builder/relay_aggregator.go (+21, -0)
diff --git flashbots/builder/builder/relay_aggregator.go chainbound/bolt/builder/relay_aggregator.go +index c39784453acc265fe5a345b97682b8fc4a728707..4655ebe1acc90a034ffeb1e193ba579d054eb3e1 100644 +--- flashbots/builder/builder/relay_aggregator.go ++++ chainbound/bolt/builder/relay_aggregator.go +@@ -6,6 +6,7 @@ "fmt" + "sync" +  + builderSpec "github.com/attestantio/go-builder-client/spec" ++ "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/log" + ) +  +@@ -53,6 +54,26 @@ go func(relay IRelay) { + err := relay.SubmitBlock(msg, registration) + if err != nil { + log.Error("could not submit block", "err", err) ++ } ++ }(relay) ++ } ++ ++ return nil ++} ++ ++func (r *RemoteRelayAggregator) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, registration ValidatorData) error { ++ r.registrationsCacheLock.RLock() ++ defer r.registrationsCacheLock.RUnlock() ++ ++ relays, found := r.registrationsCache[registration] ++ if !found { ++ return fmt.Errorf("no relays for registration %s", registration.Pubkey) ++ } ++ for _, relay := range relays { ++ go func(relay IRelay) { ++ err := relay.SubmitBlockWithProofs(msg, registration) ++ if err != nil { ++ log.Error("could not submit block with proofs", "err", err) + } + }(relay) + }
builder/relay_aggregator_test.go (+17, -3)
diff --git flashbots/builder/builder/relay_aggregator_test.go chainbound/bolt/builder/relay_aggregator_test.go +index b727f52c577514214ba3413582d0a8b97604e6d9..d6eebed98460539f692b078ef3bfcfe1001c88d3 100644 +--- flashbots/builder/builder/relay_aggregator_test.go ++++ chainbound/bolt/builder/relay_aggregator_test.go +@@ -8,6 +8,7 @@ + builderApiBellatrix "github.com/attestantio/go-builder-client/api/bellatrix" + builderSpec "github.com/attestantio/go-builder-client/spec" + "github.com/attestantio/go-eth2-client/spec" ++ "github.com/ethereum/go-ethereum/common" + "github.com/stretchr/testify/require" + ) +  +@@ -22,9 +23,11 @@ sbError error + gvsVd ValidatorData + gvsErr error +  +- requestedSlot uint64 +- submittedMsg *builderSpec.VersionedSubmitBlockRequest +- submittedMsgCh chan *builderSpec.VersionedSubmitBlockRequest ++ requestedSlot uint64 ++ submittedMsg *builderSpec.VersionedSubmitBlockRequest ++ submittedMsgWithPreconf *common.VersionedSubmitBlockRequestWithProofs ++ submittedMsgCh chan *builderSpec.VersionedSubmitBlockRequest ++ submittedMsgWithPreconfCh chan *common.VersionedSubmitBlockRequestWithProofs + } +  + type testRelayAggBackend struct { +@@ -53,6 +56,17 @@ default: + } + } + r.submittedMsg = msg ++ return r.sbError ++} ++ ++func (r *testRelay) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, vd ValidatorData) error { ++ if r.submittedMsgWithPreconfCh != nil { ++ select { ++ case r.submittedMsgWithPreconfCh <- msg: ++ default: ++ } ++ } ++ r.submittedMsgWithPreconf = msg + return r.sbError + } +

We added logic to create and verify merkle inclusion proofs based on the SSZ Transactions beacon container.
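
The proofs are SSZ multiproofs over the hash tree of the block's transaction list. Here is a rough sketch of how one can be verified, mirroring what utils_test.go does below; the wrapper function itself is illustrative:

package builder

import (
    "github.com/ethereum/go-ethereum/common"
    fastSsz "github.com/ferranbt/fastssz"
)

// verifyInclusionProof checks a multiproof against the SSZ root of the payload's
// transaction list, the same way utils_test.go does below. Each leaf is the hash
// tree root of a raw preconfirmed transaction (see the Transaction wrapper in
// transaction_ssz.go).
func verifyInclusionProof(transactionsRoot [32]byte, proof *common.InclusionProof, leaves [][]byte) (bool, error) {
    hashes := make([][]byte, len(proof.MerkleHashes))
    for i, hash := range proof.MerkleHashes {
        hashes[i] = []byte(*hash)
    }
    indexes := make([]int, len(proof.GeneralizedIndexes))
    for i, index := range proof.GeneralizedIndexes {
        indexes[i] = int(index)
    }
    return fastSsz.VerifyMultiproof(transactionsRoot[:], hashes, leaves, indexes)
}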

builder/transaction_ssz.go (new) (+52, -0)
diff --git flashbots/builder/builder/transaction_ssz.go chainbound/bolt/builder/transaction_ssz.go +new file mode 100644 +index 0000000000000000000000000000000000000000..015be2fad16e557e17c18c01cf7471c1af0f9e63 +--- /dev/null ++++ chainbound/bolt/builder/transaction_ssz.go +@@ -0,0 +1,52 @@ ++package builder ++ ++import ( ++ ssz "github.com/ferranbt/fastssz" ++) ++ ++// The maximum length in bytes of a raw RLP-encoded transaction ++var MAX_BYTES_PER_TRANSACTION uint64 = 1_073_741_824 // 2**30 ++ ++// Transaction is a wrapper type of byte slice to implement the ssz.HashRoot interface ++type Transaction []byte ++ ++// HashTreeRoot calculates the hash tree root of the transaction, which ++// is a list of basic types (byte). ++// ++// Reference: https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization ++func (tx *Transaction) HashTreeRoot() ([32]byte, error) { ++ hasher := ssz.NewHasher() ++ tx.HashTreeRootWith(hasher) ++ root, err := hasher.HashRoot() ++ ++ return root, err ++} ++ ++func (tx *Transaction) HashTreeRootWith(hh ssz.HashWalker) error { ++ var err error ++ byteLen := uint64(len(*tx)) ++ ++ if byteLen > MAX_BYTES_PER_TRANSACTION { ++ err = ssz.ErrIncorrectListSize ++ return err ++ } ++ ++ // Load the bytes of the transaction into the hasher ++ hh.AppendBytes32(*tx) ++ // Perform `mix_in_length(merkleize(pack(value), limit=chunk_count(type)), len(value))` ++ // Reference: https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization ++ // ++ // The `indx` parameters is set to `0` as we need to consider the whole hh.buf buffer for this. ++ // In an implementation of more complex types, this parameter would be used to indicate the starting ++ // index of the buffer to be merkleized. It is used a single buffer to do everything for ++ // optimization purposes. ++ hh.MerkleizeWithMixin(0, byteLen, (1073741824+31)/32) ++ ++ return nil ++} ++ ++func (tx *Transaction) GetTree() (*ssz.Node, error) { ++ w := &ssz.Wrapper{} ++ tx.HashTreeRootWith(w) ++ return w.Node(), nil ++}
builder/utils.go (+104, -0)
diff --git flashbots/builder/builder/utils.go chainbound/bolt/builder/utils.go +index 284285cf4e82a5cd7e343033ebebb2887e1e3e72..59bd040df90a3ab40491073e9a8c48f6829d7179 100644 +--- flashbots/builder/builder/utils.go ++++ chainbound/bolt/builder/utils.go +@@ -8,10 +8,33 @@ "encoding/json" + "errors" + "fmt" + "io" ++ "math" + "net/http" ++ "slices" ++ "strings" ++ "time" ++ ++ "github.com/attestantio/go-eth2-client/spec/bellatrix" ++ utilbellatrix "github.com/attestantio/go-eth2-client/util/bellatrix" ++ "github.com/ethereum/go-ethereum/common" ++ "github.com/ethereum/go-ethereum/core/types" ++ "github.com/ethereum/go-ethereum/log" ++ ssz "github.com/ferranbt/fastssz" + ) +  + var errHTTPErrorResponse = errors.New("HTTP error response") ++ ++func DecodeConstraints(constraints *common.SignedConstraints) (types.HashToConstraintDecoded, error) { ++ decodedConstraints := make(types.HashToConstraintDecoded) ++ for _, tx := range constraints.Message.Constraints { ++ decoded := new(types.Transaction) ++ if err := decoded.UnmarshalBinary(tx.Tx); err != nil { ++ return nil, err ++ } ++ decodedConstraints[decoded.Hash()] = &types.ConstraintDecoded{Index: tx.Index, Tx: decoded} ++ } ++ return decodedConstraints, nil ++} +  + // SendSSZRequest is a request to send SSZ data to a remote relay. + func SendSSZRequest(ctx context.Context, client http.Client, method, url string, payload []byte, useGzip bool) (code int, err error) { +@@ -117,3 +140,84 @@ } +  + return resp.StatusCode, nil + } ++ ++// EmitBoltDemoEvent sends a message to the web demo backend to log an event. ++// This is only used for demo purposes and should be removed in production. ++func EmitBoltDemoEvent(message string) { ++ event := strings.NewReader(fmt.Sprintf("{ \"message\": \"BOLT-BUILDER: %s\"}", message)) ++ eventRes, err := http.Post("http://host.docker.internal:3001/events", "application/json", event) ++ if err != nil { ++ log.Error("Failed to send web demo event: ", err) ++ } ++ if eventRes != nil { ++ defer eventRes.Body.Close() ++ } ++} ++ ++func CalculateMerkleMultiProofs( ++ payloadTransactions types.Transactions, ++ HashToConstraintDecoded types.HashToConstraintDecoded, ++) (inclusionProof *common.InclusionProof, rootNode *ssz.Node, err error) { ++ constraintsOrderedByIndex, constraintsWithoutIndex, _, _ := types.ParseConstraintsDecoded(HashToConstraintDecoded) ++ constraints := slices.Concat(constraintsOrderedByIndex, constraintsWithoutIndex) ++ ++ // BOLT: generate merkle tree from payload transactions (we need raw RLP bytes for this) ++ rawTxs := make([]bellatrix.Transaction, len(payloadTransactions)) ++ for i, tx := range payloadTransactions { ++ raw, err := tx.WithoutBlobTxSidecar().MarshalBinary() ++ if err != nil { ++ log.Warn("[BOLT]: could not marshal transaction", "txHash", tx.Hash(), "err", err) ++ continue ++ } ++ rawTxs[i] = bellatrix.Transaction(raw) ++ } ++ ++ log.Info(fmt.Sprintf("[BOLT]: Generated %d raw transactions for merkle tree", len(rawTxs))) ++ bellatrixPayloadTxs := utilbellatrix.ExecutionPayloadTransactions{Transactions: rawTxs} ++ ++ rootNode, err = bellatrixPayloadTxs.GetTree() ++ if err != nil { ++ return nil, nil, fmt.Errorf("could not get tree from transactions: %w", err) ++ } ++ ++ // BOLT: Set the value of nodes. This is MANDATORY for the proof calculation ++ // to output the leaf correctly. This is also never documented in fastssz. 
-__- ++ rootNode.Hash() ++ ++ // using our gen index formula: 2 * 2^21 + preconfIndex ++ baseGeneralizedIndex := int(math.Pow(float64(2), float64(21))) ++ generalizedIndexes := make([]int, len(constraints)) ++ transactionHashes := make([]common.Hash, len(constraints)) ++ ++ for i, constraint := range constraints { ++ tx := constraint.Tx ++ // get the index of the preconfirmed transaction in the block ++ preconfIndex := slices.IndexFunc(payloadTransactions, func(payloadTx *types.Transaction) bool { return payloadTx.Hash() == tx.Hash() }) ++ if preconfIndex == -1 { ++ log.Error(fmt.Sprintf("Preconfirmed transaction %s not found in block", tx.Hash())) ++ log.Error(fmt.Sprintf("block has %v transactions", len(payloadTransactions))) ++ continue ++ } ++ ++ generalizedIndex := baseGeneralizedIndex + preconfIndex ++ generalizedIndexes[i] = generalizedIndex ++ transactionHashes[i] = tx.Hash() ++ } ++ ++ log.Info(fmt.Sprintf("[BOLT]: Calculating merkle multiproof for %d preconfirmed transaction", ++ len(constraints))) ++ ++ timeStart := time.Now() ++ multiProof, err := rootNode.ProveMulti(generalizedIndexes) ++ if err != nil { ++ return nil, nil, fmt.Errorf("could not calculate merkle multiproof for %d preconf: %w", len(constraints), err) ++ } ++ ++ timeForProofs := time.Since(timeStart) ++ log.Info(fmt.Sprintf("[BOLT]: Calculated merkle multiproof for %d preconf in %s", len(constraints), timeForProofs)) ++ ++ inclusionProof = common.InclusionProofFromMultiProof(multiProof) ++ inclusionProof.TransactionHashes = transactionHashes ++ ++ return ++}
builder/utils_test.go (new) (+138, -0)
diff --git flashbots/builder/builder/utils_test.go chainbound/bolt/builder/utils_test.go +new file mode 100644 +index 0000000000000000000000000000000000000000..76e6cfdf8e16a574186d2b8109e24f76f6423e30 +--- /dev/null ++++ chainbound/bolt/builder/utils_test.go +@@ -0,0 +1,138 @@ ++package builder ++ ++import ( ++ "encoding/json" ++ "testing" ++ ++ "github.com/ethereum/go-ethereum/common" ++ "github.com/ethereum/go-ethereum/core/types" ++ fastSsz "github.com/ferranbt/fastssz" ++ "github.com/stretchr/testify/require" ++) ++ ++func TestGenerateMerkleMultiProofs(t *testing.T) { ++ // https://etherscan.io/tx/0x138a5f8ba7950521d9dec66ee760b101e0c875039e695c9fcfb34f5ef02a881b ++ // 0x02f873011a8405f5e10085037fcc60e182520894f7eaaf75cb6ec4d0e2b53964ce6733f54f7d3ffc880b6139a7cbd2000080c080a095a7a3cbb7383fc3e7d217054f861b890a935adc1adf4f05e3a2f23688cf2416a00875cdc45f4395257e44d709d04990349b105c22c11034a60d7af749ffea2765 ++ // https://etherscan.io/tx/0xfb0ee9de8941c8ad50e6a3d2999cd6ef7a541ec9cb1ba5711b76fcfd1662dfa9 ++ // 0xf8708305dc6885029332e35883019a2894500b0107e172e420561565c8177c28ac0f62017f8810ffb80e6cc327008025a0e9c0b380c68f040ae7affefd11979f5ed18ae82c00e46aa3238857c372a358eca06b26e179dd2f7a7f1601755249f4cff56690c4033553658f0d73e26c36fe7815 ++ // https://etherscan.io/tx/0x45e7ee9ba1a1d0145de29a764a33bb7fc5620486b686d68ec8cb3182d137bc90 ++ // 0xf86c0785028fa6ae0082520894098d880c4753d0332ca737aa592332ed2522cd22880d2f09f6558750008026a0963e58027576b3a8930d7d9b4a49253b6e1a2060e259b2102e34a451d375ce87a063f802538d3efed17962c96fcea431388483bbe3860ea9bb3ef01d4781450fbf ++ // https://etherscan.io/tx/0x9d48b4a021898a605b7ae49bf93ad88fa6bd7050e9448f12dde064c10f22fe9c ++ // 0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e ++ // https://etherscan.io/tx/0x15bd881daa1408b33f67fa4bdeb8acfb0a2289d9b4c6f81eef9bb2bb2e52e780 - Blob Tx ++ // 0x03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe ++ ++ raw := 
`["0x03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe", "0x02f873011a8405f5e10085037fcc60e182520894f7eaaf75cb6ec4d0e2b53964ce6733f54f7d3ffc880b6139a7cbd2000080c080a095a7a3cbb7383fc3e7d217054f861b890a935adc1adf4f05e3a2f23688cf2416a00875cdc45f4395257e44d709d04990349b105c22c11034a60d7af749ffea2765","0xf8708305dc6885029332e35883019a2894500b0107e172e420561565c8177c28ac0f62017f8810ffb80e6cc327008025a0e9c0b380c68f040ae7affefd11979f5ed18ae82c00e46aa3238857c372a358eca06b26e179dd2f7a7f1601755249f4cff56690c4033553658f0d73e26c36fe7815", "0xf86c0785028fa6ae0082520894098d880c4753d0332ca737aa592332ed2522cd22880d2f09f6558750008026a0963e58027576b3a8930d7d9b4a49253b6e1a2060e259b2102e34a451d375ce87a063f802538d3efed17962c96fcea431388483bbe3860ea9bb3ef01d4781450fbf", "0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e"]` ++ ++ byteTxs := make([]*common.HexBytes, 0, 5) ++ err := json.Unmarshal([]byte(raw), &byteTxs) ++ require.NoError(t, err) ++ require.Equal(t, len(byteTxs), 5) ++ ++ payloadTransactions := common.Map(byteTxs, func(rawTx *common.HexBytes) *types.Transaction { ++ transaction := new(types.Transaction) ++ err = transaction.UnmarshalBinary([]byte(*rawTx)) ++ return transaction ++ }) ++ ++ require.Equal(t, payloadTransactions[0].Type(), uint8(3)) ++ require.Equal(t, payloadTransactions[1].Type(), uint8(2)) ++ ++ // try out all combinations of "constraints": ++ // e.g. only [0], then [0, 1], then [1] etc... 
++ // and log which ones are failing and which ones are not ++ for i := 1; i < len(payloadTransactions)+1; i++ { ++ t.Logf("--- Trying with %d constraints\n", i) ++ for _, chosenConstraintTransactions := range combinations(payloadTransactions, i) { ++ // find the index of the chosen constraints inside payload transactions for debugging ++ payloadIndexes := make([]int, len(chosenConstraintTransactions)) ++ for i, chosenConstraint := range chosenConstraintTransactions { ++ for j, payloadTransaction := range payloadTransactions { ++ if chosenConstraint.Hash() == payloadTransaction.Hash() { ++ payloadIndexes[i] = j ++ break ++ } ++ } ++ } ++ ++ constraints := make(types.HashToConstraintDecoded) ++ for _, tx := range chosenConstraintTransactions { ++ constraints[tx.Hash()] = &types.ConstraintDecoded{Tx: tx} ++ } ++ ++ inclusionProof, root, err := CalculateMerkleMultiProofs(payloadTransactions, constraints) ++ require.NoError(t, err) ++ rootHash := root.Hash() ++ ++ leaves := make([][]byte, len(constraints)) ++ ++ i := 0 ++ for _, constraint := range constraints { ++ if constraint == nil || constraint.Tx == nil { ++ t.Logf("nil constraint or transaction!") ++ } ++ ++ // Compute the hash tree root for the raw preconfirmed transaction ++ // and use it as "Leaf" in the proof to be verified against ++ ++ withoutBlob, err := constraint.Tx.WithoutBlobTxSidecar().MarshalBinary() ++ if err != nil { ++ t.Logf("error marshalling transaction without blob tx sidecar: %v", err) ++ } ++ ++ tx := Transaction(withoutBlob) ++ txHashTreeRoot, err := tx.HashTreeRoot() ++ if err != nil { ++ t.Logf("error calculating hash tree root: %v", err) ++ } ++ ++ leaves[i] = txHashTreeRoot[:] ++ i++ ++ } ++ ++ hashes := make([][]byte, len(inclusionProof.MerkleHashes)) ++ for i, hash := range inclusionProof.MerkleHashes { ++ hashes[i] = []byte(*hash) ++ } ++ indexes := make([]int, len(inclusionProof.GeneralizedIndexes)) ++ for i, index := range inclusionProof.GeneralizedIndexes { ++ indexes[i] = int(index) ++ } ++ ++ ok, err := fastSsz.VerifyMultiproof(rootHash[:], hashes, leaves, indexes) ++ if err != nil { ++ t.Logf("error verifying merkle proof: %v", err) ++ } ++ ++ if !ok { ++ t.Logf("FAIL with txs: %v", payloadIndexes) ++ } else { ++ t.Logf("SUCCESS with txs: %v", payloadIndexes) ++ } ++ } ++ } ++} ++ ++// Function to generate combinations of a specific length ++func combinations[T any](arr []T, k int) [][]T { ++ var result [][]T ++ n := len(arr) ++ data := make([]T, k) ++ combine(arr, data, 0, n-1, 0, k, &result) ++ return result ++} ++ ++// Helper function to generate combinations ++func combine[T any](arr, data []T, start, end, index, k int, result *[][]T) { ++ if index == k { ++ tmp := make([]T, k) ++ copy(tmp, data) ++ *result = append(*result, tmp) ++ return ++ } ++ ++ for i := start; i <= end && end-i+1 >= k-index; i++ { ++ data[index] = arr[i] ++ combine(arr, data, i+1, end, index+1, k, result) ++ } ++}

The only change in the ETH service was adding the constraintsCache to the block building entrypoint.
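
Restated as a small sketch using the field names from the diff below (the wrapper function is illustrative): the cache, together with the slot it is keyed by, travels from the builder entrypoint into the miner through miner.BuildPayloadArgs.

package builder

import (
    "github.com/chainbound/shardmap"
    "github.com/ethereum/go-ethereum/core/types"
    "github.com/ethereum/go-ethereum/miner"
)

// buildPayloadArgs shows how the constraints cache (and the slot it is keyed by)
// now travel from the builder entrypoint into the miner; the field names are the
// ones added to miner.BuildPayloadArgs in this changeset.
func buildPayloadArgs(
    attrs *types.BuilderPayloadAttributes,
    sealedBlockCallback miner.BlockHookFn,
    constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded],
) *miner.BuildPayloadArgs {
    return &miner.BuildPayloadArgs{
        Parent:           attrs.HeadHash,
        Timestamp:        uint64(attrs.Timestamp),
        FeeRecipient:     attrs.SuggestedFeeRecipient,
        GasLimit:         attrs.GasLimit,
        Random:           attrs.Random,
        Withdrawals:      attrs.Withdrawals,
        BeaconRoot:       attrs.ParentBeaconBlockRoot,
        Slot:             attrs.Slot,
        BlockHook:        sealedBlockCallback,
        ConstraintsCache: constraintsCache,
    }
}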

builder/eth_service.go (+19, -11)
diff --git flashbots/builder/builder/eth_service.go chainbound/bolt/builder/eth_service.go +index 480221815f46c97f37292ba441b280629339e04c..4d692b0220baebb075f252a8e0cb89df7c79bae9 100644 +--- flashbots/builder/builder/eth_service.go ++++ chainbound/bolt/builder/eth_service.go +@@ -5,6 +5,7 @@ "errors" + "math/big" + "time" +  ++ "github.com/chainbound/shardmap" + "github.com/ethereum/go-ethereum/beacon/engine" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" +@@ -15,7 +16,7 @@ "github.com/ethereum/go-ethereum/params" + ) +  + type IEthereumService interface { +- BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn) error ++ BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) error + GetBlockByHash(hash common.Hash) *types.Block + Config() *params.ChainConfig + Synced() bool +@@ -30,9 +31,10 @@ testBlobSidecar []*types.BlobTxSidecar + testBundlesMerged []types.SimulatedBundle + testAllBundles []types.SimulatedBundle + testUsedSbundles []types.UsedSBundle ++ testPreconfs []*types.Transaction + } +  +-func (t *testEthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn) error { ++func (t *testEthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) error { + sealedBlockCallback(t.testBlock, t.testBlockValue, t.testBlobSidecar, time.Now(), t.testBundlesMerged, t.testAllBundles, t.testUsedSbundles) + return nil + } +@@ -52,18 +54,20 @@ return &EthereumService{eth: eth} + } +  + // TODO: we should move to a setup similar to catalyst local blocks & payload ids +-func (s *EthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn) error { ++func (s *EthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) error { + // Send a request to generate a full block in the background. + // The result can be obtained via the returned channel. + args := &miner.BuildPayloadArgs{ +- Parent: attrs.HeadHash, +- Timestamp: uint64(attrs.Timestamp), +- FeeRecipient: attrs.SuggestedFeeRecipient, +- GasLimit: attrs.GasLimit, +- Random: attrs.Random, +- Withdrawals: attrs.Withdrawals, +- BeaconRoot: attrs.ParentBeaconBlockRoot, +- BlockHook: sealedBlockCallback, ++ Parent: attrs.HeadHash, ++ Timestamp: uint64(attrs.Timestamp), ++ FeeRecipient: attrs.SuggestedFeeRecipient, ++ GasLimit: attrs.GasLimit, ++ Random: attrs.Random, ++ Withdrawals: attrs.Withdrawals, ++ BeaconRoot: attrs.ParentBeaconBlockRoot, ++ Slot: attrs.Slot, ++ BlockHook: sealedBlockCallback, ++ ConstraintsCache: constraintsCache, + } +  + payload, err := s.eth.Miner().BuildPayload(args) +@@ -104,3 +108,7 @@ + func (s *EthereumService) Synced() bool { + return s.eth.Synced() + } ++ ++func (s *EthereumService) Ethereum() *eth.Ethereum { ++ return s.eth ++}
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+1
+
-1
+ +
+ +
+
+
diff --git flashbots/builder/builder/eth_service_test.go chainbound/bolt/builder/eth_service_test.go +index 386f472c2a1becf40c36239381d46af2f8a8074c..000a3185af88dcea75c0a656c10b7ca321480bb8 100644 +--- flashbots/builder/builder/eth_service_test.go ++++ chainbound/bolt/builder/eth_service_test.go +@@ -103,7 +103,7 @@ require.Equal(t, parent.Time+1, executableData.ExecutionPayload.Timestamp) + require.Equal(t, block.ParentHash(), parent.Hash()) + require.Equal(t, block.Hash(), executableData.ExecutionPayload.BlockHash) + require.Equal(t, blockValue.Uint64(), uint64(0)) +- }) ++ }, nil) +  + require.NoError(t, err) + }
+
+ + + +
+
+ +
+
+
+ + +
+
+
+ + +
+ +
+

This is where the actual block building logic is located.

+ +

We added a constraintsCache to the miner: it keeps an up-to-date view of the constraints streamed from relays, following the Constraints API relay specs, and it is passed down to the miner from the entrypoint in the builder/ module.
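As a rough sketch of how that view is maintained (the actual subscriber lives in builder.go and also handles SSE framing, signature checks and decoding), freshly decoded constraints for a slot can simply be merged into the per-slot map. The sketch assumes the bolt fork of go-ethereum (which defines types.ConstraintDecoded, see the core/types/constraints.go diff later in this overview) and github.com/chainbound/shardmap are on the module path; mergeConstraints is an illustrative helper, not part of the diff.

```go
package main

import (
	"fmt"

	"github.com/chainbound/shardmap"
	"github.com/ethereum/go-ethereum/core/types"
)

// mergeConstraints merges freshly decoded constraints for a slot into the
// FIFO cache keyed by slot, so the miner always sees the latest view.
func mergeConstraints(cache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded],
	slot uint64, decoded []*types.ConstraintDecoded,
) {
	existing, _ := cache.Get(slot)
	if existing == nil {
		existing = make(types.HashToConstraintDecoded)
	}
	for _, c := range decoded {
		existing[c.Tx.Hash()] = c
	}
	cache.Put(slot, existing)
}

func main() {
	// Same constructor parameters as the builder uses: 64 slots, 16 shards.
	cache := shardmap.NewFIFOMap[uint64, types.HashToConstraintDecoded](64, 16, shardmap.HashUint64)
	mergeConstraints(cache, 42, nil) // nothing decoded yet for slot 42
	constraints, _ := cache.Get(42)
	fmt.Println("constraints cached for slot 42:", len(constraints))
}
```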

+ +

At block-building time, we check whether the cache holds any constrained transactions for the slot being built and, if so, commit them at the top of the block (honoring a requested index when one is set). This is a naive implementation that can be improved, but it demonstrates the builder side of the flow.
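A minimal sketch of that lookup, reusing the imports from the previous snippet (the real commit logic is in miner/worker.go below, and the parsing helper is defined in core/types/constraints.go later in this overview; the wrapper function itself is illustrative):

```go
// constraintsForSlot fetches the constraints cached for the slot being built
// and splits them into indexed and unindexed ones, summing the gas they need
// so it can be reserved up front. The real implementation then interleaves
// them with mempool transactions inside commitTransactions.
func constraintsForSlot(
	cache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded],
	slot uint64,
) (indexed, unindexed []*types.ConstraintDecoded, gasReserved, blobGasReserved uint64) {
	constraints, _ := cache.Get(slot)
	// Indexed constraints come back sorted by index, the rest by nonce and hash.
	return types.ParseConstraintsDecoded(constraints)
}
```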

+
+
+ +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+3
+
-2
+ +
+ +
+
+
diff --git flashbots/builder/miner/algo_common_test.go chainbound/bolt/miner/algo_common_test.go +index 1b4853863eef1137a4bb83492a1e0e3fd7247180..105c709b2aa22b38bb20922e0a76474688138b55 100644 +--- flashbots/builder/miner/algo_common_test.go ++++ chainbound/bolt/miner/algo_common_test.go +@@ -528,13 +528,14 @@ t.Cleanup(func() { + testConfig.AlgoType = ALGO_MEV_GETH + }) +  +- for _, algoType := range []AlgoType{ALGO_MEV_GETH, ALGO_GREEDY, ALGO_GREEDY_BUCKETS, ALGO_GREEDY_MULTISNAP, ALGO_GREEDY_BUCKETS_MULTISNAP} { ++ for _, algoType := range []AlgoType{ALGO_MEV_GETH} { + local := new(params.ChainConfig) + *local = *ethashChainConfig + local.TerminalTotalDifficulty = big.NewInt(0) + testConfig.AlgoType = algoType +- testGetSealingWork(t, local, ethash.NewFaker()) ++ testGetSealingWork(t, local, ethash.NewFaker(), nil) + } ++ t.Fail() + } +  + func TestGetSealingWorkAlgosWithProfit(t *testing.T) {
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+23
+
-19
+ +
+ +
+
+
diff --git flashbots/builder/miner/multi_worker.go chainbound/bolt/miner/multi_worker.go +index 797b277e8110c64c79528576b10f9e183e86aca1..415447d47ca379aae834701ceca2f8c404838580 100644 +--- flashbots/builder/miner/multi_worker.go ++++ chainbound/bolt/miner/multi_worker.go +@@ -93,15 +93,17 @@ // enough to run. The empty payload can at least make sure there is something + // to deliver for not missing slot. + var empty *newPayloadResult + emptyParams := &generateParams{ +- timestamp: args.Timestamp, +- forceTime: true, +- parentHash: args.Parent, +- coinbase: args.FeeRecipient, +- random: args.Random, +- gasLimit: args.GasLimit, +- withdrawals: args.Withdrawals, +- beaconRoot: args.BeaconRoot, +- noTxs: true, ++ timestamp: args.Timestamp, ++ forceTime: true, ++ parentHash: args.Parent, ++ coinbase: args.FeeRecipient, ++ random: args.Random, ++ gasLimit: args.GasLimit, ++ withdrawals: args.Withdrawals, ++ beaconRoot: args.BeaconRoot, ++ noTxs: true, ++ slot: args.Slot, ++ constraintsCache: args.ConstraintsCache, + } + for _, worker := range w.workers { + empty = worker.getSealingBlock(emptyParams) +@@ -130,16 +132,18 @@ for _, w := range w.workers { + workerPayload := newPayload(empty.block, args.Id()) + workerPayloads = append(workerPayloads, workerPayload) + fullParams := &generateParams{ +- timestamp: args.Timestamp, +- forceTime: true, +- parentHash: args.Parent, +- coinbase: args.FeeRecipient, +- random: args.Random, +- withdrawals: args.Withdrawals, +- beaconRoot: args.BeaconRoot, +- gasLimit: args.GasLimit, +- noTxs: false, +- onBlock: args.BlockHook, ++ timestamp: args.Timestamp, ++ forceTime: true, ++ parentHash: args.Parent, ++ coinbase: args.FeeRecipient, ++ random: args.Random, ++ withdrawals: args.Withdrawals, ++ beaconRoot: args.BeaconRoot, ++ gasLimit: args.GasLimit, ++ noTxs: false, ++ onBlock: args.BlockHook, ++ slot: args.Slot, ++ constraintsCache: args.ConstraintsCache, + } +  + go func(w *worker) {
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+34
+
-27
+ +
+ +
+
+
diff --git flashbots/builder/miner/payload_building.go chainbound/bolt/miner/payload_building.go +index edd9e13c1176dca420a38b64128f91602649d8f9..ed3a4fe1c82c87771fb2df00cddbc76b9ba4bd25 100644 +--- flashbots/builder/miner/payload_building.go ++++ chainbound/bolt/miner/payload_building.go +@@ -23,6 +23,7 @@ "math/big" + "sync" + "time" +  ++ "github.com/chainbound/shardmap" + "github.com/ethereum/go-ethereum/beacon/engine" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" +@@ -35,15 +36,17 @@ // BuildPayloadArgs contains the provided parameters for building payload. + // Check engine-api specification for more details. + // https://github.com/ethereum/execution-apis/blob/main/src/engine/cancun.md#payloadattributesv3 + type BuildPayloadArgs struct { +- Parent common.Hash // The parent block to build payload on top +- Timestamp uint64 // The provided timestamp of generated payload +- FeeRecipient common.Address // The provided recipient address for collecting transaction fee +- Random common.Hash // The provided randomness value +- Withdrawals types.Withdrawals // The provided withdrawals +- BeaconRoot *common.Hash // The provided beaconRoot (Cancun) +- Version engine.PayloadVersion // Versioning byte for payload id calculation. +- GasLimit uint64 +- BlockHook BlockHookFn ++ Parent common.Hash // The parent block to build payload on top ++ Timestamp uint64 // The provided timestamp of generated payload ++ FeeRecipient common.Address // The provided recipient address for collecting transaction fee ++ Random common.Hash // The provided randomness value ++ Withdrawals types.Withdrawals // The provided withdrawals ++ BeaconRoot *common.Hash // The provided beaconRoot (Cancun) ++ Version engine.PayloadVersion // Versioning byte for payload id calculation. ++ GasLimit uint64 ++ BlockHook BlockHookFn ++ Slot uint64 ++ ConstraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded] + } +  + // Id computes an 8-byte identifier by hashing the components of the payload arguments. +@@ -248,15 +251,17 @@ // Build the initial version with no transaction included. It should be fast + // enough to run. The empty payload can at least make sure there is something + // to deliver for not missing slot. + emptyParams := &generateParams{ +- timestamp: args.Timestamp, +- forceTime: true, +- parentHash: args.Parent, +- coinbase: args.FeeRecipient, +- random: args.Random, +- withdrawals: args.Withdrawals, +- beaconRoot: args.BeaconRoot, +- noTxs: true, +- onBlock: args.BlockHook, ++ timestamp: args.Timestamp, ++ forceTime: true, ++ parentHash: args.Parent, ++ coinbase: args.FeeRecipient, ++ random: args.Random, ++ withdrawals: args.Withdrawals, ++ beaconRoot: args.BeaconRoot, ++ noTxs: true, ++ onBlock: args.BlockHook, ++ slot: args.Slot, ++ constraintsCache: args.ConstraintsCache, + } + empty := w.getSealingBlock(emptyParams) + if empty.err != nil { +@@ -280,15 +285,17 @@ // by the timestamp parameter. 
+ endTimer := time.NewTimer(time.Second * 12) +  + fullParams := &generateParams{ +- timestamp: args.Timestamp, +- forceTime: true, +- parentHash: args.Parent, +- coinbase: args.FeeRecipient, +- random: args.Random, +- withdrawals: args.Withdrawals, +- beaconRoot: args.BeaconRoot, +- noTxs: false, +- onBlock: args.BlockHook, ++ timestamp: args.Timestamp, ++ forceTime: true, ++ parentHash: args.Parent, ++ coinbase: args.FeeRecipient, ++ random: args.Random, ++ withdrawals: args.Withdrawals, ++ beaconRoot: args.BeaconRoot, ++ noTxs: false, ++ onBlock: args.BlockHook, ++ slot: args.Slot, ++ constraintsCache: args.ConstraintsCache, + } +  + for {
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+267
+
-105
+ +
+ +
+
+
diff --git flashbots/builder/miner/worker.go chainbound/bolt/miner/worker.go +index 09d46ed99f9f600550d979c31b582201ab4eef0a..c845edbddbcf5e1ea57a5485b0a0225db30a738a 100644 +--- flashbots/builder/miner/worker.go ++++ chainbound/bolt/miner/worker.go +@@ -25,6 +25,7 @@ "sync" + "sync/atomic" + "time" +  ++ "github.com/chainbound/shardmap" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/consensus" + "github.com/ethereum/go-ethereum/consensus/misc/eip1559" +@@ -644,7 +645,7 @@ plainTxs := newTransactionsByPriceAndNonce(w.current.signer, txs, nil, nil, w.current.header.BaseFee) // Mixed bag of everrything, yolo + blobTxs := newTransactionsByPriceAndNonce(w.current.signer, nil, nil, nil, w.current.header.BaseFee) // Empty bag, don't bother optimising +  + tcount := w.current.tcount +- w.commitTransactions(w.current, plainTxs, blobTxs, nil) ++ w.commitTransactions(w.current, plainTxs, blobTxs, nil, nil) +  + // Only update the snapshot if any new transactions were added + // to the pending block +@@ -1017,14 +1018,30 @@ + return nil + } +  +-func (w *worker) commitTransactions(env *environment, plainTxs, blobTxs *transactionsByPriceAndNonce, interrupt *atomic.Int32) error { ++// commitTransactions applies sorted transactions to the current environment, updating the state ++// and creating the resulting block ++// ++// Assumptions: ++// - there are no nonce-conflicting transactions between `plainTxs`, `blobTxs` and the constraints ++// - all transaction are correctly signed ++func (w *worker) commitTransactions(env *environment, plainTxs, blobTxs *transactionsByPriceAndNonce, constraints types.HashToConstraintDecoded, interrupt *atomic.Int32) error { + gasLimit := env.header.GasLimit + if env.gasPool == nil { + env.gasPool = new(core.GasPool).AddGas(gasLimit) + } + var coalescedLogs []*types.Log +  ++ // Here we initialize and track the constraints left to be executed along ++ // with their gas requirements ++ constraintsOrderedByIndex, ++ constraintsWithoutIndex, ++ constraintsTotalGasLeft, ++ constraintsTotalBlobGasLeft := types.ParseConstraintsDecoded(constraints) ++ + for { ++ // `env.tcount` starts from 0 so it's correct to use it as the current index ++ currentTxIndex := uint64(env.tcount) ++ + // Check interruption signal and abort building if it's fired. + if interrupt != nil { + if signal := interrupt.Load(); signal != commitInterruptNone { +@@ -1036,102 +1053,166 @@ if env.gasPool.Gas() < params.TxGas { + log.Trace("Not enough gas for further transactions", "have", env.gasPool, "want", params.TxGas) + break + } ++ ++ blobGasLeft := uint64(params.MaxBlobGasPerBlock - env.blobs*params.BlobTxBlobGasPerBlob) ++ + // If we don't have enough blob space for any further blob transactions, + // skip that list altogether +- if !blobTxs.Empty() && env.blobs*params.BlobTxBlobGasPerBlob >= params.MaxBlobGasPerBlock { ++ if !blobTxs.Empty() && blobGasLeft <= 0 { + log.Trace("Not enough blob space for further blob transactions") + blobTxs.Clear() + // Fall though to pick up any plain txs + } + // Retrieve the next transaction and abort if all done. 
+ var ( +- ltx *txpool.LazyTransaction +- txs *transactionsByPriceAndNonce +- pltx *txpool.LazyTransaction +- ptip *uint256.Int +- bltx *txpool.LazyTransaction +- btip *uint256.Int ++ lazyTx *txpool.LazyTransaction ++ txs *transactionsByPriceAndNonce ++ plainLazyTx *txpool.LazyTransaction ++ plainTxTip *uint256.Int ++ blobLazyTx *txpool.LazyTransaction ++ blobTxTip *uint256.Int + ) +  +- pTxWithMinerFee := plainTxs.Peek() +- if pTxWithMinerFee != nil { +- pltx = pTxWithMinerFee.Tx() +- ptip = pTxWithMinerFee.fees ++ if pTxWithMinerFee := plainTxs.Peek(); pTxWithMinerFee != nil { ++ plainLazyTx = pTxWithMinerFee.Tx() ++ plainTxTip = pTxWithMinerFee.fees + } +  +- bTxWithMinerFee := blobTxs.Peek() +- if bTxWithMinerFee != nil { +- bltx = bTxWithMinerFee.Tx() +- btip = bTxWithMinerFee.fees ++ if bTxWithMinerFee := blobTxs.Peek(); bTxWithMinerFee != nil { ++ blobLazyTx = bTxWithMinerFee.Tx() ++ blobTxTip = bTxWithMinerFee.fees + } +  + switch { +- case pltx == nil: +- txs, ltx = blobTxs, bltx +- case bltx == nil: +- txs, ltx = plainTxs, pltx ++ case plainLazyTx == nil: ++ txs, lazyTx = blobTxs, blobLazyTx ++ case blobLazyTx == nil: ++ txs, lazyTx = plainTxs, plainLazyTx + default: +- if ptip.Lt(btip) { +- txs, ltx = blobTxs, bltx ++ if plainTxTip.Lt(blobTxTip) { ++ txs, lazyTx = blobTxs, blobLazyTx + } else { +- txs, ltx = plainTxs, pltx ++ txs, lazyTx = plainTxs, plainLazyTx + } + } +  +- if ltx == nil { +- break ++ type candidateTx struct { ++ tx *types.Transaction ++ isConstraint bool ++ } ++ // candidate is the transaction we should execute in this cycle of the loop ++ var candidate struct { ++ tx *types.Transaction ++ isConstraint bool + } +  +- // If we don't have enough space for the next transaction, skip the account. +- if env.gasPool.Gas() < ltx.Gas { +- log.Trace("Not enough gas left for transaction", "hash", ltx.Hash, "left", env.gasPool.Gas(), "needed", ltx.Gas) +- txs.Pop() +- continue ++ var constraintTx *types.ConstraintDecoded ++ if len(constraintsOrderedByIndex) > 0 { ++ constraintTx = constraintsOrderedByIndex[0] + } +- if left := uint64(params.MaxBlobGasPerBlock - env.blobs*params.BlobTxBlobGasPerBlob); left < ltx.BlobGas { +- log.Trace("Not enough blob gas left for transaction", "hash", ltx.Hash, "left", left, "needed", ltx.BlobGas) +- txs.Pop() +- continue ++ ++ isSomePoolTxLeft := lazyTx != nil ++ ++ isThereConstraintWithThisIndex := constraintTx != nil && constraintTx.Index != nil && *constraintTx.Index == currentTxIndex ++ if isThereConstraintWithThisIndex { ++ // we retrieve the candidate constraint by shifting it from the list ++ candidate = candidateTx{tx: common.Shift(&constraintsOrderedByIndex).Tx, isConstraint: true} ++ } else { ++ if isSomePoolTxLeft { ++ // Check if there enough gas left for this tx ++ if constraintsTotalGasLeft+lazyTx.Gas > env.gasPool.Gas() || constraintsTotalBlobGasLeft+lazyTx.BlobGas > blobGasLeft { ++ // Skip this tx and try to fit one with less gas. ++ // Drop all consecutive transactions from the same sender because of `nonce-too-high` clause. ++ log.Debug("Could not find transactions gas with the remaining constraints, account skipped", "hash", lazyTx.Hash) ++ txs.Pop() ++ // Edge case: ++ // ++ // Assumption: suppose sender A sends tx T_1 with nonce 1, and T_2 with nonce 2, and T_2 is a constraint. ++ // ++ // ++ // When running the block building algorithm I first have to make sure to reserve enough gas for the constraints. 
++ // This implies that when a pooled tx comes I have to check if there is enough gas for it while taking into account ++ // the rest of the remaining constraint gas to allocate. ++ // Suppose there is no gas for the pooled tx T_1, then I have to drop it and consequently drop every tx from the same ++ // sender with higher nonce due to "nonce-too-high" issues, including T_2. ++ // But then, I have dropped a constraint which means my bid is invalid. ++ // ++ // FIXME: for the PoC we're not handling this ++ ++ // Repeat the loop to try to find another pool transaction ++ continue ++ } ++ // We can safely consider the pool tx as the candidate, ++ // since by assumption it is not nonce-conflicting ++ tx := lazyTx.Resolve() ++ if tx == nil { ++ log.Trace("Ignoring evicted transaction", "hash", candidate.tx.Hash()) ++ txs.Pop() ++ continue ++ } ++ candidate = candidateTx{tx: tx, isConstraint: false} ++ } else { ++ // No more pool tx left, we can add the unindexed ones if available ++ if len(constraintsWithoutIndex) == 0 { ++ // To recap, this means: ++ // 1. there are no more pool tx left ++ // 2. there are no more constraints without an index ++ // 3. the remaining indexes inside `constraintsOrderedByIndex`, if any, cannot be satisfied ++ // As such, we can safely exist ++ break ++ } ++ candidate = candidateTx{tx: common.Shift(&constraintsWithoutIndex).Tx, isConstraint: true} ++ } + } +- // Transaction seems to fit, pull it up from the pool +- tx := ltx.Resolve() +- if tx == nil { +- log.Trace("Ignoring evicted transaction", "hash", ltx.Hash) +- txs.Pop() +- continue +- } +- // Error may be ignored here. The error has already been checked +- // during transaction acceptance is the transaction pool. +- from, _ := types.Sender(env.signer, tx) ++ ++ // Error may be ignored here, see assumption ++ from, _ := types.Sender(env.signer, candidate.tx) +  + // Check whether the tx is replay protected. If we're not in the EIP155 hf + // phase, start ignoring the sender until we do. +- if tx.Protected() && !w.chainConfig.IsEIP155(env.header.Number) { +- log.Trace("Ignoring replay protected transaction", "hash", ltx.Hash, "eip155", w.chainConfig.EIP155Block) ++ if candidate.tx.Protected() && !w.chainConfig.IsEIP155(env.header.Number) { ++ log.Trace("Ignoring replay protected transaction", "hash", candidate.tx.Hash(), "eip155", w.chainConfig.EIP155Block) + txs.Pop() + continue + } + // Start executing the transaction +- env.state.SetTxContext(tx.Hash(), env.tcount) ++ env.state.SetTxContext(candidate.tx.Hash(), env.tcount) +  +- logs, err := w.commitTransaction(env, tx) ++ logs, err := w.commitTransaction(env, candidate.tx) + switch { + case errors.Is(err, core.ErrNonceTooLow): + // New head notification data race between the transaction pool and miner, shift +- log.Trace("Skipping transaction with low nonce", "hash", ltx.Hash, "sender", from, "nonce", tx.Nonce()) +- txs.Shift() ++ log.Trace("Skipping transaction with low nonce", "hash", candidate.tx.Hash(), "sender", from, "nonce", candidate.tx.Nonce()) ++ if candidate.isConstraint { ++ log.Warn(fmt.Sprintf("Skipping constraint with low nonce, hash %s, sender %s, nonce %d", candidate.tx.Hash(), from, candidate.tx.Nonce())) ++ } else { ++ txs.Shift() ++ } +  + case errors.Is(err, nil): + // Everything ok, collect the logs and shift in the next transaction from the same account + coalescedLogs = append(coalescedLogs, logs...) 
+ env.tcount++ +- txs.Shift() ++ if candidate.isConstraint { ++ // Update the amount of gas left for the constraints ++ constraintsTotalGasLeft -= candidate.tx.Gas() ++ constraintsTotalBlobGasLeft -= candidate.tx.BlobGas() ++ ++ constraintTip, _ := candidate.tx.EffectiveGasTip(env.header.BaseFee) ++ log.Info(fmt.Sprintf("Executed constraint %s at index %d with effective gas tip %d", candidate.tx.Hash().String(), currentTxIndex, constraintTip)) ++ } else { ++ txs.Shift() ++ } +  + default: + // Transaction is regarded as invalid, drop all consecutive transactions from + // the same sender because of `nonce-too-high` clause. +- log.Debug("Transaction failed, account skipped", "hash", ltx.Hash, "err", err) +- txs.Pop() ++ log.Debug("Transaction failed, account skipped", "hash", candidate.tx.Hash(), "err", err) ++ if candidate.isConstraint { ++ log.Warn("Constraint failed, account skipped", "hash", candidate.tx.Hash(), "err", err) ++ } else { ++ txs.Pop() ++ } + } + } + if !w.isRunning() && len(coalescedLogs) > 0 { +@@ -1154,16 +1235,18 @@ } +  + // generateParams wraps various of settings for generating sealing task. + type generateParams struct { +- timestamp uint64 // The timestamp for sealing task +- forceTime bool // Flag whether the given timestamp is immutable or not +- parentHash common.Hash // Parent block hash, empty means the latest chain head +- coinbase common.Address // The fee recipient address for including transaction +- gasLimit uint64 // The validator's requested gas limit target +- random common.Hash // The randomness generated by beacon chain, empty before the merge +- withdrawals types.Withdrawals // List of withdrawals to include in block. +- beaconRoot *common.Hash // The beacon root (cancun field). +- noTxs bool // Flag whether an empty block without any transaction is expected +- onBlock BlockHookFn // Callback to call for each produced block ++ timestamp uint64 // The timestamp for sealing task ++ forceTime bool // Flag whether the given timestamp is immutable or not ++ parentHash common.Hash // Parent block hash, empty means the latest chain head ++ coinbase common.Address // The fee recipient address for including transaction ++ gasLimit uint64 // The validator's requested gas limit target ++ random common.Hash // The randomness generated by beacon chain, empty before the merge ++ withdrawals types.Withdrawals // List of withdrawals to include in block. ++ beaconRoot *common.Hash // The beacon root (cancun field). 
++ noTxs bool // Flag whether an empty block without any transaction is expected ++ onBlock BlockHookFn // Callback to call for each produced block ++ slot uint64 // The slot in which the block is being produced ++ constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded] // The preconfirmations to include in the block + } +  + func doPrepareHeader(genParams *generateParams, chain *core.BlockChain, config *Config, chainConfig *params.ChainConfig, extra []byte, engine consensus.Engine) (*types.Header, *types.Header, error) { +@@ -1266,7 +1349,7 @@ } + return env, nil + } +  +-func (w *worker) fillTransactionsSelectAlgo(interrupt *atomic.Int32, env *environment) ([]types.SimulatedBundle, []types.SimulatedBundle, []types.UsedSBundle, map[common.Hash]struct{}, error) { ++func (w *worker) fillTransactionsSelectAlgo(interrupt *atomic.Int32, env *environment, constraints types.HashToConstraintDecoded) ([]types.SimulatedBundle, []types.SimulatedBundle, []types.UsedSBundle, map[common.Hash]struct{}, error) { + var ( + blockBundles []types.SimulatedBundle + allBundles []types.SimulatedBundle +@@ -1274,21 +1357,35 @@ usedSbundles []types.UsedSBundle + mempoolTxHashes map[common.Hash]struct{} + err error + ) +- switch w.flashbots.algoType { +- case ALGO_GREEDY, ALGO_GREEDY_BUCKETS, ALGO_GREEDY_MULTISNAP, ALGO_GREEDY_BUCKETS_MULTISNAP: ++ ++ // switch w.flashbots.algoType { ++ // ++ // case ALGO_GREEDY, ALGO_GREEDY_BUCKETS, ALGO_GREEDY_MULTISNAP, ALGO_GREEDY_BUCKETS_MULTISNAP: ++ // ++ // blockBundles, allBundles, usedSbundles, mempoolTxHashes, err = w.fillTransactionsAlgoWorker(interrupt, env) ++ // blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) ++ // case ALGO_MEV_GETH: ++ // blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) ++ // default: ++ // blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) ++ // } ++ ++ // // FIXME: (BOLT) the greedy algorithms do not support the constraints interface at the moment. ++ // // As such for this PoC we will be always using the MEV GETH algorithm regardless of the worker configuration. ++ if len(constraints) > 0 { ++ blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) ++ } else { + blockBundles, allBundles, usedSbundles, mempoolTxHashes, err = w.fillTransactionsAlgoWorker(interrupt, env) +- case ALGO_MEV_GETH: +- blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env) +- default: +- blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env) + } ++ + return blockBundles, allBundles, usedSbundles, mempoolTxHashes, err + } +  + // fillTransactions retrieves the pending transactions from the txpool and fills them + // into the given sealing block. The transaction selection and ordering strategy can + // be customized with the plugin in the future. 
+-func (w *worker) fillTransactions(interrupt *atomic.Int32, env *environment) ([]types.SimulatedBundle, []types.SimulatedBundle, map[common.Hash]struct{}, error) { ++func (w *worker) fillTransactions(interrupt *atomic.Int32, env *environment, constraints types.HashToConstraintDecoded) ([]types.SimulatedBundle, []types.SimulatedBundle, map[common.Hash]struct{}, error) { ++ log.Info(fmt.Sprintf("Filling transactions with %d constraints:", len(constraints))) + w.mu.RLock() + tip := w.tip + w.mu.RUnlock() +@@ -1304,6 +1401,12 @@ mempoolTxHashes[tx.Hash] = struct{}{} + } + } +  ++ // NOTE: as done with builder txs, we need to fill mempoolTxHashes with the constraints hashes ++ // in order to pass block validation ++ for hash := range constraints { ++ mempoolTxHashes[hash] = struct{}{} ++ } ++ + if env.header.BaseFee != nil { + filter.BaseFee = uint256.MustFromBig(env.header.BaseFee) + } +@@ -1316,6 +1419,45 @@ + filter.OnlyPlainTxs, filter.OnlyBlobTxs = false, true + pendingBlobTxs := w.eth.TxPool().Pending(filter) +  ++ // Drop all transactions that conflict with the constraints (sender, nonce) ++ signerAndNonceOfConstraints := make(map[common.Address]uint64) ++ ++ for _, constraint := range constraints { ++ from, err := types.Sender(env.signer, constraint.Tx) ++ log.Info(fmt.Sprintf("Inside fillTransactions, constraint %s from %s", constraint.Tx.Hash().String(), from.String())) ++ if err != nil { ++ // NOTE: is this the right behaviour? If this happens the builder is not able to ++ // produce a valid bid ++ log.Error("Failed to recover sender from constraint. Skipping constraint", "err", err) ++ continue ++ } ++ ++ signerAndNonceOfConstraints[from] = constraint.Tx.Nonce() ++ } ++ for sender, lazyTxs := range pendingPlainTxs { ++ common.Filter(&lazyTxs, func(lazyTx *txpool.LazyTransaction) bool { ++ if nonce, ok := signerAndNonceOfConstraints[sender]; ok { ++ if lazyTx.Tx.Nonce() == nonce { ++ return false ++ } ++ } ++ ++ return true ++ }) ++ } ++ ++ for sender, lazyTxs := range pendingBlobTxs { ++ common.Filter(&lazyTxs, func(lazyTx *txpool.LazyTransaction) bool { ++ if nonce, ok := signerAndNonceOfConstraints[sender]; ok { ++ if lazyTx.Tx.Nonce() == nonce { ++ return false ++ } ++ } ++ ++ return true ++ }) ++ } ++ + // Split the pending transactions into locals and remotes. + localPlainTxs, remotePlainTxs := make(map[common.Address][]*txpool.LazyTransaction), pendingPlainTxs + localBlobTxs, remoteBlobTxs := make(map[common.Address][]*txpool.LazyTransaction), pendingBlobTxs +@@ -1333,48 +1475,49 @@ } +  + var blockBundles []types.SimulatedBundle + var allBundles []types.SimulatedBundle +- if w.flashbots.isFlashbots { +- bundles, ccBundleCh := w.eth.TxPool().MevBundles(env.header.Number, env.header.Time) +- bundles = append(bundles, <-ccBundleCh...) 
+- +- var ( +- bundleTxs []*types.Transaction +- resultingBundle simulatedBundle +- mergedBundles []types.SimulatedBundle +- numBundles int +- err error +- ) +- // Sets allBundles in outer scope +- bundleTxs, resultingBundle, mergedBundles, numBundles, allBundles, err = w.generateFlashbotsBundle(env, bundles, pending) +- if err != nil { +- log.Error("Failed to generate flashbots bundle", "err", err) +- return nil, nil, nil, err +- } +- log.Info("Flashbots bundle", "ethToCoinbase", ethIntToFloat(resultingBundle.TotalEth), "gasUsed", resultingBundle.TotalGasUsed, "bundleScore", resultingBundle.MevGasPrice, "bundleLength", len(bundleTxs), "numBundles", numBundles, "worker", w.flashbots.maxMergedBundles) +- if len(bundleTxs) == 0 { +- return nil, nil, nil, errors.New("no bundles to apply") +- } +- if err := w.commitBundle(env, bundleTxs, interrupt); err != nil { +- return nil, nil, nil, err +- } +- blockBundles = mergedBundles +- env.profit.Add(env.profit, resultingBundle.EthSentToCoinbase) +- } ++ // if w.flashbots.isFlashbots { ++ // bundles, ccBundleCh := w.eth.TxPool().MevBundles(env.header.Number, env.header.Time) ++ // bundles = append(bundles, <-ccBundleCh...) ++ // ++ // var ( ++ // bundleTxs []*types.Transaction ++ // resultingBundle simulatedBundle ++ // mergedBundles []types.SimulatedBundle ++ // numBundles int ++ // err error ++ // ) ++ // // Sets allBundles in outer scope ++ // bundleTxs, resultingBundle, mergedBundles, numBundles, allBundles, err = w.generateFlashbotsBundle(env, bundles, pending) ++ // if err != nil { ++ // log.Error("Failed to generate flashbots bundle", "err", err) ++ // return nil, nil, nil, err ++ // } ++ // log.Info("Flashbots bundle", "ethToCoinbase", ethIntToFloat(resultingBundle.TotalEth), "gasUsed", resultingBundle.TotalGasUsed, "bundleScore", resultingBundle.MevGasPrice, "bundleLength", len(bundleTxs), "numBundles", numBundles, "worker", w.flashbots.maxMergedBundles) ++ // if len(bundleTxs) == 0 { ++ // log.Info("No bundles to apply") ++ // return nil, nil, nil, errors.New("no bundles to apply") ++ // } ++ // if err := w.commitBundle(env, bundleTxs, interrupt); err != nil { ++ // return nil, nil, nil, err ++ // } ++ // blockBundles = mergedBundles ++ // env.profit.Add(env.profit, resultingBundle.EthSentToCoinbase) ++ // } +  + // Fill the block with all available pending transactions. 
+- if len(localPlainTxs) > 0 || len(localBlobTxs) > 0 { ++ if len(localPlainTxs) > 0 || len(localBlobTxs) > 0 || len(constraints) > 0 { + plainTxs := newTransactionsByPriceAndNonce(env.signer, localPlainTxs, nil, nil, env.header.BaseFee) + blobTxs := newTransactionsByPriceAndNonce(env.signer, localBlobTxs, nil, nil, env.header.BaseFee) +  +- if err := w.commitTransactions(env, plainTxs, blobTxs, interrupt); err != nil { ++ if err := w.commitTransactions(env, plainTxs, blobTxs, constraints, interrupt); err != nil { + return nil, nil, nil, err + } + } +- if len(remotePlainTxs) > 0 || len(remoteBlobTxs) > 0 { ++ if len(remotePlainTxs) > 0 || len(remoteBlobTxs) > 0 || len(constraints) > 0 { + plainTxs := newTransactionsByPriceAndNonce(env.signer, remotePlainTxs, nil, nil, env.header.BaseFee) + blobTxs := newTransactionsByPriceAndNonce(env.signer, remoteBlobTxs, nil, nil, env.header.BaseFee) +  +- if err := w.commitTransactions(env, plainTxs, blobTxs, interrupt); err != nil { ++ if err := w.commitTransactions(env, plainTxs, blobTxs, constraints, interrupt); err != nil { + return nil, nil, nil, err + } + } +@@ -1400,6 +1543,7 @@ } + // Split the pending transactions into locals and remotes + // Fill the block with all available pending transactions. + pending := w.eth.TxPool().Pending(filter) ++ + mempoolTxHashes := make(map[common.Hash]struct{}, len(pending)) + for _, txs := range pending { + for _, tx := range txs { +@@ -1587,11 +1731,25 @@ } +  + orderCloseTime := time.Now() +  +- blockBundles, allBundles, usedSbundles, mempoolTxHashes, err := w.fillTransactionsSelectAlgo(nil, work) ++ var constraints types.HashToConstraintDecoded ++ ++ if params.constraintsCache != nil { ++ constraints, _ = params.constraintsCache.Get(params.slot) ++ log.Info(fmt.Sprintf("[BOLT]: found %d constraints for slot %d ", len(constraints), params.slot)) ++ } ++ ++ blockBundles, allBundles, usedSbundles, mempoolTxHashes, err := w.fillTransactionsSelectAlgo(nil, work, constraints) + if err != nil { + return &newPayloadResult{err: err} + } +  ++ // NOTE: as done with builder txs, we need to fill mempoolTxHashes with the constraints hashes ++ // in order to pass block validation. Otherwise the constraints will be rejected as unknown ++ // because they not part of the mempool and not part of the known bundles ++ for hash := range constraints { ++ mempoolTxHashes[hash] = struct{}{} ++ } ++ + // We mark transactions created by the builder as mempool transactions so code validating bundles will not fail + // for transactions created by the builder such as mev share refunds. + for _, tx := range work.txs { +@@ -1645,6 +1803,8 @@ + return block, blockProfit, nil + } +  ++// checkProposerPayment checks that the last transaction in the block is targeting the ++// validator coinbase and returns the block profit equal to the value of the last transaction. 
+ func (w *worker) checkProposerPayment(work *environment, validatorCoinbase common.Address) (*big.Int, error) { + if len(work.txs) == 0 { + return nil, errors.New("no proposer payment tx") +@@ -1694,7 +1854,7 @@ return + } +  + // Fill pending transactions from the txpool +- _, _, _, _, err = w.fillTransactionsSelectAlgo(interrupt, work) ++ _, _, _, _, err = w.fillTransactionsSelectAlgo(interrupt, work, nil) + switch { + case err == nil: + // The entire block is filled, decrease resubmit interval in case +@@ -2198,6 +2358,8 @@ w.mu.Lock() + sender := w.coinbase + w.mu.Unlock() + builderBalance := env.state.GetBalance(sender).ToBig() ++ ++ log.Info(fmt.Sprintf("[BOLT]: builderBalance %v, reserve.builderBalance %v", builderBalance, reserve.builderBalance)) +  + availableFunds := new(big.Int).Sub(builderBalance, reserve.builderBalance) + if availableFunds.Sign() <= 0 {
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+64
+
-25
+ +
+ +
+
+
diff --git flashbots/builder/miner/worker_test.go chainbound/bolt/miner/worker_test.go +index d65ad578de31558b667c7934cb7581751853fa8f..745a476183e62a79286c3dfb5cc90566092244d4 100644 +--- flashbots/builder/miner/worker_test.go ++++ chainbound/bolt/miner/worker_test.go +@@ -24,6 +24,7 @@ "sync/atomic" + "testing" + "time" +  ++ "github.com/chainbound/shardmap" + "github.com/ethereum/go-ethereum/accounts" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/consensus" +@@ -77,6 +78,9 @@ // Test transactions + pendingTxs []*types.Transaction + newTxs []*types.Transaction +  ++ // Test testConstraintsCache ++ testConstraintsCache = new(shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) ++ + testConfig = &Config{ + Recommit: time.Second, + GasCeil: params.GenesisGasLimit, +@@ -84,6 +88,8 @@ } +  + defaultGenesisAlloc = types.GenesisAlloc{testBankAddress: {Balance: testBankFunds}} + ) ++ ++const pendingTxsLen = 50 +  + func init() { + testTxPoolConfig = legacypool.DefaultConfig +@@ -98,15 +104,32 @@ Epoch: 30000, + } +  + signer := types.LatestSigner(params.TestChainConfig) +- tx1 := types.MustSignNewTx(testBankKey, signer, &types.AccessListTx{ +- ChainID: params.TestChainConfig.ChainID, +- Nonce: 0, +- To: &testUserAddress, +- Value: big.NewInt(1000), +- Gas: params.TxGas, +- GasPrice: big.NewInt(params.InitialBaseFee), +- }) +- pendingTxs = append(pendingTxs, tx1) ++ for i := 0; i < pendingTxsLen; i++ { ++ tx1 := types.MustSignNewTx(testBankKey, signer, &types.AccessListTx{ ++ ChainID: params.TestChainConfig.ChainID, ++ Nonce: uint64(i), ++ To: &testUserAddress, ++ Value: big.NewInt(1000), ++ Gas: params.TxGas, ++ GasPrice: big.NewInt(params.InitialBaseFee), ++ }) ++ ++ // Add some constraints every 3 txs, and every 6 add an index ++ if i%3 == 0 { ++ idx := new(uint64) ++ if i%2 == 0 { ++ *idx = uint64(i) ++ } else { ++ idx = nil ++ } ++ constraints := make(map[common.Hash]*types.ConstraintDecoded) ++ constraints[tx1.Hash()] = &types.ConstraintDecoded{Index: idx, Tx: tx1} ++ // FIXME: slot 0 is probably not correct for these tests ++ testConstraintsCache.Put(0, constraints) ++ } ++ ++ pendingTxs = append(pendingTxs, tx1) ++ } +  + tx2 := types.MustSignNewTx(testBankKey, signer, &types.LegacyTx{ + Nonce: 1, +@@ -130,7 +153,7 @@ func newTestWorkerBackend(t *testing.T, chainConfig *params.ChainConfig, engine consensus.Engine, db ethdb.Database, alloc types.GenesisAlloc, n int, gasLimit uint64) *testWorkerBackend { + if alloc == nil { + alloc = defaultGenesisAlloc + } +- var gspec = &core.Genesis{ ++ gspec := &core.Genesis{ + Config: chainConfig, + GasLimit: gasLimit, + Alloc: alloc, +@@ -251,10 +274,10 @@ + w, _ := newTestWorker(t, chainConfig, engine, rawdb.NewMemoryDatabase(), nil, 0) + defer w.close() +  +- taskCh := make(chan struct{}, 2) ++ taskCh := make(chan struct{}, pendingTxsLen*2) + checkEqual := func(t *testing.T, task *task) { + // The work should contain 1 tx +- receiptLen, balance := 1, uint256.NewInt(1000) ++ receiptLen, balance := pendingTxsLen, uint256.NewInt(50_000) + if len(task.receipts) != receiptLen { + t.Fatalf("receipt number mismatch: have %d, want %d", len(task.receipts), receiptLen) + } +@@ -378,12 +401,12 @@ } +  + func TestGetSealingWorkEthash(t *testing.T) { + t.Parallel() +- testGetSealingWork(t, ethashChainConfig, ethash.NewFaker()) ++ testGetSealingWork(t, ethashChainConfig, ethash.NewFaker(), nil) + } +  + func TestGetSealingWorkClique(t *testing.T) { + t.Parallel() +- testGetSealingWork(t, cliqueChainConfig, 
clique.New(cliqueChainConfig.Clique, rawdb.NewMemoryDatabase())) ++ testGetSealingWork(t, cliqueChainConfig, clique.New(cliqueChainConfig.Clique, rawdb.NewMemoryDatabase()), nil) + } +  + func TestGetSealingWorkPostMerge(t *testing.T) { +@@ -391,10 +414,25 @@ t.Parallel() + local := new(params.ChainConfig) + *local = *ethashChainConfig + local.TerminalTotalDifficulty = big.NewInt(0) +- testGetSealingWork(t, local, ethash.NewFaker()) ++ testGetSealingWork(t, local, ethash.NewFaker(), nil) + } +  +-func testGetSealingWork(t *testing.T, chainConfig *params.ChainConfig, engine consensus.Engine) { ++// TestGetSealingWorkWithConstraints tests the getSealingWork function with constraints. ++// This is the main test for the modified block building algorithm. Unfortunately ++// is not easy to make an end to end test where the constraints are pulled from the relay. ++// ++// A suggestion is to walk through the executing code with a debugger to further inspect the algorithm. ++// ++// However, if you want to check that functionality see `builder_test.go` ++func TestGetSealingWorkWithConstraints(t *testing.T) { ++ // t.Parallel() ++ local := new(params.ChainConfig) ++ *local = *ethashChainConfig ++ local.TerminalTotalDifficulty = big.NewInt(0) ++ testGetSealingWork(t, local, ethash.NewFaker(), testConstraintsCache) ++} ++ ++func testGetSealingWork(t *testing.T, chainConfig *params.ChainConfig, engine consensus.Engine, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) { + defer engine.Close() + w, b := newTestWorker(t, chainConfig, engine, rawdb.NewMemoryDatabase(), nil, 0) + defer w.close() +@@ -486,15 +524,16 @@ + // This API should work even when the automatic sealing is not enabled + for _, c := range cases { + r := w.getSealingBlock(&generateParams{ +- parentHash: c.parent, +- timestamp: timestamp, +- coinbase: c.coinbase, +- random: c.random, +- withdrawals: nil, +- beaconRoot: nil, +- noTxs: false, +- forceTime: true, +- onBlock: nil, ++ parentHash: c.parent, ++ timestamp: timestamp, ++ coinbase: c.coinbase, ++ random: c.random, ++ withdrawals: nil, ++ beaconRoot: nil, ++ noTxs: false, ++ forceTime: true, ++ onBlock: nil, ++ constraintsCache: constraintsCache, + }) + if c.expectErr { + if r.err == nil {
+
+ + +
+
+ +
+
+
+ + +
+ +
+

In the API backend, we don’t differentiate between private and public transactions for simplicity.

+
+
+ +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+1
+
-1
+ +
+ +
+
+
diff --git flashbots/builder/eth/api_backend.go chainbound/bolt/eth/api_backend.go +index ef2c444ba0acabde26dbc629783115446a9aeb08..170218725eafa10a7390ae521d164c1426d4cd8b 100644 +--- flashbots/builder/eth/api_backend.go ++++ chainbound/bolt/eth/api_backend.go +@@ -290,7 +290,7 @@ } +  + func (b *EthAPIBackend) SendTx(ctx context.Context, signedTx *types.Transaction, private bool) error { + if private { +- return b.eth.txPool.Add([]*types.Transaction{signedTx}, false, false, true)[0] ++ return b.eth.txPool.Add([]*types.Transaction{signedTx}, true, false, true)[0] + } else { + return b.eth.txPool.Add([]*types.Transaction{signedTx}, true, false, false)[0] + }
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+0
+
-93
+ +
+ +
+
+
diff --git flashbots/builder/eth/block-validation/api_test.go chainbound/bolt/eth/block-validation/api_test.go +index 4d8afc6fff1e732a3781b356e0217cfcb91fa736..4340e99b35bec87071841ad1f14af422ea814583 100644 +--- flashbots/builder/eth/block-validation/api_test.go ++++ chainbound/bolt/eth/block-validation/api_test.go +@@ -845,99 +845,6 @@ } + return blockRequest, nil + } +  +-func TestValidateBuilderSubmissionV2_CoinbasePaymentUnderflow(t *testing.T) { +- genesis, preMergeBlocks := generatePreMergeChain(20) +- lastBlock := preMergeBlocks[len(preMergeBlocks)-1] +- time := lastBlock.Time() + 5 +- genesis.Config.ShanghaiTime = &time +- n, ethservice := startEthService(t, genesis, preMergeBlocks) +- ethservice.Merger().ReachTTD() +- defer n.Close() +- +- api := NewBlockValidationAPI(ethservice, nil, true, true) +- +- baseFee := eip1559.CalcBaseFee(ethservice.BlockChain().Config(), lastBlock.Header()) +- txs := make(types.Transactions, 0) +- +- statedb, _ := ethservice.BlockChain().StateAt(lastBlock.Root()) +- nonce := statedb.GetNonce(testAddr) +- validatorNonce := statedb.GetNonce(testValidatorAddr) +- signer := types.LatestSigner(ethservice.BlockChain().Config()) +- +- expectedProfit := uint64(0) +- +- tx1, _ := types.SignTx(types.NewTransaction(nonce, common.Address{0x16}, big.NewInt(10), 21000, big.NewInt(2*baseFee.Int64()), nil), signer, testKey) +- txs = append(txs, tx1) +- expectedProfit += 21000 * baseFee.Uint64() +- +- // this tx will use 56996 gas +- tx2, _ := types.SignTx(types.NewContractCreation(nonce+1, new(big.Int), 1000000, big.NewInt(2*baseFee.Int64()), logCode), signer, testKey) +- txs = append(txs, tx2) +- expectedProfit += 56996 * baseFee.Uint64() +- +- tx3, _ := types.SignTx(types.NewTransaction(nonce+2, testAddr, big.NewInt(10), 21000, baseFee, nil), signer, testKey) +- txs = append(txs, tx3) +- +- // Test transferring out more than the profit +- toTransferOut := 2*expectedProfit - 21000*baseFee.Uint64() +- tx4, _ := types.SignTx(types.NewTransaction(validatorNonce, testAddr, big.NewInt(int64(toTransferOut)), 21000, baseFee, nil), signer, testValidatorKey) +- txs = append(txs, tx4) +- expectedProfit += 7 +- +- withdrawals := []*types.Withdrawal{ +- { +- Index: 0, +- Validator: 1, +- Amount: 100, +- Address: testAddr, +- }, +- { +- Index: 1, +- Validator: 1, +- Amount: 100, +- Address: testAddr, +- }, +- } +- withdrawalsRoot := types.DeriveSha(types.Withdrawals(withdrawals), trie.NewStackTrie(nil)) +- +- buildBlockArgs := buildBlockArgs{ +- parentHash: lastBlock.Hash(), +- parentRoot: lastBlock.Root(), +- feeRecipient: testValidatorAddr, +- txs: txs, +- random: common.Hash{}, +- number: lastBlock.NumberU64() + 1, +- gasLimit: lastBlock.GasLimit(), +- timestamp: lastBlock.Time() + 5, +- extraData: nil, +- baseFeePerGas: baseFee, +- withdrawals: withdrawals, +- } +- +- execData, err := buildBlock(buildBlockArgs, ethservice.BlockChain()) +- require.NoError(t, err) +- +- value := big.NewInt(int64(expectedProfit)) +- +- req, err := executableDataToBlockValidationRequest(execData, testValidatorAddr, value, withdrawalsRoot) +- require.NoError(t, err) +- require.ErrorContains(t, api.ValidateBuilderSubmissionV2(req), "payment tx not to the proposers fee recipient") +- +- // try to claim less profit than expected, should work +- value.SetUint64(expectedProfit - 1) +- +- req, err = executableDataToBlockValidationRequest(execData, testValidatorAddr, value, withdrawalsRoot) +- require.NoError(t, err) +- require.ErrorContains(t, api.ValidateBuilderSubmissionV2(req), "payment tx not to 
the proposers fee recipient") +- +- // try to claim more profit than expected, should fail +- value.SetUint64(expectedProfit + 1) +- +- req, err = executableDataToBlockValidationRequest(execData, testValidatorAddr, value, withdrawalsRoot) +- require.NoError(t, err) +- require.ErrorContains(t, api.ValidateBuilderSubmissionV2(req), "payment") +-} +- + // This tests payment when the proposer fee recipient is the same as the coinbase + func TestValidateBuilderSubmissionV2_CoinbasePaymentDefault(t *testing.T) { + genesis, preMergeBlocks := generatePreMergeChain(20)
+
+ + + +
+
+ +
+
+
+ + +
+ +
+

We added the ConstraintDecoded primitive type in the core module.

+ +

This is not the ideal home for the type, but since it depends on common.Hash and Transaction and is used by both the builder and miner packages, it is acceptable here for now.
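For illustration, this is roughly how a raw constraint received from the relay could be turned into the decoded form and keyed by transaction hash (using the same imports as the earlier sketches, plus fmt). The decoding step is not part of the diffs in this section, so treat it as an assumption: it presumes the constraint's tx field carries the canonical typed-transaction encoding accepted by Transaction.UnmarshalBinary, and both helpers are hypothetical.

```go
// decodeConstraint is a hypothetical helper: it decodes the raw transaction
// bytes of a constraint and wraps them in the new ConstraintDecoded type.
func decodeConstraint(index *uint64, rawTx []byte) (*types.ConstraintDecoded, error) {
	tx := new(types.Transaction)
	if err := tx.UnmarshalBinary(rawTx); err != nil {
		return nil, fmt.Errorf("decoding constraint tx: %w", err)
	}
	return &types.ConstraintDecoded{Index: index, Tx: tx}, nil
}

// keyByHash builds the HashToConstraintDecoded map consumed by the miner.
func keyByHash(constraints []*types.ConstraintDecoded) types.HashToConstraintDecoded {
	m := make(types.HashToConstraintDecoded, len(constraints))
	for _, c := range constraints {
		m[c.Tx.Hash()] = c
	}
	return m
}
```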

+
+
+ +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+4
+
-8
+ +
+ +
+
+
diff --git flashbots/builder/core/blockchain.go chainbound/bolt/core/blockchain.go +index e1b1ea1bca9d90158551583cb6f2e84612928faf..12639a34d6ded9dd74e231cadbe18973583c31c5 100644 +--- flashbots/builder/core/blockchain.go ++++ chainbound/bolt/core/blockchain.go +@@ -2494,14 +2494,13 @@ if err != nil { + return err + } +  +- feeRecipientBalanceAfter := new(uint256.Int).Set(statedb.GetBalance(feeRecipient)) +- +- amtBeforeOrWithdrawn := new(uint256.Int).Set(feeRecipientBalanceBefore) ++ feeRecipientBalanceDelta := new(uint256.Int).Set(statedb.GetBalance(feeRecipient)) ++ feeRecipientBalanceDelta.Sub(feeRecipientBalanceDelta, feeRecipientBalanceBefore) + if excludeWithdrawals { + for _, w := range block.Withdrawals() { + if w.Address == feeRecipient { + amount := new(uint256.Int).Mul(new(uint256.Int).SetUint64(w.Amount), uint256.NewInt(params.GWei)) +- amtBeforeOrWithdrawn = amtBeforeOrWithdrawn.Add(amtBeforeOrWithdrawn, amount) ++ feeRecipientBalanceDelta.Sub(feeRecipientBalanceDelta, amount) + } + } + } +@@ -2530,10 +2529,7 @@ } +  + // Validate proposer payment +  +- if useBalanceDiffProfit && feeRecipientBalanceAfter.Cmp(amtBeforeOrWithdrawn) >= 0 { +- feeRecipientBalanceDelta := new(uint256.Int).Set(feeRecipientBalanceAfter) +- feeRecipientBalanceDelta = feeRecipientBalanceDelta.Sub(feeRecipientBalanceDelta, amtBeforeOrWithdrawn) +- ++ if useBalanceDiffProfit { + uint256ExpectedProfit, ok := uint256.FromBig(expectedProfit) + if !ok { + if feeRecipientBalanceDelta.Cmp(uint256ExpectedProfit) >= 0 {
+
+ + +
+ + +
+
+
+ + (new) + +
+ + +
+
+ +
+ +
+ +
+62
+
-0
+ +
+ +
+
+
diff --git flashbots/builder/core/types/constraints.go chainbound/bolt/core/types/constraints.go +new file mode 100644 +index 0000000000000000000000000000000000000000..e587b475531ba5585e97da44e848386e6345568e +--- /dev/null ++++ chainbound/bolt/core/types/constraints.go +@@ -0,0 +1,62 @@ ++package types ++ ++import ( ++ "sort" ++ ++ "github.com/ethereum/go-ethereum/common" ++) ++ ++// NOTE: not the greatest place for this type but given that it uses ++// `common.Hash`, `Transaction` and it's used in both the builder ++// package and the miner package, here it's a good place for now ++ ++type ( ++ HashToConstraintDecoded = map[common.Hash]*ConstraintDecoded ++ ConstraintDecoded struct { ++ Index *uint64 ++ Tx *Transaction ++ } ++) ++ ++// ParseConstraintsDecoded receives a map of constraints and returns ++// - a slice of constraints sorted by index ++// - a slice of constraints without index sorted by nonce and hash ++// - the total gas required by the constraints ++// - the total blob gas required by the constraints ++func ParseConstraintsDecoded(constraints HashToConstraintDecoded) ([]*ConstraintDecoded, []*ConstraintDecoded, uint64, uint64) { ++ // Here we initialize and track the constraints left to be executed along ++ // with their gas requirements ++ constraintsOrderedByIndex := make([]*ConstraintDecoded, 0, len(constraints)) ++ constraintsWithoutIndex := make([]*ConstraintDecoded, 0, len(constraints)) ++ constraintsTotalGasLeft := uint64(0) ++ constraintsTotalBlobGasLeft := uint64(0) ++ ++ for _, constraint := range constraints { ++ if constraint.Index == nil { ++ constraintsWithoutIndex = append(constraintsWithoutIndex, constraint) ++ } else { ++ constraintsOrderedByIndex = append(constraintsOrderedByIndex, constraint) ++ } ++ constraintsTotalGasLeft += constraint.Tx.Gas() ++ constraintsTotalBlobGasLeft += constraint.Tx.BlobGas() ++ } ++ ++ // Sorts the constraints by index ascending ++ sort.Slice(constraintsOrderedByIndex, func(i, j int) bool { ++ // By assumption, all constraints here have a non-nil index ++ return *constraintsOrderedByIndex[i].Index < *constraintsOrderedByIndex[j].Index ++ }) ++ ++ // Sorts the unindexed constraints by nonce ascending and by hash ++ sort.Slice(constraintsWithoutIndex, func(i, j int) bool { ++ iNonce := constraintsWithoutIndex[i].Tx.Nonce() ++ jNonce := constraintsWithoutIndex[j].Tx.Nonce() ++ // Sort by hash ++ if iNonce == jNonce { ++ return constraintsWithoutIndex[i].Tx.Hash().Cmp(constraintsWithoutIndex[j].Tx.Hash()) < 0 ++ } ++ return iNonce < jNonce ++ }) ++ ++ return constraintsOrderedByIndex, constraintsWithoutIndex, constraintsTotalGasLeft, constraintsTotalBlobGasLeft ++}
+
+ + + +
+
+ +
+
+
+ + +
+ +
+

Common utilities and types used across all packages.
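As a quick usage example of the new generic slice helpers shown in the common/utils.go diff below (the miner uses Filter to drop pool transactions that conflict with a constraint and Shift to consume ordered constraints from the front; the example assumes the bolt fork of go-ethereum is on the module path, since upstream common does not ship these helpers):

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
)

func main() {
	vals := make([]*int, 0, 5)
	for i := 0; i < 5; i++ {
		v := i
		vals = append(vals, &v)
	}

	// Keep only even values; Filter mutates the slice in place.
	common.Filter(&vals, func(el *int) bool { return *el%2 == 0 })

	// Consume from the front, as the miner does with index-ordered constraints.
	for el := common.Shift(&vals); el != nil; el = common.Shift(&vals) {
		fmt.Print(*el, " ") // prints: 0 2 4
	}
	fmt.Println()
}
```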

+
+
+ +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+159
+
-0
+ +
+ +
+
+
diff --git flashbots/builder/common/types.go chainbound/bolt/common/types.go +index aadca87f82af89543de3387e24a90cba5fe1846f..21dff977795f300d2279733002f97bd51c494c94 100644 +--- flashbots/builder/common/types.go ++++ chainbound/bolt/common/types.go +@@ -31,6 +31,14 @@ "strings" +  + "github.com/ethereum/go-ethereum/common/hexutil" + "golang.org/x/crypto/sha3" ++ ++ "github.com/attestantio/go-builder-client/api/bellatrix" ++ "github.com/attestantio/go-builder-client/api/capella" ++ "github.com/attestantio/go-builder-client/api/deneb" ++ builderSpec "github.com/attestantio/go-builder-client/spec" ++ consensusSpec "github.com/attestantio/go-eth2-client/spec" ++ "github.com/attestantio/go-eth2-client/spec/phase0" ++ fastSsz "github.com/ferranbt/fastssz" + ) +  + // Lengths of hashes and addresses in bytes. +@@ -475,3 +483,154 @@ } else { + return err + } + } ++ ++type HexBytes []byte ++ ++// MarshalJSON implements json.Marshaler. ++func (h HexBytes) MarshalJSON() ([]byte, error) { ++ return []byte(fmt.Sprintf(`"%#x"`, []byte(h))), nil ++} ++ ++// UnmarshalJSON implements json.Unmarshaler. ++func (s *HexBytes) UnmarshalJSON(input []byte) error { ++ if len(input) == 0 { ++ return errors.New("input missing") ++ } ++ ++ if !bytes.HasPrefix(input, []byte{'"', '0', 'x'}) { ++ return errors.New("invalid prefix") ++ } ++ if !bytes.HasSuffix(input, []byte{'"'}) { ++ return errors.New("invalid suffix") ++ } ++ ++ src := input[3 : len(input)-1] ++ *s = make([]byte, hex.DecodedLen(len(src))) ++ ++ _, err := hex.Decode(*s, input[3:len(input)-1]) ++ if err != nil { ++ return err ++ } ++ ++ return nil ++} ++ ++// InclusionProof is a Merkle Multiproof of inclusion of a set of TransactionHashes ++type InclusionProof struct { ++ TransactionHashes []Hash `json:"transaction_hashes"` ++ GeneralizedIndexes []uint64 `json:"generalized_indexes"` ++ MerkleHashes []*HexBytes `json:"merkle_hashes"` ++} ++ ++// InclusionProofFromMultiProof converts a fastssz.Multiproof into an InclusionProof, without ++// filling the TransactionHashes ++func InclusionProofFromMultiProof(mp *fastSsz.Multiproof) *InclusionProof { ++ merkleHashes := make([]*HexBytes, len(mp.Hashes)) ++ for i, h := range mp.Hashes { ++ merkleHashes[i] = new(HexBytes) ++ *(merkleHashes[i]) = h ++ } ++ ++ leaves := make([]*HexBytes, len(mp.Leaves)) ++ for i, h := range mp.Leaves { ++ leaves[i] = new(HexBytes) ++ *(leaves[i]) = h ++ } ++ generalIndexes := make([]uint64, len(mp.Indices)) ++ for i, idx := range mp.Indices { ++ generalIndexes[i] = uint64(idx) ++ } ++ return &InclusionProof{ ++ MerkleHashes: merkleHashes, ++ GeneralizedIndexes: generalIndexes, ++ } ++} ++ ++func (p *InclusionProof) String() string { ++ return JSONStringify(p) ++} ++ ++// A wrapper struct over `builderSpec.VersionedSubmitBlockRequest` ++// to include preconfirmation proofs ++type VersionedSubmitBlockRequestWithProofs struct { ++ Inner *builderSpec.VersionedSubmitBlockRequest `json:"inner"` ++ Proofs *InclusionProof `json:"proofs"` ++} ++ ++// this is necessary, because the mev-boost-relay deserialization doesn't expect a "Version" and "Data" wrapper object ++// for deserialization. Instead, it tries to decode the object into the "Deneb" version first and if that fails, it tries ++// the "Capella" version. This is a workaround to make the deserialization work. 
++func (v *VersionedSubmitBlockRequestWithProofs) MarshalJSON() ([]byte, error) { ++ switch v.Inner.Version { ++ case consensusSpec.DataVersionBellatrix: ++ return json.Marshal(struct { ++ Inner *bellatrix.SubmitBlockRequest `json:"inner"` ++ Proofs *InclusionProof `json:"proofs"` ++ }{ ++ Inner: v.Inner.Bellatrix, ++ Proofs: v.Proofs, ++ }) ++ case consensusSpec.DataVersionCapella: ++ return json.Marshal(struct { ++ Inner *capella.SubmitBlockRequest `json:"inner"` ++ Proofs *InclusionProof `json:"proofs"` ++ }{ ++ Inner: v.Inner.Capella, ++ Proofs: v.Proofs, ++ }) ++ case consensusSpec.DataVersionDeneb: ++ return json.Marshal(struct { ++ Inner *deneb.SubmitBlockRequest `json:"inner"` ++ Proofs *InclusionProof `json:"proofs"` ++ }{ ++ Inner: v.Inner.Deneb, ++ Proofs: v.Proofs, ++ }) ++ } ++ ++ return nil, fmt.Errorf("unknown data version %d", v.Inner.Version) ++} ++ ++func (v *VersionedSubmitBlockRequestWithProofs) String() string { ++ return JSONStringify(v) ++} ++ ++// SignedConstraintsList are a list of proposer constraints that a builder must satisfy ++// in order to produce a valid bid. This is not defined on the ++// [spec](https://chainbound.github.io/bolt-docs/api/builder-api) ++// but it's useful as an helper type ++type SignedConstraintsList = []*SignedConstraints ++ ++// Reference: https://chainbound.github.io/bolt-docs/api/builder-api ++type SignedConstraints struct { ++ Message ConstraintMessage `json:"message"` ++ Signature phase0.BLSSignature `json:"signature"` ++} ++ ++// Reference: https://chainbound.github.io/bolt-docs/api/builder-api ++type ConstraintMessage struct { ++ Constraints []*Constraint `json:"constraints"` ++ ValidatorIndex uint64 `json:"validator_index"` ++ Slot uint64 `json:"slot"` ++} ++ ++// Reference: https://chainbound.github.io/bolt-docs/api/builder-api ++type Constraint struct { ++ Index *uint64 `json:"index"` ++ Tx HexBytes `json:"tx"` ++} ++ ++// ConstraintSubscriptionAuth is the struct the builder signs over to authenticate ++// when subscribing to SSE constraint events from the relay ++type ConstraintSubscriptionAuth struct { ++ PublicKey phase0.BLSPubKey `json:"publicKey"` ++ Slot uint64 `json:"slot"` ++} ++ ++func (c *ConstraintSubscriptionAuth) String() string { ++ buf, err := json.Marshal(c) ++ if err != nil { ++ return fmt.Sprintf("failed to marshal ConstraintSubscriptionAuth: %v", err) ++ } ++ return string(buf) ++}
+
+ + +
+ + +
+
+
+ + (new) + +
+ + +
+
+ +
+ +
+ +
+66
+
-0
+ +
+ +
+
+
diff --git flashbots/builder/common/utils.go chainbound/bolt/common/utils.go +new file mode 100644 +index 0000000000000000000000000000000000000000..50ee385ead7bc04281eab32d8579e5eb63afcb9f +--- /dev/null ++++ chainbound/bolt/common/utils.go +@@ -0,0 +1,66 @@ ++package common ++ ++import "encoding/json" ++ ++func Find[T any](slice []*T, predicate func(el *T) bool) *T { ++ for _, el := range slice { ++ if predicate(el) { ++ return el ++ } ++ } ++ return nil ++} ++ ++// Filter filters a slice in place, removing elements for which the predicate returns false. ++func Filter[T any](slice *[]*T, predicate func(el *T) bool) { ++ if slice == nil { ++ return ++ } ++ ++ for i := 0; i < len(*slice); i++ { ++ el := (*slice)[i] ++ if !predicate(el) { ++ // Remove the element by slicing ++ if i == len(*slice)-1 { ++ *slice = (*slice)[:i] ++ } else { ++ *slice = append((*slice)[:i], (*slice)[i+1:]...) ++ } ++ i-- // Decrement index to adjust for the removed element ++ } ++ } ++} ++ ++func Pop[T any](slice *[]*T) *T { ++ if slice == nil || len(*slice) == 0 { ++ return nil ++ } ++ el := (*slice)[len(*slice)-1] ++ *slice = (*slice)[:len(*slice)-1] ++ return el ++} ++ ++func Shift[T any](slice *[]*T) *T { ++ if slice == nil || len(*slice) == 0 { ++ return nil ++ } ++ el := (*slice)[0] ++ *slice = (*slice)[1:] ++ return el ++} ++ ++func Map[T any, U any](slice []*T, mapper func(el *T) *U) []*U { ++ result := make([]*U, len(slice)) ++ for i, el := range slice { ++ result[i] = mapper(el) ++ } ++ return result ++} ++ ++func JSONStringify(obj any) string { ++ b, err := json.Marshal(obj) ++ if err != nil { ++ return "" ++ } ++ return string(b) ++}
+
+ + +
+ + +
+
+
+ + (new) + +
+ + +
+
+ +
+ +
+ +
+29
+
-0
+ +
+ +
+
+
diff --git flashbots/builder/common/utils_test.go chainbound/bolt/common/utils_test.go +new file mode 100644 +index 0000000000000000000000000000000000000000..bbccefe2ebb1b900b90e4153f317f4e4c2b73f39 +--- /dev/null ++++ chainbound/bolt/common/utils_test.go +@@ -0,0 +1,29 @@ ++package common ++ ++import "testing" ++ ++func TestGenericFilter(t *testing.T) { ++ slice := []*int{new(int), new(int), new(int), new(int)} ++ for i := 0; i < len(slice); i++ { ++ *slice[i] = i ++ } ++ ++ Filter(&slice, func(el *int) bool { ++ return el != nil ++ }) ++ if len(slice) != 4 { ++ t.Errorf("Filter failed") ++ } ++ Filter(&slice, func(el *int) bool { ++ return *el%2 == 0 ++ }) ++ if len(slice) != 2 { ++ t.Errorf("Filter failed") ++ } ++ Filter(&slice, func(el *int) bool { ++ return el == nil ++ }) ++ if len(slice) != 0 { ++ t.Errorf("Filter failed") ++ } ++}
+
+ + + +
+
+ +
+
+
+ + +
+ +
+
+
+ +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+18
+
-17
+ +
+ +
+
+
diff --git flashbots/builder/internal/ethapi/api.go chainbound/bolt/internal/ethapi/api.go +index e3b04835e2a7f57af1499b4f617000b19551f6ab..f53a6fc61716e6b770c8a244511944e50f77f607 100644 +--- flashbots/builder/internal/ethapi/api.go ++++ chainbound/bolt/internal/ethapi/api.go +@@ -242,7 +242,7 @@ } + pending, queue := s.b.TxPoolContent() +  + // Define a formatter to flatten a transaction into a string +- var format = func(tx *types.Transaction) string { ++ format := func(tx *types.Transaction) string { + if to := tx.To(); to != nil { + return fmt.Sprintf("%s: %v wei + %v gas × %v wei", tx.To().Hex(), tx.Value(), tx.Gas(), tx.GasPrice()) + } +@@ -1755,20 +1755,21 @@ } + if err := b.SendTx(ctx, tx, private); err != nil { + return common.Hash{}, err + } +- // Print a log with full tx details for manual investigations and interventions +- head := b.CurrentBlock() +- signer := types.MakeSigner(b.ChainConfig(), head.Number, head.Time) +- from, err := types.Sender(signer, tx) +- if err != nil { +- return common.Hash{}, err +- } +- +- if tx.To() == nil { +- addr := crypto.CreateAddress(from, tx.Nonce()) +- log.Info("Submitted contract creation", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "contract", addr.Hex(), "value", tx.Value()) +- } else { +- log.Info("Submitted transaction", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "recipient", tx.To(), "value", tx.Value()) +- } ++ // Print a log with full tx details for manual investigations and interventions. ++ // TODO: remove this log, too noisy ++ // head := b.CurrentBlock() ++ // signer := types.MakeSigner(b.ChainConfig(), head.Number, head.Time) ++ // from, err := types.Sender(signer, tx) ++ // if err != nil { ++ // return common.Hash{}, err ++ // } ++ // ++ // if tx.To() == nil { ++ // addr := crypto.CreateAddress(from, tx.Nonce()) ++ // log.Info("Submitted contract creation", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "contract", addr.Hex(), "value", tx.Value()) ++ // } else { ++ // log.Info("Submitted transaction", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "recipient", tx.To(), "value", tx.Value()) ++ // } + return tx.Hash(), nil + } +  +@@ -1952,11 +1953,11 @@ } + matchTx := sendArgs.toTransaction() +  + // Before replacing the old transaction, ensure the _new_ transaction fee is reasonable. +- var price = matchTx.GasPrice() ++ price := matchTx.GasPrice() + if gasPrice != nil { + price = gasPrice.ToInt() + } +- var gas = matchTx.Gas() ++ gas := matchTx.Gas() + if gasLimit != nil { + gas = uint64(*gasLimit) + }
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+3
+
-4
+ +
+ +
+
+
diff --git flashbots/builder/internal/ethapi/transaction_args.go chainbound/bolt/internal/ethapi/transaction_args.go +index bae1c68641594887b4a800c0f7bfd6af58326ecf..7b4606742764a82120b6e2d7f656cfb46dbf9f88 100644 +--- flashbots/builder/internal/ethapi/transaction_args.go ++++ chainbound/bolt/internal/ethapi/transaction_args.go +@@ -37,9 +37,7 @@ "github.com/ethereum/go-ethereum/rpc" + "github.com/holiman/uint256" + ) +  +-var ( +- maxBlobsPerTransaction = params.MaxBlobGasPerBlock / params.BlobTxBlobGasPerBlob +-) ++var maxBlobsPerTransaction = params.MaxBlobGasPerBlock / params.BlobTxBlobGasPerBlob +  + // TransactionArgs represents the arguments to construct a new transaction + // or a message call. +@@ -384,7 +382,8 @@ if args.Gas != nil { + gas = uint64(*args.Gas) + } + if globalGasCap != 0 && globalGasCap < gas { +- log.Warn("Caller gas above allowance, capping", "requested", gas, "cap", globalGasCap) ++ // TODO: remove this, but for now it's too noisy ++ // log.Warn("Caller gas above allowance, capping", "requested", gas, "cap", globalGasCap) + gas = globalGasCap + } + var (
+
+ + + +
+
+ +
+
+
+ + +
+
+
+ + +
+ +
+
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-5
+ +
+ +
+
+
diff --git flashbots/builder/.dockerignore chainbound/bolt/.dockerignore +deleted file mode 100644 +index 0c013d18b13f26adba32df14f3642b90e048e0d3..0000000000000000000000000000000000000000 +--- flashbots/builder/.dockerignore ++++ /dev/null +@@ -1,5 +0,0 @@ +-**/*_test.go +- +-build/_workspace +-build/_bin +-tests/testdata
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-3
+ +
+ +
+
+
diff --git flashbots/builder/.gitattributes chainbound/bolt/.gitattributes +deleted file mode 100644 +index 0269fab9cba2722fb0a7598ff18bc2ba46c45bed..0000000000000000000000000000000000000000 +--- flashbots/builder/.gitattributes ++++ /dev/null +@@ -1,3 +0,0 @@ +-# Auto detect text files and perform LF normalization +-* text=auto +-*.sol linguist-language=Solidity
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-4
+ +
+ +
+
+
diff --git flashbots/builder/.github/CODEOWNERS chainbound/bolt/.github/CODEOWNERS +deleted file mode 100644 +index 6cf5893f99c00c40dc6656232c72e3fbd3f05a1c..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/CODEOWNERS ++++ /dev/null +@@ -1,4 +0,0 @@ +-# These owners will be the default owners for everything in +-# the repo. Unless a later match takes precedence, +-# they will be requested for review when someone opens a pull request. +-* @dvush @Wazzymandias @TymKh @Ruteri @avalonche
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-40
+ +
+ +
+
+
diff --git flashbots/builder/.github/CONTRIBUTING.md chainbound/bolt/.github/CONTRIBUTING.md +deleted file mode 100644 +index 969b7f8f9fa1ef4cd87f80bb9df2e80d6cbbc2e8..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/CONTRIBUTING.md ++++ /dev/null +@@ -1,40 +0,0 @@ +-# Contributing +- +-Thank you for considering to help out with the source code! We welcome +-contributions from anyone on the internet, and are grateful for even the +-smallest of fixes! +- +-If you'd like to contribute to go-ethereum, please fork, fix, commit and send a +-pull request for the maintainers to review and merge into the main code base. If +-you wish to submit more complex changes though, please check up with the core +-devs first on [our gitter channel](https://gitter.im/ethereum/go-ethereum) to +-ensure those changes are in line with the general philosophy of the project +-and/or get some early feedback which can make both your efforts much lighter as +-well as our review and merge procedures quick and simple. +- +-## Coding guidelines +- +-Please make sure your contributions adhere to our coding guidelines: +- +- * Code must adhere to the official Go +-[formatting](https://golang.org/doc/effective_go.html#formatting) guidelines +-(i.e. uses [gofmt](https://golang.org/cmd/gofmt/)). +- * Code must be documented adhering to the official Go +-[commentary](https://golang.org/doc/effective_go.html#commentary) guidelines. +- * Pull requests need to be based on and opened against the `master` branch. +- * Commit messages should be prefixed with the package(s) they modify. +- * E.g. "eth, rpc: make trace configs optional" +- +-## Can I have feature X +- +-Before you submit a feature request, please check and make sure that it isn't +-possible through some other means. The JavaScript-enabled console is a powerful +-feature in the right hands. Please check our +-[Geth documentation page](https://geth.ethereum.org/docs/) for more info +-and help. +- +-## Configuration, dependencies, and tests +- +-Please see the [Developers' Guide](https://geth.ethereum.org/docs/developers/geth-developer/dev-guide) +-for more details on configuring your environment, managing project dependencies +-and testing procedures.
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-31
+ +
+ +
+
+
diff --git flashbots/builder/.github/ISSUE_TEMPLATE/bug.md chainbound/bolt/.github/ISSUE_TEMPLATE/bug.md +deleted file mode 100644 +index 45bfd986ac6e38ec7364ce8473b663eee97628c5..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/ISSUE_TEMPLATE/bug.md ++++ /dev/null +@@ -1,31 +0,0 @@ +---- +-name: Report a bug +-about: Something with go-ethereum is not working as expected +-title: '' +-labels: 'type:bug' +-assignees: '' +---- +- +-#### System information +- +-Geth version: `geth version` +-CL client & version: e.g. lighthouse/nimbus/prysm@v1.0.0 +-OS & Version: Windows/Linux/OSX +-Commit hash : (if `develop`) +- +-#### Expected behaviour +- +- +-#### Actual behaviour +- +- +-#### Steps to reproduce the behaviour +- +- +-#### Backtrace +- +-```` +-[backtrace] +-```` +- +-When submitting logs: please submit them as text and not screenshots.
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-16
+ +
+ +
+
+
diff --git flashbots/builder/.github/ISSUE_TEMPLATE/feature.md chainbound/bolt/.github/ISSUE_TEMPLATE/feature.md +deleted file mode 100644 +index aacd885f9e5ef7de4eaa833c9e67297db24a85e2..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/ISSUE_TEMPLATE/feature.md ++++ /dev/null +@@ -1,17 +0,0 @@ +---- +-name: Request a feature +-about: Report a missing feature - e.g. as a step before submitting a PR +-title: '' +-labels: 'type:feature' +-assignees: '' +---- +- +-# Rationale +- +-Why should this feature exist? +-What are the use-cases? +- +-# Implementation +- +-Do you have ideas regarding the implementation of this feature? +-Are you willing to implement this feature? +\ No newline at end of file
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-9
+ +
+ +
+
+
diff --git flashbots/builder/.github/ISSUE_TEMPLATE/question.md chainbound/bolt/.github/ISSUE_TEMPLATE/question.md +deleted file mode 100644 +index 8f460ab558ecc6930b0f1c348c08bdde31fe2b2b..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/ISSUE_TEMPLATE/question.md ++++ /dev/null +@@ -1,9 +0,0 @@ +---- +-name: Ask a question +-about: Something is unclear +-title: '' +-labels: 'type:docs' +-assignees: '' +---- +- +-This should only be used in very rare cases e.g. if you are not 100% sure if something is a bug or asking a question that leads to improving the documentation. For general questions please use [discord](https://discord.gg/nthXNEv) or the Ethereum stack exchange at https://ethereum.stackexchange.com.
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-11
+ +
+ +
+
+
diff --git flashbots/builder/.github/no-response.yml chainbound/bolt/.github/no-response.yml +deleted file mode 100644 +index 903d4ce85f350f737c2049d272fae414a9ac148d..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/no-response.yml ++++ /dev/null +@@ -1,11 +0,0 @@ +-# Number of days of inactivity before an Issue is closed for lack of response +-daysUntilClose: 30 +-# Label requiring a response +-responseRequiredLabel: "need:more-information" +-# Comment to post when closing an Issue for lack of response. Set to `false` to disable +-closeComment: > +- This issue has been automatically closed because there has been no response +- to our request for more information from the original author. With only the +- information that is currently in the issue, we don't have enough information +- to take action. Please reach out if you have more relevant information or +- answers to our questions so that we can investigate further.
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-11
+ +
+ +
+
+
diff --git flashbots/builder/.github/pull_request_template.md chainbound/bolt/.github/pull_request_template.md +deleted file mode 100644 +index 15f903e765a605b824f908f266e78ee9a97ff476..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/pull_request_template.md ++++ /dev/null +@@ -1,11 +0,0 @@ +-## 📝 Summary +- +-<!--- A general summary of your changes --> +- +-## 📚 References +- +-<!-- Any interesting external links to documentation, articles, tweets which add value to the PR --> +- +---- +- +-* [ ] I have seen and agree to [`CONTRIBUTING.md`](https://github.com/flashbots/builder/blob/main/CONTRIBUTING.md)
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-17
+ +
+ +
+
+
diff --git flashbots/builder/.github/stale.yml chainbound/bolt/.github/stale.yml +deleted file mode 100644 +index 6d921cc795ff45352aafb16efb6130f50e470b54..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/stale.yml ++++ /dev/null +@@ -1,17 +0,0 @@ +-# Number of days of inactivity before an issue becomes stale +-daysUntilStale: 366 +-# Number of days of inactivity before a stale issue is closed +-daysUntilClose: 42 +-# Issues with these labels will never be considered stale +-exemptLabels: +- - pinned +- - security +-# Label to use when marking an issue as stale +-staleLabel: "status:inactive" +-# Comment to post when marking an issue as stale. Set to `false` to disable +-markComment: > +- This issue has been automatically marked as stale because it has not had +- recent activity. It will be closed if no further activity occurs. Thank you +- for your contributions. +-# Comment to post when closing a stale issue. Set to `false` to disable +-closeComment: false
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-57
+ +
+ +
+
+
diff --git flashbots/builder/.github/workflows/go.yml chainbound/bolt/.github/workflows/go.yml +deleted file mode 100644 +index 80d75bd7f9fc7470020e40bb60e22c8fe9e77b73..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/workflows/go.yml ++++ /dev/null +@@ -1,57 +0,0 @@ +-name: Go +- +-on: +- push: +- branches: [ main ] +- pull_request: +- +-env: +- CGO_CFLAGS_ALLOW: "-O -D__BLST_PORTABLE__" +- CGO_CFLAGS: "-O -D__BLST_PORTABLE__" +- +-jobs: +- +- lint: +- name: Lint +- runs-on: ubuntu-latest +- steps: +- - name: Set up Go +- uses: actions/setup-go@v3 +- with: +- go-version: ^1.21.4 +- id: go +- +- - name: Check out code into the Go module directory +- uses: actions/checkout@v2 +- +- - name: Install golangci-lint +- run: go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.2 +- +- - name: Lint +- run: make lint +- +- - name: Ensure go mod tidy runs without changes +- run: | +- go mod tidy +- git diff-index HEAD +- git diff-index --quiet HEAD +- +- build: +- name: Build +- runs-on: ubuntu-latest +- steps: +- +- - name: Set up Go 1.x +- uses: actions/setup-go@v3 +- with: +- go-version: 1.21.4 +- id: go +- +- - name: Check out code into the Go module directory +- uses: actions/checkout@v2 +- +- - name: Test +- run: go test ./core ./miner/... ./internal/ethapi/... ./builder/... ./eth/block-validation/... +- +- - name: Build +- run: make geth
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-74
+ +
+ +
+
+
diff --git flashbots/builder/.github/workflows/release.yml chainbound/bolt/.github/workflows/release.yml +deleted file mode 100644 +index 3d056e59cc9b58d7dfd90d5292ed19d8a1dfa622..0000000000000000000000000000000000000000 +--- flashbots/builder/.github/workflows/release.yml ++++ /dev/null +@@ -1,74 +0,0 @@ +-name: Release +- +-on: +- push: +- tags: +- - 'v*' +- +-jobs: +- docker-image: +- name: Publish Docker Image +- runs-on: ubuntu-latest +- +- steps: +- - name: Checkout sources +- uses: actions/checkout@v2 +- +- - name: Get tag version +- run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV +- +- - name: Print version +- run: | +- echo $RELEASE_VERSION +- echo ${{ env.RELEASE_VERSION }} +- +- - name: Set up QEMU +- uses: docker/setup-qemu-action@v2 +- +- - name: Set up Docker Buildx +- uses: docker/setup-buildx-action@v2 +- +- - name: Extract metadata (tags, labels) for Docker +- id: meta +- uses: docker/metadata-action@v4 +- with: +- images: flashbots/builder +- tags: | +- type=sha +- type=pep440,pattern={{version}} +- type=pep440,pattern={{major}}.{{minor}} +- type=raw,value=latest +- +- - name: Login to DockerHub +- uses: docker/login-action@v2 +- with: +- username: ${{ secrets.FLASHBOTS_DOCKERHUB_USERNAME }} +- password: ${{ secrets.FLASHBOTS_DOCKERHUB_TOKEN }} +- +- - name: Build and push +- uses: docker/build-push-action@v3 +- with: +- context: . +- push: true +- build-args: | +- VERSION=${{ env.RELEASE_VERSION }} +- platforms: linux/amd64,linux/arm64 +- tags: ${{ steps.meta.outputs.tags }} +- labels: ${{ steps.meta.outputs.labels }} +- +- github-release: +- runs-on: ubuntu-latest +- steps: +- - name: Checkout sources +- uses: actions/checkout@v2 +- +- - name: Create release +- id: create_release +- uses: actions/create-release@v1 +- env: +- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +- with: +- tag_name: ${{ github.ref }} +- release_name: ${{ github.ref }} +- draft: true +- prerelease: false
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-54
+ +
+ +
+
+
diff --git flashbots/builder/.gitignore chainbound/bolt/.gitignore +deleted file mode 100644 +index 7b1908dc5cd79d574e39053a78c951ab55507460..0000000000000000000000000000000000000000 +--- flashbots/builder/.gitignore ++++ /dev/null +@@ -1,54 +0,0 @@ +-# See http://help.github.com/ignore-files/ for more about ignoring files. +-# +-# If you find yourself ignoring temporary files generated by your text editor +-# or operating system, you probably want to add a global ignore instead: +-# git config --global core.excludesfile ~/.gitignore_global +- +-/tmp +-*/**/*un~ +-*/**/*.test +-*un~ +-.DS_Store +-*/**/.DS_Store +-.ethtest +-*/**/*tx_database* +-*/**/*dapps* +-build/_vendor/pkg +- +-#* +-.#* +-*# +-*~ +-.project +-.settings +- +-# used by the Makefile +-/build/_workspace/ +-/build/cache/ +-/build/bin/ +-/geth*.zip +- +-# travis +-profile.tmp +-profile.cov +- +-# IdeaIDE +-.idea +- +-# VS Code +-.vscode +- +-# dashboard +-/dashboard/assets/flow-typed +-/dashboard/assets/node_modules +-/dashboard/assets/stats.json +-/dashboard/assets/bundle.js +-/dashboard/assets/bundle.js.map +-/dashboard/assets/package-lock.json +- +-**/yarn-error.log +-logs/ +-/ofac_blacklist.json +-/blacklist.json +- +-tests/spec-tests/
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-8
+ +
+ +
+
+
diff --git flashbots/builder/.gitmodules chainbound/bolt/.gitmodules +deleted file mode 100644 +index 241c169c4772ce246ffa45f7fa8a63019ffea0e1..0000000000000000000000000000000000000000 +--- flashbots/builder/.gitmodules ++++ /dev/null +@@ -1,8 +0,0 @@ +-[submodule "tests"] +- path = tests/testdata +- url = https://github.com/ethereum/tests +- shallow = true +-[submodule "evm-benchmarks"] +- path = tests/evm-benchmarks +- url = https://github.com/ipsilon/evm-benchmarks +- shallow = true
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-60
+ +
+ +
+
+
diff --git flashbots/builder/.golangci.yml chainbound/bolt/.golangci.yml +deleted file mode 100644 +index 0343c4b4ebf2eec8adc0a473a2892a4100b3a86a..0000000000000000000000000000000000000000 +--- flashbots/builder/.golangci.yml ++++ /dev/null +@@ -1,60 +0,0 @@ +-# This file configures github.com/golangci/golangci-lint. +- +-run: +- timeout: 20m +- tests: true +- # default is true. Enables skipping of directories: +- # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ +- skip-dirs-use-default: true +- skip-files: +- - core/genesis_alloc.go +- +-linters: +- disable-all: true +- enable: +- - goimports +- - gosimple +- - govet +- - ineffassign +- - misspell +- - unconvert +- - typecheck +- - unused +- - staticcheck +- - bidichk +- - durationcheck +- - exportloopref +- - whitespace +- +- # - structcheck # lots of false positives +- # - errcheck #lot of false positives +- # - contextcheck +- # - errchkjson # lots of false positives +- # - errorlint # this check crashes +- # - exhaustive # silly check +- # - makezero # false positives +- # - nilerr # several intentional +- +-linters-settings: +- gofmt: +- simplify: true +- +-issues: +- exclude-rules: +- - path: crypto/bn256/cloudflare/optate.go +- linters: +- - deadcode +- - staticcheck +- - path: internal/build/pgp.go +- text: 'SA1019: "golang.org/x/crypto/openpgp" is deprecated: this package is unmaintained except for security fixes.' +- - path: core/vm/contracts.go +- text: 'SA1019: "golang.org/x/crypto/ripemd160" is deprecated: RIPEMD-160 is a legacy hash and should not be used for new applications.' +- - path: accounts/usbwallet/trezor.go +- text: 'SA1019: "github.com/golang/protobuf/proto" is deprecated: Use the "google.golang.org/protobuf/proto" package instead.' +- - path: accounts/usbwallet/trezor/ +- text: 'SA1019: "github.com/golang/protobuf/proto" is deprecated: Use the "google.golang.org/protobuf/proto" package instead.' +- exclude: +- - 'SA1019: event.TypeMux is deprecated: use Feed' +- - 'SA1019: strings.Title is deprecated' +- - 'SA1019: strings.Title has been deprecated since Go 1.18 and an alternative has been available since Go 1.0: The rule Title uses for word boundaries does not handle Unicode punctuation properly. Use golang.org/x/text/cases instead.' +- - 'SA1029: should not use built-in type string as key for value'
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-237
+ +
+ +
+
+
diff --git flashbots/builder/.mailmap chainbound/bolt/.mailmap +deleted file mode 100644 +index aa074b76d6b59639cf48cdb408e418a0b7c05eca..0000000000000000000000000000000000000000 +--- flashbots/builder/.mailmap ++++ /dev/null +@@ -1,237 +0,0 @@ +-Aaron Buchwald <aaron.buchwald56@gmail.com> +- +-Aaron Kumavis <kumavis@users.noreply.github.com> +- +-Abel Nieto <abel.nieto90@gmail.com> +-Abel Nieto <abel.nieto90@gmail.com> <anietoro@uwaterloo.ca> +- +-Afri Schoedon <58883403+q9f@users.noreply.github.com> +-Afri Schoedon <5chdn@users.noreply.github.com> <58883403+q9f@users.noreply.github.com> +- +-Alec Perseghin <aperseghin@gmail.com> +- +-Aleksey Smyrnov <i@soar.name> +- +-Alex Leverington <alex@ethdev.com> +-Alex Leverington <alex@ethdev.com> <subtly@users.noreply.github.com> +- +-Alex Pozhilenkov <alex_pozhilenkov@adoriasoft.com> +-Alex Pozhilenkov <alex_pozhilenkov@adoriasoft.com> <leshiy12345678@gmail.com> +- +-Alexey Akhunov <akhounov@gmail.com> +- +-Alon Muroch <alonmuroch@gmail.com> +- +-Andrey Petrov <shazow@gmail.com> +-Andrey Petrov <shazow@gmail.com> <andrey.petrov@shazow.net> +- +-Arkadiy Paronyan <arkadiy@ethdev.com> +- +-Armin Braun <me@obrown.io> +- +-Aron Fischer <github@aron.guru> <homotopycolimit@users.noreply.github.com> +- +-Austin Roberts <code@ausiv.com> +-Austin Roberts <code@ausiv.com> <git@ausiv.com> +- +-Bas van Kervel <bas@ethdev.com> +-Bas van Kervel <bas@ethdev.com> <basvankervel@ziggo.nl> +-Bas van Kervel <bas@ethdev.com> <basvankervel@gmail.com> +-Bas van Kervel <bas@ethdev.com> <bas-vk@users.noreply.github.com> +- +-Boqin Qin <bobbqqin@bupt.edu.cn> +-Boqin Qin <bobbqqin@bupt.edu.cn> <Bobbqqin@gmail.com> +- +-Casey Detrio <cdetrio@gmail.com> +- +-Cheng Li <lob4tt@gmail.com> +- +-Chris Ziogas <ziogaschr@gmail.com> +-Chris Ziogas <ziogaschr@gmail.com> <ziogas_chr@hotmail.com> +- +-Christoph Jentzsch <jentzsch.software@gmail.com> +- +-Diederik Loerakker <proto@protolambda.com> +- +-Dimitry Khokhlov <winsvega@mail.ru> +- +-Domino Valdano <dominoplural@gmail.com> +-Domino Valdano <dominoplural@gmail.com> <jeff@okcupid.com> +- +-Edgar Aroutiounian <edgar.factorial@gmail.com> +- +-Elliot Shepherd <elliot@identitii.com> +- +-Enrique Fynn <enriquefynn@gmail.com> +- +-Enrique Fynn <me@enriquefynn.com> +-Enrique Fynn <me@enriquefynn.com> <enriquefynn@gmail.com> +- +-Ernesto del Toro <ernesto.deltoro@gmail.com> +-Ernesto del Toro <ernesto.deltoro@gmail.com> <ernestodeltoro@users.noreply.github.com> +- +-Everton Fraga <ev@ethereum.org> +- +-Felix Lange <fjl@twurst.com> +-Felix Lange <fjl@twurst.com> <fjl@users.noreply.github.com> +- +-Frank Wang <eternnoir@gmail.com> +- +-Gary Rong <garyrong0905@gmail.com> +- +-Gavin Wood <i@gavwood.com> +- +-Gregg Dourgarian <greggd@tempworks.com> +- +-Guillaume Ballet <gballet@gmail.com> +-Guillaume Ballet <gballet@gmail.com> <3272758+gballet@users.noreply.github.com> +- +-Guillaume Nicolas <guin56@gmail.com> +- +-Hanjiang Yu <delacroix.yu@gmail.com> +-Hanjiang Yu <delacroix.yu@gmail.com> <42531996+de1acr0ix@users.noreply.github.com> +- +-Heiko Hees <heiko@heiko.org> +- +-Henning Diedrich <hd@eonblast.com> +-Henning Diedrich <hd@eonblast.com> Drake Burroughs <wildfyre@hotmail.com> +- +-Hwanjo Heo <34005989+hwanjo@users.noreply.github.com> +- +-Iskander (Alex) Sharipov <quasilyte@gmail.com> +-Iskander (Alex) Sharipov <quasilyte@gmail.com> <i.sharipov@corp.vk.com> +- +-Jae Kwon <jkwon.work@gmail.com> +- +-Janoš Guljaš <janos@resenje.org> <janos@users.noreply.github.com> +-Janoš Guljaš <janos@resenje.org> Janos Guljas <janos@resenje.org> +- 
+-Jared Wasinger <j-wasinger@hotmail.com> +- +-Jason Carver <jacarver@linkedin.com> +-Jason Carver <jacarver@linkedin.com> <ut96caarrs@snkmail.com> +- +-Javier Peletier <jm@epiclabs.io> +-Javier Peletier <jm@epiclabs.io> <jpeletier@users.noreply.github.com> +- +-Jeffrey Wilcke <jeffrey@ethereum.org> +-Jeffrey Wilcke <jeffrey@ethereum.org> <geffobscura@gmail.com> +-Jeffrey Wilcke <jeffrey@ethereum.org> <obscuren@obscura.com> +-Jeffrey Wilcke <jeffrey@ethereum.org> <obscuren@users.noreply.github.com> +- +-Jens Agerberg <github@agerberg.me> +- +-Joseph Chow <ethereum@outlook.com> +-Joseph Chow <ethereum@outlook.com> ethers <TODO> +- +- +-Joseph Goulden <joegoulden@gmail.com> +- +-Justin Drake <drakefjustin@gmail.com> +- +-Kenso Trabing <ktrabing@acm.org> +-Kenso Trabing <ktrabing@acm.org> <kenso.trabing@bloomwebsite.com> +- +-Liang Ma <liangma@liangbit.com> +-Liang Ma <liangma@liangbit.com> <liangma.ul@gmail.com> +- +-Louis Holbrook <dev@holbrook.no> +-Louis Holbrook <dev@holbrook.no> <nolash@users.noreply.github.com> +- +-Maran Hidskes <maran.hidskes@gmail.com> +- +-Marian Oancea <contact@siteshop.ro> +- +-Martin Becze <mjbecze@gmail.com> +-Martin Becze <mjbecze@gmail.com> <wanderer@users.noreply.github.com> +- +-Martin Lundfall <martin.lundfall@protonmail.com> +- +-Matt Garnett <14004106+lightclient@users.noreply.github.com> +- +-Matthew Halpern <matthalp@gmail.com> +-Matthew Halpern <matthalp@gmail.com> <matthalp@google.com> +- +-Michael Riabzev <michael@starkware.co> +- +-Nchinda Nchinda <nchinda2@gmail.com> +- +-Nick Dodson <silentcicero@outlook.com> +- +-Nick Johnson <arachnid@notdot.net> +- +-Nick Savers <nicksavers@gmail.com> +- +-Nishant Das <nishdas93@gmail.com> +-Nishant Das <nishdas93@gmail.com> <nish1993@hotmail.com> +- +-Olivier Hervieu <olivier.hervieu@gmail.com> +- +-Pascal Dierich <pascal@merkleplant.xyz> +-Pascal Dierich <pascal@merkleplant.xyz> <pascal@pascaldierich.com> +- +-RJ Catalano <catalanor0220@gmail.com> +-RJ Catalano <catalanor0220@gmail.com> <rj@erisindustries.com> +- +-Ralph Caraveo <deckarep@gmail.com> +- +-Rene Lubov <41963722+renaynay@users.noreply.github.com> +- +-Robert Zaremba <robert@zaremba.ch> +-Robert Zaremba <robert@zaremba.ch> <robert.zaremba@scale-it.pl> +- +-Roman Mandeleil <roman.mandeleil@gmail.com> +- +-Sorin Neacsu <sorin.neacsu@gmail.com> +-Sorin Neacsu <sorin.neacsu@gmail.com> <sorin@users.noreply.github.com> +- +-Sven Ehlert <sven@ethdev.com> +- +-Taylor Gerring <taylor.gerring@gmail.com> +-Taylor Gerring <taylor.gerring@gmail.com> <taylor.gerring@ethereum.org> +- +-Thomas Bocek <tom@tomp2p.net> +- +-Tim Cooijmans <timcooijmans@gmail.com> +- +-Valentin Wüstholz <wuestholz@gmail.com> +-Valentin Wüstholz <wuestholz@gmail.com> <wuestholz@users.noreply.github.com> +- +-Victor Tran <vu.tran54@gmail.com> +- +-Viktor Trón <viktor.tron@gmail.com> +- +-Ville Sundell <github@solarius.fi> +- +-Vincent G <caktux@gmail.com> +- +-Vitalik Buterin <v@buterin.com> +- +-Vlad Gluhovsky <gluk256@gmail.com> +-Vlad Gluhovsky <gluk256@gmail.com> <gluk256@users.noreply.github.com> +- +-Wenshao Zhong <wzhong20@uic.edu> +-Wenshao Zhong <wzhong20@uic.edu> <11510383@mail.sustc.edu.cn> +-Wenshao Zhong <wzhong20@uic.edu> <374662347@qq.com> +- +-Will Villanueva <hello@willvillanueva.com> +- +-Xiaobing Jiang <s7v7nislands@gmail.com> +- +-Xudong Liu <33193253+r1cs@users.noreply.github.com> +- +-Yohann Léon <sybiload@gmail.com> +- +-Zachinquarantine <Zachinquarantine@protonmail.com> +-Zachinquarantine <Zachinquarantine@protonmail.com> <zachinquarantine@yahoo.com> +- +-Ziyuan 
Zhong <zzy.albert@163.com> +- +-Zsolt Felföldi <zsfelfoldi@gmail.com> +- +-meowsbits <b5c6@protonmail.com> +-meowsbits <b5c6@protonmail.com> <45600330+meowsbits@users.noreply.github.com> +- +-nedifi <103940716+nedifi@users.noreply.github.com> +- +-Максим Чусовлянов <mchusovlianov@gmail.com>
+
+ + +
+ + +
+
+ + +
+ + (deleted) + +
+
+
+ +
+ +
+ +
+0
+
-168
+ +
+ +
+
+
diff --git flashbots/builder/.travis.yml chainbound/bolt/.travis.yml +deleted file mode 100644 +index a55583a703febc6a861a5b41b8d70352724eb02f..0000000000000000000000000000000000000000 +--- flashbots/builder/.travis.yml ++++ /dev/null +@@ -1,168 +0,0 @@ +-language: go +-go_import_path: github.com/ethereum/go-ethereum +-sudo: false +-jobs: +- allow_failures: +- - stage: build +- os: osx +- env: +- - azure-osx +- +- include: +- # These builders create the Docker sub-images for multi-arch push and each +- # will attempt to push the multi-arch image if they are the last builder +- - stage: build +- if: type = push +- os: linux +- arch: amd64 +- dist: bionic +- go: 1.21.x +- env: +- - docker +- services: +- - docker +- git: +- submodules: false # avoid cloning ethereum/tests +- before_install: +- - export DOCKER_CLI_EXPERIMENTAL=enabled +- script: +- - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go +- +- - stage: build +- if: type = push +- os: linux +- arch: arm64 +- dist: bionic +- go: 1.21.x +- env: +- - docker +- services: +- - docker +- git: +- submodules: false # avoid cloning ethereum/tests +- before_install: +- - export DOCKER_CLI_EXPERIMENTAL=enabled +- script: +- - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go +- +- # This builder does the Linux Azure uploads +- - stage: build +- if: type = push +- os: linux +- dist: bionic +- sudo: required +- go: 1.21.x +- env: +- - azure-linux +- git: +- submodules: false # avoid cloning ethereum/tests +- addons: +- apt: +- packages: +- - gcc-multilib +- script: +- # Build for the primary platforms that Trusty can manage +- - go run build/ci.go install -dlgo +- - go run build/ci.go archive -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds +- - go run build/ci.go install -dlgo -arch 386 +- - go run build/ci.go archive -arch 386 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds +- +- # Switch over GCC to cross compilation (breaks 386, hence why do it here only) +- - sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install gcc-arm-linux-gnueabi libc6-dev-armel-cross gcc-arm-linux-gnueabihf libc6-dev-armhf-cross gcc-aarch64-linux-gnu libc6-dev-arm64-cross +- - sudo ln -s /usr/include/asm-generic /usr/include/asm +- +- - GOARM=5 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc +- - GOARM=5 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds +- - GOARM=6 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc +- - GOARM=6 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds +- - GOARM=7 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabihf-gcc +- - GOARM=7 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds +- - go run build/ci.go install -dlgo -arch arm64 -cc aarch64-linux-gnu-gcc +- - go run build/ci.go archive -arch arm64 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds +- +- # This builder does the OSX Azure uploads +- - stage: build +- if: type = push +- os: osx +- osx_image: xcode14.2 +- go: 1.21.x +- env: +- - azure-osx +- git: +- submodules: false # avoid cloning ethereum/tests +- script: +- - go run build/ci.go install -dlgo +- - go run build/ci.go archive -type tar -signer OSX_SIGNING_KEY -signify SIGNIFY_KEY 
-upload gethstore/builds +- - go run build/ci.go install -dlgo -arch arm64 +- - go run build/ci.go archive -arch arm64 -type tar -signer OSX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds +- +- # These builders run the tests +- - stage: build +- os: linux +- arch: amd64 +- dist: bionic +- go: 1.21.x +- script: +- - travis_wait 30 go run build/ci.go test $TEST_PACKAGES +- +- - stage: build +- if: type = pull_request +- os: linux +- arch: arm64 +- dist: bionic +- go: 1.20.x +- script: +- - travis_wait 30 go run build/ci.go test $TEST_PACKAGES +- +- - stage: build +- os: linux +- dist: bionic +- go: 1.20.x +- script: +- - travis_wait 30 go run build/ci.go test $TEST_PACKAGES +- +- # This builder does the Ubuntu PPA nightly uploads +- - stage: build +- if: type = cron || (type = push && tag ~= /^v[0-9]/) +- os: linux +- dist: bionic +- go: 1.21.x +- env: +- - ubuntu-ppa +- git: +- submodules: false # avoid cloning ethereum/tests +- addons: +- apt: +- packages: +- - devscripts +- - debhelper +- - dput +- - fakeroot +- - python-bzrlib +- - python-paramiko +- script: +- - echo '|1|7SiYPr9xl3uctzovOTj4gMwAC1M=|t6ReES75Bo/PxlOPJ6/GsGbTrM0= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA0aKz5UTUndYgIGG7dQBV+HaeuEZJ2xPHo2DS2iSKvUL4xNMSAY4UguNW+pX56nAQmZKIZZ8MaEvSj6zMEDiq6HFfn5JcTlM80UwlnyKe8B8p7Nk06PPQLrnmQt5fh0HmEcZx+JU9TZsfCHPnX7MNz4ELfZE6cFsclClrKim3BHUIGq//t93DllB+h4O9LHjEUsQ1Sr63irDLSutkLJD6RXchjROXkNirlcNVHH/jwLWR5RcYilNX7S5bIkK8NlWPjsn/8Ua5O7I9/YoE97PpO6i73DTGLh5H9JN/SITwCKBkgSDWUt61uPK3Y11Gty7o2lWsBjhBUm2Y38CBsoGmBw==' >> ~/.ssh/known_hosts +- - go run build/ci.go debsrc -upload ethereum/ethereum -sftp-user geth-ci -signer "Go Ethereum Linux Builder <geth-ci@ethereum.org>" +- +- # This builder does the Azure archive purges to avoid accumulating junk +- - stage: build +- if: type = cron +- os: linux +- dist: bionic +- go: 1.21.x +- env: +- - azure-purge +- git: +- submodules: false # avoid cloning ethereum/tests +- script: +- - go run build/ci.go purge -store gethstore/builds -days 14 +- +- # This builder executes race tests +- - stage: build +- if: type = cron +- os: linux +- dist: bionic +- go: 1.21.x +- script: +- - travis_wait 30 go run build/ci.go test -race $TEST_PACKAGES +-
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+1
+
-1
+ +
+ +
+
+
diff --git flashbots/builder/Dockerfile chainbound/bolt/Dockerfile +index ed69a04789678e839186208e04a2483b33b4d68c..c808c9d940fa1c217cea7e417241b53626d233a2 100644 +--- flashbots/builder/Dockerfile ++++ chainbound/bolt/Dockerfile +@@ -4,7 +4,7 @@ ARG VERSION="" + ARG BUILDNUM="" +  + # Build Geth in a stock Go builder container +-FROM golang:1.21-alpine as builder ++FROM golang:1.22-alpine AS builder +  + RUN apk add --no-cache gcc musl-dev linux-headers git +
+
+ + +
+ + +
+
+ + + +
+
+ +
+ +
+ +
+1
+
-1
+ +
+ +
+
+
diff --git flashbots/builder/Dockerfile.alltools chainbound/bolt/Dockerfile.alltools +index c317da25fa4870b8fd2189ccf0a679ddbe87384a..ddffb8ee1d1c4da5448c9ddbe845b0fe7fc16844 100644 +--- flashbots/builder/Dockerfile.alltools ++++ chainbound/bolt/Dockerfile.alltools +@@ -4,7 +4,7 @@ ARG VERSION="" + ARG BUILDNUM="" +  + # Build Geth in a stock Go builder container +-FROM golang:1.21-alpine as builder ++FROM golang:1.22-alpine AS builder +  + RUN apk add --no-cache gcc musl-dev linux-headers git +
+
+ + +
+
+ + + +
+ +
+
+ + + +
+
+ +
+ +
+ +
+7
+
-3
+ +
+ +
+
+
diff --git flashbots/builder/go.mod chainbound/bolt/go.mod +index 7d6b1540a62cab968e4f54c8ee75f0d8b10df36a..dfe1cc1581108637aa8a11b882fff140e57e2cfc 100644 +--- flashbots/builder/go.mod ++++ chainbound/bolt/go.mod +@@ -1,6 +1,6 @@ + module github.com/ethereum/go-ethereum +  +-go 1.20 ++go 1.22 +  + require ( + github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.0 +@@ -15,6 +15,7 @@ github.com/aws/aws-sdk-go-v2/service/route53 v1.30.2 + github.com/btcsuite/btcd/btcec/v2 v2.2.1 + github.com/cenkalti/backoff/v4 v4.2.1 + github.com/cespare/cp v0.1.0 ++ github.com/chainbound/shardmap v0.0.2 + github.com/cloudflare/cloudflare-go v0.79.0 + github.com/cockroachdb/pebble v0.0.0-20230928194634-aa077af62593 + github.com/consensys/gnark-crypto v0.12.1 +@@ -25,7 +26,7 @@ github.com/deckarep/golang-set/v2 v2.1.0 + github.com/dop251/goja v0.0.0-20230806174421-c933cf95e127 + github.com/ethereum/c-kzg-4844 v0.4.0 + github.com/fatih/color v1.15.0 +- github.com/ferranbt/fastssz v0.1.3 ++ github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688 + github.com/fjl/gencodec v0.0.0-20230517082657-f9840df7b83e + github.com/fjl/memsize v0.0.2 + github.com/flashbots/go-boost-utils v1.8.0 +@@ -39,6 +40,7 @@ github.com/golang/protobuf v1.5.3 + github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb + github.com/google/gofuzz v1.2.0 + github.com/google/uuid v1.3.0 ++ github.com/gorilla/handlers v1.5.2 + github.com/gorilla/mux v1.8.0 + github.com/gorilla/websocket v1.4.2 + github.com/grafana/pyroscope-go/godeltaprof v0.1.7 +@@ -84,6 +86,8 @@ gopkg.in/yaml.v3 v3.0.1 + ) +  + require ( ++ github.com/emicklei/dot v1.6.2 // indirect ++ github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/getsentry/sentry-go v0.18.0 // indirect + github.com/goccy/go-yaml v1.11.2 // indirect + github.com/klauspost/cpuid/v2 v2.2.5 // indirect +@@ -147,7 +151,7 @@ github.com/mitchellh/pointerstructure v1.2.0 // indirect + github.com/mmcloughlin/addchain v0.4.0 // indirect + github.com/naoina/toml v0.1.1 + github.com/opentracing/opentracing-go v1.2.0 // indirect +- github.com/pkg/errors v0.9.1 // indirect ++ github.com/pkg/errors v0.9.1 + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_golang v1.16.0 // indirect + github.com/prometheus/client_model v0.3.0 // indirect
+
+ + +
+
+ + + +
+ +
+
+ + + +
+
+ +
+ +
+ +
+10
+
-2
+ +
+ +
+
+
diff --git flashbots/builder/go.sum chainbound/bolt/go.sum +index 3c9ff3c8173e1ee07717ea20a9ea6d6292488016..1ab78598f52a4582b536e7b5d6988d85c54dec5b 100644 +--- flashbots/builder/go.sum ++++ chainbound/bolt/go.sum +@@ -74,6 +74,8 @@ github.com/cespare/cp v0.1.0 h1:SE+dxFebS7Iik5LK0tsi1k9ZCxEaFX4AjQmoyA+1dJk= + github.com/cespare/cp v0.1.0/go.mod h1:SOGHArjBr4JWaSDEVpWpo/hNg6RoKrls6Oh40hiwW+s= + github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= + github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= ++github.com/chainbound/shardmap v0.0.2 h1:yB1weccdm2vC6dnqzzLwPIvyAnRj7815mJWbkPybiYw= ++github.com/chainbound/shardmap v0.0.2/go.mod h1:TBvIzhHyFUbt+oa3UzbijobTUh221st6xIbuki7WzPc= + github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY= + github.com/chzyer/readline v1.5.0/go.mod h1:x22KAscuvRqlLoK9CsoYsmxoXZMMFVyOl86cAH8qUic= + github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +@@ -135,6 +137,8 @@ github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= + github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= + github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= + github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= ++github.com/emicklei/dot v1.6.2 h1:08GN+DD79cy/tzN6uLCT84+2Wk9u+wvqP+Hkx/dIR8A= ++github.com/emicklei/dot v1.6.2/go.mod h1:DeV7GvQtIw4h2u73RKBkkFdvVAz0D9fzeJrgPW6gy/s= + github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= + github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= + github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +@@ -146,8 +150,10 @@ github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= + github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= + github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= + github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +-github.com/ferranbt/fastssz v0.1.3 h1:ZI+z3JH05h4kgmFXdHuR1aWYsgrg7o+Fw7/NCzM16Mo= +-github.com/ferranbt/fastssz v0.1.3/go.mod h1:0Y9TEd/9XuFlh7mskMPfXiI2Dkw4Ddg9EyXt1W7MRvE= ++github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= ++github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= ++github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688 h1:k70X5h1haHaSbpD/9fcjtvAUEVlRlOKtdpvN7Mzhcv4= ++github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688/go.mod h1:Ea3+oeoRGGLGm5shYAeDgu6PGUlcvQhE2fILyD9+tGg= + github.com/fjl/gencodec v0.0.0-20230517082657-f9840df7b83e h1:bBLctRc7kr01YGvaDfgLbTwjFNW5jdp5y5rj8XXBHfY= + github.com/fjl/gencodec v0.0.0-20230517082657-f9840df7b83e/go.mod h1:AzA8Lj6YtixmJWL+wkKoBGsLWy9gFrAzi4g+5bCKwpY= + github.com/fjl/memsize v0.0.2 h1:27txuSD9or+NZlnOWdKUxeBzTAUkWCVh+4Gf2dWFOzA= +@@ -253,6 +259,8 @@ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= + github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= 
+ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= + github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= ++github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE= ++github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w= + github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= + github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= + github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+
+ + +
+
+ +
+
+
+ + +
+
+
+ + + + + + + + diff --git a/index.html b/index.html old mode 100755 new mode 100644 index 8cc2f9d8a..680744265 --- a/index.html +++ b/index.html @@ -1,6634 +1,35 @@ - - - + + + - - + + - Bolt Builder + Bolt Forkdiff Homepage - + - + - - - - - -
-
+ +
-
-
-
- - -
-
-
-
-
-
- -

Bolt Builder

- - -
-
- - diff: - - - ignored: - -
-
-
-
- -
+1764
-
-322
- - -
+22
-
-814
- -
-
-
- -
-

This is an overview of the changes made to the canonical Flashbots Builder to -support inclusion preconfirmations through Bolt.

- - - -

Here’s an overview of all the changes divided by module:

-
-
- - -
-
- -
- -
-

This is where the bulk of the API diffs are located.

-
-
- - -
-
- -
- -
-

We added two new Builder API endpoints to communicate with Relays:

- - - -

The constraints cache is populated as soon as new constraints are streamed from the relay, and percolate to the miner -at block building time.

-
-
- -
- - -
-
- - - -
-
- -
- -
- -
+233
-
-6
- -
- -
-
-
diff --git flashbots/builder/builder/builder.go chainbound/bolt/builder/builder.go -index bcdab8fc1ec93b4f85264bc7a0ec0fe25edcc4a6..913a199edf60c3a1e514442c19948f79d665d9ba 100644 ---- flashbots/builder/builder/builder.go -+++ chainbound/bolt/builder/builder.go -@@ -1,11 +1,17 @@ - package builder -  - import ( -+ "bufio" -+ "compress/gzip" - "context" -+ "encoding/json" - "errors" - "fmt" -+ "io" - "math/big" -+ "net/http" - _ "os" -+ "strings" - "sync" - "time" -  -@@ -20,6 +26,7 @@ "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" -+ "github.com/chainbound/shardmap" - "github.com/ethereum/go-ethereum/beacon/engine" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core" -@@ -45,6 +52,10 @@ - SubmissionOffsetFromEndOfSlotSecondsDefault = 3 * time.Second - ) -  -+const ( -+ SubscribeConstraintsPath = "/relay/v1/builder/constraints" -+) -+ - type PubkeyHex string -  - type ValidatorData struct { -@@ -55,6 +66,7 @@ } -  - type IRelay interface { - SubmitBlock(msg *builderSpec.VersionedSubmitBlockRequest, vd ValidatorData) error -+ SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, vd ValidatorData) error - GetValidatorForSlot(nextSlot uint64) (ValidatorData, error) - Config() RelayConfig - Start() error -@@ -81,6 +93,9 @@ builderPublicKey phase0.BLSPubKey - builderSigningDomain phase0.Domain - builderResubmitInterval time.Duration - discardRevertibleTxOnErr bool -+ -+ // constraintsCache is a map from slot to the decoded constraints made by proposers -+ constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded] -  - limiter *rate.Limiter - submissionOffsetFromEndOfSlot time.Duration -@@ -95,6 +110,7 @@ } -  - // BuilderArgs is a struct that contains all the arguments needed to create a new Builder - type BuilderArgs struct { -+ boltCCEndpoint string - sk *bls.SecretKey - ds flashbotsextra.IDatabaseService - blockConsumer flashbotsextra.BlockConsumer -@@ -161,6 +177,9 @@ args.submissionOffsetFromEndOfSlot = SubmissionOffsetFromEndOfSlotSecondsDefault - } -  - slotCtx, slotCtxCancel := context.WithCancel(context.Background()) -+ -+ constraintsCache := shardmap.NewFIFOMap[uint64, types.HashToConstraintDecoded](64, 16, shardmap.HashUint64) -+ - return &Builder{ - ds: args.ds, - blockConsumer: args.blockConsumer, -@@ -177,6 +196,8 @@ builderResubmitInterval: args.builderBlockResubmitInterval, - discardRevertibleTxOnErr: args.discardRevertibleTxOnErr, - submissionOffsetFromEndOfSlot: args.submissionOffsetFromEndOfSlot, -  -+ constraintsCache: constraintsCache, -+ - limiter: args.limiter, - slotCtx: slotCtx, - slotCtxCancel: slotCtxCancel, -@@ -228,7 +249,170 @@ } - } - }() -  -- return b.relay.Start() -+ if err := b.relay.Start(); err != nil { -+ return err -+ } -+ -+ return b.SubscribeProposerConstraints() -+} -+ -+// GenerateAuthenticationHeader generates an authentication string for the builder -+// to subscribe to SSE constraint events emitted by relays -+func (b *Builder) GenerateAuthenticationHeader() (string, error) { -+ // NOTE: the `slot` acts similarly to a nonce for the message to sign, to avoid replay attacks. 
-+ slot := b.slotAttrs.Slot -+ message, err := json.Marshal(common.ConstraintSubscriptionAuth{PublicKey: b.builderPublicKey, Slot: slot}) -+ if err != nil { -+ log.Error(fmt.Sprintf("Failed to marshal auth message: %v", err)) -+ return "", err -+ } -+ signatureEC := bls.Sign(b.builderSecretKey, message) -+ subscriptionSignatureJSON := `"` + phase0.BLSSignature(bls.SignatureToBytes(signatureEC)[:]).String() + `"` -+ authHeader := "BOLT " + subscriptionSignatureJSON + "," + string(message) -+ return authHeader, nil -+} -+ -+// SubscribeProposerConstraints subscribes to the constraints made by Bolt proposers -+// which the builder pulls from relay(s) using SSE. -+func (b *Builder) SubscribeProposerConstraints() error { -+ // Create authentication signed message -+ authHeader, err := b.GenerateAuthenticationHeader() -+ if err != nil { -+ log.Error(fmt.Sprintf("Failed to generate authentication header: %v", err)) -+ return err -+ } -+ -+ // Check if `b.relay` is a RemoteRelayAggregator, if so we need to subscribe to -+ // the constraints made available by all the relays -+ relayAggregator, ok := b.relay.(*RemoteRelayAggregator) -+ if ok { -+ for _, relay := range relayAggregator.relays { -+ go b.subscribeToRelayForConstraints(relay.Config().Endpoint, authHeader) -+ } -+ } else { -+ go b.subscribeToRelayForConstraints(b.relay.Config().Endpoint, authHeader) -+ } -+ return nil -+} -+ -+func (b *Builder) subscribeToRelayForConstraints(relayBaseEndpoint, authHeader string) error { -+ attempts := 0 -+ maxAttempts := 60 // Max 10 minutes of retries -+ retryInterval := 10 * time.Second -+ -+ var resp *http.Response -+ -+ for { -+ log.Info("Attempting to subscribe to constraints...") -+ -+ if attempts >= maxAttempts { -+ log.Error(fmt.Sprintf("Failed to subscribe to constraints after %d attempts", maxAttempts)) -+ return errors.New("failed to subscribe to constraints") -+ } -+ -+ req, err := http.NewRequest(http.MethodGet, relayBaseEndpoint+SubscribeConstraintsPath, nil) -+ if err != nil { -+ log.Error(fmt.Sprintf("Failed to create new http request: %v", err)) -+ return err -+ } -+ req.Header.Set("Authorization", authHeader) -+ -+ client := http.Client{} -+ -+ resp, err = client.Do(req) -+ if err != nil { -+ log.Error(fmt.Sprintf("Failed to connect to SSE server: %v", err)) -+ time.Sleep(retryInterval) -+ attempts++ -+ continue -+ } -+ -+ if resp.StatusCode != http.StatusOK { -+ log.Error(fmt.Sprintf("Error subscribing to constraints via SSE: %s, %v", resp.Status, err)) -+ return err -+ } -+ break -+ } -+ -+ defer resp.Body.Close() -+ log.Info(fmt.Sprintf("Connected to SSE server: %s", relayBaseEndpoint)) -+ -+ var reader io.Reader -+ -+ // Check if the response is gzipped -+ if resp.Header.Get("Content-Encoding") == "gzip" { -+ // Decompress the response body -+ gzipReader, err := gzip.NewReader(resp.Body) -+ if err != nil { -+ return fmt.Errorf("error creating gzip reader: %v", err) -+ } -+ defer gzipReader.Close() -+ reader = gzipReader -+ } else { -+ reader = resp.Body -+ } -+ -+ bufReader := bufio.NewReader(reader) -+ for { -+ line, err := bufReader.ReadString('\n') -+ if err != nil { -+ if err == io.EOF { -+ log.Info("End of stream") -+ break -+ } -+ log.Error(fmt.Sprintf("Error reading from response body: %v", err)) -+ continue -+ } -+ -+ if !strings.HasPrefix(line, "data: ") { -+ continue -+ } -+ -+ data := strings.TrimPrefix(line, "data: ") -+ -+ // We assume the data is the JSON representation of the constraints -+ log.Info(fmt.Sprintf("Received new constraint: %s", data)) -+ constraintsSigned 
:= make(common.SignedConstraintsList, 0, 8) -+ if err := json.Unmarshal([]byte(data), &constraintsSigned); err != nil { -+ log.Warn(fmt.Sprintf("Failed to unmarshal constraints: %v", err)) -+ continue -+ } -+ -+ if len(constraintsSigned) == 0 { -+ log.Warn("Received 0 length list of constraints") -+ continue -+ } -+ -+ for _, constraint := range constraintsSigned { -+ decodedConstraints, err := DecodeConstraints(constraint) -+ if err != nil { -+ log.Error("Failed to decode constraint: ", err) -+ continue -+ } -+ -+ EmitBoltDemoEvent(fmt.Sprintf("Received constraint from relay for slot %d, stored in cache (path: %s)", constraint.Message.Slot, SubscribeConstraintsPath)) -+ -+ // For every constraint, we need to check if it has already been seen for the associated slot -+ slotConstraints, _ := b.constraintsCache.Get(constraint.Message.Slot) -+ if len(slotConstraints) == 0 { -+ // New constraint for this slot, add it in the map and continue with the next constraint -+ b.constraintsCache.Put(constraint.Message.Slot, decodedConstraints) -+ continue -+ } -+ -+ for hash := range decodedConstraints { -+ // Update the slot constraints -+ slotConstraints[hash] = decodedConstraints[hash] -+ } -+ -+ // Update the slot constraints in the cache -+ b.constraintsCache.Put(constraint.Message.Slot, slotConstraints) -+ -+ } -+ -+ } -+ -+ return nil - } -  - func (b *Builder) Stop() error { -@@ -236,6 +420,7 @@ close(b.stop) - return nil - } -  -+// BOLT: modify to calculate merkle inclusion proofs for preconfirmed transactions - func (b *Builder) onSealedBlock(opts SubmitBlockOpts) error { - executableData := engine.BlockToExecutableData(opts.Block, opts.BlockValue, opts.BlobSidecars) - var dataVersion spec.DataVersion -@@ -272,6 +457,35 @@ log.Error("could not get block request", "err", err) - return err - } -  -+ var versionedBlockRequestWithPreconfsProofs *common.VersionedSubmitBlockRequestWithProofs -+ -+ // BOLT: fetch constraints from the cache, which is automatically updated by the SSE subscription -+ constraints, _ := b.constraintsCache.Get(opts.PayloadAttributes.Slot) -+ log.Info(fmt.Sprintf("[BOLT]: Found %d constraints for slot %d", len(constraints), opts.PayloadAttributes.Slot)) -+ -+ if len(constraints) > 0 { -+ message := fmt.Sprintf("sealing block %d with %d constraints", opts.Block.Number(), len(constraints)) -+ log.Info(message) -+ EmitBoltDemoEvent(message) -+ -+ timeStart := time.Now() -+ inclusionProof, _, err := CalculateMerkleMultiProofs(opts.Block.Transactions(), constraints) -+ timeForProofs := time.Since(timeStart) -+ -+ if err != nil { -+ log.Error("[BOLT]: could not calculate merkle multiproofs", "err", err) -+ return err -+ } -+ -+ // BOLT: send event to web demo -+ EmitBoltDemoEvent(fmt.Sprintf("created merkle multiproof of %d constraint(s) for block %d in %v", len(constraints), opts.Block.Number(), timeForProofs)) -+ -+ versionedBlockRequestWithPreconfsProofs = &common.VersionedSubmitBlockRequestWithProofs{ -+ Inner: versionedBlockRequest, -+ Proofs: inclusionProof, -+ } -+ } -+ - if b.dryRun { - switch dataVersion { - case spec.DataVersionBellatrix: -@@ -285,16 +499,23 @@ if err != nil { - log.Error("could not validate block", "version", dataVersion.String(), "err", err) - } - } else { -+ // NOTE: we can ignore preconfs for `processBuiltBlock` - go b.processBuiltBlock(opts.Block, opts.BlockValue, opts.OrdersClosedAt, opts.SealedAt, opts.CommitedBundles, opts.AllBundles, opts.UsedSbundles, &blockBidMsg) -- err = b.relay.SubmitBlock(versionedBlockRequest, opts.ValidatorData) -+ if 
versionedBlockRequestWithPreconfsProofs != nil { -+ log.Info(fmt.Sprintf("[BOLT]: Sending sealed block to relay %s", versionedBlockRequestWithPreconfsProofs)) -+ err = b.relay.SubmitBlockWithProofs(versionedBlockRequestWithPreconfsProofs, opts.ValidatorData) -+ } else if len(constraints) == 0 { -+ // If versionedBlockRequestWithPreconfsProofs is nil and no constraints, then we don't have proofs to send -+ err = b.relay.SubmitBlock(versionedBlockRequest, opts.ValidatorData) -+ } else { -+ log.Warn(fmt.Sprintf("[BOLT]: Could not send sealed block this time because we have %d constraints but no proofs", len(constraints))) -+ return nil -+ } - if err != nil { - log.Error("could not submit block", "err", err, "verion", dataVersion, "#commitedBundles", len(opts.CommitedBundles)) - return err - } - } -- -- log.Info("submitted block", "version", dataVersion.String(), "slot", opts.PayloadAttributes.Slot, "value", opts.BlockValue.String(), "parent", opts.Block.ParentHash().String(), -- "hash", opts.Block.Hash(), "#commitedBundles", len(opts.CommitedBundles)) -  - return nil - } -@@ -363,6 +584,7 @@ log.Info("successfully relayed block data to consumer") - } - } -  -+// Called when a new payload event is received from the beacon client SSE - func (b *Builder) OnPayloadAttribute(attrs *types.BuilderPayloadAttributes) error { - if attrs == nil { - return nil -@@ -407,6 +629,8 @@ b.slotAttrs = *attrs - b.slotCtx = slotCtx - b.slotCtxCancel = slotCtxCancel -  -+ log.Info("[BOLT]: Inside onPayloadAttribute", "slot", attrs.Slot, "parent", attrs.HeadHash, "payloadTimestamp", uint64(attrs.Timestamp)) -+ - go b.runBuildingJob(b.slotCtx, proposerPubkey, vd, attrs) - return nil - } -@@ -422,6 +646,9 @@ allBundles []types.SimulatedBundle - usedSbundles []types.UsedSBundle - } -  -+// Continuously makes a request to the miner module with the correct params and submits the best produced block. -+// on average 1 attempt per second is made. -+// - Submissions to the relay are rate limited to 2 req/s - func (b *Builder) runBuildingJob(slotCtx context.Context, proposerPubkey phase0.BLSPubKey, vd ValidatorData, attrs *types.BuilderPayloadAttributes) { - ctx, cancel := context.WithTimeout(slotCtx, 12*time.Second) - defer cancel() -@@ -515,7 +742,7 @@ log.Debug("retrying BuildBlock", - "slot", attrs.Slot, - "parent", attrs.HeadHash, - "resubmit-interval", b.builderResubmitInterval.String()) -- err := b.eth.BuildBlock(attrs, blockHook) -+ err := b.eth.BuildBlock(attrs, blockHook, b.constraintsCache) - if err != nil { - log.Warn("Failed to build block", "err", err) - }
-
- - -
- - -
-
- - - -
-
- -
- -
- -
+397
-
-0
- -
- -
-
-
diff --git flashbots/builder/builder/builder_test.go chainbound/bolt/builder/builder_test.go -index d8a698c4cf7172d0710fd5010d2587206ebd9374..6c1948183e24b999788017af20df1c3a567b464f 100644 ---- flashbots/builder/builder/builder_test.go -+++ chainbound/bolt/builder/builder_test.go -@@ -1,7 +1,12 @@ - package builder -  - import ( -+ "encoding/hex" -+ "encoding/json" -+ "fmt" - "math/big" -+ "net/http" -+ "strings" - "testing" - "time" -  -@@ -14,10 +19,13 @@ "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/flashbotsextra" -+ "github.com/ethereum/go-ethereum/log" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/utils" -+ "github.com/gorilla/handlers" - "github.com/holiman/uint256" -+ "github.com/pkg/errors" - "github.com/stretchr/testify/require" - ) -  -@@ -170,3 +178,392 @@ - time.Sleep(2200 * time.Millisecond) - require.NotNil(t, testRelay.submittedMsg) - } -+ -+func TestBlockWithPreconfs(t *testing.T) { -+ const ( -+ validatorDesiredGasLimit = 30_000_000 -+ payloadAttributeGasLimit = 30_000_000 // Was zero in the other test -+ parentBlockGasLimit = 29_000_000 -+ ) -+ expectedGasLimit := core.CalcGasLimit(parentBlockGasLimit, validatorDesiredGasLimit) -+ -+ vsk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x370bb8c1a6e62b2882f6ec76762a67b39609002076b95aae5b023997cf9b2dc9")) -+ require.NoError(t, err) -+ validator := &ValidatorPrivateData{ -+ sk: vsk, -+ Pk: hexutil.MustDecode("0xb67d2c11bcab8c4394fc2faa9601d0b99c7f4b37e14911101da7d97077917862eed4563203d34b91b5cf0aa44d6cfa05"), -+ } -+ -+ testBeacon := testBeaconClient{ -+ validator: validator, -+ slot: 56, -+ } -+ -+ feeRecipient, _ := utils.HexToAddress("0xabcf8e0d4e9587369b2301d0790347320302cc00") -+ testRelay := testRelay{ -+ gvsVd: ValidatorData{ -+ Pubkey: PubkeyHex(testBeacon.validator.Pk.String()), -+ FeeRecipient: feeRecipient, -+ GasLimit: validatorDesiredGasLimit, -+ }, -+ } -+ -+ sk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x31ee185dad1220a8c88ca5275e64cf5a5cb09cb621cb30df52c9bee8fbaaf8d7")) -+ require.NoError(t, err) -+ -+ bDomain := ssz.ComputeDomain(ssz.DomainTypeAppBuilder, [4]byte{0x02, 0x0, 0x0, 0x0}, phase0.Root{}) -+ -+ // https://etherscan.io/tx/0x9d48b4a021898a605b7ae49bf93ad88fa6bd7050e9448f12dde064c10f22fe9c -+ // 0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e -+ preconfTxByte, _ := hex.DecodeString("02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e") -+ preconfTx := new(types.Transaction) -+ err = preconfTx.UnmarshalBinary(preconfTxByte) -+ require.NoError(t, err) -+ -+ // https://etherscan.io/tx/0x15bd881daa1408b33f67fa4bdeb8acfb0a2289d9b4c6f81eef9bb2bb2e52e780 - Blob Tx -+ // 
0x03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe -+ preconfTxWithBlobByte, _ := hex.DecodeString("03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe") -+ preconfTxWithBlob := new(types.Transaction) -+ err = preconfTxWithBlob.UnmarshalBinary(preconfTxWithBlobByte) -+ require.NoError(t, err) -+ -+ testExecutableData := &engine.ExecutableData{ -+ ParentHash: common.Hash{0x02, 0x03}, -+ FeeRecipient: common.Address(feeRecipient), -+ StateRoot: common.Hash{0x07, 0x16}, -+ ReceiptsRoot: common.Hash{0x08, 0x20}, -+ LogsBloom: types.Bloom{}.Bytes(), -+ Number: uint64(10), -+ GasLimit: expectedGasLimit, -+ GasUsed: uint64(100), -+ Timestamp: uint64(105), -+ ExtraData: hexutil.MustDecode("0x0042fafc"), -+ -+ BaseFeePerGas: big.NewInt(16), -+ -+ BlockHash: common.HexToHash("3cce5d0f5c9a7e188e79c35168256e91bec2d98a1140f6701da6ed3c98ea9d04"), -+ Transactions: [][]byte{preconfTxByte, preconfTxWithBlobByte}, -+ } -+ -+ testBlock, err := 
engine.ExecutableDataToBlock(*testExecutableData, preconfTxWithBlob.BlobHashes(), nil) -+ require.NoError(t, err) -+ -+ testPayloadAttributes := &types.BuilderPayloadAttributes{ -+ Timestamp: hexutil.Uint64(104), -+ Random: common.Hash{0x05, 0x10}, -+ SuggestedFeeRecipient: common.Address{0x04, 0x10}, -+ GasLimit: uint64(payloadAttributeGasLimit), -+ Slot: uint64(25), -+ } -+ -+ testEthService := &testEthereumService{synced: true, testExecutableData: testExecutableData, testBlock: testBlock, testBlockValue: big.NewInt(10)} -+ builderArgs := BuilderArgs{ -+ sk: sk, -+ ds: flashbotsextra.NilDbService{}, -+ relay: &testRelay, -+ builderSigningDomain: bDomain, -+ eth: testEthService, -+ dryRun: false, -+ ignoreLatePayloadAttributes: false, -+ validator: nil, -+ beaconClient: &testBeacon, -+ limiter: nil, -+ blockConsumer: flashbotsextra.NilDbService{}, -+ } -+ builder, err := NewBuilder(builderArgs) -+ require.NoError(t, err) -+ -+ builder.Start() -+ defer builder.Stop() -+ -+ // Add the transaction to the cache directly -+ builder.constraintsCache.Put(25, map[common.Hash]*types.ConstraintDecoded{ -+ preconfTx.Hash(): { -+ Tx: preconfTx, -+ }, -+ preconfTxWithBlob.Hash(): { -+ Tx: preconfTxWithBlob, -+ }, -+ }) -+ -+ err = builder.OnPayloadAttribute(testPayloadAttributes) -+ require.NoError(t, err) -+ time.Sleep(time.Second * 3) -+ -+ require.NotNil(t, testRelay.submittedMsgWithPreconf) -+ -+ expectedProposerPubkey, err := utils.HexToPubkey(testBeacon.validator.Pk.String()) -+ require.NoError(t, err) -+ -+ expectedMessage := builderApiV1.BidTrace{ -+ Slot: uint64(25), -+ ParentHash: phase0.Hash32{0x02, 0x03}, -+ BuilderPubkey: builder.builderPublicKey, -+ ProposerPubkey: expectedProposerPubkey, -+ ProposerFeeRecipient: feeRecipient, -+ GasLimit: expectedGasLimit, -+ GasUsed: uint64(100), -+ Value: &uint256.Int{0x0a}, -+ } -+ copy(expectedMessage.BlockHash[:], hexutil.MustDecode("0x3cce5d0f5c9a7e188e79c35168256e91bec2d98a1140f6701da6ed3c98ea9d04")[:]) -+ require.NotNil(t, testRelay.submittedMsgWithPreconf.Inner.Bellatrix) -+ require.Equal(t, expectedMessage, *testRelay.submittedMsgWithPreconf.Inner.Bellatrix.Message) -+ -+ expectedExecutionPayload := bellatrix.ExecutionPayload{ -+ ParentHash: [32]byte(testExecutableData.ParentHash), -+ FeeRecipient: feeRecipient, -+ StateRoot: [32]byte(testExecutableData.StateRoot), -+ ReceiptsRoot: [32]byte(testExecutableData.ReceiptsRoot), -+ LogsBloom: [256]byte{}, -+ PrevRandao: [32]byte(testExecutableData.Random), -+ BlockNumber: testExecutableData.Number, -+ GasLimit: testExecutableData.GasLimit, -+ GasUsed: testExecutableData.GasUsed, -+ Timestamp: testExecutableData.Timestamp, -+ ExtraData: hexutil.MustDecode("0x0042fafc"), -+ BaseFeePerGas: [32]byte{0x10}, -+ BlockHash: expectedMessage.BlockHash, -+ Transactions: []bellatrix.Transaction{preconfTxByte, preconfTxWithBlobByte}, -+ } -+ -+ require.Equal(t, expectedExecutionPayload, *testRelay.submittedMsgWithPreconf.Inner.Bellatrix.ExecutionPayload) -+ -+ expectedSignature, err := utils.HexToSignature("0x97db0496dcfd04ed444b87b6fc1c9e3339a0d35f7c01825ac353812601a72e7e35ef94899a9b03f4d23102214701255805efd0f6552073791ea1c3e10003ae435952f8305f6b89e58d4442ced149d3c33a486f5a390b4b8047e6ea4176059755") -+ -+ require.NoError(t, err) -+ require.Equal(t, expectedSignature, testRelay.submittedMsgWithPreconf.Inner.Bellatrix.Signature) -+ -+ require.Equal(t, uint64(25), testRelay.requestedSlot) -+ -+ // Clear the submitted message and check that the job will be ran again and but a new message will not be submitted 
since the hash is the same -+ testEthService.testBlockValue = big.NewInt(10) -+ -+ testRelay.submittedMsgWithPreconf = nil -+ time.Sleep(2200 * time.Millisecond) -+ require.Nil(t, testRelay.submittedMsgWithPreconf) -+ -+ // Change the hash, expect to get the block -+ testExecutableData.ExtraData = hexutil.MustDecode("0x0042fafd") -+ testExecutableData.BlockHash = common.HexToHash("0x38456f6f1f5e76cf83c89ebb8606ff2b700bf02a86a165316c6d7a0c4e6a8614") -+ testBlock, err = engine.ExecutableDataToBlock(*testExecutableData, preconfTxWithBlob.BlobHashes(), nil) -+ testEthService.testBlockValue = big.NewInt(10) -+ require.NoError(t, err) -+ testEthService.testBlock = testBlock -+ -+ time.Sleep(2200 * time.Millisecond) -+ require.NotNil(t, testRelay.submittedMsgWithPreconf) -+} -+ -+func TestSubscribeProposerConstraints(t *testing.T) { -+ // ------------ Start Builder setup ------------- // -+ const ( -+ validatorDesiredGasLimit = 30_000_000 -+ payloadAttributeGasLimit = 0 -+ parentBlockGasLimit = 29_000_000 -+ ) -+ expectedGasLimit := core.CalcGasLimit(parentBlockGasLimit, validatorDesiredGasLimit) -+ -+ vsk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x370bb8c1a6e62b2882f6ec76762a67b39609002076b95aae5b023997cf9b2dc9")) -+ require.NoError(t, err) -+ validator := &ValidatorPrivateData{ -+ sk: vsk, -+ Pk: hexutil.MustDecode("0xb67d2c11bcab8c4394fc2faa9601d0b99c7f4b37e14911101da7d97077917862eed4563203d34b91b5cf0aa44d6cfa05"), -+ } -+ -+ testBeacon := testBeaconClient{ -+ validator: validator, -+ slot: 56, -+ } -+ -+ feeRecipient, _ := utils.HexToAddress("0xabcf8e0d4e9587369b2301d0790347320302cc00") -+ -+ relayPort := "31245" -+ relay := NewRemoteRelay(RelayConfig{Endpoint: "http://localhost:" + relayPort}, nil, true) -+ -+ sk, err := bls.SecretKeyFromBytes(hexutil.MustDecode("0x31ee185dad1220a8c88ca5275e64cf5a5cb09cb621cb30df52c9bee8fbaaf8d7")) -+ require.NoError(t, err) -+ -+ bDomain := ssz.ComputeDomain(ssz.DomainTypeAppBuilder, [4]byte{0x02, 0x0, 0x0, 0x0}, phase0.Root{}) -+ -+ testExecutableData := &engine.ExecutableData{ -+ ParentHash: common.Hash{0x02, 0x03}, -+ FeeRecipient: common.Address(feeRecipient), -+ StateRoot: common.Hash{0x07, 0x16}, -+ ReceiptsRoot: common.Hash{0x08, 0x20}, -+ LogsBloom: types.Bloom{}.Bytes(), -+ Number: uint64(10), -+ GasLimit: expectedGasLimit, -+ GasUsed: uint64(100), -+ Timestamp: uint64(105), -+ ExtraData: hexutil.MustDecode("0x0042fafc"), -+ -+ BaseFeePerGas: big.NewInt(16), -+ -+ BlockHash: common.HexToHash("0x68e516c8827b589fcb749a9e672aa16b9643437459508c467f66a9ed1de66a6c"), -+ Transactions: [][]byte{}, -+ } -+ -+ testBlock, err := engine.ExecutableDataToBlock(*testExecutableData, nil, nil) -+ require.NoError(t, err) -+ -+ testEthService := &testEthereumService{synced: true, testExecutableData: testExecutableData, testBlock: testBlock, testBlockValue: big.NewInt(10)} -+ -+ builderArgs := BuilderArgs{ -+ sk: sk, -+ ds: flashbotsextra.NilDbService{}, -+ relay: relay, -+ builderSigningDomain: bDomain, -+ eth: testEthService, -+ dryRun: false, -+ ignoreLatePayloadAttributes: false, -+ validator: nil, -+ beaconClient: &testBeacon, -+ limiter: nil, -+ blockConsumer: flashbotsextra.NilDbService{}, -+ } -+ -+ builder, err := NewBuilder(builderArgs) -+ require.NoError(t, err) -+ -+ // ------------ End Builder setup ------------- // -+ -+ // Attach the sseHandler to the relay port -+ mux := http.NewServeMux() -+ mux.HandleFunc(SubscribeConstraintsPath, sseConstraintsHandler) -+ -+ // Wrap the mux with the GzipHandler middleware -+ // NOTE: In this case, we don't 
need to create a gzip writer in the handlers, -+ // by default the `http.ResponseWriter` will implement gzip compression -+ gzipMux := handlers.CompressHandler(mux) -+ -+ http.HandleFunc(SubscribeConstraintsPath, sseConstraintsHandler) -+ go http.ListenAndServe(":"+relayPort, gzipMux) -+ -+ // Constraints should not be available yet -+ _, ok := builder.constraintsCache.Get(0) -+ require.Equal(t, false, ok) -+ -+ // Create authentication signed message -+ authHeader, err := builder.GenerateAuthenticationHeader() -+ require.NoError(t, err) -+ builder.subscribeToRelayForConstraints(builder.relay.Config().Endpoint, authHeader) -+ // Wait 2 seconds to save all constraints in cache -+ time.Sleep(2 * time.Second) -+ -+ slots := []uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9} -+ for _, slot := range slots { -+ cachedConstraints, ok := builder.constraintsCache.Get(slot) -+ require.Equal(t, true, ok) -+ -+ expectedConstraint := generateMockConstraintsForSlot(slot)[0] -+ decodedConstraint, err := DecodeConstraints(expectedConstraint) -+ require.NoError(t, err) -+ -+ // Compare the keys of the cachedConstraints and decodedConstraint maps -+ require.Equal(t, len(cachedConstraints), len(decodedConstraint), "The number of keys in both maps should be the same") -+ for key := range cachedConstraints { -+ _, ok := decodedConstraint[key] -+ require.True(t, ok, fmt.Sprintf("Key %s found in cachedConstraints but not in decodedConstraint", key.String())) -+ require.Equal(t, cachedConstraints[key].Tx.Data(), decodedConstraint[key].Tx.Data(), "The decodedConstraint Tx should be equal to the cachedConstraints Tx") -+ } -+ for key := range decodedConstraint { -+ _, ok := cachedConstraints[key] -+ require.True(t, ok, fmt.Sprintf("Key %s found in decodedConstraint but not in cachedConstraints", key.String())) -+ } -+ } -+} -+ -+func sseConstraintsHandler(w http.ResponseWriter, r *http.Request) { -+ w.Header().Set("Content-Type", "text/event-stream") -+ w.Header().Set("Cache-Control", "no-cache") -+ w.Header().Set("Connection", "keep-alive") -+ w.Header().Set("Content-Encoding", "gzip") -+ -+ flusher, ok := w.(http.Flusher) -+ if !ok { -+ http.Error(w, "Streaming unsupported!", http.StatusInternalServerError) -+ return -+ } -+ -+ auth := r.Header.Get("Authorization") -+ _, err := validateConstraintSubscriptionAuth(auth, 0) -+ if err != nil { -+ http.Error(w, err.Error(), http.StatusUnauthorized) -+ return -+ } -+ -+ for i := 0; i < 256; i++ { -+ // Generate some duplicated constraints -+ slot := uint64(i) % 32 -+ constraints := generateMockConstraintsForSlot(slot) -+ bytes, err := json.Marshal(constraints) -+ if err != nil { -+ log.Error(fmt.Sprintf("Error while marshaling constraints: %v", err)) -+ return -+ } -+ fmt.Fprintf(w, "data: %s\n\n", string(bytes)) -+ flusher.Flush() -+ } -+} -+ -+// generateMockConstraintsForSlot generates a list of constraints for a given slot -+func generateMockConstraintsForSlot(slot uint64) common.SignedConstraintsList { -+ rawTx := new(common.HexBytes) -+ err := rawTx.UnmarshalJSON([]byte("\"0x02f876018305da308401312d0085041f1196d2825208940c598786c88883ff5e4f461750fad64d3fae54268804b7ec32d7a2000080c080a0086f02eacec72820be3b117e1edd5bd7ed8956964b28b2d903d2cba53dd13560a06d61ec9ccce6acb31bf21878b9a844e7fdac860c5b7d684f7eb5f38a5945357c\"")) -+ if err != nil { -+ fmt.Println("Failed to unmarshal rawTx: ", err) -+ } -+ -+ return common.SignedConstraintsList{ -+ &common.SignedConstraints{ -+ Message: common.ConstraintMessage{ -+ Constraints: []*common.Constraint{{Tx: *rawTx}}, ValidatorIndex: 0, 
Slot: slot, -+ }, Signature: phase0.BLSSignature{}, -+ }, -+ } -+} -+ -+// validateConstraintSubscriptionAuth checks the authentication string data from the Builder, -+// and returns its BLS public key if the authentication is valid. -+func validateConstraintSubscriptionAuth(auth string, headSlot uint64) (phase0.BLSPubKey, error) { -+ zeroKey := phase0.BLSPubKey{} -+ if auth == "" { -+ return zeroKey, errors.New("authorization header missing") -+ } -+ // Authorization: <auth-scheme> <authorization-parameters> -+ parts := strings.Split(auth, " ") -+ if len(parts) != 2 { -+ return zeroKey, errors.New("ill-formed authorization header") -+ } -+ if parts[0] != "BOLT" { -+ return zeroKey, errors.New("not BOLT authentication scheme") -+ } -+ // <signatureJSON>,<authDataJSON> -+ parts = strings.SplitN(parts[1], ",", 2) -+ if len(parts) != 2 { -+ return zeroKey, errors.New("ill-formed authorization header") -+ } -+ -+ signature := new(phase0.BLSSignature) -+ if err := signature.UnmarshalJSON([]byte(parts[0])); err != nil { -+ fmt.Println("Failed to unmarshal authData: ", err) -+ return zeroKey, errors.New("ill-formed authorization header") -+ } -+ -+ authDataRaw := []byte(parts[1]) -+ authData := new(common.ConstraintSubscriptionAuth) -+ if err := json.Unmarshal(authDataRaw, authData); err != nil { -+ fmt.Println("Failed to unmarshal authData: ", err) -+ return zeroKey, errors.New("ill-formed authorization header") -+ } -+ -+ if headSlot != authData.Slot { -+ return zeroKey, errors.New("invalid head slot") -+ } -+ -+ ok, err := bls.VerifySignatureBytes(authDataRaw, signature[:], authData.PublicKey[:]) -+ if err != nil || !ok { -+ return zeroKey, errors.New("invalid signature") -+ } -+ return authData.PublicKey, nil -+}
+5
-0
diff --git flashbots/builder/builder/local_relay.go chainbound/bolt/builder/local_relay.go -index 5a503a5c2b8c7e4751c09465b9e4cf2e4c43a44c..ea4c8c46fe88745a2747d5c56f335dbc78c1e553 100644 ---- flashbots/builder/builder/local_relay.go -+++ chainbound/bolt/builder/local_relay.go -@@ -21,6 +21,7 @@ "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/phase0" - eth2UtilBellatrix "github.com/attestantio/go-eth2-client/util/bellatrix" -+ "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/log" - "github.com/flashbots/go-boost-utils/bls" -@@ -114,6 +115,10 @@ - func (r *LocalRelay) SubmitBlock(msg *builderSpec.VersionedSubmitBlockRequest, _ ValidatorData) error { - log.Info("submitting block to local relay", "block", msg.Bellatrix.ExecutionPayload.BlockHash.String()) - return r.submitBlock(msg.Bellatrix) -+} -+ -+func (r *LocalRelay) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, _ ValidatorData) error { -+ panic("Not implemented!") - } -  - func (r *LocalRelay) Config() RelayConfig {
+44
-0
diff --git flashbots/builder/builder/relay.go chainbound/bolt/builder/relay.go -index 579fe14d7f746aa597bdd90351f012e45372fe0d..ef002f6b1de92e0396ff42ee9c005f69cf9493f4 100644 ---- flashbots/builder/builder/relay.go -+++ chainbound/bolt/builder/relay.go -@@ -11,6 +11,7 @@ "time" -  - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" -+ "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/log" - "github.com/flashbots/go-boost-utils/utils" - ) -@@ -178,6 +179,49 @@ return fmt.Errorf("error sending http request to relay %s. err: %w", r.config.Endpoint, err) - } - if code > 299 { - return fmt.Errorf("non-ok response code %d from relay %s", code, r.config.Endpoint) -+ } -+ -+ return nil -+} -+ -+func (r *RemoteRelay) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, _ ValidatorData) error { -+ log.Info("submitting block with proofs to remote relay", "endpoint", r.config.Endpoint) -+ endpoint := r.config.Endpoint + "/relay/v1/builder/blocks_with_proofs" -+ if r.cancellationsEnabled { -+ endpoint = endpoint + "?cancellations=1" -+ } -+ -+ var code int -+ var err error -+ if r.config.SszEnabled { -+ panic("ssz not supported for preconfs proofs yet") -+ } else { -+ -+ // BOLT: send event to web demo -+ if len(msg.Proofs.TransactionHashes) > 0 { -+ number, _ := msg.Inner.BlockNumber() -+ message := fmt.Sprintf("sending block %d with proofs to relay (path: %s)", number, "/relay/v1/builder/blocks_with_proofs") -+ log.Info(message) -+ EmitBoltDemoEvent(message) -+ } -+ -+ switch msg.Inner.Version { -+ case spec.DataVersionBellatrix: -+ code, err = SendHTTPRequest(context.TODO(), *http.DefaultClient, http.MethodPost, endpoint, msg, nil) -+ case spec.DataVersionCapella: -+ code, err = SendHTTPRequest(context.TODO(), *http.DefaultClient, http.MethodPost, endpoint, msg, nil) -+ case spec.DataVersionDeneb: -+ code, err = SendHTTPRequest(context.TODO(), *http.DefaultClient, http.MethodPost, endpoint, msg, nil) -+ default: -+ return fmt.Errorf("unknown data version %d", msg.Inner.Version) -+ } -+ } -+ -+ if err != nil { -+ return fmt.Errorf("error sending http request block with proofs to relay %s. err: %w", r.config.Endpoint, err) -+ } -+ if code > 299 { -+ return fmt.Errorf("non-ok response code %d from relay for block with proofs %s", code, r.config.Endpoint) - } -  - return nil
+21
-0
diff --git flashbots/builder/builder/relay_aggregator.go chainbound/bolt/builder/relay_aggregator.go -index c39784453acc265fe5a345b97682b8fc4a728707..4655ebe1acc90a034ffeb1e193ba579d054eb3e1 100644 ---- flashbots/builder/builder/relay_aggregator.go -+++ chainbound/bolt/builder/relay_aggregator.go -@@ -6,6 +6,7 @@ "fmt" - "sync" -  - builderSpec "github.com/attestantio/go-builder-client/spec" -+ "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/log" - ) -  -@@ -53,6 +54,26 @@ go func(relay IRelay) { - err := relay.SubmitBlock(msg, registration) - if err != nil { - log.Error("could not submit block", "err", err) -+ } -+ }(relay) -+ } -+ -+ return nil -+} -+ -+func (r *RemoteRelayAggregator) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, registration ValidatorData) error { -+ r.registrationsCacheLock.RLock() -+ defer r.registrationsCacheLock.RUnlock() -+ -+ relays, found := r.registrationsCache[registration] -+ if !found { -+ return fmt.Errorf("no relays for registration %s", registration.Pubkey) -+ } -+ for _, relay := range relays { -+ go func(relay IRelay) { -+ err := relay.SubmitBlockWithProofs(msg, registration) -+ if err != nil { -+ log.Error("could not submit block with proofs", "err", err) - } - }(relay) - }
+17
-3
diff --git flashbots/builder/builder/relay_aggregator_test.go chainbound/bolt/builder/relay_aggregator_test.go -index b727f52c577514214ba3413582d0a8b97604e6d9..d6eebed98460539f692b078ef3bfcfe1001c88d3 100644 ---- flashbots/builder/builder/relay_aggregator_test.go -+++ chainbound/bolt/builder/relay_aggregator_test.go -@@ -8,6 +8,7 @@ - builderApiBellatrix "github.com/attestantio/go-builder-client/api/bellatrix" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" -+ "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/require" - ) -  -@@ -22,9 +23,11 @@ sbError error - gvsVd ValidatorData - gvsErr error -  -- requestedSlot uint64 -- submittedMsg *builderSpec.VersionedSubmitBlockRequest -- submittedMsgCh chan *builderSpec.VersionedSubmitBlockRequest -+ requestedSlot uint64 -+ submittedMsg *builderSpec.VersionedSubmitBlockRequest -+ submittedMsgWithPreconf *common.VersionedSubmitBlockRequestWithProofs -+ submittedMsgCh chan *builderSpec.VersionedSubmitBlockRequest -+ submittedMsgWithPreconfCh chan *common.VersionedSubmitBlockRequestWithProofs - } -  - type testRelayAggBackend struct { -@@ -53,6 +56,17 @@ default: - } - } - r.submittedMsg = msg -+ return r.sbError -+} -+ -+func (r *testRelay) SubmitBlockWithProofs(msg *common.VersionedSubmitBlockRequestWithProofs, vd ValidatorData) error { -+ if r.submittedMsgWithPreconfCh != nil { -+ select { -+ case r.submittedMsgWithPreconfCh <- msg: -+ default: -+ } -+ } -+ r.submittedMsgWithPreconf = msg - return r.sbError - } -

We added logic to create and verify Merkle inclusion proofs based on the SSZ Transactions beacon container.
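For intuition on the verification side, here is a minimal sketch (the verifyInclusionProof helper is hypothetical, not part of the diff): a verifier holding the SSZ transactions root can check the builder's multiproof with the same fastssz call used in the tests below.

package builder

import (
	fastSsz "github.com/ferranbt/fastssz"
)

// verifyInclusionProof is an illustrative helper (not in the diff): it checks
// that each leaf (the hash tree root of a raw preconfirmed transaction) is
// part of the SSZ transactions tree identified by `root`, at the given
// generalized indexes, using the shared proof hashes.
func verifyInclusionProof(root [32]byte, proofHashes [][]byte, leaves [][]byte, indexes []int) (bool, error) {
	return fastSsz.VerifyMultiproof(root[:], proofHashes, leaves, indexes)
}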

(new)
+52
-0
diff --git flashbots/builder/builder/transaction_ssz.go chainbound/bolt/builder/transaction_ssz.go -new file mode 100644 -index 0000000000000000000000000000000000000000..015be2fad16e557e17c18c01cf7471c1af0f9e63 ---- /dev/null -+++ chainbound/bolt/builder/transaction_ssz.go -@@ -0,0 +1,52 @@ -+package builder -+ -+import ( -+ ssz "github.com/ferranbt/fastssz" -+) -+ -+// The maximum length in bytes of a raw RLP-encoded transaction -+var MAX_BYTES_PER_TRANSACTION uint64 = 1_073_741_824 // 2**30 -+ -+// Transaction is a wrapper type of byte slice to implement the ssz.HashRoot interface -+type Transaction []byte -+ -+// HashTreeRoot calculates the hash tree root of the transaction, which -+// is a list of basic types (byte). -+// -+// Reference: https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization -+func (tx *Transaction) HashTreeRoot() ([32]byte, error) { -+ hasher := ssz.NewHasher() -+ tx.HashTreeRootWith(hasher) -+ root, err := hasher.HashRoot() -+ -+ return root, err -+} -+ -+func (tx *Transaction) HashTreeRootWith(hh ssz.HashWalker) error { -+ var err error -+ byteLen := uint64(len(*tx)) -+ -+ if byteLen > MAX_BYTES_PER_TRANSACTION { -+ err = ssz.ErrIncorrectListSize -+ return err -+ } -+ -+ // Load the bytes of the transaction into the hasher -+ hh.AppendBytes32(*tx) -+ // Perform `mix_in_length(merkleize(pack(value), limit=chunk_count(type)), len(value))` -+ // Reference: https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization -+ // -+ // The `indx` parameters is set to `0` as we need to consider the whole hh.buf buffer for this. -+ // In an implementation of more complex types, this parameter would be used to indicate the starting -+ // index of the buffer to be merkleized. It is used a single buffer to do everything for -+ // optimization purposes. -+ hh.MerkleizeWithMixin(0, byteLen, (1073741824+31)/32) -+ -+ return nil -+} -+ -+func (tx *Transaction) GetTree() (*ssz.Node, error) { -+ w := &ssz.Wrapper{} -+ tx.HashTreeRootWith(w) -+ return w.Node(), nil -+}
+104
-0
diff --git flashbots/builder/builder/utils.go chainbound/bolt/builder/utils.go -index 284285cf4e82a5cd7e343033ebebb2887e1e3e72..59bd040df90a3ab40491073e9a8c48f6829d7179 100644 ---- flashbots/builder/builder/utils.go -+++ chainbound/bolt/builder/utils.go -@@ -8,10 +8,33 @@ "encoding/json" - "errors" - "fmt" - "io" -+ "math" - "net/http" -+ "slices" -+ "strings" -+ "time" -+ -+ "github.com/attestantio/go-eth2-client/spec/bellatrix" -+ utilbellatrix "github.com/attestantio/go-eth2-client/util/bellatrix" -+ "github.com/ethereum/go-ethereum/common" -+ "github.com/ethereum/go-ethereum/core/types" -+ "github.com/ethereum/go-ethereum/log" -+ ssz "github.com/ferranbt/fastssz" - ) -  - var errHTTPErrorResponse = errors.New("HTTP error response") -+ -+func DecodeConstraints(constraints *common.SignedConstraints) (types.HashToConstraintDecoded, error) { -+ decodedConstraints := make(types.HashToConstraintDecoded) -+ for _, tx := range constraints.Message.Constraints { -+ decoded := new(types.Transaction) -+ if err := decoded.UnmarshalBinary(tx.Tx); err != nil { -+ return nil, err -+ } -+ decodedConstraints[decoded.Hash()] = &types.ConstraintDecoded{Index: tx.Index, Tx: decoded} -+ } -+ return decodedConstraints, nil -+} -  - // SendSSZRequest is a request to send SSZ data to a remote relay. - func SendSSZRequest(ctx context.Context, client http.Client, method, url string, payload []byte, useGzip bool) (code int, err error) { -@@ -117,3 +140,84 @@ } -  - return resp.StatusCode, nil - } -+ -+// EmitBoltDemoEvent sends a message to the web demo backend to log an event. -+// This is only used for demo purposes and should be removed in production. -+func EmitBoltDemoEvent(message string) { -+ event := strings.NewReader(fmt.Sprintf("{ \"message\": \"BOLT-BUILDER: %s\"}", message)) -+ eventRes, err := http.Post("http://host.docker.internal:3001/events", "application/json", event) -+ if err != nil { -+ log.Error("Failed to send web demo event: ", err) -+ } -+ if eventRes != nil { -+ defer eventRes.Body.Close() -+ } -+} -+ -+func CalculateMerkleMultiProofs( -+ payloadTransactions types.Transactions, -+ HashToConstraintDecoded types.HashToConstraintDecoded, -+) (inclusionProof *common.InclusionProof, rootNode *ssz.Node, err error) { -+ constraintsOrderedByIndex, constraintsWithoutIndex, _, _ := types.ParseConstraintsDecoded(HashToConstraintDecoded) -+ constraints := slices.Concat(constraintsOrderedByIndex, constraintsWithoutIndex) -+ -+ // BOLT: generate merkle tree from payload transactions (we need raw RLP bytes for this) -+ rawTxs := make([]bellatrix.Transaction, len(payloadTransactions)) -+ for i, tx := range payloadTransactions { -+ raw, err := tx.WithoutBlobTxSidecar().MarshalBinary() -+ if err != nil { -+ log.Warn("[BOLT]: could not marshal transaction", "txHash", tx.Hash(), "err", err) -+ continue -+ } -+ rawTxs[i] = bellatrix.Transaction(raw) -+ } -+ -+ log.Info(fmt.Sprintf("[BOLT]: Generated %d raw transactions for merkle tree", len(rawTxs))) -+ bellatrixPayloadTxs := utilbellatrix.ExecutionPayloadTransactions{Transactions: rawTxs} -+ -+ rootNode, err = bellatrixPayloadTxs.GetTree() -+ if err != nil { -+ return nil, nil, fmt.Errorf("could not get tree from transactions: %w", err) -+ } -+ -+ // BOLT: Set the value of nodes. This is MANDATORY for the proof calculation -+ // to output the leaf correctly. This is also never documented in fastssz. 
-__- -+ rootNode.Hash() -+ -+ // using our gen index formula: 2 * 2^21 + preconfIndex -+ baseGeneralizedIndex := int(math.Pow(float64(2), float64(21))) -+ generalizedIndexes := make([]int, len(constraints)) -+ transactionHashes := make([]common.Hash, len(constraints)) -+ -+ for i, constraint := range constraints { -+ tx := constraint.Tx -+ // get the index of the preconfirmed transaction in the block -+ preconfIndex := slices.IndexFunc(payloadTransactions, func(payloadTx *types.Transaction) bool { return payloadTx.Hash() == tx.Hash() }) -+ if preconfIndex == -1 { -+ log.Error(fmt.Sprintf("Preconfirmed transaction %s not found in block", tx.Hash())) -+ log.Error(fmt.Sprintf("block has %v transactions", len(payloadTransactions))) -+ continue -+ } -+ -+ generalizedIndex := baseGeneralizedIndex + preconfIndex -+ generalizedIndexes[i] = generalizedIndex -+ transactionHashes[i] = tx.Hash() -+ } -+ -+ log.Info(fmt.Sprintf("[BOLT]: Calculating merkle multiproof for %d preconfirmed transaction", -+ len(constraints))) -+ -+ timeStart := time.Now() -+ multiProof, err := rootNode.ProveMulti(generalizedIndexes) -+ if err != nil { -+ return nil, nil, fmt.Errorf("could not calculate merkle multiproof for %d preconf: %w", len(constraints), err) -+ } -+ -+ timeForProofs := time.Since(timeStart) -+ log.Info(fmt.Sprintf("[BOLT]: Calculated merkle multiproof for %d preconf in %s", len(constraints), timeForProofs)) -+ -+ inclusionProof = common.InclusionProofFromMultiProof(multiProof) -+ inclusionProof.TransactionHashes = transactionHashes -+ -+ return -+}
(new)
+138
-0
diff --git flashbots/builder/builder/utils_test.go chainbound/bolt/builder/utils_test.go -new file mode 100644 -index 0000000000000000000000000000000000000000..76e6cfdf8e16a574186d2b8109e24f76f6423e30 ---- /dev/null -+++ chainbound/bolt/builder/utils_test.go -@@ -0,0 +1,138 @@ -+package builder -+ -+import ( -+ "encoding/json" -+ "testing" -+ -+ "github.com/ethereum/go-ethereum/common" -+ "github.com/ethereum/go-ethereum/core/types" -+ fastSsz "github.com/ferranbt/fastssz" -+ "github.com/stretchr/testify/require" -+) -+ -+func TestGenerateMerkleMultiProofs(t *testing.T) { -+ // https://etherscan.io/tx/0x138a5f8ba7950521d9dec66ee760b101e0c875039e695c9fcfb34f5ef02a881b -+ // 0x02f873011a8405f5e10085037fcc60e182520894f7eaaf75cb6ec4d0e2b53964ce6733f54f7d3ffc880b6139a7cbd2000080c080a095a7a3cbb7383fc3e7d217054f861b890a935adc1adf4f05e3a2f23688cf2416a00875cdc45f4395257e44d709d04990349b105c22c11034a60d7af749ffea2765 -+ // https://etherscan.io/tx/0xfb0ee9de8941c8ad50e6a3d2999cd6ef7a541ec9cb1ba5711b76fcfd1662dfa9 -+ // 0xf8708305dc6885029332e35883019a2894500b0107e172e420561565c8177c28ac0f62017f8810ffb80e6cc327008025a0e9c0b380c68f040ae7affefd11979f5ed18ae82c00e46aa3238857c372a358eca06b26e179dd2f7a7f1601755249f4cff56690c4033553658f0d73e26c36fe7815 -+ // https://etherscan.io/tx/0x45e7ee9ba1a1d0145de29a764a33bb7fc5620486b686d68ec8cb3182d137bc90 -+ // 0xf86c0785028fa6ae0082520894098d880c4753d0332ca737aa592332ed2522cd22880d2f09f6558750008026a0963e58027576b3a8930d7d9b4a49253b6e1a2060e259b2102e34a451d375ce87a063f802538d3efed17962c96fcea431388483bbe3860ea9bb3ef01d4781450fbf -+ // https://etherscan.io/tx/0x9d48b4a021898a605b7ae49bf93ad88fa6bd7050e9448f12dde064c10f22fe9c -+ // 0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e -+ // https://etherscan.io/tx/0x15bd881daa1408b33f67fa4bdeb8acfb0a2289d9b4c6f81eef9bb2bb2e52e780 - Blob Tx -+ // 0x03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe -+ -+ raw := 
`["0x03f9029c01830299f184b2d05e008507aef40a00832dc6c09468d30f47f19c07bccef4ac7fae2dc12fca3e0dc980b90204ef16e845000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001e0000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000633b68f5d8d3a86593ebb815b4663bcbe0302e31382e302d64657600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004109de8da2a97e37f2e6dc9f7d50a408f9344d7aa1a925ae53daf7fbef43491a571960d76c0cb926190a9da10df7209fb1ba93cd98b1565a3a2368749d505f90c81c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0843b9aca00e1a00141e3a338e30c49ed0501e315bcc45e4edefebed43ab1368a1505461d9cf64901a01e8511e06b17683d89eb57b9869b96b8b611f969f7f56cbc0adc2df7c88a2a07a00910deacf91bba0d74e368d285d311dc5884e7cfe219d85aea5741b2b6e3a2fe", "0x02f873011a8405f5e10085037fcc60e182520894f7eaaf75cb6ec4d0e2b53964ce6733f54f7d3ffc880b6139a7cbd2000080c080a095a7a3cbb7383fc3e7d217054f861b890a935adc1adf4f05e3a2f23688cf2416a00875cdc45f4395257e44d709d04990349b105c22c11034a60d7af749ffea2765","0xf8708305dc6885029332e35883019a2894500b0107e172e420561565c8177c28ac0f62017f8810ffb80e6cc327008025a0e9c0b380c68f040ae7affefd11979f5ed18ae82c00e46aa3238857c372a358eca06b26e179dd2f7a7f1601755249f4cff56690c4033553658f0d73e26c36fe7815", "0xf86c0785028fa6ae0082520894098d880c4753d0332ca737aa592332ed2522cd22880d2f09f6558750008026a0963e58027576b3a8930d7d9b4a49253b6e1a2060e259b2102e34a451d375ce87a063f802538d3efed17962c96fcea431388483bbe3860ea9bb3ef01d4781450fbf", "0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e"]` -+ -+ byteTxs := make([]*common.HexBytes, 0, 5) -+ err := json.Unmarshal([]byte(raw), &byteTxs) -+ require.NoError(t, err) -+ require.Equal(t, len(byteTxs), 5) -+ -+ payloadTransactions := common.Map(byteTxs, func(rawTx *common.HexBytes) *types.Transaction { -+ transaction := new(types.Transaction) -+ err = transaction.UnmarshalBinary([]byte(*rawTx)) -+ return transaction -+ }) -+ -+ require.Equal(t, payloadTransactions[0].Type(), uint8(3)) -+ require.Equal(t, payloadTransactions[1].Type(), uint8(2)) -+ -+ // try out all combinations of "constraints": -+ // e.g. only [0], then [0, 1], then [1] etc... 
-+ // and log which ones are failing and which ones are not -+ for i := 1; i < len(payloadTransactions)+1; i++ { -+ t.Logf("--- Trying with %d constraints\n", i) -+ for _, chosenConstraintTransactions := range combinations(payloadTransactions, i) { -+ // find the index of the chosen constraints inside payload transactions for debugging -+ payloadIndexes := make([]int, len(chosenConstraintTransactions)) -+ for i, chosenConstraint := range chosenConstraintTransactions { -+ for j, payloadTransaction := range payloadTransactions { -+ if chosenConstraint.Hash() == payloadTransaction.Hash() { -+ payloadIndexes[i] = j -+ break -+ } -+ } -+ } -+ -+ constraints := make(types.HashToConstraintDecoded) -+ for _, tx := range chosenConstraintTransactions { -+ constraints[tx.Hash()] = &types.ConstraintDecoded{Tx: tx} -+ } -+ -+ inclusionProof, root, err := CalculateMerkleMultiProofs(payloadTransactions, constraints) -+ require.NoError(t, err) -+ rootHash := root.Hash() -+ -+ leaves := make([][]byte, len(constraints)) -+ -+ i := 0 -+ for _, constraint := range constraints { -+ if constraint == nil || constraint.Tx == nil { -+ t.Logf("nil constraint or transaction!") -+ } -+ -+ // Compute the hash tree root for the raw preconfirmed transaction -+ // and use it as "Leaf" in the proof to be verified against -+ -+ withoutBlob, err := constraint.Tx.WithoutBlobTxSidecar().MarshalBinary() -+ if err != nil { -+ t.Logf("error marshalling transaction without blob tx sidecar: %v", err) -+ } -+ -+ tx := Transaction(withoutBlob) -+ txHashTreeRoot, err := tx.HashTreeRoot() -+ if err != nil { -+ t.Logf("error calculating hash tree root: %v", err) -+ } -+ -+ leaves[i] = txHashTreeRoot[:] -+ i++ -+ } -+ -+ hashes := make([][]byte, len(inclusionProof.MerkleHashes)) -+ for i, hash := range inclusionProof.MerkleHashes { -+ hashes[i] = []byte(*hash) -+ } -+ indexes := make([]int, len(inclusionProof.GeneralizedIndexes)) -+ for i, index := range inclusionProof.GeneralizedIndexes { -+ indexes[i] = int(index) -+ } -+ -+ ok, err := fastSsz.VerifyMultiproof(rootHash[:], hashes, leaves, indexes) -+ if err != nil { -+ t.Logf("error verifying merkle proof: %v", err) -+ } -+ -+ if !ok { -+ t.Logf("FAIL with txs: %v", payloadIndexes) -+ } else { -+ t.Logf("SUCCESS with txs: %v", payloadIndexes) -+ } -+ } -+ } -+} -+ -+// Function to generate combinations of a specific length -+func combinations[T any](arr []T, k int) [][]T { -+ var result [][]T -+ n := len(arr) -+ data := make([]T, k) -+ combine(arr, data, 0, n-1, 0, k, &result) -+ return result -+} -+ -+// Helper function to generate combinations -+func combine[T any](arr, data []T, start, end, index, k int, result *[][]T) { -+ if index == k { -+ tmp := make([]T, k) -+ copy(tmp, data) -+ *result = append(*result, tmp) -+ return -+ } -+ -+ for i := start; i <= end && end-i+1 >= k-index; i++ { -+ data[index] = arr[i] -+ combine(arr, data, i+1, end, index+1, k, result) -+ } -+}

The main change in the ETH service is threading the constraintsCache (along with the slot) through the block-building entrypoint into the miner's BuildPayloadArgs.

+19
-11
diff --git flashbots/builder/builder/eth_service.go chainbound/bolt/builder/eth_service.go -index 480221815f46c97f37292ba441b280629339e04c..4d692b0220baebb075f252a8e0cb89df7c79bae9 100644 ---- flashbots/builder/builder/eth_service.go -+++ chainbound/bolt/builder/eth_service.go -@@ -5,6 +5,7 @@ "errors" - "math/big" - "time" -  -+ "github.com/chainbound/shardmap" - "github.com/ethereum/go-ethereum/beacon/engine" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" -@@ -15,7 +16,7 @@ "github.com/ethereum/go-ethereum/params" - ) -  - type IEthereumService interface { -- BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn) error -+ BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) error - GetBlockByHash(hash common.Hash) *types.Block - Config() *params.ChainConfig - Synced() bool -@@ -30,9 +31,10 @@ testBlobSidecar []*types.BlobTxSidecar - testBundlesMerged []types.SimulatedBundle - testAllBundles []types.SimulatedBundle - testUsedSbundles []types.UsedSBundle -+ testPreconfs []*types.Transaction - } -  --func (t *testEthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn) error { -+func (t *testEthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) error { - sealedBlockCallback(t.testBlock, t.testBlockValue, t.testBlobSidecar, time.Now(), t.testBundlesMerged, t.testAllBundles, t.testUsedSbundles) - return nil - } -@@ -52,18 +54,20 @@ return &EthereumService{eth: eth} - } -  - // TODO: we should move to a setup similar to catalyst local blocks & payload ids --func (s *EthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn) error { -+func (s *EthereumService) BuildBlock(attrs *types.BuilderPayloadAttributes, sealedBlockCallback miner.BlockHookFn, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) error { - // Send a request to generate a full block in the background. - // The result can be obtained via the returned channel. - args := &miner.BuildPayloadArgs{ -- Parent: attrs.HeadHash, -- Timestamp: uint64(attrs.Timestamp), -- FeeRecipient: attrs.SuggestedFeeRecipient, -- GasLimit: attrs.GasLimit, -- Random: attrs.Random, -- Withdrawals: attrs.Withdrawals, -- BeaconRoot: attrs.ParentBeaconBlockRoot, -- BlockHook: sealedBlockCallback, -+ Parent: attrs.HeadHash, -+ Timestamp: uint64(attrs.Timestamp), -+ FeeRecipient: attrs.SuggestedFeeRecipient, -+ GasLimit: attrs.GasLimit, -+ Random: attrs.Random, -+ Withdrawals: attrs.Withdrawals, -+ BeaconRoot: attrs.ParentBeaconBlockRoot, -+ Slot: attrs.Slot, -+ BlockHook: sealedBlockCallback, -+ ConstraintsCache: constraintsCache, - } -  - payload, err := s.eth.Miner().BuildPayload(args) -@@ -104,3 +108,7 @@ - func (s *EthereumService) Synced() bool { - return s.eth.Synced() - } -+ -+func (s *EthereumService) Ethereum() *eth.Ethereum { -+ return s.eth -+}
+1
-1
diff --git flashbots/builder/builder/eth_service_test.go chainbound/bolt/builder/eth_service_test.go -index 386f472c2a1becf40c36239381d46af2f8a8074c..000a3185af88dcea75c0a656c10b7ca321480bb8 100644 ---- flashbots/builder/builder/eth_service_test.go -+++ chainbound/bolt/builder/eth_service_test.go -@@ -103,7 +103,7 @@ require.Equal(t, parent.Time+1, executableData.ExecutionPayload.Timestamp) - require.Equal(t, block.ParentHash(), parent.Hash()) - require.Equal(t, block.Hash(), executableData.ExecutionPayload.BlockHash) - require.Equal(t, blockValue.Uint64(), uint64(0)) -- }) -+ }, nil) -  - require.NoError(t, err) - }

This is where the actual block building logic is located.


We added a constraintsCache to the miner, which is responsible for keeping an always-updated view of the constraints streamed from relays according to the Constraints API relay specs. It's passed to the miner from the entrypoint in the builder/ module.
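A minimal sketch of that hand-off (the constraintsForSlot helper is hypothetical, not part of the diff): at the start of a building job, the worker simply looks up the cache entry for the slot it is building.

package miner

import (
	"github.com/chainbound/shardmap"
	"github.com/ethereum/go-ethereum/core/types"
)

// constraintsForSlot is an illustrative helper (not in the diff): given the
// FIFO cache that the builder module keeps updated from the relay SSE stream,
// it returns the decoded constraints for the slot being built, if any.
func constraintsForSlot(cache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded], slot uint64) types.HashToConstraintDecoded {
	if cache == nil {
		return nil
	}
	if constraints, ok := cache.Get(slot); ok {
		return constraints
	}
	return nil
}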


At block building time, we check if there are any transactions in the cache for this slot, and if so we insert them at the top of the block. This is a naive implementation that can be improved, but it demonstrates the concept of the builder role.
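Concretely, here is a minimal sketch of that ordering (the constraintTxs helper is hypothetical; it reuses types.ParseConstraintsDecoded from the diffs below): constraints with an explicit index come first, in index order, followed by the unindexed ones.

package miner

import (
	"slices"

	"github.com/ethereum/go-ethereum/core/types"
)

// constraintTxs is an illustrative helper (not in the diff): it flattens the
// cached constraints for a slot into the order the naive builder would try to
// place them at the top of the block.
func constraintTxs(constraints types.HashToConstraintDecoded) []*types.Transaction {
	ordered, unindexed, _, _ := types.ParseConstraintsDecoded(constraints)
	txs := make([]*types.Transaction, 0, len(constraints))
	for _, c := range slices.Concat(ordered, unindexed) {
		txs = append(txs, c.Tx)
	}
	return txs
}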

+3
-2
diff --git flashbots/builder/miner/algo_common_test.go chainbound/bolt/miner/algo_common_test.go -index 1b4853863eef1137a4bb83492a1e0e3fd7247180..105c709b2aa22b38bb20922e0a76474688138b55 100644 ---- flashbots/builder/miner/algo_common_test.go -+++ chainbound/bolt/miner/algo_common_test.go -@@ -528,13 +528,14 @@ t.Cleanup(func() { - testConfig.AlgoType = ALGO_MEV_GETH - }) -  -- for _, algoType := range []AlgoType{ALGO_MEV_GETH, ALGO_GREEDY, ALGO_GREEDY_BUCKETS, ALGO_GREEDY_MULTISNAP, ALGO_GREEDY_BUCKETS_MULTISNAP} { -+ for _, algoType := range []AlgoType{ALGO_MEV_GETH} { - local := new(params.ChainConfig) - *local = *ethashChainConfig - local.TerminalTotalDifficulty = big.NewInt(0) - testConfig.AlgoType = algoType -- testGetSealingWork(t, local, ethash.NewFaker()) -+ testGetSealingWork(t, local, ethash.NewFaker(), nil) - } -+ t.Fail() - } -  - func TestGetSealingWorkAlgosWithProfit(t *testing.T) {
+23
-19
diff --git flashbots/builder/miner/multi_worker.go chainbound/bolt/miner/multi_worker.go -index 797b277e8110c64c79528576b10f9e183e86aca1..415447d47ca379aae834701ceca2f8c404838580 100644 ---- flashbots/builder/miner/multi_worker.go -+++ chainbound/bolt/miner/multi_worker.go -@@ -93,15 +93,17 @@ // enough to run. The empty payload can at least make sure there is something - // to deliver for not missing slot. - var empty *newPayloadResult - emptyParams := &generateParams{ -- timestamp: args.Timestamp, -- forceTime: true, -- parentHash: args.Parent, -- coinbase: args.FeeRecipient, -- random: args.Random, -- gasLimit: args.GasLimit, -- withdrawals: args.Withdrawals, -- beaconRoot: args.BeaconRoot, -- noTxs: true, -+ timestamp: args.Timestamp, -+ forceTime: true, -+ parentHash: args.Parent, -+ coinbase: args.FeeRecipient, -+ random: args.Random, -+ gasLimit: args.GasLimit, -+ withdrawals: args.Withdrawals, -+ beaconRoot: args.BeaconRoot, -+ noTxs: true, -+ slot: args.Slot, -+ constraintsCache: args.ConstraintsCache, - } - for _, worker := range w.workers { - empty = worker.getSealingBlock(emptyParams) -@@ -130,16 +132,18 @@ for _, w := range w.workers { - workerPayload := newPayload(empty.block, args.Id()) - workerPayloads = append(workerPayloads, workerPayload) - fullParams := &generateParams{ -- timestamp: args.Timestamp, -- forceTime: true, -- parentHash: args.Parent, -- coinbase: args.FeeRecipient, -- random: args.Random, -- withdrawals: args.Withdrawals, -- beaconRoot: args.BeaconRoot, -- gasLimit: args.GasLimit, -- noTxs: false, -- onBlock: args.BlockHook, -+ timestamp: args.Timestamp, -+ forceTime: true, -+ parentHash: args.Parent, -+ coinbase: args.FeeRecipient, -+ random: args.Random, -+ withdrawals: args.Withdrawals, -+ beaconRoot: args.BeaconRoot, -+ gasLimit: args.GasLimit, -+ noTxs: false, -+ onBlock: args.BlockHook, -+ slot: args.Slot, -+ constraintsCache: args.ConstraintsCache, - } -  - go func(w *worker) {
+34
-27
diff --git flashbots/builder/miner/payload_building.go chainbound/bolt/miner/payload_building.go -index edd9e13c1176dca420a38b64128f91602649d8f9..ed3a4fe1c82c87771fb2df00cddbc76b9ba4bd25 100644 ---- flashbots/builder/miner/payload_building.go -+++ chainbound/bolt/miner/payload_building.go -@@ -23,6 +23,7 @@ "math/big" - "sync" - "time" -  -+ "github.com/chainbound/shardmap" - "github.com/ethereum/go-ethereum/beacon/engine" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" -@@ -35,15 +36,17 @@ // BuildPayloadArgs contains the provided parameters for building payload. - // Check engine-api specification for more details. - // https://github.com/ethereum/execution-apis/blob/main/src/engine/cancun.md#payloadattributesv3 - type BuildPayloadArgs struct { -- Parent common.Hash // The parent block to build payload on top -- Timestamp uint64 // The provided timestamp of generated payload -- FeeRecipient common.Address // The provided recipient address for collecting transaction fee -- Random common.Hash // The provided randomness value -- Withdrawals types.Withdrawals // The provided withdrawals -- BeaconRoot *common.Hash // The provided beaconRoot (Cancun) -- Version engine.PayloadVersion // Versioning byte for payload id calculation. -- GasLimit uint64 -- BlockHook BlockHookFn -+ Parent common.Hash // The parent block to build payload on top -+ Timestamp uint64 // The provided timestamp of generated payload -+ FeeRecipient common.Address // The provided recipient address for collecting transaction fee -+ Random common.Hash // The provided randomness value -+ Withdrawals types.Withdrawals // The provided withdrawals -+ BeaconRoot *common.Hash // The provided beaconRoot (Cancun) -+ Version engine.PayloadVersion // Versioning byte for payload id calculation. -+ GasLimit uint64 -+ BlockHook BlockHookFn -+ Slot uint64 -+ ConstraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded] - } -  - // Id computes an 8-byte identifier by hashing the components of the payload arguments. -@@ -248,15 +251,17 @@ // Build the initial version with no transaction included. It should be fast - // enough to run. The empty payload can at least make sure there is something - // to deliver for not missing slot. - emptyParams := &generateParams{ -- timestamp: args.Timestamp, -- forceTime: true, -- parentHash: args.Parent, -- coinbase: args.FeeRecipient, -- random: args.Random, -- withdrawals: args.Withdrawals, -- beaconRoot: args.BeaconRoot, -- noTxs: true, -- onBlock: args.BlockHook, -+ timestamp: args.Timestamp, -+ forceTime: true, -+ parentHash: args.Parent, -+ coinbase: args.FeeRecipient, -+ random: args.Random, -+ withdrawals: args.Withdrawals, -+ beaconRoot: args.BeaconRoot, -+ noTxs: true, -+ onBlock: args.BlockHook, -+ slot: args.Slot, -+ constraintsCache: args.ConstraintsCache, - } - empty := w.getSealingBlock(emptyParams) - if empty.err != nil { -@@ -280,15 +285,17 @@ // by the timestamp parameter. 
- endTimer := time.NewTimer(time.Second * 12) -  - fullParams := &generateParams{ -- timestamp: args.Timestamp, -- forceTime: true, -- parentHash: args.Parent, -- coinbase: args.FeeRecipient, -- random: args.Random, -- withdrawals: args.Withdrawals, -- beaconRoot: args.BeaconRoot, -- noTxs: false, -- onBlock: args.BlockHook, -+ timestamp: args.Timestamp, -+ forceTime: true, -+ parentHash: args.Parent, -+ coinbase: args.FeeRecipient, -+ random: args.Random, -+ withdrawals: args.Withdrawals, -+ beaconRoot: args.BeaconRoot, -+ noTxs: false, -+ onBlock: args.BlockHook, -+ slot: args.Slot, -+ constraintsCache: args.ConstraintsCache, - } -  - for {
+267
-105
diff --git flashbots/builder/miner/worker.go chainbound/bolt/miner/worker.go -index 09d46ed99f9f600550d979c31b582201ab4eef0a..c845edbddbcf5e1ea57a5485b0a0225db30a738a 100644 ---- flashbots/builder/miner/worker.go -+++ chainbound/bolt/miner/worker.go -@@ -25,6 +25,7 @@ "sync" - "sync/atomic" - "time" -  -+ "github.com/chainbound/shardmap" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/consensus" - "github.com/ethereum/go-ethereum/consensus/misc/eip1559" -@@ -644,7 +645,7 @@ plainTxs := newTransactionsByPriceAndNonce(w.current.signer, txs, nil, nil, w.current.header.BaseFee) // Mixed bag of everrything, yolo - blobTxs := newTransactionsByPriceAndNonce(w.current.signer, nil, nil, nil, w.current.header.BaseFee) // Empty bag, don't bother optimising -  - tcount := w.current.tcount -- w.commitTransactions(w.current, plainTxs, blobTxs, nil) -+ w.commitTransactions(w.current, plainTxs, blobTxs, nil, nil) -  - // Only update the snapshot if any new transactions were added - // to the pending block -@@ -1017,14 +1018,30 @@ - return nil - } -  --func (w *worker) commitTransactions(env *environment, plainTxs, blobTxs *transactionsByPriceAndNonce, interrupt *atomic.Int32) error { -+// commitTransactions applies sorted transactions to the current environment, updating the state -+// and creating the resulting block -+// -+// Assumptions: -+// - there are no nonce-conflicting transactions between `plainTxs`, `blobTxs` and the constraints -+// - all transaction are correctly signed -+func (w *worker) commitTransactions(env *environment, plainTxs, blobTxs *transactionsByPriceAndNonce, constraints types.HashToConstraintDecoded, interrupt *atomic.Int32) error { - gasLimit := env.header.GasLimit - if env.gasPool == nil { - env.gasPool = new(core.GasPool).AddGas(gasLimit) - } - var coalescedLogs []*types.Log -  -+ // Here we initialize and track the constraints left to be executed along -+ // with their gas requirements -+ constraintsOrderedByIndex, -+ constraintsWithoutIndex, -+ constraintsTotalGasLeft, -+ constraintsTotalBlobGasLeft := types.ParseConstraintsDecoded(constraints) -+ - for { -+ // `env.tcount` starts from 0 so it's correct to use it as the current index -+ currentTxIndex := uint64(env.tcount) -+ - // Check interruption signal and abort building if it's fired. - if interrupt != nil { - if signal := interrupt.Load(); signal != commitInterruptNone { -@@ -1036,102 +1053,166 @@ if env.gasPool.Gas() < params.TxGas { - log.Trace("Not enough gas for further transactions", "have", env.gasPool, "want", params.TxGas) - break - } -+ -+ blobGasLeft := uint64(params.MaxBlobGasPerBlock - env.blobs*params.BlobTxBlobGasPerBlob) -+ - // If we don't have enough blob space for any further blob transactions, - // skip that list altogether -- if !blobTxs.Empty() && env.blobs*params.BlobTxBlobGasPerBlob >= params.MaxBlobGasPerBlock { -+ if !blobTxs.Empty() && blobGasLeft <= 0 { - log.Trace("Not enough blob space for further blob transactions") - blobTxs.Clear() - // Fall though to pick up any plain txs - } - // Retrieve the next transaction and abort if all done. 
- var ( -- ltx *txpool.LazyTransaction -- txs *transactionsByPriceAndNonce -- pltx *txpool.LazyTransaction -- ptip *uint256.Int -- bltx *txpool.LazyTransaction -- btip *uint256.Int -+ lazyTx *txpool.LazyTransaction -+ txs *transactionsByPriceAndNonce -+ plainLazyTx *txpool.LazyTransaction -+ plainTxTip *uint256.Int -+ blobLazyTx *txpool.LazyTransaction -+ blobTxTip *uint256.Int - ) -  -- pTxWithMinerFee := plainTxs.Peek() -- if pTxWithMinerFee != nil { -- pltx = pTxWithMinerFee.Tx() -- ptip = pTxWithMinerFee.fees -+ if pTxWithMinerFee := plainTxs.Peek(); pTxWithMinerFee != nil { -+ plainLazyTx = pTxWithMinerFee.Tx() -+ plainTxTip = pTxWithMinerFee.fees - } -  -- bTxWithMinerFee := blobTxs.Peek() -- if bTxWithMinerFee != nil { -- bltx = bTxWithMinerFee.Tx() -- btip = bTxWithMinerFee.fees -+ if bTxWithMinerFee := blobTxs.Peek(); bTxWithMinerFee != nil { -+ blobLazyTx = bTxWithMinerFee.Tx() -+ blobTxTip = bTxWithMinerFee.fees - } -  - switch { -- case pltx == nil: -- txs, ltx = blobTxs, bltx -- case bltx == nil: -- txs, ltx = plainTxs, pltx -+ case plainLazyTx == nil: -+ txs, lazyTx = blobTxs, blobLazyTx -+ case blobLazyTx == nil: -+ txs, lazyTx = plainTxs, plainLazyTx - default: -- if ptip.Lt(btip) { -- txs, ltx = blobTxs, bltx -+ if plainTxTip.Lt(blobTxTip) { -+ txs, lazyTx = blobTxs, blobLazyTx - } else { -- txs, ltx = plainTxs, pltx -+ txs, lazyTx = plainTxs, plainLazyTx - } - } -  -- if ltx == nil { -- break -+ type candidateTx struct { -+ tx *types.Transaction -+ isConstraint bool -+ } -+ // candidate is the transaction we should execute in this cycle of the loop -+ var candidate struct { -+ tx *types.Transaction -+ isConstraint bool - } -  -- // If we don't have enough space for the next transaction, skip the account. -- if env.gasPool.Gas() < ltx.Gas { -- log.Trace("Not enough gas left for transaction", "hash", ltx.Hash, "left", env.gasPool.Gas(), "needed", ltx.Gas) -- txs.Pop() -- continue -+ var constraintTx *types.ConstraintDecoded -+ if len(constraintsOrderedByIndex) > 0 { -+ constraintTx = constraintsOrderedByIndex[0] - } -- if left := uint64(params.MaxBlobGasPerBlock - env.blobs*params.BlobTxBlobGasPerBlob); left < ltx.BlobGas { -- log.Trace("Not enough blob gas left for transaction", "hash", ltx.Hash, "left", left, "needed", ltx.BlobGas) -- txs.Pop() -- continue -+ -+ isSomePoolTxLeft := lazyTx != nil -+ -+ isThereConstraintWithThisIndex := constraintTx != nil && constraintTx.Index != nil && *constraintTx.Index == currentTxIndex -+ if isThereConstraintWithThisIndex { -+ // we retrieve the candidate constraint by shifting it from the list -+ candidate = candidateTx{tx: common.Shift(&constraintsOrderedByIndex).Tx, isConstraint: true} -+ } else { -+ if isSomePoolTxLeft { -+ // Check if there enough gas left for this tx -+ if constraintsTotalGasLeft+lazyTx.Gas > env.gasPool.Gas() || constraintsTotalBlobGasLeft+lazyTx.BlobGas > blobGasLeft { -+ // Skip this tx and try to fit one with less gas. -+ // Drop all consecutive transactions from the same sender because of `nonce-too-high` clause. -+ log.Debug("Could not find transactions gas with the remaining constraints, account skipped", "hash", lazyTx.Hash) -+ txs.Pop() -+ // Edge case: -+ // -+ // Assumption: suppose sender A sends tx T_1 with nonce 1, and T_2 with nonce 2, and T_2 is a constraint. -+ // -+ // -+ // When running the block building algorithm I first have to make sure to reserve enough gas for the constraints. 
-+ // This implies that when a pooled tx comes I have to check if there is enough gas for it while taking into account -+ // the rest of the remaining constraint gas to allocate. -+ // Suppose there is no gas for the pooled tx T_1, then I have to drop it and consequently drop every tx from the same -+ // sender with higher nonce due to "nonce-too-high" issues, including T_2. -+ // But then, I have dropped a constraint which means my bid is invalid. -+ // -+ // FIXME: for the PoC we're not handling this -+ -+ // Repeat the loop to try to find another pool transaction -+ continue -+ } -+ // We can safely consider the pool tx as the candidate, -+ // since by assumption it is not nonce-conflicting -+ tx := lazyTx.Resolve() -+ if tx == nil { -+ log.Trace("Ignoring evicted transaction", "hash", candidate.tx.Hash()) -+ txs.Pop() -+ continue -+ } -+ candidate = candidateTx{tx: tx, isConstraint: false} -+ } else { -+ // No more pool tx left, we can add the unindexed ones if available -+ if len(constraintsWithoutIndex) == 0 { -+ // To recap, this means: -+ // 1. there are no more pool tx left -+ // 2. there are no more constraints without an index -+ // 3. the remaining indexes inside `constraintsOrderedByIndex`, if any, cannot be satisfied -+ // As such, we can safely exist -+ break -+ } -+ candidate = candidateTx{tx: common.Shift(&constraintsWithoutIndex).Tx, isConstraint: true} -+ } - } -- // Transaction seems to fit, pull it up from the pool -- tx := ltx.Resolve() -- if tx == nil { -- log.Trace("Ignoring evicted transaction", "hash", ltx.Hash) -- txs.Pop() -- continue -- } -- // Error may be ignored here. The error has already been checked -- // during transaction acceptance is the transaction pool. -- from, _ := types.Sender(env.signer, tx) -+ -+ // Error may be ignored here, see assumption -+ from, _ := types.Sender(env.signer, candidate.tx) -  - // Check whether the tx is replay protected. If we're not in the EIP155 hf - // phase, start ignoring the sender until we do. -- if tx.Protected() && !w.chainConfig.IsEIP155(env.header.Number) { -- log.Trace("Ignoring replay protected transaction", "hash", ltx.Hash, "eip155", w.chainConfig.EIP155Block) -+ if candidate.tx.Protected() && !w.chainConfig.IsEIP155(env.header.Number) { -+ log.Trace("Ignoring replay protected transaction", "hash", candidate.tx.Hash(), "eip155", w.chainConfig.EIP155Block) - txs.Pop() - continue - } - // Start executing the transaction -- env.state.SetTxContext(tx.Hash(), env.tcount) -+ env.state.SetTxContext(candidate.tx.Hash(), env.tcount) -  -- logs, err := w.commitTransaction(env, tx) -+ logs, err := w.commitTransaction(env, candidate.tx) - switch { - case errors.Is(err, core.ErrNonceTooLow): - // New head notification data race between the transaction pool and miner, shift -- log.Trace("Skipping transaction with low nonce", "hash", ltx.Hash, "sender", from, "nonce", tx.Nonce()) -- txs.Shift() -+ log.Trace("Skipping transaction with low nonce", "hash", candidate.tx.Hash(), "sender", from, "nonce", candidate.tx.Nonce()) -+ if candidate.isConstraint { -+ log.Warn(fmt.Sprintf("Skipping constraint with low nonce, hash %s, sender %s, nonce %d", candidate.tx.Hash(), from, candidate.tx.Nonce())) -+ } else { -+ txs.Shift() -+ } -  - case errors.Is(err, nil): - // Everything ok, collect the logs and shift in the next transaction from the same account - coalescedLogs = append(coalescedLogs, logs...) 
- env.tcount++ -- txs.Shift() -+ if candidate.isConstraint { -+ // Update the amount of gas left for the constraints -+ constraintsTotalGasLeft -= candidate.tx.Gas() -+ constraintsTotalBlobGasLeft -= candidate.tx.BlobGas() -+ -+ constraintTip, _ := candidate.tx.EffectiveGasTip(env.header.BaseFee) -+ log.Info(fmt.Sprintf("Executed constraint %s at index %d with effective gas tip %d", candidate.tx.Hash().String(), currentTxIndex, constraintTip)) -+ } else { -+ txs.Shift() -+ } -  - default: - // Transaction is regarded as invalid, drop all consecutive transactions from - // the same sender because of `nonce-too-high` clause. -- log.Debug("Transaction failed, account skipped", "hash", ltx.Hash, "err", err) -- txs.Pop() -+ log.Debug("Transaction failed, account skipped", "hash", candidate.tx.Hash(), "err", err) -+ if candidate.isConstraint { -+ log.Warn("Constraint failed, account skipped", "hash", candidate.tx.Hash(), "err", err) -+ } else { -+ txs.Pop() -+ } - } - } - if !w.isRunning() && len(coalescedLogs) > 0 { -@@ -1154,16 +1235,18 @@ } -  - // generateParams wraps various of settings for generating sealing task. - type generateParams struct { -- timestamp uint64 // The timestamp for sealing task -- forceTime bool // Flag whether the given timestamp is immutable or not -- parentHash common.Hash // Parent block hash, empty means the latest chain head -- coinbase common.Address // The fee recipient address for including transaction -- gasLimit uint64 // The validator's requested gas limit target -- random common.Hash // The randomness generated by beacon chain, empty before the merge -- withdrawals types.Withdrawals // List of withdrawals to include in block. -- beaconRoot *common.Hash // The beacon root (cancun field). -- noTxs bool // Flag whether an empty block without any transaction is expected -- onBlock BlockHookFn // Callback to call for each produced block -+ timestamp uint64 // The timestamp for sealing task -+ forceTime bool // Flag whether the given timestamp is immutable or not -+ parentHash common.Hash // Parent block hash, empty means the latest chain head -+ coinbase common.Address // The fee recipient address for including transaction -+ gasLimit uint64 // The validator's requested gas limit target -+ random common.Hash // The randomness generated by beacon chain, empty before the merge -+ withdrawals types.Withdrawals // List of withdrawals to include in block. -+ beaconRoot *common.Hash // The beacon root (cancun field). 
-+ noTxs bool // Flag whether an empty block without any transaction is expected -+ onBlock BlockHookFn // Callback to call for each produced block -+ slot uint64 // The slot in which the block is being produced -+ constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded] // The preconfirmations to include in the block - } -  - func doPrepareHeader(genParams *generateParams, chain *core.BlockChain, config *Config, chainConfig *params.ChainConfig, extra []byte, engine consensus.Engine) (*types.Header, *types.Header, error) { -@@ -1266,7 +1349,7 @@ } - return env, nil - } -  --func (w *worker) fillTransactionsSelectAlgo(interrupt *atomic.Int32, env *environment) ([]types.SimulatedBundle, []types.SimulatedBundle, []types.UsedSBundle, map[common.Hash]struct{}, error) { -+func (w *worker) fillTransactionsSelectAlgo(interrupt *atomic.Int32, env *environment, constraints types.HashToConstraintDecoded) ([]types.SimulatedBundle, []types.SimulatedBundle, []types.UsedSBundle, map[common.Hash]struct{}, error) { - var ( - blockBundles []types.SimulatedBundle - allBundles []types.SimulatedBundle -@@ -1274,21 +1357,35 @@ usedSbundles []types.UsedSBundle - mempoolTxHashes map[common.Hash]struct{} - err error - ) -- switch w.flashbots.algoType { -- case ALGO_GREEDY, ALGO_GREEDY_BUCKETS, ALGO_GREEDY_MULTISNAP, ALGO_GREEDY_BUCKETS_MULTISNAP: -+ -+ // switch w.flashbots.algoType { -+ // -+ // case ALGO_GREEDY, ALGO_GREEDY_BUCKETS, ALGO_GREEDY_MULTISNAP, ALGO_GREEDY_BUCKETS_MULTISNAP: -+ // -+ // blockBundles, allBundles, usedSbundles, mempoolTxHashes, err = w.fillTransactionsAlgoWorker(interrupt, env) -+ // blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) -+ // case ALGO_MEV_GETH: -+ // blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) -+ // default: -+ // blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) -+ // } -+ -+ // // FIXME: (BOLT) the greedy algorithms do not support the constraints interface at the moment. -+ // // As such for this PoC we will be always using the MEV GETH algorithm regardless of the worker configuration. -+ if len(constraints) > 0 { -+ blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env, constraints) -+ } else { - blockBundles, allBundles, usedSbundles, mempoolTxHashes, err = w.fillTransactionsAlgoWorker(interrupt, env) -- case ALGO_MEV_GETH: -- blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env) -- default: -- blockBundles, allBundles, mempoolTxHashes, err = w.fillTransactions(interrupt, env) - } -+ - return blockBundles, allBundles, usedSbundles, mempoolTxHashes, err - } -  - // fillTransactions retrieves the pending transactions from the txpool and fills them - // into the given sealing block. The transaction selection and ordering strategy can - // be customized with the plugin in the future. 
--func (w *worker) fillTransactions(interrupt *atomic.Int32, env *environment) ([]types.SimulatedBundle, []types.SimulatedBundle, map[common.Hash]struct{}, error) { -+func (w *worker) fillTransactions(interrupt *atomic.Int32, env *environment, constraints types.HashToConstraintDecoded) ([]types.SimulatedBundle, []types.SimulatedBundle, map[common.Hash]struct{}, error) { -+ log.Info(fmt.Sprintf("Filling transactions with %d constraints:", len(constraints))) - w.mu.RLock() - tip := w.tip - w.mu.RUnlock() -@@ -1304,6 +1401,12 @@ mempoolTxHashes[tx.Hash] = struct{}{} - } - } -  -+ // NOTE: as done with builder txs, we need to fill mempoolTxHashes with the constraints hashes -+ // in order to pass block validation -+ for hash := range constraints { -+ mempoolTxHashes[hash] = struct{}{} -+ } -+ - if env.header.BaseFee != nil { - filter.BaseFee = uint256.MustFromBig(env.header.BaseFee) - } -@@ -1316,6 +1419,45 @@ - filter.OnlyPlainTxs, filter.OnlyBlobTxs = false, true - pendingBlobTxs := w.eth.TxPool().Pending(filter) -  -+ // Drop all transactions that conflict with the constraints (sender, nonce) -+ signerAndNonceOfConstraints := make(map[common.Address]uint64) -+ -+ for _, constraint := range constraints { -+ from, err := types.Sender(env.signer, constraint.Tx) -+ log.Info(fmt.Sprintf("Inside fillTransactions, constraint %s from %s", constraint.Tx.Hash().String(), from.String())) -+ if err != nil { -+ // NOTE: is this the right behaviour? If this happens the builder is not able to -+ // produce a valid bid -+ log.Error("Failed to recover sender from constraint. Skipping constraint", "err", err) -+ continue -+ } -+ -+ signerAndNonceOfConstraints[from] = constraint.Tx.Nonce() -+ } -+ for sender, lazyTxs := range pendingPlainTxs { -+ common.Filter(&lazyTxs, func(lazyTx *txpool.LazyTransaction) bool { -+ if nonce, ok := signerAndNonceOfConstraints[sender]; ok { -+ if lazyTx.Tx.Nonce() == nonce { -+ return false -+ } -+ } -+ -+ return true -+ }) -+ } -+ -+ for sender, lazyTxs := range pendingBlobTxs { -+ common.Filter(&lazyTxs, func(lazyTx *txpool.LazyTransaction) bool { -+ if nonce, ok := signerAndNonceOfConstraints[sender]; ok { -+ if lazyTx.Tx.Nonce() == nonce { -+ return false -+ } -+ } -+ -+ return true -+ }) -+ } -+ - // Split the pending transactions into locals and remotes. - localPlainTxs, remotePlainTxs := make(map[common.Address][]*txpool.LazyTransaction), pendingPlainTxs - localBlobTxs, remoteBlobTxs := make(map[common.Address][]*txpool.LazyTransaction), pendingBlobTxs -@@ -1333,48 +1475,49 @@ } -  - var blockBundles []types.SimulatedBundle - var allBundles []types.SimulatedBundle -- if w.flashbots.isFlashbots { -- bundles, ccBundleCh := w.eth.TxPool().MevBundles(env.header.Number, env.header.Time) -- bundles = append(bundles, <-ccBundleCh...) 
-- -- var ( -- bundleTxs []*types.Transaction -- resultingBundle simulatedBundle -- mergedBundles []types.SimulatedBundle -- numBundles int -- err error -- ) -- // Sets allBundles in outer scope -- bundleTxs, resultingBundle, mergedBundles, numBundles, allBundles, err = w.generateFlashbotsBundle(env, bundles, pending) -- if err != nil { -- log.Error("Failed to generate flashbots bundle", "err", err) -- return nil, nil, nil, err -- } -- log.Info("Flashbots bundle", "ethToCoinbase", ethIntToFloat(resultingBundle.TotalEth), "gasUsed", resultingBundle.TotalGasUsed, "bundleScore", resultingBundle.MevGasPrice, "bundleLength", len(bundleTxs), "numBundles", numBundles, "worker", w.flashbots.maxMergedBundles) -- if len(bundleTxs) == 0 { -- return nil, nil, nil, errors.New("no bundles to apply") -- } -- if err := w.commitBundle(env, bundleTxs, interrupt); err != nil { -- return nil, nil, nil, err -- } -- blockBundles = mergedBundles -- env.profit.Add(env.profit, resultingBundle.EthSentToCoinbase) -- } -+ // if w.flashbots.isFlashbots { -+ // bundles, ccBundleCh := w.eth.TxPool().MevBundles(env.header.Number, env.header.Time) -+ // bundles = append(bundles, <-ccBundleCh...) -+ // -+ // var ( -+ // bundleTxs []*types.Transaction -+ // resultingBundle simulatedBundle -+ // mergedBundles []types.SimulatedBundle -+ // numBundles int -+ // err error -+ // ) -+ // // Sets allBundles in outer scope -+ // bundleTxs, resultingBundle, mergedBundles, numBundles, allBundles, err = w.generateFlashbotsBundle(env, bundles, pending) -+ // if err != nil { -+ // log.Error("Failed to generate flashbots bundle", "err", err) -+ // return nil, nil, nil, err -+ // } -+ // log.Info("Flashbots bundle", "ethToCoinbase", ethIntToFloat(resultingBundle.TotalEth), "gasUsed", resultingBundle.TotalGasUsed, "bundleScore", resultingBundle.MevGasPrice, "bundleLength", len(bundleTxs), "numBundles", numBundles, "worker", w.flashbots.maxMergedBundles) -+ // if len(bundleTxs) == 0 { -+ // log.Info("No bundles to apply") -+ // return nil, nil, nil, errors.New("no bundles to apply") -+ // } -+ // if err := w.commitBundle(env, bundleTxs, interrupt); err != nil { -+ // return nil, nil, nil, err -+ // } -+ // blockBundles = mergedBundles -+ // env.profit.Add(env.profit, resultingBundle.EthSentToCoinbase) -+ // } -  - // Fill the block with all available pending transactions. 
-- if len(localPlainTxs) > 0 || len(localBlobTxs) > 0 { -+ if len(localPlainTxs) > 0 || len(localBlobTxs) > 0 || len(constraints) > 0 { - plainTxs := newTransactionsByPriceAndNonce(env.signer, localPlainTxs, nil, nil, env.header.BaseFee) - blobTxs := newTransactionsByPriceAndNonce(env.signer, localBlobTxs, nil, nil, env.header.BaseFee) -  -- if err := w.commitTransactions(env, plainTxs, blobTxs, interrupt); err != nil { -+ if err := w.commitTransactions(env, plainTxs, blobTxs, constraints, interrupt); err != nil { - return nil, nil, nil, err - } - } -- if len(remotePlainTxs) > 0 || len(remoteBlobTxs) > 0 { -+ if len(remotePlainTxs) > 0 || len(remoteBlobTxs) > 0 || len(constraints) > 0 { - plainTxs := newTransactionsByPriceAndNonce(env.signer, remotePlainTxs, nil, nil, env.header.BaseFee) - blobTxs := newTransactionsByPriceAndNonce(env.signer, remoteBlobTxs, nil, nil, env.header.BaseFee) -  -- if err := w.commitTransactions(env, plainTxs, blobTxs, interrupt); err != nil { -+ if err := w.commitTransactions(env, plainTxs, blobTxs, constraints, interrupt); err != nil { - return nil, nil, nil, err - } - } -@@ -1400,6 +1543,7 @@ } - // Split the pending transactions into locals and remotes - // Fill the block with all available pending transactions. - pending := w.eth.TxPool().Pending(filter) -+ - mempoolTxHashes := make(map[common.Hash]struct{}, len(pending)) - for _, txs := range pending { - for _, tx := range txs { -@@ -1587,11 +1731,25 @@ } -  - orderCloseTime := time.Now() -  -- blockBundles, allBundles, usedSbundles, mempoolTxHashes, err := w.fillTransactionsSelectAlgo(nil, work) -+ var constraints types.HashToConstraintDecoded -+ -+ if params.constraintsCache != nil { -+ constraints, _ = params.constraintsCache.Get(params.slot) -+ log.Info(fmt.Sprintf("[BOLT]: found %d constraints for slot %d ", len(constraints), params.slot)) -+ } -+ -+ blockBundles, allBundles, usedSbundles, mempoolTxHashes, err := w.fillTransactionsSelectAlgo(nil, work, constraints) - if err != nil { - return &newPayloadResult{err: err} - } -  -+ // NOTE: as done with builder txs, we need to fill mempoolTxHashes with the constraints hashes -+ // in order to pass block validation. Otherwise the constraints will be rejected as unknown -+ // because they not part of the mempool and not part of the known bundles -+ for hash := range constraints { -+ mempoolTxHashes[hash] = struct{}{} -+ } -+ - // We mark transactions created by the builder as mempool transactions so code validating bundles will not fail - // for transactions created by the builder such as mev share refunds. - for _, tx := range work.txs { -@@ -1645,6 +1803,8 @@ - return block, blockProfit, nil - } -  -+// checkProposerPayment checks that the last transaction in the block is targeting the -+// validator coinbase and returns the block profit equal to the value of the last transaction. 
- func (w *worker) checkProposerPayment(work *environment, validatorCoinbase common.Address) (*big.Int, error) { - if len(work.txs) == 0 { - return nil, errors.New("no proposer payment tx") -@@ -1694,7 +1854,7 @@ return - } -  - // Fill pending transactions from the txpool -- _, _, _, _, err = w.fillTransactionsSelectAlgo(interrupt, work) -+ _, _, _, _, err = w.fillTransactionsSelectAlgo(interrupt, work, nil) - switch { - case err == nil: - // The entire block is filled, decrease resubmit interval in case -@@ -2198,6 +2358,8 @@ w.mu.Lock() - sender := w.coinbase - w.mu.Unlock() - builderBalance := env.state.GetBalance(sender).ToBig() -+ -+ log.Info(fmt.Sprintf("[BOLT]: builderBalance %v, reserve.builderBalance %v", builderBalance, reserve.builderBalance)) -  - availableFunds := new(big.Int).Sub(builderBalance, reserve.builderBalance) - if availableFunds.Sign() <= 0 {
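
To illustrate the selection rule that commitTransactions now follows (a constraint pinned to the current index always wins, and a pool transaction is only admitted if the gas still reserved for the pending constraints fits after it), here is a minimal, self-contained sketch. The type names and gas numbers are made up for illustration only and are not the real miner types:

```go
package main

import "fmt"

// Simplified stand-ins for the real miner types: the actual code works on
// *types.Transaction, types.HashToConstraintDecoded and the tx-pool heaps.
type sketchTx struct {
	name string
	gas  uint64
}

type sketchConstraint struct {
	index *uint64 // nil = no fixed position in the block
	tx    sketchTx
}

func u64(v uint64) *uint64 { return &v }

func main() {
	gasLeft := uint64(100_000)

	// Constraints already split the way types.ParseConstraintsDecoded splits them:
	// one pinned to index 0, one without an index.
	indexed := []sketchConstraint{{index: u64(0), tx: sketchTx{"constraintA", 21_000}}}
	unindexed := []sketchConstraint{{tx: sketchTx{"constraintB", 21_000}}}
	constraintsGasLeft := uint64(42_000) // gas reserved for all pending constraints

	pool := []sketchTx{{"poolTx1", 21_000}, {"poolTx2", 50_000}}

	var block []string
	for {
		idx := uint64(len(block)) // mirrors env.tcount in the real loop
		switch {
		// A constraint pinned to the current index always goes first.
		case len(indexed) > 0 && *indexed[0].index == idx:
			c := indexed[0]
			indexed = indexed[1:]
			constraintsGasLeft -= c.tx.gas
			gasLeft -= c.tx.gas
			block = append(block, c.tx.name)

		// A pool tx is only taken if the remaining constraints still fit afterwards.
		case len(pool) > 0 && pool[0].gas+constraintsGasLeft <= gasLeft:
			t := pool[0]
			pool = pool[1:]
			gasLeft -= t.gas
			block = append(block, t.name)

		// The pool tx does not fit together with the reserved constraint gas: drop it
		// (the real code pops the whole sender account here).
		case len(pool) > 0:
			pool = pool[1:]

		// No pool txs left: fall back to the unindexed constraints.
		case len(unindexed) > 0:
			c := unindexed[0]
			unindexed = unindexed[1:]
			constraintsGasLeft -= c.tx.gas
			gasLeft -= c.tx.gas
			block = append(block, c.tx.name)

		default:
			// poolTx2 was dropped because taking it would leave no room for constraintB.
			fmt.Println(block) // [constraintA poolTx1 constraintB]
			return
		}
	}
}
```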
+64 -25
diff --git flashbots/builder/miner/worker_test.go chainbound/bolt/miner/worker_test.go -index d65ad578de31558b667c7934cb7581751853fa8f..745a476183e62a79286c3dfb5cc90566092244d4 100644 ---- flashbots/builder/miner/worker_test.go -+++ chainbound/bolt/miner/worker_test.go -@@ -24,6 +24,7 @@ "sync/atomic" - "testing" - "time" -  -+ "github.com/chainbound/shardmap" - "github.com/ethereum/go-ethereum/accounts" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/consensus" -@@ -77,6 +78,9 @@ // Test transactions - pendingTxs []*types.Transaction - newTxs []*types.Transaction -  -+ // Test testConstraintsCache -+ testConstraintsCache = new(shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) -+ - testConfig = &Config{ - Recommit: time.Second, - GasCeil: params.GenesisGasLimit, -@@ -84,6 +88,8 @@ } -  - defaultGenesisAlloc = types.GenesisAlloc{testBankAddress: {Balance: testBankFunds}} - ) -+ -+const pendingTxsLen = 50 -  - func init() { - testTxPoolConfig = legacypool.DefaultConfig -@@ -98,15 +104,32 @@ Epoch: 30000, - } -  - signer := types.LatestSigner(params.TestChainConfig) -- tx1 := types.MustSignNewTx(testBankKey, signer, &types.AccessListTx{ -- ChainID: params.TestChainConfig.ChainID, -- Nonce: 0, -- To: &testUserAddress, -- Value: big.NewInt(1000), -- Gas: params.TxGas, -- GasPrice: big.NewInt(params.InitialBaseFee), -- }) -- pendingTxs = append(pendingTxs, tx1) -+ for i := 0; i < pendingTxsLen; i++ { -+ tx1 := types.MustSignNewTx(testBankKey, signer, &types.AccessListTx{ -+ ChainID: params.TestChainConfig.ChainID, -+ Nonce: uint64(i), -+ To: &testUserAddress, -+ Value: big.NewInt(1000), -+ Gas: params.TxGas, -+ GasPrice: big.NewInt(params.InitialBaseFee), -+ }) -+ -+ // Add some constraints every 3 txs, and every 6 add an index -+ if i%3 == 0 { -+ idx := new(uint64) -+ if i%2 == 0 { -+ *idx = uint64(i) -+ } else { -+ idx = nil -+ } -+ constraints := make(map[common.Hash]*types.ConstraintDecoded) -+ constraints[tx1.Hash()] = &types.ConstraintDecoded{Index: idx, Tx: tx1} -+ // FIXME: slot 0 is probably not correct for these tests -+ testConstraintsCache.Put(0, constraints) -+ } -+ -+ pendingTxs = append(pendingTxs, tx1) -+ } -  - tx2 := types.MustSignNewTx(testBankKey, signer, &types.LegacyTx{ - Nonce: 1, -@@ -130,7 +153,7 @@ func newTestWorkerBackend(t *testing.T, chainConfig *params.ChainConfig, engine consensus.Engine, db ethdb.Database, alloc types.GenesisAlloc, n int, gasLimit uint64) *testWorkerBackend { - if alloc == nil { - alloc = defaultGenesisAlloc - } -- var gspec = &core.Genesis{ -+ gspec := &core.Genesis{ - Config: chainConfig, - GasLimit: gasLimit, - Alloc: alloc, -@@ -251,10 +274,10 @@ - w, _ := newTestWorker(t, chainConfig, engine, rawdb.NewMemoryDatabase(), nil, 0) - defer w.close() -  -- taskCh := make(chan struct{}, 2) -+ taskCh := make(chan struct{}, pendingTxsLen*2) - checkEqual := func(t *testing.T, task *task) { - // The work should contain 1 tx -- receiptLen, balance := 1, uint256.NewInt(1000) -+ receiptLen, balance := pendingTxsLen, uint256.NewInt(50_000) - if len(task.receipts) != receiptLen { - t.Fatalf("receipt number mismatch: have %d, want %d", len(task.receipts), receiptLen) - } -@@ -378,12 +401,12 @@ } -  - func TestGetSealingWorkEthash(t *testing.T) { - t.Parallel() -- testGetSealingWork(t, ethashChainConfig, ethash.NewFaker()) -+ testGetSealingWork(t, ethashChainConfig, ethash.NewFaker(), nil) - } -  - func TestGetSealingWorkClique(t *testing.T) { - t.Parallel() -- testGetSealingWork(t, cliqueChainConfig, 
clique.New(cliqueChainConfig.Clique, rawdb.NewMemoryDatabase())) -+ testGetSealingWork(t, cliqueChainConfig, clique.New(cliqueChainConfig.Clique, rawdb.NewMemoryDatabase()), nil) - } -  - func TestGetSealingWorkPostMerge(t *testing.T) { -@@ -391,10 +414,25 @@ t.Parallel() - local := new(params.ChainConfig) - *local = *ethashChainConfig - local.TerminalTotalDifficulty = big.NewInt(0) -- testGetSealingWork(t, local, ethash.NewFaker()) -+ testGetSealingWork(t, local, ethash.NewFaker(), nil) - } -  --func testGetSealingWork(t *testing.T, chainConfig *params.ChainConfig, engine consensus.Engine) { -+// TestGetSealingWorkWithConstraints tests the getSealingWork function with constraints. -+// This is the main test for the modified block building algorithm. Unfortunately -+// is not easy to make an end to end test where the constraints are pulled from the relay. -+// -+// A suggestion is to walk through the executing code with a debugger to further inspect the algorithm. -+// -+// However, if you want to check that functionality see `builder_test.go` -+func TestGetSealingWorkWithConstraints(t *testing.T) { -+ // t.Parallel() -+ local := new(params.ChainConfig) -+ *local = *ethashChainConfig -+ local.TerminalTotalDifficulty = big.NewInt(0) -+ testGetSealingWork(t, local, ethash.NewFaker(), testConstraintsCache) -+} -+ -+func testGetSealingWork(t *testing.T, chainConfig *params.ChainConfig, engine consensus.Engine, constraintsCache *shardmap.FIFOMap[uint64, types.HashToConstraintDecoded]) { - defer engine.Close() - w, b := newTestWorker(t, chainConfig, engine, rawdb.NewMemoryDatabase(), nil, 0) - defer w.close() -@@ -486,15 +524,16 @@ - // This API should work even when the automatic sealing is not enabled - for _, c := range cases { - r := w.getSealingBlock(&generateParams{ -- parentHash: c.parent, -- timestamp: timestamp, -- coinbase: c.coinbase, -- random: c.random, -- withdrawals: nil, -- beaconRoot: nil, -- noTxs: false, -- forceTime: true, -- onBlock: nil, -+ parentHash: c.parent, -+ timestamp: timestamp, -+ coinbase: c.coinbase, -+ random: c.random, -+ withdrawals: nil, -+ beaconRoot: nil, -+ noTxs: false, -+ forceTime: true, -+ onBlock: nil, -+ constraintsCache: constraintsCache, - }) - if c.expectErr { - if r.err == nil {
+3 -2
diff --git flashbots/builder/miner/algo_common_test.go chainbound/bolt/miner/algo_common_test.go -index 1b4853863eef1137a4bb83492a1e0e3fd7247180..105c709b2aa22b38bb20922e0a76474688138b55 100644 ---- flashbots/builder/miner/algo_common_test.go -+++ chainbound/bolt/miner/algo_common_test.go -@@ -528,13 +528,14 @@ t.Cleanup(func() { - testConfig.AlgoType = ALGO_MEV_GETH - }) -  -- for _, algoType := range []AlgoType{ALGO_MEV_GETH, ALGO_GREEDY, ALGO_GREEDY_BUCKETS, ALGO_GREEDY_MULTISNAP, ALGO_GREEDY_BUCKETS_MULTISNAP} { -+ for _, algoType := range []AlgoType{ALGO_MEV_GETH} { - local := new(params.ChainConfig) - *local = *ethashChainConfig - local.TerminalTotalDifficulty = big.NewInt(0) - testConfig.AlgoType = algoType -- testGetSealingWork(t, local, ethash.NewFaker()) -+ testGetSealingWork(t, local, ethash.NewFaker(), nil) - } -+ t.Fail() - } -  - func TestGetSealingWorkAlgosWithProfit(t *testing.T) {

In the API backend, we don’t differentiate between private and public transactions for simplicity.

+1 -1
diff --git flashbots/builder/eth/api_backend.go chainbound/bolt/eth/api_backend.go -index ef2c444ba0acabde26dbc629783115446a9aeb08..170218725eafa10a7390ae521d164c1426d4cd8b 100644 ---- flashbots/builder/eth/api_backend.go -+++ chainbound/bolt/eth/api_backend.go -@@ -290,7 +290,7 @@ } -  - func (b *EthAPIBackend) SendTx(ctx context.Context, signedTx *types.Transaction, private bool) error { - if private { -- return b.eth.txPool.Add([]*types.Transaction{signedTx}, false, false, true)[0] -+ return b.eth.txPool.Add([]*types.Transaction{signedTx}, true, false, true)[0] - } else { - return b.eth.txPool.Add([]*types.Transaction{signedTx}, true, false, false)[0] - }
+0 -93
diff --git flashbots/builder/eth/block-validation/api_test.go chainbound/bolt/eth/block-validation/api_test.go -index 4d8afc6fff1e732a3781b356e0217cfcb91fa736..4340e99b35bec87071841ad1f14af422ea814583 100644 ---- flashbots/builder/eth/block-validation/api_test.go -+++ chainbound/bolt/eth/block-validation/api_test.go -@@ -845,99 +845,6 @@ } - return blockRequest, nil - } -  --func TestValidateBuilderSubmissionV2_CoinbasePaymentUnderflow(t *testing.T) { -- genesis, preMergeBlocks := generatePreMergeChain(20) -- lastBlock := preMergeBlocks[len(preMergeBlocks)-1] -- time := lastBlock.Time() + 5 -- genesis.Config.ShanghaiTime = &time -- n, ethservice := startEthService(t, genesis, preMergeBlocks) -- ethservice.Merger().ReachTTD() -- defer n.Close() -- -- api := NewBlockValidationAPI(ethservice, nil, true, true) -- -- baseFee := eip1559.CalcBaseFee(ethservice.BlockChain().Config(), lastBlock.Header()) -- txs := make(types.Transactions, 0) -- -- statedb, _ := ethservice.BlockChain().StateAt(lastBlock.Root()) -- nonce := statedb.GetNonce(testAddr) -- validatorNonce := statedb.GetNonce(testValidatorAddr) -- signer := types.LatestSigner(ethservice.BlockChain().Config()) -- -- expectedProfit := uint64(0) -- -- tx1, _ := types.SignTx(types.NewTransaction(nonce, common.Address{0x16}, big.NewInt(10), 21000, big.NewInt(2*baseFee.Int64()), nil), signer, testKey) -- txs = append(txs, tx1) -- expectedProfit += 21000 * baseFee.Uint64() -- -- // this tx will use 56996 gas -- tx2, _ := types.SignTx(types.NewContractCreation(nonce+1, new(big.Int), 1000000, big.NewInt(2*baseFee.Int64()), logCode), signer, testKey) -- txs = append(txs, tx2) -- expectedProfit += 56996 * baseFee.Uint64() -- -- tx3, _ := types.SignTx(types.NewTransaction(nonce+2, testAddr, big.NewInt(10), 21000, baseFee, nil), signer, testKey) -- txs = append(txs, tx3) -- -- // Test transferring out more than the profit -- toTransferOut := 2*expectedProfit - 21000*baseFee.Uint64() -- tx4, _ := types.SignTx(types.NewTransaction(validatorNonce, testAddr, big.NewInt(int64(toTransferOut)), 21000, baseFee, nil), signer, testValidatorKey) -- txs = append(txs, tx4) -- expectedProfit += 7 -- -- withdrawals := []*types.Withdrawal{ -- { -- Index: 0, -- Validator: 1, -- Amount: 100, -- Address: testAddr, -- }, -- { -- Index: 1, -- Validator: 1, -- Amount: 100, -- Address: testAddr, -- }, -- } -- withdrawalsRoot := types.DeriveSha(types.Withdrawals(withdrawals), trie.NewStackTrie(nil)) -- -- buildBlockArgs := buildBlockArgs{ -- parentHash: lastBlock.Hash(), -- parentRoot: lastBlock.Root(), -- feeRecipient: testValidatorAddr, -- txs: txs, -- random: common.Hash{}, -- number: lastBlock.NumberU64() + 1, -- gasLimit: lastBlock.GasLimit(), -- timestamp: lastBlock.Time() + 5, -- extraData: nil, -- baseFeePerGas: baseFee, -- withdrawals: withdrawals, -- } -- -- execData, err := buildBlock(buildBlockArgs, ethservice.BlockChain()) -- require.NoError(t, err) -- -- value := big.NewInt(int64(expectedProfit)) -- -- req, err := executableDataToBlockValidationRequest(execData, testValidatorAddr, value, withdrawalsRoot) -- require.NoError(t, err) -- require.ErrorContains(t, api.ValidateBuilderSubmissionV2(req), "payment tx not to the proposers fee recipient") -- -- // try to claim less profit than expected, should work -- value.SetUint64(expectedProfit - 1) -- -- req, err = executableDataToBlockValidationRequest(execData, testValidatorAddr, value, withdrawalsRoot) -- require.NoError(t, err) -- require.ErrorContains(t, api.ValidateBuilderSubmissionV2(req), "payment tx not to 
the proposers fee recipient") -- -- // try to claim more profit than expected, should fail -- value.SetUint64(expectedProfit + 1) -- -- req, err = executableDataToBlockValidationRequest(execData, testValidatorAddr, value, withdrawalsRoot) -- require.NoError(t, err) -- require.ErrorContains(t, api.ValidateBuilderSubmissionV2(req), "payment") --} -- - // This tests payment when the proposer fee recipient is the same as the coinbase - func TestValidateBuilderSubmissionV2_CoinbasePaymentDefault(t *testing.T) { - genesis, preMergeBlocks := generatePreMergeChain(20)

We added the ConstraintDecoded primitive type in the core module.


This is not the greatest place for this type, but given that it uses common.Hash and Transaction and is used in both the builder and miner packages, it should be fine here.

+4 -8
diff --git flashbots/builder/core/blockchain.go chainbound/bolt/core/blockchain.go -index e1b1ea1bca9d90158551583cb6f2e84612928faf..12639a34d6ded9dd74e231cadbe18973583c31c5 100644 ---- flashbots/builder/core/blockchain.go -+++ chainbound/bolt/core/blockchain.go -@@ -2494,14 +2494,13 @@ if err != nil { - return err - } -  -- feeRecipientBalanceAfter := new(uint256.Int).Set(statedb.GetBalance(feeRecipient)) -- -- amtBeforeOrWithdrawn := new(uint256.Int).Set(feeRecipientBalanceBefore) -+ feeRecipientBalanceDelta := new(uint256.Int).Set(statedb.GetBalance(feeRecipient)) -+ feeRecipientBalanceDelta.Sub(feeRecipientBalanceDelta, feeRecipientBalanceBefore) - if excludeWithdrawals { - for _, w := range block.Withdrawals() { - if w.Address == feeRecipient { - amount := new(uint256.Int).Mul(new(uint256.Int).SetUint64(w.Amount), uint256.NewInt(params.GWei)) -- amtBeforeOrWithdrawn = amtBeforeOrWithdrawn.Add(amtBeforeOrWithdrawn, amount) -+ feeRecipientBalanceDelta.Sub(feeRecipientBalanceDelta, amount) - } - } - } -@@ -2530,10 +2529,7 @@ } -  - // Validate proposer payment -  -- if useBalanceDiffProfit && feeRecipientBalanceAfter.Cmp(amtBeforeOrWithdrawn) >= 0 { -- feeRecipientBalanceDelta := new(uint256.Int).Set(feeRecipientBalanceAfter) -- feeRecipientBalanceDelta = feeRecipientBalanceDelta.Sub(feeRecipientBalanceDelta, amtBeforeOrWithdrawn) -- -+ if useBalanceDiffProfit { - uint256ExpectedProfit, ok := uint256.FromBig(expectedProfit) - if !ok { - if feeRecipientBalanceDelta.Cmp(uint256ExpectedProfit) >= 0 {
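
To make the new payment check concrete, here is a toy example of the balance-delta computation with made-up numbers (the real code reads the fee recipient's state balances and converts withdrawal amounts from gwei):

```go
package main

import (
	"fmt"

	"github.com/holiman/uint256"
)

func main() {
	// Made-up values (in wei): the proposer fee recipient's balance before and
	// after the block, plus a consensus-layer withdrawal credited to the same address.
	balanceBefore := uint256.NewInt(1_000_000)
	balanceAfter := uint256.NewInt(1_600_000)
	withdrawalToFeeRecipient := uint256.NewInt(250_000)

	// New logic: the profit is the raw balance delta...
	delta := new(uint256.Int).Sub(balanceAfter, balanceBefore)
	// ...minus withdrawals to the fee recipient, so that consensus-layer
	// withdrawals cannot be counted as builder payment.
	delta.Sub(delta, withdrawalToFeeRecipient)

	expectedProfit := uint256.NewInt(350_000)
	// The submission is accepted if the delta covers the value claimed in the bid.
	fmt.Println(delta.Cmp(expectedProfit) >= 0) // true: 350_000 >= 350_000
}
```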
(new) +62 -0
diff --git flashbots/builder/core/types/constraints.go chainbound/bolt/core/types/constraints.go -new file mode 100644 -index 0000000000000000000000000000000000000000..e587b475531ba5585e97da44e848386e6345568e ---- /dev/null -+++ chainbound/bolt/core/types/constraints.go -@@ -0,0 +1,62 @@ -+package types -+ -+import ( -+ "sort" -+ -+ "github.com/ethereum/go-ethereum/common" -+) -+ -+// NOTE: not the greatest place for this type but given that it uses -+// `common.Hash`, `Transaction` and it's used in both the builder -+// package and the miner package, here it's a good place for now -+ -+type ( -+ HashToConstraintDecoded = map[common.Hash]*ConstraintDecoded -+ ConstraintDecoded struct { -+ Index *uint64 -+ Tx *Transaction -+ } -+) -+ -+// ParseConstraintsDecoded receives a map of constraints and returns -+// - a slice of constraints sorted by index -+// - a slice of constraints without index sorted by nonce and hash -+// - the total gas required by the constraints -+// - the total blob gas required by the constraints -+func ParseConstraintsDecoded(constraints HashToConstraintDecoded) ([]*ConstraintDecoded, []*ConstraintDecoded, uint64, uint64) { -+ // Here we initialize and track the constraints left to be executed along -+ // with their gas requirements -+ constraintsOrderedByIndex := make([]*ConstraintDecoded, 0, len(constraints)) -+ constraintsWithoutIndex := make([]*ConstraintDecoded, 0, len(constraints)) -+ constraintsTotalGasLeft := uint64(0) -+ constraintsTotalBlobGasLeft := uint64(0) -+ -+ for _, constraint := range constraints { -+ if constraint.Index == nil { -+ constraintsWithoutIndex = append(constraintsWithoutIndex, constraint) -+ } else { -+ constraintsOrderedByIndex = append(constraintsOrderedByIndex, constraint) -+ } -+ constraintsTotalGasLeft += constraint.Tx.Gas() -+ constraintsTotalBlobGasLeft += constraint.Tx.BlobGas() -+ } -+ -+ // Sorts the constraints by index ascending -+ sort.Slice(constraintsOrderedByIndex, func(i, j int) bool { -+ // By assumption, all constraints here have a non-nil index -+ return *constraintsOrderedByIndex[i].Index < *constraintsOrderedByIndex[j].Index -+ }) -+ -+ // Sorts the unindexed constraints by nonce ascending and by hash -+ sort.Slice(constraintsWithoutIndex, func(i, j int) bool { -+ iNonce := constraintsWithoutIndex[i].Tx.Nonce() -+ jNonce := constraintsWithoutIndex[j].Tx.Nonce() -+ // Sort by hash -+ if iNonce == jNonce { -+ return constraintsWithoutIndex[i].Tx.Hash().Cmp(constraintsWithoutIndex[j].Tx.Hash()) < 0 -+ } -+ return iNonce < jNonce -+ }) -+ -+ return constraintsOrderedByIndex, constraintsWithoutIndex, constraintsTotalGasLeft, constraintsTotalBlobGasLeft -+}
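
A short usage sketch of ParseConstraintsDecoded follows; the two transactions are unsigned dummies and only their nonce and gas matter here:

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
)

func main() {
	to := common.HexToAddress("0x000000000000000000000000000000000000dead")
	tx1 := types.NewTx(&types.LegacyTx{Nonce: 0, To: &to, Value: big.NewInt(0), Gas: 21_000, GasPrice: big.NewInt(1)})
	tx2 := types.NewTx(&types.LegacyTx{Nonce: 1, To: &to, Value: big.NewInt(0), Gas: 21_000, GasPrice: big.NewInt(1)})

	idx := uint64(0)
	constraints := types.HashToConstraintDecoded{
		tx1.Hash(): {Index: &idx, Tx: tx1}, // pinned to position 0 in the block
		tx2.Hash(): {Index: nil, Tx: tx2},  // no position preference
	}

	indexed, unindexed, gas, blobGas := types.ParseConstraintsDecoded(constraints)
	// 1 indexed constraint, 1 unindexed constraint, 42000 total gas, 0 blob gas
	fmt.Println(len(indexed), len(unindexed), gas, blobGas)
}
```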

Common utilities and types used across all packages. This includes the new constraint wire types (SignedConstraints, Constraint, InclusionProof) and small generic slice helpers.

+159 -0
diff --git flashbots/builder/common/types.go chainbound/bolt/common/types.go -index aadca87f82af89543de3387e24a90cba5fe1846f..21dff977795f300d2279733002f97bd51c494c94 100644 ---- flashbots/builder/common/types.go -+++ chainbound/bolt/common/types.go -@@ -31,6 +31,14 @@ "strings" -  - "github.com/ethereum/go-ethereum/common/hexutil" - "golang.org/x/crypto/sha3" -+ -+ "github.com/attestantio/go-builder-client/api/bellatrix" -+ "github.com/attestantio/go-builder-client/api/capella" -+ "github.com/attestantio/go-builder-client/api/deneb" -+ builderSpec "github.com/attestantio/go-builder-client/spec" -+ consensusSpec "github.com/attestantio/go-eth2-client/spec" -+ "github.com/attestantio/go-eth2-client/spec/phase0" -+ fastSsz "github.com/ferranbt/fastssz" - ) -  - // Lengths of hashes and addresses in bytes. -@@ -475,3 +483,154 @@ } else { - return err - } - } -+ -+type HexBytes []byte -+ -+// MarshalJSON implements json.Marshaler. -+func (h HexBytes) MarshalJSON() ([]byte, error) { -+ return []byte(fmt.Sprintf(`"%#x"`, []byte(h))), nil -+} -+ -+// UnmarshalJSON implements json.Unmarshaler. -+func (s *HexBytes) UnmarshalJSON(input []byte) error { -+ if len(input) == 0 { -+ return errors.New("input missing") -+ } -+ -+ if !bytes.HasPrefix(input, []byte{'"', '0', 'x'}) { -+ return errors.New("invalid prefix") -+ } -+ if !bytes.HasSuffix(input, []byte{'"'}) { -+ return errors.New("invalid suffix") -+ } -+ -+ src := input[3 : len(input)-1] -+ *s = make([]byte, hex.DecodedLen(len(src))) -+ -+ _, err := hex.Decode(*s, input[3:len(input)-1]) -+ if err != nil { -+ return err -+ } -+ -+ return nil -+} -+ -+// InclusionProof is a Merkle Multiproof of inclusion of a set of TransactionHashes -+type InclusionProof struct { -+ TransactionHashes []Hash `json:"transaction_hashes"` -+ GeneralizedIndexes []uint64 `json:"generalized_indexes"` -+ MerkleHashes []*HexBytes `json:"merkle_hashes"` -+} -+ -+// InclusionProofFromMultiProof converts a fastssz.Multiproof into an InclusionProof, without -+// filling the TransactionHashes -+func InclusionProofFromMultiProof(mp *fastSsz.Multiproof) *InclusionProof { -+ merkleHashes := make([]*HexBytes, len(mp.Hashes)) -+ for i, h := range mp.Hashes { -+ merkleHashes[i] = new(HexBytes) -+ *(merkleHashes[i]) = h -+ } -+ -+ leaves := make([]*HexBytes, len(mp.Leaves)) -+ for i, h := range mp.Leaves { -+ leaves[i] = new(HexBytes) -+ *(leaves[i]) = h -+ } -+ generalIndexes := make([]uint64, len(mp.Indices)) -+ for i, idx := range mp.Indices { -+ generalIndexes[i] = uint64(idx) -+ } -+ return &InclusionProof{ -+ MerkleHashes: merkleHashes, -+ GeneralizedIndexes: generalIndexes, -+ } -+} -+ -+func (p *InclusionProof) String() string { -+ return JSONStringify(p) -+} -+ -+// A wrapper struct over `builderSpec.VersionedSubmitBlockRequest` -+// to include preconfirmation proofs -+type VersionedSubmitBlockRequestWithProofs struct { -+ Inner *builderSpec.VersionedSubmitBlockRequest `json:"inner"` -+ Proofs *InclusionProof `json:"proofs"` -+} -+ -+// this is necessary, because the mev-boost-relay deserialization doesn't expect a "Version" and "Data" wrapper object -+// for deserialization. Instead, it tries to decode the object into the "Deneb" version first and if that fails, it tries -+// the "Capella" version. This is a workaround to make the deserialization work. 
-+func (v *VersionedSubmitBlockRequestWithProofs) MarshalJSON() ([]byte, error) { -+ switch v.Inner.Version { -+ case consensusSpec.DataVersionBellatrix: -+ return json.Marshal(struct { -+ Inner *bellatrix.SubmitBlockRequest `json:"inner"` -+ Proofs *InclusionProof `json:"proofs"` -+ }{ -+ Inner: v.Inner.Bellatrix, -+ Proofs: v.Proofs, -+ }) -+ case consensusSpec.DataVersionCapella: -+ return json.Marshal(struct { -+ Inner *capella.SubmitBlockRequest `json:"inner"` -+ Proofs *InclusionProof `json:"proofs"` -+ }{ -+ Inner: v.Inner.Capella, -+ Proofs: v.Proofs, -+ }) -+ case consensusSpec.DataVersionDeneb: -+ return json.Marshal(struct { -+ Inner *deneb.SubmitBlockRequest `json:"inner"` -+ Proofs *InclusionProof `json:"proofs"` -+ }{ -+ Inner: v.Inner.Deneb, -+ Proofs: v.Proofs, -+ }) -+ } -+ -+ return nil, fmt.Errorf("unknown data version %d", v.Inner.Version) -+} -+ -+func (v *VersionedSubmitBlockRequestWithProofs) String() string { -+ return JSONStringify(v) -+} -+ -+// SignedConstraintsList are a list of proposer constraints that a builder must satisfy -+// in order to produce a valid bid. This is not defined on the -+// [spec](https://chainbound.github.io/bolt-docs/api/builder-api) -+// but it's useful as an helper type -+type SignedConstraintsList = []*SignedConstraints -+ -+// Reference: https://chainbound.github.io/bolt-docs/api/builder-api -+type SignedConstraints struct { -+ Message ConstraintMessage `json:"message"` -+ Signature phase0.BLSSignature `json:"signature"` -+} -+ -+// Reference: https://chainbound.github.io/bolt-docs/api/builder-api -+type ConstraintMessage struct { -+ Constraints []*Constraint `json:"constraints"` -+ ValidatorIndex uint64 `json:"validator_index"` -+ Slot uint64 `json:"slot"` -+} -+ -+// Reference: https://chainbound.github.io/bolt-docs/api/builder-api -+type Constraint struct { -+ Index *uint64 `json:"index"` -+ Tx HexBytes `json:"tx"` -+} -+ -+// ConstraintSubscriptionAuth is the struct the builder signs over to authenticate -+// when subscribing to SSE constraint events from the relay -+type ConstraintSubscriptionAuth struct { -+ PublicKey phase0.BLSPubKey `json:"publicKey"` -+ Slot uint64 `json:"slot"` -+} -+ -+func (c *ConstraintSubscriptionAuth) String() string { -+ buf, err := json.Marshal(c) -+ if err != nil { -+ return fmt.Sprintf("failed to marshal ConstraintSubscriptionAuth: %v", err) -+ } -+ return string(buf) -+}
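
To show how these wire types map onto the decoded constraints used by the miner, here is a round-trip sketch. It assumes it is compiled inside the fork (it relies on the new common.Constraint, common.HexBytes and common.JSONStringify definitions) and uses a dummy transaction:

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
)

func main() {
	// A dummy transaction, encoded the way it travels inside a Constraint:
	// raw binary tx bytes, JSON-serialized as 0x-prefixed hex via HexBytes.
	to := common.HexToAddress("0x000000000000000000000000000000000000dead")
	tx := types.NewTx(&types.LegacyTx{Nonce: 0, To: &to, Value: big.NewInt(1), Gas: 21_000, GasPrice: big.NewInt(1)})
	raw, _ := tx.MarshalBinary()

	wire := &common.Constraint{Index: nil, Tx: common.HexBytes(raw)}
	fmt.Println(common.JSONStringify(wire)) // {"index":null,"tx":"0x..."}

	// On the builder side the raw bytes are decoded back into a transaction
	// before being stored in the per-slot constraints cache, keyed by hash.
	decoded := new(types.Transaction)
	if err := decoded.UnmarshalBinary(wire.Tx); err != nil {
		panic(err)
	}
	fmt.Println(decoded.Hash() == tx.Hash()) // true
}
```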
(new) +66 -0
diff --git flashbots/builder/common/utils.go chainbound/bolt/common/utils.go -new file mode 100644 -index 0000000000000000000000000000000000000000..50ee385ead7bc04281eab32d8579e5eb63afcb9f ---- /dev/null -+++ chainbound/bolt/common/utils.go -@@ -0,0 +1,66 @@ -+package common -+ -+import "encoding/json" -+ -+func Find[T any](slice []*T, predicate func(el *T) bool) *T { -+ for _, el := range slice { -+ if predicate(el) { -+ return el -+ } -+ } -+ return nil -+} -+ -+// Filter filters a slice in place, removing elements for which the predicate returns false. -+func Filter[T any](slice *[]*T, predicate func(el *T) bool) { -+ if slice == nil { -+ return -+ } -+ -+ for i := 0; i < len(*slice); i++ { -+ el := (*slice)[i] -+ if !predicate(el) { -+ // Remove the element by slicing -+ if i == len(*slice)-1 { -+ *slice = (*slice)[:i] -+ } else { -+ *slice = append((*slice)[:i], (*slice)[i+1:]...) -+ } -+ i-- // Decrement index to adjust for the removed element -+ } -+ } -+} -+ -+func Pop[T any](slice *[]*T) *T { -+ if slice == nil || len(*slice) == 0 { -+ return nil -+ } -+ el := (*slice)[len(*slice)-1] -+ *slice = (*slice)[:len(*slice)-1] -+ return el -+} -+ -+func Shift[T any](slice *[]*T) *T { -+ if slice == nil || len(*slice) == 0 { -+ return nil -+ } -+ el := (*slice)[0] -+ *slice = (*slice)[1:] -+ return el -+} -+ -+func Map[T any, U any](slice []*T, mapper func(el *T) *U) []*U { -+ result := make([]*U, len(slice)) -+ for i, el := range slice { -+ result[i] = mapper(el) -+ } -+ return result -+} -+ -+func JSONStringify(obj any) string { -+ b, err := json.Marshal(obj) -+ if err != nil { -+ return "" -+ } -+ return string(b) -+}
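
A quick usage sketch of the new generic slice helpers; Filter mutates the slice in place through the pointer, and Shift is what the miner uses to consume constraints in order:

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
)

func main() {
	vals := []*int{new(int), new(int), new(int), new(int)}
	for i := range vals {
		*vals[i] = i
	}

	// Keep only the even values; the slice is filtered in place.
	common.Filter(&vals, func(el *int) bool { return *el%2 == 0 })
	fmt.Println(len(vals)) // 2 (0 and 2)

	// Shift removes and returns the first element, or nil if the slice is empty.
	first := common.Shift(&vals)
	fmt.Println(*first, len(vals)) // 0 1
}
```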
(new) +29 -0
diff --git flashbots/builder/common/utils_test.go chainbound/bolt/common/utils_test.go -new file mode 100644 -index 0000000000000000000000000000000000000000..bbccefe2ebb1b900b90e4153f317f4e4c2b73f39 ---- /dev/null -+++ chainbound/bolt/common/utils_test.go -@@ -0,0 +1,29 @@ -+package common -+ -+import "testing" -+ -+func TestGenericFilter(t *testing.T) { -+ slice := []*int{new(int), new(int), new(int), new(int)} -+ for i := 0; i < len(slice); i++ { -+ *slice[i] = i -+ } -+ -+ Filter(&slice, func(el *int) bool { -+ return el != nil -+ }) -+ if len(slice) != 4 { -+ t.Errorf("Filter failed") -+ } -+ Filter(&slice, func(el *int) bool { -+ return *el%2 == 0 -+ }) -+ if len(slice) != 2 { -+ t.Errorf("Filter failed") -+ } -+ Filter(&slice, func(el *int) bool { -+ return el == nil -+ }) -+ if len(slice) != 0 { -+ t.Errorf("Filter failed") -+ } -+}
+18 -17
diff --git flashbots/builder/internal/ethapi/api.go chainbound/bolt/internal/ethapi/api.go -index e3b04835e2a7f57af1499b4f617000b19551f6ab..f53a6fc61716e6b770c8a244511944e50f77f607 100644 ---- flashbots/builder/internal/ethapi/api.go -+++ chainbound/bolt/internal/ethapi/api.go -@@ -242,7 +242,7 @@ } - pending, queue := s.b.TxPoolContent() -  - // Define a formatter to flatten a transaction into a string -- var format = func(tx *types.Transaction) string { -+ format := func(tx *types.Transaction) string { - if to := tx.To(); to != nil { - return fmt.Sprintf("%s: %v wei + %v gas × %v wei", tx.To().Hex(), tx.Value(), tx.Gas(), tx.GasPrice()) - } -@@ -1755,20 +1755,21 @@ } - if err := b.SendTx(ctx, tx, private); err != nil { - return common.Hash{}, err - } -- // Print a log with full tx details for manual investigations and interventions -- head := b.CurrentBlock() -- signer := types.MakeSigner(b.ChainConfig(), head.Number, head.Time) -- from, err := types.Sender(signer, tx) -- if err != nil { -- return common.Hash{}, err -- } -- -- if tx.To() == nil { -- addr := crypto.CreateAddress(from, tx.Nonce()) -- log.Info("Submitted contract creation", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "contract", addr.Hex(), "value", tx.Value()) -- } else { -- log.Info("Submitted transaction", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "recipient", tx.To(), "value", tx.Value()) -- } -+ // Print a log with full tx details for manual investigations and interventions. -+ // TODO: remove this log, too noisy -+ // head := b.CurrentBlock() -+ // signer := types.MakeSigner(b.ChainConfig(), head.Number, head.Time) -+ // from, err := types.Sender(signer, tx) -+ // if err != nil { -+ // return common.Hash{}, err -+ // } -+ // -+ // if tx.To() == nil { -+ // addr := crypto.CreateAddress(from, tx.Nonce()) -+ // log.Info("Submitted contract creation", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "contract", addr.Hex(), "value", tx.Value()) -+ // } else { -+ // log.Info("Submitted transaction", "hash", tx.Hash().Hex(), "from", from, "nonce", tx.Nonce(), "recipient", tx.To(), "value", tx.Value()) -+ // } - return tx.Hash(), nil - } -  -@@ -1952,11 +1953,11 @@ } - matchTx := sendArgs.toTransaction() -  - // Before replacing the old transaction, ensure the _new_ transaction fee is reasonable. -- var price = matchTx.GasPrice() -+ price := matchTx.GasPrice() - if gasPrice != nil { - price = gasPrice.ToInt() - } -- var gas = matchTx.Gas() -+ gas := matchTx.Gas() - if gasLimit != nil { - gas = uint64(*gasLimit) - }
+3 -4
diff --git flashbots/builder/internal/ethapi/transaction_args.go chainbound/bolt/internal/ethapi/transaction_args.go -index bae1c68641594887b4a800c0f7bfd6af58326ecf..7b4606742764a82120b6e2d7f656cfb46dbf9f88 100644 ---- flashbots/builder/internal/ethapi/transaction_args.go -+++ chainbound/bolt/internal/ethapi/transaction_args.go -@@ -37,9 +37,7 @@ "github.com/ethereum/go-ethereum/rpc" - "github.com/holiman/uint256" - ) -  --var ( -- maxBlobsPerTransaction = params.MaxBlobGasPerBlock / params.BlobTxBlobGasPerBlob --) -+var maxBlobsPerTransaction = params.MaxBlobGasPerBlock / params.BlobTxBlobGasPerBlob -  - // TransactionArgs represents the arguments to construct a new transaction - // or a message call. -@@ -384,7 +382,8 @@ if args.Gas != nil { - gas = uint64(*args.Gas) - } - if globalGasCap != 0 && globalGasCap < gas { -- log.Warn("Caller gas above allowance, capping", "requested", gas, "cap", globalGasCap) -+ // TODO: remove this, but for now it's too noisy -+ // log.Warn("Caller gas above allowance, capping", "requested", gas, "cap", globalGasCap) - gas = globalGasCap - } - var (
(deleted) +0 -5
diff --git flashbots/builder/.dockerignore chainbound/bolt/.dockerignore -deleted file mode 100644 -index 0c013d18b13f26adba32df14f3642b90e048e0d3..0000000000000000000000000000000000000000 ---- flashbots/builder/.dockerignore -+++ /dev/null -@@ -1,5 +0,0 @@ --**/*_test.go -- --build/_workspace --build/_bin --tests/testdata
(deleted) +0 -3
diff --git flashbots/builder/.gitattributes chainbound/bolt/.gitattributes -deleted file mode 100644 -index 0269fab9cba2722fb0a7598ff18bc2ba46c45bed..0000000000000000000000000000000000000000 ---- flashbots/builder/.gitattributes -+++ /dev/null -@@ -1,3 +0,0 @@ --# Auto detect text files and perform LF normalization --* text=auto --*.sol linguist-language=Solidity
(deleted) +0 -4
diff --git flashbots/builder/.github/CODEOWNERS chainbound/bolt/.github/CODEOWNERS -deleted file mode 100644 -index 6cf5893f99c00c40dc6656232c72e3fbd3f05a1c..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/CODEOWNERS -+++ /dev/null -@@ -1,4 +0,0 @@ --# These owners will be the default owners for everything in --# the repo. Unless a later match takes precedence, --# they will be requested for review when someone opens a pull request. --* @dvush @Wazzymandias @TymKh @Ruteri @avalonche
(deleted) +0 -40
diff --git flashbots/builder/.github/CONTRIBUTING.md chainbound/bolt/.github/CONTRIBUTING.md -deleted file mode 100644 -index 969b7f8f9fa1ef4cd87f80bb9df2e80d6cbbc2e8..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/CONTRIBUTING.md -+++ /dev/null -@@ -1,40 +0,0 @@ --# Contributing -- --Thank you for considering to help out with the source code! We welcome --contributions from anyone on the internet, and are grateful for even the --smallest of fixes! -- --If you'd like to contribute to go-ethereum, please fork, fix, commit and send a --pull request for the maintainers to review and merge into the main code base. If --you wish to submit more complex changes though, please check up with the core --devs first on [our gitter channel](https://gitter.im/ethereum/go-ethereum) to --ensure those changes are in line with the general philosophy of the project --and/or get some early feedback which can make both your efforts much lighter as --well as our review and merge procedures quick and simple. -- --## Coding guidelines -- --Please make sure your contributions adhere to our coding guidelines: -- -- * Code must adhere to the official Go --[formatting](https://golang.org/doc/effective_go.html#formatting) guidelines --(i.e. uses [gofmt](https://golang.org/cmd/gofmt/)). -- * Code must be documented adhering to the official Go --[commentary](https://golang.org/doc/effective_go.html#commentary) guidelines. -- * Pull requests need to be based on and opened against the `master` branch. -- * Commit messages should be prefixed with the package(s) they modify. -- * E.g. "eth, rpc: make trace configs optional" -- --## Can I have feature X -- --Before you submit a feature request, please check and make sure that it isn't --possible through some other means. The JavaScript-enabled console is a powerful --feature in the right hands. Please check our --[Geth documentation page](https://geth.ethereum.org/docs/) for more info --and help. -- --## Configuration, dependencies, and tests -- --Please see the [Developers' Guide](https://geth.ethereum.org/docs/developers/geth-developer/dev-guide) --for more details on configuring your environment, managing project dependencies --and testing procedures.
(deleted) +0 -31
diff --git flashbots/builder/.github/ISSUE_TEMPLATE/bug.md chainbound/bolt/.github/ISSUE_TEMPLATE/bug.md -deleted file mode 100644 -index 45bfd986ac6e38ec7364ce8473b663eee97628c5..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/ISSUE_TEMPLATE/bug.md -+++ /dev/null -@@ -1,31 +0,0 @@ ----- --name: Report a bug --about: Something with go-ethereum is not working as expected --title: '' --labels: 'type:bug' --assignees: '' ----- -- --#### System information -- --Geth version: `geth version` --CL client & version: e.g. lighthouse/nimbus/prysm@v1.0.0 --OS & Version: Windows/Linux/OSX --Commit hash : (if `develop`) -- --#### Expected behaviour -- -- --#### Actual behaviour -- -- --#### Steps to reproduce the behaviour -- -- --#### Backtrace -- --```` --[backtrace] --```` -- --When submitting logs: please submit them as text and not screenshots.
(deleted) +0 -16
diff --git flashbots/builder/.github/ISSUE_TEMPLATE/feature.md chainbound/bolt/.github/ISSUE_TEMPLATE/feature.md -deleted file mode 100644 -index aacd885f9e5ef7de4eaa833c9e67297db24a85e2..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/ISSUE_TEMPLATE/feature.md -+++ /dev/null -@@ -1,17 +0,0 @@ ----- --name: Request a feature --about: Report a missing feature - e.g. as a step before submitting a PR --title: '' --labels: 'type:feature' --assignees: '' ----- -- --# Rationale -- --Why should this feature exist? --What are the use-cases? -- --# Implementation -- --Do you have ideas regarding the implementation of this feature? --Are you willing to implement this feature? -\ No newline at end of file
(deleted) +0 -9
diff --git flashbots/builder/.github/ISSUE_TEMPLATE/question.md chainbound/bolt/.github/ISSUE_TEMPLATE/question.md -deleted file mode 100644 -index 8f460ab558ecc6930b0f1c348c08bdde31fe2b2b..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/ISSUE_TEMPLATE/question.md -+++ /dev/null -@@ -1,9 +0,0 @@ ----- --name: Ask a question --about: Something is unclear --title: '' --labels: 'type:docs' --assignees: '' ----- -- --This should only be used in very rare cases e.g. if you are not 100% sure if something is a bug or asking a question that leads to improving the documentation. For general questions please use [discord](https://discord.gg/nthXNEv) or the Ethereum stack exchange at https://ethereum.stackexchange.com.
(deleted) +0 -11
diff --git flashbots/builder/.github/no-response.yml chainbound/bolt/.github/no-response.yml -deleted file mode 100644 -index 903d4ce85f350f737c2049d272fae414a9ac148d..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/no-response.yml -+++ /dev/null -@@ -1,11 +0,0 @@ --# Number of days of inactivity before an Issue is closed for lack of response --daysUntilClose: 30 --# Label requiring a response --responseRequiredLabel: "need:more-information" --# Comment to post when closing an Issue for lack of response. Set to `false` to disable --closeComment: > -- This issue has been automatically closed because there has been no response -- to our request for more information from the original author. With only the -- information that is currently in the issue, we don't have enough information -- to take action. Please reach out if you have more relevant information or -- answers to our questions so that we can investigate further.
(deleted) +0 -11
diff --git flashbots/builder/.github/pull_request_template.md chainbound/bolt/.github/pull_request_template.md -deleted file mode 100644 -index 15f903e765a605b824f908f266e78ee9a97ff476..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/pull_request_template.md -+++ /dev/null -@@ -1,11 +0,0 @@ --## 📝 Summary -- --<!--- A general summary of your changes --> -- --## 📚 References -- --<!-- Any interesting external links to documentation, articles, tweets which add value to the PR --> -- ----- -- --* [ ] I have seen and agree to [`CONTRIBUTING.md`](https://github.com/flashbots/builder/blob/main/CONTRIBUTING.md)
(deleted) +0 -17
diff --git flashbots/builder/.github/stale.yml chainbound/bolt/.github/stale.yml -deleted file mode 100644 -index 6d921cc795ff45352aafb16efb6130f50e470b54..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/stale.yml -+++ /dev/null -@@ -1,17 +0,0 @@ --# Number of days of inactivity before an issue becomes stale --daysUntilStale: 366 --# Number of days of inactivity before a stale issue is closed --daysUntilClose: 42 --# Issues with these labels will never be considered stale --exemptLabels: -- - pinned -- - security --# Label to use when marking an issue as stale --staleLabel: "status:inactive" --# Comment to post when marking an issue as stale. Set to `false` to disable --markComment: > -- This issue has been automatically marked as stale because it has not had -- recent activity. It will be closed if no further activity occurs. Thank you -- for your contributions. --# Comment to post when closing a stale issue. Set to `false` to disable --closeComment: false
(deleted) +0 -57
diff --git flashbots/builder/.github/workflows/go.yml chainbound/bolt/.github/workflows/go.yml -deleted file mode 100644 -index 80d75bd7f9fc7470020e40bb60e22c8fe9e77b73..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/workflows/go.yml -+++ /dev/null -@@ -1,57 +0,0 @@ --name: Go -- --on: -- push: -- branches: [ main ] -- pull_request: -- --env: -- CGO_CFLAGS_ALLOW: "-O -D__BLST_PORTABLE__" -- CGO_CFLAGS: "-O -D__BLST_PORTABLE__" -- --jobs: -- -- lint: -- name: Lint -- runs-on: ubuntu-latest -- steps: -- - name: Set up Go -- uses: actions/setup-go@v3 -- with: -- go-version: ^1.21.4 -- id: go -- -- - name: Check out code into the Go module directory -- uses: actions/checkout@v2 -- -- - name: Install golangci-lint -- run: go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.2 -- -- - name: Lint -- run: make lint -- -- - name: Ensure go mod tidy runs without changes -- run: | -- go mod tidy -- git diff-index HEAD -- git diff-index --quiet HEAD -- -- build: -- name: Build -- runs-on: ubuntu-latest -- steps: -- -- - name: Set up Go 1.x -- uses: actions/setup-go@v3 -- with: -- go-version: 1.21.4 -- id: go -- -- - name: Check out code into the Go module directory -- uses: actions/checkout@v2 -- -- - name: Test -- run: go test ./core ./miner/... ./internal/ethapi/... ./builder/... ./eth/block-validation/... -- -- - name: Build -- run: make geth
(deleted) +0 -74
diff --git flashbots/builder/.github/workflows/release.yml chainbound/bolt/.github/workflows/release.yml -deleted file mode 100644 -index 3d056e59cc9b58d7dfd90d5292ed19d8a1dfa622..0000000000000000000000000000000000000000 ---- flashbots/builder/.github/workflows/release.yml -+++ /dev/null -@@ -1,74 +0,0 @@ --name: Release -- --on: -- push: -- tags: -- - 'v*' -- --jobs: -- docker-image: -- name: Publish Docker Image -- runs-on: ubuntu-latest -- -- steps: -- - name: Checkout sources -- uses: actions/checkout@v2 -- -- - name: Get tag version -- run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV -- -- - name: Print version -- run: | -- echo $RELEASE_VERSION -- echo ${{ env.RELEASE_VERSION }} -- -- - name: Set up QEMU -- uses: docker/setup-qemu-action@v2 -- -- - name: Set up Docker Buildx -- uses: docker/setup-buildx-action@v2 -- -- - name: Extract metadata (tags, labels) for Docker -- id: meta -- uses: docker/metadata-action@v4 -- with: -- images: flashbots/builder -- tags: | -- type=sha -- type=pep440,pattern={{version}} -- type=pep440,pattern={{major}}.{{minor}} -- type=raw,value=latest -- -- - name: Login to DockerHub -- uses: docker/login-action@v2 -- with: -- username: ${{ secrets.FLASHBOTS_DOCKERHUB_USERNAME }} -- password: ${{ secrets.FLASHBOTS_DOCKERHUB_TOKEN }} -- -- - name: Build and push -- uses: docker/build-push-action@v3 -- with: -- context: . -- push: true -- build-args: | -- VERSION=${{ env.RELEASE_VERSION }} -- platforms: linux/amd64,linux/arm64 -- tags: ${{ steps.meta.outputs.tags }} -- labels: ${{ steps.meta.outputs.labels }} -- -- github-release: -- runs-on: ubuntu-latest -- steps: -- - name: Checkout sources -- uses: actions/checkout@v2 -- -- - name: Create release -- id: create_release -- uses: actions/create-release@v1 -- env: -- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -- with: -- tag_name: ${{ github.ref }} -- release_name: ${{ github.ref }} -- draft: true -- prerelease: false
(deleted) +0 -54
diff --git flashbots/builder/.gitignore chainbound/bolt/.gitignore -deleted file mode 100644 -index 7b1908dc5cd79d574e39053a78c951ab55507460..0000000000000000000000000000000000000000 ---- flashbots/builder/.gitignore -+++ /dev/null -@@ -1,54 +0,0 @@ --# See http://help.github.com/ignore-files/ for more about ignoring files. --# --# If you find yourself ignoring temporary files generated by your text editor --# or operating system, you probably want to add a global ignore instead: --# git config --global core.excludesfile ~/.gitignore_global -- --/tmp --*/**/*un~ --*/**/*.test --*un~ --.DS_Store --*/**/.DS_Store --.ethtest --*/**/*tx_database* --*/**/*dapps* --build/_vendor/pkg -- --#* --.#* --*# --*~ --.project --.settings -- --# used by the Makefile --/build/_workspace/ --/build/cache/ --/build/bin/ --/geth*.zip -- --# travis --profile.tmp --profile.cov -- --# IdeaIDE --.idea -- --# VS Code --.vscode -- --# dashboard --/dashboard/assets/flow-typed --/dashboard/assets/node_modules --/dashboard/assets/stats.json --/dashboard/assets/bundle.js --/dashboard/assets/bundle.js.map --/dashboard/assets/package-lock.json -- --**/yarn-error.log --logs/ --/ofac_blacklist.json --/blacklist.json -- --tests/spec-tests/
(deleted) +0 -8
diff --git flashbots/builder/.gitmodules chainbound/bolt/.gitmodules -deleted file mode 100644 -index 241c169c4772ce246ffa45f7fa8a63019ffea0e1..0000000000000000000000000000000000000000 ---- flashbots/builder/.gitmodules -+++ /dev/null -@@ -1,8 +0,0 @@ --[submodule "tests"] -- path = tests/testdata -- url = https://github.com/ethereum/tests -- shallow = true --[submodule "evm-benchmarks"] -- path = tests/evm-benchmarks -- url = https://github.com/ipsilon/evm-benchmarks -- shallow = true
(deleted) +0 -60
diff --git flashbots/builder/.golangci.yml chainbound/bolt/.golangci.yml -deleted file mode 100644 -index 0343c4b4ebf2eec8adc0a473a2892a4100b3a86a..0000000000000000000000000000000000000000 ---- flashbots/builder/.golangci.yml -+++ /dev/null -@@ -1,60 +0,0 @@ --# This file configures github.com/golangci/golangci-lint. -- --run: -- timeout: 20m -- tests: true -- # default is true. Enables skipping of directories: -- # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ -- skip-dirs-use-default: true -- skip-files: -- - core/genesis_alloc.go -- --linters: -- disable-all: true -- enable: -- - goimports -- - gosimple -- - govet -- - ineffassign -- - misspell -- - unconvert -- - typecheck -- - unused -- - staticcheck -- - bidichk -- - durationcheck -- - exportloopref -- - whitespace -- -- # - structcheck # lots of false positives -- # - errcheck #lot of false positives -- # - contextcheck -- # - errchkjson # lots of false positives -- # - errorlint # this check crashes -- # - exhaustive # silly check -- # - makezero # false positives -- # - nilerr # several intentional -- --linters-settings: -- gofmt: -- simplify: true -- --issues: -- exclude-rules: -- - path: crypto/bn256/cloudflare/optate.go -- linters: -- - deadcode -- - staticcheck -- - path: internal/build/pgp.go -- text: 'SA1019: "golang.org/x/crypto/openpgp" is deprecated: this package is unmaintained except for security fixes.' -- - path: core/vm/contracts.go -- text: 'SA1019: "golang.org/x/crypto/ripemd160" is deprecated: RIPEMD-160 is a legacy hash and should not be used for new applications.' -- - path: accounts/usbwallet/trezor.go -- text: 'SA1019: "github.com/golang/protobuf/proto" is deprecated: Use the "google.golang.org/protobuf/proto" package instead.' -- - path: accounts/usbwallet/trezor/ -- text: 'SA1019: "github.com/golang/protobuf/proto" is deprecated: Use the "google.golang.org/protobuf/proto" package instead.' -- exclude: -- - 'SA1019: event.TypeMux is deprecated: use Feed' -- - 'SA1019: strings.Title is deprecated' -- - 'SA1019: strings.Title has been deprecated since Go 1.18 and an alternative has been available since Go 1.0: The rule Title uses for word boundaries does not handle Unicode punctuation properly. Use golang.org/x/text/cases instead.' -- - 'SA1029: should not use built-in type string as key for value'
(deleted) +0 -237
diff --git flashbots/builder/.mailmap chainbound/bolt/.mailmap -deleted file mode 100644 -index aa074b76d6b59639cf48cdb408e418a0b7c05eca..0000000000000000000000000000000000000000 ---- flashbots/builder/.mailmap -+++ /dev/null -@@ -1,237 +0,0 @@ --Aaron Buchwald <aaron.buchwald56@gmail.com> -- --Aaron Kumavis <kumavis@users.noreply.github.com> -- --Abel Nieto <abel.nieto90@gmail.com> --Abel Nieto <abel.nieto90@gmail.com> <anietoro@uwaterloo.ca> -- --Afri Schoedon <58883403+q9f@users.noreply.github.com> --Afri Schoedon <5chdn@users.noreply.github.com> <58883403+q9f@users.noreply.github.com> -- --Alec Perseghin <aperseghin@gmail.com> -- --Aleksey Smyrnov <i@soar.name> -- --Alex Leverington <alex@ethdev.com> --Alex Leverington <alex@ethdev.com> <subtly@users.noreply.github.com> -- --Alex Pozhilenkov <alex_pozhilenkov@adoriasoft.com> --Alex Pozhilenkov <alex_pozhilenkov@adoriasoft.com> <leshiy12345678@gmail.com> -- --Alexey Akhunov <akhounov@gmail.com> -- --Alon Muroch <alonmuroch@gmail.com> -- --Andrey Petrov <shazow@gmail.com> --Andrey Petrov <shazow@gmail.com> <andrey.petrov@shazow.net> -- --Arkadiy Paronyan <arkadiy@ethdev.com> -- --Armin Braun <me@obrown.io> -- --Aron Fischer <github@aron.guru> <homotopycolimit@users.noreply.github.com> -- --Austin Roberts <code@ausiv.com> --Austin Roberts <code@ausiv.com> <git@ausiv.com> -- --Bas van Kervel <bas@ethdev.com> --Bas van Kervel <bas@ethdev.com> <basvankervel@ziggo.nl> --Bas van Kervel <bas@ethdev.com> <basvankervel@gmail.com> --Bas van Kervel <bas@ethdev.com> <bas-vk@users.noreply.github.com> -- --Boqin Qin <bobbqqin@bupt.edu.cn> --Boqin Qin <bobbqqin@bupt.edu.cn> <Bobbqqin@gmail.com> -- --Casey Detrio <cdetrio@gmail.com> -- --Cheng Li <lob4tt@gmail.com> -- --Chris Ziogas <ziogaschr@gmail.com> --Chris Ziogas <ziogaschr@gmail.com> <ziogas_chr@hotmail.com> -- --Christoph Jentzsch <jentzsch.software@gmail.com> -- --Diederik Loerakker <proto@protolambda.com> -- --Dimitry Khokhlov <winsvega@mail.ru> -- --Domino Valdano <dominoplural@gmail.com> --Domino Valdano <dominoplural@gmail.com> <jeff@okcupid.com> -- --Edgar Aroutiounian <edgar.factorial@gmail.com> -- --Elliot Shepherd <elliot@identitii.com> -- --Enrique Fynn <enriquefynn@gmail.com> -- --Enrique Fynn <me@enriquefynn.com> --Enrique Fynn <me@enriquefynn.com> <enriquefynn@gmail.com> -- --Ernesto del Toro <ernesto.deltoro@gmail.com> --Ernesto del Toro <ernesto.deltoro@gmail.com> <ernestodeltoro@users.noreply.github.com> -- --Everton Fraga <ev@ethereum.org> -- --Felix Lange <fjl@twurst.com> --Felix Lange <fjl@twurst.com> <fjl@users.noreply.github.com> -- --Frank Wang <eternnoir@gmail.com> -- --Gary Rong <garyrong0905@gmail.com> -- --Gavin Wood <i@gavwood.com> -- --Gregg Dourgarian <greggd@tempworks.com> -- --Guillaume Ballet <gballet@gmail.com> --Guillaume Ballet <gballet@gmail.com> <3272758+gballet@users.noreply.github.com> -- --Guillaume Nicolas <guin56@gmail.com> -- --Hanjiang Yu <delacroix.yu@gmail.com> --Hanjiang Yu <delacroix.yu@gmail.com> <42531996+de1acr0ix@users.noreply.github.com> -- --Heiko Hees <heiko@heiko.org> -- --Henning Diedrich <hd@eonblast.com> --Henning Diedrich <hd@eonblast.com> Drake Burroughs <wildfyre@hotmail.com> -- --Hwanjo Heo <34005989+hwanjo@users.noreply.github.com> -- --Iskander (Alex) Sharipov <quasilyte@gmail.com> --Iskander (Alex) Sharipov <quasilyte@gmail.com> <i.sharipov@corp.vk.com> -- --Jae Kwon <jkwon.work@gmail.com> -- --Janoš Guljaš <janos@resenje.org> <janos@users.noreply.github.com> --Janoš Guljaš <janos@resenje.org> Janos Guljas <janos@resenje.org> -- 
--Jared Wasinger <j-wasinger@hotmail.com> -- --Jason Carver <jacarver@linkedin.com> --Jason Carver <jacarver@linkedin.com> <ut96caarrs@snkmail.com> -- --Javier Peletier <jm@epiclabs.io> --Javier Peletier <jm@epiclabs.io> <jpeletier@users.noreply.github.com> -- --Jeffrey Wilcke <jeffrey@ethereum.org> --Jeffrey Wilcke <jeffrey@ethereum.org> <geffobscura@gmail.com> --Jeffrey Wilcke <jeffrey@ethereum.org> <obscuren@obscura.com> --Jeffrey Wilcke <jeffrey@ethereum.org> <obscuren@users.noreply.github.com> -- --Jens Agerberg <github@agerberg.me> -- --Joseph Chow <ethereum@outlook.com> --Joseph Chow <ethereum@outlook.com> ethers <TODO> -- -- --Joseph Goulden <joegoulden@gmail.com> -- --Justin Drake <drakefjustin@gmail.com> -- --Kenso Trabing <ktrabing@acm.org> --Kenso Trabing <ktrabing@acm.org> <kenso.trabing@bloomwebsite.com> -- --Liang Ma <liangma@liangbit.com> --Liang Ma <liangma@liangbit.com> <liangma.ul@gmail.com> -- --Louis Holbrook <dev@holbrook.no> --Louis Holbrook <dev@holbrook.no> <nolash@users.noreply.github.com> -- --Maran Hidskes <maran.hidskes@gmail.com> -- --Marian Oancea <contact@siteshop.ro> -- --Martin Becze <mjbecze@gmail.com> --Martin Becze <mjbecze@gmail.com> <wanderer@users.noreply.github.com> -- --Martin Lundfall <martin.lundfall@protonmail.com> -- --Matt Garnett <14004106+lightclient@users.noreply.github.com> -- --Matthew Halpern <matthalp@gmail.com> --Matthew Halpern <matthalp@gmail.com> <matthalp@google.com> -- --Michael Riabzev <michael@starkware.co> -- --Nchinda Nchinda <nchinda2@gmail.com> -- --Nick Dodson <silentcicero@outlook.com> -- --Nick Johnson <arachnid@notdot.net> -- --Nick Savers <nicksavers@gmail.com> -- --Nishant Das <nishdas93@gmail.com> --Nishant Das <nishdas93@gmail.com> <nish1993@hotmail.com> -- --Olivier Hervieu <olivier.hervieu@gmail.com> -- --Pascal Dierich <pascal@merkleplant.xyz> --Pascal Dierich <pascal@merkleplant.xyz> <pascal@pascaldierich.com> -- --RJ Catalano <catalanor0220@gmail.com> --RJ Catalano <catalanor0220@gmail.com> <rj@erisindustries.com> -- --Ralph Caraveo <deckarep@gmail.com> -- --Rene Lubov <41963722+renaynay@users.noreply.github.com> -- --Robert Zaremba <robert@zaremba.ch> --Robert Zaremba <robert@zaremba.ch> <robert.zaremba@scale-it.pl> -- --Roman Mandeleil <roman.mandeleil@gmail.com> -- --Sorin Neacsu <sorin.neacsu@gmail.com> --Sorin Neacsu <sorin.neacsu@gmail.com> <sorin@users.noreply.github.com> -- --Sven Ehlert <sven@ethdev.com> -- --Taylor Gerring <taylor.gerring@gmail.com> --Taylor Gerring <taylor.gerring@gmail.com> <taylor.gerring@ethereum.org> -- --Thomas Bocek <tom@tomp2p.net> -- --Tim Cooijmans <timcooijmans@gmail.com> -- --Valentin Wüstholz <wuestholz@gmail.com> --Valentin Wüstholz <wuestholz@gmail.com> <wuestholz@users.noreply.github.com> -- --Victor Tran <vu.tran54@gmail.com> -- --Viktor Trón <viktor.tron@gmail.com> -- --Ville Sundell <github@solarius.fi> -- --Vincent G <caktux@gmail.com> -- --Vitalik Buterin <v@buterin.com> -- --Vlad Gluhovsky <gluk256@gmail.com> --Vlad Gluhovsky <gluk256@gmail.com> <gluk256@users.noreply.github.com> -- --Wenshao Zhong <wzhong20@uic.edu> --Wenshao Zhong <wzhong20@uic.edu> <11510383@mail.sustc.edu.cn> --Wenshao Zhong <wzhong20@uic.edu> <374662347@qq.com> -- --Will Villanueva <hello@willvillanueva.com> -- --Xiaobing Jiang <s7v7nislands@gmail.com> -- --Xudong Liu <33193253+r1cs@users.noreply.github.com> -- --Yohann Léon <sybiload@gmail.com> -- --Zachinquarantine <Zachinquarantine@protonmail.com> --Zachinquarantine <Zachinquarantine@protonmail.com> <zachinquarantine@yahoo.com> -- --Ziyuan 
Zhong <zzy.albert@163.com> -- --Zsolt Felföldi <zsfelfoldi@gmail.com> -- --meowsbits <b5c6@protonmail.com> --meowsbits <b5c6@protonmail.com> <45600330+meowsbits@users.noreply.github.com> -- --nedifi <103940716+nedifi@users.noreply.github.com> -- --Максим Чусовлянов <mchusovlianov@gmail.com>
(deleted) +0 -168
diff --git flashbots/builder/.travis.yml chainbound/bolt/.travis.yml -deleted file mode 100644 -index a55583a703febc6a861a5b41b8d70352724eb02f..0000000000000000000000000000000000000000 ---- flashbots/builder/.travis.yml -+++ /dev/null -@@ -1,168 +0,0 @@ --language: go --go_import_path: github.com/ethereum/go-ethereum --sudo: false --jobs: -- allow_failures: -- - stage: build -- os: osx -- env: -- - azure-osx -- -- include: -- # These builders create the Docker sub-images for multi-arch push and each -- # will attempt to push the multi-arch image if they are the last builder -- - stage: build -- if: type = push -- os: linux -- arch: amd64 -- dist: bionic -- go: 1.21.x -- env: -- - docker -- services: -- - docker -- git: -- submodules: false # avoid cloning ethereum/tests -- before_install: -- - export DOCKER_CLI_EXPERIMENTAL=enabled -- script: -- - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go -- -- - stage: build -- if: type = push -- os: linux -- arch: arm64 -- dist: bionic -- go: 1.21.x -- env: -- - docker -- services: -- - docker -- git: -- submodules: false # avoid cloning ethereum/tests -- before_install: -- - export DOCKER_CLI_EXPERIMENTAL=enabled -- script: -- - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go -- -- # This builder does the Linux Azure uploads -- - stage: build -- if: type = push -- os: linux -- dist: bionic -- sudo: required -- go: 1.21.x -- env: -- - azure-linux -- git: -- submodules: false # avoid cloning ethereum/tests -- addons: -- apt: -- packages: -- - gcc-multilib -- script: -- # Build for the primary platforms that Trusty can manage -- - go run build/ci.go install -dlgo -- - go run build/ci.go archive -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds -- - go run build/ci.go install -dlgo -arch 386 -- - go run build/ci.go archive -arch 386 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds -- -- # Switch over GCC to cross compilation (breaks 386, hence why do it here only) -- - sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install gcc-arm-linux-gnueabi libc6-dev-armel-cross gcc-arm-linux-gnueabihf libc6-dev-armhf-cross gcc-aarch64-linux-gnu libc6-dev-arm64-cross -- - sudo ln -s /usr/include/asm-generic /usr/include/asm -- -- - GOARM=5 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc -- - GOARM=5 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds -- - GOARM=6 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc -- - GOARM=6 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds -- - GOARM=7 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabihf-gcc -- - GOARM=7 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds -- - go run build/ci.go install -dlgo -arch arm64 -cc aarch64-linux-gnu-gcc -- - go run build/ci.go archive -arch arm64 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds -- -- # This builder does the OSX Azure uploads -- - stage: build -- if: type = push -- os: osx -- osx_image: xcode14.2 -- go: 1.21.x -- env: -- - azure-osx -- git: -- submodules: false # avoid cloning ethereum/tests -- script: -- - go run build/ci.go install -dlgo -- - go run build/ci.go archive -type tar -signer OSX_SIGNING_KEY -signify SIGNIFY_KEY 
-upload gethstore/builds -- - go run build/ci.go install -dlgo -arch arm64 -- - go run build/ci.go archive -arch arm64 -type tar -signer OSX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds -- -- # These builders run the tests -- - stage: build -- os: linux -- arch: amd64 -- dist: bionic -- go: 1.21.x -- script: -- - travis_wait 30 go run build/ci.go test $TEST_PACKAGES -- -- - stage: build -- if: type = pull_request -- os: linux -- arch: arm64 -- dist: bionic -- go: 1.20.x -- script: -- - travis_wait 30 go run build/ci.go test $TEST_PACKAGES -- -- - stage: build -- os: linux -- dist: bionic -- go: 1.20.x -- script: -- - travis_wait 30 go run build/ci.go test $TEST_PACKAGES -- -- # This builder does the Ubuntu PPA nightly uploads -- - stage: build -- if: type = cron || (type = push && tag ~= /^v[0-9]/) -- os: linux -- dist: bionic -- go: 1.21.x -- env: -- - ubuntu-ppa -- git: -- submodules: false # avoid cloning ethereum/tests -- addons: -- apt: -- packages: -- - devscripts -- - debhelper -- - dput -- - fakeroot -- - python-bzrlib -- - python-paramiko -- script: -- - echo '|1|7SiYPr9xl3uctzovOTj4gMwAC1M=|t6ReES75Bo/PxlOPJ6/GsGbTrM0= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA0aKz5UTUndYgIGG7dQBV+HaeuEZJ2xPHo2DS2iSKvUL4xNMSAY4UguNW+pX56nAQmZKIZZ8MaEvSj6zMEDiq6HFfn5JcTlM80UwlnyKe8B8p7Nk06PPQLrnmQt5fh0HmEcZx+JU9TZsfCHPnX7MNz4ELfZE6cFsclClrKim3BHUIGq//t93DllB+h4O9LHjEUsQ1Sr63irDLSutkLJD6RXchjROXkNirlcNVHH/jwLWR5RcYilNX7S5bIkK8NlWPjsn/8Ua5O7I9/YoE97PpO6i73DTGLh5H9JN/SITwCKBkgSDWUt61uPK3Y11Gty7o2lWsBjhBUm2Y38CBsoGmBw==' >> ~/.ssh/known_hosts -- - go run build/ci.go debsrc -upload ethereum/ethereum -sftp-user geth-ci -signer "Go Ethereum Linux Builder <geth-ci@ethereum.org>" -- -- # This builder does the Azure archive purges to avoid accumulating junk -- - stage: build -- if: type = cron -- os: linux -- dist: bionic -- go: 1.21.x -- env: -- - azure-purge -- git: -- submodules: false # avoid cloning ethereum/tests -- script: -- - go run build/ci.go purge -store gethstore/builds -days 14 -- -- # This builder executes race tests -- - stage: build -- if: type = cron -- os: linux -- dist: bionic -- go: 1.21.x -- script: -- - travis_wait 30 go run build/ci.go test -race $TEST_PACKAGES --
+1 -1
diff --git flashbots/builder/Dockerfile chainbound/bolt/Dockerfile -index ed69a04789678e839186208e04a2483b33b4d68c..c808c9d940fa1c217cea7e417241b53626d233a2 100644 ---- flashbots/builder/Dockerfile -+++ chainbound/bolt/Dockerfile -@@ -4,7 +4,7 @@ ARG VERSION="" - ARG BUILDNUM="" -  - # Build Geth in a stock Go builder container --FROM golang:1.21-alpine as builder -+FROM golang:1.22-alpine AS builder -  - RUN apk add --no-cache gcc musl-dev linux-headers git -
+1 -1
diff --git flashbots/builder/Dockerfile.alltools chainbound/bolt/Dockerfile.alltools -index c317da25fa4870b8fd2189ccf0a679ddbe87384a..ddffb8ee1d1c4da5448c9ddbe845b0fe7fc16844 100644 ---- flashbots/builder/Dockerfile.alltools -+++ chainbound/bolt/Dockerfile.alltools -@@ -4,7 +4,7 @@ ARG VERSION="" - ARG BUILDNUM="" -  - # Build Geth in a stock Go builder container --FROM golang:1.21-alpine as builder -+FROM golang:1.22-alpine AS builder -  - RUN apk add --no-cache gcc musl-dev linux-headers git -
+7 -3
diff --git flashbots/builder/go.mod chainbound/bolt/go.mod -index 7d6b1540a62cab968e4f54c8ee75f0d8b10df36a..dfe1cc1581108637aa8a11b882fff140e57e2cfc 100644 ---- flashbots/builder/go.mod -+++ chainbound/bolt/go.mod -@@ -1,6 +1,6 @@ - module github.com/ethereum/go-ethereum -  --go 1.20 -+go 1.22 -  - require ( - github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.0 -@@ -15,6 +15,7 @@ github.com/aws/aws-sdk-go-v2/service/route53 v1.30.2 - github.com/btcsuite/btcd/btcec/v2 v2.2.1 - github.com/cenkalti/backoff/v4 v4.2.1 - github.com/cespare/cp v0.1.0 -+ github.com/chainbound/shardmap v0.0.2 - github.com/cloudflare/cloudflare-go v0.79.0 - github.com/cockroachdb/pebble v0.0.0-20230928194634-aa077af62593 - github.com/consensys/gnark-crypto v0.12.1 -@@ -25,7 +26,7 @@ github.com/deckarep/golang-set/v2 v2.1.0 - github.com/dop251/goja v0.0.0-20230806174421-c933cf95e127 - github.com/ethereum/c-kzg-4844 v0.4.0 - github.com/fatih/color v1.15.0 -- github.com/ferranbt/fastssz v0.1.3 -+ github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688 - github.com/fjl/gencodec v0.0.0-20230517082657-f9840df7b83e - github.com/fjl/memsize v0.0.2 - github.com/flashbots/go-boost-utils v1.8.0 -@@ -39,6 +40,7 @@ github.com/golang/protobuf v1.5.3 - github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb - github.com/google/gofuzz v1.2.0 - github.com/google/uuid v1.3.0 -+ github.com/gorilla/handlers v1.5.2 - github.com/gorilla/mux v1.8.0 - github.com/gorilla/websocket v1.4.2 - github.com/grafana/pyroscope-go/godeltaprof v0.1.7 -@@ -84,6 +86,8 @@ gopkg.in/yaml.v3 v3.0.1 - ) -  - require ( -+ github.com/emicklei/dot v1.6.2 // indirect -+ github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/getsentry/sentry-go v0.18.0 // indirect - github.com/goccy/go-yaml v1.11.2 // indirect - github.com/klauspost/cpuid/v2 v2.2.5 // indirect -@@ -147,7 +151,7 @@ github.com/mitchellh/pointerstructure v1.2.0 // indirect - github.com/mmcloughlin/addchain v0.4.0 // indirect - github.com/naoina/toml v0.1.1 - github.com/opentracing/opentracing-go v1.2.0 // indirect -- github.com/pkg/errors v0.9.1 // indirect -+ github.com/pkg/errors v0.9.1 - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/prometheus/client_golang v1.16.0 // indirect - github.com/prometheus/client_model v0.3.0 // indirect
+10 -2
diff --git flashbots/builder/go.sum chainbound/bolt/go.sum -index 3c9ff3c8173e1ee07717ea20a9ea6d6292488016..1ab78598f52a4582b536e7b5d6988d85c54dec5b 100644 ---- flashbots/builder/go.sum -+++ chainbound/bolt/go.sum -@@ -74,6 +74,8 @@ github.com/cespare/cp v0.1.0 h1:SE+dxFebS7Iik5LK0tsi1k9ZCxEaFX4AjQmoyA+1dJk= - github.com/cespare/cp v0.1.0/go.mod h1:SOGHArjBr4JWaSDEVpWpo/hNg6RoKrls6Oh40hiwW+s= - github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= - github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -+github.com/chainbound/shardmap v0.0.2 h1:yB1weccdm2vC6dnqzzLwPIvyAnRj7815mJWbkPybiYw= -+github.com/chainbound/shardmap v0.0.2/go.mod h1:TBvIzhHyFUbt+oa3UzbijobTUh221st6xIbuki7WzPc= - github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY= - github.com/chzyer/readline v1.5.0/go.mod h1:x22KAscuvRqlLoK9CsoYsmxoXZMMFVyOl86cAH8qUic= - github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -@@ -135,6 +137,8 @@ github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= - github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= - github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= - github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= -+github.com/emicklei/dot v1.6.2 h1:08GN+DD79cy/tzN6uLCT84+2Wk9u+wvqP+Hkx/dIR8A= -+github.com/emicklei/dot v1.6.2/go.mod h1:DeV7GvQtIw4h2u73RKBkkFdvVAz0D9fzeJrgPW6gy/s= - github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= - github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= - github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -@@ -146,8 +150,10 @@ github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= - github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= - github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= - github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= --github.com/ferranbt/fastssz v0.1.3 h1:ZI+z3JH05h4kgmFXdHuR1aWYsgrg7o+Fw7/NCzM16Mo= --github.com/ferranbt/fastssz v0.1.3/go.mod h1:0Y9TEd/9XuFlh7mskMPfXiI2Dkw4Ddg9EyXt1W7MRvE= -+github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= -+github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -+github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688 h1:k70X5h1haHaSbpD/9fcjtvAUEVlRlOKtdpvN7Mzhcv4= -+github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688/go.mod h1:Ea3+oeoRGGLGm5shYAeDgu6PGUlcvQhE2fILyD9+tGg= - github.com/fjl/gencodec v0.0.0-20230517082657-f9840df7b83e h1:bBLctRc7kr01YGvaDfgLbTwjFNW5jdp5y5rj8XXBHfY= - github.com/fjl/gencodec v0.0.0-20230517082657-f9840df7b83e/go.mod h1:AzA8Lj6YtixmJWL+wkKoBGsLWy9gFrAzi4g+5bCKwpY= - github.com/fjl/memsize v0.0.2 h1:27txuSD9or+NZlnOWdKUxeBzTAUkWCVh+4Gf2dWFOzA= -@@ -253,6 +259,8 @@ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= - github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= 
- github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= - github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -+github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE= -+github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w= - github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= - github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= - github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
Bolt Forkdiff

+ +
+ + diff --git a/mev-boost.html b/mev-boost.html new file mode 100755 index 000000000..783cb72e0 --- /dev/null +++ b/mev-boost.html @@ -0,0 +1,7863 @@ + + + + + + + + + + Bolt MEV-Boost + + + + + + + + +
+
+
+
+
+
+ + +
+
+
+
+
+
+
+
+

Bolt MEV-Boost

+
+ +
+
+ diff: + + ignored: +
+
+
+
+
+ +1199 +
+
+ -41 +
+ +
+ +29 +
+
+ -372 +
+
+
+
+ +
+
+

+ This is an overview of the changes made to the canonical + Flashbots MEV-Boost + package to support the + Constraints API. +

+ + + +

+ All the changes are part of the core + server module: +

+
+
+
+
+ + +
+
+

+ The Constraints API spec adds two new endpoints to the Builder API for communicating with PBS relays:
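For reference, the two new routes as they appear in the server/backend.go diff further below:

// Builder API paths added by the Constraints API (copied from the server/backend.go hunk below).
const (
	pathSubmitConstraint    = "/eth/v1/builder/constraints"
	pathGetHeaderWithProofs = "/eth/v1/builder/header_with_proofs/{slot:[0-9]+}/{parent_hash:0x[a-fA-F0-9]+}/{pubkey:0x[a-fA-F0-9]+}"
)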

+ + +
+
+
+
+ + +
+
+

+ We added two new Builder API endpoints to + communicate with PBS Relays, described above. +

+ +

+ In particular, MEV-Boost now also performs proof verification every time a new header is received from a relay. If the inclusion proofs are invalid, the bid is rejected and the block is built through the local fallback mechanism instead.
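A condensed sketch of that check, taken from the handleGetHeaderWithProofs hunk in server/service.go further below (error logging trimmed):

// Inside the per-relay goroutine of handleGetHeaderWithProofs: if the relay
// attached inclusion proofs, verify them against the constraints registered
// for this slot before considering the bid at all.
if responsePayload.Proofs != nil {
	transactionsRoot, err := responsePayload.Bid.TransactionsRoot()
	if err != nil {
		return // cannot extract the transactions root: discard this relay's bid
	}
	if err := m.verifyInclusionProof(transactionsRoot, responsePayload.Proofs, slotUint); err != nil {
		// Invalid proofs: this bid is ignored; if no valid bid is left,
		// the proposer falls back to a locally built block.
		return
	}
}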

+6 -4
+ diff --git + flashbots/mev-boost/server/backend.go + chainbound/bolt/server/backend.go + index + 3309d5410306960b5f5f1b346c4cf678b339f9be..631a587a42f529b3fafc30ca910ef01c33d0f8bf + 100644 + --- + flashbots/mev-boost/server/backend.go + +++ + chainbound/bolt/server/backend.go + @@ -2,10 +2,12 @@ + package server   const ( // Router + paths + - pathStatus = + "/eth/v1/builder/status" + - pathRegisterValidator = + "/eth/v1/builder/validators" + - pathGetHeader = + "/eth/v1/builder/header/{slot:[0-9]+}/{parent_hash:0x[a-fA-F0-9]+}/{pubkey:0x[a-fA-F0-9]+}" + - pathGetPayload = + "/eth/v1/builder/blinded_blocks" + + pathStatus = + "/eth/v1/builder/status" + + pathRegisterValidator = + "/eth/v1/builder/validators" + + pathSubmitConstraint = + "/eth/v1/builder/constraints" + + pathGetHeader = + "/eth/v1/builder/header/{slot:[0-9]+}/{parent_hash:0x[a-fA-F0-9]+}/{pubkey:0x[a-fA-F0-9]+}" + + pathGetHeaderWithProofs = + "/eth/v1/builder/header_with_proofs/{slot:[0-9]+}/{parent_hash:0x[a-fA-F0-9]+}/{pubkey:0x[a-fA-F0-9]+}" + + pathGetPayload = + "/eth/v1/builder/blinded_blocks" +   // // Relay Monitor paths + // pathAuctionTranscript = + "/monitor/v1/transcript" +
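As a rough usage sketch, a sidecar could submit a batch of signed constraints to the new pathSubmitConstraint route like this. Only the endpoint path is taken from the diff above; the listen address is an assumption, and the JSON body reuses the fixture from TestParseConstraints in server/service_test.go further below:

// Hypothetical client-side sketch: POST a BatchedSignedConstraints payload
// to a running mev-boost instance. Address and payload values are illustrative.
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	body := []byte(`[{
	  "message": {
	    "validator_index": 12345,
	    "slot": 8978583,
	    "constraints": [{"tx": "0x02f871018304a5758085025ff11caf82565f94388c818ca8b9251b393131c08a736a67ccb1929787a41bb7ee22b41380c001a0c8630f734aba7acb4275a8f3b0ce831cf0c7c487fd49ee7bcca26ac622a28939a04c3745096fa0130a188fa249289fd9e60f9d6360854820dba22ae779ea6f573f", "index": null}]
	  },
	  "signature": "0x81510b571e22f89d1697545aac01c9ad0c1e7a3e778b3078bef524efae14990e58a6e960a152abd49de2e18d7fd3081c15d5c25867ccfad3d47beef6b39ac24b6b9fbf2cfa91c88f67aff750438a6841ec9e4a06a94ae41410c4f97b75ab284c"
	}]`)

	// Assumed default mev-boost listen address; adjust to your deployment.
	resp, err := http.Post("http://localhost:18550/eth/v1/builder/constraints", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("mev-boost responded with status:", resp.Status)
}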
+173 -5
+ diff --git + flashbots/mev-boost/server/mock_relay.go + chainbound/bolt/server/mock_relay.go + index + fe6c6daa53afe223f6191390a64c833fe06b96f2..fc0695f609472d1ae440d8d9ca2433ec42ad1ca0 + 100644 + --- + flashbots/mev-boost/server/mock_relay.go + +++ + chainbound/bolt/server/mock_relay.go + @@ -16,14 +16,17 @@ + builderApiDeneb + "github.com/attestantio/go-builder-client/api/deneb" + builderApiV1 + "github.com/attestantio/go-builder-client/api/v1" + builderSpec + "github.com/attestantio/go-builder-client/spec" + "github.com/attestantio/go-eth2-client/spec" + + + "github.com/attestantio/go-eth2-client/spec/bellatrix" + "github.com/attestantio/go-eth2-client/spec/capella" + "github.com/attestantio/go-eth2-client/spec/deneb" + "github.com/attestantio/go-eth2-client/spec/phase0" + + utilbellatrix + "github.com/attestantio/go-eth2-client/util/bellatrix" + "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/flashbots/go-boost-utils/bls" + "github.com/flashbots/go-boost-utils/ssz" + "github.com/gorilla/mux" + "github.com/holiman/uint256" + + + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/require" + )   + @@ -54,13 +57,16 @@ + mu sync.Mutex requestCount map[string]int   + // Overriders + - handlerOverrideRegisterValidator func(w + http.ResponseWriter, req *http.Request) + - handlerOverrideGetHeader func(w + http.ResponseWriter, req *http.Request) + - handlerOverrideGetPayload func(w + http.ResponseWriter, req *http.Request) + + handlerOverrideRegisterValidator func(w + http.ResponseWriter, req *http.Request) + + handlerOverrideSubmitConstraint func(w + http.ResponseWriter, req *http.Request) + + handlerOverrideGetHeader func(w + http.ResponseWriter, req *http.Request) + + handlerOverrideGetHeaderWithProofs func(w + http.ResponseWriter, req *http.Request) + + handlerOverrideGetPayload func(w + http.ResponseWriter, req *http.Request) +   // Default responses + placeholders, used if overrider does not exist + - GetHeaderResponse + *builderSpec.VersionedSignedBuilderBid + - GetPayloadResponse + *builderApi.VersionedSubmitBlindedBlockResponse + + GetHeaderResponse + *builderSpec.VersionedSignedBuilderBid + + GetHeaderWithProofsResponse + *BidWithInclusionProofs + + GetPayloadResponse + *builderApi.VersionedSubmitBlindedBlockResponse +   // Server section Server + *httptest.Server + @@ -115,6 +121,8 @@ + r.HandleFunc("/", + m.handleRoot).Methods(http.MethodGet) + r.HandleFunc(pathStatus, + m.handleStatus).Methods(http.MethodGet) + r.HandleFunc(pathRegisterValidator, + m.handleRegisterValidator).Methods(http.MethodPost) + r.HandleFunc(pathGetHeader, + m.handleGetHeader).Methods(http.MethodGet) + + r.HandleFunc(pathGetHeaderWithProofs, + m.handleGetHeaderWithProofs).Methods(http.MethodGet) + + r.HandleFunc(pathSubmitConstraint, + m.handleSubmitConstraint).Methods(http.MethodPost) + r.HandleFunc(pathGetPayload, + m.handleGetPayload).Methods(http.MethodPost) +   return m.newTestMiddleware(r) + @@ -164,6 +172,62 @@ + w.Header().Set("Content-Type", + "application/json") + w.WriteHeader(http.StatusOK) }   + +func (m *mockRelay) handleSubmitConstraint(w + http.ResponseWriter, req *http.Request) + { + + m.mu.Lock() + + defer m.mu.Unlock() + + if m.handlerOverrideSubmitConstraint != nil + { + + m.handlerOverrideSubmitConstraint(w, + req) + + return + + } + + m.defaultHandleSubmitConstraint(w, + req) + +} + + + +func (m *mockRelay) + defaultHandleSubmitConstraint(w + http.ResponseWriter, req *http.Request) + { + + payload := BatchedSignedConstraints{} + + if err := DecodeJSON(req.Body, + 
&payload); err != nil { + + http.Error(w, err.Error(), + http.StatusBadRequest) + + return + + } + + + + w.Header().Set("Content-Type", + "application/json") + + w.WriteHeader(http.StatusOK) + +} + + + +func (m *mockRelay) + MakeGetHeaderWithConstraintsResponse(value + uint64, blockHash, parentHash, publicKey + string, version spec.DataVersion, constraints + []struct { + + tx Transaction + + hash phase0.Hash32 + +}, + +) *BidWithInclusionProofs { + + transactions := + new(utilbellatrix.ExecutionPayloadTransactions) + + + + for _, con := range constraints { + + transactions.Transactions = + append(transactions.Transactions, + bellatrix.Transaction(con.tx)) + + } + + + + rootNode, err := + transactions.GetTree() + + if err != nil { + + panic(err) + + } + + + + // BOLT: Set the value of nodes. + This is MANDATORY for the proof + calculation + + // to output the leaf correctly. + This is also never documented in fastssz. + -__- + + // Also calculates the + transactions_root + + txsRoot := rootNode.Hash() + + + + bidWithProofs := + m.MakeGetHeaderWithProofsResponseWithTxsRoot(value, + blockHash, parentHash, publicKey, version, + phase0.Root(txsRoot)) + + + + // Calculate the inclusion + proof + + inclusionProof, err := + CalculateMerkleMultiProofs(rootNode, + constraints) + + if err != nil { + + logrus.WithError(err).Error("failed to + calculate inclusion proof") + + return nil + + } + + + + bidWithProofs.Proofs = inclusionProof + + + + return bidWithProofs + +} + + + // MakeGetHeaderResponse is used to + create the default or can be used to create a + custom response to the getHeader // + method func (m *mockRelay) + MakeGetHeaderResponse(value uint64, blockHash, + parentHash, publicKey string, version + spec.DataVersion) + *builderSpec.VersionedSignedBuilderBid { + @@ -192,6 +256,7 @@ + Signature: signature, }, } case + spec.DataVersionDeneb: + + + message := &builderApiDeneb.BuilderBid{ + Header: &deneb.ExecutionPayloadHeader{ + BlockHash: _HexToHash(blockHash), + @@ -221,6 +286,70 @@ + } return nil }   + +// + MakeGetHeaderWithProofsResponseWithTxsRoot is + used to create the default or can be used to + create a custom response to the + getHeaderWithProofs + +// method + +func (m *mockRelay) + MakeGetHeaderWithProofsResponseWithTxsRoot(value + uint64, blockHash, parentHash, publicKey + string, version spec.DataVersion, txsRoot + phase0.Root) *BidWithInclusionProofs { + + switch version { + + case spec.DataVersionCapella: + + // Fill the payload with custom + values. + + message := + &builderApiCapella.BuilderBid{ + + Header: + &capella.ExecutionPayloadHeader{ + + BlockHash: _HexToHash(blockHash), + + ParentHash: _HexToHash(parentHash), + + WithdrawalsRoot: phase0.Root{}, + + TransactionsRoot: txsRoot, + + }, + + Value: uint256.NewInt(value), + + Pubkey: _HexToPubkey(publicKey), + + } + + + + // Sign the message. 
+ + signature, err := ssz.SignMessage(message, + ssz.DomainBuilder, m.secretKey) + + require.NoError(m.t, err) + + + + return &BidWithInclusionProofs{ + + Bid: + &builderSpec.VersionedSignedBuilderBid{ + + Version: spec.DataVersionCapella, + + Capella: + &builderApiCapella.SignedBuilderBid{ + + Message: message, + + Signature: signature, + + }, + + }, + + } + + case spec.DataVersionDeneb: + + + + message := + &builderApiDeneb.BuilderBid{ + + Header: + &deneb.ExecutionPayloadHeader{ + + BlockHash: _HexToHash(blockHash), + + ParentHash: _HexToHash(parentHash), + + WithdrawalsRoot: phase0.Root{}, + + BaseFeePerGas: uint256.NewInt(0), + + TransactionsRoot: txsRoot, + + }, + + BlobKZGCommitments: + make([]deneb.KZGCommitment, 0), + + Value: uint256.NewInt(value), + + Pubkey: _HexToPubkey(publicKey), + + } + + + + // Sign the message. + + signature, err := ssz.SignMessage(message, + ssz.DomainBuilder, m.secretKey) + + require.NoError(m.t, err) + + + + return &BidWithInclusionProofs{ + + Bid: + &builderSpec.VersionedSignedBuilderBid{ + + Version: spec.DataVersionDeneb, + + Deneb: + &builderApiDeneb.SignedBuilderBid{ + + Message: message, + + Signature: signature, + + }, + + }, + + } + + case spec.DataVersionUnknown, + spec.DataVersionPhase0, + spec.DataVersionAltair, + spec.DataVersionBellatrix: + + return nil + + } + + return nil + +} + + + // handleGetHeader handles incoming + requests to server.pathGetHeader func (m + *mockRelay) handleGetHeader(w + http.ResponseWriter, req *http.Request) { + m.mu.Lock() + @@ -247,8 +376,47 @@ + "0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7", + "0x8a1d7b8dd64e0aafe7ea7b6c95065c9364cf99d38470c12ee807d55f7de1529ad29ce2c422e0b65e3d5a05c02caca249", + spec.DataVersionCapella, ) + + + if m.GetHeaderResponse != nil { response = + m.GetHeaderResponse + + } + + + + if err := + json.NewEncoder(w).Encode(response); err != + nil { + + http.Error(w, err.Error(), + http.StatusInternalServerError) + + return + + } + +} + + + +// handleGetHeaderWithProofs handles + incoming requests to + server.pathGetHeader + +func (m *mockRelay) + handleGetHeaderWithProofs(w + http.ResponseWriter, req *http.Request) + { + + m.mu.Lock() + + defer m.mu.Unlock() + + // Try to override default behavior + is custom handler is specified. + + if m.handlerOverrideGetHeader != nil + { + + m.handlerOverrideGetHeaderWithProofs(w, + req) + + return + + } + + m.defaultHandleGetHeaderWithProofs(w) + +} + + + +// defaultHandleGetHeaderWithProofs + returns the default handler for + handleGetHeaderWithProofs + +func (m *mockRelay) + defaultHandleGetHeaderWithProofs(w + http.ResponseWriter) { + + // By default, everything will be + ok. + + w.Header().Set("Content-Type", + "application/json") + + w.WriteHeader(http.StatusOK) + + + + // Build the default + response. + + response := + m.MakeGetHeaderWithConstraintsResponse( + + 12345, + + + "0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7", + + + "0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7", + + + "0x8a1d7b8dd64e0aafe7ea7b6c95065c9364cf99d38470c12ee807d55f7de1529ad29ce2c422e0b65e3d5a05c02caca249", + + spec.DataVersionCapella, + + nil, + + ) + + + + if m.GetHeaderWithProofsResponse != nil + { + + response = + m.GetHeaderWithProofsResponse + }   if err := + json.NewEncoder(w).Encode(response); err != nil + { +
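The transactions_root carried by these mock bids comes from an SSZ tree built over the payload's transaction list; a minimal sketch condensed from MakeGetHeaderWithConstraintsResponse in the hunk above (error handling omitted):

// Build the SSZ tree over the constrained transactions, as the mock relay does,
// and take its root as the bid's transactions_root. The inclusion multiproof is
// then derived from the same tree (CalculateMerkleMultiProofs in the diff above).
transactions := new(utilbellatrix.ExecutionPayloadTransactions)
for _, con := range constraints {
	transactions.Transactions = append(transactions.Transactions, bellatrix.Transaction(con.tx))
}
rootNode, _ := transactions.GetTree() // error handling omitted for brevity
txsRoot := rootNode.Hash()            // 32-byte transactions_root for the bid header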
+449 -10
+ diff --git + flashbots/mev-boost/server/service.go + chainbound/bolt/server/service.go + index + 897d67ba5b4d4aa8875d620ae7d8f328e3fac68d..36a501792689a033351ceea45d173eef72f7ffcc + 100644 + --- + flashbots/mev-boost/server/service.go + +++ + chainbound/bolt/server/service.go + @@ -22,6 +22,9 @@ + eth2ApiV1Bellatrix + "github.com/attestantio/go-eth2-client/api/v1/bellatrix" + eth2ApiV1Capella + "github.com/attestantio/go-eth2-client/api/v1/capella" + eth2ApiV1Deneb + "github.com/attestantio/go-eth2-client/api/v1/deneb" + "github.com/attestantio/go-eth2-client/spec/phase0" + + gethCommon + "github.com/ethereum/go-ethereum/common" + + gethTypes + "github.com/ethereum/go-ethereum/core/types" + + fastSsz + "github.com/ferranbt/fastssz" + "github.com/flashbots/go-boost-utils/ssz" + "github.com/flashbots/go-boost-utils/types" + "github.com/flashbots/go-boost-utils/utils" + @@ -32,6 +35,7 @@ + "github.com/gorilla/mux" + "github.com/sirupsen/logrus" ) +   + +// Standard errors + var ( errNoRelays = errors.New("no + relays") errInvalidSlot = + errors.New("invalid slot") + @@ -41,6 +45,18 @@ + errNoSuccessfulRelayResponse = + errors.New("no successful relay + response") errServerAlreadyRunning = + errors.New("server already running") ) +   + +// Bolt errors + +var ( + + errNilProof = errors.New("nil + proof") + + errMissingConstraint = + errors.New("missing + constraint") + + errMismatchProofSize = + errors.New("proof size + mismatch") + + errInvalidProofs = errors.New("proof + verification failed") + + errInvalidRoot = errors.New("failed + getting tx root from bid") + + errNilConstraint = errors.New("nil + constraint") + + errHashesIndexesMismatch = + errors.New("proof transaction hashes and + indexes length mismatch") + + errHashesConstraintsMismatch = + errors.New("proof transaction hashes and + constraints length mismatch") + +) + + + var ( nilHash = phase0.Hash32{} nilResponse = + struct{}{} + @@ -73,10 +89,11 @@ + GenesisTime uint64 RelayCheck bool RelayMinBid + types.U256Str   + - RequestTimeoutGetHeader time.Duration + - RequestTimeoutGetPayload + time.Duration + - RequestTimeoutRegVal time.Duration + - RequestMaxRetries int + + RequestTimeoutGetHeader time.Duration + + RequestTimeoutGetPayload + time.Duration + + RequestTimeoutRegVal time.Duration + + RequestTimeoutSubmitConstraint + time.Duration + + RequestMaxRetries int + }   // BoostService - the mev-boost + service + @@ -90,17 +107,21 @@ + relayCheck bool relayMinBid types.U256Str + genesisTime uint64   + - builderSigningDomain phase0.Domain + - httpClientGetHeader http.Client + - httpClientGetPayload http.Client + - httpClientRegVal http.Client + - requestMaxRetries int + + builderSigningDomain phase0.Domain + + httpClientGetHeader http.Client + + httpClientGetPayload http.Client + + httpClientRegVal http.Client + + httpClientSubmitConstraint + http.Client + + requestMaxRetries int +   bids map[bidRespKey]bidResp // + keeping track of bids, to log the originating + relay on withholding bidsLock sync.Mutex   + slotUID *slotUID slotUIDLock sync.Mutex + + + + // BOLT: constraint cache + + constraints *ConstraintCache + }   // NewBoostService created a + new BoostService + @@ -138,7 +159,14 @@ + httpClientRegVal: http.Client{ Timeout: + opts.RequestTimeoutRegVal, CheckRedirect: + httpClientDisallowRedirects, }, + + httpClientSubmitConstraint: + http.Client{ + + Timeout: + opts.RequestTimeoutSubmitConstraint, + + CheckRedirect: + httpClientDisallowRedirects, + + }, + requestMaxRetries: opts.RequestMaxRetries, + + + + // BOLT: Initialize 
the constraint + cache + + constraints: NewConstraintCache(64), + }, nil }   + @@ -167,7 +195,12 @@ + r.HandleFunc("/", m.handleRoot) +   r.HandleFunc(pathStatus, + m.handleStatus).Methods(http.MethodGet) + r.HandleFunc(pathRegisterValidator, + m.handleRegisterValidator).Methods(http.MethodPost) + - r.HandleFunc(pathGetHeader, + m.handleGetHeader).Methods(http.MethodGet) + + r.HandleFunc(pathSubmitConstraint, + m.handleSubmitConstraint).Methods(http.MethodPost) + + // TODO: manage the switch between + the endpoint with and without proofs + + // with the bolt sidecar proxy + instead of using the same response here. + + // TODO: revert this to + m.handleGetHeader + + r.HandleFunc(pathGetHeader, + m.handleGetHeaderWithProofs).Methods(http.MethodGet) + + r.HandleFunc(pathGetHeaderWithProofs, + m.handleGetHeaderWithProofs).Methods(http.MethodGet) + r.HandleFunc(pathGetPayload, + m.handleGetPayload).Methods(http.MethodPost) +   r.Use(mux.CORSMethodMiddleware(r)) + @@ -308,6 +341,175 @@ + m.respondError(w, http.StatusBadGateway, + errNoSuccessfulRelayResponse.Error()) }   + +// verifyInclusionProof verifies the + proofs against the constraints, and returns an + error if the proofs are invalid. + +func (m *BoostService) + verifyInclusionProof(transactionsRoot + phase0.Root, proof *InclusionProof, slot + uint64) error { + + log := + m.log.WithFields(logrus.Fields{}) + + + + // BOLT: get constraints for the + slot + + inclusionConstraints, exists := + m.constraints.Get(slot) + + + + if !exists { + + log.Warnf("[BOLT]: No constraints + found for slot %d", slot) + + return errMissingConstraint + + } + + + + if proof == nil { + + return errNilProof + + } + + + + if len(proof.TransactionHashes) != + len(inclusionConstraints) { + + return errMismatchProofSize + + } + + if len(proof.TransactionHashes) != + len(proof.GeneralizedIndexes) { + + return errHashesIndexesMismatch + + } + + + + log.Infof("[BOLT]: Verifying merkle + multiproofs for %d transactions", + len(proof.TransactionHashes)) + + + + // Decode the constraints, and sort + them according to the utility function + used + + // TODO: this should be done before + verification ideally + + hashToConstraint := + make(HashToConstraintDecoded) + + for hash, constraint := range + inclusionConstraints { + + transaction := + new(gethTypes.Transaction) + + err := + transaction.UnmarshalBinary(constraint.Tx) + + if err != nil { + + log.WithError(err).Error("error + unmarshalling transaction while verifying + proofs") + + return err + + } + + hashToConstraint[hash] = + &ConstraintDecoded{ + + Tx: + transaction.WithoutBlobTxSidecar(), + + Index: constraint.Index, + + } + + } + + leaves := make([][]byte, + len(inclusionConstraints)) + + indexes := make([]int, + len(proof.GeneralizedIndexes)) + + + + for i, hash := range + proof.TransactionHashes { + + constraint, ok := + hashToConstraint[gethCommon.Hash(hash)] + + if constraint == nil || !ok { + + return errNilConstraint + + } + + + + // Compute the hash tree root for + the raw preconfirmed transaction + + // and use it as "Leaf" + in the proof to be verified against + + encoded, err := + constraint.Tx.MarshalBinary() + + if err != nil { + + log.WithError(err).Error("error + marshalling transaction without blob tx + sidecar") + + return err + + } + + + + tx := Transaction(encoded) + + txHashTreeRoot, err := + tx.HashTreeRoot() + + if err != nil { + + return errInvalidRoot + + } + + + + leaves[i] = txHashTreeRoot[:] + + indexes[i] = + int(proof.GeneralizedIndexes[i]) + + i++ + + } + + + + hashes := 
make([][]byte, + len(proof.MerkleHashes)) + + for i, hash := range proof.MerkleHashes + { + + hashes[i] = []byte(*hash) + + } + + + + currentTime := time.Now() + + ok, err := + fastSsz.VerifyMultiproof(transactionsRoot[:], + hashes, leaves, indexes) + + elapsed := time.Since(currentTime) + + if err != nil { + + log.WithError(err).Error("error + verifying merkle proof") + + return err + + } + + + + if !ok { + + log.Error("[BOLT]: proof verification + failed") + + + + // BOLT: send event to web + demo + + message := fmt.Sprintf("failed to + verify merkle proof for slot %d", + slot) + + EmitBoltDemoEvent(message) + + + + return errInvalidProofs + + } else { + + log.Info(fmt.Sprintf("[BOLT]: merkle + proof verified in %s", elapsed)) + + + + // BOLT: send event to web + demo + + // verified merkle proof for tx: %s + in %v", proof.TxHash.String(), + elapsed) + + message := fmt.Sprintf("verified + merkle proof for slot %d in %v", slot, + elapsed) + + EmitBoltDemoEvent(message) + + } + + + + return nil + +} + + + +// handleSubmitConstraint forwards a + constraint to the relays, and registers them + in the local cache. + +// They will later be used to verify + the proofs sent by the relays. + +func (m *BoostService) + handleSubmitConstraint(w http.ResponseWriter, + req *http.Request) { + + ua := + UserAgent(req.Header.Get("User-Agent")) + + log := + m.log.WithFields(logrus.Fields{ + + "method": + "submitConstraint", + + "ua": ua, + + }) + + + + path := req.URL.Path + + + + + log.Info("submitConstraint") + + + + payload := BatchedSignedConstraints{} + + if err := DecodeJSON(req.Body, + &payload); err != nil { + + log.Error("error decoding payload: + ", err) + + m.respondError(w, http.StatusBadRequest, + err.Error()) + + return + + } + + + + // Add all constraints to the + cache + + for _, signedConstraints := range payload + { + + constraintMessage := + signedConstraints.Message + + + + log.Infof("[BOLT]: adding inclusion + constraints to cache. slot = %d, + validatorIndex = %d, number of relays = + %d", constraintMessage.Slot, + constraintMessage.ValidatorIndex, + len(m.relays)) + + + + // Add the constraints to the + cache. + + // They will be cleared when we + receive a payload for the slot in + `handleGetPayload` + + err := + m.constraints.AddInclusionConstraints(constraintMessage.Slot, + constraintMessage.Constraints) + + if err != nil { + + log.WithError(err).Errorf("error + adding inclusion constraints to + cache") + + continue + + } + + + + log.Infof("[BOLT]: added inclusion + constraints to cache. slot = %d, + validatorIndex = %d, number of relays = + %d", constraintMessage.Slot, + constraintMessage.ValidatorIndex, + len(m.relays)) + + } + + + + relayRespCh := make(chan error, + len(m.relays)) + + + + + EmitBoltDemoEvent(fmt.Sprintf("received + %d constraints, forwarding to Bolt relays... + (path: %s)", len(payload), path)) + + + + for _, relay := range m.relays { + + go func(relay RelayEntry) { + + url := + relay.GetURI(pathSubmitConstraint) + + log := log.WithField("url", + url) + + + + log.Infof("sending request for %d + constraint to relay", len(payload)) + + _, err := + SendHTTPRequest(context.Background(), + m.httpClientSubmitConstraint, http.MethodPost, + url, ua, nil, payload, nil) + + log.Infof("sent request for %d + constraint to relay. 
err = %v", + len(payload), err) + + relayRespCh <- err + + if err != nil { + + log.WithError(err).Warn("error calling + submitConstraint on relay") + + return + + } + + }(relay) + + } + + + + for i := 0; i < len(m.relays); i++ + { + + respErr := <-relayRespCh + + if respErr == nil { + + m.respondOK(w, nilResponse) + + return + + } + + } + + + + m.respondError(w, http.StatusBadGateway, + errNoSuccessfulRelayResponse.Error()) + +} + + + // handleGetHeader requests bids from + the relays func (m *BoostService) + handleGetHeader(w http.ResponseWriter, req + *http.Request) { vars := mux.Vars(req) + @@ -514,6 +716,239 @@ + // Return the bid m.respondOK(w, + &result.response) }   + +// handleGetHeader requests bids + from the relays + +// BOLT: receiving preconfirmation + proofs from relays along with bids, and + +// verify them. If not valid, the + bid is discarded + +func (m *BoostService) + handleGetHeaderWithProofs(w + http.ResponseWriter, req *http.Request) + { + + vars := mux.Vars(req) + + slot := vars["slot"] + + parentHashHex := + vars["parent_hash"] + + pubkey := vars["pubkey"] + + + + ua := + UserAgent(req.Header.Get("User-Agent")) + + log := + m.log.WithFields(logrus.Fields{ + + "method": + "getHeaderWithProofs", + + "slot": slot, + + "parentHash": + parentHashHex, + + "pubkey": pubkey, + + "ua": ua, + + }) + + log.Debug("getHeader") + + + + slotUint, err := strconv.ParseUint(slot, + 10, 64) + + if err != nil { + + m.respondError(w, http.StatusBadRequest, + errInvalidSlot.Error()) + + return + + } + + + + if len(pubkey) != 98 { + + m.respondError(w, http.StatusBadRequest, + errInvalidPubkey.Error()) + + return + + } + + + + if len(parentHashHex) != 66 { + + m.respondError(w, http.StatusBadRequest, + errInvalidHash.Error()) + + return + + } + + + + // Make sure we have a uid for this + slot + + m.slotUIDLock.Lock() + + if m.slotUID.slot < slotUint { + + m.slotUID.slot = slotUint + + m.slotUID.uid = uuid.New() + + } + + slotUID := m.slotUID.uid + + m.slotUIDLock.Unlock() + + log = log.WithField("slotUID", + slotUID) + + + + // Log how late into the slot the + request starts + + slotStartTimestamp := m.genesisTime + + slotUint*config.SlotTimeSec + + msIntoSlot := + uint64(time.Now().UTC().UnixMilli()) - + slotStartTimestamp*1000 + + log.WithFields(logrus.Fields{ + + "genesisTime": + m.genesisTime, + + "slotTimeSec": + config.SlotTimeSec, + + "msIntoSlot": msIntoSlot, + + }).Infof("getHeader request start - %d + milliseconds into slot %d", msIntoSlot, + slotUint) + + + + // Add request headers + + headers := map[string]string{ + + HeaderKeySlotUID: slotUID.String(), + + } + + + + // Prepare relay responses + + result := bidResp{} // the final + response, containing the highest bid (if + any) + + relays := + make(map[BlockHashHex][]RelayEntry) // + relays that sent the bid for a specific + blockHash + + + + // Call the relays + + var mu sync.Mutex + + var wg sync.WaitGroup + + for _, relay := range m.relays { + + wg.Add(1) + + go func(relay RelayEntry) { + + defer wg.Done() + + path := + fmt.Sprintf("/eth/v1/builder/header_with_proofs/%s/%s/%s", + slot, parentHashHex, pubkey) + + url := relay.GetURI(path) + + log := log.WithField("url", + url) + + responsePayload := + new(BidWithInclusionProofs) + + code, err := + SendHTTPRequest(context.Background(), + m.httpClientGetHeader, http.MethodGet, url, + ua, headers, nil, responsePayload) + + if err != nil { + + log.WithError(err).Warn("error making + request to relay") + + return + + } + + + + if responsePayload.Proofs != nil { + + 
log.Infof("[BOLT]: get header with + proofs at slot %s, received payload with + proofs: %s", slot, responsePayload) + + } + + + + if code == http.StatusNoContent { + + log.Warn("no-content + response") + + return + + } + + + + if responsePayload.Bid == nil { + + log.Warn("Bid in response is + nil") + + return + + } + + + + // Skip if payload is empty + + if responsePayload.Bid.IsEmpty() { + + log.Warn("Bid is empty") + + return + + } + + + + // Getting the bid info will check + if there are missing fields in the + response + + bidInfo, err := + parseBidInfo(responsePayload.Bid) + + if err != nil { + + log.WithError(err).Warn("error parsing + bid info") + + return + + } + + + + if bidInfo.blockHash == nilHash { + + log.Warn("relay responded with empty + block hash") + + return + + } + + + + valueEth := + weiBigIntToEthBigFloat(bidInfo.value.ToBig()) + + log = log.WithFields(logrus.Fields{ + + "blockNumber": + bidInfo.blockNumber, + + "blockHash": + bidInfo.blockHash.String(), + + "txRoot": + bidInfo.txRoot.String(), + + "value": + valueEth.Text('f', 18), + + }) + + + + if relay.PublicKey.String() != + bidInfo.pubkey.String() { + + log.Errorf("bid pubkey mismatch. + expected: %s - got: %s", + relay.PublicKey.String(), + bidInfo.pubkey.String()) + + return + + } + + + + // Verify the relay signature in + the relay response + + if !config.SkipRelaySignatureCheck { + + ok, err := + checkRelaySignature(responsePayload.Bid, + m.builderSigningDomain, relay.PublicKey) + + if err != nil { + + log.WithError(err).Error("error + verifying relay signature") + + return + + } + + if !ok { + + log.Error("failed to verify relay + signature") + + return + + } + + } + + + + // Verify response coherence with + proposer's input data + + if bidInfo.parentHash.String() != + parentHashHex { + + log.WithFields(logrus.Fields{ + + "originalParentHash": + parentHashHex, + + "responseParentHash": + bidInfo.parentHash.String(), + + }).Error("proposer and relay parent + hashes are not the same") + + return + + } + + + + isZeroValue := bidInfo.value.IsZero() + + isEmptyListTxRoot := + bidInfo.txRoot.String() == + "0x7ffe241ea60187fdb0187bfa22de35d1f9bed7ab061d9401fd47e34a54fbede1" + + if isZeroValue || isEmptyListTxRoot { + + log.Warn("ignoring bid with 0 + value") + + return + + } + + log.Debug("bid received") + + + + // Skip if value (fee) is lower + than the minimum bid + + if + bidInfo.value.CmpBig(m.relayMinBid.BigInt()) + == -1 { + + log.Warn("ignoring bid below min-bid + value") + + return + + } + + + + // BOLT: verify preconfirmation + inclusion proofs. If they don't match, we + don't consider the bid to be valid. + + if responsePayload.Proofs != nil { + + // BOLT: verify the proofs against + the constraints. If they don't match, we + don't consider the bid to be valid. 
+ + transactionsRoot, err := + responsePayload.Bid.TransactionsRoot() + + if err != nil { + + log.WithError(err).Error("[BOLT]: + error getting transaction root") + + return + + } + + if err := + m.verifyInclusionProof(transactionsRoot, + responsePayload.Proofs, slotUint); err != nil + { + + log.Warnf("[BOLT]: Proof verification + failed for relay %s: %s", relay.URL, + err) + + return + + } + + } + + + + mu.Lock() + + defer mu.Unlock() + + + + // Remember which relays delivered + which bids (multiple relays might deliver the + top bid) + + + relays[BlockHashHex(bidInfo.blockHash.String())] + = + append(relays[BlockHashHex(bidInfo.blockHash.String())], + relay) + + + + // Compare the bid with already + known top bid (if any) + + if !result.response.IsEmpty() { + + valueDiff := + bidInfo.value.Cmp(result.bidInfo.value) + + if valueDiff == -1 { // current bid + is less profitable than already known + one + + return + + } else if valueDiff == 0 { // + current bid is equally profitable as already + known one. Use hash as tiebreaker + + previousBidBlockHash := + result.bidInfo.blockHash + + if bidInfo.blockHash.String() >= + previousBidBlockHash.String() { + + return + + } + + } + + } + + + + // Use this relay's response as + mev-boost response because it's most + profitable + + log.Infof("new best bid. Has proofs: + %v", responsePayload.Proofs != nil) + + result.response = + *responsePayload.Bid + + result.bidInfo = bidInfo + + result.t = time.Now() + + }(relay) + + } + + + + // Wait for all requests to + complete... + + wg.Wait() + + + + if result.response.IsEmpty() { + + log.Info("no bid received") + + w.WriteHeader(http.StatusNoContent) + + return + + } + + + + // Log result + + valueEth := + weiBigIntToEthBigFloat(result.bidInfo.value.ToBig()) + + result.relays = + relays[BlockHashHex(result.bidInfo.blockHash.String())] + + log.WithFields(logrus.Fields{ + + "blockHash": + result.bidInfo.blockHash.String(), + + "blockNumber": + result.bidInfo.blockNumber, + + "txRoot": + result.bidInfo.txRoot.String(), + + "value": + valueEth.Text('f', 18), + + "relays": + strings.Join(RelayEntriesToStrings(result.relays), + ", "), + + }).Infof("best bid") + + + + // Remember the bid, for future + logging in case of withholding + + bidKey := bidRespKey{slot: slotUint, + blockHash: + result.bidInfo.blockHash.String()} + + m.bidsLock.Lock() + + m.bids[bidKey] = result + + m.bidsLock.Unlock() + + + + // Return the bid + + m.respondOK(w, &result.response) + + log.Infof("responded with best bid to + beacon client") + +} + + + func (m *BoostService) processCapellaPayload(w + http.ResponseWriter, req *http.Request, log + *logrus.Entry, payload + *eth2ApiV1Capella.SignedBlindedBeaconBlock, body + []byte) { if payload.Message == nil || + payload.Message.Body == nil || + payload.Message.Body.ExecutionPayloadHeader == + nil { log.WithField("body", + string(body)).Error("missing parts of the + request payload from the beacon-node") + @@ -791,6 +1226,8 @@ + m.respondOK(w, result) }   + +// handleGetPayload submits a signed + blinded header to receive the payload body + from the relays. + +// BOLT: when receiving the payload, + we also remove the associated constraints for + this slot. 
+ func (m *BoostService) handleGetPayload(w + http.ResponseWriter, req *http.Request) { log := + m.log.WithField("method", + "getPayload") + log.Debug("getPayload request starts") + @@ -813,9 +1250,11 @@ + log.WithError(err).WithField("body", + string(body)).Error("could not decode + request payload from the beacon-node (signed + blinded beacon block)") m.respondError(w, + http.StatusBadRequest, err.Error()) return } + + + m.processCapellaPayload(w, req, log, payload, + body) return } + + + m.processDenebPayload(w, req, log, payload) } +
+
+ +
+
+ + + + +
+
+
+ +128 +
+
+ -15 +
+
+
+
+
+ diff --git + flashbots/mev-boost/server/service_test.go + chainbound/bolt/server/service_test.go + index + 33b2438ec3cc6ac405e283f6555e200edecff487..4446f23e8214c7c93ca1d9f484fc4ef5303d7d56 + 100644 + --- + flashbots/mev-boost/server/service_test.go + +++ + chainbound/bolt/server/service_test.go + @@ -27,6 +27,7 @@ + "github.com/attestantio/go-eth2-client/spec/capella" + "github.com/attestantio/go-eth2-client/spec/deneb" + "github.com/attestantio/go-eth2-client/spec/phase0" + eth2UtilBellatrix + "github.com/attestantio/go-eth2-client/util/bellatrix" + + + "github.com/ethereum/go-ethereum/common" + "github.com/flashbots/go-boost-utils/types" + "github.com/holiman/uint256" + "github.com/prysmaticlabs/go-bitfield" + @@ -53,16 +54,17 @@ + relayEntries[i] = backend.relays[i].RelayEntry } +   opts := BoostServiceOpts{ + - Log: testLog, + - ListenAddr: + "localhost:12345", + - Relays: relayEntries, + - GenesisForkVersionHex: + "0x00000000", + - RelayCheck: true, + - RelayMinBid: types.IntToU256(12345), + - RequestTimeoutGetHeader: + relayTimeout, + - RequestTimeoutGetPayload: + relayTimeout, + - RequestTimeoutRegVal: relayTimeout, + - RequestMaxRetries: 5, + + Log: testLog, + + ListenAddr: + "localhost:12345", + + Relays: relayEntries, + + GenesisForkVersionHex: + "0x00000000", + + RelayCheck: true, + + RelayMinBid: types.IntToU256(12345), + + RequestTimeoutGetHeader: + relayTimeout, + + RequestTimeoutGetPayload: + relayTimeout, + + RequestTimeoutRegVal: relayTimeout, + + RequestTimeoutSubmitConstraint: + relayTimeout, + + RequestMaxRetries: 5, + } service, err := NewBoostService(opts) + require.NoError(t, err) + @@ -81,6 +83,7 @@ + req, err = http.NewRequest(method, path, + bytes.NewReader(nil)) } else { payloadBytes, + err2 := json.Marshal(payload) require.NoError(t, + err2) + + fmt.Println("payload:", + string(payloadBytes)) + req, err = http.NewRequest(method, path, + bytes.NewReader(payloadBytes)) }   + @@ -207,7 +210,7 @@ + addr := "localhost:1234" + backend.boost.listenAddr = addr go func() { err + := backend.boost.StartHTTPServer() + - require.NoError(t, err) + //nolint:testifylint + + require.NoError(t, err) + }() time.Sleep(time.Millisecond * 100) path := + "http://" + addr + + "?" 
+ strings.Repeat("abc", + 4000) // path with characters of size + over 4kb + @@ -275,7 +278,7 @@ + require.Equal(t, 1, + backend.relays[0].GetRequestCount(path)) + require.Equal(t, 1, + backend.relays[1].GetRequestCount(path))   + // Now make one relay return an error + - + backend.relays[0].overrideHandleRegisterValidator(func(w + http.ResponseWriter, _ *http.Request) { + + + backend.relays[0].overrideHandleRegisterValidator(func(w + http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusBadRequest) }) rr = + backend.request(t, http.MethodPost, path, + payload) + @@ -284,7 +287,7 @@ + require.Equal(t, 2, + backend.relays[0].GetRequestCount(path)) + require.Equal(t, 2, + backend.relays[1].GetRequestCount(path))   + // Now make both relays return an error + - which should cause the request to fail + - + backend.relays[1].overrideHandleRegisterValidator(func(w + http.ResponseWriter, _ *http.Request) { + + + backend.relays[1].overrideHandleRegisterValidator(func(w + http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusBadRequest) }) rr = + backend.request(t, http.MethodPost, path, + payload) + @@ -308,8 +311,118 @@ + require.Equal(t, 2, + backend.relays[0].GetRequestCount(path)) }) } +   + +func TestParseConstraints(t *testing.T) + { + + jsonStr := `[{ + + "message": { + + "validator_index": 12345, + + "slot": 8978583, + + "constraints": [{ + + "tx": + "0x02f871018304a5758085025ff11caf82565f94388c818ca8b9251b393131c08a736a67ccb1929787a41bb7ee22b41380c001a0c8630f734aba7acb4275a8f3b0ce831cf0c7c487fd49ee7bcca26ac622a28939a04c3745096fa0130a188fa249289fd9e60f9d6360854820dba22ae779ea6f573f", + + "index": null + + }] + + }, + + "signature": + "0x81510b571e22f89d1697545aac01c9ad0c1e7a3e778b3078bef524efae14990e58a6e960a152abd49de2e18d7fd3081c15d5c25867ccfad3d47beef6b39ac24b6b9fbf2cfa91c88f67aff750438a6841ec9e4a06a94ae41410c4f97b75ab284c" + + }]` + + + + constraints := + BatchedSignedConstraints{} + + err := json.Unmarshal([]byte(jsonStr), + &constraints) + + require.NoError(t, err) + + require.Len(t, constraints, 1) + + require.Equal(t, uint64(12345), + constraints[0].Message.ValidatorIndex) + + require.Equal(t, uint64(8978583), + constraints[0].Message.Slot) + + require.Len(t, + constraints[0].Message.Constraints, 1) + + require.Equal(t, + constraints[0].Message.Constraints[0].Tx, + Transaction(_HexToBytes("0x02f871018304a5758085025ff11caf82565f94388c818ca8b9251b393131c08a736a67ccb1929787a41bb7ee22b41380c001a0c8630f734aba7acb4275a8f3b0ce831cf0c7c487fd49ee7bcca26ac622a28939a04c3745096fa0130a188fa249289fd9e60f9d6360854820dba22ae779ea6f573f"))) + + require.Nil(t, + constraints[0].Message.Constraints[0].Index) + +} + + + +func TestConstraintsAndProofs(t *testing.T) + { + + path := pathSubmitConstraint + + slot := uint64(8978583) + + + + txHash := + _HexToHash("0xba40436abdc8adc037e2c92ea1099a5849053510c3911037ff663085ce44bc49") + + rawTx := + _HexToBytes("0x02f871018304a5758085025ff11caf82565f94388c818ca8b9251b393131c08a736a67ccb1929787a41bb7ee22b41380c001a0c8630f734aba7acb4275a8f3b0ce831cf0c7c487fd49ee7bcca26ac622a28939a04c3745096fa0130a188fa249289fd9e60f9d6360854820dba22ae779ea6f573f") + + + + payload := + BatchedSignedConstraints{&SignedConstraints{ + + Message: ConstraintsMessage{ + + ValidatorIndex: 12345, + + Slot: slot, + + Constraints: + []*Constraint{{Transaction(rawTx), + nil}}, + + }, + + Signature: + phase0.BLSSignature(_HexToBytes( + + + 
"0x81510b571e22f89d1697545aac01c9ad0c1e7a3e778b3078bef524efae14990e58a6e960a152abd49de2e18d7fd3081c15d5c25867ccfad3d47beef6b39ac24b6b9fbf2cfa91c88f67aff750438a6841ec9e4a06a94ae41410c4f97b75ab284c")), + + }} + + + + // Build getHeader request + + hash := + _HexToHash("0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7") + + pubkey := _HexToPubkey( + + + "0x8a1d7b8dd64e0aafe7ea7b6c95065c9364cf99d38470c12ee807d55f7de1529ad29ce2c422e0b65e3d5a05c02caca249") + + getHeaderPath := + getHeaderWithProofsPath(slot, hash, + pubkey) + + + + t.Run("Normal function", func(t + *testing.T) { + + backend := newTestBackend(t, 1, + time.Second) + + rr := backend.request(t, http.MethodPost, + path, payload) + + require.Equal(t, http.StatusOK, + rr.Code) + + require.Equal(t, 1, + backend.relays[0].GetRequestCount(path)) + + + + got, ok := + backend.boost.constraints.FindTransactionByHash(common.HexToHash(txHash.String())) + + require.True(t, ok) + + require.Equal(t, Transaction(rawTx), + got.Tx) + + require.Nil(t, got.Index) + + }) + + + + t.Run("Normal function with + constraints", func(t *testing.T) { + + backend := newTestBackend(t, 1, + time.Second) + + + + // Submit constraint + + backend.request(t, http.MethodPost, path, + payload) + + + + resp := + backend.relays[0].MakeGetHeaderWithConstraintsResponse( + + slot, + + + "0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7", + + + "0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7", + + + "0x8a1d7b8dd64e0aafe7ea7b6c95065c9364cf99d38470c12ee807d55f7de1529ad29ce2c422e0b65e3d5a05c02caca249", + + spec.DataVersionDeneb, + + []struct { + + tx Transaction + + hash phase0.Hash32 + + }{{rawTx, txHash}}, + + ) + + + backend.relays[0].GetHeaderWithProofsResponse + = resp + + + + rr := backend.request(t, http.MethodGet, + getHeaderPath, nil) + + require.Equal(t, http.StatusOK, rr.Code, + rr.Body.String()) + + require.Equal(t, 1, + backend.relays[0].GetRequestCount(getHeaderPath)) + + }) + + + + t.Run("No proofs given", func(t + *testing.T) { + + backend := newTestBackend(t, 1, + time.Second) + + + + // Submit constraint + + backend.request(t, http.MethodPost, path, + payload) + + + + resp := + backend.relays[0].MakeGetHeaderResponse( + + slot, + + + "0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7", + + + "0xe28385e7bd68df656cd0042b74b69c3104b5356ed1f20eb69f1f925df47a3ab7", + + + "0x8a1d7b8dd64e0aafe7ea7b6c95065c9364cf99d38470c12ee807d55f7de1529ad29ce2c422e0b65e3d5a05c02caca249", + + spec.DataVersionDeneb, + + ) + + backend.relays[0].GetHeaderResponse = + resp + + + + rr := backend.request(t, http.MethodGet, + getHeaderPath, nil) + + // When we have constraints + registered, but the relay does not return any + proofs, we should return no content. + + // This will force a locally built + block. 
+ + require.Equal(t, http.StatusNoContent, + rr.Code, rr.Body.String()) + + require.Equal(t, 1, + backend.relays[0].GetRequestCount(getHeaderPath)) + + }) + +} + + + func getHeaderPath(slot uint64, parentHash + phase0.Hash32, pubkey phase0.BLSPubKey) string { + return + fmt.Sprintf("/eth/v1/builder/header/%d/%s/%s", + slot, parentHash.String(), pubkey.String()) + +} + + + +func getHeaderWithProofsPath(slot uint64, + parentHash phase0.Hash32, pubkey + phase0.BLSPubKey) string { + + return + fmt.Sprintf("/eth/v1/builder/header_with_proofs/%d/%s/%s", + slot, parentHash.String(), + pubkey.String()) + }   func TestGetHeader(t *testing.T) { + @@ -688,7 +801,7 @@ + t.Run("Retries on error from relay", + func(t *testing.T) { backend := + newTestBackend(t, 1, 2*time.Second)   count + := 0 + - backend.relays[0].handlerOverrideGetPayload + = func(w http.ResponseWriter, _ *http.Request) + { + + backend.relays[0].handlerOverrideGetPayload + = func(w http.ResponseWriter, r *http.Request) + { + if count > 0 { // success response on + the second attempt + backend.relays[0].defaultHandleGetPayload(w) + @@ -709,7 +822,7 @@ + count := 0 maxRetries := 5   + - backend.relays[0].handlerOverrideGetPayload + = func(w http.ResponseWriter, _ *http.Request) + { + + backend.relays[0].handlerOverrideGetPayload + = func(w http.ResponseWriter, r *http.Request) + { + count++ if count > maxRetries { // + success response after max retry attempts +
+
+
+
+
+
+ +
+ + +
+
+

+ These are the Go implementations of the constraint types defined in the Bolt specs + here. +
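+ As a reference (not part of the diff), here is a minimal usage sketch of the ConstraintCache introduced in server/constraints.go below. The function name exampleConstraintCache is hypothetical, rawTx is assumed to hold a valid RLP-encoded transaction and txHash its hash, and only the constructor and methods shown in the diff are used. +
+ // Sketch only: assumed to live alongside server/constraints.go in the same package.
+ package server
+ 
+ import gethCommon "github.com/ethereum/go-ethereum/common"
+ 
+ func exampleConstraintCache(rawTx Transaction, txHash gethCommon.Hash) {
+     // Keep constraints for at most 64 slots (the capacity here is an arbitrary choice).
+     cache := NewConstraintCache(64)
+ 
+     // Store an inclusion constraint for a slot, with no fixed position in the block.
+     slot := uint64(8978583)
+     if err := cache.AddInclusionConstraint(slot, rawTx, nil); err != nil {
+         // Fails only if rawTx cannot be decoded as a transaction.
+         return
+     }
+ 
+     // Constant-time lookup by transaction hash across all cached slots.
+     if constraint, ok := cache.FindTransactionByHash(txHash); ok {
+         _ = constraint // constraint.Tx and constraint.Index are now available
+     }
+ }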

+
+
+
+
+ + +
+
+
+ (new) +
+ + +
+
+ +
+
+
+ +118 +
+
+ -0 +
+
+
+
+
+ diff --git + flashbots/mev-boost/server/constraints.go + chainbound/bolt/server/constraints.go + new file mode 100644 + index + 0000000000000000000000000000000000000000..4f5914aa1bbcb022d0f59285be1f7c920a964719 + --- /dev/null + +++ + chainbound/bolt/server/constraints.go + @@ -0,0 +1,118 @@ + +package server + + + +import ( + + + "github.com/attestantio/go-eth2-client/spec/phase0" + + gethCommon + "github.com/ethereum/go-ethereum/common" + + + "github.com/ethereum/go-ethereum/core/types" + + lru + "github.com/hashicorp/golang-lru/v2" + +) + + + +type BatchedSignedConstraints = + []*SignedConstraints + + + +type SignedConstraints struct { + + Message ConstraintsMessage + `json:"message"` + + Signature phase0.BLSSignature + `json:"signature"` + +} + + + +type ConstraintsMessage struct { + + ValidatorIndex uint64 + `json:"validator_index"` + + Slot uint64 `json:"slot"` + + Constraints []*Constraint + `json:"constraints"` + +} + + + +type Constraint struct { + + Tx Transaction `json:"tx"` + + Index *uint64 + `json:"index"` + +} + + + +func (s *SignedConstraints) String() string + { + + return JSONStringify(s) + +} + + + +func (m *ConstraintsMessage) String() string + { + + return JSONStringify(m) + +} + + + +func (c *Constraint) String() string { + + return JSONStringify(c) + +} + + + +// ConstraintCache is a cache for + constraints. + +type ConstraintCache struct { + + // map of slots to all constraints + for that slot + + constraints *lru.Cache[uint64, + map[gethCommon.Hash]*Constraint] + +} + + + +// NewConstraintCache creates a new + constraint cache. + +// cap is the maximum number of + slots to store constraints for. + +func NewConstraintCache(cap int) + *ConstraintCache { + + constraints, _ := lru.New[uint64, + map[gethCommon.Hash]*Constraint](cap) + + return &ConstraintCache{ + + constraints: constraints, + + } + +} + + + +// AddInclusionConstraint adds an + inclusion constraint to the cache at the given + slot for the given transaction. + +func (c *ConstraintCache) + AddInclusionConstraint(slot uint64, tx + Transaction, index *uint64) error { + + if _, exists := c.constraints.Get(slot); + !exists { + + c.constraints.Add(slot, + make(map[gethCommon.Hash]*Constraint)) + + } + + + + // parse transaction to get its + hash and store it in the cache + + // for constant time lookup + later + + parsedTx := new(types.Transaction) + + err := parsedTx.UnmarshalBinary(tx) + + if err != nil { + + return err + + } + + + + m, _ := c.constraints.Get(slot) + + m[parsedTx.Hash()] = &Constraint{ + + Tx: tx, + + Index: index, + + } + + + + return nil + +} + + + +// AddInclusionConstraints adds + multiple inclusion constraints to the cache at + the given slot + +func (c *ConstraintCache) + AddInclusionConstraints(slot uint64, + constraints []*Constraint) error { + + if _, exists := c.constraints.Get(slot); + !exists { + + c.constraints.Add(slot, + make(map[gethCommon.Hash]*Constraint)) + + } + + + + m, _ := c.constraints.Get(slot) + + for _, constraint := range constraints + { + + parsedTx := new(types.Transaction) + + err := + parsedTx.UnmarshalBinary(constraint.Tx) + + if err != nil { + + return err + + } + + m[parsedTx.Hash()] = constraint + + } + + + + return nil + +} + + + +// Get gets the constraints at the + given slot. + +func (c *ConstraintCache) Get(slot uint64) + (map[gethCommon.Hash]*Constraint, bool) + { + + return c.constraints.Get(slot) + +} + + + +// FindTransactionByHash finds the + constraint for the given transaction hash and + returns it. 
+ +func (c *ConstraintCache) + FindTransactionByHash(txHash gethCommon.Hash) + (*Constraint, bool) { + + for _, hashToConstraint := range + c.constraints.Values() { + + if constraint, exists := + hashToConstraint[txHash]; exists { + + return constraint, true + + } + + } + + return nil, false + +} + + + +type ( + + HashToConstraintDecoded = + map[gethCommon.Hash]*ConstraintDecoded + + ConstraintDecoded struct { + + Index *uint64 + + Tx *types.Transaction + + } + +) +
+
+ +
+
+ + +
+
+
+ (new) +
+ + +
+
+ +
+
+
+ +104 +
+
+ -0 +
+
+
+
+
+ diff --git + flashbots/mev-boost/server/proofs.go + chainbound/bolt/server/proofs.go + new file mode 100644 + index + 0000000000000000000000000000000000000000..72ccd626ee8b6a15af372ef22161fd25a6ebcef0 + --- /dev/null + +++ + chainbound/bolt/server/proofs.go + @@ -0,0 +1,104 @@ + +package server + + + +import ( + + "bytes" + + "encoding/hex" + + "encoding/json" + + "errors" + + "fmt" + + "strings" + + + + fastSsz + "github.com/ferranbt/fastssz" + + + + builderSpec + "github.com/attestantio/go-builder-client/spec" + + + "github.com/attestantio/go-eth2-client/spec/phase0" + +) + + + +type BidWithInclusionProofs struct { + + // The block bid + + Bid *builderSpec.VersionedSignedBuilderBid + `json:"bid"` + + // The inclusion proofs + + Proofs *InclusionProof + `json:"proofs"` + +} + + + +func (b *BidWithInclusionProofs) String() + string { + + out, err := json.Marshal(b) + + if err != nil { + + return err.Error() + + } + + return string(out) + +} + + + +func (p *InclusionProof) String() string + { + + proofs, err := json.Marshal(p) + + if err != nil { + + return err.Error() + + } + + return string(proofs) + +} + + + +type HexBytes []byte + + + +func (h HexBytes) Equal(other HexBytes) bool + { + + return bytes.Equal(h, other) + +} + + + +// MarshalJSON implements + json.Marshaler. + +func (h HexBytes) MarshalJSON() ([]byte, + error) { + + return + []byte(fmt.Sprintf(`"%#x"`, + []byte(h))), nil + +} + + + +// UnmarshalJSON implements + json.Unmarshaler. + +func (h *HexBytes) UnmarshalJSON(input + []byte) error { + + if len(input) == 0 { + + return errors.New("input + missing") + + } + + + + if !bytes.HasPrefix(input, + []byte{'"', '0', + 'x'}) { + + return errors.New("invalid + prefix") + + } + + + + if !bytes.HasSuffix(input, + []byte{'"'}) { + + return errors.New("invalid + suffix") + + } + + + + var data string + + json.Unmarshal(input, &data) + + + + res, _ := + hex.DecodeString(strings.TrimPrefix(data, + "0x")) + + + + *h = res + + + + return nil + +} + + + +// InclusionProof is a Merkle + Multiproof of inclusion of a set of + TransactionHashes + +type InclusionProof struct { + + TransactionHashes []phase0.Hash32 + `json:"transaction_hashes"` + + GeneralizedIndexes []uint64 + `json:"generalized_indexes"` + + MerkleHashes []*HexBytes + `json:"merkle_hashes"` + +} + + + +// InclusionProofFromMultiProof + converts a fastssz.Multiproof into an + InclusionProof, without + +// filling the + TransactionHashes + +func InclusionProofFromMultiProof(mp + *fastSsz.Multiproof) *InclusionProof { + + merkleHashes := make([]*HexBytes, + len(mp.Hashes)) + + for i, h := range mp.Hashes { + + merkleHashes[i] = new(HexBytes) + + *(merkleHashes[i]) = h + + } + + + + leaves := make([]*HexBytes, + len(mp.Leaves)) + + for i, h := range mp.Leaves { + + leaves[i] = new(HexBytes) + + *(leaves[i]) = h + + } + + generalIndexes := make([]uint64, + len(mp.Indices)) + + for i, idx := range mp.Indices { + + generalIndexes[i] = uint64(idx) + + } + + return &InclusionProof{ + + MerkleHashes: merkleHashes, + + GeneralizedIndexes: generalIndexes, + + } + +} +
+
+
+
+
+
+ +
+ + +
+
+

+ We added logic to create and verify Merkle inclusion proofs based on the SSZ Transactions beacon container (the transactions list committed to by the ExecutionPayload). +
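+ For reference (not part of the diff), the sketch below shows how these helpers fit together; it mirrors CalculateMerkleMultiProofs in server/utils.go and the TestGenerateMerkleMultiProofs test further below. The function name proveAndVerify is hypothetical, and the constraints argument is assumed to hold valid RLP-encoded transactions together with their hashes. +
+ // Sketch only: assumed to live alongside server/utils.go in the same package.
+ package server
+ 
+ import (
+     "github.com/attestantio/go-eth2-client/spec/bellatrix"
+     "github.com/attestantio/go-eth2-client/spec/phase0"
+     utilbellatrix "github.com/attestantio/go-eth2-client/util/bellatrix"
+     fastssz "github.com/ferranbt/fastssz"
+ )
+ 
+ func proveAndVerify(constraints []struct {
+     tx   Transaction
+     hash phase0.Hash32
+ }) error {
+     // Build the SSZ tree of the beacon-chain Transactions list (here, as in the
+     // test below, it only contains the constrained transactions).
+     txList := new(utilbellatrix.ExecutionPayloadTransactions)
+     for _, c := range constraints {
+         txList.Transactions = append(txList.Transactions, bellatrix.Transaction(c.tx))
+     }
+     rootNode, err := txList.GetTree()
+     if err != nil {
+         return err
+     }
+ 
+     // Generate the Merkle multiproof for the constrained transactions.
+     proof, err := CalculateMerkleMultiProofs(rootNode, constraints)
+     if err != nil {
+         return err
+     }
+ 
+     // Verify it against the root, using each transaction's SSZ hash tree root as leaf.
+     leaves := make([][]byte, len(constraints))
+     for i, c := range constraints {
+         root, err := c.tx.HashTreeRoot()
+         if err != nil {
+             return err
+         }
+         leaves[i] = root[:]
+     }
+     hashes := make([][]byte, len(proof.MerkleHashes))
+     for i, h := range proof.MerkleHashes {
+         hashes[i] = (*h)[:]
+     }
+     indices := make([]int, len(proof.GeneralizedIndexes))
+     for i, idx := range proof.GeneralizedIndexes {
+         indices[i] = int(idx)
+     }
+     _, err = fastssz.VerifyMultiproof(rootNode.Hash(), hashes, leaves, indices)
+     return err
+ }
+ This is the same check applied to bids returned with proofs on the getHeader path: if verification against the registered constraints fails, the bid is discarded. +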

+
+
+
+
+ + +
+
+
+ (new) +
+ + +
+
+ +
+
+
+ +64 +
+
+ -0 +
+
+
+
+
+ diff --git + flashbots/mev-boost/server/transaction_ssz.go + chainbound/bolt/server/transaction_ssz.go + new file mode 100644 + index + 0000000000000000000000000000000000000000..ad6d25cae79ee25f11718bd1bfa376c9c3442871 + --- /dev/null + +++ + chainbound/bolt/server/transaction_ssz.go + @@ -0,0 +1,64 @@ + +package server + + + +import ( + + ssz + "github.com/ferranbt/fastssz" + +) + + + +// MaxBytesPerTransaction is the + maximum length in bytes of a raw RLP-encoded + transaction + +var MaxBytesPerTransaction uint64 = + 1_073_741_824 // 2**30 + + + +// Transaction is a wrapper type of + byte slice to implement the ssz.HashRoot + interface + +type Transaction HexBytes + + + +// HashTreeRoot calculates the hash + tree root of the transaction, which + +// is a list of basic types + (byte). + +// + +// Reference: + https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization + +func (tx *Transaction) HashTreeRoot() + ([32]byte, error) { + + hasher := ssz.NewHasher() + + tx.HashTreeRootWith(hasher) + + root, err := hasher.HashRoot() + + + + return root, err + +} + + + +func (tx *Transaction) HashTreeRootWith(hh + ssz.HashWalker) error { + + var err error + + byteLen := uint64(len(*tx)) + + + + if byteLen > MaxBytesPerTransaction + { + + err = ssz.ErrIncorrectListSize + + return err + + } + + + + // Load the bytes of the + transaction into the hasher + + hh.AppendBytes32(*tx) + + // Perform + `mix_in_length(merkleize(pack(value), + limit=chunk_count(type)), len(value))` + + // Reference: + https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization + + // + + // The `indx` parameters is set to + `0` as we need to consider the whole hh.buf + buffer for this. + + // In an implementation of more + complex types, this parameter would be used to + indicate the starting + + // index of the buffer to be + merkleized. It is used a single buffer to do + everything for + + // optimization purposes. + + hh.MerkleizeWithMixin(0, byteLen, + (1073741824+31)/32) + + + + return nil + +} + + + +func (tx *Transaction) GetTree() (*ssz.Node, + error) { + + w := &ssz.Wrapper{} + + tx.HashTreeRootWith(w) + + return w.Node(), nil + +} + + + +func (tx *Transaction) Equal(other + *Transaction) bool { + + return + HexBytes(*tx).Equal(HexBytes(*other)) + +} + + + +func (tx *Transaction) MarshalJSON() + ([]byte, error) { + + return (*HexBytes)(tx).MarshalJSON() + +} + + + +func (tx *Transaction) UnmarshalJSON(input + []byte) error { + + return + (*HexBytes)(tx).UnmarshalJSON(input) + +} +
+
+ +
+
+ + + + +
+
+
+ +69 +
+
+ -2 +
+
+
+
+
+ diff --git + flashbots/mev-boost/server/utils.go + chainbound/bolt/server/utils.go + index + fb779d531218e9dba6e97a37f59e7a24dea88284..a7ec11cefba7bbd5f164dbcb43289154be2d1632 + 100644 + --- + flashbots/mev-boost/server/utils.go + +++ + chainbound/bolt/server/utils.go + @@ -7,6 +7,7 @@ + "encoding/json" "errors" + "fmt" "io" + + "math" + "math/big" + "net/http" "net/url" + @@ -19,11 +20,12 @@ + "github.com/attestantio/go-eth2-client/spec" + "github.com/attestantio/go-eth2-client/spec/phase0" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/common/hexutil" + + fastssz + "github.com/ferranbt/fastssz" + "github.com/flashbots/go-boost-utils/bls" + "github.com/flashbots/go-boost-utils/ssz" + "github.com/flashbots/mev-boost/config" + "github.com/holiman/uint256" + - + "github.com/sirupsen/logrus" + + log + "github.com/sirupsen/logrus" + )   const ( + @@ -105,7 +107,7 @@ + return resp.StatusCode, nil }   // + SendHTTPRequestWithRetries - prepare and send + HTTP request, retrying the request if within the + client timeout + -func SendHTTPRequestWithRetries(ctx + context.Context, client http.Client, method, + url string, userAgent UserAgent, headers + map[string]string, payload, dst any, + maxRetries int, log *logrus.Entry) (code int, + err error) { + +func SendHTTPRequestWithRetries(ctx + context.Context, client http.Client, method, + url string, userAgent UserAgent, headers + map[string]string, payload, dst any, + maxRetries int, log *log.Entry) (code int, err + error) { + var requestCtx context.Context var cancel + context.CancelFunc if client.Timeout > 0 { + @@ -270,3 +272,68 @@ + return true } return false } + + + +// EmitBoltDemoEvent sends a message + to the web demo backend to log an event. + +// This is only used for demo + purposes and should be removed in + production. 
+ +func EmitBoltDemoEvent(message string) + { + + event := + strings.NewReader(fmt.Sprintf("{ + \"message\": \"BOLT-MEV-BOOST: + %s\"}", message)) + + eventRes, err := + http.Post("http://host.docker.internal:3001/events", + "application/json", event) + + if err != nil { + + fmt.Printf("Failed to send web demo + event: %v", err) + + } + + if eventRes != nil { + + defer eventRes.Body.Close() + + } + +} + + + +func Map[T any, U any](slice []*T, mapper + func(el *T) *U) []*U { + + result := make([]*U, len(slice)) + + for i, el := range slice { + + result[i] = mapper(el) + + } + + return result + +} + + + +func JSONStringify(obj any) string { + + b, err := json.Marshal(obj) + + if err != nil { + + return "" + + } + + return string(b) + +} + + + +func CalculateMerkleMultiProofs(rootNode + *fastssz.Node, constraints []struct { + + tx Transaction + + hash phase0.Hash32 + +}) (inclusionProof *InclusionProof, err + error) { + + // using our gen index formula: 2 * + 2^21 + preconfIndex + + baseGeneralizedIndex := + int(math.Pow(float64(2), float64(21))) + + generalizedIndexes := make([]int, + len(constraints)) + + transactionHashes := make([]phase0.Hash32, + len(constraints)) + + j := 0 + + + + for i, con := range constraints { + + generalizedIndex := baseGeneralizedIndex + + i + + generalizedIndexes[i] = + generalizedIndex + + transactionHashes[j] = con.hash + + j++ + + } + + + + log.Info(fmt.Sprintf("[BOLT]: + Calculating merkle multiproof for %d + preconfirmed transaction", + + len(constraints))) + + + + timeStart := time.Now() + + multiProof, err := + rootNode.ProveMulti(generalizedIndexes) + + if err != nil { + + log.Error(fmt.Sprintf("[BOLT]: could + not calculate merkle multiproof for %d preconf + %s", len(constraints), err)) + + return + + } + + + + timeForProofs := + time.Since(timeStart) + + log.Info(fmt.Sprintf("[BOLT]: + Calculated merkle multiproof for %d preconf in + %s", len(constraints), + timeForProofs)) + + + + inclusionProof = + InclusionProofFromMultiProof(multiProof) + + inclusionProof.TransactionHashes = + transactionHashes + + + + return inclusionProof, nil + +} +
+
+ +
+
+ + + + +
+
+
+ +76 +
+
+ -2 +
+
+
+
+
+ diff --git + flashbots/mev-boost/server/utils_test.go + chainbound/bolt/server/utils_test.go + index + 3ae76f686eb89ee6c3871cbc55b310d5228b1562..684476ebe5743ff7d14d5371a56c16214f2b8c04 + 100644 + --- + flashbots/mev-boost/server/utils_test.go + +++ + chainbound/bolt/server/utils_test.go + @@ -4,6 +4,7 @@ + import ( "bytes" + "compress/gzip" + "context" + + "encoding/json" + "fmt" "math/big" + "net/http" + @@ -13,9 +14,13 @@ + builderApi + "github.com/attestantio/go-builder-client/api" + builderApiDeneb + "github.com/attestantio/go-builder-client/api/deneb" + "github.com/attestantio/go-eth2-client/spec" + + + "github.com/attestantio/go-eth2-client/spec/bellatrix" + "github.com/attestantio/go-eth2-client/spec/capella" + "github.com/attestantio/go-eth2-client/spec/deneb" + "github.com/attestantio/go-eth2-client/spec/phase0" + + utilbellatrix + "github.com/attestantio/go-eth2-client/util/bellatrix" + + + "github.com/ethereum/go-ethereum/core/types" + + fastssz + "github.com/ferranbt/fastssz" + "github.com/flashbots/mev-boost/config" + "github.com/stretchr/testify/require" + ) + @@ -46,7 +51,7 @@ + // Test with custom UA customUA := + "test-user-agent" expectedUA := + fmt.Sprintf("mev-boost/%s %s", + config.Version, customUA) + - ts := + httptest.NewServer(http.HandlerFunc(func(_ + http.ResponseWriter, r *http.Request) { + + ts := + httptest.NewServer(http.HandlerFunc(func(w + http.ResponseWriter, r *http.Request) { + require.Equal(t, expectedUA, + r.Header.Get("User-Agent")) done <- + true })) + @@ -58,7 +63,7 @@ + <-done   // Test without custom + UA expectedUA = + fmt.Sprintf("mev-boost/%s", + config.Version) + - ts = + httptest.NewServer(http.HandlerFunc(func(_ + http.ResponseWriter, r *http.Request) { + + ts = + httptest.NewServer(http.HandlerFunc(func(w + http.ResponseWriter, r *http.Request) { + require.Equal(t, expectedUA, + r.Header.Get("User-Agent")) done <- + true })) + @@ -209,3 +214,72 @@ + require.Equal(t, tt.expected, + getPayloadResponseIsEmpty(tt.payload)) }) } } + + + +func TestGenerateMerkleMultiProofs(t + *testing.T) { + + // + https://etherscan.io/tx/0x138a5f8ba7950521d9dec66ee760b101e0c875039e695c9fcfb34f5ef02a881b + + // + 0x02f873011a8405f5e10085037fcc60e182520894f7eaaf75cb6ec4d0e2b53964ce6733f54f7d3ffc880b6139a7cbd2000080c080a095a7a3cbb7383fc3e7d217054f861b890a935adc1adf4f05e3a2f23688cf2416a00875cdc45f4395257e44d709d04990349b105c22c11034a60d7af749ffea2765 + + // + https://etherscan.io/tx/0xfb0ee9de8941c8ad50e6a3d2999cd6ef7a541ec9cb1ba5711b76fcfd1662dfa9 + + // + 0xf8708305dc6885029332e35883019a2894500b0107e172e420561565c8177c28ac0f62017f8810ffb80e6cc327008025a0e9c0b380c68f040ae7affefd11979f5ed18ae82c00e46aa3238857c372a358eca06b26e179dd2f7a7f1601755249f4cff56690c4033553658f0d73e26c36fe7815 + + // + https://etherscan.io/tx/0x45e7ee9ba1a1d0145de29a764a33bb7fc5620486b686d68ec8cb3182d137bc90 + + // + 0xf86c0785028fa6ae0082520894098d880c4753d0332ca737aa592332ed2522cd22880d2f09f6558750008026a0963e58027576b3a8930d7d9b4a49253b6e1a2060e259b2102e34a451d375ce87a063f802538d3efed17962c96fcea431388483bbe3860ea9bb3ef01d4781450fbf + + // + https://etherscan.io/tx/0x9d48b4a021898a605b7ae49bf93ad88fa6bd7050e9448f12dde064c10f22fe9c + + // + 0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e + + + + raw := + 
`["0x02f873011a8405f5e10085037fcc60e182520894f7eaaf75cb6ec4d0e2b53964ce6733f54f7d3ffc880b6139a7cbd2000080c080a095a7a3cbb7383fc3e7d217054f861b890a935adc1adf4f05e3a2f23688cf2416a00875cdc45f4395257e44d709d04990349b105c22c11034a60d7af749ffea2765","0xf8708305dc6885029332e35883019a2894500b0107e172e420561565c8177c28ac0f62017f8810ffb80e6cc327008025a0e9c0b380c68f040ae7affefd11979f5ed18ae82c00e46aa3238857c372a358eca06b26e179dd2f7a7f1601755249f4cff56690c4033553658f0d73e26c36fe7815", + "0xf86c0785028fa6ae0082520894098d880c4753d0332ca737aa592332ed2522cd22880d2f09f6558750008026a0963e58027576b3a8930d7d9b4a49253b6e1a2060e259b2102e34a451d375ce87a063f802538d3efed17962c96fcea431388483bbe3860ea9bb3ef01d4781450fbf", + "0x02f87601836384348477359400850517683ba883019a28943678fce4028b6745eb04fa010d9c8e4b36d6288c872b0f1366ad800080c080a0b6b7aba1954160d081b2c8612e039518b9c46cd7df838b405a03f927ad196158a071d2fb6813e5b5184def6bd90fb5f29e0c52671dea433a7decb289560a58416e"]` + + + + // Unmarshal the raw + transactions + + byteTxs := make([]*HexBytes, 0, 2) + + err := json.Unmarshal([]byte(raw), + &byteTxs) + + require.NoError(t, err) + + + + // Create payload + transactions + + payloadTransactions := Map(byteTxs, + func(rawTx *HexBytes) *types.Transaction + { + + transaction := new(types.Transaction) + + err = + transaction.UnmarshalBinary([]byte(*rawTx)) + + require.NoError(t, err) + + return transaction + + }) + + + + // Constraints + + constraints := []struct { + + tx Transaction + + hash phase0.Hash32 + + }{ + + {tx: Transaction(*byteTxs[0]), hash: + phase0.Hash32(payloadTransactions[0].Hash())}, + + {tx: Transaction(*byteTxs[1]), hash: + phase0.Hash32(payloadTransactions[1].Hash())}, + + } + + + + // Create root node + + transactions := + new(utilbellatrix.ExecutionPayloadTransactions) + + + + for _, con := range constraints { + + transactions.Transactions = + append(transactions.Transactions, + bellatrix.Transaction(con.tx)) + + } + + + + rootNode, err := + transactions.GetTree() + + require.NoError(t, err) + + + + // Call the function to test + + inclusionProof, err := + CalculateMerkleMultiProofs(rootNode, + constraints) + + require.NoError(t, err) + + + + // Verify the inclusion proof + + rootHash := rootNode.Hash() + + hashesBytes := make([][]byte, + len(inclusionProof.MerkleHashes)) + + for i, hash := range + inclusionProof.MerkleHashes { + + hashesBytes[i] = (*hash)[:] + + } + + leavesBytes := make([][]byte, + len(constraints)) + + for i, con := range constraints { + + root, err := con.tx.HashTreeRoot() + + require.NoError(t, err) + + leavesBytes[i] = root[:] + + } + + indicesInt := make([]int, + len(inclusionProof.GeneralizedIndexes)) + + for i, index := range + inclusionProof.GeneralizedIndexes { + + indicesInt[i] = int(index) + + } + + + + _, err = fastssz.VerifyMultiproof(rootHash, + hashesBytes, leavesBytes, indicesInt) + + require.NoError(t, err) + +} +
+
+
+
+
+
+ +
+ + +
+
+
+
+
+ + + + +
+
+
+ +3 +
+
+ -3 +
+
+
+
+
+ diff --git + flashbots/mev-boost/server/errors.go + chainbound/bolt/server/errors.go + index + a9757b38e7874d35c26d54923613b5d5a2ce2aa7..7a75a9e5141b30a269b9ed4f68c6778108db9ccf + 100644 + --- + flashbots/mev-boost/server/errors.go + +++ + chainbound/bolt/server/errors.go + @@ -1,9 +1,9 @@ + package server   + -import "errors" + +import "fmt" +   // ErrMissingRelayPubkey is + returned if a new RelayEntry URL has no public + key. + -var ErrMissingRelayPubkey = + errors.New("missing relay public + key") + +var ErrMissingRelayPubkey = + fmt.Errorf("missing relay public + key") +   // ErrPointAtInfinityPubkey is + returned if a new RelayEntry URL has an all-zero + public key. + -var ErrPointAtInfinityPubkey = + errors.New("relay public key cannot be + the point-at-infinity") + +var ErrPointAtInfinityPubkey = + fmt.Errorf("relay public key cannot be + the point-at-infinity") +
+
+
+
+
+
+
+
+
+ +
+ + +
+
+
+
+
+ + + + +
+
+
+ +9 +
+
+ -0 +
+
+
+
+
+ diff --git + flashbots/mev-boost/cmd/mev-boost/main.go + chainbound/bolt/cmd/mev-boost/main.go + old mode 120000 + new mode 100644 + index + f67563dcc90ea112615a4625f6cea09e69f26c5c..a37283427a5723fe54b6190a37e02356a365b2b0 + --- + flashbots/mev-boost/cmd/mev-boost/main.go + +++ + chainbound/bolt/cmd/mev-boost/main.go + @@ -1 +1,9 @@ + -../../main.go + \ No newline at end of file + +package main + + + +import ( + + + "github.com/flashbots/mev-boost/cli" + +) + + + +func main() { + + cli.Main() + +} +
+
+
+
+
+
+
+
+
+ +
+ + +
+
+
+
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -0 +
+
+
+
+
+ diff --git flashbots/mev-boost/.dockerignore + chainbound/bolt/.dockerignore + deleted file mode 100644 + index + 191381ee74dec49c89f99a62d055cb1058ba0de9..0000000000000000000000000000000000000000 + --- flashbots/mev-boost/.dockerignore + +++ /dev/null + @@ -1 +0,0 @@ + -.git + \ No newline at end of file +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -10 +
+
+
+
+
+ diff --git + flashbots/mev-boost/.github/dependabot.yaml + chainbound/bolt/.github/dependabot.yaml + deleted file mode 100644 + index + 83f5768c1c14bece1a6231bc9f21adfa8317b9e0..0000000000000000000000000000000000000000 + --- + flashbots/mev-boost/.github/dependabot.yaml + +++ /dev/null + @@ -1,11 +0,0 @@ + -# Please see the documentation for all configuration + options: + -# + https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + - + -version: 2 + -updates: + - - package-ecosystem: gomod + - directory: / + - schedule: + - interval: daily + - reviewers: + - - "metachris" + \ No newline at end of file +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -19 +
+
+
+
+
+ diff --git + flashbots/mev-boost/.github/pull_request_template.md + chainbound/bolt/.github/pull_request_template.md + deleted file mode 100644 + index + 92765135f92cf5b99ccb0edafe48ccc3a299c124..0000000000000000000000000000000000000000 + --- + flashbots/mev-boost/.github/pull_request_template.md + +++ /dev/null + @@ -1,19 +0,0 @@ + -## 📝 Summary + - + -<!--- A general summary of your changes --> + - + -## ⛱ Motivation and Context + - + -<!--- Why is this change required? What problem does + it solve? --> + - + -## 📚 References + - + -<!-- Any interesting external links to documentation, + articles, tweets which add value to the PR --> + - + ---- + - + -## ✅ I have run these commands + - + -* [ ] `make lint` + -* [ ] `make test-race` + -* [ ] `go mod tidy` +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -39 +
+
+
+
+
+ diff --git + flashbots/mev-boost/.github/workflows/lint.yml + chainbound/bolt/.github/workflows/lint.yml + deleted file mode 100644 + index + 69e7b301a72d3f6a09e68c6a0cd218229bb02adb..0000000000000000000000000000000000000000 + --- + flashbots/mev-boost/.github/workflows/lint.yml + +++ /dev/null + @@ -1,39 +0,0 @@ + -name: Linting + - + -on: + - push: + - branches: + - - develop + - pull_request: + - + -jobs: + - lint: + - name: Lint + - runs-on: ubuntu-latest + - steps: + - - name: Checkout + - uses: actions/checkout@v2 + - + - - name: Set up Go + - uses: actions/setup-go@v3 + - with: + - go-version: ^1.22.0 + - id: go + - + - - name: Ensure go mod tidy runs without changes + - run: | + - go mod tidy + - git diff-index HEAD + - git diff-index --quiet HEAD + - + - - name: Install gofumpt + - run: go install mvdan.cc/gofumpt@v0.6.0 + - + - - name: Install staticcheck + - run: go install + honnef.co/go/tools/cmd/staticcheck@v0.4.7 + - + - - name: Install golangci-lint + - run: go install + github.com/golangci/golangci-lint/cmd/golangci-lint@v1.56.1 + - + - - name: Lint + - run: make lint +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -122 +
+
+
+
+
+ diff --git + flashbots/mev-boost/.github/workflows/release.yaml + chainbound/bolt/.github/workflows/release.yaml + deleted file mode 100644 + index + 959e65c105fb037e208ecc1cc7cae73beab97aef..0000000000000000000000000000000000000000 + --- + flashbots/mev-boost/.github/workflows/release.yaml + +++ /dev/null + @@ -1,123 +0,0 @@ + -name: Release + - + -on: + - push: + - tags: + - - 'v*' + - + -permissions: + - contents: write + - + -jobs: + - docker-image: + - name: Publish Docker Image + - runs-on: ubuntu-latest + - + - steps: + - - name: Checkout sources + - uses: actions/checkout@v2 + - + - - name: Get tag version + - run: echo + "RELEASE_VERSION=${GITHUB_REF#refs/*/}" + >> $GITHUB_ENV + - + - - name: Print version + - run: | + - echo $RELEASE_VERSION + - echo ${{ env.RELEASE_VERSION }} + - + - - name: Extract metadata (tags, labels) for Docker + images + - id: meta + - uses: docker/metadata-action@v4 + - with: + - images: flashbots/mev-boost + - tags: | + - type=sha + - type=pep440,pattern={{version}} + - type=pep440,pattern={{major}}.{{minor}} + - type=raw,value=latest,enable=${{ + !contains(env.RELEASE_VERSION, '-') }} + - + - - name: Set up QEMU + - uses: docker/setup-qemu-action@v2 + - + - - name: Set up Docker Buildx + - uses: docker/setup-buildx-action@v2 + - + - - name: Login to DockerHub + - uses: docker/login-action@v2 + - with: + - username: ${{ secrets.DOCKERHUB_USERNAME }} + - password: ${{ secrets.DOCKERHUB_TOKEN }} + - + - - name: Build and push + - uses: docker/build-push-action@v3 + - with: + - context: . + - push: true + - build-args: | + - VERSION=${{ env.RELEASE_VERSION }} + - platforms: linux/amd64,linux/arm64 + - tags: ${{ steps.meta.outputs.tags }} + - labels: ${{ steps.meta.outputs.labels }} + - + - build-all: + - runs-on: ubuntu-latest + - steps: + - - name: Checkout + - uses: actions/checkout@v3 + - with: + - fetch-depth: 0 + - - name: Fetch all tags + - run: git fetch --force --tags + - - name: Set up Go + - uses: actions/setup-go@v3 + - with: + - go-version: ^1.22 + - - name: Run GoReleaser + - uses: goreleaser/goreleaser-action@v3 + - with: + - distribution: goreleaser + - version: latest + - args: release --skip-publish --config + .goreleaser-build.yaml --rm-dist + - env: + - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - - name: Upload + - uses: actions/upload-artifact@v3 + - with: + - name: mev-boost-build + - path: | + - dist/mev-boost*.tar.gz + - dist/mev-boost*.txt + - + - release: + - needs: build-all + - runs-on: ubuntu-latest + - steps: + - - name: Checkout + - uses: actions/checkout@v3 + - with: + - fetch-depth: 0 + - - name: Fetch all tags + - run: git fetch --force --tags + - - name: Set up Go + - uses: actions/setup-go@v3 + - with: + - go-version: ^1.22 + - - name: Make directories + - run: | + - mkdir -p ./build + - - name: Download binaries + - uses: actions/download-artifact@v3 + - with: + - name: mev-boost-build + - path: ./build + - - name: Merge checksum file + - run: | + - cd ./build + - cat ./mev-boost*checksums.txt >> + checksums.txt + - rm ./mev-boost*checksums.txt + - - name: Release + - uses: goreleaser/goreleaser-action@v3 + - with: + - args: release --config .goreleaser-release.yaml + - env: + - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + \ No newline at end of file +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -31 +
+
+
+
+
+ diff --git + flashbots/mev-boost/.github/workflows/tests.yml + chainbound/bolt/.github/workflows/tests.yml + deleted file mode 100644 + index + 5d69e5db6efb73b03b836120c6d4c08e7d63daee..0000000000000000000000000000000000000000 + --- + flashbots/mev-boost/.github/workflows/tests.yml + +++ /dev/null + @@ -1,31 +0,0 @@ + -name: Tests + - + -on: + - push: + - branches: + - - develop + - pull_request: + - + -jobs: + - test: + - name: Test + - runs-on: ubuntu-latest + - steps: + - - name: Set up Go + - uses: actions/setup-go@v3 + - with: + - go-version: ^1.22 + - id: go + - + - - name: Checkout sources + - uses: actions/checkout@v2 + - + - - name: Run unit tests and generate the coverage + report + - run: make test-coverage + - + - - name: Upload coverage to Codecov + - uses: codecov/codecov-action@v2 + - with: + - files: ./coverage.out + - verbose: false + - flags: unittests +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -29 +
+
+
+
+
+ diff --git flashbots/mev-boost/.gitignore + chainbound/bolt/.gitignore + deleted file mode 100644 + index + 5a0fe0294584faf293805e5d04863b04cfdfc44b..0000000000000000000000000000000000000000 + --- flashbots/mev-boost/.gitignore + +++ /dev/null + @@ -1,30 +0,0 @@ + -# Binaries for programs and plugins + -*.exe + -*.exe~ + -*.dll + -*.so + -*.dylib + - + -# Test binary, built with `go test -c` + -*.test + - + -# Output of the go coverage tool, specifically when used + with LiteIDE + -*.out + - + -# Dependency directories (remove the comment below to + include it) + -# vendor/ + - + -# Ignore VI/Vim swapfiles + -.*.sw? + - + -# IntelliJ + -.idea + -.ijwb + -/mev-boost + -/test-cli + -/tmp + -/dist + -.vscode/ + -/README.internal.md + -/validator_data.json + -/build/ + \ No newline at end of file +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -68 +
+
+
+
+
+ diff --git flashbots/mev-boost/.golangci.yml + chainbound/bolt/.golangci.yml + deleted file mode 100644 + index + 074ca1cae869c4503defeac5a1fbd2ebd514a9fc..0000000000000000000000000000000000000000 + --- flashbots/mev-boost/.golangci.yml + +++ /dev/null + @@ -1,68 +0,0 @@ + -linters: + - enable-all: true + - disable: + - - exhaustruct + - - funlen + - - gochecknoglobals + - - gochecknoinits + - - gocritic + - - godot + - - godox + - - gomnd + - - lll + - - nlreturn + - - nonamedreturns + - - nosnakecase + - - paralleltest + - - testpackage + - - varnamelen + - - wrapcheck + - - wsl + - - musttag + - - depguard + - + - # + - # Maybe fix later: + - # + - - cyclop + - - gocognit + - - goconst + - - gosec + - - ireturn + - - noctx + - - tagliatelle + - - perfsprint + - + - # + - # Disabled because of generics: + - # + - - contextcheck + - - rowserrcheck + - - sqlclosecheck + - - structcheck + - - wastedassign + - + - # + - # Disabled because deprecated: + - # + - - deadcode + - - exhaustivestruct + - - golint + - - ifshort + - - interfacer + - - maligned + - - scopelint + - - varcheck + - + -linters-settings: + - gofumpt: + - extra-rules: true + - govet: + - enable-all: true + - disable: + - - fieldalignment + - - shadow + - + -output: + - print-issued-lines: true + - sort-results: true +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -24 +
+
+
+
+
+ diff --git + flashbots/mev-boost/.goreleaser-build.yaml + chainbound/bolt/.goreleaser-build.yaml + deleted file mode 100644 + index + 73c65429f211353ab8e796651724bcc3acd09a3e..0000000000000000000000000000000000000000 + --- + flashbots/mev-boost/.goreleaser-build.yaml + +++ /dev/null + @@ -1,24 +0,0 @@ + -# + https://goreleaser.com/customization/builds/ + -project_name: mev-boost + -builds: + - - id: mev-boost + - env: + - # Force build to be all Go. + - - CGO_ENABLED=0 + - flags: + - # Remove all file system paths from the + executable. + - - -trimpath + - ldflags: + - # Disables DWARF debugging information. + - - -w + - # Disables symbol table information. + - - -s + - # Sets the value of the symbol. + - - -X + github.com/flashbots/mev-boost/config.Version={{.Version}} + - goos: + - - linux + - - darwin + - - windows + - goarch: + - - amd64 + - - arm64 +
+
+ +
+
+ + +
+
+ + +
+ (deleted) +
+
+
+ +
+
+
+ +0 +
+
+ -12 +
+
+
+
+
+ diff --git + flashbots/mev-boost/.goreleaser-release.yaml + chainbound/bolt/.goreleaser-release.yaml + deleted file mode 100644 + index + 63613c9279617954666e6f1177ad0126f13cbf02..0000000000000000000000000000000000000000 + --- + flashbots/mev-boost/.goreleaser-release.yaml + +++ /dev/null + @@ -1,12 +0,0 @@ + -# + https://goreleaser.com/customization/release/ + -builds: + - - skip: true + -release: + - draft: true + - extra_files: + - - glob: ./build/* + - header: | + - # 🚀 Features + - # 🎄 Enhancements + - # 🐞 Notable bug fixes + - # 🎠 Community +
+
+ +
+
+ + + + +
+
+
+ +8 +
+
+ -1 +
+
+
+
+
+ diff --git flashbots/mev-boost/CONTRIBUTING.md + chainbound/bolt/CONTRIBUTING.md + index + e640fe8ce8dd3e9c147a4810f2138f54349dade6..9ae93632fadd9a01d12df5e61638d53b0edca800 + 100644 + --- flashbots/mev-boost/CONTRIBUTING.md + +++ chainbound/bolt/CONTRIBUTING.md + @@ -6,7 +6,14 @@ Please start + by reading our [code of conduct](CODE_OF_CONDUCT.md).   + ## Set up   + -Install a few dev dependencies for `make lint`: + https://github.com/flashbots/mev-boost/blob/go122/.github/workflows/lint.yml#L29-L37 + +Install a few dev dependencies for `make lint`: + + + +```bash + +go install + github.com/mgechev/revive@v1.1.3 + +go install mvdan.cc/gofumpt@v0.3.1 + +go install + honnef.co/go/tools/cmd/staticcheck@v0.4.2 + +go install + github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.2 + +``` +   Look at the [README for instructions to install the + dependencies and build `mev-boost`](README.md#installing) +
+
+ +
+
+ + + + +
+
+
+ +5 +
+
+ -5 +
+
+
+
+
+ diff --git flashbots/mev-boost/Dockerfile + chainbound/bolt/Dockerfile + index + 32477fb6a504a517554cbf55667ae3cbe1d80934..495f5045bcc0a415e2afd9f075eaf7aa9e7a4252 + 100644 + --- flashbots/mev-boost/Dockerfile + +++ chainbound/bolt/Dockerfile + @@ -1,5 +1,5 @@ + # syntax=docker/dockerfile:1 + -FROM golang:1.22 as builder + +FROM golang:1.22-alpine AS builder + ARG VERSION WORKDIR /build   + @@ -10,10 +10,10 @@ RUN go + mod download   ADD . . RUN + --mount=type=cache,target=/root/.cache/go-build + CGO_ENABLED=0 GOOS=linux go build \ + - -trimpath \ + - -v \ + - -ldflags "-w -s -X + 'github.com/flashbots/mev-boost/config.Version=$VERSION'" + \ + - -o mev-boost . + + -trimpath \ + + -v \ + + -ldflags "-w -s -X + 'github.com/flashbots/mev-boost/config.Version=$VERSION'" + \ + + -o mev-boost . +   FROM alpine WORKDIR /app +
+
+ +
+
+ + + + +
+
+
+ +0 +
+
+ -1 +
+
+
+
+
+ diff --git flashbots/mev-boost/Makefile + chainbound/bolt/Makefile + index + 06fc69fe19e856843433aa4f76b75d832d795429..a5ccd47e09dbf576a982d38fba22f52a6b4a4c8c + 100644 + --- flashbots/mev-boost/Makefile + +++ chainbound/bolt/Makefile + @@ -23,7 +23,6 @@ @echo + "${VERSION}"   .PHONY: build build: + - @go version + CGO_ENABLED=0 go build $(GO_BUILD_FLAGS) -o mev-boost   + .PHONY: build-testcli +
+
+ +
+
+ + + + +
+
+
+ +6 +
+
+ -9 +
+
+
+
+
+ diff --git flashbots/mev-boost/RELEASE.md + chainbound/bolt/RELEASE.md + index + f14d8f302a25eb801250c5f75380f17226ade1a4..d00d82693c20b33dcc2ce853fb35fbc151f0653e + 100644 + --- flashbots/mev-boost/RELEASE.md + +++ chainbound/bolt/RELEASE.md + @@ -44,11 +44,10 @@ + For example, creating a new release `v1.9`:   1. Create + a Github issue about the upcoming release + ([example](https://github.com/flashbots/mev-boost/issues/524)) + -2. Create a release branch: `release/v1.9` (note: + use the target version as branch name, don't add the + `-alpha` suffix) + -3. Tag an alpha version: `v1.9-alpha1` + -4. Test in testnets, iterate as needed, create more + alpha versions if needed + -5. When tests are complete, create the final tag and + release + -6. + +1. Create a release branch (`release/v1.9`) + +1. Create an alpha release: `v1.9-alpha1` + +1. Test in testnets, iterate as needed, create more + alpha versions + +1. When tests are complete, create the final + release +   ```bash # create a new branch + @@ -84,18 +83,16 @@ ## + Tagging a version and pushing the release   To create a + new version (with tag), follow all these steps! They are + necessary to have the correct build version inside, and work + with `go install`.   + -* In the release branch + * Update [`Version`](/config/vars.go) to final + version to `v1.9`, and commit * Create final tags, both + semver and pep440: * `git tag -s v1.9` * `git tag -s v1.9.0` + * Update the `stable` branch: * `git checkout stable` + - * `git merge tags/v1.9 --ff-only` (ff-only is + important, otherwise git doesn't know the stable + branch is based off the v1.9 tag!) + -* Update the `develop` branch: + + * `git merge tags/v1.9 --ff-only` + * `git checkout develop` * `git merge tags/v1.9 + --ff-only` + -* Update `Version` in `config/vars.go` to next patch + with `dev` suffix (eg. `v1.10-dev`) and commit to + `develop` branch + +* Update `Version` in `config/vars.go` to next patch + with `dev` suffix (eg. `v1.10-dev`) and commit to + develop + * Now push the `develop` and `stable` branches, as well as + the tag: `git push origin develop stable --tags`   Now + check the Github CI actions for release activity: + https://github.com/flashbots/mev-boost/actions +
+
+ +
+
+ + + + +
+
+
+ +4 +
+
+ -2 +
+
+
+
+
+ diff --git flashbots/mev-boost/go.mod + chainbound/bolt/go.mod + index + f6d36d87a037cadeb0e72aa027055381d4190a45..d745164165fe55e3c4158c3ba3597116035e3740 + 100644 + --- flashbots/mev-boost/go.mod + +++ chainbound/bolt/go.mod + @@ -1,6 +1,6 @@ + module github.com/flashbots/mev-boost   + -go 1.21 + +go 1.22 +   require ( github.com/ethereum/go-ethereum + v1.13.10 + @@ -8,6 +8,7 @@ + github.com/flashbots/go-boost-utils v1.8.0 + github.com/flashbots/go-utils v0.5.0 + github.com/google/uuid v1.6.0 + github.com/gorilla/mux v1.8.1 + + github.com/hashicorp/golang-lru/v2 + v2.0.7 + github.com/holiman/uint256 v1.2.4 + github.com/prysmaticlabs/go-bitfield + v0.0.0-20210809151128-385d8c5e3fb7 + github.com/sirupsen/logrus v1.9.3 + @@ -27,6 +28,7 @@ + github.com/consensys/bavard v0.1.13 // + indirect github.com/consensys/gnark-crypto v0.12.1 + // indirect github.com/crate-crypto/go-ipa + v0.0.0-20231025140028-3c0104f4b233 // indirect + github.com/crate-crypto/go-kzg-4844 v0.7.0 + // indirect + + github.com/emicklei/dot v1.6.2 // + indirect + github.com/ethereum/c-kzg-4844 v0.4.0 // + indirect github.com/fatih/color v1.16.0 // + indirect github.com/gballet/go-verkle + v0.1.1-0.20231031103413-a67434b50f46 // indirect + @@ -67,7 +69,7 @@ + github.com/btcsuite/btcd/btcec/v2 v2.3.2 + // indirect github.com/cespare/xxhash/v2 + v2.2.0 // indirect + github.com/davecgh/go-spew v1.1.1 // + indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 + v4.2.0 // indirect + - github.com/ferranbt/fastssz v0.1.3 // + indirect + + github.com/ferranbt/fastssz + v0.1.4-0.20240724090034-31cd371f8688 + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/golang/snappy + v0.0.5-0.20220116011046-fa5810519dcb // indirect + github.com/klauspost/cpuid/v2 v2.2.6 // + indirect +
+
+ +
+
+ + + + +
+
+
+ +6 +
+
+ -0 +
+
+
+
+
+ diff --git flashbots/mev-boost/go.sum + chainbound/bolt/go.sum + index + 0aa577d7bd680f119ff0793e03558a69ce83b42e..63d6c536f33a1513bbb5f9ef1b4c1e26d87d6d9e + 100644 + --- flashbots/mev-boost/go.sum + +++ chainbound/bolt/go.sum + @@ -88,6 +88,8 @@ + github.com/dgraph-io/badger v1.6.0/go.mod + h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= + github.com/dgryski/go-farm + v0.0.0-20190423205320-6a90982ecee2/go.mod + h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= + github.com/dustin/go-humanize v1.0.0/go.mod + h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= + github.com/eknkc/amber + v0.0.0-20171010120322-cdade1c07385/go.mod + h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= + +github.com/emicklei/dot v1.6.2 + h1:08GN+DD79cy/tzN6uLCT84+2Wk9u+wvqP+Hkx/dIR8A= + +github.com/emicklei/dot v1.6.2/go.mod + h1:DeV7GvQtIw4h2u73RKBkkFdvVAz0D9fzeJrgPW6gy/s= + github.com/envoyproxy/go-control-plane + v0.9.0/go.mod + h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= + github.com/envoyproxy/go-control-plane + v0.9.1-0.20191026205805-5f8ba28d4473/go.mod + h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= + github.com/envoyproxy/go-control-plane + v0.9.9-0.20210217033140-668b12f5399d/go.mod + h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= + @@ -103,6 +105,8 @@ + github.com/fatih/color v1.16.0/go.mod + h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= + github.com/fatih/structs v1.1.0/go.mod + h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= + github.com/ferranbt/fastssz v0.1.3 + h1:ZI+z3JH05h4kgmFXdHuR1aWYsgrg7o+Fw7/NCzM16Mo= + github.com/ferranbt/fastssz v0.1.3/go.mod + h1:0Y9TEd/9XuFlh7mskMPfXiI2Dkw4Ddg9EyXt1W7MRvE= + +github.com/ferranbt/fastssz + v0.1.4-0.20240724090034-31cd371f8688 + h1:k70X5h1haHaSbpD/9fcjtvAUEVlRlOKtdpvN7Mzhcv4= + +github.com/ferranbt/fastssz + v0.1.4-0.20240724090034-31cd371f8688/go.mod + h1:Ea3+oeoRGGLGm5shYAeDgu6PGUlcvQhE2fILyD9+tGg= + github.com/flashbots/go-boost-utils v1.8.0 + h1:z3K1hw+Fbl9AGMNQKnK7Bvf0M/rKgjfruAEvra+Z8Mg= + github.com/flashbots/go-boost-utils + v1.8.0/go.mod + h1:Ry1Rw8Lx5v1rpAR0+IvR4sV10jYAeQaGVM3vRD8mYdM= + github.com/flashbots/go-utils v0.5.0 + h1:ldjWta9B9//DJU2QcwRbErez3+1aKhSn6EoFc6d5kPY= + @@ -187,6 +191,8 @@ + github.com/gorilla/mux v1.8.1 + h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= + github.com/gorilla/mux v1.8.1/go.mod + h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= + github.com/gorilla/websocket v1.4.1/go.mod + h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= + github.com/hashicorp/go-version v1.2.0/go.mod + h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= + +github.com/hashicorp/golang-lru/v2 v2.0.7 + h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= + +github.com/hashicorp/golang-lru/v2 + v2.0.7/go.mod + h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= + github.com/hashicorp/hcl v1.0.0/go.mod + h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= + github.com/holiman/uint256 v1.2.4 + h1:jUc4Nk8fm9jZabQuqr2JzednajVmBpC+oiTiXZJEApU= + github.com/holiman/uint256 v1.2.4/go.mod + h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= +
+
+
+
+
+
+
+
+
+ + + + + + + +