diff --git a/mev-boost-relay/README.md b/mev-boost-relay/README.md index cb7c5cc9..edb49d4c 100644 --- a/mev-boost-relay/README.md +++ b/mev-boost-relay/README.md @@ -153,6 +153,7 @@ redis-cli DEL boost-relay/sepolia:validators-registration boost-relay/sepolia:va * `FORCE_GET_HEADER_204` - force 204 as getHeader response * `ENABLE_IGNORABLE_VALIDATION_ERRORS` - enable ignorable validation errors * `USE_V1_PUBLISH_BLOCK_ENDPOINT` - uses the v1 publish block endpoint on the beacon node +* `USE_SSZ_ENCODING_PUBLISH_BLOCK` - uses the SSZ encoding for the publish block endpoint #### Development Environment Variables diff --git a/mev-boost-relay/beaconclient/prod_beacon_instance.go b/mev-boost-relay/beaconclient/prod_beacon_instance.go index 24dd1b97..d50b86c6 100644 --- a/mev-boost-relay/beaconclient/prod_beacon_instance.go +++ b/mev-boost-relay/beaconclient/prod_beacon_instance.go @@ -19,7 +19,8 @@ type ProdBeaconInstance struct { beaconURI string // feature flags - ffUseV1PublishBlockEndpoint bool + ffUseV1PublishBlockEndpoint bool + ffUseSSZEncodingPublishBlock bool } func NewProdBeaconInstance(log *logrus.Entry, beaconURI string) *ProdBeaconInstance { @@ -28,7 +29,7 @@ func NewProdBeaconInstance(log *logrus.Entry, beaconURI string) *ProdBeaconInsta "beaconURI": beaconURI, }) - client := &ProdBeaconInstance{_log, beaconURI, false} + client := &ProdBeaconInstance{_log, beaconURI, false, false} // feature flags if os.Getenv("USE_V1_PUBLISH_BLOCK_ENDPOINT") != "" { @@ -36,6 +37,11 @@ func NewProdBeaconInstance(log *logrus.Entry, beaconURI string) *ProdBeaconInsta client.ffUseV1PublishBlockEndpoint = true } + if os.Getenv("USE_SSZ_ENCODING_PUBLISH_BLOCK") != "" { + _log.Warn("env: USE_SSZ_ENCODING_PUBLISH_BLOCK: using SSZ encoding to publish blocks") + client.ffUseSSZEncodingPublishBlock = true + } + return client } @@ -251,7 +257,37 @@ func (c *ProdBeaconInstance) PublishBlock(block *common.VersionedSignedProposal, } headers := http.Header{} headers.Add("Eth-Consensus-Version", strings.ToLower(block.Version.String())) // optional in v1, required in v2 - return fetchBeacon(http.MethodPost, uri, block, nil, nil, headers, false) + + slot, err := block.Slot() + if err != nil { + slot = 0 + } + + var payloadBytes []byte + useSSZ := c.ffUseSSZEncodingPublishBlock + log := c.log + encodeStartTime := time.Now().UTC() + if useSSZ { + log = log.WithField("publishContentType", "ssz") + payloadBytes, err = block.MarshalSSZ() + } else { + log = log.WithField("publishContentType", "json") + payloadBytes, err = json.Marshal(block) + } + if err != nil { + return 0, fmt.Errorf("could not marshal request: %w", err) + } + publishingStartTime := time.Now().UTC() + encodeDurationMs := publishingStartTime.Sub(encodeStartTime).Milliseconds() + code, err = fetchBeacon(http.MethodPost, uri, payloadBytes, nil, nil, headers, useSSZ) + publishDurationMs := time.Now().UTC().Sub(publishingStartTime).Milliseconds() + log.WithFields(logrus.Fields{ + "slot": slot, + "encodeDurationMs": encodeDurationMs, + "publishDurationMs": publishDurationMs, + "payloadBytes": len(payloadBytes), + }).Info("finished publish block request") + return code, err } type GetGenesisResponse struct { diff --git a/mev-boost-relay/common/preconf.go b/mev-boost-relay/common/preconf.go index fa274d29..cff4b649 100644 --- a/mev-boost-relay/common/preconf.go +++ b/mev-boost-relay/common/preconf.go @@ -15,15 +15,16 @@ import ( "github.com/attestantio/go-eth2-client/spec/phase0" ) -// VersionedSubmitBlockRequestWithPreconfsProofs is a wrapper struct +// 
VersionedSubmitBlockRequestWithProofs is a wrapper struct // over `builderSpec.VersionedSubmitBlockRequest` // to include preconfirmation proofs -type VersionedSubmitBlockRequestWithPreconfsProofs struct { - Inner *VersionedSubmitBlockRequest `json:"inner"` - Proofs []*PreconfirmationWithProof `json:"proofs"` +type VersionedSubmitBlockRequestWithProofs struct { + Inner *VersionedSubmitBlockRequest `json:"inner"` + // FIXME: this is not spec-aligned yet https://github.com/chainbound/bolt/issues/55 + Proofs []*PreconfirmationWithProof `json:"proofs"` } -func (v *VersionedSubmitBlockRequestWithPreconfsProofs) String() string { +func (v *VersionedSubmitBlockRequestWithProofs) String() string { out, err := json.Marshal(v) if err != nil { return err.Error() @@ -58,7 +59,7 @@ type HexBytes []byte // MarshalJSON implements json.Marshaler. func (h HexBytes) MarshalJSON() ([]byte, error) { - return []byte(fmt.Sprintf(`"%#x"`, h)), nil + return []byte(fmt.Sprintf(`"%#x"`, []byte(h))), nil } // UnmarshalJSON implements json.Unmarshaler. @@ -85,6 +86,10 @@ func (h *HexBytes) UnmarshalJSON(input []byte) error { return nil } +func (h HexBytes) String() string { + return JSONStringify(h) +} + // SerializedMerkleProof contains a serialized Merkle proof of transaction inclusion. // - `Index“ is the generalized index of the included transaction from the SSZ tree // created from the list of transactions. diff --git a/mev-boost-relay/common/utils.go b/mev-boost-relay/common/utils.go index 8f48c45c..3ce58017 100644 --- a/mev-boost-relay/common/utils.go +++ b/mev-boost-relay/common/utils.go @@ -276,3 +276,11 @@ func GetBlockSubmissionExecutionPayload(submission *VersionedSubmitBlockRequest) } return nil, ErrEmptyPayload } + +func JSONStringify(v interface{}) string { + out, err := json.Marshal(v) + if err != nil { + return err.Error() + } + return string(out) +} diff --git a/mev-boost-relay/services/api/constraints.go b/mev-boost-relay/services/api/constraints.go index 5a0e0732..bd1f87b3 100644 --- a/mev-boost-relay/services/api/constraints.go +++ b/mev-boost-relay/services/api/constraints.go @@ -1,160 +1,333 @@ package api import ( - "encoding/hex" - "encoding/json" - "fmt" - "strings" + "encoding/binary" "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/pkg/errors" + ssz "github.com/ferranbt/fastssz" ) -// These types are taken from https://github.com/chainbound/bolt/pull/11/files#diff-0fa8405accc1cdc5b108ba0210a8f1d99e25e1a5173e45e1516d73c294b061c4 +// These types are taken from https://chainbound.github.io/bolt-docs/ -type SignedConstraintSubmission struct { - Message *ConstraintSubmission - Signature phase0.BLSSignature `ssz-size:"96"` - ProposerIndex uint64 +const ( + // NOTE: This is still a work in progress and not documented on the specs + MAX_CONSTRAINTS_PER_SLOT = 256 + MAX_BYTES_PER_TRANSACTION = 1073741824 // 2**30 +) + +type SignedConstraints struct { + Message *ConstraintsMessage `json:"message"` + // NOTE: This might change to an ECDSA signature in the future. 
In such case, + // when encoding/decoding SSZ we should take into account that it is 64 bytes long instead of 96 + Signature phase0.BLSSignature `ssz-size:"96" json:"signature"` } -type signedConstraintSubmissionJSON struct { - Message *ConstraintSubmission `json:"message"` - Signature string `json:"signature"` - ProposerIndex uint64 `json:"proposerIndex"` +type ConstraintsMessage struct { + ValidatorIndex uint64 `json:"validator_index"` + Slot uint64 `json:"slot"` + Constraints []*Constraint `ssz-max:"256" json:"constraints"` } -func (s *SignedConstraintSubmission) MarshalJSON() ([]byte, error) { - return json.Marshal(&signedConstraintSubmissionJSON{ - Message: s.Message, - Signature: fmt.Sprintf("%#x", s.Signature), - ProposerIndex: s.ProposerIndex, - }) +type Constraint struct { + Tx Transaction `ssz-max:"1048576" json:"tx"` + Index *Index `json:"index"` } -func (s *SignedConstraintSubmission) UnmarshalJSON(input []byte) error { - var data signedConstraintSubmissionJSON - if err := json.Unmarshal(input, &data); err != nil { - return errors.Wrap(err, "invalid JSON") - } +// For SSZ purposes, we consider `Index` as Union[uint64, None] +type Index uint64 + +func NewIndex(i uint64) *Index { + idx := Index(i) + return &idx +} + +func (c SignedConstraints) String() string { + return JSONStringify(c) +} + +func (c ConstraintsMessage) String() string { + return JSONStringify(c) +} + +func (c Constraint) String() string { + return JSONStringify(c) +} + +// ConstraintsMap is a map of constraints for a block. +type ConstraintsMap = map[phase0.Hash32]*Constraint + +// ConstraintCache is a cache for constraints. +type ConstraintCache struct { + // map of slots to constraints + constraints map[uint64]ConstraintsMap +} + +func (c *SignedConstraints) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(c) +} + +func (c *SignedConstraints) MarshalSSZTo(dst []byte) ([]byte, error) { + // We have 4 bytes of an offset to a dinamically sized object + // plus 96 bytes of the BLS signature. This indicates + // where the dynamic data begins + offset := 100 + + // Field (0) `Message` + dst = ssz.WriteOffset(dst, offset) + + // Field (1) `Signature` + dst = append(dst, c.Signature[:]...) + + // Field (0) `Message` + dst, err := c.Message.MarshalSSZTo(dst) + + return dst, err +} - if data.Message == nil { - return errors.New("message missing") +func (c *SignedConstraints) SizeSSZ() int { + // At minimum, the size is 4 bytes of an offset to a dinamically sized object + // plus 96 bytes of the BLS signature + size := 100 + + // Field (0) 'Message'. We need to add the size of the message with its default values + if c.Message == nil { + c.Message = new(ConstraintsMessage) } + size += c.Message.SizeSSZ() - s.Message = data.Message - s.ProposerIndex = data.ProposerIndex + return 0 +} - if data.Signature == "" { - return errors.New("signature missing") +func (c *SignedConstraints) UnmarshalSSZ(buf []byte) (err error) { + size := uint64(len(buf)) + if size < 100 { + // The buf must be at least 100 bytes long according to offset + signature + return ssz.ErrSize } - signature, err := hex.DecodeString(strings.TrimPrefix(data.Signature, "0x")) - if err != nil { - return errors.Wrap(err, "invalid signature") + tail := buf + var o0 uint64 // Offset (0) 'Message' + + // Offset (0) 'Message'. 
Handle offset too big and too small respectively + if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { + return ssz.ErrOffset + } + if o0 < 100 { + return ssz.ErrInvalidVariableOffset } - if len(signature) != phase0.SignatureLength { - return errors.New("incorrect length for signature") + // Field (0) 'Message' + buf = tail[o0:] + if c.Message == nil { + c.Message = new(ConstraintsMessage) + } + if err = c.Message.UnmarshalSSZ(buf); err != nil { + return } - copy(s.Signature[:], signature) - return nil + // Field (1) `Signature` + copy(c.Signature[:], tail[4:100]) + + return } -type ConstraintSubmission struct { - Slot uint64 - TxHash phase0.Hash32 `ssz-size:"32"` - RawTx Transaction `ssz-max:"1073741824"` +func (m *ConstraintsMessage) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(m) } -type constraintSubmissionJSON struct { - Slot uint64 `json:"slot"` - TxHash string `json:"txHash"` - RawTx string `json:"rawTx"` +func (m *ConstraintsMessage) MarshalSSZTo(buf []byte) (dst []byte, err error) { + // We have 4 bytes of an offset to a dinamically sized object + // plus 16 bytes of the two uint64 fields + offset := 20 + dst = buf + + // Field (0) `ValidatorIndex` + dst = ssz.MarshalUint64(dst, m.ValidatorIndex) + + // Field (1) `Slot` + dst = ssz.MarshalUint64(dst, m.Slot) + + // Field (2) `Constraints` + dst = ssz.WriteOffset(dst, offset) + + // ------- Dynamic fields ------- + + // Field (2) `Constraints` + if size := len(m.Constraints); size > MAX_CONSTRAINTS_PER_SLOT { + err = ssz.ErrListTooBigFn("ConstraintsMessage.Constraints", size, MAX_CONSTRAINTS_PER_SLOT) + return + } + // Each constraint is a dynamically sized object so we first add the offsets + offset = 4 * len(m.Constraints) + for i := 0; i < len(m.Constraints); i++ { + dst = ssz.WriteOffset(dst, offset) + offset += m.Constraints[i].SizeSSZ() + } + // Now we add the actual data + for i := 0; i < len(m.Constraints); i++ { + if dst, err = m.Constraints[i].MarshalSSZTo(dst); err != nil { + return + } + if size := len(m.Constraints[i].Tx); size > MAX_BYTES_PER_TRANSACTION { + err = ssz.ErrBytesLengthFn("Constraints[i].Tx", size, MAX_BYTES_PER_TRANSACTION) + return + } + } + + return } -func (c *ConstraintSubmission) MarshalJSON() ([]byte, error) { - return json.Marshal(&constraintSubmissionJSON{ - Slot: c.Slot, - TxHash: c.TxHash.String(), - RawTx: fmt.Sprintf("%#x", c.RawTx), - }) +func (m *ConstraintsMessage) SizeSSZ() int { + // At minimum, the size is 4 bytes of an offset to a dinamically sized object + // plus 16 bytes of the two uint64 fields + size := 20 + + // Field (2) 'Constraints'. 
We need to add the size of the constraints with their default values + for i := 0; i < len(m.Constraints); i++ { + // The offset to the transaction list + size += 4 + + size += len(m.Constraints[i].Tx) + size += m.Constraints[i].Index.SizeSSZ() + } + return size } -func (c *ConstraintSubmission) UnmarshalJSON(input []byte) error { - var data constraintSubmissionJSON - if err := json.Unmarshal(input, &data); err != nil { - return err +func (m *ConstraintsMessage) UnmarshalSSZ(buf []byte) (err error) { + size := uint64(len(buf)) + if size < 20 { + // 8 + 8 + 4 bytes for the offset + return ssz.ErrSize } - c.Slot = data.Slot + tail := buf + var o2 uint64 - txHash, err := hex.DecodeString((strings.TrimPrefix(data.TxHash, "0x"))) - if err != nil { - return errors.Wrap(err, "invalid tx hash") - } + // Field (0) `ValidatorIndex` + m.ValidatorIndex = binary.LittleEndian.Uint64(buf[0:8]) - copy(c.TxHash[:], txHash) + // Field (1) `Slot` + m.Slot = binary.LittleEndian.Uint64(buf[8:16]) - rawTx, err := hex.DecodeString((strings.TrimPrefix(data.RawTx, "0x"))) - if err != nil { - return errors.Wrap(err, "invalid raw tx") + // Offset (2) 'Constraints' + if o2 = ssz.ReadOffset(buf[16:20]); o2 > size { + return ssz.ErrOffset + } + if o2 < 20 { + return ssz.ErrInvalidVariableOffset } - c.RawTx = rawTx + // Field (2) `Constraints` + buf = tail[o2:] + // We first read the amount of offset values we have, by looking + // at how big is the first offset + var length int + if length, err = ssz.DecodeDynamicLength(buf, MAX_CONSTRAINTS_PER_SLOT); err != nil { + return + } + m.Constraints = make([]*Constraint, length) + err = ssz.UnmarshalDynamic(buf, length, func(indx int, buf []byte) (err error) { + if m.Constraints[indx] == nil { + m.Constraints[indx] = new(Constraint) + } + return m.Constraints[indx].UnmarshalSSZ(buf) + }) - return nil + return } -func (c *ConstraintSubmission) String() string { - data, err := json.Marshal(c) - if err != nil { - return fmt.Sprintf("ERR: %v", err) - } - return string(data) +func (c *Constraint) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(c) } -// Constraints is a map of constraints for a block. -type Constraints = map[phase0.Hash32]*Constraint +func (c *Constraint) MarshalSSZTo(buf []byte) (dst []byte, err error) { + // Both fields are dynamically sized, so we start with two offsets of 4 bytes each + offset := 8 + dst = buf -// Constraint is a constraint on a block. For now just preconfirmations -// or inclusion constraints. -type Constraint struct { - RawTx Transaction `json:"rawTx"` + // Field (0) `Tx` + dst = ssz.WriteOffset(dst, offset) + offset += len(c.Tx) + + // Field (1) `Index` + dst = ssz.WriteOffset(dst, offset) + + // Field (0) `Tx` + dst = append(dst, c.Tx...) + + // Field (1) `Index` + if c.Index == nil { + dst = append(dst, 0) + } else { + // Index is `Union[None, uint64] + dst = append(dst, 1) + dst = ssz.MarshalUint64(dst, uint64(*c.Index)) + } + + return } -// ConstraintCache is a cache for constraints. -type ConstraintCache struct { - // map of slots to constraints - constraints map[uint64]Constraints +func (c *Constraint) SizeSSZ() int { + // Both fields are dynamically sized, so we start with two offsets of 4 bytes each + size := 8 + + // Field (0) 'Tx'. + size += len(c.Tx) + + // Field (1) 'Index'. + size += c.Index.SizeSSZ() + + return size } -// NewConstraintCache creates a new constraint cache. 
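
For reference, the `Union[None, uint64]` layout produced by the hand-rolled `Constraint` encoder above can be exercised as below. This is a sketch only, not part of the diff: it assumes the `Constraint`, `Transaction` and `NewIndex` definitions from this file plus standard `encoding/hex` and `fmt` imports. The expected hex strings are the same vectors used in `constraints_test.go` further down.

```go
// Sketch: Index == nil encodes as a single selector byte 0x00, while a set
// Index encodes as selector 0x01 followed by the uint64 in little endian.
func ExampleConstraint_marshalSSZ() {
	tx, _ := hex.DecodeString("0102030405060708090a0b0c0d0e0f") // 15-byte dummy tx

	// offset(Tx)=8, offset(Index)=8+15=23 (0x17), then the tx bytes, then 0x00.
	none := &Constraint{Tx: Transaction(tx), Index: nil}
	encNone, _ := none.MarshalSSZ()
	fmt.Println(hex.EncodeToString(encNone))

	// Same prefix, then selector 0x01 and the index value 1 in little endian.
	one := &Constraint{Tx: Transaction(tx), Index: NewIndex(1)}
	encOne, _ := one.MarshalSSZ()
	fmt.Println(hex.EncodeToString(encOne))

	// Output:
	// 08000000170000000102030405060708090a0b0c0d0e0f00
	// 08000000170000000102030405060708090a0b0c0d0e0f010100000000000000
}
```
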
-func NewConstraintCache() *ConstraintCache { - return &ConstraintCache{ - // TODO: there should be a maximum length here that we can pre-allocate (probably the lookahead window size) - constraints: make(map[uint64]Constraints), +func (c *Constraint) UnmarshalSSZ(buf []byte) (err error) { + size := uint64(len(buf)) + if size < 8 { + // It needs to contain at least 8 bytes for the two offsets + return ssz.ErrSize } -} -// AddInclusionConstraint adds an inclusion constraint to the cache at the given slot for the given transaction. -func (c *ConstraintCache) AddInclusionConstraint(slot uint64, txHash phase0.Hash32, rawTx Transaction) { - if _, exists := c.constraints[slot]; !exists { - c.constraints[slot] = make(map[phase0.Hash32]*Constraint) + tail := buf + var o0, o1 uint64 + + // Offset (0) 'Tx' + if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { + return ssz.ErrOffset + } + if o0 < 8 { + return ssz.ErrInvalidVariableOffset } - c.constraints[slot][txHash] = &Constraint{ - RawTx: rawTx, + // Offset (1) 'Index' + if o1 = ssz.ReadOffset(buf[4:8]); o1 > size || o0 > o1 { + return ssz.ErrOffset } -} -// Get gets the constraints at the given slot. -func (c *ConstraintCache) Get(slot uint64) Constraints { - return c.constraints[slot] + // Field (0) `Tx` + buf = tail[o0:o1] + if len(buf) > MAX_BYTES_PER_TRANSACTION { + return ssz.ErrBytesLengthFn("Constraint.Tx", len(buf), MAX_BYTES_PER_TRANSACTION) + } + c.Tx = make([]byte, 0, len(buf)) + c.Tx = append(c.Tx, buf...) + + // Field (1) `Index` + buf = tail[o1:] + if buf[0] == 0 { + // Means it's a None value + c.Index = nil + } else { + c.Index = new(Index) + *(c.Index) = Index(binary.LittleEndian.Uint64(buf[1:])) + } + + return } -// Delete deletes the constraints at the given slot. -func (c *ConstraintCache) Delete(slot uint64) { - delete(c.constraints, slot) +func (i *Index) SizeSSZ() int { + if i == nil { + return 1 + } + // selector + uint64 + return 9 } diff --git a/mev-boost-relay/services/api/constraints_test.go b/mev-boost-relay/services/api/constraints_test.go new file mode 100644 index 00000000..920ac4e7 --- /dev/null +++ b/mev-boost-relay/services/api/constraints_test.go @@ -0,0 +1,465 @@ +package api + +import ( + "encoding/hex" + "reflect" + "testing" + + "github.com/attestantio/go-eth2-client/spec/phase0" + "github.com/flashbots/go-boost-utils/bls" + "github.com/stretchr/testify/require" +) + +func TestSignedConstraints_MarshalSSZTo(t *testing.T) { + type fields struct { + Message *ConstraintsMessage + Signature phase0.BLSSignature + } + type args struct { + dst []byte + } + + tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") + require.NoError(t, err) + // remember that uints are in little endian! 
+ // offset offset(8+16-1=23) tx none + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" + // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00) + require.NoError(t, err) + // offset offset(8+16-1=23) tx selector and index + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" + // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + + // -------------------------------- SignedConstraints --------------------------------------------------------------------------------------------------------------------------------------------------- |-------- ConstraintsMessage ---------------- | -- offsets -- | --- raw constraint data + // | | | + // offset 96 bytes of signature | validatorIndex slot offset(20) | off off | + // 64000000_8b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df2_0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 + // + + wantDst, err := hex.DecodeString("640000008b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df20200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + require.NoError(t, err) + + skBytes, err := hex.DecodeString("51815cb2c5489f8d7dc4f9889b9771334a80ccc6a82ce9c2a1ef66dc270c9708") + require.NoError(t, err) + sk, _ := bls.SecretKeyFromBytes(skBytes) + require.NoError(t, err) + + message := &ConstraintsMessage{ + ValidatorIndex: 2, + Slot: 3, + Constraints: []*Constraint{ + {Tx: Transaction(tx1), Index: nil}, + {Tx: Transaction(tx1), Index: NewIndex(1)}, + }, + } + + // We tested this works gud below + messsageSSZ, err := message.MarshalSSZ() + require.NoError(t, err) + + sig := bls.Sign(sk, messsageSSZ) + sigBytes := bls.SignatureToBytes(sig) + + type test struct { + name string + fields fields + args args + wantDst []byte + wantErr bool + } + + tests := []test{ + { + name: "nil and non-nil index", + fields: fields{ + Message: message, + Signature: phase0.BLSSignature(sigBytes[:]), + }, + args: args{dst: make([]byte, 0)}, + wantDst: wantDst, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := &SignedConstraints{ + Message: tt.fields.Message, + Signature: tt.fields.Signature, + } + got, err := c.MarshalSSZTo(tt.args.dst) + if (err != nil) != tt.wantErr { + t.Errorf("SignedConstraints.MarshalSSZTo() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.wantDst) { + t.Errorf("SignedConstraints.MarshalSSZTo() = %v, want %v", got, tt.wantDst) + } + }) + } +} + +func TestSignedConstraints_UnmarshalSSZ(t *testing.T) { + type fields struct { + Message *ConstraintsMessage + Signature phase0.BLSSignature + } + + type args struct { + buf []byte + } + + tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") + require.NoError(t, err) + // remember that uints are in little endian! 
+ // offset offset(8+16-1=23) tx none + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" + // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00) + require.NoError(t, err) + // offset offset(8+16-1=23) tx selector and index + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" + // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + + // -------------------------------- SignedConstraints --------------------------------------------------------------------------------------------------------------------------------------------------- |-------- ConstraintsMessage ---------------- | -- offsets -- | --- raw constraint data + // | | | + // offset 96 bytes of signature | validatorIndex slot offset(20) | off off | + // 64000000_8b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df2_0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 + // + + buf, err := hex.DecodeString("640000008b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df20200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + require.NoError(t, err) + + skBytes, err := hex.DecodeString("51815cb2c5489f8d7dc4f9889b9771334a80ccc6a82ce9c2a1ef66dc270c9708") + require.NoError(t, err) + sk, _ := bls.SecretKeyFromBytes(skBytes) + require.NoError(t, err) + + message := &ConstraintsMessage{ + ValidatorIndex: 2, + Slot: 3, + Constraints: []*Constraint{ + {Tx: Transaction(tx1), Index: nil}, + {Tx: Transaction(tx1), Index: NewIndex(1)}, + }, + } + + // We tested this works gud below + messsageSSZ, err := message.MarshalSSZ() + require.NoError(t, err) + + sig := bls.Sign(sk, messsageSSZ) + sigBytes := bls.SignatureToBytes(sig) + + type test struct { + name string + fields fields + args args + wantDst []byte + wantErr bool + } + + tests := []test{ + { + name: "nil and non-nil index", + fields: fields{ + Message: message, + Signature: phase0.BLSSignature(sigBytes[:]), + }, + args: args{buf: buf}, + wantDst: make([]byte, 0), + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + expected := &SignedConstraints{ + Message: tt.fields.Message, + Signature: tt.fields.Signature, + } + actual := &SignedConstraints{} + if err := actual.UnmarshalSSZ(tt.args.buf); (err != nil) != tt.wantErr { + t.Errorf("SignedConstraints.UnmarshalSSZ() error = %v, wantErr %v", err, tt.wantErr) + } + if !reflect.DeepEqual(expected, actual) { + t.Errorf("SignedConstraints.UnmarshalSSZ() = %v, want %v", actual, expected) + } + }) + } +} + +func TestConstraintsMessage_MarshalSSZTo(t *testing.T) { + type fields struct { + ValidatorIndex uint64 + Slot uint64 + Constraints []*Constraint + } + type args struct { + buf []byte + } + + tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") + require.NoError(t, err) + // remember that uints are in little endian! 
+ // offset offset(8+16-1=23) tx none + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" + // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") + require.NoError(t, err) + // offset offset(8+16-1=23) tx selector and index + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" + // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + + // ----------- ConstraintMessage ---------------| -- offsets -- | --- raw constraint data + // | | + // validatorIndex slot offset | off off | + // 0x0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 + // + + wantDst, err := hex.DecodeString("0200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + require.NoError(t, err) + + type test struct { + name string + fields fields + args args + wantDst []byte + wantErr bool + } + + tests := []test{ + { + name: "nil and non-nil index", + fields: fields{ + ValidatorIndex: 2, + Slot: 3, + Constraints: []*Constraint{ + {Tx: Transaction(tx1), Index: nil}, + {Tx: Transaction(tx1), Index: NewIndex(1)}, + }, + }, + wantDst: wantDst, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + m := &ConstraintsMessage{ + ValidatorIndex: tt.fields.ValidatorIndex, + Slot: tt.fields.Slot, + Constraints: tt.fields.Constraints, + } + gotDst, err := m.MarshalSSZTo(tt.args.buf) + if (err != nil) != tt.wantErr { + t.Errorf("ConstraintsMessage.MarshalSSZTo() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(gotDst, tt.wantDst) { + t.Errorf("ConstraintsMessage.MarshalSSZTo() = %v, want %v", gotDst, tt.wantDst) + } + }) + } +} + +func TestConstraintsMessage_UnmarshalSSZ(t *testing.T) { + type fields struct { + ValidatorIndex uint64 + Slot uint64 + Constraints []*Constraint + } + type args struct { + buf []byte + } + + tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") + require.NoError(t, err) + // remember that uints are in little endian! 
+ // offset offset(8+16-1=23) tx none + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" + // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") + require.NoError(t, err) + // offset offset(8+16-1=23) tx selector and index + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" + // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + + // ----------- ConstraintMessage ---------------| -- offsets -- | --- raw constraint data + // | | + // validatorIndex slot offset | off off | + // 0x0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 + // + + buf, err := hex.DecodeString("0200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + require.NoError(t, err) + + type test struct { + name string + fields fields + args args + wantDst []byte + wantErr bool + } + + tests := []test{ + { + name: "nil and non-nil index", + fields: fields{ + ValidatorIndex: 2, + Slot: 3, + Constraints: []*Constraint{ + {Tx: Transaction(tx1), Index: nil}, + {Tx: Transaction(tx1), Index: NewIndex(1)}, + }, + }, + args: args{buf: buf}, + wantDst: []byte{}, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + expected := &ConstraintsMessage{ + ValidatorIndex: tt.fields.ValidatorIndex, + Slot: tt.fields.Slot, + Constraints: tt.fields.Constraints, + } + actual := &ConstraintsMessage{} + if err := actual.UnmarshalSSZ(tt.args.buf); (err != nil) != tt.wantErr { + t.Errorf("ConstraintsMessage.UnmarshalSSZ() error = %v, wantErr %v", err, tt.wantErr) + } + if !reflect.DeepEqual(expected, actual) { + t.Errorf("ConstraintMessage.UnmarshalSSZ() = %v, want %v", actual, expected) + } + }) + } +} + +func TestConstraint_MarshalSSZTo(t *testing.T) { + type fields struct { + Tx Transaction + Index *Index + } + type args struct { + buf []byte + } + type test struct { + name string + fields fields + args args + wantDst []byte + wantErr bool + } + + tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") + require.NoError(t, err) + // remember that uints are in little endian! 
+ // offset offset(8+16-1=23) tx none + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" + wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") + require.NoError(t, err) + // offset offset(8+16-1=23) tx selector and index + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" + wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + require.NoError(t, err) + + tests := []test{ + { + name: "nil index", + fields: fields{ + Tx: Transaction(tx1), + Index: nil, + }, + args: args{ + buf: make([]byte, 0), + }, + wantDst: wantDst1, + wantErr: false, + }, + { + name: "not-nil index", + fields: fields{ + Tx: Transaction(tx1), + Index: NewIndex(1), + }, + args: args{ + buf: make([]byte, 0), + }, + wantDst: wantDst2, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := &Constraint{ + Tx: tt.fields.Tx, + Index: tt.fields.Index, + } + gotDst, err := c.MarshalSSZTo(tt.args.buf) + if (err != nil) != tt.wantErr { + t.Errorf("Constraint.MarshalSSZTo() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(gotDst, tt.wantDst) { + t.Errorf("Constraint.MarshalSSZTo() = %v, want %v", gotDst, tt.wantDst) + } + }) + } +} + +func TestConstraint_UnmarshalSSZ(t *testing.T) { + type fields struct { + Tx Transaction + Index *Index + } + type args struct { + buf []byte + } + type test struct { + name string + fields fields + args args + wantErr bool + } + + tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") + require.NoError(t, err) + // remember that uints are in little endian! + // offset offset(8+16-1=23) tx none + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" + buf1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") + require.NoError(t, err) + // offset offset(8+16-1=23) tx selector and index + // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" + buf2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") + require.NoError(t, err) + + tests := []test{ + { + name: "nil index", + fields: fields{ + Tx: Transaction(tx1), + Index: nil, + }, + args: args{buf: buf1}, + }, + { + name: "non-nil index", + fields: fields{ + Tx: Transaction(tx1), + Index: NewIndex(1), + }, + args: args{buf: buf2}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + want := &Constraint{ + Tx: tt.fields.Tx, + Index: tt.fields.Index, + } + c := &Constraint{} + if err := c.UnmarshalSSZ(tt.args.buf); (err != nil) != tt.wantErr { + t.Errorf("Constraint.UnmarshalSSZ() error = %v, wantErr %v", err, tt.wantErr) + } + require.Equal(t, want.Tx, c.Tx) + require.Equal(t, want.Index, c.Index) + }) + } +} diff --git a/mev-boost-relay/services/api/proofs.go b/mev-boost-relay/services/api/proofs.go index eb5ffb17..d262a611 100644 --- a/mev-boost-relay/services/api/proofs.go +++ b/mev-boost-relay/services/api/proofs.go @@ -18,62 +18,60 @@ var ( ) // verifyConstraintProofs verifies the proofs against the constraints, and returns an error if the proofs are invalid. 
-func (api *RelayAPI) verifyConstraintProofs(transactionsRoot phase0.Root, proofs []*common.PreconfirmationWithProof, constraints Constraints) error { - log := api.log.WithFields(logrus.Fields{}) +func verifyConstraintProofs(log *logrus.Entry, transactionsRoot phase0.Root, proofs []*common.PreconfirmationWithProof, constraints map[phase0.Hash32]*Constraint) error { // BOLT: verify preconfirmation inclusion proofs. If they don't match, we don't consider the bid to be valid. - if proofs != nil { - // BOLT: remove unnecessary fields while logging - log.WithFields(logrus.Fields{}) + if proofs == nil { + return errors.New("proofs are nil") + } - log.WithField("len", len(proofs)).Info("[BOLT]: Verifying constraint proofs") + log.WithField("len", len(proofs)).Info("[BOLT]: Verifying constraint proofs") - for _, proof := range proofs { - if proof == nil { - log.Warn("[BOLT]: Nil proof!") - return ErrNilProof - } + for _, proof := range proofs { + if proof == nil { + log.Warn("[BOLT]: Nil proof!") + return ErrNilProof + } - // Find the raw tx with the hash specified - constraint, ok := constraints[proof.TxHash] - if !ok { - log.Warnf("[BOLT]: Tx hash %s not found in constraints", proof.TxHash.String()) - // We don't actually have to return an error here, the relay just provided a proof that was unnecessary - continue - } + // Find the raw tx with the hash specified + constraint, ok := constraints[proof.TxHash] + tx := Transaction(constraint.Tx) - rawTx := constraint.RawTx + if !ok { + log.Warnf("[BOLT]: Tx hash %s not found in constraints", proof.TxHash.String()) + // We don't actually have to return an error here, the relay just provided a proof that was unnecessary + continue + } - if len(rawTx) == 0 { - log.Warnf("[BOLT]: Raw tx is empty for tx hash %s", proof.TxHash.String()) - continue - } + if len(constraint.Tx) == 0 { + log.Warnf("[BOLT]: Raw tx is empty for tx hash %s", proof.TxHash.String()) + continue + } - // Compute the hash tree root for the raw preconfirmed transaction - // and use it as "Leaf" in the proof to be verified against - txHashTreeRoot, err := rawTx.HashTreeRoot() - if err != nil { - log.WithError(err).Error("[BOLT]: error getting tx hash tree root") - return ErrInvalidRoot - } + // Compute the hash tree root for the raw preconfirmed transaction + // and use it as "Leaf" in the proof to be verified against + txHashTreeRoot, err := tx.HashTreeRoot() + if err != nil { + log.WithError(err).Error("[BOLT]: error getting tx hash tree root") + return ErrInvalidRoot + } - // Verify the proof - sszProof := proof.MerkleProof.ToFastSszProof(txHashTreeRoot[:]) + // Verify the proof + sszProof := proof.MerkleProof.ToFastSszProof(txHashTreeRoot[:]) - currentTime := time.Now() - ok, err = fastSsz.VerifyProof(transactionsRoot[:], sszProof) - elapsed := time.Since(currentTime) + currentTime := time.Now() + ok, err = fastSsz.VerifyProof(transactionsRoot[:], sszProof) + elapsed := time.Since(currentTime) - if err != nil { - log.WithError(err).Error("error verifying merkle proof") - return err - } + if err != nil { + log.WithError(err).Error("error verifying merkle proof") + return err + } - if !ok { - log.Error("[BOLT]: proof verification failed: 'not ok' for tx hash: ", proof.TxHash.String()) - return ErrInvalidProofs - } else { - log.Info(fmt.Sprintf("[BOLT]: Preconfirmation proof verified for tx hash %s in %s", proof.TxHash.String(), elapsed)) - } + if !ok { + log.Error("[BOLT]: proof verification failed: 'not ok' for tx hash: ", proof.TxHash.String()) + return ErrInvalidProofs + } else { 
+ log.Info(fmt.Sprintf("[BOLT]: Preconfirmation proof verified for tx hash %s in %s", proof.TxHash.String(), elapsed)) } } diff --git a/mev-boost-relay/services/api/service.go b/mev-boost-relay/services/api/service.go index 2b25da8f..1a20fffb 100644 --- a/mev-boost-relay/services/api/service.go +++ b/mev-boost-relay/services/api/service.go @@ -41,6 +41,7 @@ import ( "github.com/thedevbirb/flashbots-go-utils/cli" "github.com/thedevbirb/flashbots-go-utils/httplogger" uberatomic "go.uber.org/atomic" + "golang.org/x/crypto/sha3" "golang.org/x/exp/slices" ) @@ -74,7 +75,7 @@ var ( pathBuilderGetValidators = "/relay/v1/builder/validators" pathSubmitNewBlock = "/relay/v1/builder/blocks" // BOLT: allow builders to ship merkle proofs with their blocks - pathSubmitNewBlockWithPreconfs = "/relay/v1/builder/blocks_with_preconfs" + pathSubmitNewBlockWithProofs = "/relay/v1/builder/blocks_with_proofs" // BOLT: allow builders to subscribe to constraints pathSubscribeConstraints = "/relay/v1/builder/constraints" @@ -191,8 +192,8 @@ type RelayAPI struct { redis *datastore.RedisCache memcached *datastore.Memcached db database.IDatabaseService - constraints *shardmap.FIFOMap[uint64, *Constraints] - constraintsConsumers []chan *ConstraintSubmission + constraints *shardmap.FIFOMap[uint64, *[]*SignedConstraints] + constraintsConsumers []chan *SignedConstraints headSlot uberatomic.Uint64 genesisInfo *beaconclient.GetGenesisResponse @@ -292,8 +293,8 @@ func NewRelayAPI(opts RelayAPIOpts) (api *RelayAPI, err error) { redis: opts.Redis, memcached: opts.Memcached, db: opts.DB, - constraints: shardmap.NewFIFOMap[uint64, *Constraints](64, 8, shardmap.HashUint64), // 2 epochs cache - constraintsConsumers: make([]chan *ConstraintSubmission, 0, 10), + constraints: shardmap.NewFIFOMap[uint64, *[]*SignedConstraints](64, 8, shardmap.HashUint64), // 2 epochs cache + constraintsConsumers: make([]chan *SignedConstraints, 0, 10), payloadAttributes: make(map[string]payloadAttributesHelper), @@ -364,7 +365,7 @@ func (api *RelayAPI) getRouter() http.Handler { r.HandleFunc(pathBuilderGetValidators, api.handleBuilderGetValidators).Methods(http.MethodGet) r.HandleFunc(pathSubmitNewBlock, api.handleSubmitNewBlock).Methods(http.MethodPost) // BOLT - r.HandleFunc(pathSubmitNewBlockWithPreconfs, api.handleSubmitNewBlockWithPreconfs).Methods(http.MethodPost) + r.HandleFunc(pathSubmitNewBlockWithProofs, api.handleSubmitNewBlockWithProofs).Methods(http.MethodPost) r.HandleFunc(pathSubscribeConstraints, api.handleSubscribeConstraints).Methods(http.MethodGet) } @@ -585,7 +586,7 @@ func (api *RelayAPI) startValidatorRegistrationDBProcessor() { } // removeConstraintsConsumer is an helper function to remove the consumer from the list -func (api *RelayAPI) removeConstraintsConsumer(ch chan *ConstraintSubmission) { +func (api *RelayAPI) removeConstraintsConsumer(ch chan *SignedConstraints) { for i, c := range api.constraintsConsumers { if c == ch { api.constraintsConsumers = append(api.constraintsConsumers[:i], api.constraintsConsumers[i+1:]...) 
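
Note on the subscription plumbing above: each subscriber owns a buffered `chan *SignedConstraints` that `handleSubscribeConstraints` appends to `api.constraintsConsumers`, and `removeConstraintsConsumer` drops it when the client disconnects. The `broadcastToChannels` helper called from `handleSubmitConstraints` is not included in this diff; a plausible shape is sketched below (the non-blocking send is an assumption, not necessarily what the repository's helper does).

```go
// Sketch of a fan-out helper equivalent to broadcastToChannels (assumed shape,
// not the repository's implementation).
func broadcastToChannelsSketch(consumers []chan *SignedConstraints, sc *SignedConstraints) {
	for _, ch := range consumers {
		select {
		case ch <- sc:
			// Delivered to this subscriber.
		default:
			// Assumption: drop the update instead of blocking the submission
			// handler when a subscriber's buffer (256 entries) is full.
		}
	}
}
```
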
@@ -1677,7 +1678,7 @@ func (api *RelayAPI) handleSubmitConstraints(w http.ResponseWriter, req *http.Re } // Decode payload - payload := new([]*SignedConstraintSubmission) + payload := new([]*SignedConstraints) if err := json.NewDecoder(bytes.NewReader(body)).Decode(payload); err != nil { log.WithError(err).Warn("failed to decode submit contraints body") api.RespondError(w, http.StatusBadRequest, "failed to decode payload") @@ -1690,12 +1691,12 @@ func (api *RelayAPI) handleSubmitConstraints(w http.ResponseWriter, req *http.Re } // Add all constraints to the cache - for _, signedConstraint := range *payload { + for _, signedConstraints := range *payload { // Retrieve proposer information - proposerIndex := signedConstraint.ProposerIndex - proposerPubKeyStr, found := api.datastore.GetKnownValidatorPubkeyByIndex(proposerIndex) + validatorIndex := signedConstraints.Message.ValidatorIndex + proposerPubKeyStr, found := api.datastore.GetKnownValidatorPubkeyByIndex(validatorIndex) if !found { - log.Errorf("could not find proposer pubkey for index %d", proposerIndex) + log.Errorf("could not find proposer pubkey for index %d", validatorIndex) api.RespondError(w, http.StatusBadRequest, "could not match proposer index to pubkey") return } @@ -1713,22 +1714,23 @@ func (api *RelayAPI) handleSubmitConstraints(w http.ResponseWriter, req *http.Re } // Verify signature - signatureBytes := []byte(signedConstraint.Signature[:]) - signature, err := bls.SignatureFromBytes(signatureBytes) + signature, err := bls.SignatureFromBytes(signedConstraints.Signature[:]) if err != nil { log.Errorf("could not convert signature to bls.Signature: %v", err) api.RespondError(w, http.StatusBadRequest, "Invalid raw BLS signature") return } - // NOTE: Assuming this is what actually the proposer is signing - messageSigned, err := signedConstraint.Message.MarshalJSON() + message := signedConstraints.Message + + // NOTE: even if payload is sent with JSON, the signature digest is the SSZ encoding of the message + messageSSZ, err := message.MarshalSSZ() if err != nil { log.Errorf("could not marshal constraint message to json: %v", err) api.RespondError(w, http.StatusInternalServerError, "could not marshal constraint message to json") return } - ok, err := bls.VerifySignature(signature, blsPublicKey, messageSigned) + ok, err := bls.VerifySignature(signature, blsPublicKey, messageSSZ) if err != nil { log.Errorf("error while veryfing signature: %v", err) api.RespondError(w, http.StatusInternalServerError, "error while veryfing signature") @@ -1736,28 +1738,18 @@ func (api *RelayAPI) handleSubmitConstraints(w http.ResponseWriter, req *http.Re } if !ok { log.Error("Invalid BLS signature over constraint message") - api.RespondError(w, http.StatusBadRequest, fmt.Sprintf("Invalid BLS signature over constraint message %s", messageSigned)) + api.RespondError(w, http.StatusBadRequest, fmt.Sprintf("Invalid BLS signature over constraint message %s", messageSSZ)) return } - constraint := signedConstraint.Message - - log.WithFields(logrus.Fields{ - "slot": constraint.Slot, - "txHash": constraint.TxHash.String(), - "rawTx": fmt.Sprintf("%#x", constraint.RawTx), - }).Info("[BOLT]: adding inclusion constraint to cache") - - broadcastToChannels(api.constraintsConsumers, constraint) + broadcastToChannels(api.constraintsConsumers, signedConstraints) // Add the constraint to the cache. 
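
The handler above verifies the BLS signature over the SSZ encoding of `ConstraintsMessage`, not over its JSON form. For clarity, this is roughly what a submitter has to do to produce an acceptable payload; a sketch only, mirroring `TestSubmitConstraints` in `service_test.go` (the helper name and its `*bls.SecretKey` parameter are illustrative).

```go
// Sketch: sign the SSZ digest of the message, as the relay expects.
func signConstraintsSketch(sk *bls.SecretKey, msg *ConstraintsMessage) (*SignedConstraints, error) {
	digest, err := msg.MarshalSSZ()
	if err != nil {
		return nil, err
	}
	sig := bls.Sign(sk, digest)
	return &SignedConstraints{
		Message:   msg,
		Signature: phase0.BLSSignature(bls.SignatureToBytes(sig)[:]),
	}, nil
}
```
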
- slotConstraints, _ := api.constraints.Get(constraint.Slot) + slotConstraints, _ := api.constraints.Get(message.Slot) if slotConstraints == nil { - constraints := make(Constraints) - constraints[constraint.TxHash] = &Constraint{RawTx: constraint.RawTx} - api.constraints.Put(constraint.Slot, &constraints) + api.constraints.Put(message.Slot, &[]*SignedConstraints{signedConstraints}) } else { - (*slotConstraints)[constraint.TxHash] = &Constraint{RawTx: constraint.RawTx} + *slotConstraints = append(*slotConstraints, signedConstraints) } } @@ -1987,7 +1979,17 @@ func (api *RelayAPI) updateRedisBid( api.RespondError(opts.w, http.StatusBadRequest, err.Error()) return nil, nil, false } - err = api.verifyConstraintProofs(transactionsRoot, proofs, *slotConstraints) + constraints := make(map[phase0.Hash32]*Constraint) + for _, signedConstraints := range *slotConstraints { + for _, constraint := range signedConstraints.Message.Constraints { + hasher := sha3.New256() + hasher.Write(constraint.Tx) + hash := phase0.Hash32(hasher.Sum(nil)) + constraints[hash] = constraint + } + } + + err = verifyConstraintProofs(api.log, transactionsRoot, proofs, constraints) if err != nil { api.RespondError(opts.w, http.StatusBadRequest, err.Error()) return nil, nil, false @@ -2424,10 +2426,7 @@ func (api *RelayAPI) handleSubmitNewBlock(w http.ResponseWriter, req *http.Reque w.WriteHeader(http.StatusOK) } -// TODO: We should check the preconfirmation proofs in this function to discard bids that are not valid. -// This is necessary to avoid the relay accept a high bid with invalid proofs, resulting in a missed opportunity -// for the proposer, who will refuse to sign the associated block header. -func (api *RelayAPI) handleSubmitNewBlockWithPreconfs(w http.ResponseWriter, req *http.Request) { +func (api *RelayAPI) handleSubmitNewBlockWithProofs(w http.ResponseWriter, req *http.Request) { var pf common.Profile var prevTime, nextTime time.Time @@ -2488,7 +2487,7 @@ func (api *RelayAPI) handleSubmitNewBlockWithPreconfs(w http.ResponseWriter, req prevTime = nextTime // BOLT: new payload type - payload := new(common.VersionedSubmitBlockRequestWithPreconfsProofs) + payload := new(common.VersionedSubmitBlockRequestWithProofs) // Check for SSZ encoding contentType := req.Header.Get("Content-Type") @@ -2855,7 +2854,7 @@ func (api *RelayAPI) handleSubscribeConstraints(w http.ResponseWriter, req *http } // Add the new consumer - constraintsCh := make(chan *ConstraintSubmission, 256) + constraintsCh := make(chan *SignedConstraints, 256) api.constraintsConsumers = append(api.constraintsConsumers, constraintsCh) // Remove the consumer and close the channel when the client disconnects @@ -2882,7 +2881,7 @@ func (api *RelayAPI) handleSubscribeConstraints(w http.ResponseWriter, req *http // Client disconnected return case constraint := <-constraintsCh: - constraintJSON, err := constraint.MarshalJSON() + constraintJSON, err := json.Marshal(constraint) api.log.Infof("New constraint received: %s", constraint) if err != nil { diff --git a/mev-boost-relay/services/api/service_test.go b/mev-boost-relay/services/api/service_test.go index e93007ca..6606ccb2 100644 --- a/mev-boost-relay/services/api/service_test.go +++ b/mev-boost-relay/services/api/service_test.go @@ -304,9 +304,9 @@ func TestSubmitConstraints(t *testing.T) { require.NoError(t, err) proposerPublicKey, err := utils.BlsPublicKeyToPublicKey(proposerPublicKeyEC) require.NoError(t, err) - proposerIndex := uint64(1) + validatorIndex := uint64(1) mockValidatorEntry := 
beaconclient.ValidatorResponseEntry{ - Index: proposerIndex, Balance: "1000000", Validator: beaconclient.ValidatorResponseValidatorData{Pubkey: proposerPublicKey.String()}, + Index: validatorIndex, Balance: "1000000", Validator: beaconclient.ValidatorResponseValidatorData{Pubkey: proposerPublicKey.String()}, } // Update beacon client, create MultiBeaconClient and refresh validators in the datastore @@ -321,41 +321,50 @@ func TestSubmitConstraints(t *testing.T) { // request path path := "/eth/v1/builder/constraints" - txHash := _HexToHash("0xba40436abdc8adc037e2c92ea1099a5849053510c3911037ff663085ce44bc49") - rawTx := _HexToBytes("0x02f871018304a5758085025ff11caf82565f94388c818ca8b9251b393131c08a736a67ccb1929787a41bb7ee22b41380c001a0c8630f734aba7acb4275a8f3b0ce831cf0c7c487fd49ee7bcca26ac622a28939a04c3745096fa0130a188fa249289fd9e60f9d6360854820dba22ae779ea6f573f") + // txHash := _HexToHash("0xba40436abdc8adc037e2c92ea1099a5849053510c3911037ff663085ce44bc49") + tx := _HexToBytes("0x02f871018304a5758085025ff11caf82565f94388c818ca8b9251b393131c08a736a67ccb1929787a41bb7ee22b41380c001a0c8630f734aba7acb4275a8f3b0ce831cf0c7c487fd49ee7bcca26ac622a28939a04c3745096fa0130a188fa249289fd9e60f9d6360854820dba22ae779ea6f573f") - // Build the constraint - constraintSubmission := ConstraintSubmission{ - Slot: slot, - TxHash: txHash, - RawTx: rawTx, + constraintMessage := &ConstraintsMessage{ + ValidatorIndex: validatorIndex, + Slot: slot, + Constraints: []*Constraint{{ + Tx: tx, + Index: nil, + }}, } - constraintSubmissionJSON, err := constraintSubmission.MarshalJSON() + + constraintMessageSSZ, err := constraintMessage.MarshalSSZ() require.NoError(t, err) - signatureEC := bls.Sign(proposerSecretKeyEC, constraintSubmissionJSON) + signatureEC := bls.Sign(proposerSecretKeyEC, constraintMessageSSZ) constraintSignature := phase0.BLSSignature(bls.SignatureToBytes(signatureEC)[:]) - signedConstraintSubmission := SignedConstraintSubmission{Message: &constraintSubmission, Signature: constraintSignature, ProposerIndex: proposerIndex} - payload := []*SignedConstraintSubmission{&signedConstraintSubmission} + + // Build the constraint + signedConstraints := SignedConstraints{ + Message: constraintMessage, + Signature: constraintSignature, + } + + payload := []*SignedConstraints{&signedConstraints} t.Run("Constraints sent", func(t *testing.T) { - ch := make(chan *ConstraintSubmission, 256) - backend.relay.constraintsConsumers = []chan *ConstraintSubmission{ch} + ch := make(chan *SignedConstraints, 256) + backend.relay.constraintsConsumers = []chan *SignedConstraints{ch} rr := backend.request(http.MethodPost, path, payload) require.Equal(t, http.StatusOK, rr.Code) constraintCache := backend.relay.constraints - slotConstraints, _ := constraintCache.Get(slot) - require.NotNil(t, slotConstraints) - expected := (*slotConstraints)[txHash] - actual := Constraint{RawTx: constraintSubmission.RawTx} + actuals, _ := constraintCache.Get(slot) + require.NotNil(t, actuals) + actual := (*actuals)[0] actualFromCh := <-backend.relay.constraintsConsumers[0] - actualConstraintFromCh := Constraint{RawTx: actualFromCh.RawTx} - require.Equal(t, expected, &actual, actualConstraintFromCh) + expected := signedConstraints + + require.Equal(t, expected.String(), actual.String(), actualFromCh.String()) }) t.Run("Empty constraint list", func(t *testing.T) { - rr := backend.request(http.MethodPost, path, []SignedConstraintSubmission{}) + rr := backend.request(http.MethodPost, path, []*SignedConstraints{}) require.Equal(t, http.StatusBadRequest, 
rr.Code) }) } @@ -444,9 +453,9 @@ func TestSubscribeToConstraints(t *testing.T) { // Now we can safely send the constraints, and we should get a response // in the HTTP request defined in the goroutine above - backend.relay.constraintsConsumers[0] <- &ConstraintSubmission{} + backend.relay.constraintsConsumers[0] <- &SignedConstraints{} time.Sleep(500 * time.Millisecond) - backend.relay.constraintsConsumers[0] <- &ConstraintSubmission{} + backend.relay.constraintsConsumers[0] <- &SignedConstraints{} // Wait for the HTTP request goroutine to process the constraints time.Sleep(2 * time.Second) diff --git a/mev-boost-relay/services/api/transaction_ssz.go b/mev-boost-relay/services/api/transaction_ssz.go index 2d98f450..02192477 100644 --- a/mev-boost-relay/services/api/transaction_ssz.go +++ b/mev-boost-relay/services/api/transaction_ssz.go @@ -2,13 +2,14 @@ package api import ( ssz "github.com/ferranbt/fastssz" + "github.com/flashbots/mev-boost-relay/common" ) // MaxBytesPerTransaction is the maximum length in bytes of a raw RLP-encoded transaction var MaxBytesPerTransaction uint64 = 1_073_741_824 // 2**30 -// Transaction is a wrapper type of byte slice to implement the ssz.HashRoot interface -type Transaction []byte +// Transaction is a wrapper type of `common.HexBytes` to implement the ssz.HashRoot interface +type Transaction common.HexBytes // HashTreeRoot calculates the hash tree root of the transaction, which // is a list of basic types (byte). @@ -50,3 +51,15 @@ func (tx *Transaction) GetTree() (*ssz.Node, error) { tx.HashTreeRootWith(w) return w.Node(), nil } + +func (tx Transaction) MarshalJSON() ([]byte, error) { + return common.HexBytes(tx).MarshalJSON() +} + +func (tx *Transaction) UnmarshalJSON(buf []byte) error { + return (*common.HexBytes)(tx).UnmarshalJSON(buf) +} + +func (tx Transaction) String() string { + return JSONStringify(tx) +} diff --git a/mev-boost-relay/services/api/utils.go b/mev-boost-relay/services/api/utils.go index 91787cfb..c717a61f 100644 --- a/mev-boost-relay/services/api/utils.go +++ b/mev-boost-relay/services/api/utils.go @@ -203,3 +203,11 @@ func validateConstraintSubscriptionAuth(auth string, headSlot uint64) (phase0.BL } return authData.PublicKey, nil } + +func JSONStringify[T any](obj T) string { + out, err := json.Marshal(obj) + if err != nil { + return fmt.Sprintf("Error while marshalling: %v", err) + } + return string(out) +}
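
Tying the pieces together: the leaf checked by `verifyConstraintProofs` (see `proofs.go` above) is the SSZ hash tree root of the raw transaction, computed via the `Transaction` wrapper from `transaction_ssz.go`. A minimal sketch of a single-proof check follows, assuming the proof is the `common.SerializedMerkleProof` converter used in `proofs.go`; it is illustrative, not an addition to the PR.

```go
// Sketch: verify one inclusion proof against the payload's transactions root.
func verifyOneProofSketch(transactionsRoot phase0.Root, rawTx Transaction, merkleProof *common.SerializedMerkleProof) (bool, error) {
	// The leaf is the hash tree root of the raw (RLP-encoded) transaction.
	leaf, err := rawTx.HashTreeRoot()
	if err != nil {
		return false, err
	}
	// ToFastSszProof plugs the leaf into the serialized proof, which fastssz
	// then checks against the transactions root.
	sszProof := merkleProof.ToFastSszProof(leaf[:])
	return fastSsz.VerifyProof(transactionsRoot[:], sszProof)
}
```
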