Skip to content

fix: codecv7 challenge digest #50

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Apr 1, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions encoding/codecv0_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -283,3 +283,8 @@ func (b *daBatchV0) SkippedL1MessageBitmap() []byte {
// DataHash returns the data hash of the DABatch.
func (b *daBatchV0) DataHash() common.Hash {
	hash := b.dataHash
	return hash
}

// ChallengeDigest returns the challenge digest of the DABatch.
// daBatchV0 predates blob challenge digests, so the zero hash is returned.
func (b *daBatchV0) ChallengeDigest() common.Hash {
	var digest common.Hash
	return digest
}
20 changes: 10 additions & 10 deletions encoding/codecv2.go
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
}

// blob payload
blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
blob, blobVersionedHash, z, _, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
if err != nil {
return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err)
}
Expand All @@ -95,7 +95,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
}

// constructBlobPayload constructs the 4844 blob payload.
func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, common.Hash, error) {
// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
metadataLength := 2 + maxNumChunksPerBatch*4

Expand Down Expand Up @@ -126,7 +126,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
// encode L2 txs into blob payload
rlpTxData, err := convertTxDataToRLPEncoding(tx)
if err != nil {
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
}
batchBytes = append(batchBytes, rlpTxData...)
}
Expand Down Expand Up @@ -156,33 +156,33 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
// blobBytes represents the compressed blob payload (batchBytes)
blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
if err != nil {
return nil, common.Hash{}, nil, nil, err
return nil, common.Hash{}, nil, nil, common.Hash{}, err
}

// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
if len(batchBytes) > minCompressedDataCheckSize {
// Check compressed data compatibility.
if err = checkCompressedDataCompatibility(blobBytes); err != nil {
log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
return nil, common.Hash{}, nil, nil, err
return nil, common.Hash{}, nil, nil, common.Hash{}, err
}
}

if len(blobBytes) > maxEffectiveBlobBytes {
log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size")
return nil, common.Hash{}, nil, nil, common.Hash{}, errors.New("Blob payload exceeds maximum size")
}

// convert raw data to BLSFieldElements
blob, err := makeBlobCanonical(blobBytes)
if err != nil {
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
}

// compute blob versioned hash
c, err := kzg4844.BlobToCommitment(blob)
if err != nil {
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err)
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
}
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)

Expand All @@ -197,12 +197,12 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
// the challenge point z
var z kzg4844.Point
if len(pointBytes) > kzgPointByteSize {
return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
}
start := kzgPointByteSize - len(pointBytes)
copy(z[start:], pointBytes)

return blob, blobVersionedHash, &z, blobBytes, nil
return blob, blobVersionedHash, &z, blobBytes, challengeDigest, nil
}

// NewDABatchFromBytes decodes the given byte slice into a DABatch.
Expand Down
2 changes: 1 addition & 1 deletion encoding/codecv2_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1082,7 +1082,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
return nil
})

blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch())
blob, blobVersionedHash, z, _, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch())
require.NoError(t, err)
actualZ := hex.EncodeToString(z[:])
assert.Equal(t, tc.expectedz, actualZ)
Expand Down
12 changes: 7 additions & 5 deletions encoding/codecv3.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
}

// blob payload
blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
blob, blobVersionedHash, z, blobBytes, challengeDigest, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
if err != nil {
return nil, err
}
Expand All @@ -72,6 +72,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
blob, // blob
z, // z
blobBytes, // blobBytes
challengeDigest, // challengeDigest
)
}

Expand All @@ -95,10 +96,11 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) {
common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]), // dataHash
common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash
common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]), // blobVersionedHash
nil, // skippedL1MessageBitmap
nil, // blob
nil, // z
nil, // blobBytes
nil, // skippedL1MessageBitmap
nil, // blob
nil, // z
nil, // blobBytes
common.Hash{}, // challengeDigest
[2]common.Hash{ // blobDataProof
common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]),
common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]),
Expand Down
2 changes: 1 addition & 1 deletion encoding/codecv3_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1273,7 +1273,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) {
return nil
})

blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch())
blob, blobVersionedHash, z, _, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch())
require.NoError(t, err)
actualZ := hex.EncodeToString(z[:])
assert.Equal(t, tc.expectedz, actualZ)
Expand Down
12 changes: 10 additions & 2 deletions encoding/codecv3_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,13 @@ type daBatchV3 struct {
blob *kzg4844.Blob
z *kzg4844.Point
blobBytes []byte
challengeDigest common.Hash
}

// newDABatchV3 is a constructor for daBatchV3 that calls blobDataProofForPICircuit internally.
func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64,
dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob,
z *kzg4844.Point, blobBytes []byte,
z *kzg4844.Point, blobBytes []byte, challengeDigest common.Hash,
) (*daBatchV3, error) {
daBatch := &daBatchV3{
daBatchV0: daBatchV0{
Expand All @@ -44,6 +45,7 @@ func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1Mess
blob: blob,
z: z,
blobBytes: blobBytes,
challengeDigest: challengeDigest,
}

proof, err := daBatch.blobDataProofForPICircuit()
Expand All @@ -59,7 +61,7 @@ func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1Mess
// newDABatchV3WithProof is a constructor for daBatchV3 that allows directly passing blobDataProof.
func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64,
dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte,
blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash,
blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, challengeDigest common.Hash, blobDataProof [2]common.Hash,
) *daBatchV3 {
return &daBatchV3{
daBatchV0: daBatchV0{
Expand All @@ -76,6 +78,7 @@ func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, to
blob: blob,
z: z,
blobBytes: blobBytes,
challengeDigest: challengeDigest,
blobDataProof: blobDataProof, // Set blobDataProof directly
}
}
Expand Down Expand Up @@ -204,3 +207,8 @@ func (b *daBatchV3) SkippedL1MessageBitmap() []byte {
// DataHash returns the data hash of the DABatch.
func (b *daBatchV3) DataHash() common.Hash {
	hash := b.dataHash
	return hash
}

// ChallengeDigest returns the challenge digest of the DABatch.
func (b *daBatchV3) ChallengeDigest() common.Hash {
	digest := b.challengeDigest
	return digest
}
30 changes: 16 additions & 14 deletions encoding/codecv4.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
}

// blob payload
blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression)
blob, blobVersionedHash, z, blobBytes, challengeDigest, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression)
if err != nil {
return nil, err
}
Expand All @@ -106,6 +106,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
blob, // blob
z, // z
blobBytes, // blobBytes
challengeDigest, // challengeDigest
)
}

Expand All @@ -129,10 +130,11 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) {
common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]), // dataHash
common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash
common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]), // blobVersionedHash
nil, // skippedL1MessageBitmap
nil, // blob
nil, // z
nil, // blobBytes
nil, // skippedL1MessageBitmap
nil, // blob
nil, // z
nil, // blobBytes
common.Hash{}, // challengeDigest
[2]common.Hash{ // blobDataProof
common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]),
common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]),
Expand All @@ -141,7 +143,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) {
}

// constructBlobPayload constructs the 4844 blob payload.
func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, common.Hash, error) {
// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
metadataLength := 2 + maxNumChunksPerBatch*4

Expand Down Expand Up @@ -172,7 +174,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
// encode L2 txs into blob payload
rlpTxData, err := convertTxDataToRLPEncoding(tx)
if err != nil {
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
}
batchBytes = append(batchBytes, rlpTxData...)
}
Expand Down Expand Up @@ -205,12 +207,12 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
var err error
blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes)
if err != nil {
return nil, common.Hash{}, nil, nil, err
return nil, common.Hash{}, nil, nil, common.Hash{}, err
}
// Check compressed data compatibility.
if err = checkCompressedDataCompatibility(blobBytes); err != nil {
log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
return nil, common.Hash{}, nil, nil, err
return nil, common.Hash{}, nil, nil, common.Hash{}, err
}
blobBytes = append([]byte{1}, blobBytes...)
} else {
Expand All @@ -219,19 +221,19 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i

if len(blobBytes) > maxEffectiveBlobBytes {
log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size")
return nil, common.Hash{}, nil, nil, common.Hash{}, errors.New("Blob payload exceeds maximum size")
}

// convert raw data to BLSFieldElements
blob, err := makeBlobCanonical(blobBytes)
if err != nil {
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
}

// compute blob versioned hash
c, err := kzg4844.BlobToCommitment(blob)
if err != nil {
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err)
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
}
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)

Expand All @@ -246,12 +248,12 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
// the challenge point z
var z kzg4844.Point
if len(pointBytes) > kzgPointByteSize {
return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
}
start := kzgPointByteSize - len(pointBytes)
copy(z[start:], pointBytes)

return blob, blobVersionedHash, &z, blobBytes, nil
return blob, blobVersionedHash, &z, blobBytes, challengeDigest, nil
}

func (d *DACodecV4) estimateL1CommitBatchSizeAndBlobSize(chunks []*Chunk) (uint64, uint64, error) {
Expand Down
4 changes: 2 additions & 2 deletions encoding/codecv4_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1300,7 +1300,7 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) {
return nil
})

blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */)
blob, blobVersionedHash, z, _, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */)
require.NoError(t, err)
actualZ := hex.EncodeToString(z[:])
assert.Equal(t, tc.expectedz, actualZ)
Expand Down Expand Up @@ -1463,7 +1463,7 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) {
return nil
})

blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */)
blob, blobVersionedHash, z, _, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */)
require.NoError(t, err)
actualZ := hex.EncodeToString(z[:])
assert.Equal(t, tc.expectedz, actualZ)
Expand Down
28 changes: 18 additions & 10 deletions encoding/codecv7.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import (

"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
"github.com/scroll-tech/go-ethereum/log"

Expand Down Expand Up @@ -80,30 +81,30 @@ func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) {
return nil, fmt.Errorf("failed to check blocks batch vs chunks consistency: %w", err)
}

blob, blobVersionedHash, blobBytes, err := d.constructBlob(batch)
blob, blobVersionedHash, blobBytes, challengeDigest, err := d.constructBlob(batch)
if err != nil {
return nil, fmt.Errorf("failed to construct blob: %w", err)
}

daBatch, err := newDABatchV7(CodecV7, batch.Index, blobVersionedHash, batch.ParentBatchHash, blob, blobBytes)
daBatch, err := newDABatchV7(CodecV7, batch.Index, blobVersionedHash, batch.ParentBatchHash, blob, blobBytes, challengeDigest)
if err != nil {
return nil, fmt.Errorf("failed to construct DABatch: %w", err)
}

return daBatch, nil
}

func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, error) {
func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, common.Hash, error) {
blobBytes := make([]byte, blobEnvelopeV7OffsetPayload)

payloadBytes, err := d.constructBlobPayload(batch)
if err != nil {
return nil, common.Hash{}, nil, fmt.Errorf("failed to construct blob payload: %w", err)
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to construct blob payload: %w", err)
}

compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes)
if err != nil {
return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
}

isCompressedFlag := uint8(0x0)
Expand All @@ -121,23 +122,30 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b

if len(blobBytes) > maxEffectiveBlobBytes {
log.Error("ConstructBlob: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
return nil, common.Hash{}, nil, fmt.Errorf("blob exceeds maximum size: got %d, allowed %d", len(blobBytes), maxEffectiveBlobBytes)
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("blob exceeds maximum size: got %d, allowed %d", len(blobBytes), maxEffectiveBlobBytes)
}

// convert raw data to BLSFieldElements
blob, err := makeBlobCanonical(blobBytes)
if err != nil {
return nil, common.Hash{}, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
}

// compute blob versioned hash
c, err := kzg4844.BlobToCommitment(blob)
if err != nil {
return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment: %w", err)
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
}
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)

return blob, blobVersionedHash, blobBytes, nil
// compute challenge digest for codecv7, different from previous versions,
// the blob bytes are padded to the max effective blob size, which is 131072 / 32 * 31 due to the blob encoding
paddedBlobBytes := make([]byte, maxEffectiveBlobBytes)
copy(paddedBlobBytes, blobBytes)

challengeDigest := crypto.Keccak256Hash(crypto.Keccak256(paddedBlobBytes), blobVersionedHash[:])

return blob, blobVersionedHash, blobBytes, challengeDigest, nil
}

func (d *DACodecV7) constructBlobPayload(batch *Batch) ([]byte, error) {
Expand Down Expand Up @@ -166,7 +174,7 @@ func (d *DACodecV7) NewDABatchFromBytes(data []byte) (DABatch, error) {
}

func (d *DACodecV7) NewDABatchFromParams(batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash) (DABatch, error) {
return newDABatchV7(CodecV7, batchIndex, blobVersionedHash, parentBatchHash, nil, nil)
return newDABatchV7(CodecV7, batchIndex, blobVersionedHash, parentBatchHash, nil, nil, common.Hash{})
}

func (d *DACodecV7) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) {
Expand Down
Loading