diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go
index 0e6c958..488d1e4 100644
--- a/encoding/codecv0_types.go
+++ b/encoding/codecv0_types.go
@@ -283,3 +283,8 @@ func (b *daBatchV0) SkippedL1MessageBitmap() []byte {
 func (b *daBatchV0) DataHash() common.Hash {
 	return b.dataHash
 }
+
+// ChallengeDigest returns the challenge digest of the DABatch.
+func (b *daBatchV0) ChallengeDigest() common.Hash {
+	return common.Hash{}
+}
diff --git a/encoding/codecv2.go b/encoding/codecv2.go
index 0a7b297..fe2d338 100644
--- a/encoding/codecv2.go
+++ b/encoding/codecv2.go
@@ -68,7 +68,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
+	blob, blobVersionedHash, z, _, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
 	if err != nil {
 		return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err)
 	}
@@ -95,7 +95,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
 }
 
 // constructBlobPayload constructs the 4844 blob payload.
-func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
+func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, common.Hash, error) {
 	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
 	metadataLength := 2 + maxNumChunksPerBatch*4
@@ -126,7 +126,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 			// encode L2 txs into blob payload
 			rlpTxData, err := convertTxDataToRLPEncoding(tx)
 			if err != nil {
-				return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
+				return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
 			}
 			batchBytes = append(batchBytes, rlpTxData...)
 		}
@@ -156,7 +156,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 	// blobBytes represents the compressed blob payload (batchBytes)
 	blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
 	if err != nil {
-		return nil, common.Hash{}, nil, nil, err
+		return nil, common.Hash{}, nil, nil, common.Hash{}, err
 	}
 
 	// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
@@ -164,25 +164,25 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 		// Check compressed data compatibility.
 		if err = checkCompressedDataCompatibility(blobBytes); err != nil {
 			log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
-			return nil, common.Hash{}, nil, nil, err
+			return nil, common.Hash{}, nil, nil, common.Hash{}, err
 		}
 	}
 
 	if len(blobBytes) > maxEffectiveBlobBytes {
 		log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
-		return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size")
+		return nil, common.Hash{}, nil, nil, common.Hash{}, errors.New("Blob payload exceeds maximum size")
 	}
 
 	// convert raw data to BLSFieldElements
 	blob, err := makeBlobCanonical(blobBytes)
 	if err != nil {
-		return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
+		return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
 	}
 
 	// compute blob versioned hash
 	c, err := kzg4844.BlobToCommitment(blob)
 	if err != nil {
-		return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err)
+		return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
 	}
 	blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
@@ -197,12 +197,12 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 	// the challenge point z
 	var z kzg4844.Point
 	if len(pointBytes) > kzgPointByteSize {
-		return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
+		return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
 	}
 	start := kzgPointByteSize - len(pointBytes)
 	copy(z[start:], pointBytes)
 
-	return blob, blobVersionedHash, &z, blobBytes, nil
+	return blob, blobVersionedHash, &z, blobBytes, challengeDigest, nil
 }
 
 // NewDABatchFromBytes decodes the given byte slice into a DABatch.
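The codecv2 change is mechanical: `constructBlobPayload` gains a `common.Hash` return slot for the challenge digest it already computes internally, and every early return fills it with the zero hash. For reference, the reduction that the hunk above leaves in place maps that digest to the KZG challenge point. A minimal sketch of that reduction, assuming the BLS12-381 scalar field modulus used by EIP-4844 (the helper name is illustrative, not part of this diff):

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/common"
	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
)

// blsModulus is the BLS12-381 scalar field modulus used by EIP-4844.
var blsModulus, _ = new(big.Int).SetString("52435875175126190479447740508185965837690552500527637822603658699938581184513", 10)

// challengeToPoint reduces a 32-byte challenge digest to the challenge point:
// z = challengeDigest % BLS_MODULUS, big-endian and left-padded to 32 bytes,
// mirroring the copy(z[start:], pointBytes) logic in constructBlobPayload.
func challengeToPoint(challengeDigest common.Hash) kzg4844.Point {
	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus)
	pointBytes := pointBigInt.Bytes()

	var z kzg4844.Point
	copy(z[len(z)-len(pointBytes):], pointBytes) // left-pad with zero bytes
	return z
}

func main() {
	fmt.Printf("%x\n", challengeToPoint(common.HexToHash("0x01")))
}
```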
diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go
index 3ffa932..1338260 100644
--- a/encoding/codecv2_test.go
+++ b/encoding/codecv2_test.go
@@ -1082,7 +1082,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
 			return nil
 		})
 
-		blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch())
+		blob, blobVersionedHash, z, _, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch())
 		require.NoError(t, err)
 		actualZ := hex.EncodeToString(z[:])
 		assert.Equal(t, tc.expectedz, actualZ)
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
index 6127b66..44d4664 100644
--- a/encoding/codecv3.go
+++ b/encoding/codecv3.go
@@ -46,7 +46,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
+	blob, blobVersionedHash, z, blobBytes, challengeDigest, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
 	if err != nil {
 		return nil, err
 	}
@@ -72,6 +72,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
 		blob,      // blob
 		z,         // z
 		blobBytes, // blobBytes
+		challengeDigest, // challengeDigest
 	)
 }
@@ -95,10 +96,11 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) {
 		common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]),           // dataHash
 		common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash
 		common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]),  // blobVersionedHash
-		nil, // skippedL1MessageBitmap
-		nil, // blob
-		nil, // z
-		nil, // blobBytes
+		nil,           // skippedL1MessageBitmap
+		nil,           // blob
+		nil,           // z
+		nil,           // blobBytes
+		common.Hash{}, // challengeDigest
 		[2]common.Hash{ // blobDataProof
 			common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]),
 			common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]),
diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go
index 03eaf8b..57f519b 100644
--- a/encoding/codecv3_test.go
+++ b/encoding/codecv3_test.go
@@ -1273,7 +1273,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) {
 			return nil
 		})
 
-		blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch())
+		blob, blobVersionedHash, z, _, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch())
 		require.NoError(t, err)
 		actualZ := hex.EncodeToString(z[:])
 		assert.Equal(t, tc.expectedz, actualZ)
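Note that `NewDABatchFromBytes` deliberately passes `common.Hash{}`: the encoded batch header carries no blob, so the digest cannot be recovered during decoding. A sketch of the resulting caller-visible behavior, assuming the package's `CodecFromVersion` entry point and some encoded header `data` (the snippet is illustrative, not from this diff):

```go
codec, err := encoding.CodecFromVersion(encoding.CodecV3)
if err != nil {
	return err
}
batch, err := codec.NewDABatchFromBytes(data)
if err != nil {
	return err
}
// A batch decoded from its header has no blob context; callers should treat
// the zero hash from ChallengeDigest() as "not available", not as a real digest.
if batch.ChallengeDigest() == (common.Hash{}) {
	log.Info("challenge digest unavailable for decoded batch")
}
```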
diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go
index 43f8acf..8e30128 100644
--- a/encoding/codecv3_types.go
+++ b/encoding/codecv3_types.go
@@ -22,12 +22,13 @@ type daBatchV3 struct {
 	blob      *kzg4844.Blob
 	z         *kzg4844.Point
 	blobBytes []byte
+	challengeDigest common.Hash
 }
 
 // newDABatchV3 is a constructor for daBatchV3 that calls blobDataProofForPICircuit internally.
 func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64,
 	dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob,
-	z *kzg4844.Point, blobBytes []byte,
+	z *kzg4844.Point, blobBytes []byte, challengeDigest common.Hash,
 ) (*daBatchV3, error) {
 	daBatch := &daBatchV3{
 		daBatchV0: daBatchV0{
@@ -44,6 +45,7 @@ func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1Mess
 		blob:      blob,
 		z:         z,
 		blobBytes: blobBytes,
+		challengeDigest: challengeDigest,
 	}
 
 	proof, err := daBatch.blobDataProofForPICircuit()
@@ -59,7 +61,7 @@ func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1Mess
 // newDABatchV3WithProof is a constructor for daBatchV3 that allows directly passing blobDataProof.
 func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64,
 	dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte,
-	blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash,
+	blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, challengeDigest common.Hash, blobDataProof [2]common.Hash,
 ) *daBatchV3 {
 	return &daBatchV3{
 		daBatchV0: daBatchV0{
@@ -76,6 +78,7 @@ func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, to
 		blob:      blob,
 		z:         z,
 		blobBytes: blobBytes,
+		challengeDigest: challengeDigest,
 		blobDataProof:   blobDataProof, // Set blobDataProof directly
 	}
 }
@@ -204,3 +207,8 @@ func (b *daBatchV3) SkippedL1MessageBitmap() []byte {
 func (b *daBatchV3) DataHash() common.Hash {
 	return b.dataHash
 }
+
+// ChallengeDigest returns the challenge digest of the DABatch.
+func (b *daBatchV3) ChallengeDigest() common.Hash {
+	return b.challengeDigest
+}
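With the digest now stored on `daBatchV3`, the proof path is unchanged: the constructor still derives the blob data proof via `blobDataProofForPICircuit`. A hedged sketch of how such a point-evaluation proof is produced and sanity-checked with the kzg4844 helpers this diff already uses, assuming `blob` and `z` as returned by `constructBlobPayload` and the pointer-taking signatures of the scroll-tech/go-ethereum fork:

```go
// Commit to the blob, open it at the challenge point z, and verify locally
// what the EIP-4844 point-evaluation precompile would verify on L1.
commitment, err := kzg4844.BlobToCommitment(blob)
if err != nil {
	return err
}
proof, claim, err := kzg4844.ComputeProof(blob, z) // claim is y = p(z)
if err != nil {
	return err
}
if err := kzg4844.VerifyProof(commitment, z, claim, proof); err != nil {
	return fmt.Errorf("blob proof self-check failed: %w", err)
}
```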
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index fb0cc64..8ab6d20 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -80,7 +80,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression)
+	blob, blobVersionedHash, z, blobBytes, challengeDigest, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression)
 	if err != nil {
 		return nil, err
 	}
@@ -106,6 +106,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
 		blob,      // blob
 		z,         // z
 		blobBytes, // blobBytes
+		challengeDigest, // challengeDigest
 	)
 }
@@ -129,10 +130,11 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) {
 		common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]),           // dataHash
 		common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash
 		common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]),  // blobVersionedHash
-		nil, // skippedL1MessageBitmap
-		nil, // blob
-		nil, // z
-		nil, // blobBytes
+		nil,           // skippedL1MessageBitmap
+		nil,           // blob
+		nil,           // z
+		nil,           // blobBytes
+		common.Hash{}, // challengeDigest
 		[2]common.Hash{ // blobDataProof
 			common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]),
 			common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]),
@@ -141,7 +143,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) {
 }
 
 // constructBlobPayload constructs the 4844 blob payload.
-func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
+func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, common.Hash, error) {
 	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
 	metadataLength := 2 + maxNumChunksPerBatch*4
@@ -172,7 +174,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 			// encode L2 txs into blob payload
 			rlpTxData, err := convertTxDataToRLPEncoding(tx)
 			if err != nil {
-				return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
+				return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
 			}
 			batchBytes = append(batchBytes, rlpTxData...)
 		}
@@ -205,12 +207,12 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 		var err error
 		blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes)
 		if err != nil {
-			return nil, common.Hash{}, nil, nil, err
+			return nil, common.Hash{}, nil, nil, common.Hash{}, err
 		}
 		// Check compressed data compatibility.
 		if err = checkCompressedDataCompatibility(blobBytes); err != nil {
 			log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
-			return nil, common.Hash{}, nil, nil, err
+			return nil, common.Hash{}, nil, nil, common.Hash{}, err
 		}
 		blobBytes = append([]byte{1}, blobBytes...)
 	} else {
@@ -219,19 +221,19 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 
 	if len(blobBytes) > maxEffectiveBlobBytes {
 		log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
-		return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size")
+		return nil, common.Hash{}, nil, nil, common.Hash{}, errors.New("Blob payload exceeds maximum size")
 	}
 
 	// convert raw data to BLSFieldElements
 	blob, err := makeBlobCanonical(blobBytes)
 	if err != nil {
-		return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
+		return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
 	}
 
 	// compute blob versioned hash
 	c, err := kzg4844.BlobToCommitment(blob)
 	if err != nil {
-		return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err)
+		return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
 	}
 	blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
@@ -246,12 +248,12 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 	// the challenge point z
 	var z kzg4844.Point
 	if len(pointBytes) > kzgPointByteSize {
-		return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
+		return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
 	}
 	start := kzgPointByteSize - len(pointBytes)
 	copy(z[start:], pointBytes)
 
-	return blob, blobVersionedHash, &z, blobBytes, nil
+	return blob, blobVersionedHash, &z, blobBytes, challengeDigest, nil
 }
 
 func (d *DACodecV4) estimateL1CommitBatchSizeAndBlobSize(chunks []*Chunk) (uint64, uint64, error) {
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
index 576d6c9..45ba6b2 100644
--- a/encoding/codecv4_test.go
+++ b/encoding/codecv4_test.go
@@ -1300,7 +1300,7 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) {
 			return nil
 		})
 
-		blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */)
+		blob, blobVersionedHash, z, _, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */)
 		require.NoError(t, err)
 		actualZ := hex.EncodeToString(z[:])
 		assert.Equal(t, tc.expectedz, actualZ)
@@ -1463,7 +1463,7 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) {
 			return nil
 		})
 
-		blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */)
+		blob, blobVersionedHash, z, _, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */)
 		require.NoError(t, err)
 		actualZ := hex.EncodeToString(z[:])
 		assert.Equal(t, tc.expectedz, actualZ)
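codecv4's envelope prefixes the payload with a one-byte compression flag — the `append([]byte{1}, blobBytes...)` in the compressed branch above; the uncompressed branch, elided by the hunk, presumably prefixes `0`. A minimal reader-side sketch under that assumption; `decompressScrollBatchBytes` is a hypothetical inverse of `zstd.CompressScrollBatchBytes`, not a function in this diff:

```go
// decodeEnvelopeV4 splits a v4 blob envelope into its compression flag and
// payload, branching on the leading flag byte.
func decodeEnvelopeV4(envelope []byte) ([]byte, error) {
	if len(envelope) == 0 {
		return nil, errors.New("empty envelope")
	}
	switch envelope[0] {
	case 0: // raw batch bytes
		return envelope[1:], nil
	case 1: // zstd-compressed batch bytes
		return decompressScrollBatchBytes(envelope[1:]) // hypothetical helper
	default:
		return nil, fmt.Errorf("unknown compression flag: %d", envelope[0])
	}
}
```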
log.Error("ConstructBlob: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, fmt.Errorf("blob exceeds maximum size: got %d, allowed %d", len(blobBytes), maxEffectiveBlobBytes) + return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("blob exceeds maximum size: got %d, allowed %d", len(blobBytes), maxEffectiveBlobBytes) } // convert raw data to BLSFieldElements blob, err := makeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) + return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) } // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment: %w", err) + return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err) } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) - return blob, blobVersionedHash, blobBytes, nil + // compute challenge digest for codecv7, different from previous versions, + // the blob bytes are padded to the max effective blob size, which is 131072 / 32 * 31 due to the blob encoding + paddedBlobBytes := make([]byte, maxEffectiveBlobBytes) + copy(paddedBlobBytes, blobBytes) + + challengeDigest := crypto.Keccak256Hash(crypto.Keccak256(paddedBlobBytes), blobVersionedHash[:]) + + return blob, blobVersionedHash, blobBytes, challengeDigest, nil } func (d *DACodecV7) constructBlobPayload(batch *Batch) ([]byte, error) { @@ -166,7 +174,7 @@ func (d *DACodecV7) NewDABatchFromBytes(data []byte) (DABatch, error) { } func (d *DACodecV7) NewDABatchFromParams(batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash) (DABatch, error) { - return newDABatchV7(CodecV7, batchIndex, blobVersionedHash, parentBatchHash, nil, nil) + return newDABatchV7(CodecV7, batchIndex, blobVersionedHash, parentBatchHash, nil, nil, common.Hash{}) } func (d *DACodecV7) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) { diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 7a99238..e7a5497 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -492,7 +492,7 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { blocks = append(blocks, block) } - _, blobVersionedHash, _, err := codecV7.(*DACodecV7).constructBlob(&Batch{Blocks: blocks}) + _, blobVersionedHash, _, _, err := codecV7.(*DACodecV7).constructBlob(&Batch{Blocks: blocks}) if tc.creationErr != "" { require.ErrorContains(t, err, tc.creationErr) return @@ -636,7 +636,7 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { blocks = append(blocks, block) } - _, blobVersionedHash, _, err := codecV7.(*DACodecV7).constructBlob(&Batch{Blocks: blocks}) + _, blobVersionedHash, _, _, err := codecV7.(*DACodecV7).constructBlob(&Batch{Blocks: blocks}) if tc.creationErr != "" { require.ErrorContains(t, err, tc.creationErr) return @@ -822,7 +822,7 @@ func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { PostL1MessageQueueHash: common.Hash{}, Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, }, - expectedBlobDataProof: 
"0a8939c8acbd2bc2fb3ffd61624e55ebe6d0e000958d7505df6863c4062438414cf197faff537d1549b333f4f5d28a1f26123b723c316862e0f285193accead8949b925113ca4f9a8de59f234af023e4da3892e02dd786092699f15bdce7f3be248a075a1f40d82b86e65895b38693b68b08960479a11237c6699777fc97cf53c10f6503a6a8c0ad8eb35b68d6b051506b20ea3a8f41c3058a366c71fb7c1790", + expectedBlobDataProof: "2f1b3db4850bea780744479623d98dec2f5bd243e775a9e7667290136a53407454ae231942e31d6ba3430ffcb0bc306d01720f51d20f83aa40b18bb400c0dabf949b925113ca4f9a8de59f234af023e4da3892e02dd786092699f15bdce7f3be248a075a1f40d82b86e65895b38693b6948eafad556bbcde94b900939d9130138a5c4628b898196fbb948c51d62a8a1741d013eec12c4df8ec69386dad33b7db", }, { name: "Batch with 1 block, blocktrace 03", @@ -833,7 +833,7 @@ func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { PostL1MessageQueueHash: common.Hash{}, Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_03.json")}, }, - expectedBlobDataProof: "05a0e06b0cc573a726a3a3a48ee7b7014480968bd4ec9848effb7d0af33d4127589fc8cc458c673e174455d68d2c2c31847ad09b8805deb61cbef48505a34d88841ff44ffeeb9dc073ef133be9a34cc796babdfbd2f4d5785faf18b96558918e1fe5193d78e2611acf4671888a01a0fc89dde18bef6ab54c7af95df8e3016f0c930ca5f4967de08c6b20c52005acf1dc248eace2ff0a98a89c840bfe15b1594e", + expectedBlobDataProof: "67de386f8b4047b74b4cfe084c3a5f6679cfbf93c7bf0b6b9bd1c73e5321b48d5c4306c7ce75456d3c1c7ef3ad44d1b45baf280617fe1a1a2322a4b23d340c0c841ff44ffeeb9dc073ef133be9a34cc796babdfbd2f4d5785faf18b96558918e1fe5193d78e2611acf4671888a01a0fcb5c0f6130730dbe5f516bbf91c580872999b31f51c5c8856df44f83b341410d89b76cda317ab3ffef76b5b4adefb382f", }, { name: "Batch with 1 block, blocktrace 04", @@ -844,7 +844,7 @@ func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { PostL1MessageQueueHash: common.HexToHash("0x6250cf03e7f922eefe450e9d4234ec56a1502066cd55eff22939df6100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, }, - expectedBlobDataProof: "1c8917a0f90db3a2370fd18528d1cc9146340ef5cab7511786e212685c0ecfb656d871474ea7fd56a454b4042222240bf4b2fa15ab651cf0cd0b2bed9a9c9271ab3f7d6468190f56f55aca9802683ee6b9cada6fead43bb3cedbb132bcf08a27fcff326a0bb8599a89a57facbbcb49f5a8fa213e77c56332f996e020fed17cf2e607d015b997a9ad1cb993efff674cd8810c00a7539a771feb6fb5b2d41c2512", + expectedBlobDataProof: "0bc3e60c09957e159865dddcb43437b5cf75335c2d768c366858686bbf3908d44dc27f678f5b2721d510e02e5d08e7c396d0daadced06a70d4477bdc284c328eab3f7d6468190f56f55aca9802683ee6b9cada6fead43bb3cedbb132bcf08a27fcff326a0bb8599a89a57facbbcb49f5a718ae5c501927c696ae3f80765cd90c3f42deed906ef7a4f408afd8ab3764075090d49bf6646ed81bc073014d284d40", }, { name: "Batch with 1 block, blocktrace 05", @@ -855,7 +855,7 @@ func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { PostL1MessageQueueHash: common.HexToHash("0xc31c3ca9a880b80c4e7fcb88844a5e21433bd2801bdd504e1ca4aed900000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, }, - expectedBlobDataProof: "21c2fc4f348de240738bec7591ef72586db52feb7fca79f4d86c87e2b68efa9f1a3bf56b3991eb2e31347054ff227759779acec5ff78c3285c4abb09f2e785bd8d724b0c40745df1e30d6609899b63d88015110bd0f7ca4c9bee0dda327f8ce038e8d0b1179838086799d3c33ce31766afcf23fb52de7757c16a7766f2dc20179d832614bb070431ad5b90fe5b393d34423bf3291373b6072e05c46bc519a752", + expectedBlobDataProof: 
"49ea320f3941ade70aa9bbb0a583e8d1f1f84b559f48214a98078dc4aa38556f607b2a2499ed718753d9dfbfcd5e35bb74231645b6360d2129f1bf7d772deff18d724b0c40745df1e30d6609899b63d88015110bd0f7ca4c9bee0dda327f8ce038e8d0b1179838086799d3c33ce3176685db53d5c95bc08df588acbadd8265801af644f145022e52803688058e3a10ebcc615e5836f62c40e997ba1cfa14e103", }, { name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", @@ -866,7 +866,7 @@ func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { PostL1MessageQueueHash: common.HexToHash("0x20f1c72064552d63fb7e1352b7815a9f8231a028220bf63d27b24bec00000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobDataProof: "0b2f1a222f892d9114f3218ce3e5d1a7ba5f043960eff378250e1fa8d649bd076f7ff992b3f030a568543585a9d20bd8ede981dc6901ece26e273b1217da07f4852da1ea424859a212ac35d7d2262ca380c4bc017b20a01b00786a580916b48e763e3ae5c59eeac4d121db442efc7763b3dca263a31bdb7f27ab0a59e8d80566120c8a8d92e4b22efeed5b1863349da44c5103b1420c45598a74cd7cc8d788df", + expectedBlobDataProof: "12b99c3254754f37a581f349ef5a121040c531beda289730e2d21f18e5f3b4ea16705a0e98c1819fdedee976064c0ec1e7f52d5177a154e3c2717006c062c32d852da1ea424859a212ac35d7d2262ca380c4bc017b20a01b00786a580916b48e763e3ae5c59eeac4d121db442efc7763a7a24ec387b54f73d1f02e6feea8468890845df9ca19ca63e219d30d9e5bd08a7039eb1cee399f8129355f3a8d4c4e62", }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", @@ -877,7 +877,7 @@ func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { PostL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, - expectedBlobDataProof: "04ca4fb500d52948a622671911cdfc4856b5d169a0a0aed5ff19dc2be2a4eb7f4665316bafd3bf33b8e1df624dbfbb1df762aa65a41c880d38b4e7d734a098c6a3e23c97184774ae69247dbec30060787f1ba97472bb41184b768d9180e860fc4ee91770a4236f224f01dcffb443c259a273b07de848a5db106f6fa7558e26011637c0851e047db4f12c26132d8a0355a3745f34b53ceadb6eb5f368d9ddfef0", + expectedBlobDataProof: "5e4a5a196a5d27be5b2d1a1a71c4b8fa7bfb97a4686ac7bd943a657088c1f05e672f68b9fcfddf6b88bf61a54c8865475937a73b345af77cde1c4a425194f341a3e23c97184774ae69247dbec30060787f1ba97472bb41184b768d9180e860fc4ee91770a4236f224f01dcffb443c25994dc6b8c8e7e4cd23c42dfc095ad761c80e414dc1f8e7f36732f9e0ba941d5d009d9589255229d6153137e3c6882fa5d", }, } diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index a1fb231..8e1a86b 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -91,11 +91,12 @@ type daBatchV7 struct { blobVersionedHash common.Hash parentBatchHash common.Hash - blob *kzg4844.Blob - blobBytes []byte + blob *kzg4844.Blob + blobBytes []byte + challengeDigest common.Hash } -func newDABatchV7(version CodecVersion, batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV7, error) { +func newDABatchV7(version CodecVersion, batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash, blob *kzg4844.Blob, blobBytes []byte, challengeDigest common.Hash) (*daBatchV7, error) { daBatch := &daBatchV7{ version: version, batchIndex: batchIndex, @@ -103,6 +104,7 @@ func newDABatchV7(version 
diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go
index a1fb231..8e1a86b 100644
--- a/encoding/codecv7_types.go
+++ b/encoding/codecv7_types.go
@@ -91,11 +91,12 @@ type daBatchV7 struct {
 	blobVersionedHash common.Hash
 	parentBatchHash   common.Hash
 
-	blob      *kzg4844.Blob
-	blobBytes []byte
+	blob            *kzg4844.Blob
+	blobBytes       []byte
+	challengeDigest common.Hash
 }
 
-func newDABatchV7(version CodecVersion, batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV7, error) {
+func newDABatchV7(version CodecVersion, batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash, blob *kzg4844.Blob, blobBytes []byte, challengeDigest common.Hash) (*daBatchV7, error) {
 	daBatch := &daBatchV7{
 		version:           version,
 		batchIndex:        batchIndex,
@@ -103,6 +104,7 @@ func newDABatchV7(version CodecVersion, batchIndex uint64, blobVersionedHash, pa
 		parentBatchHash:   parentBatchHash,
 		blob:              blob,
 		blobBytes:         blobBytes,
+		challengeDigest:   challengeDigest,
 	}
 
 	return daBatch, nil
@@ -118,7 +120,7 @@ func decodeDABatchV7(data []byte) (*daBatchV7, error) {
 	blobVersionedHash := common.BytesToHash(data[daBatchV7OffsetBlobVersionedHash:daBatchV7OffsetParentBatchHash])
 	parentBatchHash := common.BytesToHash(data[daBatchV7OffsetParentBatchHash:daBatchV7EncodedLength])
 
-	return newDABatchV7(version, batchIndex, blobVersionedHash, parentBatchHash, nil, nil)
+	return newDABatchV7(version, batchIndex, blobVersionedHash, parentBatchHash, nil, nil, common.Hash{})
 }
 
 // Encode serializes the dABatchV7 into bytes.
@@ -138,10 +140,8 @@ func (b *daBatchV7) Hash() common.Hash {
 
 // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data.
 func (b *daBatchV7) BlobDataProofForPointEvaluation() ([]byte, error) {
-	challengeDigest := crypto.Keccak256Hash(crypto.Keccak256(b.blobBytes), b.blobVersionedHash.Bytes())
-
 	// z = challengeDigest % BLS_MODULUS
-	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus)
+	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(b.challengeDigest[:]), blsModulus)
 	pointBytes := pointBigInt.Bytes()
 
 	var z kzg4844.Point
@@ -604,3 +604,8 @@ func checkBlocksBatchVSChunksConsistency(batch *Batch) error {
 
 	return nil
 }
+
+// ChallengeDigest returns the challenge digest of the DABatch.
+func (b *daBatchV7) ChallengeDigest() common.Hash {
+	return b.challengeDigest
+}
diff --git a/encoding/da.go b/encoding/da.go
index fa4ed88..ab28fab 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -466,6 +466,8 @@ func checkCompressedDataCompatibility(data []byte) error {
 }
 
 // makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements.
+// Each field element is a 32-byte word holding 31 payload bytes prepended with one zero byte.
+// The kzg4844.Blob is a 131072-byte array, so any space after the payload is left as trailing zero bytes.
 func makeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) {
 	if len(blobBytes) > maxEffectiveBlobBytes {
 		return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), maxEffectiveBlobBytes)
diff --git a/encoding/interfaces.go b/encoding/interfaces.go
index b465c46..a7e6098 100644
--- a/encoding/interfaces.go
+++ b/encoding/interfaces.go
@@ -39,6 +39,7 @@ type DABatch interface {
 	BlobBytes() []byte
 	Version() CodecVersion
 	SkippedL1MessageBitmap() []byte
+	ChallengeDigest() common.Hash
 }
 
 type DABlobPayload interface {
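Finally, `ChallengeDigest()` joins the `DABatch` interface, with v0–v2 batches returning the zero hash. The new doc comment on `makeBlobCanonical` describes the payload-to-blob mapping; a minimal sketch of that mapping, equivalent in spirit to (but not a copy of) the package's implementation:

```go
// toCanonicalBlob writes each 31-byte slice of the payload into bytes 1..31
// of a 32-byte field element, leaving byte 0 zero so every element stays
// below the BLS modulus; the rest of the 4096-element blob remains zero.
func toCanonicalBlob(blobBytes []byte) (*kzg4844.Blob, error) {
	if len(blobBytes) > maxEffectiveBlobBytes { // 4096 * 31 payload bytes
		return nil, fmt.Errorf("oversized payload: %d bytes", len(blobBytes))
	}
	var blob kzg4844.Blob
	for from := 0; from < len(blobBytes); from += 31 {
		to := from + 31
		if to > len(blobBytes) {
			to = len(blobBytes)
		}
		// element index is from/31; skip its first byte when copying
		copy(blob[from/31*32+1:], blobBytes[from:to])
	}
	return &blob, nil
}
```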