Skip to content

Commit 9f9f538

Browse files
authored
fix: codecv7 challenge digest (#50)
* fix: codecv7 challenge digest
* fix unit tests
* update interface
* refactor
* add comments
1 parent 344f2d5 commit 9f9f538

File tree

13 files changed

+93
-60
lines changed

13 files changed

+93
-60
lines changed

encoding/codecv0_types.go

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -283,3 +283,8 @@ func (b *daBatchV0) SkippedL1MessageBitmap() []byte {
283283
func (b *daBatchV0) DataHash() common.Hash {
284284
return b.dataHash
285285
}
286+
287+
// ChallengeDigest returns the challenge digest of the DABatch.
288+
func (b *daBatchV0) ChallengeDigest() common.Hash {
289+
return common.Hash{}
290+
}

encoding/codecv2.go

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
6868
}
6969

7070
// blob payload
71-
blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
71+
blob, blobVersionedHash, z, _, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
7272
if err != nil {
7373
return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err)
7474
}
@@ -95,7 +95,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
9595
}
9696

9797
// constructBlobPayload constructs the 4844 blob payload.
98-
func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
98+
func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, common.Hash, error) {
9999
// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
100100
metadataLength := 2 + maxNumChunksPerBatch*4
101101

@@ -126,7 +126,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
126126
// encode L2 txs into blob payload
127127
rlpTxData, err := convertTxDataToRLPEncoding(tx)
128128
if err != nil {
129-
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
129+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
130130
}
131131
batchBytes = append(batchBytes, rlpTxData...)
132132
}
@@ -156,33 +156,33 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
156156
// blobBytes represents the compressed blob payload (batchBytes)
157157
blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
158158
if err != nil {
159-
return nil, common.Hash{}, nil, nil, err
159+
return nil, common.Hash{}, nil, nil, common.Hash{}, err
160160
}
161161

162162
// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
163163
if len(batchBytes) > minCompressedDataCheckSize {
164164
// Check compressed data compatibility.
165165
if err = checkCompressedDataCompatibility(blobBytes); err != nil {
166166
log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
167-
return nil, common.Hash{}, nil, nil, err
167+
return nil, common.Hash{}, nil, nil, common.Hash{}, err
168168
}
169169
}
170170

171171
if len(blobBytes) > maxEffectiveBlobBytes {
172172
log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
173-
return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size")
173+
return nil, common.Hash{}, nil, nil, common.Hash{}, errors.New("Blob payload exceeds maximum size")
174174
}
175175

176176
// convert raw data to BLSFieldElements
177177
blob, err := makeBlobCanonical(blobBytes)
178178
if err != nil {
179-
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
179+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
180180
}
181181

182182
// compute blob versioned hash
183183
c, err := kzg4844.BlobToCommitment(blob)
184184
if err != nil {
185-
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err)
185+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
186186
}
187187
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
188188

@@ -197,12 +197,12 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
197197
// the challenge point z
198198
var z kzg4844.Point
199199
if len(pointBytes) > kzgPointByteSize {
200-
return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
200+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
201201
}
202202
start := kzgPointByteSize - len(pointBytes)
203203
copy(z[start:], pointBytes)
204204

205-
return blob, blobVersionedHash, &z, blobBytes, nil
205+
return blob, blobVersionedHash, &z, blobBytes, challengeDigest, nil
206206
}
207207

208208
// NewDABatchFromBytes decodes the given byte slice into a DABatch.

encoding/codecv2_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1082,7 +1082,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
10821082
return nil
10831083
})
10841084

1085-
blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch())
1085+
blob, blobVersionedHash, z, _, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch())
10861086
require.NoError(t, err)
10871087
actualZ := hex.EncodeToString(z[:])
10881088
assert.Equal(t, tc.expectedz, actualZ)

encoding/codecv3.go

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
4646
}
4747

4848
// blob payload
49-
blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
49+
blob, blobVersionedHash, z, blobBytes, challengeDigest, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
5050
if err != nil {
5151
return nil, err
5252
}
@@ -72,6 +72,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
7272
blob, // blob
7373
z, // z
7474
blobBytes, // blobBytes
75+
challengeDigest, // challengeDigest
7576
)
7677
}
7778

@@ -95,10 +96,11 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) {
9596
common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]), // dataHash
9697
common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash
9798
common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]), // blobVersionedHash
98-
nil, // skippedL1MessageBitmap
99-
nil, // blob
100-
nil, // z
101-
nil, // blobBytes
99+
nil, // skippedL1MessageBitmap
100+
nil, // blob
101+
nil, // z
102+
nil, // blobBytes
103+
common.Hash{}, // challengeDigest
102104
[2]common.Hash{ // blobDataProof
103105
common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]),
104106
common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]),

encoding/codecv3_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1273,7 +1273,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) {
12731273
return nil
12741274
})
12751275

1276-
blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch())
1276+
blob, blobVersionedHash, z, _, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch())
12771277
require.NoError(t, err)
12781278
actualZ := hex.EncodeToString(z[:])
12791279
assert.Equal(t, tc.expectedz, actualZ)

encoding/codecv3_types.go

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,13 @@ type daBatchV3 struct {
2222
blob *kzg4844.Blob
2323
z *kzg4844.Point
2424
blobBytes []byte
25+
challengeDigest common.Hash
2526
}
2627

2728
// newDABatchV3 is a constructor for daBatchV3 that calls blobDataProofForPICircuit internally.
2829
func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64,
2930
dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob,
30-
z *kzg4844.Point, blobBytes []byte,
31+
z *kzg4844.Point, blobBytes []byte, challengeDigest common.Hash,
3132
) (*daBatchV3, error) {
3233
daBatch := &daBatchV3{
3334
daBatchV0: daBatchV0{
@@ -44,6 +45,7 @@ func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1Mess
4445
blob: blob,
4546
z: z,
4647
blobBytes: blobBytes,
48+
challengeDigest: challengeDigest,
4749
}
4850

4951
proof, err := daBatch.blobDataProofForPICircuit()
@@ -59,7 +61,7 @@ func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1Mess
5961
// newDABatchV3WithProof is a constructor for daBatchV3 that allows directly passing blobDataProof.
6062
func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64,
6163
dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte,
62-
blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash,
64+
blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, challengeDigest common.Hash, blobDataProof [2]common.Hash,
6365
) *daBatchV3 {
6466
return &daBatchV3{
6567
daBatchV0: daBatchV0{
@@ -76,6 +78,7 @@ func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, to
7678
blob: blob,
7779
z: z,
7880
blobBytes: blobBytes,
81+
challengeDigest: challengeDigest,
7982
blobDataProof: blobDataProof, // Set blobDataProof directly
8083
}
8184
}
@@ -204,3 +207,8 @@ func (b *daBatchV3) SkippedL1MessageBitmap() []byte {
204207
func (b *daBatchV3) DataHash() common.Hash {
205208
return b.dataHash
206209
}
210+
211+
// ChallengeDigest returns the challenge digest of the DABatch.
212+
func (b *daBatchV3) ChallengeDigest() common.Hash {
213+
return b.challengeDigest
214+
}

encoding/codecv4.go

Lines changed: 16 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
8080
}
8181

8282
// blob payload
83-
blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression)
83+
blob, blobVersionedHash, z, blobBytes, challengeDigest, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression)
8484
if err != nil {
8585
return nil, err
8686
}
@@ -106,6 +106,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
106106
blob, // blob
107107
z, // z
108108
blobBytes, // blobBytes
109+
challengeDigest, // challengeDigest
109110
)
110111
}
111112

@@ -129,10 +130,11 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) {
129130
common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]), // dataHash
130131
common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash
131132
common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]), // blobVersionedHash
132-
nil, // skippedL1MessageBitmap
133-
nil, // blob
134-
nil, // z
135-
nil, // blobBytes
133+
nil, // skippedL1MessageBitmap
134+
nil, // blob
135+
nil, // z
136+
nil, // blobBytes
137+
common.Hash{}, // challengeDigest
136138
[2]common.Hash{ // blobDataProof
137139
common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]),
138140
common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]),
@@ -141,7 +143,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) {
141143
}
142144

143145
// constructBlobPayload constructs the 4844 blob payload.
144-
func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
146+
func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, common.Hash, error) {
145147
// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
146148
metadataLength := 2 + maxNumChunksPerBatch*4
147149

@@ -172,7 +174,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
172174
// encode L2 txs into blob payload
173175
rlpTxData, err := convertTxDataToRLPEncoding(tx)
174176
if err != nil {
175-
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
177+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
176178
}
177179
batchBytes = append(batchBytes, rlpTxData...)
178180
}
@@ -205,12 +207,12 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
205207
var err error
206208
blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes)
207209
if err != nil {
208-
return nil, common.Hash{}, nil, nil, err
210+
return nil, common.Hash{}, nil, nil, common.Hash{}, err
209211
}
210212
// Check compressed data compatibility.
211213
if err = checkCompressedDataCompatibility(blobBytes); err != nil {
212214
log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
213-
return nil, common.Hash{}, nil, nil, err
215+
return nil, common.Hash{}, nil, nil, common.Hash{}, err
214216
}
215217
blobBytes = append([]byte{1}, blobBytes...)
216218
} else {
@@ -219,19 +221,19 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
219221

220222
if len(blobBytes) > maxEffectiveBlobBytes {
221223
log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
222-
return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size")
224+
return nil, common.Hash{}, nil, nil, common.Hash{}, errors.New("Blob payload exceeds maximum size")
223225
}
224226

225227
// convert raw data to BLSFieldElements
226228
blob, err := makeBlobCanonical(blobBytes)
227229
if err != nil {
228-
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
230+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
229231
}
230232

231233
// compute blob versioned hash
232234
c, err := kzg4844.BlobToCommitment(blob)
233235
if err != nil {
234-
return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err)
236+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
235237
}
236238
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
237239

@@ -246,12 +248,12 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
246248
// the challenge point z
247249
var z kzg4844.Point
248250
if len(pointBytes) > kzgPointByteSize {
249-
return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
251+
return nil, common.Hash{}, nil, nil, common.Hash{}, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes))
250252
}
251253
start := kzgPointByteSize - len(pointBytes)
252254
copy(z[start:], pointBytes)
253255

254-
return blob, blobVersionedHash, &z, blobBytes, nil
256+
return blob, blobVersionedHash, &z, blobBytes, challengeDigest, nil
255257
}
256258

257259
func (d *DACodecV4) estimateL1CommitBatchSizeAndBlobSize(chunks []*Chunk) (uint64, uint64, error) {

encoding/codecv4_test.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1300,7 +1300,7 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) {
13001300
return nil
13011301
})
13021302

1303-
blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */)
1303+
blob, blobVersionedHash, z, _, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */)
13041304
require.NoError(t, err)
13051305
actualZ := hex.EncodeToString(z[:])
13061306
assert.Equal(t, tc.expectedz, actualZ)
@@ -1463,7 +1463,7 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) {
14631463
return nil
14641464
})
14651465

1466-
blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */)
1466+
blob, blobVersionedHash, z, _, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */)
14671467
require.NoError(t, err)
14681468
actualZ := hex.EncodeToString(z[:])
14691469
assert.Equal(t, tc.expectedz, actualZ)

encoding/codecv7.go

Lines changed: 18 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import (
1010

1111
"github.com/scroll-tech/go-ethereum/common"
1212
"github.com/scroll-tech/go-ethereum/core/types"
13+
"github.com/scroll-tech/go-ethereum/crypto"
1314
"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
1415
"github.com/scroll-tech/go-ethereum/log"
1516

@@ -80,30 +81,30 @@ func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) {
8081
return nil, fmt.Errorf("failed to check blocks batch vs chunks consistency: %w", err)
8182
}
8283

83-
blob, blobVersionedHash, blobBytes, err := d.constructBlob(batch)
84+
blob, blobVersionedHash, blobBytes, challengeDigest, err := d.constructBlob(batch)
8485
if err != nil {
8586
return nil, fmt.Errorf("failed to construct blob: %w", err)
8687
}
8788

88-
daBatch, err := newDABatchV7(CodecV7, batch.Index, blobVersionedHash, batch.ParentBatchHash, blob, blobBytes)
89+
daBatch, err := newDABatchV7(CodecV7, batch.Index, blobVersionedHash, batch.ParentBatchHash, blob, blobBytes, challengeDigest)
8990
if err != nil {
9091
return nil, fmt.Errorf("failed to construct DABatch: %w", err)
9192
}
9293

9394
return daBatch, nil
9495
}
9596

96-
func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, error) {
97+
func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, common.Hash, error) {
9798
blobBytes := make([]byte, blobEnvelopeV7OffsetPayload)
9899

99100
payloadBytes, err := d.constructBlobPayload(batch)
100101
if err != nil {
101-
return nil, common.Hash{}, nil, fmt.Errorf("failed to construct blob payload: %w", err)
102+
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to construct blob payload: %w", err)
102103
}
103104

104105
compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes)
105106
if err != nil {
106-
return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
107+
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
107108
}
108109

109110
isCompressedFlag := uint8(0x0)
@@ -121,23 +122,30 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b
121122

122123
if len(blobBytes) > maxEffectiveBlobBytes {
123124
log.Error("ConstructBlob: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
124-
return nil, common.Hash{}, nil, fmt.Errorf("blob exceeds maximum size: got %d, allowed %d", len(blobBytes), maxEffectiveBlobBytes)
125+
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("blob exceeds maximum size: got %d, allowed %d", len(blobBytes), maxEffectiveBlobBytes)
125126
}
126127

127128
// convert raw data to BLSFieldElements
128129
blob, err := makeBlobCanonical(blobBytes)
129130
if err != nil {
130-
return nil, common.Hash{}, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
131+
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err)
131132
}
132133

133134
// compute blob versioned hash
134135
c, err := kzg4844.BlobToCommitment(blob)
135136
if err != nil {
136-
return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment: %w", err)
137+
return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to create blob commitment: %w", err)
137138
}
138139
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
139140

140-
return blob, blobVersionedHash, blobBytes, nil
141+
// compute challenge digest for codecv7, different from previous versions,
142+
// the blob bytes are padded to the max effective blob size, which is 131072 / 32 * 31 due to the blob encoding
143+
paddedBlobBytes := make([]byte, maxEffectiveBlobBytes)
144+
copy(paddedBlobBytes, blobBytes)
145+
146+
challengeDigest := crypto.Keccak256Hash(crypto.Keccak256(paddedBlobBytes), blobVersionedHash[:])
147+
148+
return blob, blobVersionedHash, blobBytes, challengeDigest, nil
141149
}
142150

143151
func (d *DACodecV7) constructBlobPayload(batch *Batch) ([]byte, error) {
@@ -166,7 +174,7 @@ func (d *DACodecV7) NewDABatchFromBytes(data []byte) (DABatch, error) {
166174
}
167175

168176
func (d *DACodecV7) NewDABatchFromParams(batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash) (DABatch, error) {
169-
return newDABatchV7(CodecV7, batchIndex, blobVersionedHash, parentBatchHash, nil, nil)
177+
return newDABatchV7(CodecV7, batchIndex, blobVersionedHash, parentBatchHash, nil, nil, common.Hash{})
170178
}
171179

172180
func (d *DACodecV7) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) {

0 commit comments

Comments (0)