Commit: address PR comments

ganeshvanahalli committed Apr 5, 2024
1 parent 7415458 commit d3ffedc
Showing 5 changed files with 28 additions and 41 deletions.
38 changes: 3 additions & 35 deletions arbstate/daprovider/reader.go
@@ -30,16 +30,8 @@ type Reader interface {
preimageRecorder PreimageRecorder,
validateSeqMsg bool,
) ([]byte, error)

// RecordPreimagesTo takes in preimages map and returns a function that can be used
// In recording (hash,preimage) key value pairs into preimages map, when fetching payload
RecordPreimagesTo(
preimages map[arbutil.PreimageType]map[common.Hash][]byte,
) PreimageRecorder
}

type PreimageRecorder func(key common.Hash, value []byte)

// NewReaderForDAS is generally meant to be only used by nitro.
// DA Providers should implement methods in the Reader interface independently
func NewReaderForDAS(dasReader DASReader) *readerForDAS {
@@ -54,18 +46,6 @@ func (d *readerForDAS) IsValidHeaderByte(headerByte byte) bool {
return IsDASMessageHeaderByte(headerByte)
}

func (d *readerForDAS) RecordPreimagesTo(preimages map[arbutil.PreimageType]map[common.Hash][]byte) PreimageRecorder {
if preimages == nil {
return nil
}
if preimages[arbutil.Keccak256PreimageType] == nil {
preimages[arbutil.Keccak256PreimageType] = make(map[common.Hash][]byte)
}
return func(key common.Hash, value []byte) {
preimages[arbutil.Keccak256PreimageType][key] = value
}
}

func (d *readerForDAS) RecoverPayloadFromBatch(
ctx context.Context,
batchNum uint64,
@@ -149,8 +129,8 @@ func (d *readerForDAS) RecoverPayloadFromBatch(
if preimageRecorder != nil {
if version == 0 {
treeLeaf := dastree.FlatHashToTreeLeaf(dataHash)
preimageRecorder(dataHash, payload)
preimageRecorder(crypto.Keccak256Hash(treeLeaf), treeLeaf)
preimageRecorder(dataHash, payload, arbutil.Keccak256PreimageType)
preimageRecorder(crypto.Keccak256Hash(treeLeaf), treeLeaf, arbutil.Keccak256PreimageType)
} else {
dastree.RecordHash(preimageRecorder, payload)
}
@@ -173,18 +153,6 @@ func (b *readerForBlobReader) IsValidHeaderByte(headerByte byte) bool {
return IsBlobHashesHeaderByte(headerByte)
}

func (b *readerForBlobReader) RecordPreimagesTo(preimages map[arbutil.PreimageType]map[common.Hash][]byte) PreimageRecorder {
if preimages == nil {
return nil
}
if preimages[arbutil.EthVersionedHashPreimageType] == nil {
preimages[arbutil.EthVersionedHashPreimageType] = make(map[common.Hash][]byte)
}
return func(key common.Hash, value []byte) {
preimages[arbutil.EthVersionedHashPreimageType][key] = value
}
}

func (b *readerForBlobReader) RecoverPayloadFromBatch(
ctx context.Context,
batchNum uint64,
@@ -210,7 +178,7 @@ func (b *readerForBlobReader) RecoverPayloadFromBatch(
// Prevent aliasing `blob` when slicing it, as for range loops overwrite the same variable
// Won't be necessary after Go 1.22 with https://go.dev/blog/loopvar-preview
b := blob
preimageRecorder(versionedHashes[i], b[:])
preimageRecorder(versionedHashes[i], b[:], arbutil.EthVersionedHashPreimageType)
}
}
payload, err := blobs.DecodeBlobs(kzgBlobs)
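A minimal, hypothetical sketch of the call-site change shown above in reader.go (the package main wrapper, sample data, and placeholder versioned hash are illustrative only): the recorder closure now receives the preimage type on every call, so a single recorder can serve both the keccak256 tree preimages recorded by readerForDAS and the versioned-hash blob preimages recorded by readerForBlobReader.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
	"github.com/offchainlabs/nitro/arbutil"
)

func main() {
	// One map keyed by preimage type, shared across all data availability readers.
	recorded := make(map[arbutil.PreimageType]map[common.Hash][]byte)
	recorder := func(key common.Hash, value []byte, ty arbutil.PreimageType) {
		if recorded[ty] == nil {
			recorded[ty] = make(map[common.Hash][]byte)
		}
		recorded[ty][key] = value
	}

	// What readerForDAS does for a version-0 certificate: record the payload under its keccak hash.
	payload := []byte("recovered batch payload")
	recorder(crypto.Keccak256Hash(payload), payload, arbutil.Keccak256PreimageType)

	// What readerForBlobReader does: record each blob under its versioned hash (placeholder hash here).
	blob := []byte("pretend this is a blob")
	recorder(common.BytesToHash([]byte{0x01}), blob, arbutil.EthVersionedHashPreimageType)

	fmt.Println(len(recorded[arbutil.Keccak256PreimageType]), len(recorded[arbutil.EthVersionedHashPreimageType]))
}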
18 changes: 17 additions & 1 deletion arbstate/daprovider/util.go
@@ -43,6 +43,22 @@ type BlobReader interface {
Initialize(ctx context.Context) error
}

type PreimageRecorder func(key common.Hash, value []byte, ty arbutil.PreimageType)

// RecordPreimagesTo takes in a preimages map and returns a function that can be used
// for recording (hash, preimage) key-value pairs into the preimages map when fetching payloads through RecoverPayloadFromBatch
func RecordPreimagesTo(preimages map[arbutil.PreimageType]map[common.Hash][]byte) PreimageRecorder {
if preimages == nil {
return nil
}
return func(key common.Hash, value []byte, ty arbutil.PreimageType) {
if preimages[ty] == nil {
preimages[ty] = make(map[common.Hash][]byte)
}
preimages[ty][key] = value
}
}

// DASMessageHeaderFlag indicates that this data is a certificate for the data availability service,
// which will retrieve the full batch data.
const DASMessageHeaderFlag byte = 0x80
@@ -136,7 +152,7 @@ func RecoverPayloadFromDasBatch(
return nil, nil
}
version := cert.Version
recordPreimage := func(key common.Hash, value []byte) {
recordPreimage := func(key common.Hash, value []byte, ty arbutil.PreimageType) {
keccakPreimages[key] = value
}

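A minimal sketch of the new shared helper added in util.go, assuming the imports resolve against the nitro module (the package main wrapper and sample payload are illustrative only): daprovider.RecordPreimagesTo builds one recorder for an entire preimages map, lazily creating the per-type inner maps, and returns nil when no map is supplied, which is why callers keep the `if preimageRecorder != nil` guard.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
	"github.com/offchainlabs/nitro/arbstate/daprovider"
	"github.com/offchainlabs/nitro/arbutil"
)

func main() {
	preimages := make(map[arbutil.PreimageType]map[common.Hash][]byte)
	recorder := daprovider.RecordPreimagesTo(preimages)

	// The inner map for Keccak256PreimageType is created on first use.
	payload := []byte("example payload")
	recorder(crypto.Keccak256Hash(payload), payload, arbutil.Keccak256PreimageType)
	fmt.Println("recorded:", len(preimages[arbutil.Keccak256PreimageType]))

	// With no destination map there is nothing to record, so the recorder is nil
	// and callers skip recording entirely.
	if daprovider.RecordPreimagesTo(nil) == nil {
		fmt.Println("nil map: recording disabled")
	}
}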
7 changes: 4 additions & 3 deletions das/dastree/dastree.go
@@ -9,6 +9,7 @@ import (

"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"github.com/offchainlabs/nitro/arbutil"
"github.com/offchainlabs/nitro/util/arbmath"
)

@@ -26,7 +27,7 @@ type node struct {
// RecordHash chunks the preimage into 64kB bins and generates a recursive hash tree,
// calling the caller-supplied record function for each hash/preimage pair created in
// building the tree structure.
func RecordHash(record func(bytes32, []byte), preimage ...[]byte) bytes32 {
func RecordHash(record func(bytes32, []byte, arbutil.PreimageType), preimage ...[]byte) bytes32 {
// Algorithm
// 1. split the preimage into 64kB bins and double hash them to produce the tree's leaves
// 2. repeatedly hash pairs and their combined length, bubbling up any odd-one's out, to form the root
@@ -48,7 +49,7 @@ func RecordHash(record func(bytes32, []byte), preimage ...[]byte) bytes32 {

keccord := func(value []byte) bytes32 {
hash := crypto.Keccak256Hash(value)
record(hash, value)
record(hash, value, arbutil.Keccak256PreimageType)
return hash
}
prepend := func(before byte, slice []byte) []byte {
@@ -94,7 +95,7 @@ func RecordHash(record func(bytes32, []byte), preimage ...[]byte) bytes32 {

func Hash(preimage ...[]byte) bytes32 {
// Merkleizes without recording anything. All but the validator's DAS will call this
return RecordHash(func(bytes32, []byte) {}, preimage...)
return RecordHash(func(bytes32, []byte, arbutil.PreimageType) {}, preimage...)
}

func HashBytes(preimage ...[]byte) []byte {
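A hypothetical usage sketch of the updated dastree API, assuming bytes32 is an alias of common.Hash (as the unchanged callers in this diff suggest) and that the imports resolve against the nitro module: RecordHash now hands the callback an arbutil.PreimageType alongside each hash/preimage pair, while Hash remains the non-recording variant.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/offchainlabs/nitro/arbutil"
	"github.com/offchainlabs/nitro/das/dastree"
)

func main() {
	preimages := make(map[common.Hash][]byte)
	record := func(hash common.Hash, value []byte, ty arbutil.PreimageType) {
		// RecordHash hashes every node with keccak256, so ty is expected to be
		// arbutil.Keccak256PreimageType for each recorded pair.
		if ty == arbutil.Keccak256PreimageType {
			preimages[hash] = value
		}
	}

	data := []byte("batch data to merkleize")
	root := dastree.RecordHash(record, data)

	// Hash computes the same root without recording any preimages.
	fmt.Println(root == dastree.Hash(data), "recorded nodes:", len(preimages))
}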
3 changes: 2 additions & 1 deletion das/dastree/dastree_test.go
@@ -9,6 +9,7 @@ import (
"testing"

"github.com/ethereum/go-ethereum/crypto"
"github.com/offchainlabs/nitro/arbutil"
"github.com/offchainlabs/nitro/util/colors"
"github.com/offchainlabs/nitro/util/pretty"
"github.com/offchainlabs/nitro/util/testhelpers"
@@ -25,7 +26,7 @@ func TestDASTree(t *testing.T) {
tests = append(tests, large)
}

record := func(key bytes32, value []byte) {
record := func(key bytes32, value []byte, ty arbutil.PreimageType) {
colors.PrintGrey("storing ", key, " ", pretty.PrettyBytes(value))
store[key] = value
if crypto.Keccak256Hash(value) != key {
3 changes: 2 additions & 1 deletion das/ipfs_storage_service.go
@@ -23,6 +23,7 @@ import (
"github.com/ipfs/interface-go-ipfs-core/path"
"github.com/multiformats/go-multihash"
"github.com/offchainlabs/nitro/arbstate/daprovider"
"github.com/offchainlabs/nitro/arbutil"
"github.com/offchainlabs/nitro/cmd/ipfshelper"
"github.com/offchainlabs/nitro/das/dastree"
"github.com/offchainlabs/nitro/util/pretty"
@@ -180,7 +181,7 @@ func (s *IpfsStorageService) Put(ctx context.Context, data []byte, timeout uint6

var chunks [][]byte

record := func(_ common.Hash, value []byte) {
record := func(_ common.Hash, value []byte, ty arbutil.PreimageType) {
chunks = append(chunks, value)
}

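A small hypothetical sketch of the adaptation made in ipfs_storage_service.go (package main wrapper and sample data are illustrative only): a caller that only wants the recorded values, as IpfsStorageService.Put does when collecting chunks, can simply ignore the hash and the new PreimageType parameter.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/offchainlabs/nitro/arbutil"
	"github.com/offchainlabs/nitro/das/dastree"
)

func main() {
	var chunks [][]byte
	record := func(_ common.Hash, value []byte, _ arbutil.PreimageType) {
		// Collect every recorded value; the key and preimage type are not needed here.
		chunks = append(chunks, value)
	}
	dastree.RecordHash(record, []byte("data to pin"))
	fmt.Println("collected values:", len(chunks))
}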
