From 0f44d1af23d8172172eb2d0f705ab60b41147094 Mon Sep 17 00:00:00 2001 From: Alexander Bezobchuk Date: Tue, 11 Aug 2020 06:09:16 -0400 Subject: [PATCH] store: Remove Amino (#6984) * Update kv pair to proto * updates * fix LastCommitID * lint++ Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- proto/cosmos/kv/kv.proto | 9 +- proto/cosmos/store/commit_info.proto | 29 + store/iavl/store.go | 15 +- store/iavl/store_test.go | 31 +- store/iavl/wire.go | 7 - .../internal/maps/bench_test.go | 0 store/{rootmulti => }/internal/maps/maps.go | 37 +- .../internal/maps/maps_test.go | 0 .../internal/proofs/convert.go | 0 .../internal/proofs/convert_test.go | 0 .../{rootmulti => }/internal/proofs/create.go | 2 +- .../internal/proofs/create_test.go | 0 .../internal/proofs/helpers.go | 0 store/rootmulti/store.go | 205 ++--- store/rootmulti/store_test.go | 15 +- store/types/commit_info.go | 73 ++ store/types/commit_info.pb.go | 814 ++++++++++++++++++ store/types/store.go | 9 - types/kv/kv.go | 26 +- types/kv/kv.pb.go | 211 ++++- x/auth/simulation/decoder_test.go | 28 +- x/bank/simulation/decoder_test.go | 10 +- x/capability/simulation/decoder_test.go | 28 +- x/distribution/simulation/decoder_test.go | 26 +- x/evidence/simulation/decoder_test.go | 20 +- x/gov/simulation/decoder_test.go | 16 +- x/ibc/02-client/simulation/decoder_test.go | 42 +- .../03-connection/simulation/decoder_test.go | 34 +- x/ibc/04-channel/simulation/decoder_test.go | 66 +- x/ibc/simulation/decoder_test.go | 36 +- x/mint/simulation/decoder_test.go | 10 +- x/slashing/simulation/decoder_test.go | 14 +- x/staking/simulation/decoder_test.go | 20 +- 33 files changed, 1442 insertions(+), 391 deletions(-) create mode 100644 proto/cosmos/store/commit_info.proto delete mode 100644 store/iavl/wire.go rename store/{rootmulti => }/internal/maps/bench_test.go (100%) rename store/{rootmulti => }/internal/maps/maps.go (89%) rename store/{rootmulti => }/internal/maps/maps_test.go (100%) rename store/{rootmulti => }/internal/proofs/convert.go (100%) rename store/{rootmulti => }/internal/proofs/convert_test.go (100%) rename store/{rootmulti => }/internal/proofs/create.go (97%) rename store/{rootmulti => }/internal/proofs/create_test.go (100%) rename store/{rootmulti => }/internal/proofs/helpers.go (100%) create mode 100644 store/types/commit_info.go create mode 100644 store/types/commit_info.pb.go diff --git a/proto/cosmos/kv/kv.proto b/proto/cosmos/kv/kv.proto index f16b05dd6..edad83cb5 100644 --- a/proto/cosmos/kv/kv.proto +++ b/proto/cosmos/kv/kv.proto @@ -1,9 +1,16 @@ syntax = "proto3"; package cosmos.kv; +import "gogoproto/gogo.proto"; + option go_package = "github.com/cosmos/cosmos-sdk/types/kv"; -// Key-Value Pair +// Pairs defines a repeated slice of Pair objects. +message Pairs { + repeated Pair pairs = 1 [(gogoproto.nullable) = false]; +} + +// Pair defines a key/value bytes tuple. message Pair { bytes key = 1; bytes value = 2; diff --git a/proto/cosmos/store/commit_info.proto b/proto/cosmos/store/commit_info.proto new file mode 100644 index 000000000..f6720e439 --- /dev/null +++ b/proto/cosmos/store/commit_info.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; +package cosmos.store; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/store/types"; + +// CommitInfo defines commit information used by the multi-store when committing +// a version/height. 
+message CommitInfo { + int64 version = 1; + repeated StoreInfo store_infos = 2 [(gogoproto.nullable) = false]; +} + +// StoreInfo defines store-specific commit information. It contains a reference +// between a store name and the commit ID. +message StoreInfo { + string name = 1; + CommitID commit_id = 2 [(gogoproto.nullable) = false, (gogoproto.customname) = "CommitID"]; +} + +// CommitID defines the committment information when a specific store is +// committed. +message CommitID { + option (gogoproto.goproto_stringer) = false; + + int64 version = 1; + bytes hash = 2; +} diff --git a/store/iavl/store.go b/store/iavl/store.go index f31a33ea0..e26baf4d9 100644 --- a/store/iavl/store.go +++ b/store/iavl/store.go @@ -270,18 +270,25 @@ func (st *Store) Query(req abci.RequestQuery) (res abci.ResponseQuery) { res.Proof = getProofFromTree(mtree, req.Data, res.Value != nil) case "/subspace": - var KVs []types.KVPair + pairs := kv.Pairs{ + Pairs: make([]kv.Pair, 0), + } subspace := req.Data res.Key = subspace iterator := types.KVStorePrefixIterator(st, subspace) for ; iterator.Valid(); iterator.Next() { - KVs = append(KVs, types.KVPair{Key: iterator.Key(), Value: iterator.Value()}) + pairs.Pairs = append(pairs.Pairs, kv.Pair{Key: iterator.Key(), Value: iterator.Value()}) + } + iterator.Close() + + bz, err := pairs.Marshal() + if err != nil { + panic(fmt.Errorf("failed to marshal KV pairs: %w", err)) } - iterator.Close() - res.Value = cdc.MustMarshalBinaryBare(KVs) + res.Value = bz default: return sdkerrors.QueryResult(sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "unexpected query path: %v", req.Path)) diff --git a/store/iavl/store_test.go b/store/iavl/store_test.go index 27ebc4dbd..ba71ec886 100644 --- a/store/iavl/store_test.go +++ b/store/iavl/store_test.go @@ -11,6 +11,7 @@ import ( dbm "github.com/tendermint/tm-db" "github.com/cosmos/cosmos-sdk/store/types" + "github.com/cosmos/cosmos-sdk/types/kv" ) var ( @@ -408,18 +409,28 @@ func TestIAVLStoreQuery(t *testing.T) { v3 := []byte("val3") ksub := []byte("key") - KVs0 := []types.KVPair{} - KVs1 := []types.KVPair{ - {Key: k1, Value: v1}, - {Key: k2, Value: v2}, + KVs0 := kv.Pairs{} + KVs1 := kv.Pairs{ + Pairs: []kv.Pair{ + {Key: k1, Value: v1}, + {Key: k2, Value: v2}, + }, } - KVs2 := []types.KVPair{ - {Key: k1, Value: v3}, - {Key: k2, Value: v2}, + KVs2 := kv.Pairs{ + Pairs: []kv.Pair{ + {Key: k1, Value: v3}, + {Key: k2, Value: v2}, + }, } - valExpSubEmpty := cdc.MustMarshalBinaryBare(KVs0) - valExpSub1 := cdc.MustMarshalBinaryBare(KVs1) - valExpSub2 := cdc.MustMarshalBinaryBare(KVs2) + + valExpSubEmpty, err := KVs0.Marshal() + require.NoError(t, err) + + valExpSub1, err := KVs1.Marshal() + require.NoError(t, err) + + valExpSub2, err := KVs2.Marshal() + require.NoError(t, err) cid := iavlStore.Commit() ver := cid.Version diff --git a/store/iavl/wire.go b/store/iavl/wire.go deleted file mode 100644 index 43173f3e7..000000000 --- a/store/iavl/wire.go +++ /dev/null @@ -1,7 +0,0 @@ -package iavl - -import ( - "github.com/cosmos/cosmos-sdk/codec" -) - -var cdc = codec.New() diff --git a/store/rootmulti/internal/maps/bench_test.go b/store/internal/maps/bench_test.go similarity index 100% rename from store/rootmulti/internal/maps/bench_test.go rename to store/internal/maps/bench_test.go diff --git a/store/rootmulti/internal/maps/maps.go b/store/internal/maps/maps.go similarity index 89% rename from store/rootmulti/internal/maps/maps.go rename to store/internal/maps/maps.go index eb29cc816..1757b0f5c 100644 --- a/store/rootmulti/internal/maps/maps.go 
+++ b/store/internal/maps/maps.go @@ -6,7 +6,6 @@ import ( "github.com/tendermint/tendermint/crypto/merkle" "github.com/tendermint/tendermint/crypto/tmhash" - "github.com/cosmos/cosmos-sdk/store/types" "github.com/cosmos/cosmos-sdk/types/kv" ) @@ -19,7 +18,7 @@ type merkleMap struct { func newMerkleMap() *merkleMap { return &merkleMap{ - kvs: nil, + kvs: kv.Pairs{}, sorted: false, } } @@ -29,7 +28,7 @@ func newMerkleMap() *merkleMap { // of kv.Pairs. Whenever called, the MerkleMap must be resorted. func (sm *merkleMap) set(key string, value []byte) { byteKey := []byte(key) - types.AssertValidKey(byteKey) + assertValidKey(byteKey) sm.sorted = false @@ -37,7 +36,7 @@ func (sm *merkleMap) set(key string, value []byte) { // and make a determination to fetch or not. vhash := tmhash.Sum(value) - sm.kvs = append(sm.kvs, kv.Pair{ + sm.kvs.Pairs = append(sm.kvs.Pairs, kv.Pair{ Key: byteKey, Value: vhash, }) @@ -61,8 +60,8 @@ func (sm *merkleMap) sort() { // hashKVPairs hashes a kvPair and creates a merkle tree where the leaves are // byte slices. func hashKVPairs(kvs kv.Pairs) []byte { - kvsH := make([][]byte, len(kvs)) - for i, kvp := range kvs { + kvsH := make([][]byte, len(kvs.Pairs)) + for i, kvp := range kvs.Pairs { kvsH[i] = KVPair(kvp).Bytes() } @@ -81,7 +80,7 @@ type simpleMap struct { func newSimpleMap() *simpleMap { return &simpleMap{ - Kvs: nil, + Kvs: kv.Pairs{}, sorted: false, } } @@ -90,7 +89,7 @@ func newSimpleMap() *simpleMap { // and then appends it to SimpleMap's kv pairs. func (sm *simpleMap) Set(key string, value []byte) { byteKey := []byte(key) - types.AssertValidKey(byteKey) + assertValidKey(byteKey) sm.sorted = false // The value is hashed, so you can @@ -98,7 +97,7 @@ func (sm *simpleMap) Set(key string, value []byte) { // and make a determination to fetch or not. vhash := tmhash.Sum(value) - sm.Kvs = append(sm.Kvs, kv.Pair{ + sm.Kvs.Pairs = append(sm.Kvs.Pairs, kv.Pair{ Key: byteKey, Value: vhash, }) @@ -123,8 +122,11 @@ func (sm *simpleMap) Sort() { // NOTE these contain the hashed key and value. 
func (sm *simpleMap) KVPairs() kv.Pairs { sm.Sort() - kvs := make(kv.Pairs, len(sm.Kvs)) - copy(kvs, sm.Kvs) + kvs := kv.Pairs{ + Pairs: make([]kv.Pair, len(sm.Kvs.Pairs)), + } + + copy(kvs.Pairs, sm.Kvs.Pairs) return kvs } @@ -188,18 +190,25 @@ func SimpleProofsFromMap(m map[string][]byte) ([]byte, map[string]*merkle.Simple sm.Sort() kvs := sm.Kvs - kvsBytes := make([][]byte, len(kvs)) - for i, kvp := range kvs { + kvsBytes := make([][]byte, len(kvs.Pairs)) + for i, kvp := range kvs.Pairs { kvsBytes[i] = KVPair(kvp).Bytes() } rootHash, proofList := merkle.SimpleProofsFromByteSlices(kvsBytes) proofs := make(map[string]*merkle.SimpleProof) keys := make([]string, len(proofList)) - for i, kvp := range kvs { + + for i, kvp := range kvs.Pairs { proofs[string(kvp.Key)] = proofList[i] keys[i] = string(kvp.Key) } return rootHash, proofs, keys } + +func assertValidKey(key []byte) { + if len(key) == 0 { + panic("key is nil") + } +} diff --git a/store/rootmulti/internal/maps/maps_test.go b/store/internal/maps/maps_test.go similarity index 100% rename from store/rootmulti/internal/maps/maps_test.go rename to store/internal/maps/maps_test.go diff --git a/store/rootmulti/internal/proofs/convert.go b/store/internal/proofs/convert.go similarity index 100% rename from store/rootmulti/internal/proofs/convert.go rename to store/internal/proofs/convert.go diff --git a/store/rootmulti/internal/proofs/convert_test.go b/store/internal/proofs/convert_test.go similarity index 100% rename from store/rootmulti/internal/proofs/convert_test.go rename to store/internal/proofs/convert_test.go diff --git a/store/rootmulti/internal/proofs/create.go b/store/internal/proofs/create.go similarity index 97% rename from store/rootmulti/internal/proofs/create.go rename to store/internal/proofs/create.go index 8ca7b63aa..0db30e4f2 100644 --- a/store/rootmulti/internal/proofs/create.go +++ b/store/internal/proofs/create.go @@ -6,7 +6,7 @@ import ( ics23 "github.com/confio/ics23/go" - sdkmaps "github.com/cosmos/cosmos-sdk/store/rootmulti/internal/maps" + sdkmaps "github.com/cosmos/cosmos-sdk/store/internal/maps" ) // TendermintSpec constrains the format from ics23-tendermint (crypto/merkle SimpleProof) diff --git a/store/rootmulti/internal/proofs/create_test.go b/store/internal/proofs/create_test.go similarity index 100% rename from store/rootmulti/internal/proofs/create_test.go rename to store/internal/proofs/create_test.go diff --git a/store/rootmulti/internal/proofs/helpers.go b/store/internal/proofs/helpers.go similarity index 100% rename from store/rootmulti/internal/proofs/helpers.go rename to store/internal/proofs/helpers.go diff --git a/store/rootmulti/store.go b/store/rootmulti/store.go index 2db747a28..677a804a7 100644 --- a/store/rootmulti/store.go +++ b/store/rootmulti/store.go @@ -1,24 +1,21 @@ package rootmulti import ( + "encoding/binary" "fmt" "io" "strings" - ics23 "github.com/confio/ics23/go" + gogotypes "github.com/gogo/protobuf/types" "github.com/pkg/errors" iavltree "github.com/tendermint/iavl" abci "github.com/tendermint/tendermint/abci/types" - "github.com/tendermint/tendermint/crypto/merkle" dbm "github.com/tendermint/tm-db" - "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/store/cachemulti" "github.com/cosmos/cosmos-sdk/store/dbadapter" "github.com/cosmos/cosmos-sdk/store/iavl" "github.com/cosmos/cosmos-sdk/store/mem" - sdkmaps "github.com/cosmos/cosmos-sdk/store/rootmulti/internal/maps" - sdkproofs "github.com/cosmos/cosmos-sdk/store/rootmulti/internal/proofs" 
"github.com/cosmos/cosmos-sdk/store/tracekv" "github.com/cosmos/cosmos-sdk/store/transient" "github.com/cosmos/cosmos-sdk/store/types" @@ -31,14 +28,12 @@ const ( commitInfoKeyFmt = "s/%d" // s/ ) -var cdc = codec.New() - // Store is composed of many CommitStores. Name contrasts with // cacheMultiStore which is for cache-wrapping other MultiStores. It implements // the CommitMultiStore interface. type Store struct { db dbm.DB - lastCommitInfo commitInfo + lastCommitInfo *types.CommitInfo pruningOpts types.PruningOptions storesParams map[types.StoreKey]storeParams stores map[types.StoreKey]types.CommitKVStore @@ -152,8 +147,9 @@ func (rs *Store) LoadVersion(ver int64) error { } func (rs *Store) loadVersion(ver int64, upgrades *types.StoreUpgrades) error { - infos := make(map[string]storeInfo) - var cInfo commitInfo + infos := make(map[string]types.StoreInfo) + + cInfo := &types.CommitInfo{} // load old data if we are not version 0 if ver != 0 { @@ -217,12 +213,13 @@ func (rs *Store) loadVersion(ver int64, upgrades *types.StoreUpgrades) error { return nil } -func (rs *Store) getCommitID(infos map[string]storeInfo, name string) types.CommitID { +func (rs *Store) getCommitID(infos map[string]types.StoreInfo, name string) types.CommitID { info, ok := infos[name] if !ok { return types.CommitID{} } - return info.Core.CommitID + + return info.CommitID } func deleteKVStore(kv types.KVStore) error { @@ -290,11 +287,12 @@ func (rs *Store) TracingEnabled() bool { return rs.traceWriter != nil } -//---------------------------------------- -// +CommitStore - // LastCommitID implements Committer/CommitStore. func (rs *Store) LastCommitID() types.CommitID { + if rs.lastCommitInfo == nil { + return types.CommitID{} + } + return rs.lastCommitInfo.CommitID() } @@ -365,9 +363,6 @@ func (rs *Store) CacheWrapWithTrace(_ io.Writer, _ types.TraceContext) types.Cac return rs.CacheWrap() } -//---------------------------------------- -// +MultiStore - // CacheMultiStore cache-wraps the multi-store and returns a CacheMultiStore. // It implements the MultiStore interface. func (rs *Store) CacheMultiStore() types.CacheMultiStore { @@ -453,8 +448,6 @@ func (rs *Store) getStoreByName(name string) types.Store { return rs.GetCommitKVStore(key) } -//---------------------- Query ------------------ - // Query calls substore.Query with the same `req` where `req.Path` is // modified to remove the substore prefix. // Ie. `req.Path` here is `//`, and trimmed to `/` for the substore. @@ -491,7 +484,7 @@ func (rs *Store) Query(req abci.RequestQuery) abci.ResponseQuery { // If the request's height is the latest height we've committed, then utilize // the store's lastCommitInfo as this commit info may not be flushed to disk. // Otherwise, we query for the commit info from disk. - var commitInfo commitInfo + var commitInfo *types.CommitInfo if res.Height == rs.lastCommitInfo.Version { commitInfo = rs.lastCommitInfo @@ -578,125 +571,32 @@ func (rs *Store) loadCommitStoreFromParams(key types.StoreKey, id types.CommitID } } -//---------------------------------------- -// storeParams - type storeParams struct { key types.StoreKey db dbm.DB typ types.StoreType } -//---------------------------------------- -// commitInfo - -// NOTE: Keep commitInfo a simple immutable struct. 
-type commitInfo struct { - - // Version - Version int64 - - // Store info for - StoreInfos []storeInfo -} - -func (ci commitInfo) toMap() map[string][]byte { - m := make(map[string][]byte, len(ci.StoreInfos)) - for _, storeInfo := range ci.StoreInfos { - m[storeInfo.Name] = storeInfo.GetHash() - } - return m -} - -// Hash returns the simple merkle root hash of the stores sorted by name. -func (ci commitInfo) Hash() []byte { - // we need a special case for empty set, as SimpleProofsFromMap requires at least one entry - if len(ci.StoreInfos) == 0 { - return nil - } - rootHash, _, _ := sdkmaps.SimpleProofsFromMap(ci.toMap()) - return rootHash -} - -func (ci commitInfo) ProofOp(storeName string) merkle.ProofOp { - cmap := ci.toMap() - _, proofs, _ := sdkmaps.SimpleProofsFromMap(cmap) - proof := proofs[storeName] - if proof == nil { - panic(fmt.Sprintf("ProofOp for %s but not registered store name", storeName)) - } - // convert merkle.SimpleProof to CommitmentProof - existProof, err := sdkproofs.ConvertExistenceProof(proof, []byte(storeName), cmap[storeName]) - if err != nil { - panic(fmt.Errorf("could not convert simple proof to existence proof: %w", err)) - } - commitmentProof := &ics23.CommitmentProof{ - Proof: &ics23.CommitmentProof_Exist{ - Exist: existProof, - }, - } - - return types.NewSimpleMerkleCommitmentOp([]byte(storeName), commitmentProof).ProofOp() -} - -func (ci commitInfo) CommitID() types.CommitID { - return types.CommitID{ - Version: ci.Version, - Hash: ci.Hash(), - } -} - -//---------------------------------------- -// storeInfo - -// storeInfo contains the name and core reference for an -// underlying store. It is the leaf of the Stores top -// level simple merkle tree. -type storeInfo struct { - Name string - Core storeCore -} - -type storeCore struct { - // StoreType StoreType - CommitID types.CommitID - // ... maybe add more state -} - -// GetHash returns the GetHash from the CommitID. -// This is used in CommitInfo.Hash() -// -// When we commit to this in a merkle proof, we create a map of storeInfo.Name -> storeInfo.GetHash() -// and build a merkle proof from that. -// This is then chained with the substore proof, so we prove the root hash from the substore before this -// and need to pass that (unmodified) as the leaf value of the multistore proof. -func (si storeInfo) GetHash() []byte { - return si.Core.CommitID.Hash -} - -//---------------------------------------- -// Misc. - func getLatestVersion(db dbm.DB) int64 { - var latest int64 - latestBytes, err := db.Get([]byte(latestVersionKey)) + bz, err := db.Get([]byte(latestVersionKey)) if err != nil { panic(err) - } else if latestBytes == nil { + } else if bz == nil { return 0 } - err = cdc.UnmarshalBinaryBare(latestBytes, &latest) - if err != nil { + var latestVersion int64 + + if err := gogotypes.StdInt64Unmarshal(&latestVersion, bz); err != nil { panic(err) } - return latest + return latestVersion } // Commits each store and returns a new commitInfo. 
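The latest committed version is now persisted through the gogoproto Int64Value helpers rather than amino, as seen in getLatestVersion above and setLatestVersion further down. A minimal, self-contained sketch of that round trip under the API used in this patch; the version value 42 is arbitrary:

    package main

    import (
        "fmt"

        gogotypes "github.com/gogo/protobuf/types"
    )

    func main() {
        // Encode an int64 version the same way setLatestVersion now does.
        bz, err := gogotypes.StdInt64Marshal(42)
        if err != nil {
            panic(err)
        }

        // Decode it back, mirroring getLatestVersion.
        var latest int64
        if err := gogotypes.StdInt64Unmarshal(&latest, bz); err != nil {
            panic(err)
        }

        fmt.Println(latest) // 42
    }
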
-func commitStores(version int64, storeMap map[types.StoreKey]types.CommitKVStore) commitInfo { - storeInfos := make([]storeInfo, 0, len(storeMap)) +func commitStores(version int64, storeMap map[types.StoreKey]types.CommitKVStore) *types.CommitInfo { + storeInfos := make([]types.StoreInfo, 0, len(storeMap)) for key, store := range storeMap { commitID := store.Commit() @@ -705,52 +605,64 @@ func commitStores(version int64, storeMap map[types.StoreKey]types.CommitKVStore continue } - si := storeInfo{} + si := types.StoreInfo{} si.Name = key.Name() - si.Core.CommitID = commitID + si.CommitID = commitID storeInfos = append(storeInfos, si) } - return commitInfo{ + return &types.CommitInfo{ Version: version, StoreInfos: storeInfos, } } // Gets commitInfo from disk. -func getCommitInfo(db dbm.DB, ver int64) (commitInfo, error) { +func getCommitInfo(db dbm.DB, ver int64) (*types.CommitInfo, error) { cInfoKey := fmt.Sprintf(commitInfoKeyFmt, ver) - cInfoBytes, err := db.Get([]byte(cInfoKey)) + bz, err := db.Get([]byte(cInfoKey)) if err != nil { - return commitInfo{}, errors.Wrap(err, "failed to get commit info") - } else if cInfoBytes == nil { - return commitInfo{}, errors.New("failed to get commit info: no data") + return nil, errors.Wrap(err, "failed to get commit info") + } else if bz == nil { + return nil, errors.New("no commit info found") } - var cInfo commitInfo - - err = cdc.UnmarshalBinaryBare(cInfoBytes, &cInfo) - if err != nil { - return commitInfo{}, errors.Wrap(err, "failed to get store") + cInfo := &types.CommitInfo{} + if err = cInfo.Unmarshal(bz); err != nil { + return nil, errors.Wrap(err, "failed unmarshal commit info") } return cInfo, nil } -func setCommitInfo(batch dbm.Batch, version int64, cInfo commitInfo) { - cInfoBytes := cdc.MustMarshalBinaryBare(cInfo) +func setCommitInfo(batch dbm.Batch, version int64, cInfo *types.CommitInfo) { + bz, err := cInfo.Marshal() + if err != nil { + panic(err) + } + cInfoKey := fmt.Sprintf(commitInfoKeyFmt, version) - batch.Set([]byte(cInfoKey), cInfoBytes) + batch.Set([]byte(cInfoKey), bz) } func setLatestVersion(batch dbm.Batch, version int64) { - latestBytes := cdc.MustMarshalBinaryBare(version) - batch.Set([]byte(latestVersionKey), latestBytes) + bz, err := gogotypes.StdInt64Marshal(version) + if err != nil { + panic(err) + } + + batch.Set([]byte(latestVersionKey), bz) } func setPruningHeights(batch dbm.Batch, pruneHeights []int64) { - bz := cdc.MustMarshalBinaryBare(pruneHeights) + bz := make([]byte, 0) + for _, ph := range pruneHeights { + buf := make([]byte, 8) + binary.BigEndian.PutUint64(buf, uint64(ph)) + bz = append(bz, buf...) 
+ } + batch.Set([]byte(pruneHeightsKey), bz) } @@ -763,15 +675,18 @@ func getPruningHeights(db dbm.DB) ([]int64, error) { return nil, errors.New("no pruned heights found") } - var prunedHeights []int64 - if err := cdc.UnmarshalBinaryBare(bz, &prunedHeights); err != nil { - return nil, fmt.Errorf("failed to unmarshal pruned heights: %w", err) + prunedHeights := make([]int64, len(bz)/8) + i, offset := 0, 0 + for offset < len(bz) { + prunedHeights[i] = int64(binary.BigEndian.Uint64(bz[offset : offset+8])) + i++ + offset += 8 } return prunedHeights, nil } -func flushMetadata(db dbm.DB, version int64, cInfo commitInfo, pruneHeights []int64) { +func flushMetadata(db dbm.DB, version int64, cInfo *types.CommitInfo, pruneHeights []int64) { batch := db.NewBatch() defer batch.Close() diff --git a/store/rootmulti/store_test.go b/store/rootmulti/store_test.go index da927dcc2..b4af426cc 100644 --- a/store/rootmulti/store_test.go +++ b/store/rootmulti/store_test.go @@ -9,7 +9,7 @@ import ( dbm "github.com/tendermint/tm-db" "github.com/cosmos/cosmos-sdk/store/iavl" - sdkmaps "github.com/cosmos/cosmos-sdk/store/rootmulti/internal/maps" + sdkmaps "github.com/cosmos/cosmos-sdk/store/internal/maps" "github.com/cosmos/cosmos-sdk/store/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) @@ -557,7 +557,7 @@ func checkStore(t *testing.T, store *Store, expect, got types.CommitID) { } -func checkContains(t testing.TB, info []storeInfo, wanted []string) { +func checkContains(t testing.TB, info []types.StoreInfo, wanted []string) { t.Helper() for _, want := range wanted { @@ -565,7 +565,7 @@ func checkContains(t testing.TB, info []storeInfo, wanted []string) { } } -func checkHas(t testing.TB, info []storeInfo, want string) { +func checkHas(t testing.TB, info []types.StoreInfo, want string) { t.Helper() for _, i := range info { if i.Name == want { @@ -586,12 +586,9 @@ func hashStores(stores map[types.StoreKey]types.CommitKVStore) []byte { m := make(map[string][]byte, len(stores)) for key, store := range stores { name := key.Name() - m[name] = storeInfo{ - Name: name, - Core: storeCore{ - CommitID: store.LastCommitID(), - // StoreType: store.GetStoreType(), - }, + m[name] = types.StoreInfo{ + Name: name, + CommitID: store.LastCommitID(), }.GetHash() } return sdkmaps.SimpleHashFromMap(m) diff --git a/store/types/commit_info.go b/store/types/commit_info.go new file mode 100644 index 000000000..0e229d2a9 --- /dev/null +++ b/store/types/commit_info.go @@ -0,0 +1,73 @@ +package types + +import ( + fmt "fmt" + + ics23 "github.com/confio/ics23/go" + "github.com/tendermint/tendermint/crypto/merkle" + + sdkmaps "github.com/cosmos/cosmos-sdk/store/internal/maps" + sdkproofs "github.com/cosmos/cosmos-sdk/store/internal/proofs" +) + +// GetHash returns the GetHash from the CommitID. +// This is used in CommitInfo.Hash() +// +// When we commit to this in a merkle proof, we create a map of storeInfo.Name -> storeInfo.GetHash() +// and build a merkle proof from that. +// This is then chained with the substore proof, so we prove the root hash from the substore before this +// and need to pass that (unmodified) as the leaf value of the multistore proof. +func (si StoreInfo) GetHash() []byte { + return si.CommitID.Hash +} + +func (ci CommitInfo) toMap() map[string][]byte { + m := make(map[string][]byte, len(ci.StoreInfos)) + for _, storeInfo := range ci.StoreInfos { + m[storeInfo.Name] = storeInfo.GetHash() + } + + return m +} + +// Hash returns the simple merkle root hash of the stores sorted by name. 
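The commit_info.go helpers below fold the map of store name to store hash into a simple merkle root and expose it as a CommitID. A minimal usage sketch of these helpers; the store names "acc" and "bank" and their hashes are hypothetical, purely for illustration:

    package main

    import (
        "fmt"

        "github.com/cosmos/cosmos-sdk/store/types"
    )

    func main() {
        ci := types.CommitInfo{
            Version: 1,
            StoreInfos: []types.StoreInfo{
                {Name: "acc", CommitID: types.CommitID{Version: 1, Hash: []byte{0x01}}},
                {Name: "bank", CommitID: types.CommitID{Version: 1, Hash: []byte{0x02}}},
            },
        }

        fmt.Printf("%X\n", ci.Hash())      // simple merkle root over name -> store hash
        fmt.Println(ci.CommitID().Version) // 1
    }
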
+func (ci CommitInfo) Hash() []byte { + // we need a special case for empty set, as SimpleProofsFromMap requires at least one entry + if len(ci.StoreInfos) == 0 { + return nil + } + + rootHash, _, _ := sdkmaps.SimpleProofsFromMap(ci.toMap()) + return rootHash +} + +func (ci CommitInfo) ProofOp(storeName string) merkle.ProofOp { + cmap := ci.toMap() + _, proofs, _ := sdkmaps.SimpleProofsFromMap(cmap) + + proof := proofs[storeName] + if proof == nil { + panic(fmt.Sprintf("ProofOp for %s but not registered store name", storeName)) + } + + // convert merkle.SimpleProof to CommitmentProof + existProof, err := sdkproofs.ConvertExistenceProof(proof, []byte(storeName), cmap[storeName]) + if err != nil { + panic(fmt.Errorf("could not convert simple proof to existence proof: %w", err)) + } + + commitmentProof := &ics23.CommitmentProof{ + Proof: &ics23.CommitmentProof_Exist{ + Exist: existProof, + }, + } + + return NewSimpleMerkleCommitmentOp([]byte(storeName), commitmentProof).ProofOp() +} + +func (ci CommitInfo) CommitID() CommitID { + return CommitID{ + Version: ci.Version, + Hash: ci.Hash(), + } +} diff --git a/store/types/commit_info.pb.go b/store/types/commit_info.pb.go new file mode 100644 index 000000000..a5e22891c --- /dev/null +++ b/store/types/commit_info.pb.go @@ -0,0 +1,814 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: cosmos/store/commit_info.proto + +package types + +import ( + fmt "fmt" + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" + io "io" + math "math" + math_bits "math/bits" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +// CommitInfo defines commit information used by the multi-store when committing +// a version/height. 
+type CommitInfo struct { + Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + StoreInfos []StoreInfo `protobuf:"bytes,2,rep,name=store_infos,json=storeInfos,proto3" json:"store_infos"` +} + +func (m *CommitInfo) Reset() { *m = CommitInfo{} } +func (m *CommitInfo) String() string { return proto.CompactTextString(m) } +func (*CommitInfo) ProtoMessage() {} +func (*CommitInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_3e3b3900c32110a1, []int{0} +} +func (m *CommitInfo) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *CommitInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_CommitInfo.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *CommitInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitInfo.Merge(m, src) +} +func (m *CommitInfo) XXX_Size() int { + return m.Size() +} +func (m *CommitInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CommitInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitInfo proto.InternalMessageInfo + +func (m *CommitInfo) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *CommitInfo) GetStoreInfos() []StoreInfo { + if m != nil { + return m.StoreInfos + } + return nil +} + +// StoreInfo defines store-specific commit information. It contains a reference +// between a store name and the commit ID. +type StoreInfo struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + CommitID CommitID `protobuf:"bytes,2,opt,name=commit_id,json=commitId,proto3" json:"commit_id"` +} + +func (m *StoreInfo) Reset() { *m = StoreInfo{} } +func (m *StoreInfo) String() string { return proto.CompactTextString(m) } +func (*StoreInfo) ProtoMessage() {} +func (*StoreInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_3e3b3900c32110a1, []int{1} +} +func (m *StoreInfo) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *StoreInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_StoreInfo.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *StoreInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_StoreInfo.Merge(m, src) +} +func (m *StoreInfo) XXX_Size() int { + return m.Size() +} +func (m *StoreInfo) XXX_DiscardUnknown() { + xxx_messageInfo_StoreInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_StoreInfo proto.InternalMessageInfo + +func (m *StoreInfo) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StoreInfo) GetCommitID() CommitID { + if m != nil { + return m.CommitID + } + return CommitID{} +} + +// CommitID defines the committment information when a specific store is +// committed. 
+type CommitID struct { + Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + Hash []byte `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"` +} + +func (m *CommitID) Reset() { *m = CommitID{} } +func (*CommitID) ProtoMessage() {} +func (*CommitID) Descriptor() ([]byte, []int) { + return fileDescriptor_3e3b3900c32110a1, []int{2} +} +func (m *CommitID) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *CommitID) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_CommitID.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *CommitID) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitID.Merge(m, src) +} +func (m *CommitID) XXX_Size() int { + return m.Size() +} +func (m *CommitID) XXX_DiscardUnknown() { + xxx_messageInfo_CommitID.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitID proto.InternalMessageInfo + +func (m *CommitID) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *CommitID) GetHash() []byte { + if m != nil { + return m.Hash + } + return nil +} + +func init() { + proto.RegisterType((*CommitInfo)(nil), "cosmos.store.CommitInfo") + proto.RegisterType((*StoreInfo)(nil), "cosmos.store.StoreInfo") + proto.RegisterType((*CommitID)(nil), "cosmos.store.CommitID") +} + +func init() { proto.RegisterFile("cosmos/store/commit_info.proto", fileDescriptor_3e3b3900c32110a1) } + +var fileDescriptor_3e3b3900c32110a1 = []byte{ + // 289 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xce, 0x2f, 0xce, + 0xcd, 0x2f, 0xd6, 0x2f, 0x2e, 0xc9, 0x2f, 0x4a, 0xd5, 0x4f, 0xce, 0xcf, 0xcd, 0xcd, 0x2c, 0x89, + 0xcf, 0xcc, 0x4b, 0xcb, 0xd7, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x81, 0xc8, 0xeb, 0x81, + 0xe5, 0xa5, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, 0xc1, 0x12, 0xfa, 0x20, 0x16, 0x44, 0x8d, 0x52, 0x1a, + 0x17, 0x97, 0x33, 0x58, 0xa3, 0x67, 0x5e, 0x5a, 0xbe, 0x90, 0x04, 0x17, 0x7b, 0x59, 0x6a, 0x51, + 0x71, 0x66, 0x7e, 0x9e, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x73, 0x10, 0x8c, 0x2b, 0x64, 0xc7, 0xc5, + 0x0d, 0x36, 0x06, 0x6c, 0x7e, 0xb1, 0x04, 0x93, 0x02, 0xb3, 0x06, 0xb7, 0x91, 0xb8, 0x1e, 0xb2, + 0x0d, 0x7a, 0xc1, 0x20, 0x12, 0x64, 0x8e, 0x13, 0xcb, 0x89, 0x7b, 0xf2, 0x0c, 0x41, 0x5c, 0xc5, + 0x30, 0x81, 0x62, 0xa5, 0x34, 0x2e, 0x4e, 0xb8, 0xb4, 0x90, 0x10, 0x17, 0x4b, 0x5e, 0x62, 0x6e, + 0x2a, 0xd8, 0x0e, 0xce, 0x20, 0x30, 0x5b, 0xc8, 0x95, 0x8b, 0x13, 0xe6, 0x83, 0x14, 0x09, 0x26, + 0x05, 0x46, 0x0d, 0x6e, 0x23, 0x31, 0x54, 0xe3, 0xa1, 0xee, 0x74, 0x71, 0x12, 0x00, 0x99, 0xfe, + 0xe8, 0x9e, 0x3c, 0x07, 0x4c, 0x24, 0x88, 0x03, 0xa2, 0xd5, 0x33, 0x45, 0xc9, 0x8e, 0x0b, 0x2e, + 0x8a, 0xc7, 0x37, 0x42, 0x5c, 0x2c, 0x19, 0x89, 0xc5, 0x19, 0x60, 0x7b, 0x78, 0x82, 0xc0, 0x6c, + 0x2b, 0x96, 0x19, 0x0b, 0xe4, 0x19, 0x9c, 0x9c, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, + 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, + 0x8e, 0x21, 0x4a, 0x23, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, 0x1a, + 0xf0, 0x10, 0x4a, 0xb7, 0x38, 0x25, 0x1b, 0x1a, 0x07, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x49, 0x6c, + 0xe0, 0xa0, 0x35, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x2f, 0x20, 0xcc, 0x8d, 0xa0, 0x01, 0x00, + 0x00, +} + +func (m *CommitInfo) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + 
n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *CommitInfo) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *CommitInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.StoreInfos) > 0 { + for iNdEx := len(m.StoreInfos) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.StoreInfos[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintCommitInfo(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if m.Version != 0 { + i = encodeVarintCommitInfo(dAtA, i, uint64(m.Version)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *StoreInfo) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *StoreInfo) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *StoreInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + { + size, err := m.CommitID.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintCommitInfo(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + if len(m.Name) > 0 { + i -= len(m.Name) + copy(dAtA[i:], m.Name) + i = encodeVarintCommitInfo(dAtA, i, uint64(len(m.Name))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *CommitID) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *CommitID) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *CommitID) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Hash) > 0 { + i -= len(m.Hash) + copy(dAtA[i:], m.Hash) + i = encodeVarintCommitInfo(dAtA, i, uint64(len(m.Hash))) + i-- + dAtA[i] = 0x12 + } + if m.Version != 0 { + i = encodeVarintCommitInfo(dAtA, i, uint64(m.Version)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintCommitInfo(dAtA []byte, offset int, v uint64) int { + offset -= sovCommitInfo(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *CommitInfo) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Version != 0 { + n += 1 + sovCommitInfo(uint64(m.Version)) + } + if len(m.StoreInfos) > 0 { + for _, e := range m.StoreInfos { + l = e.Size() + n += 1 + l + sovCommitInfo(uint64(l)) + } + } + return n +} + +func (m *StoreInfo) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Name) + if l > 0 { + n += 1 + l + sovCommitInfo(uint64(l)) + } + l = m.CommitID.Size() + n += 1 + l + sovCommitInfo(uint64(l)) + return n +} + +func (m *CommitID) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Version != 0 { + n += 1 + sovCommitInfo(uint64(m.Version)) + } + l = len(m.Hash) + if l > 0 { + n += 1 + l + sovCommitInfo(uint64(l)) + } + return n +} + +func sovCommitInfo(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozCommitInfo(x uint64) (n int) { + return 
sovCommitInfo(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *CommitInfo) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: CommitInfo: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: CommitInfo: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType) + } + m.Version = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Version |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field StoreInfos", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthCommitInfo + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthCommitInfo + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.StoreInfos = append(m.StoreInfos, StoreInfo{}) + if err := m.StoreInfos[len(m.StoreInfos)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipCommitInfo(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthCommitInfo + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthCommitInfo + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *StoreInfo) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: StoreInfo: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: StoreInfo: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthCommitInfo + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthCommitInfo + } + if postIndex > l { + return 
io.ErrUnexpectedEOF + } + m.Name = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field CommitID", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthCommitInfo + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthCommitInfo + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.CommitID.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipCommitInfo(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthCommitInfo + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthCommitInfo + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *CommitID) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: CommitID: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: CommitID: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType) + } + m.Version = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Version |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Hash", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthCommitInfo + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthCommitInfo + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Hash = append(m.Hash[:0], dAtA[iNdEx:postIndex]...) 
+ if m.Hash == nil { + m.Hash = []byte{} + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipCommitInfo(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthCommitInfo + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthCommitInfo + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipCommitInfo(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowCommitInfo + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthCommitInfo + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupCommitInfo + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthCommitInfo + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthCommitInfo = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowCommitInfo = fmt.Errorf("proto: integer overflow") + ErrUnexpectedEndOfGroupCommitInfo = fmt.Errorf("proto: unexpected end of group") +) diff --git a/store/types/store.go b/store/types/store.go index 25a305919..4ca8f442c 100644 --- a/store/types/store.go +++ b/store/types/store.go @@ -247,15 +247,6 @@ type CacheWrapper interface { CacheWrapWithTrace(w io.Writer, tc TraceContext) CacheWrap } -//---------------------------------------- -// CommitID - -// CommitID contains the tree version number and its merkle root. 
-type CommitID struct { - Version int64 - Hash []byte -} - func (cid CommitID) IsZero() bool { return cid.Version == 0 && len(cid.Hash) == 0 } diff --git a/types/kv/kv.go b/types/kv/kv.go index 23cb7ec78..a0aab3f47 100644 --- a/types/kv/kv.go +++ b/types/kv/kv.go @@ -5,32 +5,22 @@ import ( "sort" ) -//---------------------------------------- -// KVPair - -/* -Defined in types.proto -type Pair struct { - Key []byte - Value []byte -} -*/ - -type Pairs []Pair - -// Sorting -func (kvs Pairs) Len() int { return len(kvs) } +func (kvs Pairs) Len() int { return len(kvs.Pairs) } func (kvs Pairs) Less(i, j int) bool { - switch bytes.Compare(kvs[i].Key, kvs[j].Key) { + switch bytes.Compare(kvs.Pairs[i].Key, kvs.Pairs[j].Key) { case -1: return true + case 0: - return bytes.Compare(kvs[i].Value, kvs[j].Value) < 0 + return bytes.Compare(kvs.Pairs[i].Value, kvs.Pairs[j].Value) < 0 + case 1: return false + default: panic("invalid comparison result") } } -func (kvs Pairs) Swap(i, j int) { kvs[i], kvs[j] = kvs[j], kvs[i] } + +func (kvs Pairs) Swap(i, j int) { kvs.Pairs[i], kvs.Pairs[j] = kvs.Pairs[j], kvs.Pairs[i] } func (kvs Pairs) Sort() { sort.Sort(kvs) } diff --git a/types/kv/kv.pb.go b/types/kv/kv.pb.go index 1e15bdc68..a5c2b7a86 100644 --- a/types/kv/kv.pb.go +++ b/types/kv/kv.pb.go @@ -5,6 +5,7 @@ package kv import ( fmt "fmt" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" io "io" math "math" @@ -22,7 +23,52 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package -// Key-Value Pair +// Pairs defines a repeated slice of Pair objects. +type Pairs struct { + Pairs []Pair `protobuf:"bytes,1,rep,name=pairs,proto3" json:"pairs"` +} + +func (m *Pairs) Reset() { *m = Pairs{} } +func (m *Pairs) String() string { return proto.CompactTextString(m) } +func (*Pairs) ProtoMessage() {} +func (*Pairs) Descriptor() ([]byte, []int) { + return fileDescriptor_23371bd43b515c6e, []int{0} +} +func (m *Pairs) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Pairs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Pairs.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Pairs) XXX_Merge(src proto.Message) { + xxx_messageInfo_Pairs.Merge(m, src) +} +func (m *Pairs) XXX_Size() int { + return m.Size() +} +func (m *Pairs) XXX_DiscardUnknown() { + xxx_messageInfo_Pairs.DiscardUnknown(m) +} + +var xxx_messageInfo_Pairs proto.InternalMessageInfo + +func (m *Pairs) GetPairs() []Pair { + if m != nil { + return m.Pairs + } + return nil +} + +// Pair defines a key/value bytes tuple. 
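With Pairs now a generated message wrapping a slice instead of a bare []Pair, sorting goes through the struct's Pairs field and encoding goes through the protobuf Marshal/Unmarshal methods. A minimal sketch under the new API; the key/value bytes are arbitrary:

    package main

    import (
        "fmt"

        "github.com/cosmos/cosmos-sdk/types/kv"
    )

    func main() {
        pairs := kv.Pairs{
            Pairs: []kv.Pair{
                {Key: []byte("b"), Value: []byte("2")},
                {Key: []byte("a"), Value: []byte("1")},
            },
        }

        pairs.Sort() // sort.Interface is implemented over pairs.Pairs

        bz, err := pairs.Marshal() // protobuf bytes instead of amino
        if err != nil {
            panic(err)
        }

        var decoded kv.Pairs
        if err := decoded.Unmarshal(bz); err != nil {
            panic(err)
        }

        fmt.Println(string(decoded.Pairs[0].Key)) // "a"
    }

This is the same encoding the iavl store's "/subspace" query now places in res.Value, so callers decode it with Pairs.Unmarshal rather than amino.
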
type Pair struct { Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` @@ -32,7 +78,7 @@ func (m *Pair) Reset() { *m = Pair{} } func (m *Pair) String() string { return proto.CompactTextString(m) } func (*Pair) ProtoMessage() {} func (*Pair) Descriptor() ([]byte, []int) { - return fileDescriptor_23371bd43b515c6e, []int{0} + return fileDescriptor_23371bd43b515c6e, []int{1} } func (m *Pair) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -76,23 +122,64 @@ func (m *Pair) GetValue() []byte { } func init() { + proto.RegisterType((*Pairs)(nil), "cosmos.kv.Pairs") proto.RegisterType((*Pair)(nil), "cosmos.kv.Pair") } func init() { proto.RegisterFile("cosmos/kv/kv.proto", fileDescriptor_23371bd43b515c6e) } var fileDescriptor_23371bd43b515c6e = []byte{ - // 150 bytes of a gzipped FileDescriptorProto + // 200 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4a, 0xce, 0x2f, 0xce, 0xcd, 0x2f, 0xd6, 0xcf, 0x2e, 0xd3, 0xcf, 0x2e, 0xd3, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, - 0x84, 0x88, 0xe9, 0x65, 0x97, 0x29, 0xe9, 0x71, 0xb1, 0x04, 0x24, 0x66, 0x16, 0x09, 0x09, 0x70, - 0x31, 0x67, 0xa7, 0x56, 0x4a, 0x30, 0x2a, 0x30, 0x6a, 0xf0, 0x04, 0x81, 0x98, 0x42, 0x22, 0x5c, - 0xac, 0x65, 0x89, 0x39, 0xa5, 0xa9, 0x12, 0x4c, 0x60, 0x31, 0x08, 0xc7, 0xc9, 0xfe, 0xc4, 0x23, - 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, - 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0x54, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, - 0x92, 0xf3, 0x73, 0xf5, 0xa1, 0x76, 0x42, 0x28, 0xdd, 0xe2, 0x94, 0x6c, 0xfd, 0x92, 0xca, 0x82, - 0x54, 0x90, 0x23, 0x92, 0xd8, 0xc0, 0x4e, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xae, 0xee, - 0xa2, 0x4c, 0x98, 0x00, 0x00, 0x00, + 0x84, 0x88, 0xe9, 0x65, 0x97, 0x49, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, 0x83, 0x45, 0xf5, 0x41, 0x2c, + 0x88, 0x02, 0x25, 0x13, 0x2e, 0xd6, 0x80, 0xc4, 0xcc, 0xa2, 0x62, 0x21, 0x6d, 0x2e, 0xd6, 0x02, + 0x10, 0x43, 0x82, 0x51, 0x81, 0x59, 0x83, 0xdb, 0x88, 0x5f, 0x0f, 0xae, 0x53, 0x0f, 0xa4, 0xc0, + 0x89, 0xe5, 0xc4, 0x3d, 0x79, 0x86, 0x20, 0x88, 0x1a, 0x25, 0x3d, 0x2e, 0x16, 0x90, 0xa0, 0x90, + 0x00, 0x17, 0x73, 0x76, 0x6a, 0xa5, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x4f, 0x10, 0x88, 0x29, 0x24, + 0xc2, 0xc5, 0x5a, 0x96, 0x98, 0x53, 0x9a, 0x2a, 0xc1, 0x04, 0x16, 0x83, 0x70, 0x9c, 0xec, 0x4f, + 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, + 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, 0x4a, 0x35, 0x3d, 0xb3, 0x24, 0xa3, 0x34, + 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, 0xea, 0x7e, 0x08, 0xa5, 0x5b, 0x9c, 0x92, 0xad, 0x5f, 0x52, + 0x59, 0x90, 0x0a, 0xf2, 0x50, 0x12, 0x1b, 0xd8, 0xb5, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xce, 0x49, 0x74, 0x20, 0xe4, 0x00, 0x00, 0x00, +} + +func (m *Pairs) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Pairs) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Pairs) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Pairs) > 0 { + for iNdEx := len(m.Pairs) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Pairs[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size 
+ i = encodeVarintKv(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil } func (m *Pair) Marshal() (dAtA []byte, err error) { @@ -143,6 +230,21 @@ func encodeVarintKv(dAtA []byte, offset int, v uint64) int { dAtA[offset] = uint8(v) return base } +func (m *Pairs) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Pairs) > 0 { + for _, e := range m.Pairs { + l = e.Size() + n += 1 + l + sovKv(uint64(l)) + } + } + return n +} + func (m *Pair) Size() (n int) { if m == nil { return 0 @@ -166,6 +268,93 @@ func sovKv(x uint64) (n int) { func sozKv(x uint64) (n int) { return sovKv(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } +func (m *Pairs) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowKv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Pairs: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Pairs: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Pairs", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowKv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthKv + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthKv + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Pairs = append(m.Pairs, Pair{}) + if err := m.Pairs[len(m.Pairs)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipKv(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthKv + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthKv + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *Pair) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 diff --git a/x/auth/simulation/decoder_test.go b/x/auth/simulation/decoder_test.go index 73f14eb44..453966a1f 100644 --- a/x/auth/simulation/decoder_test.go +++ b/x/auth/simulation/decoder_test.go @@ -32,17 +32,19 @@ func TestDecodeStore(t *testing.T) { globalAccNumber := gogotypes.UInt64Value{Value: 10} kvPairs := kv.Pairs{ - kv.Pair{ - Key: types.AddressStoreKey(delAddr1), - Value: accBz, - }, - kv.Pair{ - Key: types.GlobalAccountNumberKey, - Value: cdc.MustMarshalBinaryBare(&globalAccNumber), - }, - kv.Pair{ - Key: []byte{0x99}, - Value: []byte{0x99}, + Pairs: []kv.Pair{ + { + Key: types.AddressStoreKey(delAddr1), + Value: accBz, + }, + { + Key: types.GlobalAccountNumberKey, + Value: cdc.MustMarshalBinaryBare(&globalAccNumber), + }, + { + Key: []byte{0x99}, + Value: []byte{0x99}, + }, }, } tests := []struct { @@ -59,9 +61,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], 
kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/bank/simulation/decoder_test.go b/x/bank/simulation/decoder_test.go index d68e273bf..82ab32c14 100644 --- a/x/bank/simulation/decoder_test.go +++ b/x/bank/simulation/decoder_test.go @@ -23,8 +23,10 @@ func TestDecodeStore(t *testing.T) { require.NoError(t, err) kvPairs := kv.Pairs{ - kv.Pair{Key: types.SupplyKey, Value: supplyBz}, - kv.Pair{Key: []byte{0x99}, Value: []byte{0x99}}, + Pairs: []kv.Pair{ + {Key: types.SupplyKey, Value: supplyBz}, + {Key: []byte{0x99}, Value: []byte{0x99}}, + }, } tests := []struct { @@ -40,9 +42,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/capability/simulation/decoder_test.go b/x/capability/simulation/decoder_test.go index e7993eb88..911580505 100644 --- a/x/capability/simulation/decoder_test.go +++ b/x/capability/simulation/decoder_test.go @@ -22,17 +22,19 @@ func TestDecodeStore(t *testing.T) { } kvPairs := kv.Pairs{ - kv.Pair{ - Key: types.KeyIndex, - Value: sdk.Uint64ToBigEndian(10), - }, - kv.Pair{ - Key: types.KeyPrefixIndexCapability, - Value: cdc.MustMarshalBinaryBare(&capOwners), - }, - kv.Pair{ - Key: []byte{0x99}, - Value: []byte{0x99}, + Pairs: []kv.Pair{ + { + Key: types.KeyIndex, + Value: sdk.Uint64ToBigEndian(10), + }, + { + Key: types.KeyPrefixIndexCapability, + Value: cdc.MustMarshalBinaryBare(&capOwners), + }, + { + Key: []byte{0x99}, + Value: []byte{0x99}, + }, }, } tests := []struct { @@ -49,9 +51,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/distribution/simulation/decoder_test.go b/x/distribution/simulation/decoder_test.go index afb3671b2..8b65d08a0 100644 --- a/x/distribution/simulation/decoder_test.go +++ b/x/distribution/simulation/decoder_test.go @@ -36,16 +36,18 @@ func TestDecodeDistributionStore(t *testing.T) { slashEvent := types.NewValidatorSlashEvent(10, sdk.OneDec()) kvPairs := kv.Pairs{ - kv.Pair{Key: types.FeePoolKey, Value: cdc.MustMarshalBinaryBare(&feePool)}, - kv.Pair{Key: types.ProposerKey, Value: consAddr1.Bytes()}, - kv.Pair{Key: types.GetValidatorOutstandingRewardsKey(valAddr1), Value: cdc.MustMarshalBinaryBare(&outstanding)}, - kv.Pair{Key: types.GetDelegatorWithdrawAddrKey(delAddr1), Value: delAddr1.Bytes()}, - kv.Pair{Key: types.GetDelegatorStartingInfoKey(valAddr1, delAddr1), Value: cdc.MustMarshalBinaryBare(&info)}, - kv.Pair{Key: types.GetValidatorHistoricalRewardsKey(valAddr1, 100), Value: cdc.MustMarshalBinaryBare(&historicalRewards)}, - kv.Pair{Key: types.GetValidatorCurrentRewardsKey(valAddr1), Value: cdc.MustMarshalBinaryBare(¤tRewards)}, - kv.Pair{Key: 
types.GetValidatorAccumulatedCommissionKey(valAddr1), Value: cdc.MustMarshalBinaryBare(&commission)}, - kv.Pair{Key: types.GetValidatorSlashEventKeyPrefix(valAddr1, 13), Value: cdc.MustMarshalBinaryBare(&slashEvent)}, - kv.Pair{Key: []byte{0x99}, Value: []byte{0x99}}, + Pairs: []kv.Pair{ + {Key: types.FeePoolKey, Value: cdc.MustMarshalBinaryBare(&feePool)}, + {Key: types.ProposerKey, Value: consAddr1.Bytes()}, + {Key: types.GetValidatorOutstandingRewardsKey(valAddr1), Value: cdc.MustMarshalBinaryBare(&outstanding)}, + {Key: types.GetDelegatorWithdrawAddrKey(delAddr1), Value: delAddr1.Bytes()}, + {Key: types.GetDelegatorStartingInfoKey(valAddr1, delAddr1), Value: cdc.MustMarshalBinaryBare(&info)}, + {Key: types.GetValidatorHistoricalRewardsKey(valAddr1, 100), Value: cdc.MustMarshalBinaryBare(&historicalRewards)}, + {Key: types.GetValidatorCurrentRewardsKey(valAddr1), Value: cdc.MustMarshalBinaryBare(¤tRewards)}, + {Key: types.GetValidatorAccumulatedCommissionKey(valAddr1), Value: cdc.MustMarshalBinaryBare(&commission)}, + {Key: types.GetValidatorSlashEventKeyPrefix(valAddr1, 13), Value: cdc.MustMarshalBinaryBare(&slashEvent)}, + {Key: []byte{0x99}, Value: []byte{0x99}}, + }, } tests := []struct { @@ -68,9 +70,9 @@ func TestDecodeDistributionStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/evidence/simulation/decoder_test.go b/x/evidence/simulation/decoder_test.go index ff1927843..b8d1e70c1 100644 --- a/x/evidence/simulation/decoder_test.go +++ b/x/evidence/simulation/decoder_test.go @@ -32,13 +32,15 @@ func TestDecodeStore(t *testing.T) { require.NoError(t, err) kvPairs := kv.Pairs{ - kv.Pair{ - Key: types.KeyPrefixEvidence, - Value: evBz, - }, - kv.Pair{ - Key: []byte{0x99}, - Value: []byte{0x99}, + Pairs: []kv.Pair{ + { + Key: types.KeyPrefixEvidence, + Value: evBz, + }, + { + Key: []byte{0x99}, + Value: []byte{0x99}, + }, }, } tests := []struct { @@ -54,9 +56,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/gov/simulation/decoder_test.go b/x/gov/simulation/decoder_test.go index e08c62b68..c5c9e720d 100644 --- a/x/gov/simulation/decoder_test.go +++ b/x/gov/simulation/decoder_test.go @@ -40,11 +40,13 @@ func TestDecodeStore(t *testing.T) { require.NoError(t, err) kvPairs := kv.Pairs{ - kv.Pair{Key: types.ProposalKey(1), Value: proposalBz}, - kv.Pair{Key: types.InactiveProposalQueueKey(1, endTime), Value: proposalIDBz}, - kv.Pair{Key: types.DepositKey(1, delAddr1), Value: cdc.MustMarshalBinaryBare(&deposit)}, - kv.Pair{Key: types.VoteKey(1, delAddr1), Value: cdc.MustMarshalBinaryBare(&vote)}, - kv.Pair{Key: []byte{0x99}, Value: []byte{0x99}}, + Pairs: []kv.Pair{ + {Key: types.ProposalKey(1), Value: proposalBz}, + {Key: types.InactiveProposalQueueKey(1, endTime), Value: proposalIDBz}, + {Key: types.DepositKey(1, delAddr1), 
Value: cdc.MustMarshalBinaryBare(&deposit)}, + {Key: types.VoteKey(1, delAddr1), Value: cdc.MustMarshalBinaryBare(&vote)}, + {Key: []byte{0x99}, Value: []byte{0x99}}, + }, } tests := []struct { @@ -63,9 +65,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/ibc/02-client/simulation/decoder_test.go b/x/ibc/02-client/simulation/decoder_test.go index f1b2e2118..f016d0f15 100644 --- a/x/ibc/02-client/simulation/decoder_test.go +++ b/x/ibc/02-client/simulation/decoder_test.go @@ -29,21 +29,23 @@ func TestDecodeStore(t *testing.T) { } kvPairs := kv.Pairs{ - kv.Pair{ - Key: host.FullKeyClientPath(clientID, host.KeyClientState()), - Value: app.IBCKeeper.ClientKeeper.MustMarshalClientState(clientState), - }, - kv.Pair{ - Key: host.FullKeyClientPath(clientID, host.KeyClientType()), - Value: []byte(exported.Tendermint.String()), - }, - kv.Pair{ - Key: host.FullKeyClientPath(clientID, host.KeyConsensusState(10)), - Value: app.IBCKeeper.ClientKeeper.MustMarshalConsensusState(consState), - }, - kv.Pair{ - Key: []byte{0x99}, - Value: []byte{0x99}, + Pairs: []kv.Pair{ + { + Key: host.FullKeyClientPath(clientID, host.KeyClientState()), + Value: app.IBCKeeper.ClientKeeper.MustMarshalClientState(clientState), + }, + { + Key: host.FullKeyClientPath(clientID, host.KeyClientType()), + Value: []byte(exported.Tendermint.String()), + }, + { + Key: host.FullKeyClientPath(clientID, host.KeyConsensusState(10)), + Value: app.IBCKeeper.ClientKeeper.MustMarshalConsensusState(consState), + }, + { + Key: []byte{0x99}, + Value: []byte{0x99}, + }, }, } tests := []struct { @@ -59,13 +61,13 @@ func TestDecodeStore(t *testing.T) { for i, tt := range tests { i, tt := i, tt t.Run(tt.name, func(t *testing.T) { - res, found := simulation.NewDecodeStore(app.IBCKeeper.ClientKeeper, kvPairs[i], kvPairs[i]) + res, found := simulation.NewDecodeStore(app.IBCKeeper.ClientKeeper, kvPairs.Pairs[i], kvPairs.Pairs[i]) if i == len(tests)-1 { - require.False(t, found, string(kvPairs[i].Key)) - require.Empty(t, res, string(kvPairs[i].Key)) + require.False(t, found, string(kvPairs.Pairs[i].Key)) + require.Empty(t, res, string(kvPairs.Pairs[i].Key)) } else { - require.True(t, found, string(kvPairs[i].Key)) - require.Equal(t, tt.expectedLog, res, string(kvPairs[i].Key)) + require.True(t, found, string(kvPairs.Pairs[i].Key)) + require.Equal(t, tt.expectedLog, res, string(kvPairs.Pairs[i].Key)) } }) } diff --git a/x/ibc/03-connection/simulation/decoder_test.go b/x/ibc/03-connection/simulation/decoder_test.go index f610c83ad..c5cad2209 100644 --- a/x/ibc/03-connection/simulation/decoder_test.go +++ b/x/ibc/03-connection/simulation/decoder_test.go @@ -29,17 +29,19 @@ func TestDecodeStore(t *testing.T) { } kvPairs := kv.Pairs{ - kv.Pair{ - Key: host.KeyClientConnections(connection.ClientID), - Value: cdc.MustMarshalBinaryBare(&paths), - }, - kv.Pair{ - Key: host.KeyConnection(connectionID), - Value: cdc.MustMarshalBinaryBare(&connection), - }, - kv.Pair{ - Key: []byte{0x99}, - Value: []byte{0x99}, + Pairs: []kv.Pair{ + { + Key: host.KeyClientConnections(connection.ClientID), + Value: cdc.MustMarshalBinaryBare(&paths), + }, + { + Key: 
host.KeyConnection(connectionID), + Value: cdc.MustMarshalBinaryBare(&connection), + }, + { + Key: []byte{0x99}, + Value: []byte{0x99}, + }, }, } tests := []struct { @@ -54,13 +56,13 @@ func TestDecodeStore(t *testing.T) { for i, tt := range tests { i, tt := i, tt t.Run(tt.name, func(t *testing.T) { - res, found := simulation.NewDecodeStore(cdc, kvPairs[i], kvPairs[i]) + res, found := simulation.NewDecodeStore(cdc, kvPairs.Pairs[i], kvPairs.Pairs[i]) if i == len(tests)-1 { - require.False(t, found, string(kvPairs[i].Key)) - require.Empty(t, res, string(kvPairs[i].Key)) + require.False(t, found, string(kvPairs.Pairs[i].Key)) + require.Empty(t, res, string(kvPairs.Pairs[i].Key)) } else { - require.True(t, found, string(kvPairs[i].Key)) - require.Equal(t, tt.expectedLog, res, string(kvPairs[i].Key)) + require.True(t, found, string(kvPairs.Pairs[i].Key)) + require.Equal(t, tt.expectedLog, res, string(kvPairs.Pairs[i].Key)) } }) } diff --git a/x/ibc/04-channel/simulation/decoder_test.go b/x/ibc/04-channel/simulation/decoder_test.go index dfb1ace7d..d511273b8 100644 --- a/x/ibc/04-channel/simulation/decoder_test.go +++ b/x/ibc/04-channel/simulation/decoder_test.go @@ -29,33 +29,35 @@ func TestDecodeStore(t *testing.T) { bz := []byte{0x1, 0x2, 0x3} kvPairs := kv.Pairs{ - kv.Pair{ - Key: host.KeyChannel(portID, channelID), - Value: cdc.MustMarshalBinaryBare(&channel), - }, - kv.Pair{ - Key: host.KeyNextSequenceSend(portID, channelID), - Value: sdk.Uint64ToBigEndian(1), - }, - kv.Pair{ - Key: host.KeyNextSequenceRecv(portID, channelID), - Value: sdk.Uint64ToBigEndian(1), - }, - kv.Pair{ - Key: host.KeyNextSequenceAck(portID, channelID), - Value: sdk.Uint64ToBigEndian(1), - }, - kv.Pair{ - Key: host.KeyPacketCommitment(portID, channelID, 1), - Value: bz, - }, - kv.Pair{ - Key: host.KeyPacketAcknowledgement(portID, channelID, 1), - Value: bz, - }, - kv.Pair{ - Key: []byte{0x99}, - Value: []byte{0x99}, + Pairs: []kv.Pair{ + { + Key: host.KeyChannel(portID, channelID), + Value: cdc.MustMarshalBinaryBare(&channel), + }, + { + Key: host.KeyNextSequenceSend(portID, channelID), + Value: sdk.Uint64ToBigEndian(1), + }, + { + Key: host.KeyNextSequenceRecv(portID, channelID), + Value: sdk.Uint64ToBigEndian(1), + }, + { + Key: host.KeyNextSequenceAck(portID, channelID), + Value: sdk.Uint64ToBigEndian(1), + }, + { + Key: host.KeyPacketCommitment(portID, channelID, 1), + Value: bz, + }, + { + Key: host.KeyPacketAcknowledgement(portID, channelID, 1), + Value: bz, + }, + { + Key: []byte{0x99}, + Value: []byte{0x99}, + }, }, } tests := []struct { @@ -74,13 +76,13 @@ func TestDecodeStore(t *testing.T) { for i, tt := range tests { i, tt := i, tt t.Run(tt.name, func(t *testing.T) { - res, found := simulation.NewDecodeStore(cdc, kvPairs[i], kvPairs[i]) + res, found := simulation.NewDecodeStore(cdc, kvPairs.Pairs[i], kvPairs.Pairs[i]) if i == len(tests)-1 { - require.False(t, found, string(kvPairs[i].Key)) - require.Empty(t, res, string(kvPairs[i].Key)) + require.False(t, found, string(kvPairs.Pairs[i].Key)) + require.Empty(t, res, string(kvPairs.Pairs[i].Key)) } else { - require.True(t, found, string(kvPairs[i].Key)) - require.Equal(t, tt.expectedLog, res, string(kvPairs[i].Key)) + require.True(t, found, string(kvPairs.Pairs[i].Key)) + require.Equal(t, tt.expectedLog, res, string(kvPairs.Pairs[i].Key)) } }) } diff --git a/x/ibc/simulation/decoder_test.go b/x/ibc/simulation/decoder_test.go index d92b82daa..e88ff3e7b 100644 --- a/x/ibc/simulation/decoder_test.go +++ b/x/ibc/simulation/decoder_test.go @@ -37,21 +37,23 @@ 
func TestDecodeStore(t *testing.T) { } kvPairs := kv.Pairs{ - kv.Pair{ - Key: host.FullKeyClientPath(clientID, host.KeyClientState()), - Value: app.IBCKeeper.ClientKeeper.MustMarshalClientState(clientState), - }, - kv.Pair{ - Key: host.KeyConnection(connectionID), - Value: app.IBCKeeper.Codec().MustMarshalBinaryBare(&connection), - }, - kv.Pair{ - Key: host.KeyChannel(portID, channelID), - Value: app.IBCKeeper.Codec().MustMarshalBinaryBare(&channel), - }, - kv.Pair{ - Key: []byte{0x99}, - Value: []byte{0x99}, + Pairs: []kv.Pair{ + { + Key: host.FullKeyClientPath(clientID, host.KeyClientState()), + Value: app.IBCKeeper.ClientKeeper.MustMarshalClientState(clientState), + }, + { + Key: host.KeyConnection(connectionID), + Value: app.IBCKeeper.Codec().MustMarshalBinaryBare(&connection), + }, + { + Key: host.KeyChannel(portID, channelID), + Value: app.IBCKeeper.Codec().MustMarshalBinaryBare(&channel), + }, + { + Key: []byte{0x99}, + Value: []byte{0x99}, + }, }, } tests := []struct { @@ -68,9 +70,9 @@ func TestDecodeStore(t *testing.T) { i, tt := i, tt t.Run(tt.name, func(t *testing.T) { if i == len(tests)-1 { - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) } else { - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/mint/simulation/decoder_test.go b/x/mint/simulation/decoder_test.go index f67b3bb8a..762768c07 100644 --- a/x/mint/simulation/decoder_test.go +++ b/x/mint/simulation/decoder_test.go @@ -20,8 +20,10 @@ func TestDecodeStore(t *testing.T) { minter := types.NewMinter(sdk.OneDec(), sdk.NewDec(15)) kvPairs := kv.Pairs{ - kv.Pair{Key: types.MinterKey, Value: cdc.MustMarshalBinaryBare(&minter)}, - kv.Pair{Key: []byte{0x99}, Value: []byte{0x99}}, + Pairs: []kv.Pair{ + {Key: types.MinterKey, Value: cdc.MustMarshalBinaryBare(&minter)}, + {Key: []byte{0x99}, Value: []byte{0x99}}, + }, } tests := []struct { name string @@ -36,9 +38,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/slashing/simulation/decoder_test.go b/x/slashing/simulation/decoder_test.go index 866422e70..493645392 100644 --- a/x/slashing/simulation/decoder_test.go +++ b/x/slashing/simulation/decoder_test.go @@ -33,10 +33,12 @@ func TestDecodeStore(t *testing.T) { missed := gogotypes.BoolValue{Value: true} kvPairs := kv.Pairs{ - kv.Pair{Key: types.ValidatorSigningInfoKey(consAddr1), Value: cdc.MustMarshalBinaryBare(&info)}, - kv.Pair{Key: types.ValidatorMissedBlockBitArrayKey(consAddr1, 6), Value: cdc.MustMarshalBinaryBare(&missed)}, - kv.Pair{Key: types.AddrPubkeyRelationKey(delAddr1), Value: cdc.MustMarshalBinaryBare(&gogotypes.StringValue{Value: bechPK})}, - kv.Pair{Key: []byte{0x99}, Value: []byte{0x99}}, + Pairs: []kv.Pair{ + {Key: types.ValidatorSigningInfoKey(consAddr1), Value: cdc.MustMarshalBinaryBare(&info)}, + {Key: types.ValidatorMissedBlockBitArrayKey(consAddr1, 6), Value: cdc.MustMarshalBinaryBare(&missed)}, + {Key: types.AddrPubkeyRelationKey(delAddr1), Value: 
cdc.MustMarshalBinaryBare(&gogotypes.StringValue{Value: bechPK})}, + {Key: []byte{0x99}, Value: []byte{0x99}}, + }, } tests := []struct { @@ -53,9 +55,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) } diff --git a/x/staking/simulation/decoder_test.go b/x/staking/simulation/decoder_test.go index 674d8f2e8..f442f648d 100644 --- a/x/staking/simulation/decoder_test.go +++ b/x/staking/simulation/decoder_test.go @@ -43,13 +43,15 @@ func TestDecodeStore(t *testing.T) { red := types.NewRedelegation(delAddr1, valAddr1, valAddr1, 12, bondTime, sdk.OneInt(), sdk.OneDec()) kvPairs := kv.Pairs{ - kv.Pair{Key: types.LastTotalPowerKey, Value: cdc.MustMarshalBinaryBare(&sdk.IntProto{Int: sdk.OneInt()})}, - kv.Pair{Key: types.GetValidatorKey(valAddr1), Value: cdc.MustMarshalBinaryBare(&val)}, - kv.Pair{Key: types.LastValidatorPowerKey, Value: valAddr1.Bytes()}, - kv.Pair{Key: types.GetDelegationKey(delAddr1, valAddr1), Value: cdc.MustMarshalBinaryBare(&del)}, - kv.Pair{Key: types.GetUBDKey(delAddr1, valAddr1), Value: cdc.MustMarshalBinaryBare(&ubd)}, - kv.Pair{Key: types.GetREDKey(delAddr1, valAddr1, valAddr1), Value: cdc.MustMarshalBinaryBare(&red)}, - kv.Pair{Key: []byte{0x99}, Value: []byte{0x99}}, + Pairs: []kv.Pair{ + {Key: types.LastTotalPowerKey, Value: cdc.MustMarshalBinaryBare(&sdk.IntProto{Int: sdk.OneInt()})}, + {Key: types.GetValidatorKey(valAddr1), Value: cdc.MustMarshalBinaryBare(&val)}, + {Key: types.LastValidatorPowerKey, Value: valAddr1.Bytes()}, + {Key: types.GetDelegationKey(delAddr1, valAddr1), Value: cdc.MustMarshalBinaryBare(&del)}, + {Key: types.GetUBDKey(delAddr1, valAddr1), Value: cdc.MustMarshalBinaryBare(&ubd)}, + {Key: types.GetREDKey(delAddr1, valAddr1, valAddr1), Value: cdc.MustMarshalBinaryBare(&red)}, + {Key: []byte{0x99}, Value: []byte{0x99}}, + }, } tests := []struct { @@ -69,9 +71,9 @@ func TestDecodeStore(t *testing.T) { t.Run(tt.name, func(t *testing.T) { switch i { case len(tests) - 1: - require.Panics(t, func() { dec(kvPairs[i], kvPairs[i]) }, tt.name) + require.Panics(t, func() { dec(kvPairs.Pairs[i], kvPairs.Pairs[i]) }, tt.name) default: - require.Equal(t, tt.expectedLog, dec(kvPairs[i], kvPairs[i]), tt.name) + require.Equal(t, tt.expectedLog, dec(kvPairs.Pairs[i], kvPairs.Pairs[i]), tt.name) } }) }
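
A minimal usage sketch of the proto-generated kv.Pairs type this patch introduces, assuming only the generated Marshal/Unmarshal methods and the go_package path shown in the diff; the literal keys, values, and the main wrapper below are illustrative placeholders, not part of the patch:

    package main

    import (
    	"fmt"

    	"github.com/cosmos/cosmos-sdk/types/kv"
    )

    func main() {
    	// Build a Pairs value; this replaces the old Amino-encoded []types.KVPair slice.
    	pairs := kv.Pairs{
    		Pairs: []kv.Pair{
    			{Key: []byte("key1"), Value: []byte("val1")},
    			{Key: []byte("key2"), Value: []byte("val2")},
    		},
    	}

    	// Proto-encode with the generated Marshal method instead of cdc.MustMarshalBinaryBare.
    	bz, err := pairs.Marshal()
    	if err != nil {
    		panic(err)
    	}

    	// Decode back with the generated Unmarshal method to round-trip the pairs.
    	var decoded kv.Pairs
    	if err := decoded.Unmarshal(bz); err != nil {
    		panic(err)
    	}

    	fmt.Printf("round-tripped %d pairs\n", len(decoded.Pairs))
    }

The same construction pattern is what the updated decoder tests rely on: callers index kvPairs.Pairs[i] rather than a bare KVPair slice, and any byte-level expectations are produced by pairs.Marshal() rather than an Amino codec.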