Merge branch 'master' into hulatown/adr-043-nft

This commit is contained in:
billy rennekamp 2021-07-12 19:18:12 +02:00 committed by GitHub
commit 121dff1be9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 70 additions and 18 deletions

View File

@ -75,6 +75,7 @@ Ref: https://keepachangelog.com/en/1.0.0/
### Bug Fixes ### Bug Fixes
* [\#9651](https://github.com/cosmos/cosmos-sdk/pull/9651) Change inconsistent limit of `0` to `MaxUint64` on InfiniteGasMeter and add GasRemaining func to GasMeter.
* [\#9639](https://github.com/cosmos/cosmos-sdk/pull/9639) Check store keys length before accessing them by making sure that `key` is of length `m+1` (for `key[n:m]`) * [\#9639](https://github.com/cosmos/cosmos-sdk/pull/9639) Check store keys length before accessing them by making sure that `key` is of length `m+1` (for `key[n:m]`)
* (types) [\#9627](https://github.com/cosmos/cosmos-sdk/pull/9627) Fix nil pointer panic on `NewBigIntFromInt` * (types) [\#9627](https://github.com/cosmos/cosmos-sdk/pull/9627) Fix nil pointer panic on `NewBigIntFromInt`
* (x/genutil) [\#9574](https://github.com/cosmos/cosmos-sdk/pull/9575) Actually use the `gentx` client tx flags (like `--keyring-dir`) * (x/genutil) [\#9574](https://github.com/cosmos/cosmos-sdk/pull/9575) Actually use the `gentx` client tx flags (like `--keyring-dir`)

View File

@ -70,7 +70,7 @@ The `DAEMON` specific code and operations (e.g. tendermint config, the applicati
Generally, the system requires that the system administrator place all relevant binaries on the disk before the upgrade happens. However, for people who don't need such control and want an easier setup (maybe they are syncing a non-validating fullnode and want to do little maintenance), there is another option. Generally, the system requires that the system administrator place all relevant binaries on the disk before the upgrade happens. However, for people who don't need such control and want an easier setup (maybe they are syncing a non-validating fullnode and want to do little maintenance), there is another option.
If you set `DAEMON_ALLOW_DOWNLOAD_BINARIES=on`, and no local binary can be found when an upgrade is triggered, `cosmovisor` will attempt to download and install the binary itself. The plan stored in the upgrade module has an info field for arbitrary JSON. This info is expected to be output on the halt log message. There are two valid formats to specify a download in such a message: If you set `DAEMON_ALLOW_DOWNLOAD_BINARIES=true`, and no local binary can be found when an upgrade is triggered, `cosmovisor` will attempt to download and install the binary itself. The plan stored in the upgrade module has an info field for arbitrary JSON. This info is expected to be output on the halt log message. There are two valid formats to specify a download in such a message:
1. Store an os/architecture -> binary URI map in the upgrade plan info field as JSON under the `"binaries"` key. For example: 1. Store an os/architecture -> binary URI map in the upgrade plan info field as JSON under the `"binaries"` key. For example:
@ -90,7 +90,7 @@ https://example.com/testnet-1001-info.json?checksum=sha256:deaaa99fda9407c4dbe1d
This file contained in the link will be retrieved by [go-getter](https://github.com/hashicorp/go-getter) and the `"binaries"` field will be parsed as above. This file contained in the link will be retrieved by [go-getter](https://github.com/hashicorp/go-getter) and the `"binaries"` field will be parsed as above.
If there is no local binary, `DAEMON_ALLOW_DOWNLOAD_BINARIES=on`, and we can access a canonical url for the new binary, then the `cosmovisor` will download it with [go-getter](https://github.com/hashicorp/go-getter) and unpack it into the `upgrades/<name>` folder to be run as if we installed it manually. If there is no local binary, `DAEMON_ALLOW_DOWNLOAD_BINARIES=true`, and we can access a canonical url for the new binary, then the `cosmovisor` will download it with [go-getter](https://github.com/hashicorp/go-getter) and unpack it into the `upgrades/<name>` folder to be run as if we installed it manually.
Note that for this mechanism to provide strong security guarantees, all URLs should include a SHA 256/512 checksum. This ensures that no false binary is run, even if someone hacks the server or hijacks the DNS. `go-getter` will always ensure the downloaded file matches the checksum if it is provided. `go-getter` will also handle unpacking archives into directories (in this case the download link should point to a `zip` file of all data in the `bin` directory). Note that for this mechanism to provide strong security guarantees, all URLs should include a SHA 256/512 checksum. This ensures that no false binary is run, even if someone hacks the server or hijacks the DNS. `go-getter` will always ensure the downloaded file matches the checksum if it is provided. `go-getter` will also handle unpacking archives into directories (in this case the download link should point to a `zip` file of all data in the `bin` directory).

View File

@ -71,4 +71,4 @@ networks:
ipam: ipam:
driver: default driver: default
config: config:
- subnet: 192.168.10.0/16 - subnet: 192.168.10.0/25

View File

@ -54,6 +54,7 @@ type Store struct {
lazyLoading bool lazyLoading bool
pruneHeights []int64 pruneHeights []int64
initialVersion int64 initialVersion int64
removalMap map[types.StoreKey]bool
traceWriter io.Writer traceWriter io.Writer
traceContext types.TraceContext traceContext types.TraceContext
@ -81,6 +82,7 @@ func NewStore(db dbm.DB) *Store {
keysByName: make(map[string]types.StoreKey), keysByName: make(map[string]types.StoreKey),
pruneHeights: make([]int64, 0), pruneHeights: make([]int64, 0),
listeners: make(map[types.StoreKey][]types.WriteListener), listeners: make(map[types.StoreKey][]types.WriteListener),
removalMap: make(map[types.StoreKey]bool),
} }
} }
@ -210,9 +212,10 @@ func (rs *Store) loadVersion(ver int64, upgrades *types.StoreUpgrades) error {
if err := deleteKVStore(store.(types.KVStore)); err != nil { if err := deleteKVStore(store.(types.KVStore)); err != nil {
return errors.Wrapf(err, "failed to delete store %s", key.Name()) return errors.Wrapf(err, "failed to delete store %s", key.Name())
} }
rs.removalMap[key] = true
} else if oldName := upgrades.RenamedFrom(key.Name()); oldName != "" { } else if oldName := upgrades.RenamedFrom(key.Name()); oldName != "" {
// handle renames specially // handle renames specially
// make an unregistered key to satify loadCommitStore params // make an unregistered key to satisfy loadCommitStore params
oldKey := types.NewKVStoreKey(oldName) oldKey := types.NewKVStoreKey(oldName)
oldParams := storeParams oldParams := storeParams
oldParams.key = oldKey oldParams.key = oldKey
@ -227,6 +230,11 @@ func (rs *Store) loadVersion(ver int64, upgrades *types.StoreUpgrades) error {
if err := moveKVStoreData(oldStore.(types.KVStore), store.(types.KVStore)); err != nil { if err := moveKVStoreData(oldStore.(types.KVStore), store.(types.KVStore)); err != nil {
return errors.Wrapf(err, "failed to move store %s -> %s", oldName, key.Name()) return errors.Wrapf(err, "failed to move store %s -> %s", oldName, key.Name())
} }
// add the old key so its deletion is committed
newStores[oldKey] = oldStore
// this will ensure it's not perpetually stored in commitInfo
rs.removalMap[oldKey] = true
} }
} }
@ -361,8 +369,19 @@ func (rs *Store) Commit() types.CommitID {
previousHeight = rs.lastCommitInfo.GetVersion() previousHeight = rs.lastCommitInfo.GetVersion()
version = previousHeight + 1 version = previousHeight + 1
} }
rs.lastCommitInfo = commitStores(version, rs.stores, rs.removalMap)
rs.lastCommitInfo = commitStores(version, rs.stores) // remove remnants of removed stores
for sk := range rs.removalMap {
if _, ok := rs.stores[sk]; ok {
delete(rs.stores, sk)
delete(rs.storesParams, sk)
delete(rs.keysByName, sk.Name())
}
}
// reset the removalMap
rs.removalMap = make(map[types.StoreKey]bool)
// Determine if pruneHeight height needs to be added to the list of heights to // Determine if pruneHeight height needs to be added to the list of heights to
// be pruned, where pruneHeight = (commitHeight - 1) - KeepRecent. // be pruned, where pruneHeight = (commitHeight - 1) - KeepRecent.
@ -944,7 +963,7 @@ func getLatestVersion(db dbm.DB) int64 {
} }
// Commits each store and returns a new commitInfo. // Commits each store and returns a new commitInfo.
func commitStores(version int64, storeMap map[types.StoreKey]types.CommitKVStore) *types.CommitInfo { func commitStores(version int64, storeMap map[types.StoreKey]types.CommitKVStore, removalMap map[types.StoreKey]bool) *types.CommitInfo {
storeInfos := make([]types.StoreInfo, 0, len(storeMap)) storeInfos := make([]types.StoreInfo, 0, len(storeMap))
for key, store := range storeMap { for key, store := range storeMap {
@ -954,10 +973,12 @@ func commitStores(version int64, storeMap map[types.StoreKey]types.CommitKVStore
continue continue
} }
si := types.StoreInfo{} if !removalMap[key] {
si.Name = key.Name() si := types.StoreInfo{}
si.CommitId = commitID si.Name = key.Name()
storeInfos = append(storeInfos, si) si.CommitId = commitID
storeInfos = append(storeInfos, si)
}
} }
return &types.CommitInfo{ return &types.CommitInfo{

View File

@ -300,8 +300,8 @@ func TestMultistoreLoadWithUpgrade(t *testing.T) {
ci, err = getCommitInfo(db, 2) ci, err = getCommitInfo(db, 2)
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, int64(2), ci.Version) require.Equal(t, int64(2), ci.Version)
require.Equal(t, 4, len(ci.StoreInfos), ci.StoreInfos) require.Equal(t, 3, len(ci.StoreInfos), ci.StoreInfos)
checkContains(t, ci.StoreInfos, []string{"store1", "restore2", "store3", "store4"}) checkContains(t, ci.StoreInfos, []string{"store1", "restore2", "store4"})
} }
func TestParsePath(t *testing.T) { func TestParsePath(t *testing.T) {

View File

@ -41,6 +41,7 @@ type ErrorGasOverflow struct {
type GasMeter interface { type GasMeter interface {
GasConsumed() Gas GasConsumed() Gas
GasConsumedToLimit() Gas GasConsumedToLimit() Gas
GasRemaining() Gas
Limit() Gas Limit() Gas
ConsumeGas(amount Gas, descriptor string) ConsumeGas(amount Gas, descriptor string)
RefundGas(amount Gas, descriptor string) RefundGas(amount Gas, descriptor string)
@ -66,6 +67,13 @@ func (g *basicGasMeter) GasConsumed() Gas {
return g.consumed return g.consumed
} }
// GasRemaining returns the amount of gas still available before the meter's
// limit is reached. When the meter is already past its limit, there is no
// usable gas left, so zero is returned (the subtraction would otherwise
// underflow, since Gas is unsigned).
func (g *basicGasMeter) GasRemaining() Gas {
	if !g.IsPastLimit() {
		return g.limit - g.consumed
	}
	return 0
}
func (g *basicGasMeter) Limit() Gas { func (g *basicGasMeter) Limit() Gas {
return g.limit return g.limit
} }
@ -145,8 +153,12 @@ func (g *infiniteGasMeter) GasConsumedToLimit() Gas {
return g.consumed return g.consumed
} }
// GasRemaining returns the gas still available on an infinite meter.
// An infinite meter has no effective limit, so this always reports
// math.MaxUint64, mirroring the meter's Limit() value.
func (g *infiniteGasMeter) GasRemaining() Gas {
return math.MaxUint64
}
func (g *infiniteGasMeter) Limit() Gas { func (g *infiniteGasMeter) Limit() Gas {
return 0 return math.MaxUint64
} }
func (g *infiniteGasMeter) ConsumeGas(amount Gas, descriptor string) { func (g *infiniteGasMeter) ConsumeGas(amount Gas, descriptor string) {

View File

@ -10,13 +10,16 @@ import (
func TestInfiniteGasMeter(t *testing.T) { func TestInfiniteGasMeter(t *testing.T) {
t.Parallel() t.Parallel()
meter := NewInfiniteGasMeter() meter := NewInfiniteGasMeter()
require.Equal(t, uint64(0), meter.Limit()) require.Equal(t, uint64(math.MaxUint64), meter.Limit())
require.Equal(t, uint64(math.MaxUint64), meter.GasRemaining())
require.Equal(t, uint64(0), meter.GasConsumed()) require.Equal(t, uint64(0), meter.GasConsumed())
require.Equal(t, uint64(0), meter.GasConsumedToLimit()) require.Equal(t, uint64(0), meter.GasConsumedToLimit())
meter.ConsumeGas(10, "consume 10") meter.ConsumeGas(10, "consume 10")
require.Equal(t, uint64(math.MaxUint64), meter.GasRemaining())
require.Equal(t, uint64(10), meter.GasConsumed()) require.Equal(t, uint64(10), meter.GasConsumed())
require.Equal(t, uint64(10), meter.GasConsumedToLimit()) require.Equal(t, uint64(10), meter.GasConsumedToLimit())
meter.RefundGas(1, "refund 1") meter.RefundGas(1, "refund 1")
require.Equal(t, uint64(math.MaxUint64), meter.GasRemaining())
require.Equal(t, uint64(9), meter.GasConsumed()) require.Equal(t, uint64(9), meter.GasConsumed())
require.False(t, meter.IsPastLimit()) require.False(t, meter.IsPastLimit())
require.False(t, meter.IsOutOfGas()) require.False(t, meter.IsOutOfGas())
@ -48,6 +51,7 @@ func TestGasMeter(t *testing.T) {
used += usage used += usage
require.NotPanics(t, func() { meter.ConsumeGas(usage, "") }, "Not exceeded limit but panicked. tc #%d, usage #%d", tcnum, unum) require.NotPanics(t, func() { meter.ConsumeGas(usage, "") }, "Not exceeded limit but panicked. tc #%d, usage #%d", tcnum, unum)
require.Equal(t, used, meter.GasConsumed(), "Gas consumption not match. tc #%d, usage #%d", tcnum, unum) require.Equal(t, used, meter.GasConsumed(), "Gas consumption not match. tc #%d, usage #%d", tcnum, unum)
require.Equal(t, tc.limit-used, meter.GasRemaining(), "Gas left not match. tc #%d, usage #%d", tcnum, unum)
require.Equal(t, used, meter.GasConsumedToLimit(), "Gas consumption (to limit) not match. tc #%d, usage #%d", tcnum, unum) require.Equal(t, used, meter.GasConsumedToLimit(), "Gas consumption (to limit) not match. tc #%d, usage #%d", tcnum, unum)
require.False(t, meter.IsPastLimit(), "Not exceeded limit but got IsPastLimit() true") require.False(t, meter.IsPastLimit(), "Not exceeded limit but got IsPastLimit() true")
if unum < len(tc.usage)-1 { if unum < len(tc.usage)-1 {
@ -60,13 +64,20 @@ func TestGasMeter(t *testing.T) {
require.Panics(t, func() { meter.ConsumeGas(1, "") }, "Exceeded but not panicked. tc #%d", tcnum) require.Panics(t, func() { meter.ConsumeGas(1, "") }, "Exceeded but not panicked. tc #%d", tcnum)
require.Equal(t, meter.GasConsumedToLimit(), meter.Limit(), "Gas consumption (to limit) not match limit") require.Equal(t, meter.GasConsumedToLimit(), meter.Limit(), "Gas consumption (to limit) not match limit")
require.Equal(t, meter.GasConsumed(), meter.Limit()+1, "Gas consumption not match limit+1") require.Equal(t, meter.GasConsumed(), meter.Limit()+1, "Gas consumption not match limit+1")
require.Equal(t, uint64(0), meter.GasRemaining())
require.NotPanics(t, func() { meter.RefundGas(1, "refund 1") }) require.NotPanics(t, func() { meter.RefundGas(1, "refund 1") })
require.Equal(t, meter.GasConsumed(), meter.Limit(), "Gas consumption not match limit+1") require.Equal(t, meter.GasConsumed(), meter.Limit(), "Gas consumption not match with limit")
require.Equal(t, uint64(0), meter.GasRemaining())
require.Panics(t, func() { meter.RefundGas(meter.GasConsumed()+1, "refund greater than consumed") }) require.Panics(t, func() { meter.RefundGas(meter.GasConsumed()+1, "refund greater than consumed") })
require.NotPanics(t, func() { meter.RefundGas(meter.GasConsumed(), "refund consumed gas") })
require.Equal(t, meter.Limit(), meter.GasRemaining())
meter2 := NewGasMeter(math.MaxUint64) meter2 := NewGasMeter(math.MaxUint64)
require.Equal(t, uint64(math.MaxUint64), meter2.GasRemaining())
meter2.ConsumeGas(Gas(math.MaxUint64/2), "consume half max uint64") meter2.ConsumeGas(Gas(math.MaxUint64/2), "consume half max uint64")
require.Equal(t, Gas(math.MaxUint64-(math.MaxUint64/2)), meter2.GasRemaining())
require.Panics(t, func() { meter2.ConsumeGas(Gas(math.MaxUint64/2)+2, "panic") }) require.Panics(t, func() { meter2.ConsumeGas(Gas(math.MaxUint64/2)+2, "panic") })
} }
} }

View File

@ -1,6 +1,8 @@
package ante_test package ante_test
import ( import (
"math"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/testutil/testdata" "github.com/cosmos/cosmos-sdk/testutil/testdata"
sdk "github.com/cosmos/cosmos-sdk/types" sdk "github.com/cosmos/cosmos-sdk/types"
@ -33,8 +35,8 @@ func (suite *AnteTestSuite) TestSetup() {
// Set height to non-zero value for GasMeter to be set // Set height to non-zero value for GasMeter to be set
suite.ctx = suite.ctx.WithBlockHeight(1) suite.ctx = suite.ctx.WithBlockHeight(1)
// Context GasMeter Limit not set // Context GasMeter Limit set to MaxUint64
suite.Require().Equal(uint64(0), suite.ctx.GasMeter().Limit(), "GasMeter set with limit before setup") suite.Require().Equal(uint64(math.MaxUint64), suite.ctx.GasMeter().Limit(), "GasMeter set with limit other than MaxUint64 before setup")
newCtx, err := antehandler(suite.ctx, tx, false) newCtx, err := antehandler(suite.ctx, tx, false)
suite.Require().Nil(err, "SetUpContextDecorator returned error") suite.Require().Nil(err, "SetUpContextDecorator returned error")

View File

@ -90,6 +90,7 @@ func TestSetLoader(t *testing.T) {
loadStoreKey string loadStoreKey string
}{ }{
"don't set loader": { "don't set loader": {
setLoader: nil,
origStoreKey: "foo", origStoreKey: "foo",
loadStoreKey: "foo", loadStoreKey: "foo",
}, },
@ -145,9 +146,13 @@ func TestSetLoader(t *testing.T) {
res := app.Commit() res := app.Commit()
require.NotNil(t, res.Data) require.NotNil(t, res.Data)
// checking the case of the store being renamed
if tc.setLoader != nil {
checkStore(t, db, upgradeHeight, tc.origStoreKey, k, nil)
}
// check db is properly updated // check db is properly updated
checkStore(t, db, upgradeHeight, tc.loadStoreKey, k, v) checkStore(t, db, upgradeHeight, tc.loadStoreKey, k, v)
checkStore(t, db, upgradeHeight, tc.loadStoreKey, []byte("foo"), nil)
}) })
} }
} }