Merge branch 'master' into jordan/cosmos-docs

This commit is contained in:
Jordan Bibla 2018-07-12 20:01:27 -04:00 committed by GitHub
commit 28da104e1a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
29 changed files with 1009 additions and 127 deletions

View File

@ -14,8 +14,10 @@ FEATURES
IMPROVEMENTS
* Updated docs folder to accommodate cosmos.network docs project
* [store] Added support for tracing multi-store operations via `--trace-store`
BUG FIXES
* \#1630 - redelegation no longer removes tokens from the delegator liquid account
* [keys] \#1629 - updating password no longer asks for a new password when the first entered password was incorrect
* [lcd] importing an account would create a random account
@ -111,6 +113,7 @@ FEATURES
- Auth has its invariants checked within the framework
* [tests] Add WaitForNextNBlocksTM helper method
* [keys] New keys now have 24 word recovery keys, for heightened security
- [keys] Add a temporary method for exporting the private key
IMPROVEMENTS
* [x/bank] Now uses go-wire codec instead of 'encoding/json'

View File

@ -2,12 +2,14 @@ package baseapp
import (
"fmt"
"io"
"runtime/debug"
"strings"
"github.com/pkg/errors"
abci "github.com/tendermint/tendermint/abci/types"
"github.com/tendermint/tendermint/crypto/tmhash"
cmn "github.com/tendermint/tendermint/libs/common"
dbm "github.com/tendermint/tendermint/libs/db"
"github.com/tendermint/tendermint/libs/log"
@ -37,7 +39,7 @@ const (
runTxModeDeliver runTxMode = iota
)
// BaseApp reflects the ABCI application implementation.
type BaseApp struct {
// initialized on creation
Logger log.Logger
@ -71,7 +73,12 @@ type BaseApp struct {
var _ abci.Application = (*BaseApp)(nil)
// NewBaseApp returns a reference to an initialized BaseApp.
//
// TODO: Determine how to use a flexible and robust configuration paradigm that
// allows for sensible defaults while being highly configurable
// (e.g. functional options).
//
// NOTE: The db is used to store the version number for now.
// Accepts variable number of option functions, which act on the BaseApp to set configuration choices
func NewBaseApp(name string, cdc *wire.Codec, logger log.Logger, db dbm.DB, options ...func(*BaseApp)) *BaseApp {
@ -85,7 +92,9 @@ func NewBaseApp(name string, cdc *wire.Codec, logger log.Logger, db dbm.DB, opti
codespacer: sdk.NewCodespacer(),
txDecoder: defaultTxDecoder(cdc),
}
// Register the undefined & root codespaces, which should not be used by
// any modules.
app.codespacer.RegisterOrPanic(sdk.CodespaceRoot)
for _, option := range options {
option(app)
@ -98,6 +107,12 @@ func (app *BaseApp) Name() string {
return app.name
}
// SetCommitMultiStoreTracer sets the store tracer on the BaseApp's underlying
// CommitMultiStore.
func (app *BaseApp) SetCommitMultiStoreTracer(w io.Writer) {
app.cms.WithTracer(w)
}
// Register the next available codespace through the baseapp's codespacer, starting from a default
func (app *BaseApp) RegisterCodespace(codespace sdk.CodespaceType) sdk.CodespaceType {
return app.codespacer.RegisterNext(codespace)
@ -392,13 +407,18 @@ func handleQueryP2P(app *BaseApp, path []string, req abci.RequestQuery) (res abc
return sdk.ErrUnknownRequest(msg).QueryResult()
}
// BeginBlock implements the ABCI application interface.
func (app *BaseApp) BeginBlock(req abci.RequestBeginBlock) (res abci.ResponseBeginBlock) {
if app.cms.TracingEnabled() {
app.cms.ResetTraceContext()
app.cms.WithTracingContext(sdk.TraceContext(
map[string]interface{}{"blockHeight": req.Header.Height},
))
}
// Initialize the DeliverTx state. If this is the first block, it should
// already be initialized in InitChain. Otherwise app.deliverState will be
// nil, since it is reset on Commit.
if app.deliverState == nil {
app.setDeliverState(req.Header)
} else {
@ -406,9 +426,11 @@ func (app *BaseApp) BeginBlock(req abci.RequestBeginBlock) (res abci.ResponseBeg
// by InitChain. Context is now updated with Header information.
app.deliverState.ctx = app.deliverState.ctx.WithBlockHeader(req.Header)
}
if app.beginBlocker != nil {
res = app.beginBlocker(app.deliverState.ctx, req)
}
// set the signed validators for addition to context in deliverTx
app.signedValidators = req.Validators
return
@ -548,25 +570,26 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg) (result sdk.Result)
return result
}
// Returns the application's deliverState if app is in runTxModeDeliver,
// otherwise it returns the application's checkstate.
func getState(app *BaseApp, mode runTxMode) *state {
if mode == runTxModeCheck || mode == runTxModeSimulate {
return app.checkState
}
return app.deliverState
}
// runTx processes a transaction. The transaction is processed via an
// anteHandler. txBytes may be nil in some cases, eg. in tests. Also, in the
// future we may support "internal" transactions.
func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (result sdk.Result) {
// NOTE: GasWanted should be returned by the AnteHandler. GasUsed is
// determined by the GasMeter. We need access to the context to get the gas
// meter so we initialize upfront.
var gasWanted int64
ctx := app.getContextForAnte(mode, txBytes)
// Handle any panics.
defer func() {
if r := recover(); r != nil {
switch rType := r.(type) {
@ -578,11 +601,11 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (result sdk
result = sdk.ErrInternal(log).Result()
}
}
result.GasWanted = gasWanted
result.GasUsed = ctx.GasMeter().GasConsumed()
}()
// Get the Msg.
var msgs = tx.GetMsgs()
err := validateBasicTxMsgs(msgs)
@ -590,7 +613,7 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (result sdk
return err.Result()
}
// run the ante handler
if app.anteHandler != nil {
newCtx, anteResult, abort := app.anteHandler(ctx, tx)
if abort {
@ -599,17 +622,24 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (result sdk
if !newCtx.IsZero() {
ctx = newCtx
}
gasWanted = result.GasWanted
}
// Keep the state in a transient CacheWrap in case processing the messages
// fails.
msCache := getState(app, mode).CacheMultiStore()
if msCache.TracingEnabled() {
msCache = msCache.WithTracingContext(sdk.TraceContext(
map[string]interface{}{"txHash": cmn.HexBytes(tmhash.Sum(txBytes)).String()},
)).(sdk.CacheMultiStore)
}
ctx = ctx.WithMultiStore(msCache)
result = app.runMsgs(ctx, msgs)
result.GasWanted = gasWanted
// only update state if all messages pass and we're not in a simulation
if result.IsOK() && mode != runTxModeSimulate {
msCache.Write()
}
@ -617,11 +647,16 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (result sdk
return
}
// EndBlock implements the ABCI application interface.
func (app *BaseApp) EndBlock(req abci.RequestEndBlock) (res abci.ResponseEndBlock) {
if app.deliverState.ms.TracingEnabled() {
app.deliverState.ms = app.deliverState.ms.ResetTraceContext().(sdk.CacheMultiStore)
}
if app.endBlocker != nil {
res = app.endBlocker(app.deliverState.ctx, req)
}
return
}

View File

@ -105,7 +105,7 @@ func InitializeTestLCD(t *testing.T, nValidators int, initAddrs []sdk.AccAddress
privVal := pvm.LoadOrGenFilePV(privValidatorFile)
privVal.Reset()
db := dbm.NewMemDB()
app := gapp.NewGaiaApp(logger, db, nil)
cdc = gapp.MakeCodec()
genesisFile := config.GenesisFile()

View File

@ -2,6 +2,7 @@ package app
import (
"encoding/json"
"io"
"os"
abci "github.com/tendermint/tendermint/abci/types"
@ -55,12 +56,19 @@ type GaiaApp struct {
govKeeper gov.Keeper
}
// NewGaiaApp returns a reference to an initialized GaiaApp.
//
// TODO: Determine how to use a flexible and robust configuration paradigm that
// allows for sensible defaults while being highly configurable
// (e.g. functional options).
func NewGaiaApp(logger log.Logger, db dbm.DB, traceStore io.Writer) *GaiaApp {
cdc := MakeCodec()
bApp := bam.NewBaseApp(appName, cdc, logger, db)
bApp.SetCommitMultiStoreTracer(traceStore)
var app = &GaiaApp{
BaseApp: bApp,
cdc: cdc,
keyMain: sdk.NewKVStoreKey("main"),
keyAccount: sdk.NewKVStoreKey("acc"),

View File

@ -2,6 +2,7 @@ package main
import (
"encoding/json"
"io"
"github.com/spf13/cobra"
@ -38,11 +39,13 @@ func main() {
}
}
func newApp(logger log.Logger, db dbm.DB, traceStore io.Writer) abci.Application {
return app.NewGaiaApp(logger, db, traceStore)
}
func exportAppStateAndTMValidators(
logger log.Logger, db dbm.DB, traceStore io.Writer,
) (json.RawMessage, []tmtypes.GenesisValidator, error) {
gApp := app.NewGaiaApp(logger, db, traceStore)
return gApp.ExportAppStateAndValidators()
}

View File

@ -143,9 +143,12 @@ type GaiaApp struct {
func NewGaiaApp(logger log.Logger, db dbm.DB) *GaiaApp {
cdc := MakeCodec()
bApp := bam.NewBaseApp(appName, cdc, logger, db)
bApp.SetCommitMultiStoreTracer(os.Stdout)
// create your application object
var app = &GaiaApp{
BaseApp: bApp,
cdc: cdc,
keyMain: sdk.NewKVStoreKey("main"),
keyAccount: sdk.NewKVStoreKey("acc"),

View File

@ -240,6 +240,31 @@ func (kb dbKeybase) Sign(name, passphrase string, msg []byte) (sig tcrypto.Signa
return sig, pub, nil
}
func (kb dbKeybase) ExportPrivateKeyObject(name string, passphrase string) (tcrypto.PrivKey, error) {
info, err := kb.Get(name)
if err != nil {
return nil, err
}
var priv tcrypto.PrivKey
switch info.(type) {
case localInfo:
linfo := info.(localInfo)
if linfo.PrivKeyArmor == "" {
err = fmt.Errorf("private key not available")
return nil, err
}
priv, err = unarmorDecryptPrivKey(linfo.PrivKeyArmor, passphrase)
if err != nil {
return nil, err
}
case ledgerInfo:
return nil, errors.New("Only works on local private keys")
case offlineInfo:
return nil, errors.New("Only works on local private keys")
}
return priv, nil
}
func (kb dbKeybase) Export(name string) (armor string, err error) {
bz := kb.db.Get(infoKey(name))
if bz == nil {

View File

@ -39,6 +39,9 @@ type Keybase interface {
ImportPubKey(name string, armor string) (err error)
Export(name string) (armor string, err error)
ExportPubKey(name string) (armor string, err error)
// *only* works on locally-stored keys. Temporary method until we redo the exporting API
ExportPrivateKeyObject(name string, passphrase string) (crypto.PrivKey, error)
}
// Info is the publicly exposed information about a keypair

View File

@ -1,10 +1,45 @@
# Running a Node
> TODO: Improve documentation of `gaiad`
## Basics
To start a node:
```shell
$ gaiad start <flags>
```
Options for running the `gaiad` binary are effectively the same as for `tendermint`.
See `gaiad --help` and the
[guide to using Tendermint](https://github.com/tendermint/tendermint/blob/master/docs/using-tendermint.md)
for more details.
## Debugging
Optionally, you can run `gaiad` with `--trace-store` to trace all store operations
to a specified file.
```shell
$ gaiad start <flags> --trace-store=/path/to/trace.out
```
Key/value pairs will be base64 encoded. Additionally, the block number and any
correlated transaction hash will be included as metadata.
e.g.
```json
...
{"operation":"write","key":"ATW6Bu997eeuUeRBwv1EPGvXRfPR","value":"BggEEBYgFg==","metadata":{"blockHeight":12,"txHash":"5AAC197EC45E6C5DE0798C4A4E2F54BBB695CA9E"}}
{"operation":"write","key":"AjW6Bu997eeuUeRBwv1EPGvXRfPRCgAAAAAAAAA=","value":"AQE=","metadata":{"blockHeight":12,"txHash":"5AAC197EC45E6C5DE0798C4A4E2F54BBB695CA9E"}}
{"operation":"read","key":"ATW6Bu997eeuUeRBwv1EPGvXRfPR","value":"BggEEBYgFg==","metadata":{"blockHeight":13}}
{"operation":"read","key":"AjW6Bu997eeuUeRBwv1EPGvXRfPRCwAAAAAAAAA=","value":"","metadata":{"blockHeight":13}}
...
```
You can then query for the various traced operations using a tool like [jq](https://github.com/stedolan/jq).
```shell
$ jq -s '.[] | select((.key=="ATW6Bu997eeuUeRBwv1EPGvXRfPR") and .metadata.blockHeight==14)' /path/to/trace.out
```
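If you need to see what a traced entry refers to, the base64-encoded `key` and `value` fields can be decoded with standard tooling. The snippet below is a minimal sketch, assuming the same `/path/to/trace.out` as above and a `base64` binary that accepts `--decode`; it picks the first write recorded at block height 12 and hex-dumps the decoded bytes with `xxd`, since store keys and values are raw binary.
```shell
$ jq -rs '[.[] | select(.operation=="write" and .metadata.blockHeight==12)][0].key' /path/to/trace.out | base64 --decode | xxd
$ jq -rs '[.[] | select(.operation=="write" and .metadata.blockHeight==12)][0].value' /path/to/trace.out | base64 --decode | xxd
```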

View File

@ -2,6 +2,7 @@ package main
import (
"encoding/json"
"io"
"os"
"github.com/cosmos/cosmos-sdk/examples/basecoin/app"
@ -39,11 +40,11 @@ func main() {
}
}
func newApp(logger log.Logger, db dbm.DB, storeTracer io.Writer) abci.Application {
return app.NewBasecoinApp(logger, db)
}
func exportAppStateAndTMValidators(logger log.Logger, db dbm.DB, storeTracer io.Writer) (json.RawMessage, []tmtypes.GenesisValidator, error) {
bapp := app.NewBasecoinApp(logger, db)
return bapp.ExportAppStateAndValidators()
}

View File

@ -2,6 +2,7 @@ package main
import (
"encoding/json"
"io"
"os"
"github.com/spf13/cobra"
@ -50,11 +51,11 @@ func CoolAppGenState(cdc *wire.Codec, appGenTxs []json.RawMessage) (appState jso
return
}
func newApp(logger log.Logger, db dbm.DB, _ io.Writer) abci.Application {
return app.NewDemocoinApp(logger, db)
}
func exportAppStateAndTMValidators(logger log.Logger, db dbm.DB, _ io.Writer) (json.RawMessage, []tmtypes.GenesisValidator, error) {
dapp := app.NewDemocoinApp(logger, db)
return dapp.ExportAppStateAndValidators()
}

View File

@ -2,6 +2,8 @@ package server
import (
"encoding/json"
"io"
"os"
"path/filepath"
abci "github.com/tendermint/tendermint/abci/types"
@ -10,34 +12,73 @@ import (
tmtypes "github.com/tendermint/tendermint/types" tmtypes "github.com/tendermint/tendermint/types"
) )
// AppCreator lets us lazily initialize app, using home dir type (
// and other flags (?) to start // AppCreator reflects a function that allows us to lazily initialize an
type AppCreator func(string, log.Logger) (abci.Application, error) // application using various configurations.
AppCreator func(home string, logger log.Logger, traceStore string) (abci.Application, error)
// AppExporter dumps all app state to JSON-serializable structure and returns the current validator set // AppExporter reflects a function that dumps all app state to
type AppExporter func(home string, log log.Logger) (json.RawMessage, []tmtypes.GenesisValidator, error) // JSON-serializable structure and returns the current validator set.
AppExporter func(home string, logger log.Logger, traceStore string) (json.RawMessage, []tmtypes.GenesisValidator, error)
// ConstructAppCreator returns an application generation function // AppCreatorInit reflects a function that performs initialization of an
func ConstructAppCreator(appFn func(log.Logger, dbm.DB) abci.Application, name string) AppCreator { // AppCreator.
return func(rootDir string, logger log.Logger) (abci.Application, error) { AppCreatorInit func(log.Logger, dbm.DB, io.Writer) abci.Application
// AppExporterInit reflects a function that performs initialization of an
// AppExporter.
AppExporterInit func(log.Logger, dbm.DB, io.Writer) (json.RawMessage, []tmtypes.GenesisValidator, error)
)
// ConstructAppCreator returns an application generation function.
func ConstructAppCreator(appFn AppCreatorInit, name string) AppCreator {
return func(rootDir string, logger log.Logger, traceStore string) (abci.Application, error) {
dataDir := filepath.Join(rootDir, "data")
db, err := dbm.NewGoLevelDB(name, dataDir)
if err != nil {
return nil, err
}
var traceStoreWriter io.Writer
if traceStore != "" {
traceStoreWriter, err = os.OpenFile(
traceStore,
os.O_WRONLY|os.O_APPEND|os.O_CREATE,
0666,
)
if err != nil {
return nil, err
}
}
app := appFn(logger, db, traceStoreWriter)
return app, nil
}
}
// ConstructAppExporter returns an application export function.
func ConstructAppExporter(appFn AppExporterInit, name string) AppExporter {
return func(rootDir string, logger log.Logger, traceStore string) (json.RawMessage, []tmtypes.GenesisValidator, error) {
dataDir := filepath.Join(rootDir, "data")
db, err := dbm.NewGoLevelDB(name, dataDir)
if err != nil {
return nil, nil, err
}
var traceStoreWriter io.Writer
if traceStore != "" {
traceStoreWriter, err = os.OpenFile(
traceStore,
os.O_WRONLY|os.O_APPEND|os.O_CREATE,
0666,
)
if err != nil {
return nil, nil, err
}
}
return appFn(logger, db, traceStoreWriter)
}
}

View File

@ -11,27 +11,33 @@ import (
tmtypes "github.com/tendermint/tendermint/types" tmtypes "github.com/tendermint/tendermint/types"
) )
// ExportCmd dumps app state to JSON // ExportCmd dumps app state to JSON.
func ExportCmd(ctx *Context, cdc *wire.Codec, appExporter AppExporter) *cobra.Command { func ExportCmd(ctx *Context, cdc *wire.Codec, appExporter AppExporter) *cobra.Command {
return &cobra.Command{ return &cobra.Command{
Use: "export", Use: "export",
Short: "Export state to JSON", Short: "Export state to JSON",
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
home := viper.GetString("home") home := viper.GetString("home")
appState, validators, err := appExporter(home, ctx.Logger) traceStore := viper.GetString(flagTraceStore)
appState, validators, err := appExporter(home, ctx.Logger, traceStore)
if err != nil { if err != nil {
return errors.Errorf("error exporting state: %v\n", err) return errors.Errorf("error exporting state: %v\n", err)
} }
doc, err := tmtypes.GenesisDocFromFile(ctx.Config.GenesisFile()) doc, err := tmtypes.GenesisDocFromFile(ctx.Config.GenesisFile())
if err != nil { if err != nil {
return err return err
} }
doc.AppStateJSON = appState doc.AppStateJSON = appState
doc.Validators = validators doc.Validators = validators
encoded, err := wire.MarshalJSONIndent(cdc, doc) encoded, err := wire.MarshalJSONIndent(cdc, doc)
if err != nil { if err != nil {
return err return err
} }
fmt.Println(string(encoded)) fmt.Println(string(encoded))
return nil return nil
}, },

View File

@ -1,6 +1,8 @@
package mock
import (
"io"
dbm "github.com/tendermint/tendermint/libs/db"
sdk "github.com/cosmos/cosmos-sdk/types"
@ -18,6 +20,26 @@ func (ms multiStore) CacheWrap() sdk.CacheWrap {
panic("not implemented") panic("not implemented")
} }
func (ms multiStore) CacheWrapWithTrace(_ io.Writer, _ sdk.TraceContext) sdk.CacheWrap {
panic("not implemented")
}
func (ms multiStore) ResetTraceContext() sdk.MultiStore {
panic("not implemented")
}
func (ms multiStore) TracingEnabled() bool {
panic("not implemented")
}
func (ms multiStore) WithTracingContext(tc sdk.TraceContext) sdk.MultiStore {
panic("not implemented")
}
func (ms multiStore) WithTracer(w io.Writer) sdk.MultiStore {
panic("not implemented")
}
func (ms multiStore) Commit() sdk.CommitID {
panic("not implemented")
}
@ -70,6 +92,10 @@ func (kv kvStore) CacheWrap() sdk.CacheWrap {
panic("not implemented") panic("not implemented")
} }
func (kv kvStore) CacheWrapWithTrace(w io.Writer, tc sdk.TraceContext) sdk.CacheWrap {
panic("not implemented")
}
func (kv kvStore) GetStoreType() sdk.StoreType {
panic("not implemented")
}

View File

@ -17,10 +17,11 @@ import (
const (
flagWithTendermint = "with-tendermint"
flagAddress = "address"
flagTraceStore = "trace-store"
)
// StartCmd runs the service passed in, either stand-alone or in-process with
// Tendermint.
func StartCmd(ctx *Context, appCreator AppCreator) *cobra.Command {
cmd := &cobra.Command{
Use: "start",
@ -30,26 +31,30 @@ func StartCmd(ctx *Context, appCreator AppCreator) *cobra.Command {
ctx.Logger.Info("Starting ABCI without Tendermint") ctx.Logger.Info("Starting ABCI without Tendermint")
return startStandAlone(ctx, appCreator) return startStandAlone(ctx, appCreator)
} }
ctx.Logger.Info("Starting ABCI with Tendermint") ctx.Logger.Info("Starting ABCI with Tendermint")
_, err := startInProcess(ctx, appCreator) _, err := startInProcess(ctx, appCreator)
return err return err
}, },
} }
// basic flags for abci app // core flags for the ABCI application
cmd.Flags().Bool(flagWithTendermint, true, "run abci app embedded in-process with tendermint") cmd.Flags().Bool(flagWithTendermint, true, "Run abci app embedded in-process with tendermint")
cmd.Flags().String(flagAddress, "tcp://0.0.0.0:26658", "Listen address") cmd.Flags().String(flagAddress, "tcp://0.0.0.0:26658", "Listen address")
cmd.Flags().String(flagTraceStore, "", "Enable KVStore tracing to an output file")
// AddNodeFlags adds support for all tendermint-specific command line options // add support for all Tendermint-specific command line options
tcmd.AddNodeFlags(cmd) tcmd.AddNodeFlags(cmd)
return cmd return cmd
} }
func startStandAlone(ctx *Context, appCreator AppCreator) error { func startStandAlone(ctx *Context, appCreator AppCreator) error {
// Generate the app in the proper dir
addr := viper.GetString(flagAddress) addr := viper.GetString(flagAddress)
home := viper.GetString("home") home := viper.GetString("home")
app, err := appCreator(home, ctx.Logger) traceStore := viper.GetString(flagTraceStore)
app, err := appCreator(home, ctx.Logger, traceStore)
if err != nil { if err != nil {
return err return err
} }
@ -58,15 +63,17 @@ func startStandAlone(ctx *Context, appCreator AppCreator) error {
if err != nil {
return errors.Errorf("error creating listener: %v\n", err)
}
svr.SetLogger(ctx.Logger.With("module", "abci-server"))
err = svr.Start()
if err != nil {
cmn.Exit(err.Error())
}
// wait forever
cmn.TrapSignal(func() {
// cleanup
err = svr.Stop()
if err != nil {
cmn.Exit(err.Error())
@ -78,29 +85,33 @@ func startStandAlone(ctx *Context, appCreator AppCreator) error {
func startInProcess(ctx *Context, appCreator AppCreator) (*node.Node, error) {
cfg := ctx.Config
home := cfg.RootDir
traceStore := viper.GetString(flagTraceStore)
app, err := appCreator(home, ctx.Logger, traceStore)
if err != nil {
return nil, err
}
// create & start tendermint node
tmNode, err := node.NewNode(
cfg,
pvm.LoadOrGenFilePV(cfg.PrivValidatorFile()),
proxy.NewLocalClientCreator(app),
node.DefaultGenesisDocProviderFunc(cfg),
node.DefaultDBProvider,
node.DefaultMetricsProvider,
ctx.Logger.With("module", "node"),
)
if err != nil {
return nil, err
}
err = tmNode.Start()
if err != nil {
return nil, err
}
// trap signal (run forever)
tmNode.RunForever()
return tmNode, nil
}

View File

@ -2,6 +2,7 @@ package store
import (
"bytes"
"io"
"sort"
"sync"
@ -27,11 +28,10 @@ var _ CacheKVStore = (*cacheKVStore)(nil)
// nolint
func NewCacheKVStore(parent KVStore) *cacheKVStore {
return &cacheKVStore{
cache: make(map[string]cValue),
parent: parent,
}
}
// Implements Store.
@ -98,6 +98,7 @@ func (ci *cacheKVStore) Write() {
keys = append(keys, key)
}
}
sort.Strings(keys)
// TODO: Consider allowing usage of Batch, which would allow the write to
@ -125,6 +126,11 @@ func (ci *cacheKVStore) CacheWrap() CacheWrap {
return NewCacheKVStore(ci)
}
// CacheWrapWithTrace implements the CacheWrapper interface.
func (ci *cacheKVStore) CacheWrapWithTrace(w io.Writer, tc TraceContext) CacheWrap {
return NewCacheKVStore(NewTraceKVStore(ci, w, tc))
}
//----------------------------------------
// Iteration
@ -140,32 +146,39 @@ func (ci *cacheKVStore) ReverseIterator(start, end []byte) Iterator {
func (ci *cacheKVStore) iterator(start, end []byte, ascending bool) Iterator {
var parent, cache Iterator
if ascending {
parent = ci.parent.Iterator(start, end)
} else {
parent = ci.parent.ReverseIterator(start, end)
}
items := ci.dirtyItems(ascending)
cache = newMemIterator(start, end, items)
return newCacheMergeIterator(parent, cache, ascending)
}
// Constructs a slice of dirty items, to use w/ memIterator.
func (ci *cacheKVStore) dirtyItems(ascending bool) []cmn.KVPair {
items := make([]cmn.KVPair, 0, len(ci.cache))
for key, cacheValue := range ci.cache {
if !cacheValue.dirty {
continue
}
items = append(items, cmn.KVPair{Key: []byte(key), Value: cacheValue.value})
}
sort.Slice(items, func(i, j int) bool {
if ascending {
return bytes.Compare(items[i].Key, items[j].Key) < 0
}
return bytes.Compare(items[i].Key, items[j].Key) > 0
})
return items
}
@ -180,10 +193,9 @@ func (ci *cacheKVStore) assertValidKey(key []byte) {
// Only entrypoint to mutate ci.cache.
func (ci *cacheKVStore) setCacheValue(key, value []byte, deleted bool, dirty bool) {
ci.cache[string(key)] = cValue{
value: value,
deleted: deleted,
dirty: dirty,
}
}

View File

@ -1,6 +1,8 @@
package store
import (
"io"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -13,6 +15,9 @@ type cacheMultiStore struct {
db CacheKVStore
stores map[StoreKey]CacheWrap
keysByName map[string]StoreKey
traceWriter io.Writer
traceContext TraceContext
}
var _ CacheMultiStore = cacheMultiStore{}
@ -22,10 +27,18 @@ func newCacheMultiStoreFromRMS(rms *rootMultiStore) cacheMultiStore {
db: NewCacheKVStore(dbStoreAdapter{rms.db}),
stores: make(map[StoreKey]CacheWrap, len(rms.stores)),
keysByName: rms.keysByName,
traceWriter: rms.traceWriter,
traceContext: rms.traceContext,
}
for key, store := range rms.stores {
if cms.TracingEnabled() {
cms.stores[key] = store.CacheWrapWithTrace(cms.traceWriter, cms.traceContext)
} else {
cms.stores[key] = store.CacheWrap()
}
}
return cms
}
@ -33,13 +46,55 @@ func newCacheMultiStoreFromCMS(cms cacheMultiStore) cacheMultiStore {
cms2 := cacheMultiStore{
db: NewCacheKVStore(cms.db),
stores: make(map[StoreKey]CacheWrap, len(cms.stores)),
traceWriter: cms.traceWriter,
traceContext: cms.traceContext,
}
for key, store := range cms.stores {
if cms2.TracingEnabled() {
cms2.stores[key] = store.CacheWrapWithTrace(cms2.traceWriter, cms2.traceContext)
} else {
cms2.stores[key] = store.CacheWrap()
}
}
return cms2
}
// WithTracer sets the tracer for the MultiStore that the underlying
// stores will utilize to trace operations. A MultiStore is returned.
func (cms cacheMultiStore) WithTracer(w io.Writer) MultiStore {
cms.traceWriter = w
return cms
}
// WithTracingContext updates the tracing context for the MultiStore by merging
// the given context with the existing context by key. Any existing keys will
// be overwritten. It is implied that the caller should update the context when
// necessary between tracing operations. It returns a modified MultiStore.
func (cms cacheMultiStore) WithTracingContext(tc TraceContext) MultiStore {
if cms.traceContext != nil {
for k, v := range tc {
cms.traceContext[k] = v
}
} else {
cms.traceContext = tc
}
return cms
}
// TracingEnabled returns if tracing is enabled for the MultiStore.
func (cms cacheMultiStore) TracingEnabled() bool {
return cms.traceWriter != nil
}
// ResetTraceContext resets the current tracing context.
func (cms cacheMultiStore) ResetTraceContext() MultiStore {
cms.traceContext = nil
return cms
}
// Implements Store.
func (cms cacheMultiStore) GetStoreType() StoreType {
return sdk.StoreTypeMulti
@ -58,6 +113,11 @@ func (cms cacheMultiStore) CacheWrap() CacheWrap {
return cms.CacheMultiStore().(CacheWrap)
}
// CacheWrapWithTrace implements the CacheWrapper interface.
func (cms cacheMultiStore) CacheWrapWithTrace(_ io.Writer, _ TraceContext) CacheWrap {
return cms.CacheWrap()
}
// Implements MultiStore.
func (cms cacheMultiStore) CacheMultiStore() CacheMultiStore {
return newCacheMultiStoreFromCMS(cms)

View File

@ -1,6 +1,8 @@
package store
import (
"io"
sdk "github.com/cosmos/cosmos-sdk/types"
dbm "github.com/tendermint/tendermint/libs/db"
)
@ -19,6 +21,11 @@ func (dsa dbStoreAdapter) CacheWrap() CacheWrap {
return NewCacheKVStore(dsa)
}
// CacheWrapWithTrace implements the KVStore interface.
func (dsa dbStoreAdapter) CacheWrapWithTrace(w io.Writer, tc TraceContext) CacheWrap {
return NewCacheKVStore(NewTraceKVStore(dsa, w, tc))
}
// Implements KVStore
func (dsa dbStoreAdapter) Prefix(prefix []byte) KVStore {
return prefixStore{dsa, prefix}

View File

@ -1,6 +1,8 @@
package store
import (
"io"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -82,7 +84,12 @@ func (gi *gasKVStore) ReverseIterator(start, end []byte) sdk.Iterator {
// Implements KVStore.
func (gi *gasKVStore) CacheWrap() sdk.CacheWrap {
panic("cannot CacheWrap a GasKVStore")
}
// CacheWrapWithTrace implements the KVStore interface.
func (gi *gasKVStore) CacheWrapWithTrace(_ io.Writer, _ TraceContext) CacheWrap {
panic("cannot CacheWrapWithTrace a GasKVStore")
}
func (gi *gasKVStore) iterator(start, end []byte, ascending bool) sdk.Iterator {

View File

@ -2,6 +2,7 @@ package store
import (
"fmt"
"io"
"sync"
"github.com/tendermint/go-amino"
@ -117,6 +118,11 @@ func (st *iavlStore) CacheWrap() CacheWrap {
return NewCacheKVStore(st)
}
// CacheWrapWithTrace implements the Store interface.
func (st *iavlStore) CacheWrapWithTrace(w io.Writer, tc TraceContext) CacheWrap {
return NewCacheKVStore(NewTraceKVStore(st, w, tc))
}
// Implements KVStore.
func (st *iavlStore) Set(key, value []byte) {
st.tree.Set(key, value)

View File

@ -1,6 +1,8 @@
package store
import (
"io"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -19,6 +21,11 @@ func (s prefixStore) CacheWrap() CacheWrap {
return NewCacheKVStore(s)
}
// CacheWrapWithTrace implements the KVStore interface.
func (s prefixStore) CacheWrapWithTrace(w io.Writer, tc TraceContext) CacheWrap {
return NewCacheKVStore(NewTraceKVStore(s, w, tc))
}
// Implements KVStore
func (s prefixStore) Get(key []byte) []byte {
return s.store.Get(append(s.prefix, key...))

View File

@ -2,6 +2,7 @@ package store
import (
"fmt"
"io"
"strings"
"golang.org/x/crypto/ripemd160"
@ -18,16 +19,18 @@ const (
commitInfoKeyFmt = "s/%d" // s/<version>
)
// rootMultiStore is composed of many CommitStores. Name contrasts with
// cacheMultiStore which is for cache-wrapping other MultiStores. It implements
// the CommitMultiStore interface.
type rootMultiStore struct {
db dbm.DB
lastCommitID CommitID
storesParams map[StoreKey]storeParams
stores map[StoreKey]CommitStore
keysByName map[string]StoreKey
traceWriter io.Writer
traceContext TraceContext
}
var _ CommitMultiStore = (*rootMultiStore)(nil)
@ -130,6 +133,40 @@ func (rs *rootMultiStore) LoadVersion(ver int64) error {
return nil
}
// WithTracer sets the tracer for the MultiStore that the underlying
// stores will utilize to trace operations. A MultiStore is returned.
func (rs *rootMultiStore) WithTracer(w io.Writer) MultiStore {
rs.traceWriter = w
return rs
}
// WithTracingContext updates the tracing context for the MultiStore by merging
// the given context with the existing context by key. Any existing keys will
// be overwritten. It is implied that the caller should update the context when
// necessary between tracing operations. It returns a modified MultiStore.
func (rs *rootMultiStore) WithTracingContext(tc TraceContext) MultiStore {
if rs.traceContext != nil {
for k, v := range tc {
rs.traceContext[k] = v
}
} else {
rs.traceContext = tc
}
return rs
}
// TracingEnabled returns if tracing is enabled for the MultiStore.
func (rs *rootMultiStore) TracingEnabled() bool {
return rs.traceWriter != nil
}
// ResetTraceContext resets the current tracing context.
func (rs *rootMultiStore) ResetTraceContext() MultiStore {
rs.traceContext = nil
return rs
}
//----------------------------------------
// +CommitStore
@ -165,6 +202,11 @@ func (rs *rootMultiStore) CacheWrap() CacheWrap {
return rs.CacheMultiStore().(CacheWrap)
}
// CacheWrapWithTrace implements the CacheWrapper interface.
func (rs *rootMultiStore) CacheWrapWithTrace(_ io.Writer, _ TraceContext) CacheWrap {
return rs.CacheWrap()
}
//----------------------------------------
// +MultiStore
@ -178,9 +220,17 @@ func (rs *rootMultiStore) GetStore(key StoreKey) Store {
return rs.stores[key]
}
// GetKVStore implements the MultiStore interface. If tracing is enabled on the
// rootMultiStore, a wrapped TraceKVStore will be returned with the given
// tracer, otherwise, the original KVStore will be returned.
func (rs *rootMultiStore) GetKVStore(key StoreKey) KVStore {
store := rs.stores[key].(KVStore)
if rs.TracingEnabled() {
store = NewTraceKVStore(store, rs.traceWriter, rs.traceContext)
}
return store
}
// Implements MultiStore.

store/tracekvstore.go (new file, 198 lines)
View File

@ -0,0 +1,198 @@
package store
import (
"encoding/base64"
"encoding/json"
"fmt"
"io"
sdk "github.com/cosmos/cosmos-sdk/types"
)
const (
writeOp operation = "write"
readOp operation = "read"
deleteOp operation = "delete"
iterKeyOp operation = "iterKey"
iterValueOp operation = "iterValue"
)
type (
// TraceKVStore implements the KVStore interface with tracing enabled.
// Operations are traced on each core KVStore call and written to the
// underlying io.Writer.
//
// TODO: Should we use a buffered writer and implement Commit on
// TraceKVStore?
TraceKVStore struct {
parent sdk.KVStore
writer io.Writer
context TraceContext
}
// operation represents an IO operation
operation string
// traceOperation implements a traced KVStore operation
traceOperation struct {
Operation operation `json:"operation"`
Key string `json:"key"`
Value string `json:"value"`
Metadata map[string]interface{} `json:"metadata"`
}
)
// NewTraceKVStore returns a reference to a new TraceKVStore given a parent
// KVStore implementation and a buffered writer.
func NewTraceKVStore(parent sdk.KVStore, writer io.Writer, tc TraceContext) *TraceKVStore {
return &TraceKVStore{parent: parent, writer: writer, context: tc}
}
// Get implements the KVStore interface. It traces a read operation and
// delegates a Get call to the parent KVStore.
func (tkv *TraceKVStore) Get(key []byte) []byte {
value := tkv.parent.Get(key)
writeOperation(tkv.writer, readOp, tkv.context, key, value)
return value
}
// Set implements the KVStore interface. It traces a write operation and
// delegates the Set call to the parent KVStore.
func (tkv *TraceKVStore) Set(key []byte, value []byte) {
writeOperation(tkv.writer, writeOp, tkv.context, key, value)
tkv.parent.Set(key, value)
}
// Delete implements the KVStore interface. It traces a write operation and
// delegates the Delete call to the parent KVStore.
func (tkv *TraceKVStore) Delete(key []byte) {
writeOperation(tkv.writer, deleteOp, tkv.context, key, nil)
tkv.parent.Delete(key)
}
// Has implements the KVStore interface. It delegates the Has call to the
// parent KVStore.
func (tkv *TraceKVStore) Has(key []byte) bool {
return tkv.parent.Has(key)
}
// Prefix implements the KVStore interface.
func (tkv *TraceKVStore) Prefix(prefix []byte) KVStore {
return prefixStore{tkv, prefix}
}
// Iterator implements the KVStore interface. It delegates the Iterator call
// the to the parent KVStore.
func (tkv *TraceKVStore) Iterator(start, end []byte) sdk.Iterator {
return tkv.iterator(start, end, true)
}
// ReverseIterator implements the KVStore interface. It delegates the
// ReverseIterator call the to the parent KVStore.
func (tkv *TraceKVStore) ReverseIterator(start, end []byte) sdk.Iterator {
return tkv.iterator(start, end, false)
}
// iterator facilitates iteration over a KVStore. It delegates the necessary
// calls to its parent KVStore.
func (tkv *TraceKVStore) iterator(start, end []byte, ascending bool) sdk.Iterator {
var parent sdk.Iterator
if ascending {
parent = tkv.parent.Iterator(start, end)
} else {
parent = tkv.parent.ReverseIterator(start, end)
}
return newTraceIterator(tkv.writer, parent, tkv.context)
}
type traceIterator struct {
parent sdk.Iterator
writer io.Writer
context TraceContext
}
func newTraceIterator(w io.Writer, parent sdk.Iterator, tc TraceContext) sdk.Iterator {
return &traceIterator{writer: w, parent: parent, context: tc}
}
// Domain implements the Iterator interface.
func (ti *traceIterator) Domain() (start []byte, end []byte) {
return ti.parent.Domain()
}
// Valid implements the Iterator interface.
func (ti *traceIterator) Valid() bool {
return ti.parent.Valid()
}
// Next implements the Iterator interface.
func (ti *traceIterator) Next() {
ti.parent.Next()
}
// Key implements the Iterator interface.
func (ti *traceIterator) Key() []byte {
key := ti.parent.Key()
writeOperation(ti.writer, iterKeyOp, ti.context, key, nil)
return key
}
// Value implements the Iterator interface.
func (ti *traceIterator) Value() []byte {
value := ti.parent.Value()
writeOperation(ti.writer, iterValueOp, ti.context, nil, value)
return value
}
// Close implements the Iterator interface.
func (ti *traceIterator) Close() {
ti.parent.Close()
}
// GetStoreType implements the KVStore interface. It returns the underlying
// KVStore type.
func (tkv *TraceKVStore) GetStoreType() sdk.StoreType {
return tkv.parent.GetStoreType()
}
// CacheWrap implements the KVStore interface. It panics as a TraceKVStore
// cannot be cache wrapped.
func (tkv *TraceKVStore) CacheWrap() sdk.CacheWrap {
panic("cannot CacheWrap a TraceKVStore")
}
// CacheWrapWithTrace implements the KVStore interface. It panics as a
// TraceKVStore cannot be cache wrapped.
func (tkv *TraceKVStore) CacheWrapWithTrace(_ io.Writer, _ TraceContext) CacheWrap {
panic("cannot CacheWrapWithTrace a TraceKVStore")
}
// writeOperation writes a KVStore operation to the underlying io.Writer as
// JSON-encoded data where the key/value pair is base64 encoded.
func writeOperation(w io.Writer, op operation, tc TraceContext, key, value []byte) {
traceOp := traceOperation{
Operation: op,
Key: base64.StdEncoding.EncodeToString(key),
Value: base64.StdEncoding.EncodeToString(value),
}
if tc != nil {
traceOp.Metadata = tc
}
raw, err := json.Marshal(traceOp)
if err != nil {
panic(fmt.Sprintf("failed to serialize trace operation: %v", err))
}
if _, err := w.Write(raw); err != nil {
panic(fmt.Sprintf("failed to write trace operation: %v", err))
}
io.WriteString(w, "\n")
}

store/tracekvstore_test.go (new file, 283 lines)
View File

@ -0,0 +1,283 @@
package store
import (
"bytes"
"io"
"testing"
"github.com/stretchr/testify/require"
dbm "github.com/tendermint/tendermint/libs/db"
)
var kvPairs = []KVPair{
KVPair{Key: keyFmt(1), Value: valFmt(1)},
KVPair{Key: keyFmt(2), Value: valFmt(2)},
KVPair{Key: keyFmt(3), Value: valFmt(3)},
}
func newTraceKVStore(w io.Writer) *TraceKVStore {
store := newEmptyTraceKVStore(w)
for _, kvPair := range kvPairs {
store.Set(kvPair.Key, kvPair.Value)
}
return store
}
func newEmptyTraceKVStore(w io.Writer) *TraceKVStore {
memDB := dbStoreAdapter{dbm.NewMemDB()}
tc := TraceContext(map[string]interface{}{"blockHeight": 64})
return NewTraceKVStore(memDB, w, tc)
}
func TestTraceKVStoreGet(t *testing.T) {
testCases := []struct {
key []byte
expectedValue []byte
expectedOut string
}{
{
key: []byte{},
expectedValue: nil,
expectedOut: "{\"operation\":\"read\",\"key\":\"\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
key: kvPairs[0].Key,
expectedValue: kvPairs[0].Value,
expectedOut: "{\"operation\":\"read\",\"key\":\"a2V5MDAwMDAwMDE=\",\"value\":\"dmFsdWUwMDAwMDAwMQ==\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
key: []byte("does-not-exist"),
expectedValue: nil,
expectedOut: "{\"operation\":\"read\",\"key\":\"ZG9lcy1ub3QtZXhpc3Q=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
},
}
for _, tc := range testCases {
var buf bytes.Buffer
store := newTraceKVStore(&buf)
buf.Reset()
value := store.Get(tc.key)
require.Equal(t, tc.expectedValue, value)
require.Equal(t, tc.expectedOut, buf.String())
}
}
func TestTraceKVStoreSet(t *testing.T) {
testCases := []struct {
key []byte
value []byte
expectedOut string
}{
{
key: []byte{},
value: nil,
expectedOut: "{\"operation\":\"write\",\"key\":\"\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
key: kvPairs[0].Key,
value: kvPairs[0].Value,
expectedOut: "{\"operation\":\"write\",\"key\":\"a2V5MDAwMDAwMDE=\",\"value\":\"dmFsdWUwMDAwMDAwMQ==\",\"metadata\":{\"blockHeight\":64}}\n",
},
}
for _, tc := range testCases {
var buf bytes.Buffer
store := newEmptyTraceKVStore(&buf)
buf.Reset()
store.Set(tc.key, tc.value)
require.Equal(t, tc.expectedOut, buf.String())
}
}
func TestTraceKVStoreDelete(t *testing.T) {
testCases := []struct {
key []byte
expectedOut string
}{
{
key: []byte{},
expectedOut: "{\"operation\":\"delete\",\"key\":\"\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
key: kvPairs[0].Key,
expectedOut: "{\"operation\":\"delete\",\"key\":\"a2V5MDAwMDAwMDE=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
},
}
for _, tc := range testCases {
var buf bytes.Buffer
store := newTraceKVStore(&buf)
buf.Reset()
store.Delete(tc.key)
require.Equal(t, tc.expectedOut, buf.String())
}
}
func TestTraceKVStoreHas(t *testing.T) {
testCases := []struct {
key []byte
expected bool
}{
{
key: []byte{},
expected: false,
},
{
key: kvPairs[0].Key,
expected: true,
},
}
for _, tc := range testCases {
var buf bytes.Buffer
store := newTraceKVStore(&buf)
buf.Reset()
ok := store.Has(tc.key)
require.Equal(t, tc.expected, ok)
}
}
func TestTestTraceKVStoreIterator(t *testing.T) {
var buf bytes.Buffer
store := newTraceKVStore(&buf)
iterator := store.Iterator(nil, nil)
s, e := iterator.Domain()
require.Equal(t, []uint8([]byte(nil)), s)
require.Equal(t, []uint8([]byte(nil)), e)
testCases := []struct {
expectedKey []byte
expectedValue []byte
expectedKeyOut string
expectedvalueOut string
}{
{
expectedKey: kvPairs[0].Key,
expectedValue: kvPairs[0].Value,
expectedKeyOut: "{\"operation\":\"iterKey\",\"key\":\"a2V5MDAwMDAwMDE=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
expectedvalueOut: "{\"operation\":\"iterValue\",\"key\":\"\",\"value\":\"dmFsdWUwMDAwMDAwMQ==\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
expectedKey: kvPairs[1].Key,
expectedValue: kvPairs[1].Value,
expectedKeyOut: "{\"operation\":\"iterKey\",\"key\":\"a2V5MDAwMDAwMDI=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
expectedvalueOut: "{\"operation\":\"iterValue\",\"key\":\"\",\"value\":\"dmFsdWUwMDAwMDAwMg==\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
expectedKey: kvPairs[2].Key,
expectedValue: kvPairs[2].Value,
expectedKeyOut: "{\"operation\":\"iterKey\",\"key\":\"a2V5MDAwMDAwMDM=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
expectedvalueOut: "{\"operation\":\"iterValue\",\"key\":\"\",\"value\":\"dmFsdWUwMDAwMDAwMw==\",\"metadata\":{\"blockHeight\":64}}\n",
},
}
for _, tc := range testCases {
buf.Reset()
ka := iterator.Key()
require.Equal(t, tc.expectedKeyOut, buf.String())
buf.Reset()
va := iterator.Value()
require.Equal(t, tc.expectedvalueOut, buf.String())
require.Equal(t, tc.expectedKey, ka)
require.Equal(t, tc.expectedValue, va)
iterator.Next()
}
require.False(t, iterator.Valid())
require.Panics(t, iterator.Next)
require.NotPanics(t, iterator.Close)
}
func TestTestTraceKVStoreReverseIterator(t *testing.T) {
var buf bytes.Buffer
store := newTraceKVStore(&buf)
iterator := store.ReverseIterator(nil, nil)
s, e := iterator.Domain()
require.Equal(t, []uint8([]byte(nil)), s)
require.Equal(t, []uint8([]byte(nil)), e)
testCases := []struct {
expectedKey []byte
expectedValue []byte
expectedKeyOut string
expectedvalueOut string
}{
{
expectedKey: kvPairs[2].Key,
expectedValue: kvPairs[2].Value,
expectedKeyOut: "{\"operation\":\"iterKey\",\"key\":\"a2V5MDAwMDAwMDM=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
expectedvalueOut: "{\"operation\":\"iterValue\",\"key\":\"\",\"value\":\"dmFsdWUwMDAwMDAwMw==\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
expectedKey: kvPairs[1].Key,
expectedValue: kvPairs[1].Value,
expectedKeyOut: "{\"operation\":\"iterKey\",\"key\":\"a2V5MDAwMDAwMDI=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
expectedvalueOut: "{\"operation\":\"iterValue\",\"key\":\"\",\"value\":\"dmFsdWUwMDAwMDAwMg==\",\"metadata\":{\"blockHeight\":64}}\n",
},
{
expectedKey: kvPairs[0].Key,
expectedValue: kvPairs[0].Value,
expectedKeyOut: "{\"operation\":\"iterKey\",\"key\":\"a2V5MDAwMDAwMDE=\",\"value\":\"\",\"metadata\":{\"blockHeight\":64}}\n",
expectedvalueOut: "{\"operation\":\"iterValue\",\"key\":\"\",\"value\":\"dmFsdWUwMDAwMDAwMQ==\",\"metadata\":{\"blockHeight\":64}}\n",
},
}
for _, tc := range testCases {
buf.Reset()
ka := iterator.Key()
require.Equal(t, tc.expectedKeyOut, buf.String())
buf.Reset()
va := iterator.Value()
require.Equal(t, tc.expectedvalueOut, buf.String())
require.Equal(t, tc.expectedKey, ka)
require.Equal(t, tc.expectedValue, va)
iterator.Next()
}
require.False(t, iterator.Valid())
require.Panics(t, iterator.Next)
require.NotPanics(t, iterator.Close)
}
func TestTraceKVStorePrefix(t *testing.T) {
store := newEmptyTraceKVStore(nil)
pStore := store.Prefix([]byte("trace_prefix"))
require.IsType(t, prefixStore{}, pStore)
}
func TestTraceKVStoreGetStoreType(t *testing.T) {
memDB := dbStoreAdapter{dbm.NewMemDB()}
store := newEmptyTraceKVStore(nil)
require.Equal(t, memDB.GetStoreType(), store.GetStoreType())
}
func TestTraceKVStoreCacheWrap(t *testing.T) {
store := newEmptyTraceKVStore(nil)
require.Panics(t, func() { store.CacheWrap() })
}
func TestTraceKVStoreCacheWrapWithTrace(t *testing.T) {
store := newEmptyTraceKVStore(nil)
require.Panics(t, func() { store.CacheWrapWithTrace(nil, nil) })
}
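
The expected strings in the tests above pin down the trace format: one JSON object per operation, with base64-encoded key and value plus the caller-supplied metadata. As a rough sketch of consuming that output (the file, struct, and field names below are illustrative, not part of the SDK):

package main

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
)

// traceOperation mirrors the JSON shape asserted in the tests above.
type traceOperation struct {
	Operation string                 `json:"operation"`
	Key       string                 `json:"key"`
	Value     string                 `json:"value"`
	Metadata  map[string]interface{} `json:"metadata"`
}

func main() {
	line := `{"operation":"iterKey","key":"a2V5MDAwMDAwMDE=","value":"","metadata":{"blockHeight":64}}`

	var op traceOperation
	if err := json.Unmarshal([]byte(line), &op); err != nil {
		panic(err)
	}

	// Keys and values are base64-encoded in the trace output.
	key, _ := base64.StdEncoding.DecodeString(op.Key)
	fmt.Printf("%s %s at %v\n", op.Operation, key, op.Metadata["blockHeight"])
	// Output: iterKey key00000001 at 64
}
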

View File

@@ -6,20 +6,23 @@ import (
 // Import cosmos-sdk/types/store.go for convenience.
 // nolint
-type Store = types.Store
-type Committer = types.Committer
-type CommitStore = types.CommitStore
-type MultiStore = types.MultiStore
-type CacheMultiStore = types.CacheMultiStore
-type CommitMultiStore = types.CommitMultiStore
-type KVStore = types.KVStore
-type KVPair = types.KVPair
-type Iterator = types.Iterator
-type CacheKVStore = types.CacheKVStore
-type CommitKVStore = types.CommitKVStore
-type CacheWrapper = types.CacheWrapper
-type CacheWrap = types.CacheWrap
-type CommitID = types.CommitID
-type StoreKey = types.StoreKey
-type StoreType = types.StoreType
-type Queryable = types.Queryable
+type (
+	Store            = types.Store
+	Committer        = types.Committer
+	CommitStore      = types.CommitStore
+	MultiStore       = types.MultiStore
+	CacheMultiStore  = types.CacheMultiStore
+	CommitMultiStore = types.CommitMultiStore
+	KVStore          = types.KVStore
+	KVPair           = types.KVPair
+	Iterator         = types.Iterator
+	CacheKVStore     = types.CacheKVStore
+	CommitKVStore    = types.CommitKVStore
+	CacheWrapper     = types.CacheWrapper
+	CacheWrap        = types.CacheWrap
+	CommitID         = types.CommitID
+	StoreKey         = types.StoreKey
+	StoreType        = types.StoreType
+	Queryable        = types.Queryable
+	TraceContext     = types.TraceContext
+)

View File

@@ -2,6 +2,7 @@ package types
 
 import (
 	"fmt"
+	"io"
 
 	abci "github.com/tendermint/tendermint/abci/types"
 	cmn "github.com/tendermint/tendermint/libs/common"
@@ -50,6 +51,21 @@ type MultiStore interface { //nolint
 	GetStore(StoreKey) Store
 	GetKVStore(StoreKey) KVStore
 	GetKVStoreWithGas(GasMeter, StoreKey) KVStore
+
+	// TracingEnabled returns if tracing is enabled for the MultiStore.
+	TracingEnabled() bool
+
+	// WithTracer sets the tracer for the MultiStore that the underlying
+	// stores will utilize to trace operations. A MultiStore is returned.
+	WithTracer(w io.Writer) MultiStore
+
+	// WithTracingContext sets the tracing context for a MultiStore. It is
+	// implied that the caller should update the context when necessary between
+	// tracing operations. A MultiStore is returned.
+	WithTracingContext(TraceContext) MultiStore
+
+	// ResetTraceContext resets the current tracing context.
+	ResetTraceContext() MultiStore
 }
 
 // From MultiStore.CacheMultiStore()....
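
The hunk above is the full tracing surface a MultiStore exposes. As a minimal sketch (not SDK code) of how a caller holding any MultiStore implementation might wire these methods together; the cosmos-sdk/types import path and the "blockHeight" metadata key are assumptions for illustration:

package tracingexample

import (
	"io"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// withBlockTracing turns tracing on for ms and tags every traced operation
// with the current block height via the trace context.
func withBlockTracing(ms sdk.MultiStore, w io.Writer, height int64) sdk.MultiStore {
	ms = ms.WithTracer(w)
	ms = ms.WithTracingContext(sdk.TraceContext{"blockHeight": height})

	if !ms.TracingEnabled() {
		panic("tracing should report enabled once a tracer is set")
	}
	return ms
}

// endBlockTracing clears the per-block context so stale metadata is not
// attached to the next block's traced operations.
func endBlockTracing(ms sdk.MultiStore) sdk.MultiStore {
	return ms.ResetTraceContext()
}
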
@@ -163,25 +179,27 @@ type KVStoreGetter interface {
 //----------------------------------------
 // CacheWrap
 
-/*
-	CacheWrap() makes the most appropriate cache-wrap. For example,
-	IAVLStore.CacheWrap() returns a CacheKVStore.
-
-	CacheWrap() should not return a Committer, since Commit() on
-	cache-wraps make no sense. It can return KVStore, HeapStore,
-	SpaceStore, etc.
-*/
+// CacheWrap makes the most appropriate cache-wrap. For example,
+// IAVLStore.CacheWrap() returns a CacheKVStore. CacheWrap should not return
+// a Committer, since Commit cache-wraps make no sense. It can return KVStore,
+// HeapStore, SpaceStore, etc.
 type CacheWrap interface {
 	// Write syncs with the underlying store.
 	Write()
 
 	// CacheWrap recursively wraps again.
 	CacheWrap() CacheWrap
+
+	// CacheWrapWithTrace recursively wraps again with tracing enabled.
+	CacheWrapWithTrace(w io.Writer, tc TraceContext) CacheWrap
 }
 
 type CacheWrapper interface { //nolint
+	// CacheWrap cache wraps.
 	CacheWrap() CacheWrap
+
+	// CacheWrapWithTrace cache wraps with tracing enabled.
+	CacheWrapWithTrace(w io.Writer, tc TraceContext) CacheWrap
 }
 
 //----------------------------------------
@@ -298,3 +316,7 @@ func (getter PrefixStoreGetter) KVStore(ctx Context) KVStore {
 type KVPair cmn.KVPair
 
 //----------------------------------------
+
+// TraceContext contains TraceKVStore context data. It will be written with
+// every trace operation.
+type TraceContext map[string]interface{}
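
The CacheWrapWithTrace hook and the TraceContext map added above are what concrete stores use to propagate tracing when they cache-wrap. A rough sketch of the calling pattern, illustrative only and not SDK code; the import path and the callback shape are assumptions:

package tracingexample

import (
	"io"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// traceWrapped cache-wraps store with tracing, lets the caller run writes
// against the wrap, and then flushes the cached writes back to the store.
func traceWrapped(store sdk.CacheWrapper, w io.Writer, tc sdk.TraceContext, writes func(sdk.CacheWrap)) {
	wrap := store.CacheWrapWithTrace(w, tc) // operations on wrap are traced to w
	writes(wrap)                            // caller type-asserts wrap to a concrete store to read/write
	wrap.Write()                            // sync cached writes to the underlying store
}
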

View File

@@ -81,7 +81,7 @@ func handleMsgCreateValidator(ctx sdk.Context, msg types.MsgCreateValidator, k k
 	// move coins from the msg.Address account to a (self-delegation) delegator account
 	// the validator account and global shares are updated within here
-	_, err := k.Delegate(ctx, msg.DelegatorAddr, msg.Delegation, validator)
+	_, err := k.Delegate(ctx, msg.DelegatorAddr, msg.Delegation, validator, true)
 	if err != nil {
 		return err.Result()
 	}
@@ -136,7 +136,7 @@ func handleMsgDelegate(ctx sdk.Context, msg types.MsgDelegate, k keeper.Keeper)
 	if validator.Revoked == true {
 		return ErrValidatorRevoked(k.Codespace()).Result()
 	}
-	_, err := k.Delegate(ctx, msg.DelegatorAddr, msg.Delegation, validator)
+	_, err := k.Delegate(ctx, msg.DelegatorAddr, msg.Delegation, validator, true)
 	if err != nil {
 		return err.Result()
 	}

View File

@@ -268,6 +268,7 @@ func TestIncrementsMsgUnbond(t *testing.T) {
 	initBond := int64(1000)
 	ctx, accMapper, keeper := keep.CreateTestInput(t, false, initBond)
 	params := setInstantUnbondPeriod(keeper, ctx)
+	denom := params.BondDenom
 
 	// create validator, delegate
 	validatorAddr, delegatorAddr := keep.Addrs[0], keep.Addrs[1]
@@ -276,10 +277,17 @@ func TestIncrementsMsgUnbond(t *testing.T) {
 	got := handleMsgCreateValidator(ctx, msgCreateValidator, keeper)
 	require.True(t, got.IsOK(), "expected create-validator to be ok, got %v", got)
 
+	// initial balance
+	amt1 := accMapper.GetAccount(ctx, delegatorAddr).GetCoins().AmountOf(denom)
+
 	msgDelegate := newTestMsgDelegate(delegatorAddr, validatorAddr, initBond)
 	got = handleMsgDelegate(ctx, msgDelegate, keeper)
 	require.True(t, got.IsOK(), "expected delegation to be ok, got %v", got)
 
+	// balance should have been subtracted after delegation
+	amt2 := accMapper.GetAccount(ctx, delegatorAddr).GetCoins().AmountOf(denom)
+	require.Equal(t, amt1.Sub(sdk.NewInt(initBond)).Int64(), amt2.Int64(), "expected coins to be subtracted")
+
 	validator, found := keeper.GetValidator(ctx, validatorAddr)
 	require.True(t, found)
 	require.Equal(t, initBond*2, validator.DelegatorShares.RoundInt64())
@@ -528,8 +536,9 @@ func TestUnbondingPeriod(t *testing.T) {
 }
 
 func TestRedelegationPeriod(t *testing.T) {
-	ctx, _, keeper := keep.CreateTestInput(t, false, 1000)
+	ctx, AccMapper, keeper := keep.CreateTestInput(t, false, 1000)
 	validatorAddr, validatorAddr2 := keep.Addrs[0], keep.Addrs[1]
+	denom := keeper.GetParams(ctx).BondDenom
 
 	// set the unbonding time
 	params := keeper.GetParams(ctx)
@@ -538,18 +547,32 @@ func TestRedelegationPeriod(t *testing.T) {
 
 	// create the validators
 	msgCreateValidator := newTestMsgCreateValidator(validatorAddr, keep.PKs[0], 10)
+
+	// initial balance
+	amt1 := AccMapper.GetAccount(ctx, validatorAddr).GetCoins().AmountOf(denom)
+
 	got := handleMsgCreateValidator(ctx, msgCreateValidator, keeper)
 	require.True(t, got.IsOK(), "expected no error on runMsgCreateValidator")
 
+	// balance should have been subtracted after creation
+	amt2 := AccMapper.GetAccount(ctx, validatorAddr).GetCoins().AmountOf(denom)
+	require.Equal(t, amt1.Sub(sdk.NewInt(10)).Int64(), amt2.Int64(), "expected coins to be subtracted")
+
 	msgCreateValidator = newTestMsgCreateValidator(validatorAddr2, keep.PKs[1], 10)
 	got = handleMsgCreateValidator(ctx, msgCreateValidator, keeper)
 	require.True(t, got.IsOK(), "expected no error on runMsgCreateValidator")
 
+	bal1 := AccMapper.GetAccount(ctx, validatorAddr).GetCoins()
+
 	// begin redelegate
 	msgBeginRedelegate := NewMsgBeginRedelegate(validatorAddr, validatorAddr, validatorAddr2, sdk.NewRat(10))
 	got = handleMsgBeginRedelegate(ctx, msgBeginRedelegate, keeper)
 	require.True(t, got.IsOK(), "expected no error, %v", got)
 
+	// origin account should not lose tokens as with a regular delegation
+	bal2 := AccMapper.GetAccount(ctx, validatorAddr).GetCoins()
+	require.Equal(t, bal1, bal2)
+
 	// cannot complete redelegation at same time
 	msgCompleteRedelegate := NewMsgCompleteRedelegate(validatorAddr, validatorAddr, validatorAddr2)
 	got = handleMsgCompleteRedelegate(ctx, msgCompleteRedelegate, keeper)

View File

@@ -200,9 +200,9 @@ func (k Keeper) RemoveRedelegation(ctx sdk.Context, red types.Redelegation) {
 
 //_____________________________________________________________________________________
 
-// Perform a delegation, set/update everything necessary within the store
+// Perform a delegation, set/update everything necessary within the store.
 func (k Keeper) Delegate(ctx sdk.Context, delegatorAddr sdk.AccAddress, bondAmt sdk.Coin,
-	validator types.Validator) (newShares sdk.Rat, err sdk.Error) {
+	validator types.Validator, subtractAccount bool) (newShares sdk.Rat, err sdk.Error) {
 
 	// Get or create the delegator delegation
 	delegation, found := k.GetDelegation(ctx, delegatorAddr, validator.Owner)
@@ -214,12 +214,15 @@ func (k Keeper) Delegate(ctx sdk.Context, delegatorAddr sdk.AccAddress, bondAmt
 		}
 	}
 
-	// Account new shares, save
-	pool := k.GetPool(ctx)
-	_, _, err = k.coinKeeper.SubtractCoins(ctx, delegation.DelegatorAddr, sdk.Coins{bondAmt})
-	if err != nil {
-		return
+	if subtractAccount {
+		// Account new shares, save
+		_, _, err = k.coinKeeper.SubtractCoins(ctx, delegation.DelegatorAddr, sdk.Coins{bondAmt})
+		if err != nil {
+			return
+		}
 	}
 
+	pool := k.GetPool(ctx)
 	validator, pool, newShares = validator.AddTokensFromDel(pool, bondAmt.Amount.Int64())
 	delegation.Shares = delegation.Shares.Add(newShares)
@@ -358,7 +361,7 @@ func (k Keeper) BeginRedelegation(ctx sdk.Context, delegatorAddr, validatorSrcAd
 	if !found {
 		return types.ErrBadRedelegationDst(k.Codespace())
 	}
-	sharesCreated, err := k.Delegate(ctx, delegatorAddr, returnCoin, dstValidator)
+	sharesCreated, err := k.Delegate(ctx, delegatorAddr, returnCoin, dstValidator, false)
 	if err != nil {
 		return err
 	}
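
Taken together, the handler and keeper hunks above give Delegate an explicit subtractAccount switch. The sketch below contrasts the two call patterns; it is illustrative only, not SDK code, and the x/stake import paths and helper variables are assumptions:

package stakesketch

import (
	sdk "github.com/cosmos/cosmos-sdk/types"
	"github.com/cosmos/cosmos-sdk/x/stake/keeper"
	"github.com/cosmos/cosmos-sdk/x/stake/types"
)

// delegateThenMove contrasts the two uses of the subtractAccount flag: a fresh
// delegation debits the delegator's liquid account, while a redelegation-style
// move reuses tokens that are already bonded and leaves the account untouched.
func delegateThenMove(ctx sdk.Context, k keeper.Keeper, del sdk.AccAddress,
	bond sdk.Coin, src, dst types.Validator) sdk.Error {

	// User-initiated delegation: coins are subtracted from the liquid account.
	if _, err := k.Delegate(ctx, del, bond, src, true); err != nil {
		return err
	}

	// Redelegation leg: subtractAccount is false, so no coins leave the
	// account; only shares are created on the destination validator.
	_, err := k.Delegate(ctx, del, bond, dst, false)
	return err
}
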