x/ibc: fix missing return statement (#6099)

* enable the wsl linter

Fix various wsl-related warnings.

x/ibc/04-channel/keeper/handshake.go: fix missing return statement in ChanOpenTry() (a condensed excerpt of the fix follows this change list).

* goimports -w files

* remove unknown linter references

* run make format

* Revert "run make format"

This reverts commit f810b62b9e4993f08506663d4e5f2ec2228a9863.

* run make format
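The headline fix is small enough to show inline. In ChanOpenTry(), the guard against invalid relay attempts built a wrapped error but never returned it, so execution fell through as if the check had passed; the patch simply returns the error. A condensed excerpt of the handshake.go hunk shown further down (some clauses of the condition are omitted here for brevity):

```go
// x/ibc/04-channel/keeper/handshake.go, ChanOpenTry() (condensed excerpt)
previousChannel, found := k.GetChannel(ctx, portID, channelID)
if found && !(previousChannel.State == exported.INIT &&
	previousChannel.Ordering == order &&
	previousChannel.Version == version) {
	// before: the error was constructed and silently dropped
	//   sdkerrors.Wrap(types.ErrInvalidChannel, "cannot relay connection attempt")
	// after: the error is actually returned to the caller
	return nil, sdkerrors.Wrap(types.ErrInvalidChannel, "cannot relay connection attempt")
}
```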
Alessio Treglia, 2020-04-30 03:36:34 +01:00 (committed by GitHub)
parent b854c485e4
commit 2879c0702c
36 changed files with 183 additions and 75 deletions

View File

@@ -33,6 +33,7 @@ linters:
   - unconvert
   - unused
   - misspell
+  - wsl
 issues:
   exclude-rules:
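A note on the rest of this diff: wsl is a whitespace linter, so the bulk of the hunks below only add or remove blank lines (for example, a blank line between a closing brace and the statement or return that follows it), and blank-line-only changes are easy to miss when reading the hunks. A minimal before/after sketch of the spacing wsl asks for, using a hypothetical type purely for illustration (this is not code from the commit):

```go
package main

// app is a hypothetical type used only to illustrate the wsl style.
type app struct {
	sealed bool
	router string
}

// routerCuddled is the shape wsl complains about: the return statement is
// "cuddled" directly against the closing brace of a multi-line if block.
func routerCuddled(a *app) string {
	if a.sealed {
		panic("Router() on sealed BaseApp")
	}
	return a.router
}

// routerSpaced is the shape this commit converts the codebase to: a blank
// line separates the block from the return that follows it.
func routerSpaced(a *app) string {
	if a.sealed {
		panic("Router() on sealed BaseApp")
	}

	return a.router
}

func main() {
	println(routerCuddled(&app{router: "r"}), routerSpaced(&app{router: "r"}))
}
```

The same pattern accounts for most of the hunks in baseapp.go, the crypto and store packages, and the keeper tests below.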

View File

@@ -25,10 +25,6 @@ const (
 var (
 	_ abci.Application = (*BaseApp)(nil)
-
-	// mainConsensusParamsKey defines a key to store the consensus params in the
-	// main store.
-	mainConsensusParamsKey = []byte("consensus_params")
 )

 type (

@@ -104,7 +100,6 @@ type BaseApp struct { // nolint: maligned
 func NewBaseApp(
 	name string, logger log.Logger, db dbm.DB, txDecoder sdk.TxDecoder, options ...func(*BaseApp),
 ) *BaseApp {
-
 	app := &BaseApp{
 		logger: logger,
 		name: name,

@@ -116,6 +111,7 @@ func NewBaseApp(
 		txDecoder: txDecoder,
 		fauxMerkleMode: false,
 	}
+
 	for _, option := range options {
 		option(app)
 	}

@@ -279,6 +275,7 @@ func (app *BaseApp) Router() sdk.Router {
 		// any routes modified which would cause unexpected routing behavior.
 		panic("Router() on sealed BaseApp")
 	}
+
 	return app.router
 }

@@ -326,18 +323,21 @@ func (app *BaseApp) GetConsensusParams(ctx sdk.Context) *abci.ConsensusParams {
 	if app.paramStore.Has(ctx, ParamStoreKeyBlockParams) {
 		var bp abci.BlockParams
+
 		app.paramStore.Get(ctx, ParamStoreKeyBlockParams, &bp)
 		cp.Block = &bp
 	}

 	if app.paramStore.Has(ctx, ParamStoreKeyEvidenceParams) {
 		var ep abci.EvidenceParams
+
 		app.paramStore.Get(ctx, ParamStoreKeyEvidenceParams, &ep)
 		cp.Evidence = &ep
 	}

 	if app.paramStore.Has(ctx, ParamStoreKeyValidatorParams) {
 		var vp abci.ValidatorParams
+
 		app.paramStore.Get(ctx, ParamStoreKeyValidatorParams, &vp)
 		cp.Validator = &vp
 	}

@@ -350,6 +350,7 @@ func (app *BaseApp) StoreConsensusParams(ctx sdk.Context, cp *abci.ConsensusPara
 	if app.paramStore == nil {
 		panic("cannot store consensus params with no params store set")
 	}
+
 	if cp == nil {
 		return
 	}

@@ -369,6 +370,7 @@ func (app *BaseApp) getMaximumBlockGas(ctx sdk.Context) uint64 {
 	}

 	maxGas := cp.Block.MaxGas
+
 	switch {
 	case maxGas < -1:
 		panic(fmt.Sprintf("invalid maximum block gas: %d", maxGas))

@@ -431,6 +433,7 @@ func (app *BaseApp) getContextForTx(mode runTxMode, txBytes []byte) sdk.Context
 	if mode == runTxModeReCheck {
 		ctx = ctx.WithIsReCheckTx(true)
 	}
+
 	if mode == runTxModeSimulate {
 		ctx, _ = ctx.CacheContext()
 	}

@@ -534,8 +537,10 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (gInfo sdk.
 	}

 	if app.anteHandler != nil {
-		var anteCtx sdk.Context
-		var msCache sdk.CacheMultiStore
+		var (
+			anteCtx sdk.Context
+			msCache sdk.CacheMultiStore
+		)

 		// Cache wrap context before AnteHandler call in case it aborts.
 		// This is required for both CheckTx and DeliverTx.

@@ -545,8 +550,8 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (gInfo sdk.
 		// writes do not happen if aborted/failed. This may have some
 		// performance benefits, but it'll be more difficult to get right.
 		anteCtx, msCache = app.cacheTxContext(ctx, txBytes)
 		newCtx, err := app.anteHandler(anteCtx, tx, mode == runTxModeSimulate)
 		if !newCtx.IsZero() {
 			// At this point, newCtx.MultiStore() is cache-wrapped, or something else
 			// replaced by the AnteHandler. We want the original multistore, not one

@@ -603,6 +608,7 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (*s
 		msgRoute := msg.Route()
+
 		handler := app.router.Route(ctx, msgRoute)
 		if handler == nil {
 			return nil, sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "unrecognized message route: %s; message index: %d", msgRoute, i)
 		}

@@ -622,6 +628,7 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (*s
 		// Note: Each message result's data must be length-prefixed in order to
 		// separate each result.
 		events = events.AppendEvents(msgEvents)
+
 		data = append(data, msgResult.Data...)
 		msgLogs = append(msgLogs, sdk.NewABCIMessageLog(uint16(i), msgResult.Log, msgEvents))
 	}

View File

@@ -80,6 +80,7 @@ the flag --nosort is set.
 	cmd.Flags().Uint32(flagIndex, 0, "Address index number for HD derivation")
 	cmd.Flags().Bool(flags.FlagIndentResponse, false, "Add indent to JSON response")
 	cmd.Flags().String(flagKeyAlgo, string(hd.Secp256k1Type), "Key signing algorithm to generate keys for")
+
 	return cmd
 }

@@ -94,6 +95,7 @@ func getKeybase(transient bool, buf io.Reader) (keyring.Keyring, error) {
 func runAddCmd(cmd *cobra.Command, args []string) error {
 	inBuf := bufio.NewReader(cmd.InOrStdin())
+
 	kb, err := getKeybase(viper.GetBool(flags.FlagDryRun), inBuf)
 	if err != nil {
 		return err
 	}

@@ -131,6 +133,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
 			if err2 != nil {
 				return err2
 			}
+
 			if !response {
 				return errors.New("aborted")
 			}

@@ -155,6 +158,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
 				if err != nil {
 					return err
 				}
+
 				pks = append(pks, k.GetPubKey())
 			}

@@ -171,6 +175,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
 			}
+
 			cmd.PrintErrf("Key %q saved to disk.\n", name)
 			return nil
 		}
 	}

@@ -180,10 +185,11 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
 		if err != nil {
 			return err
 		}

-		_, err = kb.SavePubKey(name, pk, algo.Name())
-		if err != nil {
+		if _, err := kb.SavePubKey(name, pk, algo.Name()); err != nil {
 			return err
 		}

 		return nil
 	}

@@ -202,6 +208,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
 	if viper.GetBool(flags.FlagUseLedger) {
 		bech32PrefixAccAddr := sdk.GetConfig().GetBech32AccountAddrPrefix()
+
 		info, err := kb.SaveLedgerKey(name, hd.Secp256k1, bech32PrefixAccAddr, coinType, account, index)
 		if err != nil {
 			return err
 		}

@@ -210,8 +217,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
 	}

 	// Get bip39 mnemonic
-	var mnemonic string
-	var bip39Passphrase string
+	var mnemonic, bip39Passphrase string

 	if interactive || viper.GetBool(flagRecover) {
 		bip39Message := "Enter your bip39 mnemonic"

@@ -314,6 +320,7 @@ func printCreate(cmd *cobra.Command, info keyring.Info, showMnemonic bool, mnemo
 		if err != nil {
 			return err
 		}
+
 		cmd.PrintErrln(string(jsonString))
 	default:
 		return fmt.Errorf("invalid output format %s", output)

View File

@@ -47,11 +47,13 @@ type bech32Output struct {
 func newBech32Output(bs []byte) bech32Output {
 	out := bech32Output{Formats: make([]string, len(bech32Prefixes))}
+
 	for i, prefix := range bech32Prefixes {
 		bech32Addr, err := bech32.ConvertAndEncode(prefix, bs)
 		if err != nil {
 			panic(err)
 		}
+
 		out.Formats[i] = bech32Addr
 	}

@@ -87,12 +89,15 @@ hexadecimal into bech32 cosmos prefixed format and vice versa.
 func parseKey(cmd *cobra.Command, args []string) error {
 	addr := strings.TrimSpace(args[0])
 	outstream := cmd.OutOrStdout()
+
 	if len(addr) == 0 {
 		return errors.New("couldn't parse empty input")
 	}
+
 	if !(runFromBech32(outstream, addr) || runFromHex(outstream, addr)) {
 		return errors.New("couldn't find valid bech32 nor hex data")
 	}
+
 	return nil
 }

@@ -102,7 +107,9 @@ func runFromBech32(w io.Writer, bech32str string) bool {
 	if err != nil {
 		return false
 	}
+
 	displayParseKeyInfo(w, newHexOutput(hrp, bz))
+
 	return true
 }

@@ -112,31 +119,33 @@ func runFromHex(w io.Writer, hexstr string) bool {
 	if err != nil {
 		return false
 	}

 	displayParseKeyInfo(w, newBech32Output(bz))

 	return true
 }

 func displayParseKeyInfo(w io.Writer, stringer fmt.Stringer) {
-	var out []byte
-	var err error
+	var (
+		err error
+		out []byte
+	)

 	switch viper.Get(cli.OutputFlag) {
 	case OutputFormatText:
 		out, err = yaml.Marshal(&stringer)
 	case OutputFormatJSON:
 		if viper.GetBool(flags.FlagIndentResponse) {
 			out, err = KeysCdc.MarshalJSONIndent(stringer, "", " ")
 		} else {
 			out = KeysCdc.MustMarshalJSON(stringer)
 		}
 	}

 	if err != nil {
 		panic(err)
 	}

-	fmt.Fprintln(w, string(out))
+	_, _ = fmt.Fprintln(w, string(out))
 }

View File

@@ -142,6 +142,7 @@ func BroadcastTx(ctx context.CLIContext, txf Factory, msgs ...sdk.Msg) error {
 		buf := bufio.NewReader(os.Stdin)
+
 		ok, err := input.GetConfirmation("confirm transaction before signing and broadcasting", buf, os.Stderr)
 		if err != nil || !ok {
 			_, _ = fmt.Fprintf(os.Stderr, "%s\n", "cancelled transaction")
 			return err

@@ -168,7 +169,6 @@ func BroadcastTx(ctx context.CLIContext, txf Factory, msgs ...sdk.Msg) error {
 func WriteGeneratedTxResponse(
 	ctx context.CLIContext, w http.ResponseWriter, txg Generator, br rest.BaseReq, msgs ...sdk.Msg,
 ) {
-
 	gasAdj, ok := rest.ParseFloat64OrReturnBadRequest(w, br.GasAdjustment, flags.DefaultGasAdjustment)
 	if !ok {
 		return

@@ -231,6 +231,7 @@ func BuildUnsignedTx(txf Factory, msgs ...sdk.Msg) (ClientTx, error) {
 	}

 	fees := txf.fees
+
 	if !txf.gasPrices.IsZero() {
 		if !fees.IsZero() {
 			return nil, errors.New("cannot provide both fees and gas prices")

@@ -241,6 +242,7 @@ func BuildUnsignedTx(txf Factory, msgs ...sdk.Msg) (ClientTx, error) {
 		// Derive the fees based on the provided gas prices, where
 		// fee = ceil(gasPrice * gasLimit).
 		fees = make(sdk.Coins, len(txf.gasPrices))
+
 		for i, gp := range txf.gasPrices {
 			fee := gp.Amount.Mul(glDec)
 			fees[i] = sdk.NewCoin(gp.Denom, fee.Ceil().RoundInt())

@@ -294,7 +296,6 @@ func BuildSimTx(txf Factory, msgs ...sdk.Msg) ([]byte, error) {
 func CalculateGas(
 	queryFunc func(string, []byte) ([]byte, int64, error), txf Factory, msgs ...sdk.Msg,
 ) (sdk.SimulationResponse, uint64, error) {
-
 	txBytes, err := BuildSimTx(txf, msgs...)
 	if err != nil {
 		return sdk.SimulationResponse{}, 0, err

@@ -334,6 +335,7 @@ func PrepareFactory(ctx context.CLIContext, txf Factory) (Factory, error) {
 		if initNum == 0 {
 			txf = txf.WithAccountNumber(num)
 		}
+
 		if initSeq == 0 {
 			txf = txf.WithSequence(seq)
 		}

View File

@@ -42,7 +42,7 @@ func (ac *AminoCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMars
 	ac.amino.MustUnmarshalBinaryLengthPrefixed(bz, ptr)
 }

-func (ac *AminoCodec) MarshalJSON(o interface{}) ([]byte, error) { // nolint: stdmethods
+func (ac *AminoCodec) MarshalJSON(o interface{}) ([]byte, error) {
 	return ac.amino.MarshalJSON(o)
 }

@@ -50,7 +50,7 @@ func (ac *AminoCodec) MustMarshalJSON(o interface{}) []byte {
 	return ac.amino.MustMarshalJSON(o)
 }

-func (ac *AminoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error { // nolint: stdmethods
+func (ac *AminoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error {
 	return ac.amino.UnmarshalJSON(bz, ptr)
 }

View File

@@ -34,10 +34,10 @@ type (
 	}

 	JSONMarshaler interface {
-		MarshalJSON(o interface{}) ([]byte, error) // nolint: stdmethods
+		MarshalJSON(o interface{}) ([]byte, error)
 		MustMarshalJSON(o interface{}) []byte
-		UnmarshalJSON(bz []byte, ptr interface{}) error // nolint: stdmethods
+		UnmarshalJSON(bz []byte, ptr interface{}) error
 		MustUnmarshalJSON(bz []byte, ptr interface{})
 	}

View File

@@ -46,7 +46,7 @@ func (hc *HybridCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMars
 	hc.proto.MustUnmarshalBinaryLengthPrefixed(bz, ptr)
 }

-func (hc *HybridCodec) MarshalJSON(o interface{}) ([]byte, error) { // nolint: stdmethods
+func (hc *HybridCodec) MarshalJSON(o interface{}) ([]byte, error) {
 	return hc.amino.MarshalJSON(o)
 }

@@ -54,7 +54,7 @@ func (hc *HybridCodec) MustMarshalJSON(o interface{}) []byte {
 	return hc.amino.MustMarshalJSON(o)
 }

-func (hc *HybridCodec) UnmarshalJSON(bz []byte, ptr interface{}) error { // nolint: stdmethods
+func (hc *HybridCodec) UnmarshalJSON(bz []byte, ptr interface{}) error {
 	return hc.amino.UnmarshalJSON(bz, ptr)
 }

View File

@@ -89,7 +89,7 @@ func (pc *ProtoCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMars
 	}
 }

-func (pc *ProtoCodec) MarshalJSON(o interface{}) ([]byte, error) { // nolint: stdmethods
+func (pc *ProtoCodec) MarshalJSON(o interface{}) ([]byte, error) {
 	m, ok := o.(ProtoMarshaler)
 	if !ok {
 		return nil, fmt.Errorf("cannot protobuf JSON encode unsupported type: %T", o)

@@ -107,7 +107,7 @@ func (pc *ProtoCodec) MustMarshalJSON(o interface{}) []byte {
 	return bz
 }

-func (pc *ProtoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error { // nolint: stdmethods
+func (pc *ProtoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error {
 	m, ok := ptr.(ProtoMarshaler)
 	if !ok {
 		return fmt.Errorf("cannot protobuf JSON decode unsupported type: %T", ptr)

View File

@@ -49,6 +49,7 @@ func ArmorInfoBytes(bz []byte) string {
 		headerType: "Info",
 		headerVersion: "0.0.0",
 	}
+
 	return armor.EncodeArmor(blockTypeKeyInfo, header, bz)
 }

@@ -60,6 +61,7 @@ func ArmorPubKeyBytes(bz []byte, algo string) string {
 	if algo != "" {
 		header[headerType] = algo
 	}
+
 	return armor.EncodeArmor(blockTypePubKey, header, bz)
 }

@@ -76,6 +78,7 @@ func UnarmorInfoBytes(armorStr string) ([]byte, error) {
 	if header[headerVersion] != "0.0.0" {
 		return nil, fmt.Errorf("unrecognized version: %v", header[headerVersion])
 	}
+
 	return bz, nil
 }

@@ -93,6 +96,7 @@ func UnarmorPubKeyBytes(armorStr string) (bz []byte, algo string, err error) {
 		if header[headerType] == "" {
 			header[headerType] = defaultAlgo
 		}
+
 		return bz, header[headerType], err
 	case "":
 		return nil, "", fmt.Errorf("header's version field is empty")

@@ -107,10 +111,12 @@ func unarmorBytes(armorStr, blockType string) (bz []byte, header map[string]stri
 	if err != nil {
 		return
 	}
+
 	if bType != blockType {
 		err = fmt.Errorf("unrecognized armor type %q, expected: %q", bType, blockType)
 		return
 	}
+
 	return
 }

@@ -124,10 +130,13 @@ func EncryptArmorPrivKey(privKey crypto.PrivKey, passphrase string, algo string)
 		"kdf": "bcrypt",
 		"salt": fmt.Sprintf("%X", saltBytes),
 	}
+
 	if algo != "" {
 		header[headerType] = algo
 	}
+
 	armorStr := armor.EncodeArmor(blockTypePrivKey, header, encBytes)
+
 	return armorStr
 }

@@ -137,11 +146,14 @@ func EncryptArmorPrivKey(privKey crypto.PrivKey, passphrase string, algo string)
 func encryptPrivKey(privKey crypto.PrivKey, passphrase string) (saltBytes []byte, encBytes []byte) {
 	saltBytes = crypto.CRandBytes(16)
+
 	key, err := bcrypt.GenerateFromPassword(saltBytes, []byte(passphrase), BcryptSecurityParameter)
 	if err != nil {
 		panic(sdkerrors.Wrap(err, "error generating bcrypt key from passphrase"))
 	}
+
 	key = crypto.Sha256(key) // get 32 bytes
 	privKeyBytes := privKey.Bytes()
+
 	return saltBytes, xsalsa20symmetric.EncryptSymmetric(privKeyBytes, key)
 }

@@ -151,24 +163,30 @@ func UnarmorDecryptPrivKey(armorStr string, passphrase string) (privKey crypto.P
 	if err != nil {
 		return privKey, "", err
 	}
+
 	if blockType != blockTypePrivKey {
 		return privKey, "", fmt.Errorf("unrecognized armor type: %v", blockType)
 	}
+
 	if header["kdf"] != "bcrypt" {
 		return privKey, "", fmt.Errorf("unrecognized KDF type: %v", header["kdf"])
 	}
+
 	if header["salt"] == "" {
 		return privKey, "", fmt.Errorf("missing salt bytes")
 	}
+
 	saltBytes, err := hex.DecodeString(header["salt"])
 	if err != nil {
 		return privKey, "", fmt.Errorf("error decoding salt: %v", err.Error())
 	}
+
 	privKey, err = decryptPrivKey(saltBytes, encBytes, passphrase)
+
 	if header[headerType] == "" {
 		header[headerType] = defaultAlgo
 	}
+
 	return privKey, header[headerType], err
 }

@@ -177,13 +195,15 @@ func decryptPrivKey(saltBytes []byte, encBytes []byte, passphrase string) (privK
 	if err != nil {
 		return privKey, sdkerrors.Wrap(err, "error generating bcrypt key from passphrase")
 	}
+
 	key = crypto.Sha256(key) // Get 32 bytes
+
 	privKeyBytes, err := xsalsa20symmetric.DecryptSymmetric(encBytes, key)
 	if err != nil && err.Error() == "Ciphertext decryption failed" {
 		return privKey, sdkerrors.ErrWrongPassword
 	} else if err != nil {
 		return privKey, err
 	}

-	privKey, err = cryptoAmino.PrivKeyFromBytes(privKeyBytes)
-	return privKey, err
+	return cryptoAmino.PrivKeyFromBytes(privKeyBytes)
 }

View File

@@ -49,14 +49,17 @@ func NewParamsFromPath(path string) (*BIP44Params, error) {
 	if err != nil {
 		return nil, err
 	}
+
 	coinType, err := hardenedInt(spl[1])
 	if err != nil {
 		return nil, err
 	}
+
 	account, err := hardenedInt(spl[2])
 	if err != nil {
 		return nil, err
 	}
+
 	change, err := hardenedInt(spl[3])
 	if err != nil {
 		return nil, err

@@ -76,6 +79,7 @@ func NewParamsFromPath(path string) (*BIP44Params, error) {
 		return nil,
 			fmt.Errorf("second and third field in path must be hardened (ie. contain the suffix ', got %v and %v", spl[1], spl[2])
 	}
+
 	if isHardened(spl[3]) || isHardened(spl[4]) {
 		return nil,
 			fmt.Errorf("fourth and fifth field in path must not be hardened (ie. not contain the suffix ', got %v and %v", spl[3], spl[4])

@@ -97,12 +101,15 @@ func NewParamsFromPath(path string) (*BIP44Params, error) {
 func hardenedInt(field string) (uint32, error) {
 	field = strings.TrimSuffix(field, "'")
+
 	i, err := strconv.Atoi(field)
 	if err != nil {
 		return 0, err
 	}
+
 	if i < 0 {
 		return 0, fmt.Errorf("fields must not be negative. got %d", i)
 	}
+
 	return uint32(i), nil
 }

@@ -123,6 +130,7 @@ func (p BIP44Params) DerivationPath() []uint32 {
 	if p.Change {
 		change = 1
 	}
+
 	return []uint32{
 		p.Purpose,
 		p.CoinType,

@@ -161,6 +169,7 @@ func ComputeMastersFromSeed(seed []byte) (secret [32]byte, chainCode [32]byte) {
 func DerivePrivateKeyForPath(privKeyBytes [32]byte, chainCode [32]byte, path string) ([32]byte, error) {
 	data := privKeyBytes
+
 	parts := strings.Split(path, "/")
 	for _, part := range parts {
 		// do we have an apostrophe?
 		harden := part[len(part)-1:] == "'"

@@ -168,17 +177,23 @@ func DerivePrivateKeyForPath(privKeyBytes [32]byte, chainCode [32]byte, path str
 		if harden {
 			part = part[:len(part)-1]
 		}
+
 		idx, err := strconv.Atoi(part)
 		if err != nil {
 			return [32]byte{}, fmt.Errorf("invalid BIP 32 path: %s", err)
 		}
+
 		if idx < 0 {
 			return [32]byte{}, errors.New("invalid BIP 32 path: index negative ot too large")
 		}
+
 		data, chainCode = derivePrivateKey(data, chainCode, uint32(idx), harden)
 	}
+
 	var derivedKey [32]byte
+
 	n := copy(derivedKey[:], data[:])
 	if n != 32 || len(data) != 32 {
 		return [32]byte{}, fmt.Errorf("expected a (secp256k1) key of length 32, got length: %v", len(data))
 	}

@@ -193,8 +208,10 @@ func DerivePrivateKeyForPath(privKeyBytes [32]byte, chainCode [32]byte, path str
 // - https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki
 func derivePrivateKey(privKeyBytes [32]byte, chainCode [32]byte, index uint32, harden bool) ([32]byte, [32]byte) {
 	var data []byte
+
 	if harden {
 		index |= 0x80000000
+
 		data = append([]byte{byte(0)}, privKeyBytes[:]...)
 	} else {
 		// this can't return an error:

@@ -208,9 +225,11 @@ func derivePrivateKey(privKeyBytes [32]byte, chainCode [32]byte, index uint32, h
 		data = public[:]
 		*/
 	}
+
 	data = append(data, uint32ToBytes(index)...)

 	data2, chainCode2 := i64(chainCode[:], data)
 	x := addScalars(privKeyBytes[:], data2[:])
+
 	return x, chainCode2
 }

@@ -222,12 +241,14 @@ func addScalars(a []byte, b []byte) [32]byte {
 	x := sInt.Mod(sInt, btcec.S256().N).Bytes()
 	x2 := [32]byte{}
 	copy(x2[32-len(x):], x)
+
 	return x2
 }

 func uint32ToBytes(i uint32) []byte {
 	b := [4]byte{}
 	binary.BigEndian.PutUint32(b[:], i)
+
 	return b[:]
 }

View File

@@ -19,15 +19,19 @@ type memIterator struct {
 func newMemIterator(start, end []byte, items *list.List, ascending bool) *memIterator {
 	itemsInDomain := make([]*tmkv.Pair, 0)
+
 	var entered bool
+
 	for e := items.Front(); e != nil; e = e.Next() {
 		item := e.Value.(*tmkv.Pair)
 		if !dbm.IsKeyInDomain(item.Key, start, end) {
 			if entered {
 				break
 			}
+
 			continue
 		}
+
 		itemsInDomain = append(itemsInDomain, item)
 		entered = true
 	}

@@ -56,6 +60,7 @@ func (mi *memIterator) assertValid() {
 func (mi *memIterator) Next() {
 	mi.assertValid()
+
 	if mi.ascending {
 		mi.items = mi.items[1:]
 	} else {

@@ -65,17 +70,21 @@ func (mi *memIterator) Next() {
 func (mi *memIterator) Key() []byte {
 	mi.assertValid()
+
 	if mi.ascending {
 		return mi.items[0].Key
 	}
+
 	return mi.items[len(mi.items)-1].Key
 }

 func (mi *memIterator) Value() []byte {
 	mi.assertValid()
+
 	if mi.ascending {
 		return mi.items[0].Value
 	}
+
 	return mi.items[len(mi.items)-1].Value
 }

View File

@@ -28,6 +28,7 @@ func newCacheMergeIterator(parent, cache types.Iterator, ascending bool) *cacheM
 		cache: cache,
 		ascending: ascending,
 	}
+
 	return iter
 }

@@ -36,16 +37,19 @@ func newCacheMergeIterator(parent, cache types.Iterator, ascending bool) *cacheM
 func (iter *cacheMergeIterator) Domain() (start, end []byte) {
 	startP, endP := iter.parent.Domain()
 	startC, endC := iter.cache.Domain()
+
 	if iter.compare(startP, startC) < 0 {
 		start = startP
 	} else {
 		start = startC
 	}
+
 	if iter.compare(endP, endC) < 0 {
 		end = endC
 	} else {
 		end = endP
 	}
+
 	return start, end
 }

@@ -101,6 +105,7 @@ func (iter *cacheMergeIterator) Key() []byte {
 	// Both are valid. Compare keys.
 	keyP, keyC := iter.parent.Key(), iter.cache.Key()
+
 	cmp := iter.compare(keyP, keyC)
 	switch cmp {
 	case -1: // parent < cache

@@ -131,6 +136,7 @@ func (iter *cacheMergeIterator) Value() []byte {
 	// Both are valid. Compare keys.
 	keyP, keyC := iter.parent.Key(), iter.cache.Key()
+
 	cmp := iter.compare(keyP, keyC)
 	switch cmp {
 	case -1: // parent < cache

@@ -173,6 +179,7 @@ func (iter *cacheMergeIterator) compare(a, b []byte) int {
 	if iter.ascending {
 		return bytes.Compare(a, b)
 	}
+
 	return bytes.Compare(a, b) * -1
 }

@@ -185,7 +192,6 @@ func (iter *cacheMergeIterator) skipCacheDeletes(until []byte) {
 	for iter.cache.Valid() &&
 		iter.cache.Value() == nil &&
 		(until == nil || iter.compare(iter.cache.Key(), until) < 0) {
-
 		iter.cache.Next()
 	}
 }

@@ -210,26 +216,24 @@ func (iter *cacheMergeIterator) skipUntilExistsOrInvalid() bool {
 		// Compare parent and cache.
 		keyP := iter.parent.Key()
 		keyC := iter.cache.Key()
 		switch iter.compare(keyP, keyC) {
 		case -1: // parent < cache.
 			return true

 		case 0: // parent == cache.
 			// Skip over if cache item is a delete.
 			valueC := iter.cache.Value()
 			if valueC == nil {
 				iter.parent.Next()
 				iter.cache.Next()

 				continue
 			}
 			// Cache is not a delete.

 			return true // cache exists.
 		case 1: // cache < parent
 			// Skip over if cache item is a delete.
 			valueC := iter.cache.Value()
 			if valueC == nil {

View File

@@ -100,6 +100,7 @@ func (store *Store) Write() {
 	// We need a copy of all of the keys.
 	// Not the best, but probably not a bottleneck depending.
 	keys := make([]string, 0, len(store.cache))
+
 	for key, dbValue := range store.cache {
 		if dbValue.dirty {
 			keys = append(keys, key)

@@ -112,6 +113,7 @@ func (store *Store) Write() {
 	// at least happen atomically.
 	for _, key := range keys {
 		cacheValue := store.cache[key]
+
 		switch {
 		case cacheValue.deleted:
 			store.parent.Delete([]byte(key))

@@ -178,8 +180,10 @@ func (store *Store) dirtyItems(start, end []byte) {
 	for key := range store.unsortedCache {
 		cacheValue := store.cache[key]
+
 		if dbm.IsKeyInDomain([]byte(key), start, end) {
 			unsorted = append(unsorted, &tmkv.Pair{Key: []byte(key), Value: cacheValue.value})
+
 			delete(store.unsortedCache, key)
 		}
 	}

@@ -192,9 +196,11 @@ func (store *Store) dirtyItems(start, end []byte) {
 		uitem := unsorted[0]
 		sitem := e.Value.(*tmkv.Pair)
 		comp := bytes.Compare(uitem.Key, sitem.Key)
+
 		switch comp {
 		case -1:
 			unsorted = unsorted[1:]
+
 			store.sortedCache.InsertBefore(uitem, e)
 		case 1:
 			e = e.Next()

@@ -208,7 +214,6 @@ func (store *Store) dirtyItems(start, end []byte) {
 	for _, kvp := range unsorted {
 		store.sortedCache.PushBack(kvp)
 	}
-
 }

 //----------------------------------------

View File

@@ -149,8 +149,8 @@ func (pi *prefixIterator) Next() {
 	if !pi.valid {
 		panic("prefixIterator invalid, cannot call Next()")
 	}
-	pi.iter.Next()
-	if !pi.iter.Valid() || !bytes.HasPrefix(pi.iter.Key(), pi.prefix) {
+
+	if pi.iter.Next(); !pi.iter.Valid() || !bytes.HasPrefix(pi.iter.Key(), pi.prefix) {
 		// TODO: shouldn't pi be set to nil instead?
 		pi.valid = false
 	}

@@ -161,8 +161,10 @@ func (pi *prefixIterator) Key() (key []byte) {
 	if !pi.valid {
 		panic("prefixIterator invalid, cannot call Key()")
 	}
+
 	key = pi.iter.Key()
 	key = stripPrefix(key, pi.prefix)
+
 	return
 }

@@ -171,6 +173,7 @@ func (pi *prefixIterator) Value() []byte {
 	if !pi.valid {
 		panic("prefixIterator invalid, cannot call Value()")
 	}
+
 	return pi.iter.Value()
 }

@@ -194,6 +197,7 @@ func stripPrefix(key []byte, prefix []byte) []byte {
 	if len(key) < len(prefix) || !bytes.Equal(key[:len(prefix)], prefix) {
 		panic("should not happen")
 	}
+
 	return key[len(prefix):]
 }

View File

@@ -55,16 +55,6 @@ func (sm *merkleMap) sort() {
 	sm.sorted = true
 }

-// kvPairs sorts the merkleMap kv.Pair objects and returns a copy as a slice.
-func (sm *merkleMap) kvPairs() kv.Pairs {
-	sm.sort()
-
-	kvs := make(kv.Pairs, len(sm.kvs))
-	copy(kvs, sm.kvs)
-
-	return kvs
-}
-
 // kvPair defines a type alias for kv.Pair so that we can create bytes to hash
 // when constructing the merkle root. Note, key and values are both length-prefixed.
 type kvPair kv.Pair

View File

@@ -6,10 +6,11 @@ import (
 	"path/filepath"
 	"testing"

-	"github.com/cosmos/cosmos-sdk/std"
 	"github.com/stretchr/testify/require"
 	tmtypes "github.com/tendermint/tendermint/types"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/cosmos/cosmos-sdk/codec"
 	"github.com/cosmos/cosmos-sdk/server"
 	"github.com/cosmos/cosmos-sdk/simapp"

View File

@@ -66,7 +66,6 @@ func NewBaseReq(
 	from, memo, chainID string, gas, gasAdjustment string, accNumber, seq uint64,
 	fees sdk.Coins, gasPrices sdk.DecCoins, simulate bool,
 ) BaseReq {
-
 	return BaseReq{
 		From: strings.TrimSpace(from),
 		Memo: strings.TrimSpace(memo),

@@ -154,6 +153,7 @@ func CheckError(w http.ResponseWriter, status int, err error) bool {
 		WriteErrorResponse(w, status, err.Error())
 		return true
 	}
+
 	return false
 }

@@ -204,8 +204,8 @@ func ParseUint64OrReturnBadRequest(w http.ResponseWriter, s string) (n uint64, o
 	n, err = strconv.ParseUint(s, 10, 64)
 	if err != nil {
-		err := fmt.Errorf("'%s' is not a valid uint64", s)
-		WriteErrorResponse(w, http.StatusBadRequest, err.Error())
+		WriteErrorResponse(w, http.StatusBadRequest, fmt.Sprintf("'%s' is not a valid uint64", s))

 		return n, false
 	}

@@ -358,11 +358,13 @@ func ParseHTTPArgsWithLimit(r *http.Request, defaultLimit int) (tags []string, p
 		var value string
 		value, err = url.QueryUnescape(values[0])

 		if err != nil {
 			return tags, page, limit, err
 		}
+
 		var tag string
+
 		switch key {
 		case types.TxHeightKey:
 			tag = fmt.Sprintf("%s=%s", key, value)

@@ -419,5 +421,6 @@ func ParseQueryParamBool(r *http.Request, param string) bool {
 	if value, err := strconv.ParseBool(r.FormValue(param)); err == nil {
 		return value
 	}
+
 	return false
 }

View File

@@ -1,9 +1,10 @@
 package keeper_test

 import (
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/require"

 	"github.com/cosmos/cosmos-sdk/simapp"

View File

@@ -3,12 +3,14 @@ package cli

 import (
 	"encoding/json"
 	"fmt"

+	"github.com/stretchr/testify/require"
+
 	clientkeys "github.com/cosmos/cosmos-sdk/client/keys"
 	"github.com/cosmos/cosmos-sdk/tests"
 	"github.com/cosmos/cosmos-sdk/tests/cli/helpers"
 	sdk "github.com/cosmos/cosmos-sdk/types"
 	"github.com/cosmos/cosmos-sdk/x/auth"
-	"github.com/stretchr/testify/require"
 )

 // TxSend is simcli tx send

View File

@@ -4,12 +4,14 @@ package cli_test

 import (
 	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/require"

 	"github.com/cosmos/cosmos-sdk/tests"
 	"github.com/cosmos/cosmos-sdk/tests/cli/helpers"
 	sdk "github.com/cosmos/cosmos-sdk/types"
 	bankcli "github.com/cosmos/cosmos-sdk/x/bank/client/cli_test"
-	"github.com/stretchr/testify/require"
-	"testing"
 )

 func TestCLISend(t *testing.T) {
View File

@@ -1,10 +1,11 @@
 package keeper_test

 import (
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"
 	"time"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/suite"
 	abci "github.com/tendermint/tendermint/abci/types"
 	tmkv "github.com/tendermint/tendermint/libs/kv"

View File

@@ -2,9 +2,10 @@ package simulation_test

 import (
 	"fmt"
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/require"
 	tmkv "github.com/tendermint/tendermint/libs/kv"

View File

@@ -2,9 +2,10 @@ package simulation_test

 import (
 	"fmt"
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/require"
 	"github.com/tendermint/tendermint/crypto/ed25519"

View File

@@ -19,12 +19,13 @@ func NewDecodeStore(cdc types.Codec) func(kvA, kvB tmkv.Pair) string {
 			if err != nil {
 				panic(fmt.Sprintf("cannot unmarshal evidence: %s", err.Error()))
 			}
+
 			evidenceB, err := cdc.UnmarshalEvidence(kvB.Value)
 			if err != nil {
 				panic(fmt.Sprintf("cannot unmarshal evidence: %s", err.Error()))
 			}
+
 			return fmt.Sprintf("%v\n%v", evidenceA, evidenceB)
 		default:
 			panic(fmt.Sprintf("invalid %s key prefix %X", types.ModuleName, kvA.Key[:1]))
 		}

View File

@@ -25,6 +25,7 @@ func GenEvidences(_ *rand.Rand, _ []simtypes.Account) []exported.Evidence {
 // RandomizedGenState generates a random GenesisState for evidence
 func RandomizedGenState(simState *module.SimulationState) {
 	var ev []exported.Evidence
+
 	simState.AppParams.GetOrGenerate(
 		simState.Cdc, evidence, &ev, simState.Rand,
 		func(r *rand.Rand) { ev = GenEvidences(r, simState.Accounts) },

View File

@@ -1,10 +1,11 @@
 package gov_test

 import (
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"
 	"time"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/require"
 	abci "github.com/tendermint/tendermint/abci/types"

View File

@@ -1,12 +1,13 @@
 package keeper_test

 import (
-	"github.com/cosmos/cosmos-sdk/std"
 	"math/rand"
 	"strings"
 	"testing"
 	"time"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/require"
 	abci "github.com/tendermint/tendermint/abci/types"

View File

@@ -3,10 +3,11 @@ package simulation_test
 import (
 	"encoding/binary"
 	"fmt"
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"
 	"time"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/require"
 	"github.com/tendermint/tendermint/crypto/ed25519"

View File

@@ -18,13 +18,16 @@ import (
 // The counterparty hops are stored in the inverse order as the channel's.
 func (k Keeper) CounterpartyHops(ctx sdk.Context, ch types.Channel) ([]string, bool) {
 	counterPartyHops := make([]string, len(ch.ConnectionHops))
+
 	for i, hop := range ch.ConnectionHops {
-		connection, found := k.connectionKeeper.GetConnection(ctx, hop)
+		conn, found := k.connectionKeeper.GetConnection(ctx, hop)
 		if !found {
 			return []string{}, false
 		}
-		counterPartyHops[len(counterPartyHops)-1-i] = connection.GetCounterparty().GetConnectionID()
+
+		counterPartyHops[len(counterPartyHops)-1-i] = conn.GetCounterparty().GetConnectionID()
 	}

 	return counterPartyHops, true
 }

@@ -41,7 +44,6 @@ func (k Keeper) ChanOpenInit(
 	version string,
 ) (*capability.Capability, error) {
 	// channel identifier and connection hop length checked on msg.ValidateBasic()
-
 	_, found := k.GetChannel(ctx, portID, channelID)
 	if found {
 		return nil, sdkerrors.Wrap(types.ErrChannelExists, channelID)

@@ -70,6 +72,7 @@ func (k Keeper) ChanOpenInit(
 	if err != nil {
 		return nil, sdkerrors.Wrap(types.ErrInvalidChannelCapability, err.Error())
 	}
+
 	k.SetNextSequenceSend(ctx, portID, channelID, 1)
 	k.SetNextSequenceRecv(ctx, portID, channelID, 1)

@@ -92,7 +95,6 @@ func (k Keeper) ChanOpenTry(
 	proofHeight uint64,
 ) (*capability.Capability, error) {
 	// channel identifier and connection hop length checked on msg.ValidateBasic()
-
 	previousChannel, found := k.GetChannel(ctx, portID, channelID)
 	if found && !(previousChannel.State == exported.INIT &&
 		previousChannel.Ordering == order &&

@@ -100,7 +102,7 @@ func (k Keeper) ChanOpenTry(
 		previousChannel.Counterparty.ChannelID == counterparty.ChannelID &&
 		previousChannel.ConnectionHops[0] == connectionHops[0] &&
 		previousChannel.Version == version) {
-		sdkerrors.Wrap(types.ErrInvalidChannel, "cannot relay connection attempt")
+		return nil, sdkerrors.Wrap(types.ErrInvalidChannel, "cannot relay connection attempt")
 	}

 	if !k.portKeeper.Authenticate(ctx, portCap, portID) {

@@ -150,6 +152,7 @@ func (k Keeper) ChanOpenTry(
 	if err != nil {
 		return nil, sdkerrors.Wrap(types.ErrInvalidChannelCapability, err.Error())
 	}
+
 	k.SetNextSequenceSend(ctx, portID, channelID, 1)
 	k.SetNextSequenceRecv(ctx, portID, channelID, 1)

@@ -328,6 +331,7 @@ func (k Keeper) ChanCloseInit(
 	channel.State = exported.CLOSED
 	k.SetChannel(ctx, portID, channelID, channel)
 	k.Logger(ctx).Info("channel close initialized: portID (%s), channelID (%s)", portID, channelID)
+
 	return nil
 }

View File

@@ -2,10 +2,11 @@ package simulation_test
 import (
 	"fmt"
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"
 	"time"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	gogotypes "github.com/gogo/protobuf/types"
 	"github.com/stretchr/testify/require"

View File

@@ -2,12 +2,14 @@ package cli
 import (
 	"fmt"

+	"github.com/stretchr/testify/require"
+
 	clientkeys "github.com/cosmos/cosmos-sdk/client/keys"
 	"github.com/cosmos/cosmos-sdk/tests"
 	"github.com/cosmos/cosmos-sdk/tests/cli/helpers"
 	sdk "github.com/cosmos/cosmos-sdk/types"
 	"github.com/cosmos/cosmos-sdk/x/staking"
-	"github.com/stretchr/testify/require"
 )

 // TxStakingCreateValidator is simcli tx staking create-validator
View File

@@ -3,14 +3,16 @@
 package cli_test

 import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+	"github.com/tendermint/tendermint/crypto/ed25519"
+
 	"github.com/cosmos/cosmos-sdk/tests"
 	"github.com/cosmos/cosmos-sdk/tests/cli/helpers"
 	sdk "github.com/cosmos/cosmos-sdk/types"
 	bankcli "github.com/cosmos/cosmos-sdk/x/bank/client/cli_test"
 	stakingcli "github.com/cosmos/cosmos-sdk/x/staking/client/cli_test"
-	"github.com/stretchr/testify/require"
-	"github.com/tendermint/tendermint/crypto/ed25519"
-	"testing"
 )

 func TestCLICreateValidator(t *testing.T) {
View File

@@ -1,11 +1,12 @@
 package staking_test

 import (
-	"github.com/cosmos/cosmos-sdk/std"
 	abci "github.com/tendermint/tendermint/abci/types"
 	"github.com/tendermint/tendermint/crypto"
 	"github.com/tendermint/tendermint/crypto/secp256k1"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/cosmos/cosmos-sdk/codec"
 	"github.com/cosmos/cosmos-sdk/simapp"
 	sdk "github.com/cosmos/cosmos-sdk/types"

View File

@@ -1,9 +1,10 @@
 package keeper_test

 import (
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	abci "github.com/tendermint/tendermint/abci/types"

 	"github.com/cosmos/cosmos-sdk/codec"

View File

@@ -2,10 +2,11 @@ package simulation_test
 import (
 	"fmt"
-	"github.com/cosmos/cosmos-sdk/std"
 	"testing"
 	"time"

+	"github.com/cosmos/cosmos-sdk/std"
+
 	"github.com/stretchr/testify/require"
 	"github.com/tendermint/tendermint/crypto/ed25519"