x/ibc: fix missing return statement (#6099)
* enable the wsl linter; fix various wsl-related warnings
* x/ibc/04-channel/keeper/handshake.go: fix missing return statement in ChanOpenTry()
* goimports -w files
* remove unknown linter references
* run make format
* Revert "run make format"
  This reverts commit f810b62b9e4993f08506663d4e5f2ec2228a9863.
* run make format
This commit is contained in:
parent b854c485e4
commit 2879c0702c
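The headline fix is easy to miss inside the large wsl cleanup: in ChanOpenTry() an error was built with sdkerrors.Wrap() but never returned, so an invalid existing channel was silently accepted. Below is a minimal, self-contained Go sketch of that bug class and the fix, using the standard errors/fmt packages instead of the SDK's sdkerrors; the function and variable names are illustrative, not taken from the repository.

package main

import (
    "errors"
    "fmt"
)

var errInvalidChannel = errors.New("invalid channel")

// buggy mirrors the original ChanOpenTry flaw: the wrapped error is
// constructed but its value is dropped, so the caller sees success.
func buggy(compatible bool) error {
    if !compatible {
        fmt.Errorf("%w: cannot relay connection attempt", errInvalidChannel) // result discarded
    }
    return nil
}

// fixed returns the error, matching the one-line change in this commit.
func fixed(compatible bool) error {
    if !compatible {
        return fmt.Errorf("%w: cannot relay connection attempt", errInvalidChannel)
    }
    return nil
}

func main() {
    fmt.Println(buggy(false)) // <nil> - the failure is silently ignored
    fmt.Println(fixed(false)) // invalid channel: cannot relay connection attempt
}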
@@ -33,6 +33,7 @@ linters:
  - unconvert
  - unused
  - misspell
  - wsl

issues:
  exclude-rules:
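For readers unfamiliar with wsl: it is a whitespace/"cuddling" linter, which is why most hunks below only group declarations and add blank lines before if/return blocks. A small before/after sketch of the style it pushes toward (hypothetical function, lint messages paraphrased; this is not code from the commit):

package demo

// Before: wsl would complain about the cuddled declarations and the
// return that immediately follows a multi-line block.
func sum(xs []int) (int, int) {
    var total int
    var n int
    for _, x := range xs {
        total += x
        n++
    }
    return total, n
}

// After: declarations are grouped and blank lines separate the blocks,
// mirroring the pattern applied throughout this diff.
func sumGrouped(xs []int) (int, int) {
    var (
        total int
        n     int
    )

    for _, x := range xs {
        total += x
        n++
    }

    return total, n
}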
@@ -25,10 +25,6 @@ const (

var (
    _ abci.Application = (*BaseApp)(nil)

    // mainConsensusParamsKey defines a key to store the consensus params in the
    // main store.
    mainConsensusParamsKey = []byte("consensus_params")
)

type (
@@ -104,7 +100,6 @@ type BaseApp struct { // nolint: maligned
func NewBaseApp(
    name string, logger log.Logger, db dbm.DB, txDecoder sdk.TxDecoder, options ...func(*BaseApp),
) *BaseApp {

    app := &BaseApp{
        logger: logger,
        name: name,
@@ -116,6 +111,7 @@ func NewBaseApp(
        txDecoder: txDecoder,
        fauxMerkleMode: false,
    }

    for _, option := range options {
        option(app)
    }
@@ -279,6 +275,7 @@ func (app *BaseApp) Router() sdk.Router {
        // any routes modified which would cause unexpected routing behavior.
        panic("Router() on sealed BaseApp")
    }

    return app.router
}
@@ -326,18 +323,21 @@ func (app *BaseApp) GetConsensusParams(ctx sdk.Context) *abci.ConsensusParams {

    if app.paramStore.Has(ctx, ParamStoreKeyBlockParams) {
        var bp abci.BlockParams

        app.paramStore.Get(ctx, ParamStoreKeyBlockParams, &bp)
        cp.Block = &bp
    }

    if app.paramStore.Has(ctx, ParamStoreKeyEvidenceParams) {
        var ep abci.EvidenceParams

        app.paramStore.Get(ctx, ParamStoreKeyEvidenceParams, &ep)
        cp.Evidence = &ep
    }

    if app.paramStore.Has(ctx, ParamStoreKeyValidatorParams) {
        var vp abci.ValidatorParams

        app.paramStore.Get(ctx, ParamStoreKeyValidatorParams, &vp)
        cp.Validator = &vp
    }
@@ -350,6 +350,7 @@ func (app *BaseApp) StoreConsensusParams(ctx sdk.Context, cp *abci.ConsensusPara
    if app.paramStore == nil {
        panic("cannot store consensus params with no params store set")
    }

    if cp == nil {
        return
    }
@@ -369,6 +370,7 @@ func (app *BaseApp) getMaximumBlockGas(ctx sdk.Context) uint64 {
    }

    maxGas := cp.Block.MaxGas

    switch {
    case maxGas < -1:
        panic(fmt.Sprintf("invalid maximum block gas: %d", maxGas))
@@ -431,6 +433,7 @@ func (app *BaseApp) getContextForTx(mode runTxMode, txBytes []byte) sdk.Context
    if mode == runTxModeReCheck {
        ctx = ctx.WithIsReCheckTx(true)
    }

    if mode == runTxModeSimulate {
        ctx, _ = ctx.CacheContext()
    }
@@ -534,8 +537,10 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (gInfo sdk.
    }

    if app.anteHandler != nil {
        var anteCtx sdk.Context
        var msCache sdk.CacheMultiStore
        var (
            anteCtx sdk.Context
            msCache sdk.CacheMultiStore
        )

        // Cache wrap context before AnteHandler call in case it aborts.
        // This is required for both CheckTx and DeliverTx.
@@ -545,8 +550,8 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (gInfo sdk.
        // writes do not happen if aborted/failed. This may have some
        // performance benefits, but it'll be more difficult to get right.
        anteCtx, msCache = app.cacheTxContext(ctx, txBytes)

        newCtx, err := app.anteHandler(anteCtx, tx, mode == runTxModeSimulate)

        if !newCtx.IsZero() {
            // At this point, newCtx.MultiStore() is cache-wrapped, or something else
            // replaced by the AnteHandler. We want the original multistore, not one
@@ -603,6 +608,7 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (*s

        msgRoute := msg.Route()
        handler := app.router.Route(ctx, msgRoute)

        if handler == nil {
            return nil, sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "unrecognized message route: %s; message index: %d", msgRoute, i)
        }
@@ -622,6 +628,7 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (*s
        // Note: Each message result's data must be length-prefixed in order to
        // separate each result.
        events = events.AppendEvents(msgEvents)

        data = append(data, msgResult.Data...)
        msgLogs = append(msgLogs, sdk.NewABCIMessageLog(uint16(i), msgResult.Log, msgEvents))
    }
@@ -80,6 +80,7 @@ the flag --nosort is set.
    cmd.Flags().Uint32(flagIndex, 0, "Address index number for HD derivation")
    cmd.Flags().Bool(flags.FlagIndentResponse, false, "Add indent to JSON response")
    cmd.Flags().String(flagKeyAlgo, string(hd.Secp256k1Type), "Key signing algorithm to generate keys for")

    return cmd
}
@@ -94,6 +95,7 @@ func getKeybase(transient bool, buf io.Reader) (keyring.Keyring, error) {
func runAddCmd(cmd *cobra.Command, args []string) error {
    inBuf := bufio.NewReader(cmd.InOrStdin())
    kb, err := getKeybase(viper.GetBool(flags.FlagDryRun), inBuf)

    if err != nil {
        return err
    }
@@ -131,6 +133,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
        if err2 != nil {
            return err2
        }

        if !response {
            return errors.New("aborted")
        }
@@ -155,6 +158,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
            if err != nil {
                return err
            }

            pks = append(pks, k.GetPubKey())
        }
@@ -171,6 +175,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
        }

        cmd.PrintErrf("Key %q saved to disk.\n", name)

        return nil
    }
}
@@ -180,10 +185,11 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
        if err != nil {
            return err
        }
        _, err = kb.SavePubKey(name, pk, algo.Name())
        if err != nil {

        if _, err := kb.SavePubKey(name, pk, algo.Name()); err != nil {
            return err
        }

        return nil
    }
@@ -202,6 +208,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
    if viper.GetBool(flags.FlagUseLedger) {
        bech32PrefixAccAddr := sdk.GetConfig().GetBech32AccountAddrPrefix()
        info, err := kb.SaveLedgerKey(name, hd.Secp256k1, bech32PrefixAccAddr, coinType, account, index)

        if err != nil {
            return err
        }
@@ -210,8 +217,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
    }

    // Get bip39 mnemonic
    var mnemonic string
    var bip39Passphrase string
    var mnemonic, bip39Passphrase string

    if interactive || viper.GetBool(flagRecover) {
        bip39Message := "Enter your bip39 mnemonic"
@@ -314,6 +320,7 @@ func printCreate(cmd *cobra.Command, info keyring.Info, showMnemonic bool, mnemo
        if err != nil {
            return err
        }

        cmd.PrintErrln(string(jsonString))
    default:
        return fmt.Errorf("invalid output format %s", output)
@@ -47,11 +47,13 @@ type bech32Output struct {

func newBech32Output(bs []byte) bech32Output {
    out := bech32Output{Formats: make([]string, len(bech32Prefixes))}

    for i, prefix := range bech32Prefixes {
        bech32Addr, err := bech32.ConvertAndEncode(prefix, bs)
        if err != nil {
            panic(err)
        }

        out.Formats[i] = bech32Addr
    }
@@ -87,12 +89,15 @@ hexadecimal into bech32 cosmos prefixed format and vice versa.
func parseKey(cmd *cobra.Command, args []string) error {
    addr := strings.TrimSpace(args[0])
    outstream := cmd.OutOrStdout()

    if len(addr) == 0 {
        return errors.New("couldn't parse empty input")
    }

    if !(runFromBech32(outstream, addr) || runFromHex(outstream, addr)) {
        return errors.New("couldn't find valid bech32 nor hex data")
    }

    return nil
}
@@ -102,7 +107,9 @@ func runFromBech32(w io.Writer, bech32str string) bool {
    if err != nil {
        return false
    }

    displayParseKeyInfo(w, newHexOutput(hrp, bz))

    return true
}
@@ -112,31 +119,33 @@ func runFromHex(w io.Writer, hexstr string) bool {
    if err != nil {
        return false
    }

    displayParseKeyInfo(w, newBech32Output(bz))

    return true
}

func displayParseKeyInfo(w io.Writer, stringer fmt.Stringer) {
    var out []byte
    var err error
    var (
        err error
        out []byte
    )

    switch viper.Get(cli.OutputFlag) {
    case OutputFormatText:
        out, err = yaml.Marshal(&stringer)

    case OutputFormatJSON:

        if viper.GetBool(flags.FlagIndentResponse) {
            out, err = KeysCdc.MarshalJSONIndent(stringer, "", "  ")
        } else {
            out = KeysCdc.MustMarshalJSON(stringer)
        }

    }

    if err != nil {
        panic(err)
    }

    fmt.Fprintln(w, string(out))
    _, _ = fmt.Fprintln(w, string(out))
}
@@ -142,6 +142,7 @@ func BroadcastTx(ctx context.CLIContext, txf Factory, msgs ...sdk.Msg) error {

        buf := bufio.NewReader(os.Stdin)
        ok, err := input.GetConfirmation("confirm transaction before signing and broadcasting", buf, os.Stderr)

        if err != nil || !ok {
            _, _ = fmt.Fprintf(os.Stderr, "%s\n", "cancelled transaction")
            return err
@@ -168,7 +169,6 @@ func BroadcastTx(ctx context.CLIContext, txf Factory, msgs ...sdk.Msg) error {
func WriteGeneratedTxResponse(
    ctx context.CLIContext, w http.ResponseWriter, txg Generator, br rest.BaseReq, msgs ...sdk.Msg,
) {

    gasAdj, ok := rest.ParseFloat64OrReturnBadRequest(w, br.GasAdjustment, flags.DefaultGasAdjustment)
    if !ok {
        return
@@ -231,6 +231,7 @@ func BuildUnsignedTx(txf Factory, msgs ...sdk.Msg) (ClientTx, error) {
    }

    fees := txf.fees

    if !txf.gasPrices.IsZero() {
        if !fees.IsZero() {
            return nil, errors.New("cannot provide both fees and gas prices")
@@ -241,6 +242,7 @@ func BuildUnsignedTx(txf Factory, msgs ...sdk.Msg) (ClientTx, error) {
        // Derive the fees based on the provided gas prices, where
        // fee = ceil(gasPrice * gasLimit).
        fees = make(sdk.Coins, len(txf.gasPrices))

        for i, gp := range txf.gasPrices {
            fee := gp.Amount.Mul(glDec)
            fees[i] = sdk.NewCoin(gp.Denom, fee.Ceil().RoundInt())
@@ -294,7 +296,6 @@ func BuildSimTx(txf Factory, msgs ...sdk.Msg) ([]byte, error) {
func CalculateGas(
    queryFunc func(string, []byte) ([]byte, int64, error), txf Factory, msgs ...sdk.Msg,
) (sdk.SimulationResponse, uint64, error) {

    txBytes, err := BuildSimTx(txf, msgs...)
    if err != nil {
        return sdk.SimulationResponse{}, 0, err
@@ -334,6 +335,7 @@ func PrepareFactory(ctx context.CLIContext, txf Factory) (Factory, error) {
    if initNum == 0 {
        txf = txf.WithAccountNumber(num)
    }

    if initSeq == 0 {
        txf = txf.WithSequence(seq)
    }
@@ -42,7 +42,7 @@ func (ac *AminoCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMars
    ac.amino.MustUnmarshalBinaryLengthPrefixed(bz, ptr)
}

func (ac *AminoCodec) MarshalJSON(o interface{}) ([]byte, error) { // nolint: stdmethods
func (ac *AminoCodec) MarshalJSON(o interface{}) ([]byte, error) {
    return ac.amino.MarshalJSON(o)
}
@@ -50,7 +50,7 @@ func (ac *AminoCodec) MustMarshalJSON(o interface{}) []byte {
    return ac.amino.MustMarshalJSON(o)
}

func (ac *AminoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error { // nolint: stdmethods
func (ac *AminoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error {
    return ac.amino.UnmarshalJSON(bz, ptr)
}
@@ -34,10 +34,10 @@ type (
    }

    JSONMarshaler interface {
        MarshalJSON(o interface{}) ([]byte, error) // nolint: stdmethods
        MarshalJSON(o interface{}) ([]byte, error)
        MustMarshalJSON(o interface{}) []byte

        UnmarshalJSON(bz []byte, ptr interface{}) error // nolint: stdmethods
        UnmarshalJSON(bz []byte, ptr interface{}) error
        MustUnmarshalJSON(bz []byte, ptr interface{})
    }
@@ -46,7 +46,7 @@ func (hc *HybridCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMar
    hc.proto.MustUnmarshalBinaryLengthPrefixed(bz, ptr)
}

func (hc *HybridCodec) MarshalJSON(o interface{}) ([]byte, error) { // nolint: stdmethods
func (hc *HybridCodec) MarshalJSON(o interface{}) ([]byte, error) {
    return hc.amino.MarshalJSON(o)
}
@@ -54,7 +54,7 @@ func (hc *HybridCodec) MustMarshalJSON(o interface{}) []byte {
    return hc.amino.MustMarshalJSON(o)
}

func (hc *HybridCodec) UnmarshalJSON(bz []byte, ptr interface{}) error { // nolint: stdmethods
func (hc *HybridCodec) UnmarshalJSON(bz []byte, ptr interface{}) error {
    return hc.amino.UnmarshalJSON(bz, ptr)
}
@@ -89,7 +89,7 @@ func (pc *ProtoCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMars
    }
}

func (pc *ProtoCodec) MarshalJSON(o interface{}) ([]byte, error) { // nolint: stdmethods
func (pc *ProtoCodec) MarshalJSON(o interface{}) ([]byte, error) {
    m, ok := o.(ProtoMarshaler)
    if !ok {
        return nil, fmt.Errorf("cannot protobuf JSON encode unsupported type: %T", o)
@@ -107,7 +107,7 @@ func (pc *ProtoCodec) MustMarshalJSON(o interface{}) []byte {
    return bz
}

func (pc *ProtoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error { // nolint: stdmethods
func (pc *ProtoCodec) UnmarshalJSON(bz []byte, ptr interface{}) error {
    m, ok := ptr.(ProtoMarshaler)
    if !ok {
        return fmt.Errorf("cannot protobuf JSON decode unsupported type: %T", ptr)
@@ -49,6 +49,7 @@ func ArmorInfoBytes(bz []byte) string {
        headerType: "Info",
        headerVersion: "0.0.0",
    }

    return armor.EncodeArmor(blockTypeKeyInfo, header, bz)
}
@@ -60,6 +61,7 @@ func ArmorPubKeyBytes(bz []byte, algo string) string {
    if algo != "" {
        header[headerType] = algo
    }

    return armor.EncodeArmor(blockTypePubKey, header, bz)
}
@@ -76,6 +78,7 @@ func UnarmorInfoBytes(armorStr string) ([]byte, error) {
    if header[headerVersion] != "0.0.0" {
        return nil, fmt.Errorf("unrecognized version: %v", header[headerVersion])
    }

    return bz, nil
}
@@ -93,6 +96,7 @@ func UnarmorPubKeyBytes(armorStr string) (bz []byte, algo string, err error) {
        if header[headerType] == "" {
            header[headerType] = defaultAlgo
        }

        return bz, header[headerType], err
    case "":
        return nil, "", fmt.Errorf("header's version field is empty")
@@ -107,10 +111,12 @@ func unarmorBytes(armorStr, blockType string) (bz []byte, header map[string]stri
    if err != nil {
        return
    }

    if bType != blockType {
        err = fmt.Errorf("unrecognized armor type %q, expected: %q", bType, blockType)
        return
    }

    return
}
@@ -124,10 +130,13 @@ func EncryptArmorPrivKey(privKey crypto.PrivKey, passphrase string, algo string)
        "kdf": "bcrypt",
        "salt": fmt.Sprintf("%X", saltBytes),
    }

    if algo != "" {
        header[headerType] = algo
    }

    armorStr := armor.EncodeArmor(blockTypePrivKey, header, encBytes)

    return armorStr
}
@@ -137,11 +146,14 @@ func EncryptArmorPrivKey(privKey crypto.PrivKey, passphrase string, algo string)
func encryptPrivKey(privKey crypto.PrivKey, passphrase string) (saltBytes []byte, encBytes []byte) {
    saltBytes = crypto.CRandBytes(16)
    key, err := bcrypt.GenerateFromPassword(saltBytes, []byte(passphrase), BcryptSecurityParameter)

    if err != nil {
        panic(sdkerrors.Wrap(err, "error generating bcrypt key from passphrase"))
    }

    key = crypto.Sha256(key) // get 32 bytes
    privKeyBytes := privKey.Bytes()

    return saltBytes, xsalsa20symmetric.EncryptSymmetric(privKeyBytes, key)
}
@@ -151,24 +163,30 @@ func UnarmorDecryptPrivKey(armorStr string, passphrase string) (privKey crypto.P
    if err != nil {
        return privKey, "", err
    }

    if blockType != blockTypePrivKey {
        return privKey, "", fmt.Errorf("unrecognized armor type: %v", blockType)
    }

    if header["kdf"] != "bcrypt" {
        return privKey, "", fmt.Errorf("unrecognized KDF type: %v", header["kdf"])
    }

    if header["salt"] == "" {
        return privKey, "", fmt.Errorf("missing salt bytes")
    }

    saltBytes, err := hex.DecodeString(header["salt"])
    if err != nil {
        return privKey, "", fmt.Errorf("error decoding salt: %v", err.Error())
    }

    privKey, err = decryptPrivKey(saltBytes, encBytes, passphrase)

    if header[headerType] == "" {
        header[headerType] = defaultAlgo
    }

    return privKey, header[headerType], err
}
@@ -177,13 +195,15 @@ func decryptPrivKey(saltBytes []byte, encBytes []byte, passphrase string) (privK
    if err != nil {
        return privKey, sdkerrors.Wrap(err, "error generating bcrypt key from passphrase")
    }

    key = crypto.Sha256(key) // Get 32 bytes

    privKeyBytes, err := xsalsa20symmetric.DecryptSymmetric(encBytes, key)
    if err != nil && err.Error() == "Ciphertext decryption failed" {
        return privKey, sdkerrors.ErrWrongPassword
    } else if err != nil {
        return privKey, err
    }
    privKey, err = cryptoAmino.PrivKeyFromBytes(privKeyBytes)
    return privKey, err

    return cryptoAmino.PrivKeyFromBytes(privKeyBytes)
}
@@ -49,14 +49,17 @@ func NewParamsFromPath(path string) (*BIP44Params, error) {
    if err != nil {
        return nil, err
    }

    coinType, err := hardenedInt(spl[1])
    if err != nil {
        return nil, err
    }

    account, err := hardenedInt(spl[2])
    if err != nil {
        return nil, err
    }

    change, err := hardenedInt(spl[3])
    if err != nil {
        return nil, err
@@ -76,6 +79,7 @@ func NewParamsFromPath(path string) (*BIP44Params, error) {
        return nil,
            fmt.Errorf("second and third field in path must be hardened (ie. contain the suffix ', got %v and %v", spl[1], spl[2])
    }

    if isHardened(spl[3]) || isHardened(spl[4]) {
        return nil,
            fmt.Errorf("fourth and fifth field in path must not be hardened (ie. not contain the suffix ', got %v and %v", spl[3], spl[4])
@@ -97,12 +101,15 @@ func NewParamsFromPath(path string) (*BIP44Params, error) {
func hardenedInt(field string) (uint32, error) {
    field = strings.TrimSuffix(field, "'")
    i, err := strconv.Atoi(field)

    if err != nil {
        return 0, err
    }

    if i < 0 {
        return 0, fmt.Errorf("fields must not be negative. got %d", i)
    }

    return uint32(i), nil
}
@@ -123,6 +130,7 @@ func (p BIP44Params) DerivationPath() []uint32 {
    if p.Change {
        change = 1
    }

    return []uint32{
        p.Purpose,
        p.CoinType,
@@ -161,6 +169,7 @@ func ComputeMastersFromSeed(seed []byte) (secret [32]byte, chainCode [32]byte) {
func DerivePrivateKeyForPath(privKeyBytes [32]byte, chainCode [32]byte, path string) ([32]byte, error) {
    data := privKeyBytes
    parts := strings.Split(path, "/")

    for _, part := range parts {
        // do we have an apostrophe?
        harden := part[len(part)-1:] == "'"
@@ -168,17 +177,23 @@ func DerivePrivateKeyForPath(privKeyBytes [32]byte, chainCode [32]byte, path str
        if harden {
            part = part[:len(part)-1]
        }

        idx, err := strconv.Atoi(part)

        if err != nil {
            return [32]byte{}, fmt.Errorf("invalid BIP 32 path: %s", err)
        }

        if idx < 0 {
            return [32]byte{}, errors.New("invalid BIP 32 path: index negative ot too large")
        }

        data, chainCode = derivePrivateKey(data, chainCode, uint32(idx), harden)
    }

    var derivedKey [32]byte
    n := copy(derivedKey[:], data[:])

    if n != 32 || len(data) != 32 {
        return [32]byte{}, fmt.Errorf("expected a (secp256k1) key of length 32, got length: %v", len(data))
    }
@@ -193,8 +208,10 @@ func DerivePrivateKeyForPath(privKeyBytes [32]byte, chainCode [32]byte, path str
// - https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki
func derivePrivateKey(privKeyBytes [32]byte, chainCode [32]byte, index uint32, harden bool) ([32]byte, [32]byte) {
    var data []byte

    if harden {
        index |= 0x80000000

        data = append([]byte{byte(0)}, privKeyBytes[:]...)
    } else {
        // this can't return an error:
@@ -208,9 +225,11 @@ func derivePrivateKey(privKeyBytes [32]byte, chainCode [32]byte, index uint32, h
        data = public[:]
        */
    }

    data = append(data, uint32ToBytes(index)...)
    data2, chainCode2 := i64(chainCode[:], data)
    x := addScalars(privKeyBytes[:], data2[:])

    return x, chainCode2
}
@@ -222,12 +241,14 @@ func addScalars(a []byte, b []byte) [32]byte {
    x := sInt.Mod(sInt, btcec.S256().N).Bytes()
    x2 := [32]byte{}
    copy(x2[32-len(x):], x)

    return x2
}

func uint32ToBytes(i uint32) []byte {
    b := [4]byte{}
    binary.BigEndian.PutUint32(b[:], i)

    return b[:]
}
@@ -19,15 +19,19 @@ type memIterator struct {

func newMemIterator(start, end []byte, items *list.List, ascending bool) *memIterator {
    itemsInDomain := make([]*tmkv.Pair, 0)

    var entered bool

    for e := items.Front(); e != nil; e = e.Next() {
        item := e.Value.(*tmkv.Pair)
        if !dbm.IsKeyInDomain(item.Key, start, end) {
            if entered {
                break
            }

            continue
        }

        itemsInDomain = append(itemsInDomain, item)
        entered = true
    }
@@ -56,6 +60,7 @@ func (mi *memIterator) assertValid() {

func (mi *memIterator) Next() {
    mi.assertValid()

    if mi.ascending {
        mi.items = mi.items[1:]
    } else {
@@ -65,17 +70,21 @@ func (mi *memIterator) Next() {

func (mi *memIterator) Key() []byte {
    mi.assertValid()

    if mi.ascending {
        return mi.items[0].Key
    }

    return mi.items[len(mi.items)-1].Key
}

func (mi *memIterator) Value() []byte {
    mi.assertValid()

    if mi.ascending {
        return mi.items[0].Value
    }

    return mi.items[len(mi.items)-1].Value
}
@@ -28,6 +28,7 @@ func newCacheMergeIterator(parent, cache types.Iterator, ascending bool) *cacheM
        cache: cache,
        ascending: ascending,
    }

    return iter
}
@@ -36,16 +37,19 @@ func newCacheMergeIterator(parent, cache types.Iterator, ascending bool) *cacheM
func (iter *cacheMergeIterator) Domain() (start, end []byte) {
    startP, endP := iter.parent.Domain()
    startC, endC := iter.cache.Domain()

    if iter.compare(startP, startC) < 0 {
        start = startP
    } else {
        start = startC
    }

    if iter.compare(endP, endC) < 0 {
        end = endC
    } else {
        end = endP
    }

    return start, end
}
@@ -101,6 +105,7 @@ func (iter *cacheMergeIterator) Key() []byte {

    // Both are valid. Compare keys.
    keyP, keyC := iter.parent.Key(), iter.cache.Key()

    cmp := iter.compare(keyP, keyC)
    switch cmp {
    case -1: // parent < cache
@@ -131,6 +136,7 @@ func (iter *cacheMergeIterator) Value() []byte {

    // Both are valid. Compare keys.
    keyP, keyC := iter.parent.Key(), iter.cache.Key()

    cmp := iter.compare(keyP, keyC)
    switch cmp {
    case -1: // parent < cache
@@ -173,6 +179,7 @@ func (iter *cacheMergeIterator) compare(a, b []byte) int {
    if iter.ascending {
        return bytes.Compare(a, b)
    }

    return bytes.Compare(a, b) * -1
}
@@ -185,7 +192,6 @@ func (iter *cacheMergeIterator) skipCacheDeletes(until []byte) {
    for iter.cache.Valid() &&
        iter.cache.Value() == nil &&
        (until == nil || iter.compare(iter.cache.Key(), until) < 0) {

        iter.cache.Next()
    }
}
@@ -210,26 +216,24 @@ func (iter *cacheMergeIterator) skipUntilExistsOrInvalid() bool {
        // Compare parent and cache.
        keyP := iter.parent.Key()
        keyC := iter.cache.Key()
        switch iter.compare(keyP, keyC) {

        switch iter.compare(keyP, keyC) {
        case -1: // parent < cache.
            return true

        case 0: // parent == cache.

            // Skip over if cache item is a delete.
            valueC := iter.cache.Value()
            if valueC == nil {
                iter.parent.Next()
                iter.cache.Next()

                continue
            }
            // Cache is not a delete.

            return true // cache exists.

        case 1: // cache < parent

            // Skip over if cache item is a delete.
            valueC := iter.cache.Value()
            if valueC == nil {
@@ -100,6 +100,7 @@ func (store *Store) Write() {
    // We need a copy of all of the keys.
    // Not the best, but probably not a bottleneck depending.
    keys := make([]string, 0, len(store.cache))

    for key, dbValue := range store.cache {
        if dbValue.dirty {
            keys = append(keys, key)
@@ -112,6 +113,7 @@ func (store *Store) Write() {
    // at least happen atomically.
    for _, key := range keys {
        cacheValue := store.cache[key]

        switch {
        case cacheValue.deleted:
            store.parent.Delete([]byte(key))
@@ -178,8 +180,10 @@ func (store *Store) dirtyItems(start, end []byte) {

    for key := range store.unsortedCache {
        cacheValue := store.cache[key]

        if dbm.IsKeyInDomain([]byte(key), start, end) {
            unsorted = append(unsorted, &tmkv.Pair{Key: []byte(key), Value: cacheValue.value})

            delete(store.unsortedCache, key)
        }
    }
@@ -192,9 +196,11 @@ func (store *Store) dirtyItems(start, end []byte) {
        uitem := unsorted[0]
        sitem := e.Value.(*tmkv.Pair)
        comp := bytes.Compare(uitem.Key, sitem.Key)

        switch comp {
        case -1:
            unsorted = unsorted[1:]

            store.sortedCache.InsertBefore(uitem, e)
        case 1:
            e = e.Next()
@@ -208,7 +214,6 @@ func (store *Store) dirtyItems(start, end []byte) {
    for _, kvp := range unsorted {
        store.sortedCache.PushBack(kvp)
    }

}

//----------------------------------------
@@ -149,8 +149,8 @@ func (pi *prefixIterator) Next() {
    if !pi.valid {
        panic("prefixIterator invalid, cannot call Next()")
    }
    pi.iter.Next()
    if !pi.iter.Valid() || !bytes.HasPrefix(pi.iter.Key(), pi.prefix) {

    if pi.iter.Next(); !pi.iter.Valid() || !bytes.HasPrefix(pi.iter.Key(), pi.prefix) {
        // TODO: shouldn't pi be set to nil instead?
        pi.valid = false
    }
@@ -161,8 +161,10 @@ func (pi *prefixIterator) Key() (key []byte) {
    if !pi.valid {
        panic("prefixIterator invalid, cannot call Key()")
    }

    key = pi.iter.Key()
    key = stripPrefix(key, pi.prefix)

    return
}
@@ -171,6 +173,7 @@ func (pi *prefixIterator) Value() []byte {
    if !pi.valid {
        panic("prefixIterator invalid, cannot call Value()")
    }

    return pi.iter.Value()
}
@@ -194,6 +197,7 @@ func stripPrefix(key []byte, prefix []byte) []byte {
    if len(key) < len(prefix) || !bytes.Equal(key[:len(prefix)], prefix) {
        panic("should not happen")
    }

    return key[len(prefix):]
}
@@ -55,16 +55,6 @@ func (sm *merkleMap) sort() {
    sm.sorted = true
}

// kvPairs sorts the merkleMap kv.Pair objects and returns a copy as a slice.
func (sm *merkleMap) kvPairs() kv.Pairs {
    sm.sort()

    kvs := make(kv.Pairs, len(sm.kvs))
    copy(kvs, sm.kvs)

    return kvs
}

// kvPair defines a type alias for kv.Pair so that we can create bytes to hash
// when constructing the merkle root. Note, key and values are both length-prefixed.
type kvPair kv.Pair
@@ -6,10 +6,11 @@ import (
    "path/filepath"
    "testing"

    "github.com/cosmos/cosmos-sdk/std"
    "github.com/stretchr/testify/require"
    tmtypes "github.com/tendermint/tendermint/types"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/cosmos/cosmos-sdk/codec"
    "github.com/cosmos/cosmos-sdk/server"
    "github.com/cosmos/cosmos-sdk/simapp"
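Most of the remaining test-file hunks come from the "goimports -w files" step in the commit message: the "github.com/cosmos/cosmos-sdk/std" import is moved out of the standard-library group into its own group. A hedged sketch of the resulting layout (import block shown in isolation; the package name is illustrative):

package keeper_test

import (
    // standard library imports first
    "testing"

    // external and SDK imports follow in separate, blank-line-delimited groups
    "github.com/stretchr/testify/require"

    "github.com/cosmos/cosmos-sdk/std"
)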
@@ -66,7 +66,6 @@ func NewBaseReq(
    from, memo, chainID string, gas, gasAdjustment string, accNumber, seq uint64,
    fees sdk.Coins, gasPrices sdk.DecCoins, simulate bool,
) BaseReq {

    return BaseReq{
        From: strings.TrimSpace(from),
        Memo: strings.TrimSpace(memo),
@@ -154,6 +153,7 @@ func CheckError(w http.ResponseWriter, status int, err error) bool {
        WriteErrorResponse(w, status, err.Error())
        return true
    }

    return false
}
@@ -204,8 +204,8 @@ func ParseUint64OrReturnBadRequest(w http.ResponseWriter, s string) (n uint64, o

    n, err = strconv.ParseUint(s, 10, 64)
    if err != nil {
        err := fmt.Errorf("'%s' is not a valid uint64", s)
        WriteErrorResponse(w, http.StatusBadRequest, err.Error())
        WriteErrorResponse(w, http.StatusBadRequest, fmt.Sprintf("'%s' is not a valid uint64", s))

        return n, false
    }
@@ -358,11 +358,13 @@ func ParseHTTPArgsWithLimit(r *http.Request, defaultLimit int) (tags []string, p

        var value string
        value, err = url.QueryUnescape(values[0])

        if err != nil {
            return tags, page, limit, err
        }

        var tag string

        switch key {
        case types.TxHeightKey:
            tag = fmt.Sprintf("%s=%s", key, value)
@@ -419,5 +421,6 @@ func ParseQueryParamBool(r *http.Request, param string) bool {
    if value, err := strconv.ParseBool(r.FormValue(param)); err == nil {
        return value
    }

    return false
}
@@ -1,9 +1,10 @@
package keeper_test

import (
    "github.com/cosmos/cosmos-sdk/std"
    "testing"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/require"

    "github.com/cosmos/cosmos-sdk/simapp"
@@ -3,12 +3,14 @@ package cli
import (
    "encoding/json"
    "fmt"

    "github.com/stretchr/testify/require"

    clientkeys "github.com/cosmos/cosmos-sdk/client/keys"
    "github.com/cosmos/cosmos-sdk/tests"
    "github.com/cosmos/cosmos-sdk/tests/cli/helpers"
    sdk "github.com/cosmos/cosmos-sdk/types"
    "github.com/cosmos/cosmos-sdk/x/auth"
    "github.com/stretchr/testify/require"
)

// TxSend is simcli tx send
@@ -4,12 +4,14 @@ package cli_test

import (
    "fmt"
    "testing"

    "github.com/stretchr/testify/require"

    "github.com/cosmos/cosmos-sdk/tests"
    "github.com/cosmos/cosmos-sdk/tests/cli/helpers"
    sdk "github.com/cosmos/cosmos-sdk/types"
    bankcli "github.com/cosmos/cosmos-sdk/x/bank/client/cli_test"
    "github.com/stretchr/testify/require"
    "testing"
)

func TestCLISend(t *testing.T) {
@@ -1,10 +1,11 @@
package keeper_test

import (
    "github.com/cosmos/cosmos-sdk/std"
    "testing"
    "time"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/suite"
    abci "github.com/tendermint/tendermint/abci/types"
    tmkv "github.com/tendermint/tendermint/libs/kv"
@@ -2,9 +2,10 @@ package simulation_test

import (
    "fmt"
    "github.com/cosmos/cosmos-sdk/std"
    "testing"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/require"
    tmkv "github.com/tendermint/tendermint/libs/kv"
@@ -2,9 +2,10 @@ package simulation_test

import (
    "fmt"
    "github.com/cosmos/cosmos-sdk/std"
    "testing"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/require"

    "github.com/tendermint/tendermint/crypto/ed25519"
@@ -19,12 +19,13 @@ func NewDecodeStore(cdc types.Codec) func(kvA, kvB tmkv.Pair) string {
        if err != nil {
            panic(fmt.Sprintf("cannot unmarshal evidence: %s", err.Error()))
        }

        evidenceB, err := cdc.UnmarshalEvidence(kvB.Value)
        if err != nil {
            panic(fmt.Sprintf("cannot unmarshal evidence: %s", err.Error()))
        }
        return fmt.Sprintf("%v\n%v", evidenceA, evidenceB)

        return fmt.Sprintf("%v\n%v", evidenceA, evidenceB)
    default:
        panic(fmt.Sprintf("invalid %s key prefix %X", types.ModuleName, kvA.Key[:1]))
    }
@@ -25,6 +25,7 @@ func GenEvidences(_ *rand.Rand, _ []simtypes.Account) []exported.Evidence {
// RandomizedGenState generates a random GenesisState for evidence
func RandomizedGenState(simState *module.SimulationState) {
    var ev []exported.Evidence

    simState.AppParams.GetOrGenerate(
        simState.Cdc, evidence, &ev, simState.Rand,
        func(r *rand.Rand) { ev = GenEvidences(r, simState.Accounts) },
@@ -1,10 +1,11 @@
package gov_test

import (
    "github.com/cosmos/cosmos-sdk/std"
    "testing"
    "time"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/require"
    abci "github.com/tendermint/tendermint/abci/types"
@@ -1,12 +1,13 @@
package keeper_test

import (
    "github.com/cosmos/cosmos-sdk/std"
    "math/rand"
    "strings"
    "testing"
    "time"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/require"
    abci "github.com/tendermint/tendermint/abci/types"
@@ -3,10 +3,11 @@ package simulation_test
import (
    "encoding/binary"
    "fmt"
    "github.com/cosmos/cosmos-sdk/std"
    "testing"
    "time"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/require"

    "github.com/tendermint/tendermint/crypto/ed25519"
@@ -18,13 +18,16 @@ import (
// The counterparty hops are stored in the inverse order as the channel's.
func (k Keeper) CounterpartyHops(ctx sdk.Context, ch types.Channel) ([]string, bool) {
    counterPartyHops := make([]string, len(ch.ConnectionHops))

    for i, hop := range ch.ConnectionHops {
        connection, found := k.connectionKeeper.GetConnection(ctx, hop)
        conn, found := k.connectionKeeper.GetConnection(ctx, hop)
        if !found {
            return []string{}, false
        }
        counterPartyHops[len(counterPartyHops)-1-i] = connection.GetCounterparty().GetConnectionID()

        counterPartyHops[len(counterPartyHops)-1-i] = conn.GetCounterparty().GetConnectionID()
    }

    return counterPartyHops, true
}
@@ -41,7 +44,6 @@ func (k Keeper) ChanOpenInit(
    version string,
) (*capability.Capability, error) {
    // channel identifier and connection hop length checked on msg.ValidateBasic()

    _, found := k.GetChannel(ctx, portID, channelID)
    if found {
        return nil, sdkerrors.Wrap(types.ErrChannelExists, channelID)
@@ -70,6 +72,7 @@ func (k Keeper) ChanOpenInit(
    if err != nil {
        return nil, sdkerrors.Wrap(types.ErrInvalidChannelCapability, err.Error())
    }

    k.SetNextSequenceSend(ctx, portID, channelID, 1)
    k.SetNextSequenceRecv(ctx, portID, channelID, 1)
@@ -92,7 +95,6 @@ func (k Keeper) ChanOpenTry(
    proofHeight uint64,
) (*capability.Capability, error) {
    // channel identifier and connection hop length checked on msg.ValidateBasic()

    previousChannel, found := k.GetChannel(ctx, portID, channelID)
    if found && !(previousChannel.State == exported.INIT &&
        previousChannel.Ordering == order &&
@@ -100,7 +102,7 @@ func (k Keeper) ChanOpenTry(
        previousChannel.Counterparty.ChannelID == counterparty.ChannelID &&
        previousChannel.ConnectionHops[0] == connectionHops[0] &&
        previousChannel.Version == version) {
        sdkerrors.Wrap(types.ErrInvalidChannel, "cannot relay connection attempt")
        return nil, sdkerrors.Wrap(types.ErrInvalidChannel, "cannot relay connection attempt")
    }

    if !k.portKeeper.Authenticate(ctx, portCap, portID) {
@@ -150,6 +152,7 @@ func (k Keeper) ChanOpenTry(
    if err != nil {
        return nil, sdkerrors.Wrap(types.ErrInvalidChannelCapability, err.Error())
    }

    k.SetNextSequenceSend(ctx, portID, channelID, 1)
    k.SetNextSequenceRecv(ctx, portID, channelID, 1)
@@ -328,6 +331,7 @@ func (k Keeper) ChanCloseInit(
    channel.State = exported.CLOSED
    k.SetChannel(ctx, portID, channelID, channel)
    k.Logger(ctx).Info("channel close initialized: portID (%s), channelID (%s)", portID, channelID)

    return nil
}
@@ -2,10 +2,11 @@ package simulation_test

import (
    "fmt"
    "github.com/cosmos/cosmos-sdk/std"
    "testing"
    "time"

    "github.com/cosmos/cosmos-sdk/std"

    gogotypes "github.com/gogo/protobuf/types"

    "github.com/stretchr/testify/require"
@@ -2,12 +2,14 @@ package cli

import (
    "fmt"

    "github.com/stretchr/testify/require"

    clientkeys "github.com/cosmos/cosmos-sdk/client/keys"
    "github.com/cosmos/cosmos-sdk/tests"
    "github.com/cosmos/cosmos-sdk/tests/cli/helpers"
    sdk "github.com/cosmos/cosmos-sdk/types"
    "github.com/cosmos/cosmos-sdk/x/staking"
    "github.com/stretchr/testify/require"
)

// TxStakingCreateValidator is simcli tx staking create-validator
@@ -3,14 +3,16 @@
package cli_test

import (
    "testing"

    "github.com/stretchr/testify/require"
    "github.com/tendermint/tendermint/crypto/ed25519"

    "github.com/cosmos/cosmos-sdk/tests"
    "github.com/cosmos/cosmos-sdk/tests/cli/helpers"
    sdk "github.com/cosmos/cosmos-sdk/types"
    bankcli "github.com/cosmos/cosmos-sdk/x/bank/client/cli_test"
    stakingcli "github.com/cosmos/cosmos-sdk/x/staking/client/cli_test"
    "github.com/stretchr/testify/require"
    "github.com/tendermint/tendermint/crypto/ed25519"
    "testing"
)

func TestCLICreateValidator(t *testing.T) {
@@ -1,11 +1,12 @@
package staking_test

import (
    "github.com/cosmos/cosmos-sdk/std"
    abci "github.com/tendermint/tendermint/abci/types"
    "github.com/tendermint/tendermint/crypto"
    "github.com/tendermint/tendermint/crypto/secp256k1"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/cosmos/cosmos-sdk/codec"
    "github.com/cosmos/cosmos-sdk/simapp"
    sdk "github.com/cosmos/cosmos-sdk/types"
@@ -1,9 +1,10 @@
package keeper_test

import (
    "github.com/cosmos/cosmos-sdk/std"
    "testing"

    "github.com/cosmos/cosmos-sdk/std"

    abci "github.com/tendermint/tendermint/abci/types"

    "github.com/cosmos/cosmos-sdk/codec"
@@ -2,10 +2,11 @@ package simulation_test

import (
    "fmt"
    "github.com/cosmos/cosmos-sdk/std"
    "testing"
    "time"

    "github.com/cosmos/cosmos-sdk/std"

    "github.com/stretchr/testify/require"

    "github.com/tendermint/tendermint/crypto/ed25519"