From df6414d040e7a4909f7532960f654bbbf897922b Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Thu, 8 Oct 2020 20:47:05 -0400 Subject: [PATCH 01/40] adr --- docs/architecture/adr-032-change-pubkey.md | 81 ++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 docs/architecture/adr-032-change-pubkey.md diff --git a/docs/architecture/adr-032-change-pubkey.md b/docs/architecture/adr-032-change-pubkey.md new file mode 100644 index 000000000..bc20b3439 --- /dev/null +++ b/docs/architecture/adr-032-change-pubkey.md @@ -0,0 +1,81 @@ +# ADR 032: Change PubKey + +## Changelog + +- 30-09-2020: Initial Draft + +## Status + +Proposed + +## Context + +Currently, in the Cosmos SDK, the address of an auth account is always based on the hash of the public key. Once an account is created, the public key for the account is set in stone, and cannot be changed. This can be a problem for users, as key rotation is a useful security practice, but is not possible currently. Furthermore, as multisigs are a type of pubkey, once a multisig for an account is set, it can not be updated. This is problematic, as multisigs are often used by organizations or companies, who may need to change their set of multisig signers for internal reasons. + +Transferring all the assets of an account to a new account with the updated pubkey is not sufficient, because some "engagements" of an account are not easily transferable. For example, in staking, to transfer bonded Atoms, an account would have to unbond all delegations and wait the three week unbonding period. Even more significantly, for validator operators, ownership over a validator is not transferrable at all, meaning that the operator key for a validator can never be updated, leading to poor operational security for validators. + +## Decision + +We propose the creation of a new feature called `changepubkey` that is an extension to `auth` that allows accounts to update the public key associated with their account, while keeping the address the same. + +This is possible because the Cosmos SDK `StdAccount` stores the public key for an account in state, instead of making the assumption that the public key is included in the transaction (whether explicitly or implicitly through the signature) as in other blockchains such as Bitcoin and Ethereum. Because the public key is stored on chain, it is okay for the public key to not hash to the address of an account, as the address is not pertinent to the signature checking process. + +To build this system, we design a new Msg type as follows: + +```protobuf +message MsgChangePubKey { + bytes address = 1 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress" + ]; + bytes pub_key = 2 [ + (gogoproto.jsontag) = "public_key,omitempty", (gogoproto.moretags) = "yaml:\"public_key\"" + ]; +} +``` + +As an example, account pubkey change message can be defined as follows. + +```json +{ + "type": "cosmos-sdk/StdTx", + "value": { + "address": "cosmos1wf5h7meplxu3sc6rk2agavkdsmlsen7rgsasxk", + "public_key": "cosmospub1addwnpepqdszcr95mrqqs8lw099aa9h8h906zmet22pmwe9vquzcgvnm93eqygufdlv" + }, + "signature": "a9n7pIqCUuYJTCm7ZBv1cqqlM3uYyX/7SnaSXA8zrG0CBWP6p55pTFFHYn39tVvFtRbGE7gXF1qCiaOilJ8NtQ==" +} +``` + +Here, the signature is signed for the public key thats current in-state for account `cosmos1wf5h7meplxu3sc6rk2agavkdsmlsen7rgsasxk`, as normally done in the ante-handler. 
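To make this concrete, the sketch below shows roughly how an ante-handler-style check can verify a signature against whatever pubkey is currently stored in state. The trimmed-down `Account` and `AccountKeeper` interfaces are stand-ins for the real `x/auth` types and are assumptions made only to keep the example self-contained; the point is that verification uses the pubkey held on the account, not one re-derived from the address.

```go
package sketch

import (
	"fmt"

	cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
	sdk "github.com/cosmos/cosmos-sdk/types"
)

// Account and AccountKeeper are reduced, hypothetical interfaces used only in
// this sketch; the real x/auth types carry more methods.
type Account interface {
	GetPubKey() cryptotypes.PubKey
}

type AccountKeeper interface {
	GetAccount(ctx sdk.Context, addr sdk.AccAddress) Account
}

// verifySignature checks a signature against the pubkey currently stored in
// state for the signer, the way the ante-handler does.
func verifySignature(ctx sdk.Context, ak AccountKeeper, signer sdk.AccAddress, signBytes, sig []byte) error {
	acc := ak.GetAccount(ctx, signer)
	if acc == nil {
		return fmt.Errorf("account %s not found", signer)
	}
	// the in-state pubkey, which after a change may no longer hash to the address
	pubKey := acc.GetPubKey()
	if pubKey == nil || !pubKey.VerifySignature(signBytes, sig) {
		return fmt.Errorf("signature verification failed for %s", signer)
	}
	return nil
}
```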
+ +Once, approved, the handler for this message type, which takes in the AccountKeeper, will update the in-state pubkey for the account and replace it with the pubkey from the Msg. + +Because an account can no longer be pruned from state once its pubkey has changed, we can charge an additional gas fee for this operation to compensate for this this externality (this bound gas amount is configured as parameter `PubKeyChangeCost`). The bonus gas is charged inside handler, using the `ConsumeGas` function. + +```go + amount := ak.GetParams(ctx).PubKeyChangeCost + ctx.GasMeter().ConsumeGas(amount, "pubkey change fee") +``` + + +## Consequences + +### Positive + +* Will allow users and validator operators to employ better operational security practices with key rotation. +* Will allow organizations or groups to easily change and add/remove multisig signers. + +### Negative + +Breaks the current assumed relationship between address and pubkeys as H(pubkey) = address. This has a couple of consequences. + +* We cannot prune accounts with 0 balance that have had their pubkey changed (we currently do not currently do this anyways, but the reason we have account numbers is presumably for this purpose). +* This makes wallets that support this feature more complicated. For example, if an address on chain was updated, the corresponding key in the CLI wallet also needs to be updated. + +### Neutral + +* While the purpose of this is intended to allow the owner of an account to update to a new pubkey they own, this could technically also be used to transfer ownership of an account to a new owner. For example, this could be use used to sell a staked position without unbonding or an account that has vesting tokens. However, the friction of this is very high as this would essentially have to be done as a very specific OTC trade. Furthermore, additional constraints could be added to prevent accouns with Vesting tokens to use this feature. +* Will require that PubKeys for an account are included in the genesis exports. 
+ +## References + From 2affa79c0ab438a2ed538aef013eb8e58247e1b7 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Tue, 13 Oct 2020 10:13:30 -0400 Subject: [PATCH 02/40] adr 32 -> 34 --- .../{adr-032-change-pubkey.md => adr-034-change-pubkey.md} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename docs/architecture/{adr-032-change-pubkey.md => adr-034-change-pubkey.md} (99%) diff --git a/docs/architecture/adr-032-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md similarity index 99% rename from docs/architecture/adr-032-change-pubkey.md rename to docs/architecture/adr-034-change-pubkey.md index bc20b3439..54597a9ba 100644 --- a/docs/architecture/adr-032-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -1,4 +1,4 @@ -# ADR 032: Change PubKey +# ADR 034: Change PubKey ## Changelog From 30c13ea5549b614d26c1b1d35ee5729863cf6de7 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Wed, 21 Oct 2020 15:37:40 -0400 Subject: [PATCH 03/40] Apply suggestions from code review Co-authored-by: Aaron Craelius --- docs/architecture/adr-034-change-pubkey.md | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md index 54597a9ba..25265e4f3 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -16,7 +16,7 @@ Transferring all the assets of an account to a new account with the updated pubk ## Decision -We propose the creation of a new feature called `changepubkey` that is an extension to `auth` that allows accounts to update the public key associated with their account, while keeping the address the same. +We propose the creation of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. This is possible because the Cosmos SDK `StdAccount` stores the public key for an account in state, instead of making the assumption that the public key is included in the transaction (whether explicitly or implicitly through the signature) as in other blockchains such as Bitcoin and Ethereum. Because the public key is stored on chain, it is okay for the public key to not hash to the address of an account, as the address is not pertinent to the signature checking process. @@ -24,12 +24,8 @@ To build this system, we design a new Msg type as follows: ```protobuf message MsgChangePubKey { - bytes address = 1 [ - (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress" - ]; - bytes pub_key = 2 [ - (gogoproto.jsontag) = "public_key,omitempty", (gogoproto.moretags) = "yaml:\"public_key\"" - ]; + string address = 1; + google.protobuf.Any pub_key = 2; } ``` @@ -78,4 +74,3 @@ Breaks the current assumed relationship between address and pubkeys as H(pubkey) * Will require that PubKeys for an account are included in the genesis exports. 
## References - From cb99dbd413f6a3e9b7500f7a43a2d935a028cd3f Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Wed, 21 Oct 2020 21:42:06 -0400 Subject: [PATCH 04/40] Apply suggestions from code review Co-authored-by: Aaron Craelius --- docs/architecture/adr-034-change-pubkey.md | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md index 25265e4f3..ff262495e 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -31,16 +31,6 @@ message MsgChangePubKey { As an example, account pubkey change message can be defined as follows. -```json -{ - "type": "cosmos-sdk/StdTx", - "value": { - "address": "cosmos1wf5h7meplxu3sc6rk2agavkdsmlsen7rgsasxk", - "public_key": "cosmospub1addwnpepqdszcr95mrqqs8lw099aa9h8h906zmet22pmwe9vquzcgvnm93eqygufdlv" - }, - "signature": "a9n7pIqCUuYJTCm7ZBv1cqqlM3uYyX/7SnaSXA8zrG0CBWP6p55pTFFHYn39tVvFtRbGE7gXF1qCiaOilJ8NtQ==" -} -``` Here, the signature is signed for the public key thats current in-state for account `cosmos1wf5h7meplxu3sc6rk2agavkdsmlsen7rgsasxk`, as normally done in the ante-handler. From 68ba5647a24be83bb008a6bebbbdf508f43e2cc7 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Wed, 21 Oct 2020 21:42:47 -0400 Subject: [PATCH 05/40] Update adr-034-change-pubkey.md --- docs/architecture/adr-034-change-pubkey.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md index ff262495e..75d69ac42 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -29,10 +29,7 @@ message MsgChangePubKey { } ``` -As an example, account pubkey change message can be defined as follows. - - -Here, the signature is signed for the public key thats current in-state for account `cosmos1wf5h7meplxu3sc6rk2agavkdsmlsen7rgsasxk`, as normally done in the ante-handler. +The MsgChangePubKey transaction needs to be signed by the existing pubkey in state. Once, approved, the handler for this message type, which takes in the AccountKeeper, will update the in-state pubkey for the account and replace it with the pubkey from the Msg. From 091c8350603e9778d6d8732e96e1e45a70d37252 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Wed, 21 Oct 2020 21:44:40 -0400 Subject: [PATCH 06/40] Update adr-034-change-pubkey.md --- docs/architecture/adr-034-change-pubkey.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md index 75d69ac42..0eddf6a94 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -16,7 +16,7 @@ Transferring all the assets of an account to a new account with the updated pubk ## Decision -We propose the creation of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. +We propose the creation of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. This feature can be enabled using an `EnableChangePubKey` param. 
This is possible because the Cosmos SDK `StdAccount` stores the public key for an account in state, instead of making the assumption that the public key is included in the transaction (whether explicitly or implicitly through the signature) as in other blockchains such as Bitcoin and Ethereum. Because the public key is stored on chain, it is okay for the public key to not hash to the address of an account, as the address is not pertinent to the signature checking process. From c8c73a483a197b0eddd62a028ff8ee5cb97e7176 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Thu, 22 Oct 2020 12:00:16 -0400 Subject: [PATCH 07/40] Apply suggestions from code review Co-authored-by: Aaron Craelius --- docs/architecture/adr-034-change-pubkey.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md index 0eddf6a94..73f9080a6 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -16,7 +16,7 @@ Transferring all the assets of an account to a new account with the updated pubk ## Decision -We propose the creation of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. This feature can be enabled using an `EnableChangePubKey` param. +We propose the addition of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. This feature can be enabled using an `EnableChangePubKey` param. This is possible because the Cosmos SDK `StdAccount` stores the public key for an account in state, instead of making the assumption that the public key is included in the transaction (whether explicitly or implicitly through the signature) as in other blockchains such as Bitcoin and Ethereum. Because the public key is stored on chain, it is okay for the public key to not hash to the address of an account, as the address is not pertinent to the signature checking process. @@ -33,7 +33,7 @@ The MsgChangePubKey transaction needs to be signed by the existing pubkey in sta Once, approved, the handler for this message type, which takes in the AccountKeeper, will update the in-state pubkey for the account and replace it with the pubkey from the Msg. -Because an account can no longer be pruned from state once its pubkey has changed, we can charge an additional gas fee for this operation to compensate for this this externality (this bound gas amount is configured as parameter `PubKeyChangeCost`). The bonus gas is charged inside handler, using the `ConsumeGas` function. +Because an account can no longer be pruned from state once its pubkey has changed, we can charge an additional gas fee for this operation to compensate for this this externality (this bound gas amount is configured as parameter `PubKeyChangeCost`). The bonus gas is charged inside the handler, using the `ConsumeGas` function. 
```go amount := ak.GetParams(ctx).PubKeyChangeCost From 2ace2a738bb9c165a352f60fdc2a58aa517a0a98 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Wed, 28 Oct 2020 22:20:25 -0400 Subject: [PATCH 08/40] Update docs/architecture/adr-034-change-pubkey.md Co-authored-by: Amaury Martiny --- docs/architecture/adr-034-change-pubkey.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md index 73f9080a6..880fe8543 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -18,7 +18,7 @@ Transferring all the assets of an account to a new account with the updated pubk We propose the addition of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. This feature can be enabled using an `EnableChangePubKey` param. -This is possible because the Cosmos SDK `StdAccount` stores the public key for an account in state, instead of making the assumption that the public key is included in the transaction (whether explicitly or implicitly through the signature) as in other blockchains such as Bitcoin and Ethereum. Because the public key is stored on chain, it is okay for the public key to not hash to the address of an account, as the address is not pertinent to the signature checking process. +This is possible because the Cosmos SDK `BaseAccount` stores the public key for an account in state, instead of making the assumption that the public key is included in the transaction (whether explicitly or implicitly through the signature) as in other blockchains such as Bitcoin and Ethereum. Because the public key is stored on chain, it is okay for the public key to not hash to the address of an account, as the address is not pertinent to the signature checking process. To build this system, we design a new Msg type as follows: From 6274240d7cade92e61676fa610dfe6b2e5c67c7e Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Tue, 3 Nov 2020 15:55:16 -0400 Subject: [PATCH 09/40] Apply suggestions from code review Co-authored-by: Robert Zaremba --- docs/architecture/adr-034-change-pubkey.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-change-pubkey.md index 880fe8543..15979759b 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-change-pubkey.md @@ -10,7 +10,7 @@ Proposed ## Context -Currently, in the Cosmos SDK, the address of an auth account is always based on the hash of the public key. Once an account is created, the public key for the account is set in stone, and cannot be changed. This can be a problem for users, as key rotation is a useful security practice, but is not possible currently. Furthermore, as multisigs are a type of pubkey, once a multisig for an account is set, it can not be updated. This is problematic, as multisigs are often used by organizations or companies, who may need to change their set of multisig signers for internal reasons. +Currently, in the Cosmos SDK, the address of an auth `BaseAccount` is based on the hash of the public key. Once an account is created, the public key for the account is set in stone, and cannot be changed. This can be a problem for users, as key rotation is a useful security practice, but is not possible currently. 
Furthermore, as multisigs are a type of pubkey, once a multisig for an account is set, it can not be updated. This is problematic, as multisigs are often used by organizations or companies, who may need to change their set of multisig signers for internal reasons. Transferring all the assets of an account to a new account with the updated pubkey is not sufficient, because some "engagements" of an account are not easily transferable. For example, in staking, to transfer bonded Atoms, an account would have to unbond all delegations and wait the three week unbonding period. Even more significantly, for validator operators, ownership over a validator is not transferrable at all, meaning that the operator key for a validator can never be updated, leading to poor operational security for validators. @@ -61,3 +61,5 @@ Breaks the current assumed relationship between address and pubkeys as H(pubkey) * Will require that PubKeys for an account are included in the genesis exports. ## References + ++ https://www.algorand.com/resources/blog/announcing-rekeying From c4e5e292444e138b968fb27cfac4f1e6ee375bc3 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Fri, 6 Nov 2020 11:21:45 -0400 Subject: [PATCH 10/40] Update and rename adr-034-change-pubkey.md to adr-034-account-rekeying.md --- ...hange-pubkey.md => adr-034-account-rekeying.md} | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) rename docs/architecture/{adr-034-change-pubkey.md => adr-034-account-rekeying.md} (93%) diff --git a/docs/architecture/adr-034-change-pubkey.md b/docs/architecture/adr-034-account-rekeying.md similarity index 93% rename from docs/architecture/adr-034-change-pubkey.md rename to docs/architecture/adr-034-account-rekeying.md index 15979759b..0073d6f0a 100644 --- a/docs/architecture/adr-034-change-pubkey.md +++ b/docs/architecture/adr-034-account-rekeying.md @@ -1,4 +1,4 @@ -# ADR 034: Change PubKey +# ADR 034: Account Rekying ## Changelog @@ -6,7 +6,11 @@ ## Status -Proposed +PROPOSED + +## Abstract + +Account rekeying is a process hat allows an account to replace its authentication pubkey with a new one. ## Context @@ -23,10 +27,16 @@ This is possible because the Cosmos SDK `BaseAccount` stores the public key for To build this system, we design a new Msg type as follows: ```protobuf +service Msg { + rpc ChangePubKey(MsgChangePubKey) returns (MsgChangePubKeyResponse); +} + message MsgChangePubKey { string address = 1; google.protobuf.Any pub_key = 2; } + +message MsgChangePubKeyResponse {} ``` The MsgChangePubKey transaction needs to be signed by the existing pubkey in state. 
From 53108e76373673a816655bcc47651be72e643268 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Fri, 6 Nov 2020 11:28:18 -0400 Subject: [PATCH 11/40] Update adr-034-account-rekeying.md --- docs/architecture/adr-034-account-rekeying.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/architecture/adr-034-account-rekeying.md b/docs/architecture/adr-034-account-rekeying.md index 0073d6f0a..f51ab4672 100644 --- a/docs/architecture/adr-034-account-rekeying.md +++ b/docs/architecture/adr-034-account-rekeying.md @@ -1,4 +1,4 @@ -# ADR 034: Account Rekying +# ADR 034: Account Rekeying ## Changelog From 20617d0db4dedc0d77cbf774cacda0693430d7c8 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Mon, 23 Nov 2020 15:30:25 -0500 Subject: [PATCH 12/40] Update docs/architecture/adr-034-account-rekeying.md Co-authored-by: Robert Zaremba --- docs/architecture/adr-034-account-rekeying.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/architecture/adr-034-account-rekeying.md b/docs/architecture/adr-034-account-rekeying.md index f51ab4672..28e67e97f 100644 --- a/docs/architecture/adr-034-account-rekeying.md +++ b/docs/architecture/adr-034-account-rekeying.md @@ -62,7 +62,7 @@ Because an account can no longer be pruned from state once its pubkey has change Breaks the current assumed relationship between address and pubkeys as H(pubkey) = address. This has a couple of consequences. -* We cannot prune accounts with 0 balance that have had their pubkey changed (we currently do not currently do this anyways, but the reason we have account numbers is presumably for this purpose). +* We cannot prune accounts with 0 balance that have had their pubkey changed. Currently, we do not prune accounts anyways, but the reason we have account numbers is presumably for this purpose. * This makes wallets that support this feature more complicated. For example, if an address on chain was updated, the corresponding key in the CLI wallet also needs to be updated. ### Neutral From 045e7874b05525d1d603b51f0061921ed19273e3 Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Mon, 23 Nov 2020 15:33:40 -0500 Subject: [PATCH 13/40] Update adr-034-account-rekeying.md Add stack of past keys --- docs/architecture/adr-034-account-rekeying.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/architecture/adr-034-account-rekeying.md b/docs/architecture/adr-034-account-rekeying.md index 28e67e97f..cc7fc8530 100644 --- a/docs/architecture/adr-034-account-rekeying.md +++ b/docs/architecture/adr-034-account-rekeying.md @@ -50,6 +50,8 @@ Because an account can no longer be pruned from state once its pubkey has change ctx.GasMeter().ConsumeGas(amount, "pubkey change fee") ``` +Everytime a key for an address is changed, we will store a log of this change in the state of the chain, thus creating a stack of all previous keys for an address and the time intervals for which they were active. This allows dapps and clients to easily query past keys for an account which may be useful for features such as verifying timestamped off-chain signed messages. 
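A minimal sketch of what one entry of this log could look like, and how it might be keyed in the store, is given below. The `PubKeyHistoryEntry` type, the `0x60` prefix, and the key layout are illustrative assumptions, not something specified by this ADR.

```go
package sketch

import (
	"time"

	codectypes "github.com/cosmos/cosmos-sdk/codec/types"
	sdk "github.com/cosmos/cosmos-sdk/types"
)

// PubKeyHistoryEntry is a hypothetical record of one key that was active for an
// address during the half-open interval [StartTime, EndTime).
type PubKeyHistoryEntry struct {
	PubKey    *codectypes.Any // the superseded public key, packed as an Any
	StartTime time.Time       // when this key became active
	EndTime   time.Time       // block time of the MsgChangePubKey that replaced it
}

// pubKeyHistoryKey is a hypothetical store key layout: prefix | address | big-endian index,
// so all past keys of one address can be iterated in order.
func pubKeyHistoryKey(addr sdk.AccAddress, index uint64) []byte {
	const historyPrefix = 0x60 // illustrative prefix, not an actual x/auth store key
	key := append([]byte{historyPrefix}, addr.Bytes()...)
	return append(key, sdk.Uint64ToBigEndian(index)...)
}
```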
+ ## Consequences From 101d62a69d47ed21a85bd665fed42cbc3e59e3fb Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Mon, 23 Nov 2020 15:47:55 -0500 Subject: [PATCH 14/40] Add details about pruning --- docs/architecture/adr-034-account-rekeying.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/architecture/adr-034-account-rekeying.md b/docs/architecture/adr-034-account-rekeying.md index cc7fc8530..9a08bf306 100644 --- a/docs/architecture/adr-034-account-rekeying.md +++ b/docs/architecture/adr-034-account-rekeying.md @@ -64,8 +64,9 @@ Everytime a key for an address is changed, we will store a log of this change in Breaks the current assumed relationship between address and pubkeys as H(pubkey) = address. This has a couple of consequences. -* We cannot prune accounts with 0 balance that have had their pubkey changed. Currently, we do not prune accounts anyways, but the reason we have account numbers is presumably for this purpose. * This makes wallets that support this feature more complicated. For example, if an address on chain was updated, the corresponding key in the CLI wallet also needs to be updated. +* We cannot prune accounts with 0 balance that have had their pubkey changed. This is because if pruned, the original pubkey of the account would be needed to recreate the same address, but the owner of the address may not have the original pubkey anymore. Currently, we do not automatically prune accounts anyways, but the purpose of account numbers is to allow for this down the road. One way to allow accounts that have had their pubkeys changed is to allow their owners to manually prune their accounts using a new Msg type such as `MsgDeleteAccount`. Manually pruning accounts can give a gas refund as an incentive for performing the action. + ### Neutral From 972095e7faab7b438f72e12d3ed36754bc4f281f Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Mon, 23 Nov 2020 15:52:47 -0500 Subject: [PATCH 15/40] pruning info --- docs/architecture/adr-034-account-rekeying.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/architecture/adr-034-account-rekeying.md b/docs/architecture/adr-034-account-rekeying.md index 9a08bf306..8dfcc9e66 100644 --- a/docs/architecture/adr-034-account-rekeying.md +++ b/docs/architecture/adr-034-account-rekeying.md @@ -43,13 +43,16 @@ The MsgChangePubKey transaction needs to be signed by the existing pubkey in sta Once, approved, the handler for this message type, which takes in the AccountKeeper, will update the in-state pubkey for the account and replace it with the pubkey from the Msg. -Because an account can no longer be pruned from state once its pubkey has changed, we can charge an additional gas fee for this operation to compensate for this this externality (this bound gas amount is configured as parameter `PubKeyChangeCost`). The bonus gas is charged inside the handler, using the `ConsumeGas` function. + +An account that has had its pubkey changed cannot be automatically pruned from state. This is because if pruned, the original pubkey of the account would be needed to recreate the same address, but the owner of the address may not have the original pubkey anymore. Currently, we do not automatically prune any accounts anyways, but we would like to keep this option open the road (this is the purpose of account numbers). To resolve this, we charge an additional gas fee for this operation to compensate for this this externality (this bound gas amount is configured as parameter `PubKeyChangeCost`). 
The bonus gas is charged inside the handler, using the `ConsumeGas` function. Furthermore, in the future, we can allow accounts that have rekeyed manually prune themselves using a new Msg type such as `MsgDeleteAccount`. Manually pruning accounts can give a gas refund as an incentive for performing the action. + ```go amount := ak.GetParams(ctx).PubKeyChangeCost ctx.GasMeter().ConsumeGas(amount, "pubkey change fee") ``` + Everytime a key for an address is changed, we will store a log of this change in the state of the chain, thus creating a stack of all previous keys for an address and the time intervals for which they were active. This allows dapps and clients to easily query past keys for an account which may be useful for features such as verifying timestamped off-chain signed messages. @@ -65,7 +68,7 @@ Everytime a key for an address is changed, we will store a log of this change in Breaks the current assumed relationship between address and pubkeys as H(pubkey) = address. This has a couple of consequences. * This makes wallets that support this feature more complicated. For example, if an address on chain was updated, the corresponding key in the CLI wallet also needs to be updated. -* We cannot prune accounts with 0 balance that have had their pubkey changed. This is because if pruned, the original pubkey of the account would be needed to recreate the same address, but the owner of the address may not have the original pubkey anymore. Currently, we do not automatically prune accounts anyways, but the purpose of account numbers is to allow for this down the road. One way to allow accounts that have had their pubkeys changed is to allow their owners to manually prune their accounts using a new Msg type such as `MsgDeleteAccount`. Manually pruning accounts can give a gas refund as an incentive for performing the action. +* Cannot automatically prune accounts with 0 balance that have had their pubkey changed. ### Neutral From b5442e1d5e11a8e42cb0471498241ad6a9882f2d Mon Sep 17 00:00:00 2001 From: Sunny Aggarwal Date: Tue, 24 Nov 2020 13:06:58 -0500 Subject: [PATCH 16/40] Update adr-034-account-rekeying.md --- docs/architecture/adr-034-account-rekeying.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/architecture/adr-034-account-rekeying.md b/docs/architecture/adr-034-account-rekeying.md index 8dfcc9e66..22cb79e31 100644 --- a/docs/architecture/adr-034-account-rekeying.md +++ b/docs/architecture/adr-034-account-rekeying.md @@ -20,7 +20,7 @@ Transferring all the assets of an account to a new account with the updated pubk ## Decision -We propose the addition of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. This feature can be enabled using an `EnableChangePubKey` param. +We propose the addition of a new feature to `x/auth` that allows accounts to update the public key associated with their account, while keeping the address the same. This is possible because the Cosmos SDK `BaseAccount` stores the public key for an account in state, instead of making the assumption that the public key is included in the transaction (whether explicitly or implicitly through the signature) as in other blockchains such as Bitcoin and Ethereum. Because the public key is stored on chain, it is okay for the public key to not hash to the address of an account, as the address is not pertinent to the signature checking process. 
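Putting the pieces above together, a handler for `MsgChangePubKey` might look roughly like the following sketch. The reduced `accountKeeper` and `account` interfaces, including the `PubKeyChangeCost` accessor, are assumptions made to keep the example compact; only the `ConsumeGas` call and the `PubKeyChangeCost` parameter come from the text above.

```go
package sketch

import (
	"fmt"

	cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
	sdk "github.com/cosmos/cosmos-sdk/types"
)

// accountKeeper is a reduced, hypothetical view of x/auth's AccountKeeper,
// limited to what this sketch needs.
type accountKeeper interface {
	GetAccount(ctx sdk.Context, addr sdk.AccAddress) account
	SetAccount(ctx sdk.Context, acc account)
	// in the ADR's snippet this value comes from ak.GetParams(ctx).PubKeyChangeCost
	PubKeyChangeCost(ctx sdk.Context) uint64
}

type account interface {
	SetPubKey(pk cryptotypes.PubKey) error
}

// handleChangePubKey swaps the in-state pubkey and charges the extra gas that
// compensates for the account no longer being prunable.
func handleChangePubKey(ctx sdk.Context, ak accountKeeper, addr sdk.AccAddress, newKey cryptotypes.PubKey) error {
	acc := ak.GetAccount(ctx, addr)
	if acc == nil {
		return fmt.Errorf("account %s does not exist", addr)
	}
	if err := acc.SetPubKey(newKey); err != nil {
		return err
	}
	ak.SetAccount(ctx, acc)

	// charge the additional, parameterized gas cost described above
	ctx.GasMeter().ConsumeGas(ak.PubKeyChangeCost(ctx), "pubkey change fee")
	return nil
}
```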
From b6bd4f5c2e6c00d4fe33f07a54ef482b39d3b72f Mon Sep 17 00:00:00 2001 From: Amaury Date: Fri, 27 Nov 2020 13:57:14 +0100 Subject: [PATCH 17/40] docs: Remove legacy Msg/queriers in "Basics" section (#7782) * docs: Remove legacy Msg/queriers in Basics * Service * Update links * Update to rc2 * Update refs * Use TM hashes * REformulate * Remove deep copy * Update docs/DOCS_README.md Co-authored-by: Cory Co-authored-by: Cory Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- docs/DOCS_README.md | 4 +-- docs/basics/accounts.md | 31 +++++++++--------- docs/basics/app-anatomy.md | 65 +++++++------------------------------ docs/basics/gas-fees.md | 42 ++++++++++++------------ docs/basics/tx-lifecycle.md | 8 +++-- 5 files changed, 55 insertions(+), 95 deletions(-) diff --git a/docs/DOCS_README.md b/docs/DOCS_README.md index 96986bef8..98dbb33c3 100644 --- a/docs/DOCS_README.md +++ b/docs/DOCS_README.md @@ -5,7 +5,7 @@ If you want to open a PR on the Cosmos SDK to update the documentation, please f ## Translating - Docs translations live in a `docs/country-code/` folder, where `country-code` stands for the country code of the language used (`cn` for Chinese, `kr` for Korea, `fr` for France, ...). -- Always translate content living on `master`. +- Always translate content living on `master`. - Only content under `/docs/intro/`, `/docs/basics/`, `/docs/core/`, `/docs/building-modules/` and `docs/interfaces` needs to be translated, as well as `docs/README.md`. It is also nice (but not mandatory) to translate `/docs/spec/`. - Specify the release/tag of the translation in the README of your translation folder. Update the release/tag each time you update the translation. @@ -103,7 +103,7 @@ We are using [Algolia](https://www.algolia.com) to power full-text search. This ## Consistency Because the build processes are identical (as is the information contained herein), this file should be kept in sync as -much as possible with its [counterpart in the Tendermint Core repo](https://github.com/tendermint/tendermint/blob/master/docs/DOCS_README.md). +much as possible with its [counterpart in the Tendermint Core repo](https://github.com/tendermint/tendermint/blob/v0.34.0/docs/DOCS_README.md). ### Update and Build the RPC docs diff --git a/docs/basics/accounts.md b/docs/basics/accounts.md index 411aa201a..930914d73 100644 --- a/docs/basics/accounts.md +++ b/docs/basics/accounts.md @@ -62,17 +62,20 @@ In the Cosmos SDK, accounts are stored and managed via an object called a [`Keyr A `Keyring` is an object that stores and manages accounts. In the Cosmos SDK, a `Keyring` implementation follows the `Keyring` interface: -+++ https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/crypto/keyring/keyring.go#L50-L88 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/crypto/keyring/keyring.go#L50-L88 The default implementation of `Keyring` comes from the third-party [`99designs/keyring`](https://github.com/99designs/keyring) library. A few notes on the `Keyring` methods: -- `Sign(uid string, msg []byte) ([]byte, tmcrypto.PubKey, error)` strictly deals with the signature of the `message` bytes. Some preliminary work should be done beforehand to prepare and encode the `message` into a canonical `[]byte` form, and this is done in the `GetSignBytes` method. See an example of `message` preparation from the `x/bank` module. Note that signature verification is not implemented in the SDK by default. It is deferred to the [`anteHandler`](#antehandler). 
- +++ https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/x/bank/types/msgs.go#L51-L54 -- `NewAccount(uid, mnemonic, bip39Passwd, hdPath string, algo SignatureAlgo) (Info, error)` creates a new account based on the [`bip44 path`](https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki) and persists it on disk (note that the `PrivKey` is [encrypted with a passphrase before being persisted](https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/crypto/keys/mintkey/mintkey.go), it is **never stored unencrypted**). In the context of this method, the `account` and `address` parameters refer to the segment of the BIP44 derivation path (e.g. `0`, `1`, `2`, ...) used to derive the `PrivKey` and `PubKey` from the mnemonic (note that given the same mnemonic and `account`, the same `PrivKey` will be generated, and given the same `account` and `address`, the same `PubKey` and `Address` will be generated). Finally, note that the `NewAccount` method derives keys and addresses using the algorithm specified in the last argument `algo`. Currently, the SDK supports two public key algorithms: - - `secp256k1`, as implemented in the [SDK's `crypto/keys/secp256k1` package](https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/crypto/keys/secp256k1/secp256k1.go), - - `ed25519`, as implemented in the [SDK's `crypto/keys/ed25519` package](https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/crypto/keys/ed25519/ed25519.go). +- `Sign(uid string, payload []byte) ([]byte, tmcrypto.PubKey, error)` strictly deals with the signature of the `payload` bytes. Some preliminary work should be done beforehand to prepare and encode the transaction into a canonical `[]byte` form. Protobuf being not deterministic, it has been decided in [ADR-020](../architecture/adr-020-protobuf-transaction-encoding.md) that the canonical `payload` to sign is the `SignDoc` struct, deterministically encoded using [ADR-027](adr-027-deterministic-protobuf-serialization.md). Note that signature verification is not implemented in the SDK by default, it is deferred to the [`anteHandler`](../core/baseapp.md#antehandler). + +++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/proto/cosmos/tx/v1beta1/tx.proto#L47-L64 + +- `NewAccount(uid, mnemonic, bip39Passwd, hdPath string, algo SignatureAlgo) (Info, error)` creates a new account based on the [`bip44 path`](https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki) and persists it on disk (note that the `PrivKey` is [encrypted with a passphrase before being persisted](https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/crypto/armor.go), it is **never stored unencrypted**). In the context of this method, the `account` and `address` parameters refer to the segment of the BIP44 derivation path (e.g. `0`, `1`, `2`, ...) used to derive the `PrivKey` and `PubKey` from the mnemonic (note that given the same mnemonic and `account`, the same `PrivKey` will be generated, and given the same `account` and `address`, the same `PubKey` and `Address` will be generated). Finally, note that the `NewAccount` method derives keys and addresses using the algorithm specified in the last argument `algo`. 
Currently, the SDK supports two public key algorithms: + + - `secp256k1`, as implemented in the [SDK's `crypto/keys/secp256k1` package](https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/crypto/keys/secp256k1/secp256k1.go), + - `ed25519`, as implemented in the [SDK's `crypto/keys/ed25519` package](https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/crypto/keys/ed25519/ed25519.go). + - `ExportPrivKeyArmor(uid, encryptPassphrase string) (armor string, err error)` exports a private key in ASCII-armored encrypted format, using the given passphrase. You can then either import it again into the keyring using the `ImportPrivKey(uid, armor, passphrase string)` function, or decrypt it into a raw private key using the `UnarmorDecryptPrivKey(armorStr string, passphrase string)` function. Also see the [`Addresses`](#addresses) section for more information. @@ -93,20 +96,18 @@ Also see the [`Addresses`](#addresses) section for more information. ### PubKeys -`PubKey`s used in the Cosmos SDK are Protobuf messages and extend the `Pubkey` interface defined in tendermint's `crypto` package: +`PubKey`s used in the Cosmos SDK are Protobuf messages and have the following methods: -+++ https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/crypto/types/types.go#L8-L13 ++++ https://github.com/cosmos/cosmos-sdk/blob/master/crypto/types/types.go#L8-L17 -+++ https://github.com/tendermint/tendermint/blob/01c32c62e8840d812359c9e87e9c575aa67acb09/crypto/crypto.go#L22-L28 - -- For `secp256k1` keys, the actual implementation can be found [here](https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/crypto/keys/secp256k1/secp256k1.go). -- For `ed25519` keys, it can be found [here](https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/crypto/keys/ed25519/ed25519.go). +- For `secp256k1` keys, the actual implementation can be found [here](https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/crypto/keys/secp256k1/secp256k1.go). +- For `ed25519` keys, it can be found [here](https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/crypto/keys/ed25519/ed25519.go). In both case, the actual key (as raw bytes) is the compressed form of the pubkey. The first byte is a `0x02` byte if the `y`-coordinate is the lexicographically largest of the two associated with the `x`-coordinate. Otherwise the first byte is a `0x03`. This prefix is followed with the `x`-coordinate. Note that in the Cosmos SDK, `Pubkeys` are not manipulated in their raw bytes form. Instead, they are encoded to string using [`Amino`](../core/encoding.md#amino) and [`bech32`](https://en.bitcoin.it/wiki/Bech32). In the SDK, it is done by first calling the `Bytes()` method on the raw `Pubkey` (which applies amino encoding), and then the `ConvertAndEncode` method of `bech32`. -+++ https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/types/address.go#L579-L729 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/types/address.go#L579-L729 ### Addresses @@ -124,11 +125,11 @@ aa := sdk.AccAddress(pub.Address().Bytes()) These addresses implement the `Address` interface: -+++ https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/types/address.go#L73-L82 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/types/address.go#L73-L82 Of note, the `Marshal()` and `Bytes()` method both return the same raw `[]byte` form of the address, the former being needed for Protobuf compatibility. 
Also, the `String()` method is used to return the `bech32` encoded form of the address, which should be the only address format with which end-user interract. Here is an example: -+++ https://github.com/cosmos/cosmos-sdk/blob/d9175200920e96bfa4182b5c8bc46d91b17a28a1/types/address.go#L232-L246 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/types/address.go#L232-L246 ## Next {hide} diff --git a/docs/basics/app-anatomy.md b/docs/basics/app-anatomy.md index b45e22d6b..9bd2256f0 100644 --- a/docs/basics/app-anatomy.md +++ b/docs/basics/app-anatomy.md @@ -49,17 +49,17 @@ The first thing defined in `app.go` is the `type` of the application. It is gene - **A list of module's `keeper`s.** Each module defines an abstraction called [`keeper`](../building-modules/keeper.md), which handles reads and writes for this module's store(s). The `keeper`'s methods of one module can be called from other modules (if authorized), which is why they are declared in the application's type and exported as interfaces to other modules so that the latter can only access the authorized functions. - **A reference to an [`appCodec`](../core/encoding.md).** The application's `appCodec` is used to serialize and deserialize data structures in order to store them, as stores can only persist `[]bytes`. The default codec is [Protocol Buffers](../core/encoding.md). - **A reference to a [`legacyAmino`](../core/encoding.md) codec.** Some parts of the SDK have not been migrated to use the `appCodec` above, and are still hardcoded to use Amino. Other parts explicity use Amino for backwards compatibility. For these reasons, the application still holds a reference to the legacy Amino codec. Please note that the Amino codec will be removed from the SDK in the upcoming releases. -- **A reference to a [module manager](../building-modules/module-manager.md#manager)** and a [basic module manager](../building-modules/module-manager.md#basicmanager). The module manager is an object that contains a list of the application's module. It facilitates operations related to these modules, like registering their [`Msg` services](../core/baseapp.md#msg-services) and [gRPC `Query` services](../core/baseapp.md#grpc-query-services), or setting the order of execution between modules for various functions like [`InitChainer`](#initchainer), [`BeginBlocker` and `EndBlocker`](#beginblocker-and-endblocker). For backwards-compatibility reasons, all modules expose [legacy `Msg`s routes](../core/baseapp.md#routing) and [legacy query routes](../core/baseapp.md#legacy-query-routing), which are also registered by the module manager.. +- **A reference to a [module manager](../building-modules/module-manager.md#manager)** and a [basic module manager](../building-modules/module-manager.md#basicmanager). The module manager is an object that contains a list of the application's module. It facilitates operations related to these modules, like registering their [`Msg` service](../core/baseapp.md#msg-services) and [gRPC `Query` service](../core/baseapp.md#grpc-query-services), or setting the order of execution between modules for various functions like [`InitChainer`](#initchainer), [`BeginBlocker` and `EndBlocker`](#beginblocker-and-endblocker). 
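Condensed to a skeleton, an application type that wires these components together might look like the sketch below. The struct name and the particular keepers are illustrative; the `simapp` reference that follows shows a complete, real definition.

```go
package app

import (
	"github.com/cosmos/cosmos-sdk/baseapp"
	"github.com/cosmos/cosmos-sdk/codec"
	"github.com/cosmos/cosmos-sdk/types/module"
	authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper"
	bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper"
)

// MyApp is a trimmed-down sketch of a custom application type: it embeds
// BaseApp and keeps references to the codecs, the module keepers it uses,
// and the module manager.
type MyApp struct {
	*baseapp.BaseApp

	legacyAmino *codec.LegacyAmino // kept for the parts of the SDK still using Amino
	appCodec    codec.Marshaler    // Protobuf codec used for state (renamed codec.Codec in later versions)

	// keepers
	AccountKeeper authkeeper.AccountKeeper
	BankKeeper    bankkeeper.Keeper

	// the module manager
	mm *module.Manager
}
```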
See an example of application type definition from `simapp`, the SDK's own app used for demo and testing purposes: -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/simapp/app.go#L139-L181 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/simapp/app.go#L139-L181 ### Constructor Function This function constructs a new application of the type defined in the section above. It must fulfill the `AppCreator` signature in order to be used in the [`start` command](../core/node.md#start-command) of the application's daemon command. -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/server/types/app.go#L42-L44 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/server/types/app.go#L42-L44 Here are the main actions performed by this function: @@ -81,7 +81,7 @@ Note that this function only creates an instance of the app, while the actual st See an example of application constructor from `simapp`: -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/simapp/app.go#L192-L429 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/simapp/app.go#L192-L429 ### InitChainer @@ -91,7 +91,7 @@ In general, the `InitChainer` is mostly composed of the [`InitGenesis`](../build See an example of an `InitChainer` from `simapp`: -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/simapp/app.go#L452-L459 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/simapp/app.go#L452-L459 ### BeginBlocker and EndBlocker @@ -103,13 +103,13 @@ As a sidenote, it is important to remember that application-specific blockchains See an example of `BeginBlocker` and `EndBlocker` functions from `simapp` -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/simapp/app.go#L442-L450 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/simapp/app.go#L442-L450 ### Register Codec The `EncodingConfig` structure is the last important part of the `app.go` file. The goal of this structure is to define the codecs that will be used throughout the app. -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/simapp/params/encoding.go#L9-L16 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/simapp/params/encoding.go#L9-L16 Here are descriptions of what each of the four fields means: @@ -123,7 +123,7 @@ The SDK exposes a `MakeCodecs` function used to create a `EncodingConfig`. It us See an example of a `MakeCodecs` from `simapp`: -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/simapp/app.go#L429-L435 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/simapp/app.go#L429-L435 ## Modules @@ -143,33 +143,19 @@ When a valid block of transactions is received by the full-node, Tendermint rela 1. Upon receiving the transaction, the application first unmarshalls it from `[]bytes`. 2. Then, it verifies a few things about the transaction like [fee payment and signatures](#gas-fees.md#antehandler) before extracting the `Msg`(s) contained in the transaction. -3. `Msg`s are encoded as Protobuf [`Any`s](#register-codec) via the `sdk.ServiceMsg` struct. By analyzing each `Any`'s `type_url`, the application routes the `Msg` to the corresponding module's `Msg` service. +3. `Msg`s are encoded as Protobuf [`Any`s](#register-codec) via the `sdk.ServiceMsg` struct. By analyzing each `Any`'s `type_url`, baseapp's `msgServiceRouter` routes the `Msg` to the corresponding module's `Msg` service. 4. If the message is successfully processed, the state is updated. For a more detailed look at a transaction lifecycle, click [here](./tx-lifecycle.md). 
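The routing in step 3 can be pictured with the toy dispatcher below. It is a simplified stand-in for `BaseApp`'s internal `msgServiceRouter`, not its actual API, but it shows how an `Any`'s type URL selects the handler registered by the owning module's `Msg` service.

```go
package sketch

import (
	"fmt"

	codectypes "github.com/cosmos/cosmos-sdk/codec/types"
	sdk "github.com/cosmos/cosmos-sdk/types"
)

// handlerFn is a simplified stand-in for the handler a Msg service registers
// for one of its RPC methods.
type handlerFn func(ctx sdk.Context, msg *codectypes.Any) error

// msgServiceRouter is a toy router: it maps an Any's type URL
// (e.g. "/cosmos.bank.v1beta1.MsgSend") to the handler of the owning module.
type msgServiceRouter struct {
	routes map[string]handlerFn
}

func (r *msgServiceRouter) register(typeURL string, h handlerFn) {
	if r.routes == nil {
		r.routes = make(map[string]handlerFn)
	}
	r.routes[typeURL] = h
}

// dispatch mirrors step 3 above: inspect the type URL and hand the message to
// the matching module handler.
func (r *msgServiceRouter) dispatch(ctx sdk.Context, msg *codectypes.Any) error {
	h, ok := r.routes[msg.TypeUrl]
	if !ok {
		return fmt.Errorf("no Msg service registered for %s", msg.TypeUrl)
	}
	return h(ctx, msg)
}
```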
Module developers create custom `Msg`s when they build their own module. The general practice is to define all `Msg`s in a Protobuf service called `service Msg {}`, and define each `Msg` as a Protobuf service method, using the `rpc` keyword. These definitions usually reside in a `tx.proto` file. For example, the `x/bank` module defines two `Msg`s to allows users to transfer tokens: -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/proto/cosmos/bank/v1beta1/tx.proto#L10-L17 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/proto/cosmos/bank/v1beta1/tx.proto#L10-L17 These two `Msg`s are processed by the `Msg` service of the `x/bank` module, which ultimately calls the `keeper` of the `x/auth` module in order to update the state. Each module should also implement the `RegisterServices` method as part of the [`AppModule` interface](#application-module-interface). This method should call the `RegisterMsgServer` function provided by the generated Protobuf code. -#### Handlers - -The [handler](../building-modules/msg-services.md#handler-type) refers to the part of the module responsible for processing the `Msg` after it is routed by `baseapp`. Handler functions of modules are only executed if the transaction is relayed from Tendermint by the `DeliverTx` ABCI message. If the transaction is relayed by `CheckTx`, only stateless checks and fee-related stateful checks are performed. To better understand the difference between `DeliverTx`and `CheckTx`, as well as the difference between stateful and stateless checks, click [here](./tx-lifecycle.md). - -The `handler` of a module is generally defined in a file called `handler.go` and consists of: - -- A **switch function** `NewHandler` to route the message to the appropriate `handler` function. This function returns a `handler` function, and is registered in the [`AppModule`](#application-module-interface) to be used in the application's module manager to initialize the [application's router](../core/baseapp.md#routing). Next is an example from `x/bank`: - +++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/x/bank/handler.go#L10-L30 -- **One handler function for each message type defined by the module**. Developers write the message processing logic in these functions. This generally involves doing stateful checks to ensure the message is valid and calling [`keeper`](#keeper)'s methods to update the state. - -Handler functions return a result of type `sdk.Result`, which informs the application on whether the message was successfully processed: - -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/types/result.go#L15-L40 - ### gRPC `Query` Services gRPC `Query` services are introduced in the v0.40 Stargate release. They allow users to query the state using [gRPC](https://grpc.io). They are enabled by default, and can be configued under the `grpc.enable` and `grpc.address` fields inside `app.toml`. @@ -180,35 +166,6 @@ Protobuf generates a `QueryServer` interface for each module, containing all the Finally, each module should also implement the `RegisterServices` method as part of the [`AppModule` interface](#application-module-interface). This method should call the `RegisterQueryServer` function provided by the generated Protobuf code. -### Legacy `Msg`s - -While the [`Msg` service](#msg-services) introduced in v0.40 is the official way to define `Msg`s, the SDK still handles legacy `Msg`s defined with previous versions of the SDK. 
- -[Legacy `Msg`s](../building-modules/messages-and-queries.md#messages) are objects defined by each module that implement the [`sdk.Msg`](../building-modules/messages-and-queries.md#messages) interface. Each [`transaction`](../core/transactions.md) contains one or multiple legacy `Msg`s, and can also contain both legacy and non-legacy `Msg`s. - -The application handles the transaction almost like with `Msg` service `Msg`s, only the third step (routing) differs: - -1. Upon receiving the transaction, the application first unmarshalls it from `[]bytes`. -2. Then, it verifies a few things about the transaction like [fee payment and signatures](#gas-fees.md#antehandler) before extracting the message(s) contained in the transaction. -3. With the `Type()` method of the legacy `Msg`, `baseapp` is able to route it to the appropriate module's [legacy `Msg` handler](#handler) in order for it to be processed. -4. If the message is successfully processed, the state is updated. - -New `Msg` services are compatible with legacy `Msg`s in terms of how `Msg`s are handled, please refer to the [handler](#handlers) section for more information. - -### Legacy Query Routes - -Legacy queriers were queriers used before the introduction of Protobuf and gRPC in the SDK. They are present for existing modules, but will be deprecated in a future release of the SDK. If you are developing new modules, gRPC `Query` services should be preferred, and you only need to implement the `LegacyQuerierHandler` interface if you wish to use legacy queriers. - -[`Legacy queriers`](../building-modules/query-services.md#legacy-queriers) are very similar to `handlers`, except they serve user queries to the state as opposed to processing transactions. A [query](../building-modules/messages-and-queries.md#queries) is initiated from an [interface](#application-interface) by an end-user who provides a `queryRoute` and some `data`. The query is then routed to the correct application's `querier` by `baseapp`'s `handleQueryCustom` method using `queryRoute`: - -+++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/baseapp/abci.go#L388-L418 - -The `Querier` of a module is defined in a file called `keeper/querier.go`, and consists of: - -- A **switch function** `NewQuerier` to route the query to the appropriate `querier` function. This function returns a `querier` function, and is is registered in the [`AppModule`](#application-module-interface) to be used in the application's module manager to initialize the [application's query router](../core/baseapp.md#query-routing). See an example of such a switch from the [nameservice tutorial](https://github.com/cosmos/sdk-tutorials/tree/master/nameservice): - +++ https://github.com/cosmos/sdk-tutorials/blob/86a27321cf89cc637581762e953d0c07f8c78ece/nameservice/x/nameservice/internal/keeper/querier.go#L19-L32 -- **One querier function for each data type defined by the module that needs to be queryable**. Developers write the query processing logic in these functions. This generally involves calling [`keeper`](#keeper)'s methods to query the state and marshalling it to JSON. - ### Keeper [`Keepers`](../building-modules/keeper.md) are the gatekeepers of their module's store(s). To read or write in a module's store, it is mandatory to go through one of its `keeper`'s methods. This is ensured by the [object-capabilities](../core/ocap.md) model of the Cosmos SDK. Only objects that hold the key to a store can access it, and only the module's `keeper` should hold the key(s) to the module's store(s). 
@@ -257,7 +214,7 @@ The [module's Legacy REST interface](../building-modules/module-interfaces.md#le - A `RegisterRoutes` function, which registers each route defined in the file. This function is called from the [main application's interface](#application-interfaces) for each module used within the application. The router used in the SDK is [Gorilla's mux](https://github.com/gorilla/mux). - Custom request type definitions for each query or transaction creation function that needs to be exposed. These custom request types build on the base `request` type of the Cosmos SDK: - +++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc1/types/rest/rest.go#L62-L76 + +++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/types/rest/rest.go#L62-L76 - One handler function for each request that can be routed to the given module. These functions implement the core logic necessary to serve the request. These Legacy API endpoints are present in the SDK for backward compatibility purposes and will be removed in the next release. diff --git a/docs/basics/gas-fees.md b/docs/basics/gas-fees.md index 94cd59a56..270066996 100644 --- a/docs/basics/gas-fees.md +++ b/docs/basics/gas-fees.md @@ -2,7 +2,7 @@ order: 4 --> -# Gas and Fees +# Gas and Fees This document describes the default strategies to handle gas and fees within a Cosmos SDK application. {synopsis} @@ -15,21 +15,21 @@ This document describes the default strategies to handle gas and fees within a C In the Cosmos SDK, `gas` is a special unit that is used to track the consumption of resources during execution. `gas` is typically consumed whenever read and writes are made to the store, but it can also be consumed if expensive computation needs to be done. It serves two main purposes: - Make sure blocks are not consuming too many resources and will be finalized. This is implemented by default in the SDK via the [block gas meter](#block-gas-meter). -- Prevent spam and abuse from end-user. To this end, `gas` consumed during [`message`](../building-modules/messages-and-queries.md#messages) execution is typically priced, resulting in a `fee` (`fees = gas * gas-prices`). `fees` generally have to be paid by the sender of the `message`. Note that the SDK does not enforce `gas` pricing by default, as there may be other ways to prevent spam (e.g. bandwidth schemes). Still, most applications will implement `fee` mechanisms to prevent spam. This is done via the [`AnteHandler`](#antehandler). +- Prevent spam and abuse from end-user. To this end, `gas` consumed during [`message`](../building-modules/messages-and-queries.md#messages) execution is typically priced, resulting in a `fee` (`fees = gas * gas-prices`). `fees` generally have to be paid by the sender of the `message`. Note that the SDK does not enforce `gas` pricing by default, as there may be other ways to prevent spam (e.g. bandwidth schemes). Still, most applications will implement `fee` mechanisms to prevent spam. This is done via the [`AnteHandler`](#antehandler). ## Gas Meter -In the Cosmos SDK, `gas` is a simple alias for `uint64`, and is managed by an object called a *gas meter*. Gas meters implement the `GasMeter` interface +In the Cosmos SDK, `gas` is a simple alias for `uint64`, and is managed by an object called a _gas meter_. 
Gas meters implement the `GasMeter` interface -+++ https://github.com/cosmos/cosmos-sdk/blob/7d7821b9af132b0f6131640195326aa02b6751db/store/types/gas.go#L31-L39 ++++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/store/types/gas.go#L34-L43 where: - `GasConsumed()` returns the amount of gas that was consumed by the gas meter instance. - `GasConsumedToLimit()` returns the amount of gas that was consumed by gas meter instance, or the limit if it is reached. -- `Limit()` returns the limit of the gas meter instance. `0` if the gas meter is infinite. -- `ConsumeGas(amount Gas, descriptor string)` consumes the amount of `gas` provided. If the `gas` overflows, it panics with the `descriptor` message. If the gas meter is not infinite, it panics if `gas` consumed goes above the limit. -- `IsPastLimit()` returns `true` if the amount of gas consumed by the gas meter instance is strictly above the limit, `false` otherwise. +- `Limit()` returns the limit of the gas meter instance. `0` if the gas meter is infinite. +- `ConsumeGas(amount Gas, descriptor string)` consumes the amount of `gas` provided. If the `gas` overflows, it panics with the `descriptor` message. If the gas meter is not infinite, it panics if `gas` consumed goes above the limit. +- `IsPastLimit()` returns `true` if the amount of gas consumed by the gas meter instance is strictly above the limit, `false` otherwise. - `IsOutOfGas()` returns `true` if the amount of gas consumed by the gas meter instance is above or equal to the limit, `false` otherwise. The gas meter is generally held in [`ctx`](../core/context.md), and consuming gas is done with the following pattern: @@ -38,19 +38,19 @@ The gas meter is generally held in [`ctx`](../core/context.md), and consuming ga ctx.GasMeter().ConsumeGas(amount, "description") ``` -By default, the Cosmos SDK makes use of two different gas meters, the [main gas meter](#main-gas-metter[) and the [block gas meter](#block-gas-meter). +By default, the Cosmos SDK makes use of two different gas meters, the [main gas meter](#main-gas-metter[) and the [block gas meter](#block-gas-meter). ### Main Gas Meter -`ctx.GasMeter()` is the main gas meter of the application. The main gas meter is initialized in `BeginBlock` via `setDeliverState`, and then tracks gas consumption during execution sequences that lead to state-transitions, i.e. those originally triggered by [`BeginBlock`](../core/baseapp.md#beginblock), [`DeliverTx`](../core/baseapp.md#delivertx) and [`EndBlock`](../core/baseapp.md#endblock). At the beginning of each `DeliverTx`, the main gas meter **must be set to 0** in the [`AnteHandler`](#antehandler), so that it can track gas consumption per-transaction. +`ctx.GasMeter()` is the main gas meter of the application. The main gas meter is initialized in `BeginBlock` via `setDeliverState`, and then tracks gas consumption during execution sequences that lead to state-transitions, i.e. those originally triggered by [`BeginBlock`](../core/baseapp.md#beginblock), [`DeliverTx`](../core/baseapp.md#delivertx) and [`EndBlock`](../core/baseapp.md#endblock). At the beginning of each `DeliverTx`, the main gas meter **must be set to 0** in the [`AnteHandler`](#antehandler), so that it can track gas consumption per-transaction. 
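As an illustration of the interface above, a module method might charge a flat amount of gas before an expensive computation and then inspect the meter. This is a minimal sketch; the helper name and the 5000 gas figure are made up for the example.

```go
import (
	"fmt"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// chargeForExpensiveWork charges a flat gas amount up front; ConsumeGas
// panics with the descriptor if the transaction's gas limit is exceeded.
func chargeForExpensiveWork(ctx sdk.Context) {
	ctx.GasMeter().ConsumeGas(5000, "expensive computation")

	// GasConsumed and Limit come from the GasMeter interface shown above.
	fmt.Printf("consumed %d of %d gas\n", ctx.GasMeter().GasConsumed(), ctx.GasMeter().Limit())
}
```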
-Gas consumption can be done manually, generally by the module developer in the [`BeginBlocker`, `EndBlocker`](../building-modules/beginblock-endblock.md) or [`Msg` service](../building-modules/msg-services.md), but most of the time it is done automatically whenever there is a read or write to the store. This automatic gas consumption logic is implemented in a special store called [`GasKv`](../core/store.md#gaskv-store). +Gas consumption can be done manually, generally by the module developer in the [`BeginBlocker`, `EndBlocker`](../building-modules/beginblock-endblock.md) or [`Msg` service](../building-modules/msg-services.md), but most of the time it is done automatically whenever there is a read or write to the store. This automatic gas consumption logic is implemented in a special store called [`GasKv`](../core/store.md#gaskv-store). ### Block Gas Meter `ctx.BlockGasMeter()` is the gas meter used to track gas consumption per block and make sure it does not go above a certain limit. A new instance of the `BlockGasMeter` is created each time [`BeginBlock`](../core/baseapp.md#beginblock) is called. The `BlockGasMeter` is finite, and the limit of gas per block is defined in the application's consensus parameters. By default Cosmos SDK applications use the default consensus parameters provided by Tendermint: -+++ https://github.com/tendermint/tendermint/blob/f323c80cb3b78e123ea6238c8e136a30ff749ccc/types/params.go#L65-L72 ++++ https://github.com/tendermint/tendermint/blob/v0.34.0-rc6/types/params.go#L34-L41 When a new [transaction](../core/transactions.md) is being processed via `DeliverTx`, the current value of `BlockGasMeter` is checked to see if it is above the limit. If it is, `DeliverTx` returns immediately. This can happen even with the first transaction in a block, as `BeginBlock` itself can consume gas. If not, the transaction is processed normally. At the end of `DeliverTx`, the gas tracked by `ctx.BlockGasMeter()` is increased by the amount consumed to process the transaction: @@ -63,7 +63,7 @@ ctx.BlockGasMeter().ConsumeGas( ## AnteHandler -The `AnteHandler` is a special `handler` that is run for every transaction during `CheckTx` and `DeliverTx`, before the `handler` of each `message` in the transaction. `AnteHandler`s have a different signature than `handler`s: +The `AnteHandler` is run for every transaction during `CheckTx` and `DeliverTx`, before the `Msg` service of each `Msg` in the transaction. `AnteHandler`s have the following signature: ```go // AnteHandler authenticates transactions, before their internal messages are handled. @@ -71,18 +71,18 @@ The `AnteHandler` is a special `handler` that is run for every transaction durin type AnteHandler func(ctx Context, tx Tx, simulate bool) (newCtx Context, result Result, abort bool) ``` -The `anteHandler` is not implemented in the core SDK but in a module. This gives the possibility to developers to choose which version of `AnteHandler` fits their application's needs. That said, most applications today use the default implementation defined in the [`auth` module](https://github.com/cosmos/cosmos-sdk/tree/master/x/auth). Here is what the `anteHandler` is intended to do in a normal Cosmos SDK application: +The `anteHandler` is not implemented in the core SDK but in a module. This gives the possibility to developers to choose which version of `AnteHandler` fits their application's needs. 
That said, most applications today use the default implementation defined in the [`auth` module](https://github.com/cosmos/cosmos-sdk/tree/master/x/auth). Here is what the `anteHandler` is intended to do in a normal Cosmos SDK application: - Verify that the transaction are of the correct type. Transaction types are defined in the module that implements the `anteHandler`, and they follow the transaction interface: - +++ https://github.com/cosmos/cosmos-sdk/blob/7d7821b9af132b0f6131640195326aa02b6751db/types/tx_msg.go#L33-L41 -This enables developers to play with various types for the transaction of their application. In the default `auth` module, the standard transaction type is `StdTx`: - +++ https://github.com/cosmos/cosmos-sdk/blob/7d7821b9af132b0f6131640195326aa02b6751db/x/auth/types/stdtx.go#L22-L29 -- Verify signatures for each [`message`](../building-modules/messages-and-queries.md#messages) contained in the transaction. Each `message` should be signed by one or multiple sender(s), and these signatures must be verified in the `anteHandler`. -- During `CheckTx`, verify that the gas prices provided with the transaction is greater than the local `min-gas-prices` (as a reminder, gas-prices can be deducted from the following equation: `fees = gas * gas-prices`). `min-gas-prices` is a parameter local to each full-node and used during `CheckTx` to discard transactions that do not provide a minimum amount of fees. This ensure that the mempool cannot be spammed with garbage transactions. -- Verify that the sender of the transaction has enough funds to cover for the `fees`. When the end-user generates a transaction, they must indicate 2 of the 3 following parameters (the third one being implicit): `fees`, `gas` and `gas-prices`. This signals how much they are willing to pay for nodes to execute their transaction. The provided `gas` value is stored in a parameter called `GasWanted` for later use. -- Set `newCtx.GasMeter` to 0, with a limit of `GasWanted`. **This step is extremely important**, as it not only makes sure the transaction cannot consume infinite gas, but also that `ctx.GasMeter` is reset in-between each `DeliverTx` (`ctx` is set to `newCtx` after `anteHandler` is run, and the `anteHandler` is run each time `DeliverTx` is called). + +++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/types/tx_msg.go#L49-L57 + This enables developers to play with various types for the transaction of their application. In the default `auth` module, the default transaction type is `Tx`: + +++ https://github.com/cosmos/cosmos-sdk/blob/v0.40.0-rc3/proto/cosmos/tx/v1beta1/tx.proto#L12-L25 +- Verify signatures for each [`message`](../building-modules/messages-and-queries.md#messages) contained in the transaction. Each `message` should be signed by one or multiple sender(s), and these signatures must be verified in the `anteHandler`. +- During `CheckTx`, verify that the gas prices provided with the transaction is greater than the local `min-gas-prices` (as a reminder, gas-prices can be deducted from the following equation: `fees = gas * gas-prices`). `min-gas-prices` is a parameter local to each full-node and used during `CheckTx` to discard transactions that do not provide a minimum amount of fees. This ensure that the mempool cannot be spammed with garbage transactions. +- Verify that the sender of the transaction has enough funds to cover for the `fees`. 
When the end-user generates a transaction, they must indicate 2 of the 3 following parameters (the third one being implicit): `fees`, `gas` and `gas-prices`. This signals how much they are willing to pay for nodes to execute their transaction. The provided `gas` value is stored in a parameter called `GasWanted` for later use. +- Set `newCtx.GasMeter` to 0, with a limit of `GasWanted`. **This step is extremely important**, as it not only makes sure the transaction cannot consume infinite gas, but also that `ctx.GasMeter` is reset in-between each `DeliverTx` (`ctx` is set to `newCtx` after `anteHandler` is run, and the `anteHandler` is run each time `DeliverTx` is called). -As explained above, the `anteHandler` returns a maximum limit of `gas` the transaction can consume during execution called `GasWanted`. The actual amount consumed in the end is denominated `GasUsed`, and we must therefore have `GasUsed =< GasWanted`. Both `GasWanted` and `GasUsed` are relayed to the underlying consensus engine when [`DeliverTx`](../core/baseapp.md#delivertx) returns. +As explained above, the `anteHandler` returns a maximum limit of `gas` the transaction can consume during execution called `GasWanted`. The actual amount consumed in the end is denominated `GasUsed`, and we must therefore have `GasUsed =< GasWanted`. Both `GasWanted` and `GasUsed` are relayed to the underlying consensus engine when [`DeliverTx`](../core/baseapp.md#delivertx) returns. ## Next {hide} diff --git a/docs/basics/tx-lifecycle.md b/docs/basics/tx-lifecycle.md index e02576554..daed33f82 100644 --- a/docs/basics/tx-lifecycle.md +++ b/docs/basics/tx-lifecycle.md @@ -83,11 +83,13 @@ When `Tx` is received by the application from the underlying consensus engine (e ### ValidateBasic -[`Message`s](../core/transactions.md#messages) are extracted from `Tx` and `ValidateBasic`, a method of the `Msg` interface implemented by the module developer, is run for each one. It should include basic **stateless** sanity checks. For example, if the message is to send coins from one address to another, `ValidateBasic` likely checks for nonempty addresses and a nonnegative coin amount, but does not require knowledge of state such as account balance of an address. +[`Msg`s](../core/transactions.md#messages) are extracted from `Tx` and `ValidateBasic`, a method of the `Msg` interface implemented by the module developer, is run for each one. It should include basic **stateless** sanity checks. For example, if the message is to send coins from one address to another, `ValidateBasic` likely checks for nonempty addresses and a nonnegative coin amount, but does not require knowledge of state such as account balance of an address. ### AnteHandler -The [`AnteHandler`](../basics/gas-fees.md#antehandler), which is technically optional but should be defined for each application, is run. A deep copy of the internal state, `checkState`, is made and the defined `AnteHandler` performs limited checks specified for the transaction type. Using a copy allows the handler to do stateful checks for `Tx` without modifying the last committed state, and revert back to the original if the execution fails. +After the ValidateBasic checks, the `AnteHandler`s are run. Technically, they are optional, but in practice, they are very often present to perform signature verification, gas calculation, fee deduction and other core operations related to blockchain transactions. 
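For a sense of how these checks are composed, the `auth` ante logic is built from decorators that each perform one step and then call the next one. A minimal custom decorator might look like the sketch below (illustrative only; the decorator name is hypothetical and the exact `AnteHandler`/`AnteDecorator` signatures differ between SDK versions).

```go
import (
	sdk "github.com/cosmos/cosmos-sdk/types"
	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)

// RejectEmptyTxDecorator fails early if a transaction carries no messages,
// then hands off to the rest of the chain (signature checks, fee deduction,
// gas meter setup, and so on).
type RejectEmptyTxDecorator struct{}

func (RejectEmptyTxDecorator) AnteHandle(
	ctx sdk.Context, tx sdk.Tx, simulate bool, next sdk.AnteHandler,
) (sdk.Context, error) {
	if len(tx.GetMsgs()) == 0 {
		return ctx, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "transaction contains no messages")
	}
	return next(ctx, tx, simulate)
}

// Decorators are typically assembled into a single AnteHandler with
// sdk.ChainAnteDecorators(RejectEmptyTxDecorator{}, ...).
```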
+ +A copy of the cached context is provided to the `AnteHandler`, which performs limited checks specified for the transaction type. Using a copy allows the AnteHandler to do stateful checks for `Tx` without modifying the last committed state, and revert back to the original if the execution fails. For example, the [`auth`](https://github.com/cosmos/cosmos-sdk/tree/master/x/auth/spec) module `AnteHandler` checks and increments sequence numbers, checks signatures and account numbers, and deducts fees from the first signer of the transaction - all state changes are made using the `checkState`. @@ -198,7 +200,7 @@ Instead of using their `checkState`, full-nodes use `deliverState`: [`runMsgs`](../core/baseapp.md#runtx-and-runmsgs) to fully execute each `Msg` within the transaction. Since the transaction may have messages from different modules, `BaseApp` needs to know which module to find the appropriate handler. This is achieved using `BaseApp`'s `MsgServiceRouter` so that it can be processed by the module's [`Msg` service](../building-modules/msg-services.md). - For legacy `Msg` routing, the `Route` function is called via the [module manager](../building-modules/module-manager.md) to retrieve the route name and find the legacy [`Handler`](../building-modules/msg-services.md#handler-type) within the module. + For legacy `Msg` routing, the `Route` function is called via the [module manager](../building-modules/module-manager.md) to retrieve the route name and find the legacy [`Handler`](../building-modules/msg-services.md#handler-type) within the module. - **`Msg` service:** The `Msg` service, a step up from `AnteHandler`, is responsible for executing each message in the `Tx` and causes state transitions to persist in `deliverTxState`. It is defined From bcb3240d06c81e691ae9bd367e4f0092c1b55061 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?colin=20axn=C3=A9r?= <25233464+colin-axner@users.noreply.github.com> Date: Mon, 30 Nov 2020 16:52:45 +0100 Subject: [PATCH 18/40] Use generated client identifiers (#8034) * add client identifier generation * update proto and start fixing tests * fix ibc tests * fix auth rest test * update spec * fix lint * add parsing tests and fix bugs * fix regexp * add godoc * address @AdityaSripal review suggestions for identifier parsing * address rest of @AdityaSripal's review comments * remove unnecessary comment * typos * fix lint * Apply suggestions from code review Co-authored-by: Aditya * add more heigh tests as per @AdityaSripal suggestion Co-authored-by: Aditya --- proto/ibc/core/client/v1/genesis.proto | 2 + proto/ibc/core/client/v1/tx.proto | 8 +- types/query/query.pb.go | 2 +- x/auth/client/rest/rest_test.go | 1 - x/ibc/core/02-client/genesis.go | 26 +--- x/ibc/core/02-client/keeper/client.go | 13 +- x/ibc/core/02-client/keeper/client_test.go | 115 ++++++++------- x/ibc/core/02-client/keeper/keeper.go | 28 ++++ x/ibc/core/02-client/keeper/keeper_test.go | 6 +- x/ibc/core/02-client/types/client.go | 29 ++++ x/ibc/core/02-client/types/client_test.go | 31 ++++ x/ibc/core/02-client/types/genesis.go | 20 +-- x/ibc/core/02-client/types/genesis.pb.go | 86 ++++++++--- x/ibc/core/02-client/types/genesis_test.go | 10 ++ x/ibc/core/02-client/types/height.go | 2 +- x/ibc/core/02-client/types/height_test.go | 6 +- x/ibc/core/02-client/types/keys.go | 53 +++++++ x/ibc/core/02-client/types/keys_test.go | 53 +++++++ x/ibc/core/02-client/types/msgs.go | 14 +- x/ibc/core/02-client/types/msgs_test.go | 31 ++-- x/ibc/core/02-client/types/tx.pb.go | 137 ++++++------------ 
.../03-connection/keeper/handshake_test.go | 5 +- x/ibc/core/03-connection/types/keys.go | 26 ++-- x/ibc/core/03-connection/types/keys_test.go | 6 +- x/ibc/core/04-channel/types/keys.go | 26 ++-- x/ibc/core/04-channel/types/keys_test.go | 2 + x/ibc/core/24-host/parse.go | 26 ++++ x/ibc/core/24-host/parse_test.go | 47 ++++++ x/ibc/core/genesis_test.go | 3 + x/ibc/core/keeper/msg_server.go | 5 +- x/ibc/core/spec/01_concepts.md | 10 +- x/ibc/core/spec/02_state.md | 3 + x/ibc/core/spec/03_state_transitions.md | 2 +- x/ibc/core/spec/04_messages.md | 41 ++---- .../06-solomachine/client/cli/tx.go | 16 +- .../07-tendermint/client/cli/tx.go | 20 ++- x/ibc/testing/chain.go | 6 +- x/ibc/testing/chain_test.go | 1 + x/ibc/testing/coordinator.go | 2 +- 39 files changed, 583 insertions(+), 337 deletions(-) create mode 100644 x/ibc/core/02-client/types/keys_test.go create mode 100644 x/ibc/core/24-host/parse_test.go diff --git a/proto/ibc/core/client/v1/genesis.proto b/proto/ibc/core/client/v1/genesis.proto index b1c4247c9..06b4bbd06 100644 --- a/proto/ibc/core/client/v1/genesis.proto +++ b/proto/ibc/core/client/v1/genesis.proto @@ -20,4 +20,6 @@ message GenesisState { Params params = 3 [(gogoproto.nullable) = false]; // create localhost on initialization bool create_localhost = 4 [(gogoproto.moretags) = "yaml:\"create_localhost\""]; + // the sequence for the next generated client identifier + uint64 next_client_sequence = 5 [(gogoproto.moretags) = "yaml:\"next_client_sequence\""]; } diff --git a/proto/ibc/core/client/v1/tx.proto b/proto/ibc/core/client/v1/tx.proto index 1019e15a0..a30ec8bbf 100644 --- a/proto/ibc/core/client/v1/tx.proto +++ b/proto/ibc/core/client/v1/tx.proto @@ -27,15 +27,13 @@ message MsgCreateClient { option (gogoproto.equal) = false; option (gogoproto.goproto_getters) = false; - // client unique identifier - string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; // light client state - google.protobuf.Any client_state = 2 [(gogoproto.moretags) = "yaml:\"client_state\""]; + google.protobuf.Any client_state = 1 [(gogoproto.moretags) = "yaml:\"client_state\""]; // consensus state associated with the client that corresponds to a given // height. - google.protobuf.Any consensus_state = 3 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; + google.protobuf.Any consensus_state = 2 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; // signer address - string signer = 4; + string signer = 3; } // MsgCreateClientResponse defines the Msg/CreateClient response type. 
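With `client_id` removed from the message, a client submits only the states and the signer; the chain assigns the identifier when the message is handled. A minimal construction sketch (the wrapper function is illustrative):

```go
import (
	sdk "github.com/cosmos/cosmos-sdk/types"
	clienttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types"
	"github.com/cosmos/cosmos-sdk/x/ibc/core/exported"
)

// buildCreateClientMsg wraps the updated constructor: no client identifier
// is supplied by the caller anymore.
func buildCreateClientMsg(
	clientState exported.ClientState,
	consensusState exported.ConsensusState,
	signer sdk.AccAddress,
) (*clienttypes.MsgCreateClient, error) {
	return clienttypes.NewMsgCreateClient(clientState, consensusState, signer)
}
```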
diff --git a/types/query/query.pb.go b/types/query/query.pb.go index c04a6bde3..266d33700 100644 --- a/types/query/query.pb.go +++ b/types/query/query.pb.go @@ -783,7 +783,7 @@ type Module struct { Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` // module version Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` - //checksum + // checksum Sum string `protobuf:"bytes,3,opt,name=sum,proto3" json:"sum,omitempty"` } diff --git a/x/auth/client/rest/rest_test.go b/x/auth/client/rest/rest_test.go index 1d7cac684..671df87a8 100644 --- a/x/auth/client/rest/rest_test.go +++ b/x/auth/client/rest/rest_test.go @@ -453,7 +453,6 @@ func (s *IntegrationTestSuite) TestLegacyRestErrMessages() { "Successful IBC message", ibcsolomachinecli.NewCreateClientCmd(), []string{ - "21212121212", // dummy client-id "1", // dummy sequence consensusJSON.Name(), // path to consensus json, fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), diff --git a/x/ibc/core/02-client/genesis.go b/x/ibc/core/02-client/genesis.go index ef00930f0..6e77b20e3 100644 --- a/x/ibc/core/02-client/genesis.go +++ b/x/ibc/core/02-client/genesis.go @@ -7,7 +7,6 @@ import ( "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/keeper" "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" - localhosttypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/09-localhost/types" ) // InitGenesis initializes the ibc client submodule's state from a provided genesis @@ -39,29 +38,10 @@ func InitGenesis(ctx sdk.Context, k keeper.Keeper, gs types.GenesisState) { } } - if !gs.CreateLocalhost { - return - } + k.SetNextClientSequence(ctx, gs.NextClientSequence) - // NOTE: return if the localhost client was already imported. The chain-id and - // block height will be overwriten to the correct values during BeginBlock. - if _, found := k.GetClientState(ctx, exported.Localhost); found { - return - } - - // client id is always "localhost" - revision := types.ParseChainID(ctx.ChainID()) - clientState := localhosttypes.NewClientState( - ctx.ChainID(), types.NewHeight(revision, uint64(ctx.BlockHeight())), - ) - - if err := clientState.Validate(); err != nil { - panic(err) - } - - if err := k.CreateClient(ctx, exported.Localhost, clientState, nil); err != nil { - panic(err) - } + // NOTE: localhost creation is specifically disallowed for the time being. + // Issue: https://github.com/cosmos/cosmos-sdk/issues/7871 } // ExportGenesis returns the ibc client submodule's exported genesis. 
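Genesis state now carries the next client sequence as well, so identifier generation picks up where it left off after an import. A sketch of the updated constructor call (the zero values are illustrative):

```go
import (
	clienttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types"
	"github.com/cosmos/cosmos-sdk/x/ibc/core/exported"
)

// exampleGenesis builds a client genesis state with the new trailing
// next_client_sequence argument.
func exampleGenesis() clienttypes.GenesisState {
	return clienttypes.NewGenesisState(
		[]clienttypes.IdentifiedClientState{},      // clients
		clienttypes.ClientsConsensusStates{},       // clients consensus states
		clienttypes.NewParams(exported.Tendermint), // allowed client params
		false,                                      // create_localhost (currently disallowed)
		0,                                          // next_client_sequence
	)
}
```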
diff --git a/x/ibc/core/02-client/keeper/client.go b/x/ibc/core/02-client/keeper/client.go index ddb742563..49d5a04eb 100644 --- a/x/ibc/core/02-client/keeper/client.go +++ b/x/ibc/core/02-client/keeper/client.go @@ -15,20 +15,17 @@ import ( // // CONTRACT: ClientState was constructed correctly from given initial consensusState func (k Keeper) CreateClient( - ctx sdk.Context, clientID string, clientState exported.ClientState, consensusState exported.ConsensusState, -) error { + ctx sdk.Context, clientState exported.ClientState, consensusState exported.ConsensusState, +) (string, error) { params := k.GetParams(ctx) if !params.IsAllowedClient(clientState.ClientType()) { - return sdkerrors.Wrapf( + return "", sdkerrors.Wrapf( types.ErrInvalidClientType, "client state type %s is not registered in the allowlist", clientState.ClientType(), ) } - _, found := k.GetClientState(ctx, clientID) - if found { - return sdkerrors.Wrapf(types.ErrClientExists, "cannot create client with ID %s", clientID) - } + clientID := k.GenerateClientIdentifier(ctx, clientState.ClientType()) // check if consensus state is nil in case the created client is Localhost if consensusState != nil { @@ -46,7 +43,7 @@ func (k Keeper) CreateClient( ) }() - return nil + return clientID, nil } // UpdateClient updates the consensus state and the state root from a provided header. diff --git a/x/ibc/core/02-client/keeper/client_test.go b/x/ibc/core/02-client/keeper/client_test.go index 736a34d1e..3d772761c 100644 --- a/x/ibc/core/02-client/keeper/client_test.go +++ b/x/ibc/core/02-client/keeper/client_test.go @@ -19,36 +19,23 @@ import ( func (suite *KeeperTestSuite) TestCreateClient() { cases := []struct { - msg string - clientID string - expPass bool - expPanic bool + msg string + clientState exported.ClientState + expPass bool }{ - {"success", testClientID, true, false}, - {"client ID exists", testClientID, false, false}, + {"success", ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), true}, + {"client type not supported", localhosttypes.NewClientState(testChainID, clienttypes.NewHeight(0, 1)), false}, } for i, tc := range cases { - tc := tc - i := i - if tc.expPanic { - suite.Require().Panics(func() { - clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - suite.keeper.CreateClient(suite.ctx, tc.clientID, clientState, suite.consensusState) - }, "Msg %d didn't panic: %s", i, tc.msg) - } else { - clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - if tc.expPass { - suite.Require().NotNil(clientState, "valid test case %d failed: %s", i, tc.msg) - } - // If we were able to NewClientState clientstate successfully, try persisting it to state - err := suite.keeper.CreateClient(suite.ctx, tc.clientID, clientState, suite.consensusState) - if tc.expPass { - suite.Require().NoError(err, "valid test case %d failed: %s", i, tc.msg) - } else { - suite.Require().Error(err, "invalid test case %d passed: %s", i, tc.msg) - } + clientID, err := suite.keeper.CreateClient(suite.ctx, tc.clientState, suite.consensusState) + if tc.expPass { + suite.Require().NoError(err, "valid test case 
%d failed: %s", i, tc.msg) + suite.Require().NotNil(clientID, "valid test case %d failed: %s", i, tc.msg) + } else { + suite.Require().Error(err, "invalid test case %d passed: %s", i, tc.msg) + suite.Require().Equal("", clientID, "invalid test case %d passed: %s", i, tc.msg) } } } @@ -72,6 +59,8 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { var ( updateHeader *ibctmtypes.Header clientState *ibctmtypes.ClientState + clientID string + err error ) cases := []struct { @@ -81,7 +70,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { }{ {"valid update", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // store intermediate consensus state to check that trustedHeight does not need to be highest consensus state before header height incrementedClientHeight := testClientHeight.Increment() @@ -89,17 +78,17 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.now.Add(time.Minute), NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, incrementedClientHeight, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, incrementedClientHeight, intermediateConsState) clientState.LatestHeight = incrementedClientHeight - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) updateHeader = createFutureUpdateFn(suite) return err }, true}, {"valid past update", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) suite.Require().NoError(err) height1 := types.NewHeight(0, 1) @@ -109,7 +98,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.past, NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, height1, prevConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, height1, prevConsState) height2 := types.NewHeight(0, 2) @@ -118,7 +107,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.past.Add(time.Minute), NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, height2, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, height2, intermediateConsState) // updateHeader will fill in consensus state between prevConsState and suite.consState // clientState should not be updated @@ -147,7 +136,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { {"valid past update before client was frozen", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) clientState.FrozenHeight = types.NewHeight(0, 
testClientHeight.RevisionHeight-1) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) suite.Require().NoError(err) height1 := types.NewHeight(0, 1) @@ -157,7 +146,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.past, NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, height1, prevConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, height1, prevConsState) // updateHeader will fill in consensus state between prevConsState and suite.consState // clientState should not be updated @@ -166,7 +155,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { }, true}, {"invalid header", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + _, err := suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) suite.Require().NoError(err) updateHeader = createPastUpdateFn(suite) @@ -179,13 +168,14 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { i := i suite.Run(fmt.Sprintf("Case %s", tc.name), func() { suite.SetupTest() + clientID = testClientID // must be explicitly changed err := tc.malleate() suite.Require().NoError(err) suite.ctx = suite.ctx.WithBlockTime(updateHeader.Header.Time.Add(time.Minute)) - err = suite.keeper.UpdateClient(suite.ctx, testClientID, updateHeader) + err = suite.keeper.UpdateClient(suite.ctx, clientID, updateHeader) if tc.expPass { suite.Require().NoError(err, err) @@ -196,10 +186,10 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { NextValidatorsHash: updateHeader.Header.NextValidatorsHash, } - newClientState, found := suite.keeper.GetClientState(suite.ctx, testClientID) + newClientState, found := suite.keeper.GetClientState(suite.ctx, clientID) suite.Require().True(found, "valid test case %d failed: %s", i, tc.name) - consensusState, found := suite.keeper.GetClientConsensusState(suite.ctx, testClientID, updateHeader.GetHeight()) + consensusState, found := suite.keeper.GetClientConsensusState(suite.ctx, clientID, updateHeader.GetHeight()) suite.Require().True(found, "valid test case %d failed: %s", i, tc.name) // Determine if clientState should be updated or not @@ -399,6 +389,11 @@ func (suite *KeeperTestSuite) TestUpgradeClient() { } func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { + var ( + clientID string + err error + ) + altPrivVal := ibctestingmock.NewPV() altPubKey, err := altPrivVal.GetPubKey() suite.Require().NoError(err) @@ -437,12 +432,12 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = bothValsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, 
maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) return err }, @@ -453,22 +448,22 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, altTime, bothValSet, valSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, valSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // store intermediate consensus state to check that trustedHeight does not need to be highest consensus state before header height intermediateConsState := &ibctmtypes.ConsensusState{ Timestamp: suite.now.Add(time.Minute), NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, heightPlus3, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, heightPlus3, intermediateConsState) clientState.LatestHeight = heightPlus3 - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) return err }, @@ -479,22 +474,22 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, altTime, bothValSet, valSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), heightPlus3, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // store trusted consensus state for Header2 intermediateConsState := &ibctmtypes.ConsensusState{ Timestamp: suite.now.Add(time.Minute), NextValidatorsHash: bothValsHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, heightPlus3, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, heightPlus3, intermediateConsState) clientState.LatestHeight = heightPlus3 - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) return err }, @@ -505,12 +500,12 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: 
suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), heightPlus3, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, valSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // intermediate consensus state at height + 3 is not created return err }, @@ -521,12 +516,12 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, altTime, bothValSet, valSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), heightPlus3, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // intermediate consensus state at height + 3 is not created return err }, @@ -543,15 +538,15 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = bothValsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) clientState.FrozenHeight = types.NewHeight(0, 1) - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) return err }, @@ -562,14 +557,14 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), altValSet, bothValSet, altSigners), - ClientId: 
testClientID, + ClientId: clientID, }, func() error { clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) if err != nil { return err } - err = suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) return err }, @@ -580,18 +575,22 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { for i, tc := range testCases { tc := tc i := i + suite.Run(tc.name, func() { - suite.SetupTest() // reset + suite.SetupTest() // reset + clientID = testClientID // must be explicitly changed err := tc.malleate() suite.Require().NoError(err) + tc.misbehaviour.ClientId = clientID + err = suite.keeper.CheckMisbehaviourAndUpdateState(suite.ctx, tc.misbehaviour) if tc.expPass { suite.Require().NoError(err, "valid test case %d failed: %s", i, tc.name) - clientState, found := suite.keeper.GetClientState(suite.ctx, testClientID) + clientState, found := suite.keeper.GetClientState(suite.ctx, clientID) suite.Require().True(found, "valid test case %d failed: %s", i, tc.name) suite.Require().True(clientState.IsFrozen(), "valid test case %d failed: %s", i, tc.name) suite.Require().Equal(tc.misbehaviour.GetHeight(), clientState.GetFrozenHeight(), diff --git a/x/ibc/core/02-client/keeper/keeper.go b/x/ibc/core/02-client/keeper/keeper.go index accf48bb1..18cb4afd6 100644 --- a/x/ibc/core/02-client/keeper/keeper.go +++ b/x/ibc/core/02-client/keeper/keeper.go @@ -50,6 +50,16 @@ func (k Keeper) Logger(ctx sdk.Context) log.Logger { return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) } +// GenerateClientIdentifier returns the next client identifier. +func (k Keeper) GenerateClientIdentifier(ctx sdk.Context, clientType string) string { + nextClientSeq := k.GetNextClientSequence(ctx) + clientID := types.FormatClientIdentifier(clientType, nextClientSeq) + + nextClientSeq++ + k.SetNextClientSequence(ctx, nextClientSeq) + return clientID +} + // GetClientState gets a particular client from the store func (k Keeper) GetClientState(ctx sdk.Context, clientID string) (exported.ClientState, bool) { store := k.ClientStore(ctx, clientID) @@ -87,6 +97,24 @@ func (k Keeper) SetClientConsensusState(ctx sdk.Context, clientID string, height store.Set(host.ConsensusStateKey(height), k.MustMarshalConsensusState(consensusState)) } +// GetNextClientSequence gets the next client sequence from the store. +func (k Keeper) GetNextClientSequence(ctx sdk.Context) uint64 { + store := ctx.KVStore(k.storeKey) + bz := store.Get([]byte(types.KeyNextClientSequence)) + if bz == nil { + panic("next client sequence is nil") + } + + return sdk.BigEndianToUint64(bz) +} + +// SetNextClientSequence sets the next client sequence to the store. +func (k Keeper) SetNextClientSequence(ctx sdk.Context, sequence uint64) { + store := ctx.KVStore(k.storeKey) + bz := sdk.Uint64ToBigEndian(sequence) + store.Set([]byte(types.KeyNextClientSequence), bz) +} + // IterateConsensusStates provides an iterator over all stored consensus states. // objects. For each State object, cb will be called. If the cb returns true, // the iterator will close and stop. 
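The generated identifiers are simply the client type with the stored sequence appended, and they can be parsed back with the helpers added to the `02-client` types package in this change. A small, self-contained sketch ("tendermint" stands in for whatever `ClientType()` returns):

```go
package main

import (
	"fmt"

	clienttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types"
)

func main() {
	for seq := uint64(0); seq < 3; seq++ {
		id := clienttypes.FormatClientIdentifier("tendermint", seq)
		clientType, parsedSeq, err := clienttypes.ParseClientIdentifier(id)
		// Prints: tendermint-0 tendermint 0 <nil>, tendermint-1 tendermint 1 <nil>, ...
		fmt.Println(id, clientType, parsedSeq, err)
	}
}
```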
diff --git a/x/ibc/core/02-client/keeper/keeper_test.go b/x/ibc/core/02-client/keeper/keeper_test.go index 45ff4f674..ec3c0229c 100644 --- a/x/ibc/core/02-client/keeper/keeper_test.go +++ b/x/ibc/core/02-client/keeper/keeper_test.go @@ -30,9 +30,9 @@ const ( testChainID = "gaiahub-0" testChainIDRevision1 = "gaiahub-1" - testClientID = "gaiachain" - testClientID2 = "ethbridge" - testClientID3 = "ethermint" + testClientID = "tendermint-0" + testClientID2 = "tendermint-1" + testClientID3 = "tendermint-2" height = 5 diff --git a/x/ibc/core/02-client/types/client.go b/x/ibc/core/02-client/types/client.go index 305b07cec..1c44d6e2b 100644 --- a/x/ibc/core/02-client/types/client.go +++ b/x/ibc/core/02-client/types/client.go @@ -2,11 +2,15 @@ package types import ( "fmt" + "math" "sort" + "strings" proto "github.com/gogo/protobuf/proto" codectypes "github.com/cosmos/cosmos-sdk/codec/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" ) @@ -80,3 +84,28 @@ func NewConsensusStateWithHeight(height Height, consensusState exported.Consensu func (cswh ConsensusStateWithHeight) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error { return unpacker.UnpackAny(cswh.ConsensusState, new(exported.ConsensusState)) } + +// ValidateClientType validates the client type. It cannot be blank or empty. It must be a valid +// client identifier when used with '0' or the maximum uint64 as the sequence. +func ValidateClientType(clientType string) error { + if strings.TrimSpace(clientType) == "" { + return sdkerrors.Wrap(ErrInvalidClientType, "client type cannot be blank") + } + + smallestPossibleClientID := FormatClientIdentifier(clientType, 0) + largestPossibleClientID := FormatClientIdentifier(clientType, math.MaxUint64) + + // IsValidClientID will check client type format and if the sequence is a uint64 + if !IsValidClientID(smallestPossibleClientID) { + return sdkerrors.Wrap(ErrInvalidClientType, "") + } + + if err := host.ClientIdentifierValidator(smallestPossibleClientID); err != nil { + return sdkerrors.Wrap(err, "client type results in smallest client identifier being invalid") + } + if err := host.ClientIdentifierValidator(largestPossibleClientID); err != nil { + return sdkerrors.Wrap(err, "client type results in largest client identifier being invalid") + } + + return nil +} diff --git a/x/ibc/core/02-client/types/client_test.go b/x/ibc/core/02-client/types/client_test.go index 409ab5305..2dfd3967d 100644 --- a/x/ibc/core/02-client/types/client_test.go +++ b/x/ibc/core/02-client/types/client_test.go @@ -1,6 +1,10 @@ package types_test import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" ibctesting "github.com/cosmos/cosmos-sdk/x/ibc/testing" @@ -54,3 +58,30 @@ func (suite *TypesTestSuite) TestMarshalConsensusStateWithHeight() { }) } } + +func TestValidateClientType(t *testing.T) { + testCases := []struct { + name string + clientType string + expPass bool + }{ + {"valid", "tendermint", true}, + {"valid solomachine", "solomachine-v1", true}, + {"too large", "tenderminttenderminttenderminttenderminttendermintt", false}, + {"too short", "t", false}, + {"blank id", " ", false}, + {"empty id", "", false}, + {"ends with dash", "tendermint-", false}, + } + + for _, tc := range testCases { + + err := types.ValidateClientType(tc.clientType) + + if tc.expPass { + require.NoError(t, 
err, tc.name) + } else { + require.Error(t, err, tc.name) + } + } +} diff --git a/x/ibc/core/02-client/types/genesis.go b/x/ibc/core/02-client/types/genesis.go index 4da2f5e92..f7939c478 100644 --- a/x/ibc/core/02-client/types/genesis.go +++ b/x/ibc/core/02-client/types/genesis.go @@ -67,23 +67,25 @@ func (ccs ClientConsensusStates) UnpackInterfaces(unpacker codectypes.AnyUnpacke // NewGenesisState creates a GenesisState instance. func NewGenesisState( clients []IdentifiedClientState, clientsConsensus ClientsConsensusStates, - params Params, createLocalhost bool, + params Params, createLocalhost bool, nextClientSequence uint64, ) GenesisState { return GenesisState{ - Clients: clients, - ClientsConsensus: clientsConsensus, - Params: params, - CreateLocalhost: createLocalhost, + Clients: clients, + ClientsConsensus: clientsConsensus, + Params: params, + CreateLocalhost: createLocalhost, + NextClientSequence: nextClientSequence, } } // DefaultGenesisState returns the ibc client submodule's default genesis state. func DefaultGenesisState() GenesisState { return GenesisState{ - Clients: []IdentifiedClientState{}, - ClientsConsensus: ClientsConsensusStates{}, - Params: DefaultParams(), - CreateLocalhost: false, + Clients: []IdentifiedClientState{}, + ClientsConsensus: ClientsConsensusStates{}, + Params: DefaultParams(), + CreateLocalhost: false, + NextClientSequence: 0, } } diff --git a/x/ibc/core/02-client/types/genesis.pb.go b/x/ibc/core/02-client/types/genesis.pb.go index 4c72edfe5..becfa1bab 100644 --- a/x/ibc/core/02-client/types/genesis.pb.go +++ b/x/ibc/core/02-client/types/genesis.pb.go @@ -32,6 +32,8 @@ type GenesisState struct { Params Params `protobuf:"bytes,3,opt,name=params,proto3" json:"params"` // create localhost on initialization CreateLocalhost bool `protobuf:"varint,4,opt,name=create_localhost,json=createLocalhost,proto3" json:"create_localhost,omitempty" yaml:"create_localhost"` + // the sequence for the next generated client identifier + NextClientSequence uint64 `protobuf:"varint,5,opt,name=next_client_sequence,json=nextClientSequence,proto3" json:"next_client_sequence,omitempty" yaml:"next_client_sequence"` } func (m *GenesisState) Reset() { *m = GenesisState{} } @@ -95,6 +97,13 @@ func (m *GenesisState) GetCreateLocalhost() bool { return false } +func (m *GenesisState) GetNextClientSequence() uint64 { + if m != nil { + return m.NextClientSequence + } + return 0 +} + func init() { proto.RegisterType((*GenesisState)(nil), "ibc.core.client.v1.GenesisState") } @@ -102,30 +111,32 @@ func init() { func init() { proto.RegisterFile("ibc/core/client/v1/genesis.proto", fileDescriptor_bcd0c0f1f2e6a91a) } var fileDescriptor_bcd0c0f1f2e6a91a = []byte{ - // 362 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0xc1, 0x4e, 0xea, 0x40, - 0x14, 0x86, 0xdb, 0x0b, 0xe1, 0xde, 0x94, 0x9b, 0x88, 0x8d, 0xd1, 0x06, 0x93, 0xb6, 0xe9, 0x0a, - 0x17, 0xcc, 0x08, 0x2e, 0x34, 0x2c, 0x4b, 0xa2, 0x31, 0x71, 0xa1, 0x75, 0xe7, 0x86, 0xb4, 0xc3, - 0x58, 0x26, 0xb6, 0x1d, 0xd2, 0x33, 0x10, 0x79, 0x05, 0x57, 0xc6, 0xc7, 0xf0, 0x49, 0x58, 0xb2, - 0x74, 0x85, 0x06, 0xde, 0x80, 0x27, 0x30, 0xed, 0x14, 0x17, 0x80, 0xab, 0x39, 0xf9, 0xe7, 0xff, - 0xfe, 0xff, 0x24, 0x47, 0xb3, 0x59, 0x40, 0x30, 0xe1, 0x29, 0xc5, 0x24, 0x62, 0x34, 0x11, 0x78, - 0xdc, 0xc2, 0x21, 0x4d, 0x28, 0x30, 0x40, 0xc3, 0x94, 0x0b, 0xae, 0xeb, 0x2c, 0x20, 0x28, 0x73, - 0x20, 0xe9, 0x40, 0xe3, 0x56, 0xdd, 0xda, 0x41, 0x15, 0xbf, 0x39, 0x54, 0x3f, 0x08, 0x79, 0xc8, - 0xf3, 0x11, 
0x67, 0x93, 0x54, 0x9d, 0x97, 0x92, 0xf6, 0xff, 0x4a, 0x86, 0xdf, 0x0b, 0x5f, 0x50, - 0x9d, 0x68, 0x7f, 0x25, 0x06, 0x86, 0x6a, 0x97, 0x1a, 0xd5, 0xf6, 0x09, 0xda, 0x6e, 0x43, 0xd7, - 0x7d, 0x9a, 0x08, 0xf6, 0xc8, 0x68, 0xbf, 0x9b, 0x6b, 0x39, 0xeb, 0x9a, 0xd3, 0xb9, 0xa5, 0xbc, - 0x7f, 0x5a, 0x87, 0x3b, 0xbf, 0xc1, 0x5b, 0x27, 0xeb, 0x6f, 0xaa, 0xb6, 0x5f, 0xcc, 0x3d, 0xc2, - 0x13, 0xa0, 0x09, 0x8c, 0xc0, 0xf8, 0xf3, 0x7b, 0x9f, 0x8c, 0xe9, 0xae, 0xad, 0x32, 0xcf, 0xed, - 0x64, 0x7d, 0xab, 0xb9, 0x65, 0x4c, 0xfc, 0x38, 0xea, 0x38, 0x5b, 0x89, 0x4e, 0xb6, 0x8b, 0x44, - 0x61, 0x83, 0xf5, 0x6a, 0x64, 0x43, 0xd7, 0x2f, 0xb4, 0xca, 0xd0, 0x4f, 0xfd, 0x18, 0x8c, 0x92, - 0xad, 0x36, 0xaa, 0xed, 0xfa, 0xae, 0x45, 0x6e, 0x73, 0x87, 0x5b, 0xce, 0x9a, 0xbd, 0xc2, 0xaf, - 0x5f, 0x6a, 0x35, 0x92, 0x52, 0x5f, 0xd0, 0x5e, 0xc4, 0x89, 0x1f, 0x0d, 0x38, 0x08, 0xa3, 0x6c, - 0xab, 0x8d, 0x7f, 0xee, 0xf1, 0x6a, 0x6e, 0x1d, 0x15, 0xdb, 0x6d, 0x38, 0x1c, 0x6f, 0x4f, 0x4a, - 0x37, 0x6b, 0xc5, 0xbd, 0x9b, 0x2e, 0x4c, 0x75, 0xb6, 0x30, 0xd5, 0xaf, 0x85, 0xa9, 0xbe, 0x2e, - 0x4d, 0x65, 0xb6, 0x34, 0x95, 0x8f, 0xa5, 0xa9, 0x3c, 0x9c, 0x87, 0x4c, 0x0c, 0x46, 0x01, 0x22, - 0x3c, 0xc6, 0x84, 0x43, 0xcc, 0xa1, 0x78, 0x9a, 0xd0, 0x7f, 0xc2, 0xcf, 0xf8, 0xe7, 0xf8, 0xa7, - 0xed, 0x66, 0x71, 0x7f, 0x31, 0x19, 0x52, 0x08, 0x2a, 0xf9, 0x99, 0xcf, 0xbe, 0x03, 0x00, 0x00, - 0xff, 0xff, 0x8d, 0xa4, 0x74, 0xd6, 0x55, 0x02, 0x00, 0x00, + // 400 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xc1, 0x8e, 0x9a, 0x40, + 0x1c, 0xc6, 0x99, 0x6a, 0x6d, 0x83, 0x4d, 0x6a, 0x27, 0xa6, 0x25, 0x9a, 0x00, 0xe1, 0x44, 0x0f, + 0x32, 0xd5, 0x1e, 0xda, 0x78, 0xc4, 0xa4, 0x4d, 0x93, 0x1e, 0x2a, 0xbd, 0xf5, 0x42, 0x60, 0x9c, + 0x22, 0x29, 0x30, 0x96, 0x19, 0x8d, 0xbe, 0xc5, 0x66, 0x1f, 0x63, 0x9f, 0xc4, 0xa3, 0xc7, 0xbd, + 0x2c, 0xbb, 0xd1, 0x37, 0xf0, 0x09, 0x36, 0x30, 0xe3, 0x1e, 0x94, 0x3d, 0xf1, 0xcf, 0x37, 0xbf, + 0xef, 0xfb, 0xfe, 0x21, 0x7f, 0xd5, 0x8c, 0x43, 0x8c, 0x30, 0xcd, 0x09, 0xc2, 0x49, 0x4c, 0x32, + 0x8e, 0x56, 0x43, 0x14, 0x91, 0x8c, 0xb0, 0x98, 0x39, 0x8b, 0x9c, 0x72, 0x0a, 0x61, 0x1c, 0x62, + 0xa7, 0x24, 0x1c, 0x41, 0x38, 0xab, 0x61, 0xcf, 0xa8, 0x71, 0xc9, 0xd7, 0xca, 0xd4, 0xeb, 0x46, + 0x34, 0xa2, 0xd5, 0x88, 0xca, 0x49, 0xa8, 0xd6, 0x5d, 0x43, 0x7d, 0xf3, 0x5d, 0x84, 0xff, 0xe6, + 0x01, 0x27, 0x10, 0xab, 0xaf, 0x84, 0x8d, 0x69, 0xc0, 0x6c, 0xd8, 0xed, 0xd1, 0x47, 0xe7, 0xb2, + 0xcd, 0xf9, 0x31, 0x23, 0x19, 0x8f, 0xff, 0xc6, 0x64, 0x36, 0xa9, 0xb4, 0xca, 0xeb, 0xea, 0xdb, + 0xc2, 0x50, 0x6e, 0xee, 0x8d, 0xf7, 0xb5, 0xcf, 0xcc, 0x3b, 0x25, 0xc3, 0x6b, 0xa0, 0xbe, 0x93, + 0xb3, 0x8f, 0x69, 0xc6, 0x48, 0xc6, 0x96, 0x4c, 0x7b, 0xf1, 0x7c, 0x9f, 0x88, 0x99, 0x9c, 0x50, + 0x91, 0xe7, 0x8e, 0xcb, 0xbe, 0x63, 0x61, 0x68, 0x9b, 0x20, 0x4d, 0xc6, 0xd6, 0x45, 0xa2, 0x55, + 0xee, 0x22, 0xac, 0xec, 0xcc, 0xeb, 0x75, 0xf0, 0x99, 0x0e, 0xbf, 0xaa, 0xad, 0x45, 0x90, 0x07, + 0x29, 0xd3, 0x1a, 0x26, 0xb0, 0xdb, 0xa3, 0x5e, 0xdd, 0x22, 0xbf, 0x2a, 0xc2, 0x6d, 0x96, 0xcd, + 0x9e, 0xe4, 0xe1, 0x37, 0xb5, 0x83, 0x73, 0x12, 0x70, 0xe2, 0x27, 0x14, 0x07, 0xc9, 0x9c, 0x32, + 0xae, 0x35, 0x4d, 0x60, 0xbf, 0x76, 0xfb, 0xc7, 0xc2, 0xf8, 0x20, 0xb7, 0x3b, 0x23, 0x2c, 0xef, + 0xad, 0x90, 0x7e, 0x9e, 0x14, 0x38, 0x55, 0xbb, 0x19, 0x59, 0x73, 0x5f, 0xd4, 0xf9, 0x8c, 0xfc, + 0x5f, 0x92, 0x0c, 0x13, 0xed, 0xa5, 0x09, 0xec, 0xa6, 0x6b, 0x1c, 0x0b, 0xa3, 0x2f, 0xb2, 0xea, + 0x28, 0xcb, 0x83, 0xa5, 0x2c, 0x7f, 0xb8, 0x14, 0xdd, 0xe9, 0x76, 0xaf, 0x83, 0xdd, 0x5e, 0x07, + 0x0f, 0x7b, 0x1d, 0x5c, 
0x1d, 0x74, 0x65, 0x77, 0xd0, 0x95, 0xdb, 0x83, 0xae, 0xfc, 0xf9, 0x12, + 0xc5, 0x7c, 0xbe, 0x0c, 0x1d, 0x4c, 0x53, 0x84, 0x29, 0x4b, 0x29, 0x93, 0x9f, 0x01, 0x9b, 0xfd, + 0x43, 0x6b, 0xf4, 0x74, 0x4f, 0x9f, 0x46, 0x03, 0x79, 0x52, 0x7c, 0xb3, 0x20, 0x2c, 0x6c, 0x55, + 0x97, 0xf3, 0xf9, 0x31, 0x00, 0x00, 0xff, 0xff, 0x7c, 0xcd, 0xe7, 0x85, 0xa8, 0x02, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -148,6 +159,11 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.NextClientSequence != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.NextClientSequence)) + i-- + dAtA[i] = 0x28 + } if m.CreateLocalhost { i-- if m.CreateLocalhost { @@ -233,6 +249,9 @@ func (m *GenesisState) Size() (n int) { if m.CreateLocalhost { n += 2 } + if m.NextClientSequence != 0 { + n += 1 + sovGenesis(uint64(m.NextClientSequence)) + } return n } @@ -392,6 +411,25 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { } } m.CreateLocalhost = bool(v != 0) + case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field NextClientSequence", wireType) + } + m.NextClientSequence = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.NextClientSequence |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipGenesis(dAtA[iNdEx:]) diff --git a/x/ibc/core/02-client/types/genesis_test.go b/x/ibc/core/02-client/types/genesis_test.go index 7d131bae6..81eff7833 100644 --- a/x/ibc/core/02-client/types/genesis_test.go +++ b/x/ibc/core/02-client/types/genesis_test.go @@ -90,6 +90,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint, exported.Localhost), false, + 0, ), expPass: true, }, @@ -119,6 +120,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -134,6 +136,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { nil, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -163,6 +166,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -192,6 +196,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -221,6 +226,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -250,6 +256,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Solomachine), false, + 0, ), expPass: false, }, @@ -279,6 +286,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(" "), false, + 0, ), expPass: false, }, @@ -308,6 +316,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(" "), true, + 0, ), expPass: false, }, @@ -337,6 +346,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), true, + 0, ), expPass: false, }, diff --git a/x/ibc/core/02-client/types/height.go b/x/ibc/core/02-client/types/height.go index d507ddea8..9ac6fec75 100644 --- a/x/ibc/core/02-client/types/height.go +++ b/x/ibc/core/02-client/types/height.go @@ -17,7 +17,7 @@ var _ exported.Height = (*Height)(nil) // IsRevisionFormat checks if a chainID is in the format required for parsing revisions // The 
chainID must be in the form: `{chainID}-{revision} // 24-host may enforce stricter checks on chainID -var IsRevisionFormat = regexp.MustCompile(`^.+[^-]-{1}[1-9][0-9]*$`).MatchString +var IsRevisionFormat = regexp.MustCompile(`^.*[^-]-{1}[1-9][0-9]*$`).MatchString // ZeroHeight is a helper function which returns an uninitialized height. func ZeroHeight() Height { diff --git a/x/ibc/core/02-client/types/height_test.go b/x/ibc/core/02-client/types/height_test.go index f2615c8c1..a455b7f58 100644 --- a/x/ibc/core/02-client/types/height_test.go +++ b/x/ibc/core/02-client/types/height_test.go @@ -104,6 +104,7 @@ func TestParseChainID(t *testing.T) { formatted bool }{ {"gaiamainnet-3", 3, true}, + {"a-1", 1, true}, {"gaia-mainnet-40", 40, true}, {"gaiamainnet-3-39", 39, true}, {"gaiamainnet--", 0, false}, @@ -111,10 +112,13 @@ func TestParseChainID(t *testing.T) { {"gaiamainnet--4", 0, false}, {"gaiamainnet-3.4", 0, false}, {"gaiamainnet", 0, false}, + {"a--1", 0, false}, + {"-1", 0, false}, + {"--1", 0, false}, } for i, tc := range cases { - require.Equal(t, tc.formatted, types.IsRevisionFormat(tc.chainID), "case %d does not match expected format", i) + require.Equal(t, tc.formatted, types.IsRevisionFormat(tc.chainID), "id %s does not match expected format", tc.chainID) revision := types.ParseChainID(tc.chainID) require.Equal(t, tc.revision, revision, "case %d returns incorrect revision", i) diff --git a/x/ibc/core/02-client/types/keys.go b/x/ibc/core/02-client/types/keys.go index f36039333..321f5e3ff 100644 --- a/x/ibc/core/02-client/types/keys.go +++ b/x/ibc/core/02-client/types/keys.go @@ -1,5 +1,15 @@ package types +import ( + "fmt" + "regexp" + "strconv" + "strings" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" +) + const ( // SubModuleName defines the IBC client name SubModuleName string = "client" @@ -9,4 +19,47 @@ const ( // QuerierRoute is the querier route for IBC client QuerierRoute string = SubModuleName + + // KeyNextClientSequence is the key used to store the next client sequence in + // the keeper. + KeyNextClientSequence = "nextClientSequence" ) + +// FormatClientIdentifier returns the client identifier with the sequence appended. +// This is a SDK specific format not enforced by IBC protocol. +func FormatClientIdentifier(clientType string, sequence uint64) string { + return fmt.Sprintf("%s-%d", clientType, sequence) +} + +// IsClientIDFormat checks if a clientID is in the format required on the SDK for +// parsing client identifiers. The client identifier must be in the form: `{client-type}-{N} +var IsClientIDFormat = regexp.MustCompile(`^.*[^-]-[0-9]{1,20}$`).MatchString + +// IsValidClientID checks if the clientID is valid and can be parsed into the client +// identifier format. +func IsValidClientID(clientID string) bool { + _, _, err := ParseClientIdentifier(clientID) + return err == nil +} + +// ParseClientIdentifier parses the client type and sequence from the client identifier. 
+func ParseClientIdentifier(clientID string) (string, uint64, error) { + if !IsClientIDFormat(clientID) { + return "", 0, sdkerrors.Wrapf(host.ErrInvalidID, "invalid client identifier %s is not in format: `{client-type}-{N}`", clientID) + } + + splitStr := strings.Split(clientID, "-") + lastIndex := len(splitStr) - 1 + + clientType := strings.Join(splitStr[:lastIndex], "-") + if strings.TrimSpace(clientType) == "" { + return "", 0, sdkerrors.Wrap(host.ErrInvalidID, "client identifier must be in format: `{client-type}-{N}` and client type cannot be blank") + } + + sequence, err := strconv.ParseUint(splitStr[lastIndex], 10, 64) + if err != nil { + return "", 0, sdkerrors.Wrap(err, "failed to parse client identifier sequence") + } + + return clientType, sequence, nil +} diff --git a/x/ibc/core/02-client/types/keys_test.go b/x/ibc/core/02-client/types/keys_test.go new file mode 100644 index 000000000..dbe56657a --- /dev/null +++ b/x/ibc/core/02-client/types/keys_test.go @@ -0,0 +1,53 @@ +package types_test + +import ( + "math" + "testing" + + "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" + "github.com/stretchr/testify/require" +) + +// tests ParseClientIdentifier and IsValidClientID +func TestParseClientIdentifier(t *testing.T) { + testCases := []struct { + name string + clientID string + clientType string + expSeq uint64 + expPass bool + }{ + {"valid 0", "tendermint-0", "tendermint", 0, true}, + {"valid 1", "tendermint-1", "tendermint", 1, true}, + {"valid solemachine", "solomachine-v1-1", "solomachine-v1", 1, true}, + {"valid large sequence", types.FormatClientIdentifier("tendermint", math.MaxUint64), "tendermint", math.MaxUint64, true}, + {"valid short client type", "t-0", "t", 0, true}, + // one above uint64 max + {"invalid uint64", "tendermint-18446744073709551616", "tendermint", 0, false}, + // uint64 == 20 characters + {"invalid large sequence", "tendermint-2345682193567182931243", "tendermint", 0, false}, + {"missing dash", "tendermint0", "tendermint", 0, false}, + {"blank id", " ", " ", 0, false}, + {"empty id", "", "", 0, false}, + {"negative sequence", "tendermint--1", "tendermint", 0, false}, + {"invalid format", "tendermint-tm", "tendermint", 0, false}, + {"empty clientype", " -100", "tendermint", 0, false}, + } + + for _, tc := range testCases { + + clientType, seq, err := types.ParseClientIdentifier(tc.clientID) + valid := types.IsValidClientID(tc.clientID) + require.Equal(t, tc.expSeq, seq, tc.clientID) + + if tc.expPass { + require.NoError(t, err, tc.name) + require.True(t, valid) + require.Equal(t, tc.clientType, clientType) + } else { + require.Error(t, err, tc.name, tc.clientID) + require.False(t, valid) + require.Equal(t, "", clientType) + } + } +} diff --git a/x/ibc/core/02-client/types/msgs.go b/x/ibc/core/02-client/types/msgs.go index d1d084dda..1e884123d 100644 --- a/x/ibc/core/02-client/types/msgs.go +++ b/x/ibc/core/02-client/types/msgs.go @@ -31,7 +31,7 @@ var ( // NewMsgCreateClient creates a new MsgCreateClient instance //nolint:interfacer func NewMsgCreateClient( - id string, clientState exported.ClientState, consensusState exported.ConsensusState, signer sdk.AccAddress, + clientState exported.ClientState, consensusState exported.ConsensusState, signer sdk.AccAddress, ) (*MsgCreateClient, error) { anyClientState, err := PackClientState(clientState) @@ -45,7 +45,6 @@ func NewMsgCreateClient( } return &MsgCreateClient{ - ClientId: id, ClientState: anyClientState, ConsensusState: anyConsensusState, Signer: signer.String(), @@ -75,17 +74,20 @@ func (msg 
MsgCreateClient) ValidateBasic() error { if err := clientState.Validate(); err != nil { return err } - if clientState.ClientType() == exported.Localhost || msg.ClientId == exported.Localhost { + if clientState.ClientType() == exported.Localhost { return sdkerrors.Wrap(ErrInvalidClient, "localhost client can only be created on chain initialization") } consensusState, err := UnpackConsensusState(msg.ConsensusState) if err != nil { return err } - if err := consensusState.ValidateBasic(); err != nil { - return err + if clientState.ClientType() != consensusState.ClientType() { + return sdkerrors.Wrap(ErrInvalidClientType, "client type for client state and consensus state do not match") } - return host.ClientIdentifierValidator(msg.ClientId) + if err := ValidateClientType(clientState.ClientType()); err != nil { + return sdkerrors.Wrap(err, "client type does not meet naming constraints") + } + return consensusState.ValidateBasic() } // GetSignBytes implements sdk.Msg. The function will panic since it is used diff --git a/x/ibc/core/02-client/types/msgs_test.go b/x/ibc/core/02-client/types/msgs_test.go index 7d2da65d2..e42725bae 100644 --- a/x/ibc/core/02-client/types/msgs_test.go +++ b/x/ibc/core/02-client/types/msgs_test.go @@ -12,7 +12,6 @@ import ( "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" solomachinetypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/06-solomachine/types" ibctmtypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/07-tendermint/types" - localhosttypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/09-localhost/types" ibctesting "github.com/cosmos/cosmos-sdk/x/ibc/testing" ) @@ -50,14 +49,14 @@ func (suite *TypesTestSuite) TestMarshalMsgCreateClient() { { "solo machine client", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, }, { "tendermint client", func() { tendermintClient := ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - msg, err = types.NewMsgCreateClient("tendermint", tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, }, @@ -98,18 +97,11 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { malleate func() expPass bool }{ - { - "invalid client-id", - func() { - msg.ClientId = "" - }, - false, - }, { "valid - tendermint client", func() { tendermintClient := ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - msg, err = types.NewMsgCreateClient("tendermint", tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(tendermintClient, 
suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, true, @@ -117,7 +109,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { { "invalid tendermint client", func() { - msg, err = types.NewMsgCreateClient("tendermint", &ibctmtypes.ClientState{}, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(&ibctmtypes.ClientState{}, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, @@ -133,7 +125,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "failed to unpack consensus state", func() { tendermintClient := ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - msg, err = types.NewMsgCreateClient("tendermint", tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) msg.ConsensusState = nil }, @@ -150,7 +142,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "valid - solomachine client", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, true, @@ -159,7 +151,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "invalid solomachine client", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, &solomachinetypes.ClientState{}, soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(&solomachinetypes.ClientState{}, soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, @@ -168,16 +160,17 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "invalid solomachine consensus state", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, soloMachine.ClientState(), &solomachinetypes.ConsensusState{}, suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(soloMachine.ClientState(), &solomachinetypes.ConsensusState{}, suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, }, { - "unsupported - localhost client", + "invalid - client state and consensus state client types do not match", func() { - localhostClient := localhosttypes.NewClientState(suite.chainA.ChainID, types.NewHeight(0, uint64(suite.chainA.LastHeader.Header.Height))) - msg, err = types.NewMsgCreateClient("localhost", localhostClient, suite.chainA.LastHeader.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + tendermintClient := 
ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) + soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) + msg, err = types.NewMsgCreateClient(tendermintClient, soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, diff --git a/x/ibc/core/02-client/types/tx.pb.go b/x/ibc/core/02-client/types/tx.pb.go index 2a1a6b888..b0bf22d0c 100644 --- a/x/ibc/core/02-client/types/tx.pb.go +++ b/x/ibc/core/02-client/types/tx.pb.go @@ -31,15 +31,13 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // MsgCreateClient defines a message to create an IBC client type MsgCreateClient struct { - // client unique identifier - ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty" yaml:"client_id"` // light client state - ClientState *types.Any `protobuf:"bytes,2,opt,name=client_state,json=clientState,proto3" json:"client_state,omitempty" yaml:"client_state"` + ClientState *types.Any `protobuf:"bytes,1,opt,name=client_state,json=clientState,proto3" json:"client_state,omitempty" yaml:"client_state"` // consensus state associated with the client that corresponds to a given // height. - ConsensusState *types.Any `protobuf:"bytes,3,opt,name=consensus_state,json=consensusState,proto3" json:"consensus_state,omitempty" yaml:"consensus_state"` + ConsensusState *types.Any `protobuf:"bytes,2,opt,name=consensus_state,json=consensusState,proto3" json:"consensus_state,omitempty" yaml:"consensus_state"` // signer address - Signer string `protobuf:"bytes,4,opt,name=signer,proto3" json:"signer,omitempty"` + Signer string `protobuf:"bytes,3,opt,name=signer,proto3" json:"signer,omitempty"` } func (m *MsgCreateClient) Reset() { *m = MsgCreateClient{} } @@ -374,45 +372,45 @@ func init() { func init() { proto.RegisterFile("ibc/core/client/v1/tx.proto", fileDescriptor_cb5dc4651eb49a04) } var fileDescriptor_cb5dc4651eb49a04 = []byte{ - // 598 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x55, 0x3f, 0x6f, 0xd3, 0x4e, - 0x18, 0x8e, 0x9b, 0xdf, 0x2f, 0x6a, 0xaf, 0x81, 0x56, 0x26, 0xb4, 0xa9, 0xab, 0xda, 0x95, 0xe9, - 0x10, 0x44, 0xeb, 0x23, 0x61, 0x00, 0x75, 0x23, 0x9d, 0x18, 0x22, 0x51, 0x57, 0x0c, 0xb0, 0x04, - 0xff, 0xb9, 0x5e, 0xac, 0x26, 0xbe, 0xc8, 0x67, 0x47, 0xcd, 0x37, 0x60, 0x44, 0x82, 0x0f, 0x50, - 0x31, 0xf0, 0x59, 0x18, 0x3b, 0x30, 0x30, 0x45, 0x28, 0x59, 0x98, 0xf3, 0x09, 0x90, 0xef, 0x1c, - 0xcb, 0x76, 0xed, 0x28, 0x82, 0x91, 0xc9, 0x7e, 0xef, 0x7d, 0xee, 0x79, 0x9e, 0x7b, 0xdf, 0xf7, - 0x6c, 0xb0, 0xef, 0x98, 0x16, 0xb4, 0x88, 0x87, 0xa0, 0xd5, 0x77, 0x90, 0xeb, 0xc3, 0x51, 0x13, - 0xfa, 0xd7, 0xda, 0xd0, 0x23, 0x3e, 0x11, 0x45, 0xc7, 0xb4, 0xb4, 0x30, 0xa9, 0xf1, 0xa4, 0x36, - 0x6a, 0x4a, 0x35, 0x4c, 0x30, 0x61, 0x69, 0x18, 0xbe, 0x71, 0xa4, 0xb4, 0x87, 0x09, 0xc1, 0x7d, - 0x04, 0x59, 0x64, 0x06, 0x97, 0xd0, 0x70, 0xc7, 0x51, 0x4a, 0xc9, 0x51, 0x88, 0xe8, 0x18, 0x40, - 0xfd, 0xb4, 0x06, 0xb6, 0x3a, 0x14, 0x9f, 0x79, 0xc8, 0xf0, 0xd1, 0x19, 0xcb, 0x88, 0x4d, 0xb0, - 0xc1, 0x31, 0x5d, 0xc7, 0xae, 0x0b, 0x87, 0x42, 0x63, 0xa3, 0x5d, 0x9b, 0x4f, 0x94, 0xed, 0xb1, - 0x31, 0xe8, 0x9f, 0xaa, 0x71, 0x4a, 0xd5, 0xd7, 0xf9, 0xfb, 0x2b, 0x5b, 0x7c, 0x0d, 0xaa, 0xd1, - 0x3a, 0xf5, 0x0d, 0x1f, 0xd5, 0xd7, 0x0e, 
0x85, 0xc6, 0x66, 0xab, 0xa6, 0x71, 0x67, 0xda, 0xc2, - 0x99, 0xf6, 0xd2, 0x1d, 0xb7, 0x77, 0xe7, 0x13, 0xe5, 0x41, 0x8a, 0x8b, 0xed, 0x51, 0xf5, 0x4d, - 0x1e, 0x5e, 0x84, 0x91, 0xf8, 0x16, 0x6c, 0x59, 0xc4, 0xa5, 0xc8, 0xa5, 0x01, 0x8d, 0x48, 0xcb, - 0x4b, 0x48, 0xa5, 0xf9, 0x44, 0xd9, 0x89, 0x48, 0xd3, 0xdb, 0x54, 0xfd, 0x7e, 0xbc, 0xc2, 0xa9, - 0x77, 0x40, 0x85, 0x3a, 0xd8, 0x45, 0x5e, 0xfd, 0xbf, 0xf0, 0x70, 0x7a, 0x14, 0x9d, 0xae, 0x7f, - 0xb8, 0x51, 0x4a, 0xbf, 0x6e, 0x94, 0x92, 0xba, 0x07, 0x76, 0x33, 0x45, 0xd1, 0x11, 0x1d, 0x86, - 0x2c, 0xea, 0x67, 0x81, 0x15, 0xec, 0xcd, 0xd0, 0xfe, 0xab, 0x82, 0x1d, 0x83, 0x4a, 0x0f, 0x19, - 0x36, 0xf2, 0x96, 0x95, 0x4a, 0x8f, 0x30, 0x09, 0xc7, 0xe5, 0xa5, 0x8e, 0x93, 0xae, 0x62, 0xc7, - 0xdf, 0xcb, 0x60, 0x9b, 0xe5, 0xb0, 0x67, 0xd8, 0xff, 0x4a, 0x8f, 0xcf, 0x41, 0x6d, 0xe8, 0x11, - 0x72, 0xd9, 0x0d, 0xf8, 0xb1, 0xbb, 0x5c, 0x97, 0x75, 0xbc, 0xda, 0x56, 0xe6, 0x13, 0x65, 0x9f, - 0x33, 0xe5, 0xa1, 0x54, 0x5d, 0x64, 0xcb, 0xe9, 0x92, 0x5d, 0x81, 0x83, 0x0c, 0x38, 0xe3, 0xfd, - 0x7f, 0xc6, 0xdd, 0x98, 0x4f, 0x94, 0xa3, 0x5c, 0xee, 0xac, 0x67, 0x29, 0x25, 0x52, 0x34, 0xa3, - 0x95, 0x82, 0x8e, 0x4b, 0xa0, 0x9e, 0xed, 0x6a, 0xdc, 0xf2, 0xaf, 0x02, 0x78, 0xd8, 0xa1, 0xf8, - 0x22, 0x30, 0x07, 0x8e, 0xdf, 0x71, 0xa8, 0x89, 0x7a, 0xc6, 0xc8, 0x21, 0x81, 0xf7, 0x27, 0x7d, - 0x7f, 0x01, 0xaa, 0x83, 0x04, 0xc5, 0xd2, 0x81, 0x4d, 0x21, 0x57, 0x18, 0x5b, 0x05, 0x1c, 0xe4, - 0xfa, 0x5c, 0x9c, 0xa4, 0xf5, 0xa5, 0x0c, 0xca, 0x1d, 0x8a, 0xc5, 0xf7, 0xa0, 0x9a, 0xfa, 0x46, - 0x3d, 0xd2, 0xee, 0x7e, 0x1e, 0xb5, 0xcc, 0x9d, 0x95, 0x9e, 0xac, 0x00, 0x5a, 0x28, 0x85, 0x0a, - 0xa9, 0x4b, 0x5d, 0xa4, 0x90, 0x04, 0x15, 0x2a, 0xe4, 0x5d, 0x44, 0xd1, 0x02, 0xf7, 0xd2, 0x13, - 0x75, 0x54, 0xb8, 0x3b, 0x81, 0x92, 0x8e, 0x57, 0x41, 0xc5, 0x22, 0x1e, 0x10, 0x73, 0xda, 0xfe, - 0xb8, 0x80, 0xe3, 0x2e, 0x54, 0x6a, 0xae, 0x0c, 0x5d, 0x68, 0xb6, 0xcf, 0xbf, 0x4d, 0x65, 0xe1, - 0x76, 0x2a, 0x0b, 0x3f, 0xa7, 0xb2, 0xf0, 0x71, 0x26, 0x97, 0x6e, 0x67, 0x72, 0xe9, 0xc7, 0x4c, - 0x2e, 0xbd, 0x7b, 0x8e, 0x1d, 0xbf, 0x17, 0x98, 0x9a, 0x45, 0x06, 0xd0, 0x22, 0x74, 0x40, 0x68, - 0xf4, 0x38, 0xa1, 0xf6, 0x15, 0xbc, 0x86, 0xf1, 0xef, 0xe9, 0x69, 0xeb, 0x24, 0xfa, 0x43, 0xf9, - 0xe3, 0x21, 0xa2, 0x66, 0x85, 0x8d, 0xd5, 0xb3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xd2, 0x19, - 0x59, 0x52, 0x23, 0x07, 0x00, 0x00, + // 601 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x3f, 0x6f, 0xd3, 0x40, + 0x1c, 0x8d, 0x1b, 0x88, 0x9a, 0x6b, 0xa0, 0x95, 0x09, 0x6d, 0xea, 0xaa, 0x76, 0x64, 0x3a, 0x04, + 0xd1, 0xfa, 0x48, 0x18, 0x40, 0xdd, 0x48, 0x27, 0x86, 0x48, 0xd4, 0x15, 0x03, 0x2c, 0xc1, 0x7f, + 0xae, 0x97, 0x53, 0x13, 0x5f, 0xe4, 0xb3, 0xa3, 0xe6, 0x1b, 0x30, 0x32, 0xf0, 0x01, 0x2a, 0x06, + 0x3e, 0x0b, 0x63, 0x07, 0x06, 0xa6, 0xa8, 0x4a, 0x16, 0xe6, 0x7c, 0x02, 0x14, 0x9f, 0x13, 0x62, + 0xd7, 0x8e, 0x2c, 0xa0, 0x53, 0x7c, 0xfe, 0xbd, 0x7b, 0xef, 0xf7, 0xf2, 0x7e, 0xe7, 0x03, 0x7b, + 0xc4, 0xb4, 0xa0, 0x45, 0x5d, 0x04, 0xad, 0x2e, 0x41, 0x8e, 0x07, 0x07, 0x75, 0xe8, 0x5d, 0x6a, + 0x7d, 0x97, 0x7a, 0x54, 0x14, 0x89, 0x69, 0x69, 0xb3, 0xa2, 0xc6, 0x8b, 0xda, 0xa0, 0x2e, 0x95, + 0x31, 0xc5, 0x34, 0x28, 0xc3, 0xd9, 0x13, 0x47, 0x4a, 0xbb, 0x98, 0x52, 0xdc, 0x45, 0x30, 0x58, + 0x99, 0xfe, 0x39, 0x34, 0x9c, 0x61, 0x58, 0x52, 0x12, 0x14, 0x42, 0xba, 0x00, 0xa0, 0xde, 0x08, + 0x60, 0xb3, 0xc5, 0xf0, 0x89, 0x8b, 0x0c, 0x0f, 0x9d, 0x04, 0x15, 0xf1, 0x2d, 0x28, 0x71, 0x4c, + 0x9b, 0x79, 0x86, 0x87, 0x2a, 0x42, 0x55, 0xa8, 0x6d, 0x34, 0xca, 0x1a, 0x97, 
0xd1, 0xe6, 0x32, + 0xda, 0x6b, 0x67, 0xd8, 0xdc, 0x99, 0x8e, 0x94, 0x47, 0x43, 0xa3, 0xd7, 0x3d, 0x56, 0x97, 0xf7, + 0xa8, 0xfa, 0x06, 0x5f, 0x9e, 0xcd, 0x56, 0xe2, 0x7b, 0xb0, 0x69, 0x51, 0x87, 0x21, 0x87, 0xf9, + 0x2c, 0x24, 0x5d, 0x5b, 0x41, 0x2a, 0x4d, 0x47, 0xca, 0x76, 0x48, 0x1a, 0xdd, 0xa6, 0xea, 0x0f, + 0x17, 0x6f, 0x38, 0xf5, 0x36, 0x28, 0x30, 0x82, 0x1d, 0xe4, 0x56, 0xf2, 0x55, 0xa1, 0x56, 0xd4, + 0xc3, 0xd5, 0xf1, 0xfa, 0xa7, 0x2b, 0x25, 0xf7, 0xeb, 0x4a, 0xc9, 0xa9, 0xbb, 0x60, 0x27, 0xe6, + 0x50, 0x47, 0xac, 0x3f, 0x63, 0x51, 0xbf, 0x70, 0xf7, 0xef, 0xfa, 0xf6, 0x1f, 0xf7, 0x75, 0x50, + 0x0c, 0x9d, 0x10, 0x3b, 0xb0, 0x5e, 0x6c, 0x96, 0xa7, 0x23, 0x65, 0x2b, 0x62, 0x92, 0xd8, 0xaa, + 0xbe, 0xce, 0x9f, 0xdf, 0xd8, 0xe2, 0x21, 0x28, 0x74, 0x90, 0x61, 0x23, 0x77, 0x95, 0x2b, 0x3d, + 0xc4, 0x64, 0xee, 0x78, 0xb9, 0xab, 0x45, 0xc7, 0x3f, 0xf2, 0x60, 0x2b, 0xa8, 0x61, 0xd7, 0xb0, + 0xff, 0xa1, 0xe5, 0x78, 0xc6, 0x6b, 0x77, 0x91, 0x71, 0xfe, 0x3f, 0x65, 0x7c, 0x0a, 0xca, 0x7d, + 0x97, 0xd2, 0xf3, 0xb6, 0xcf, 0x6d, 0xb7, 0xb9, 0x6e, 0xe5, 0x5e, 0x55, 0xa8, 0x95, 0x9a, 0xca, + 0x74, 0xa4, 0xec, 0x71, 0xa6, 0x24, 0x94, 0xaa, 0x8b, 0xc1, 0xeb, 0xe8, 0x5f, 0x76, 0x01, 0xf6, + 0x63, 0xe0, 0x58, 0xef, 0xf7, 0x03, 0xee, 0xda, 0x74, 0xa4, 0x1c, 0x24, 0x72, 0xc7, 0x7b, 0x96, + 0x22, 0x22, 0x69, 0x33, 0x5a, 0x48, 0x49, 0x5c, 0x02, 0x95, 0x78, 0xaa, 0x8b, 0xc8, 0xbf, 0x09, + 0xe0, 0x71, 0x8b, 0xe1, 0x33, 0xdf, 0xec, 0x11, 0xaf, 0x45, 0x98, 0x89, 0x3a, 0xc6, 0x80, 0x50, + 0xdf, 0xfd, 0x9b, 0xdc, 0x5f, 0x81, 0x52, 0x6f, 0x89, 0x62, 0xe5, 0xc0, 0x46, 0x90, 0x19, 0xc6, + 0x56, 0x01, 0xfb, 0x89, 0x7d, 0xce, 0x9d, 0x34, 0xbe, 0xe6, 0x41, 0xbe, 0xc5, 0xb0, 0xf8, 0x11, + 0x94, 0x22, 0x1f, 0x9c, 0x27, 0xda, 0xed, 0x6f, 0x9d, 0x16, 0x3b, 0xb3, 0xd2, 0xb3, 0x0c, 0xa0, + 0xb9, 0xd2, 0x4c, 0x21, 0x72, 0xa8, 0xd3, 0x14, 0x96, 0x41, 0xa9, 0x0a, 0x49, 0x07, 0x51, 0xb4, + 0xc0, 0x83, 0xe8, 0x44, 0x1d, 0xa4, 0xee, 0x5e, 0x42, 0x49, 0x87, 0x59, 0x50, 0x0b, 0x11, 0x17, + 0x88, 0x09, 0xb1, 0x3f, 0x4d, 0xe1, 0xb8, 0x0d, 0x95, 0xea, 0x99, 0xa1, 0x73, 0xcd, 0xe6, 0xe9, + 0xf7, 0xb1, 0x2c, 0x5c, 0x8f, 0x65, 0xe1, 0x66, 0x2c, 0x0b, 0x9f, 0x27, 0x72, 0xee, 0x7a, 0x22, + 0xe7, 0x7e, 0x4e, 0xe4, 0xdc, 0x87, 0x97, 0x98, 0x78, 0x1d, 0xdf, 0xd4, 0x2c, 0xda, 0x83, 0x16, + 0x65, 0x3d, 0xca, 0xc2, 0x9f, 0x23, 0x66, 0x5f, 0xc0, 0x4b, 0xb8, 0xb8, 0x6b, 0x9e, 0x37, 0x8e, + 0xc2, 0xeb, 0xc6, 0x1b, 0xf6, 0x11, 0x33, 0x0b, 0xc1, 0x58, 0xbd, 0xf8, 0x1d, 0x00, 0x00, 0xff, + 0xff, 0xf4, 0xf1, 0xa7, 0x9a, 0xf0, 0x06, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -636,7 +634,7 @@ func (m *MsgCreateClient) MarshalToSizedBuffer(dAtA []byte) (int, error) { copy(dAtA[i:], m.Signer) i = encodeVarintTx(dAtA, i, uint64(len(m.Signer))) i-- - dAtA[i] = 0x22 + dAtA[i] = 0x1a } if m.ConsensusState != nil { { @@ -648,7 +646,7 @@ func (m *MsgCreateClient) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintTx(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x1a + dAtA[i] = 0x12 } if m.ClientState != nil { { @@ -660,13 +658,6 @@ func (m *MsgCreateClient) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintTx(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x12 - } - if len(m.ClientId) > 0 { - i -= len(m.ClientId) - copy(dAtA[i:], m.ClientId) - i = encodeVarintTx(dAtA, i, uint64(len(m.ClientId))) - i-- dAtA[i] = 0xa } return len(dAtA) - i, nil @@ -954,10 +945,6 @@ func (m *MsgCreateClient) Size() (n int) { } var l int _ = l - l = len(m.ClientId) - if l > 0 { - n += 1 + l + sovTx(uint64(l)) - } if m.ClientState != nil { l = m.ClientState.Size() n += 1 + l + sovTx(uint64(l)) @@ -1120,38 +1107,6 @@ func (m *MsgCreateClient) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ClientId", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTx - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTx - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.ClientId = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ClientState", wireType) } @@ -1187,7 +1142,7 @@ func (m *MsgCreateClient) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 3: + case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ConsensusState", wireType) } @@ -1223,7 +1178,7 @@ func (m *MsgCreateClient) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 4: + case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Signer", wireType) } diff --git a/x/ibc/core/03-connection/keeper/handshake_test.go b/x/ibc/core/03-connection/keeper/handshake_test.go index eb9ca5c40..d70fd013d 100644 --- a/x/ibc/core/03-connection/keeper/handshake_test.go +++ b/x/ibc/core/03-connection/keeper/handshake_test.go @@ -41,8 +41,9 @@ func (suite *KeeperTestSuite) TestConnOpenInit() { version = &types.Version{} }, false}, {"couldn't add connection to client", func() { - // swap client identifiers to result in client that does not exist - clientB, clientA = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) + clientA, clientB = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) + // set clientA to invalid client identifier + clientA = "clientidentifier" }, false}, } diff --git a/x/ibc/core/03-connection/types/keys.go b/x/ibc/core/03-connection/types/keys.go index c44602203..65af565c2 100644 --- a/x/ibc/core/03-connection/types/keys.go +++ b/x/ibc/core/03-connection/types/keys.go @@ -2,10 +2,10 @@ package types import ( "fmt" - "strconv" - "strings" + "regexp" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host 
"github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" ) const ( @@ -30,11 +30,17 @@ const ( ) // FormatConnectionIdentifier returns the connection identifier with the sequence appended. +// This is a SDK specific format not enforced by IBC protocol. func FormatConnectionIdentifier(sequence uint64) string { return fmt.Sprintf("%s%d", ConnectionPrefix, sequence) } -// IsValidConnectionID return true if the connection identifier is valid. +// IsConnectionIDFormat checks if a connectionID is in the format required on the SDK for +// parsing connection identifiers. The connection identifier must be in the form: `connection-{N} +var IsConnectionIDFormat = regexp.MustCompile(`^connection-[0-9]{1,20}$`).MatchString + +// IsValidConnectionID checks if the connection identifier is valid and can be parsed to +// the connection identifier format. func IsValidConnectionID(connectionID string) bool { _, err := ParseConnectionSequence(connectionID) return err == nil @@ -42,18 +48,14 @@ func IsValidConnectionID(connectionID string) bool { // ParseConnectionSequence parses the connection sequence from the connection identifier. func ParseConnectionSequence(connectionID string) (uint64, error) { - if !strings.HasPrefix(connectionID, ConnectionPrefix) { - return 0, sdkerrors.Wrapf(ErrInvalidConnectionIdentifier, "doesn't contain prefix `%s`", ConnectionPrefix) + if !IsConnectionIDFormat(connectionID) { + return 0, sdkerrors.Wrap(host.ErrInvalidID, "connection identifier is not in the format: `connection-{N}`") } - splitStr := strings.Split(connectionID, ConnectionPrefix) - if len(splitStr) != 2 { - return 0, sdkerrors.Wrap(ErrInvalidConnectionIdentifier, "connection identifier must be in format: `connection-{N}`") - } - - sequence, err := strconv.ParseUint(splitStr[1], 10, 64) + sequence, err := host.ParseIdentifier(connectionID, ConnectionPrefix) if err != nil { - return 0, sdkerrors.Wrap(err, "failed to parse connection identifier sequence") + return 0, sdkerrors.Wrap(err, "invalid connection identifier") } + return sequence, nil } diff --git a/x/ibc/core/03-connection/types/keys_test.go b/x/ibc/core/03-connection/types/keys_test.go index 261bd2895..c899dc3c5 100644 --- a/x/ibc/core/03-connection/types/keys_test.go +++ b/x/ibc/core/03-connection/types/keys_test.go @@ -1,6 +1,7 @@ package types_test import ( + "math" "testing" "github.com/cosmos/cosmos-sdk/x/ibc/core/03-connection/types" @@ -17,10 +18,13 @@ func TestParseConnectionSequence(t *testing.T) { }{ {"valid 0", "connection-0", 0, true}, {"valid 1", "connection-1", 1, true}, - {"valid large sequence", "connection-234568219356718293", 234568219356718293, true}, + {"valid large sequence", types.FormatConnectionIdentifier(math.MaxUint64), math.MaxUint64, true}, + // one above uint64 max + {"invalid uint64", "connection-18446744073709551616", 0, false}, // uint64 == 20 characters {"invalid large sequence", "connection-2345682193567182931243", 0, false}, {"capital prefix", "Connection-0", 0, false}, + {"double prefix", "connection-connection-0", 0, false}, {"missing dash", "connection0", 0, false}, {"blank id", " ", 0, false}, {"empty id", "", 0, false}, diff --git a/x/ibc/core/04-channel/types/keys.go b/x/ibc/core/04-channel/types/keys.go index b6c5745dd..d3a6cde24 100644 --- a/x/ibc/core/04-channel/types/keys.go +++ b/x/ibc/core/04-channel/types/keys.go @@ -2,10 +2,10 @@ package types import ( "fmt" - "strconv" - "strings" + "regexp" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" ) const ( @@ 
-30,11 +30,17 @@ const ( ) // FormatChannelIdentifier returns the channel identifier with the sequence appended. +// This is a SDK specific format not enforced by IBC protocol. func FormatChannelIdentifier(sequence uint64) string { return fmt.Sprintf("%s%d", ChannelPrefix, sequence) } -// IsValidChannelID return true if the channel identifier is valid. +// IsChannelIDFormat checks if a channelID is in the format required on the SDK for +// parsing channel identifiers. The channel identifier must be in the form: `channel-{N} +var IsChannelIDFormat = regexp.MustCompile(`^channel-[0-9]{1,20}$`).MatchString + +// IsValidChannelID checks if a channelID is valid and can be parsed to the channel +// identifier format. func IsValidChannelID(channelID string) bool { _, err := ParseChannelSequence(channelID) return err == nil @@ -42,18 +48,14 @@ func IsValidChannelID(channelID string) bool { // ParseChannelSequence parses the channel sequence from the channel identifier. func ParseChannelSequence(channelID string) (uint64, error) { - if !strings.HasPrefix(channelID, ChannelPrefix) { - return 0, sdkerrors.Wrapf(ErrInvalidChannelIdentifier, "doesn't contain prefix `%s`", ChannelPrefix) + if !IsChannelIDFormat(channelID) { + return 0, sdkerrors.Wrap(host.ErrInvalidID, "channel identifier is not in the format: `channel-{N}`") } - splitStr := strings.Split(channelID, ChannelPrefix) - if len(splitStr) != 2 { - return 0, sdkerrors.Wrap(ErrInvalidChannelIdentifier, "channel identifier must be in format: `channel-{N}`") - } - - sequence, err := strconv.ParseUint(splitStr[1], 10, 64) + sequence, err := host.ParseIdentifier(channelID, ChannelPrefix) if err != nil { - return 0, sdkerrors.Wrap(err, "failed to parse channel identifier sequence") + return 0, sdkerrors.Wrap(err, "invalid channel identifier") } + return sequence, nil } diff --git a/x/ibc/core/04-channel/types/keys_test.go b/x/ibc/core/04-channel/types/keys_test.go index 418174cb0..86e4e61aa 100644 --- a/x/ibc/core/04-channel/types/keys_test.go +++ b/x/ibc/core/04-channel/types/keys_test.go @@ -18,6 +18,8 @@ func TestParseChannelSequence(t *testing.T) { {"valid 0", "channel-0", 0, true}, {"valid 1", "channel-1", 1, true}, {"valid large sequence", "channel-234568219356718293", 234568219356718293, true}, + // one above uint64 max + {"invalid uint64", "channel-18446744073709551616", 0, false}, // uint64 == 20 characters {"invalid large sequence", "channel-2345682193567182931243", 0, false}, {"capital prefix", "Channel-0", 0, false}, diff --git a/x/ibc/core/24-host/parse.go b/x/ibc/core/24-host/parse.go index 7e6301a39..8c3459500 100644 --- a/x/ibc/core/24-host/parse.go +++ b/x/ibc/core/24-host/parse.go @@ -1,11 +1,37 @@ package host import ( + "strconv" "strings" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) +// ParseIdentifier parses the sequence from the identifier using the provided prefix. This function +// does not need to be used by counterparty chains. SDK generated connection and channel identifiers +// are required to use this format. 
+func ParseIdentifier(identifier, prefix string) (uint64, error) { + if !strings.HasPrefix(identifier, prefix) { + return 0, sdkerrors.Wrapf(ErrInvalidID, "identifier doesn't contain prefix `%s`", prefix) + } + + splitStr := strings.Split(identifier, prefix) + if len(splitStr) != 2 { + return 0, sdkerrors.Wrapf(ErrInvalidID, "identifier must be in format: `%s{N}`", prefix) + } + + // sanity check + if splitStr[0] != "" { + return 0, sdkerrors.Wrapf(ErrInvalidID, "identifier must begin with prefix %s", prefix) + } + + sequence, err := strconv.ParseUint(splitStr[1], 10, 64) + if err != nil { + return 0, sdkerrors.Wrap(err, "failed to parse identifier sequence") + } + return sequence, nil +} + // ParseConnectionPath returns the connection ID from a full path. It returns // an error if the provided path is invalid. func ParseConnectionPath(path string) (string, error) { diff --git a/x/ibc/core/24-host/parse_test.go b/x/ibc/core/24-host/parse_test.go new file mode 100644 index 000000000..cbee37ddb --- /dev/null +++ b/x/ibc/core/24-host/parse_test.go @@ -0,0 +1,47 @@ +package host_test + +import ( + "math" + "testing" + + connectiontypes "github.com/cosmos/cosmos-sdk/x/ibc/core/03-connection/types" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + "github.com/stretchr/testify/require" +) + +func TestParseIdentifier(t *testing.T) { + testCases := []struct { + name string + identifier string + prefix string + expSeq uint64 + expPass bool + }{ + {"valid 0", "connection-0", "connection-", 0, true}, + {"valid 1", "connection-1", "connection-", 1, true}, + {"valid large sequence", connectiontypes.FormatConnectionIdentifier(math.MaxUint64), "connection-", math.MaxUint64, true}, + // one above uint64 max + {"invalid uint64", "connection-18446744073709551616", "connection-", 0, false}, + // uint64 == 20 characters + {"invalid large sequence", "connection-2345682193567182931243", "conenction-", 0, false}, + {"capital prefix", "Connection-0", "connection-", 0, false}, + {"double prefix", "connection-connection-0", "connection-", 0, false}, + {"doesn't have prefix", "connection-0", "prefix", 0, false}, + {"missing dash", "connection0", "connection-", 0, false}, + {"blank id", " ", "connection-", 0, false}, + {"empty id", "", "connection-", 0, false}, + {"negative sequence", "connection--1", "connection-", 0, false}, + } + + for _, tc := range testCases { + + seq, err := host.ParseIdentifier(tc.identifier, tc.prefix) + require.Equal(t, tc.expSeq, seq) + + if tc.expPass { + require.NoError(t, err, tc.name) + } else { + require.Error(t, err, tc.name) + } + } +} diff --git a/x/ibc/core/genesis_test.go b/x/ibc/core/genesis_test.go index 29a26aad7..bc4c5834d 100644 --- a/x/ibc/core/genesis_test.go +++ b/x/ibc/core/genesis_test.go @@ -97,6 +97,7 @@ func (suite *IBCTestSuite) TestValidateGenesis() { }, clienttypes.NewParams(exported.Tendermint, exported.Localhost), true, + 0, ), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ @@ -154,6 +155,7 @@ func (suite *IBCTestSuite) TestValidateGenesis() { nil, clienttypes.NewParams(exported.Tendermint), false, + 0, ), ConnectionGenesis: connectiontypes.DefaultGenesisState(), }, @@ -239,6 +241,7 @@ func (suite *IBCTestSuite) TestInitGenesis() { }, clienttypes.NewParams(exported.Tendermint, exported.Localhost), true, + 0, ), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ diff --git a/x/ibc/core/keeper/msg_server.go b/x/ibc/core/keeper/msg_server.go index 2b655ff9a..78f822e4d 
100644 --- a/x/ibc/core/keeper/msg_server.go +++ b/x/ibc/core/keeper/msg_server.go @@ -32,14 +32,15 @@ func (k Keeper) CreateClient(goCtx context.Context, msg *clienttypes.MsgCreateCl return nil, err } - if err = k.ClientKeeper.CreateClient(ctx, msg.ClientId, clientState, consensusState); err != nil { + clientID, err := k.ClientKeeper.CreateClient(ctx, clientState, consensusState) + if err != nil { return nil, err } ctx.EventManager().EmitEvents(sdk.Events{ sdk.NewEvent( clienttypes.EventTypeCreateClient, - sdk.NewAttribute(clienttypes.AttributeKeyClientID, msg.ClientId), + sdk.NewAttribute(clienttypes.AttributeKeyClientID, clientID), sdk.NewAttribute(clienttypes.AttributeKeyClientType, clientState.ClientType()), sdk.NewAttribute(clienttypes.AttributeKeyConsensusHeight, clientState.GetLatestHeight().String()), ), diff --git a/x/ibc/core/spec/01_concepts.md b/x/ibc/core/spec/01_concepts.md index c3059e5f5..045508999 100644 --- a/x/ibc/core/spec/01_concepts.md +++ b/x/ibc/core/spec/01_concepts.md @@ -10,11 +10,11 @@ this [document](https://github.com/cosmos/ics/blob/master/ibc/1_IBC_TERMINOLOGY. ## Client Creation, Updates, and Upgrades IBC clients are on chain light clients. The light client is responsible for verifying -counterparty state. A light client can be created by any user submitting a client -identifier and a valid initial `ClientState` and `ConsensusState`. The client identifier -must not already be used. Clients are given a client identifier prefixed store to -store their associated client state and consensus states. Consensus states are -stored using their associated height. +counterparty state. A light client can be created by any user submitting a valid initial +`ClientState` and `ConsensusState`. The client identifier is auto generated using the +client type and the global client counter appended in the format: `{client-type}-{N}`. +Clients are given a client identifier prefixed store to store their associated client +state and consensus states. Consensus states are stored using their associated height. Clients can be updated by any user submitting a valid `Header`. The client state callback to `CheckHeaderAndUpdateState` is responsible for verifying the header against previously diff --git a/x/ibc/core/spec/02_state.md b/x/ibc/core/spec/02_state.md index 54eb41bc4..2c85a525a 100644 --- a/x/ibc/core/spec/02_state.md +++ b/x/ibc/core/spec/02_state.md @@ -13,9 +13,12 @@ The client type is not stored since it can be obtained through the client state. 
| "0/" | "clients/{identifier}/clientState" | ClientState | | "0/" | "clients/{identifier}/consensusStates/{height}" | ConsensusState | | "0/" | "clients/{identifier}/connections" | []string | +| "0/" | "nextClientSequence | uint64 | | "0/" | "connections/{identifier}" | ConnectionEnd | +| "0/" | "nextConnectionSequence" | uint64 | | "0/" | "ports/{identifier}" | CapabilityKey | | "0/" | "channelEnds/ports/{identifier}/channels/{identifier}" | ChannelEnd | +| "0/" | "nextChannelSequence" | uint64 | | "0/" | "capabilities/ports/{identifier}/channels/{identifier}" | CapabilityKey | | "0/" | "nextSequenceSend/ports/{identifier}/channels/{identifier}" | uint64 | | "0/" | "nextSequenceRecv/ports/{identifier}/channels/{identifier}" | uint64 | diff --git a/x/ibc/core/spec/03_state_transitions.md b/x/ibc/core/spec/03_state_transitions.md index de31957d6..be3b508b7 100644 --- a/x/ibc/core/spec/03_state_transitions.md +++ b/x/ibc/core/spec/03_state_transitions.md @@ -9,7 +9,7 @@ The described state transitions assume successful message exection. ## Create Client `MsgCreateClient` will initialize and store a `ClientState` and `ConsensusState` in the sub-store -created using the given client identifier. +created using a generated client identifier. ## Update Client diff --git a/x/ibc/core/spec/04_messages.md b/x/ibc/core/spec/04_messages.md index 34d68200b..3728e6d6f 100644 --- a/x/ibc/core/spec/04_messages.md +++ b/x/ibc/core/spec/04_messages.md @@ -14,7 +14,6 @@ A light client is created using the `MsgCreateClient`. ```go type MsgCreateClient struct { - ClientId string ClientState *types.Any // proto-packed client state ConsensusState *types.Any // proto-packed consensus state Signer sdk.AccAddress @@ -23,13 +22,11 @@ type MsgCreateClient struct { This message is expected to fail if: -- `ClientId` is invalid (see naming requirements) - `ClientState` is empty or invalid - `ConsensusState` is empty or invalid - `Signer` is empty -- A light client with the provided id and type already exist -The message creates and stores a light client with an initial consensus state for the given client +The message creates and stores a light client with an initial consensus state using a generated client identifier. ### MsgUpdateClient @@ -112,7 +109,6 @@ A connection is initialized on a light client using the `MsgConnectionOpenInit`. ```go type MsgConnectionOpenInit struct { ClientId string - ConnectionId string Counterparty Counterparty Version string Signer sdk.AccAddress @@ -121,7 +117,6 @@ type MsgConnectionOpenInit struct { This message is expected to fail if: - `ClientId` is invalid (see naming requirements) -- `ConnectionId` is invalid (see naming requirements) - `Counterparty` is empty - 'Version' is not empty and invalid - `Signer` is empty @@ -138,8 +133,7 @@ using the `MsgConnectionOpenTry`. 
```go type MsgConnectionOpenTry struct { ClientId string - DesiredConnectionId string - CounterpartyChosenConnectionId string + PreviousConnectionId string ClientState *types.Any // proto-packed counterparty client Counterparty Counterparty CounterpartyVersions []string @@ -155,8 +149,7 @@ type MsgConnectionOpenTry struct { This message is expected to fail if: - `ClientId` is invalid (see naming requirements) -- `DesiredConnectionId` is invalid (see naming requirements) -- `CounterpartyChosenConnectionId` is not empty and doesn't match `DesiredConnectionId` +- `PreviousConnectionId` is not empty and invalid (see naming requirements) - `ClientState` is not a valid client of the executing chain - `Counterparty` is empty - `CounterpartyVersions` is empty @@ -167,15 +160,13 @@ This message is expected to fail if: - `ConsensusHeight` is zero - `Signer` is empty - A Client hasn't been created for the given ID -- A Connection for the given ID already exists +- A previous connection exists but does not match the supplied parameters. - `ProofInit` does not prove that the counterparty connection is in state INIT - `ProofClient` does not prove that the counterparty has stored the `ClientState` provided in message - `ProofConsensus` does not prove that the counterparty has the correct consensus state for this chain -The message creates a connection for the given ID with an TRYOPEN State. The `CounterpartyChosenConnectionID` -represents the connection ID the counterparty set under `connection.Counterparty.ConnectionId` -to represent the connection ID this chain should use. An empty string indicates the connection -identifier is flexible and gives this chain an opportunity to choose its own identifier. +The message creates a connection for a generated connection ID with a TRYOPEN state. If a previous +connection already exists, it updates the connection state from INIT to TRYOPEN. ### MsgConnectionOpenAck @@ -251,7 +242,6 @@ message. ```go type MsgChannelOpenInit struct { PortId string - ChannelId string Channel Channel Signer sdk.AccAddress } @@ -260,12 +250,11 @@ type MsgChannelOpenInit struct { This message is expected to fail if: - `PortId` is invalid (see naming requirements) -- `ChannelId` is invalid (see naming requirements) - `Channel` is empty - `Signer` is empty - A Channel End exists for the given Channel ID and Port ID -The message creates a channel on chain A with an INIT state for the given Channel ID +The message creates a channel on chain A with an INIT state for a generated Channel ID and Port ID. ### MsgChannelOpenTry @@ -276,8 +265,7 @@ the `MsgChannelOpenTry` message. ```go type MsgChannelOpenTry struct { PortId string - DesiredChannelId string - CounterpartyChosenChannelId string + PreviousChannelId string Channel Channel CounterpartyVersion string ProofInit []byte @@ -289,21 +277,18 @@ type MsgChannelOpenTry struct { This message is expected to fail if: - `PortId` is invalid (see naming requirements) -- `DesiredChannelId` is invalid (see naming requirements) -- `CounterpartyChosenChannelId` is not empty and not equal to `ChannelId` +- `PreviousChannelId` is not empty and invalid (see naming requirements) - `Channel` is empty - `CounterpartyVersion` is empty - `ProofInit` is empty - `ProofHeight` is zero - `Signer` is empty -- A Channel End exists for the given Channel and Port ID +- A previous channel exists and does not match the provided parameters. 
- `ProofInit` does not prove that the counterparty's Channel state is in INIT -The message creates a channel on chain B with an TRYOPEN state for the given Channel ID -and Port ID. The `CounterpartyChosenChannelId` represents the channel ID the counterparty set under -`connection.Counterparty.ChannelId` to represent the channel ID this chain should use. -An empty string indicates the channel identifier is flexible and gives this chain an -opportunity to choose its own identifier. +The message creates a channel on chain B with a TRYOPEN state using a generated Channel ID +and the given Port ID if the previous channel does not already exist. Otherwise it updates the +previous channel state from INIT to TRYOPEN. ### MsgChannelOpenAck diff --git a/x/ibc/light-clients/06-solomachine/client/cli/tx.go b/x/ibc/light-clients/06-solomachine/client/cli/tx.go index 37db768a3..8d1709fc2 100644 --- a/x/ibc/light-clients/06-solomachine/client/cli/tx.go +++ b/x/ibc/light-clients/06-solomachine/client/cli/tx.go @@ -24,12 +24,12 @@ const ( // NewCreateClientCmd defines the command to create a new solo machine client. func NewCreateClientCmd() *cobra.Command { cmd := &cobra.Command{ - Use: "create [client-id] [sequence] [path/to/consensus_state.json]", + Use: "create [sequence] [path/to/consensus_state.json]", Short: "create new solo machine client", Long: `create a new solo machine client with the specified identifier and public key - ConsensusState json example: {"public_key":{"@type":"/cosmos.crypto.secp256k1.PubKey","key":"A/3SXL2ONYaOkxpdR5P8tHTlSlPv1AwQwSFxKRee5JQW"},"diversifier":"diversifier","timestamp":"10"}`, - Example: fmt.Sprintf("%s tx ibc %s create [client-id] [sequence] [path/to/consensus_state] --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), - Args: cobra.ExactArgs(3), + Example: fmt.Sprintf("%s tx ibc %s create [sequence] [path/to/consensus_state] --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), + Args: cobra.ExactArgs(2), RunE: func(cmd *cobra.Command, args []string) error { clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadTxCommandFlags(clientCtx, cmd.Flags()) @@ -37,9 +37,7 @@ func NewCreateClientCmd() *cobra.Command { return err } - clientID := args[0] - - sequence, err := strconv.ParseUint(args[1], 10, 64) + sequence, err := strconv.ParseUint(args[0], 10, 64) if err != nil { return err } @@ -48,10 +46,10 @@ func NewCreateClientCmd() *cobra.Command { // attempt to unmarshal consensus state argument consensusState := &types.ConsensusState{} - if err := cdc.UnmarshalJSON([]byte(args[2]), consensusState); err != nil { + if err := cdc.UnmarshalJSON([]byte(args[1]), consensusState); err != nil { // check for file path if JSON input is not provided - contents, err := ioutil.ReadFile(args[2]) + contents, err := ioutil.ReadFile(args[1]) if err != nil { return errors.Wrap(err, "neither JSON input nor path to .json file for consensus state were provided") } @@ -64,7 +62,7 @@ func NewCreateClientCmd() *cobra.Command { allowUpdateAfterProposal, _ := cmd.Flags().GetBool(flagAllowUpdateAfterProposal) clientState := types.NewClientState(sequence, consensusState, allowUpdateAfterProposal) - msg, err := clienttypes.NewMsgCreateClient(clientID, clientState, consensusState, clientCtx.GetFromAddress()) + msg, err := clienttypes.NewMsgCreateClient(clientState, consensusState, clientCtx.GetFromAddress()) if err != nil { return err } diff --git a/x/ibc/light-clients/07-tendermint/client/cli/tx.go 
b/x/ibc/light-clients/07-tendermint/client/cli/tx.go index 3bd2f0c0b..04a274ace 100644 --- a/x/ibc/light-clients/07-tendermint/client/cli/tx.go +++ b/x/ibc/light-clients/07-tendermint/client/cli/tx.go @@ -34,15 +34,15 @@ const ( // in https://github.com/cosmos/ics/tree/master/spec/ics-002-client-semantics#create func NewCreateClientCmd() *cobra.Command { cmd := &cobra.Command{ - Use: "create [client-id] [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift]", + Use: "create [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift]", Short: "create new tendermint client", Long: `Create a new tendermint IBC client. - 'trust-level' flag can be a fraction (eg: '1/3') or 'default' - 'proof-specs' flag can be JSON input, a path to a .json file or 'default' - 'upgrade-path' flag is a string specifying the upgrade path for this chain where a future upgraded client will be stored. The path is a comma-separated list representing the keys in order of the keyPath to the committed upgraded client. e.g. 'upgrade/upgradedClient'`, - Example: fmt.Sprintf("%s tx ibc %s create [client-id] [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift] --trust-level default --consensus-params [path/to/consensus-params.json] --proof-specs [path/to/proof-specs.json] --upgrade-path upgrade/upgradedClient --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), - Args: cobra.ExactArgs(5), + Example: fmt.Sprintf("%s tx ibc %s create [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift] --trust-level default --consensus-params [path/to/consensus-params.json] --proof-specs [path/to/proof-specs.json] --upgrade-path upgrade/upgradedClient --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), + Args: cobra.ExactArgs(4), RunE: func(cmd *cobra.Command, args []string) error { clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadTxCommandFlags(clientCtx, cmd.Flags()) @@ -50,15 +50,13 @@ func NewCreateClientCmd() *cobra.Command { return err } - clientID := args[0] - cdc := codec.NewProtoCodec(clientCtx.InterfaceRegistry) legacyAmino := codec.NewLegacyAmino() var header *types.Header - if err := cdc.UnmarshalJSON([]byte(args[1]), header); err != nil { + if err := cdc.UnmarshalJSON([]byte(args[0]), header); err != nil { // check for file path if JSON input is not provided - contents, err := ioutil.ReadFile(args[1]) + contents, err := ioutil.ReadFile(args[0]) if err != nil { return errors.New("neither JSON input nor path to .json file were provided for consensus header") } @@ -83,17 +81,17 @@ func NewCreateClientCmd() *cobra.Command { } } - trustingPeriod, err := time.ParseDuration(args[2]) + trustingPeriod, err := time.ParseDuration(args[1]) if err != nil { return err } - ubdPeriod, err := time.ParseDuration(args[3]) + ubdPeriod, err := time.ParseDuration(args[2]) if err != nil { return err } - maxClockDrift, err := time.ParseDuration(args[4]) + maxClockDrift, err := time.ParseDuration(args[3]) if err != nil { return err } @@ -137,7 +135,7 @@ func NewCreateClientCmd() *cobra.Command { consensusState := header.ConsensusState() msg, err := clienttypes.NewMsgCreateClient( - clientID, clientState, consensusState, clientCtx.GetFromAddress(), + clientState, consensusState, clientCtx.GetFromAddress(), ) if err != nil { return err diff --git a/x/ibc/testing/chain.go b/x/ibc/testing/chain.go index b63e72c3e..4dc8ec4f2 100644 --- 
a/x/ibc/testing/chain.go +++ b/x/ibc/testing/chain.go @@ -369,8 +369,8 @@ func (chain *TestChain) GetPrefix() commitmenttypes.MerklePrefix { // NewClientID appends a new clientID string in the format: // ClientFor -func (chain *TestChain) NewClientID(counterpartyChainID string) string { - clientID := "client" + strconv.Itoa(len(chain.ClientIDs)) + "For" + counterpartyChainID +func (chain *TestChain) NewClientID(clientType string) string { + clientID := fmt.Sprintf("%s-%s", clientType, strconv.Itoa(len(chain.ClientIDs))) chain.ClientIDs = append(chain.ClientIDs, clientID) return clientID } @@ -460,7 +460,7 @@ func (chain *TestChain) ConstructMsgCreateClient(counterparty *TestChain, client } msg, err := clienttypes.NewMsgCreateClient( - clientID, clientState, consensusState, chain.SenderAccount.GetAddress(), + clientState, consensusState, chain.SenderAccount.GetAddress(), ) require.NoError(chain.t, err) return msg diff --git a/x/ibc/testing/chain_test.go b/x/ibc/testing/chain_test.go index bf5af6de6..361a9c4c1 100644 --- a/x/ibc/testing/chain_test.go +++ b/x/ibc/testing/chain_test.go @@ -33,6 +33,7 @@ func TestCreateSortedSignerArray(t *testing.T) { // smaller address validator1.Address = []byte{1} + validator2.Address = []byte{2} validator2.VotingPower = 1 expected = []tmtypes.PrivValidator{privVal1, privVal2} diff --git a/x/ibc/testing/coordinator.go b/x/ibc/testing/coordinator.go index aecdcd4b7..95b59a1db 100644 --- a/x/ibc/testing/coordinator.go +++ b/x/ibc/testing/coordinator.go @@ -95,7 +95,7 @@ func (coord *Coordinator) CreateClient( ) (clientID string, err error) { coord.CommitBlock(source, counterparty) - clientID = source.NewClientID(counterparty.ChainID) + clientID = source.NewClientID(clientType) switch clientType { case exported.Tendermint: From 7ad2aab2c4fc6b9406b6f2ad2e59b9f228eced88 Mon Sep 17 00:00:00 2001 From: Amaury Date: Mon, 30 Nov 2020 17:59:35 +0100 Subject: [PATCH 19/40] Add panic handler on queries (#8039) * Add test that panics * Add panic in abci query * Move proto gen files to correct place * Add panic handler in grpc server * Fix test * Fix build * Use %v * Better panic message * Fix tests Co-authored-by: Aleksandr Bezobchuk --- baseapp/abci.go | 10 +- baseapp/grpcserver.go | 7 +- .../grpc/tmservice}/query.pb.go | 157 +++++++++--------- .../grpc/tmservice}/query.pb.gw.go | 4 +- client/grpc/tmservice/service.go | 46 ++--- client/grpc/tmservice/service_test.go | 33 ++-- go.mod | 1 + go.sum | 1 + .../base/tendermint/v1beta1/query.proto | 2 +- server/grpc/server_test.go | 16 +- types/errors/abci.go | 4 +- types/errors/abci_test.go | 4 +- x/auth/tx/service_test.go | 19 ++- 13 files changed, 175 insertions(+), 129 deletions(-) rename {types/query => client/grpc/tmservice}/query.pb.go (93%) rename {types/query => client/grpc/tmservice}/query.pb.gw.go (99%) diff --git a/baseapp/abci.go b/baseapp/abci.go index d988ead0f..b46afd6c7 100644 --- a/baseapp/abci.go +++ b/baseapp/abci.go @@ -379,9 +379,17 @@ func (app *BaseApp) snapshot(height int64) { // Query implements the ABCI interface. It delegates to CommitMultiStore if it // implements Queryable. -func (app *BaseApp) Query(req abci.RequestQuery) abci.ResponseQuery { +func (app *BaseApp) Query(req abci.RequestQuery) (res abci.ResponseQuery) { defer telemetry.MeasureSince(time.Now(), "abci", "query") + // Add panic recovery for all queries. 
+ // ref: https://github.com/cosmos/cosmos-sdk/pull/8039 + defer func() { + if r := recover(); r != nil { + res = sdkerrors.QueryResult(sdkerrors.Wrapf(sdkerrors.ErrPanic, "%v", r)) + } + }() + // when a client did not provide a query height, manually inject the latest if req.Height == 0 { req.Height = app.LastBlockHeight() diff --git a/baseapp/grpcserver.go b/baseapp/grpcserver.go index 74e8b8a21..a4342e0b8 100644 --- a/baseapp/grpcserver.go +++ b/baseapp/grpcserver.go @@ -5,6 +5,8 @@ import ( "strconv" gogogrpc "github.com/gogo/protobuf/grpc" + grpcmiddleware "github.com/grpc-ecosystem/go-grpc-middleware" + grpcrecovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" @@ -74,7 +76,10 @@ func (app *BaseApp) RegisterGRPCServer(server gogogrpc.Server) { newMethods[i] = grpc.MethodDesc{ MethodName: method.MethodName, Handler: func(srv interface{}, ctx context.Context, dec func(interface{}) error, _ grpc.UnaryServerInterceptor) (interface{}, error) { - return methodHandler(srv, ctx, dec, interceptor) + return methodHandler(srv, ctx, dec, grpcmiddleware.ChainUnaryServer( + grpcrecovery.UnaryServerInterceptor(), + interceptor, + )) }, } } diff --git a/types/query/query.pb.go b/client/grpc/tmservice/query.pb.go similarity index 93% rename from types/query/query.pb.go rename to client/grpc/tmservice/query.pb.go index 266d33700..c08d33dc2 100644 --- a/types/query/query.pb.go +++ b/client/grpc/tmservice/query.pb.go @@ -1,11 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. // source: cosmos/base/tendermint/v1beta1/query.proto -package query +package tmservice import ( context "context" fmt "fmt" + query "github.com/cosmos/cosmos-sdk/types/query" _ "github.com/gogo/protobuf/gogoproto" grpc1 "github.com/gogo/protobuf/grpc" proto "github.com/gogo/protobuf/proto" @@ -35,7 +36,7 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type GetValidatorSetByHeightRequest struct { Height int64 `protobuf:"varint,1,opt,name=height,proto3" json:"height,omitempty"` // pagination defines an pagination for the request. - Pagination *PageRequest `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageRequest `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetValidatorSetByHeightRequest) Reset() { *m = GetValidatorSetByHeightRequest{} } @@ -78,7 +79,7 @@ func (m *GetValidatorSetByHeightRequest) GetHeight() int64 { return 0 } -func (m *GetValidatorSetByHeightRequest) GetPagination() *PageRequest { +func (m *GetValidatorSetByHeightRequest) GetPagination() *query.PageRequest { if m != nil { return m.Pagination } @@ -90,7 +91,7 @@ type GetValidatorSetByHeightResponse struct { BlockHeight int64 `protobuf:"varint,1,opt,name=block_height,json=blockHeight,proto3" json:"block_height,omitempty"` Validators []*Validator `protobuf:"bytes,2,rep,name=validators,proto3" json:"validators,omitempty"` // pagination defines an pagination for the response. 
- Pagination *PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetValidatorSetByHeightResponse) Reset() { *m = GetValidatorSetByHeightResponse{} } @@ -140,7 +141,7 @@ func (m *GetValidatorSetByHeightResponse) GetValidators() []*Validator { return nil } -func (m *GetValidatorSetByHeightResponse) GetPagination() *PageResponse { +func (m *GetValidatorSetByHeightResponse) GetPagination() *query.PageResponse { if m != nil { return m.Pagination } @@ -150,7 +151,7 @@ func (m *GetValidatorSetByHeightResponse) GetPagination() *PageResponse { // GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. type GetLatestValidatorSetRequest struct { // pagination defines an pagination for the request. - Pagination *PageRequest `protobuf:"bytes,1,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageRequest `protobuf:"bytes,1,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetLatestValidatorSetRequest) Reset() { *m = GetLatestValidatorSetRequest{} } @@ -186,7 +187,7 @@ func (m *GetLatestValidatorSetRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetLatestValidatorSetRequest proto.InternalMessageInfo -func (m *GetLatestValidatorSetRequest) GetPagination() *PageRequest { +func (m *GetLatestValidatorSetRequest) GetPagination() *query.PageRequest { if m != nil { return m.Pagination } @@ -198,7 +199,7 @@ type GetLatestValidatorSetResponse struct { BlockHeight int64 `protobuf:"varint,1,opt,name=block_height,json=blockHeight,proto3" json:"block_height,omitempty"` Validators []*Validator `protobuf:"bytes,2,rep,name=validators,proto3" json:"validators,omitempty"` // pagination defines an pagination for the response. 
- Pagination *PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetLatestValidatorSetResponse) Reset() { *m = GetLatestValidatorSetResponse{} } @@ -248,7 +249,7 @@ func (m *GetLatestValidatorSetResponse) GetValidators() []*Validator { return nil } -func (m *GetLatestValidatorSetResponse) GetPagination() *PageResponse { +func (m *GetLatestValidatorSetResponse) GetPagination() *query.PageResponse { if m != nil { return m.Pagination } @@ -864,72 +865,72 @@ func init() { } var fileDescriptor_40c93fb3ef485c5d = []byte{ - // 1031 bytes of a gzipped FileDescriptorProto + // 1040 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xcf, 0xc6, 0x6d, 0x1c, 0x3f, 0x57, 0x90, 0x4c, 0x4a, 0xb3, 0xb5, 0x52, 0x37, 0xec, 0xa1, - 0x4d, 0x88, 0xb2, 0x2b, 0x3b, 0x84, 0x82, 0xf8, 0x27, 0x85, 0x40, 0x1a, 0x15, 0xaa, 0x68, 0x83, - 0x38, 0x20, 0xa4, 0xd5, 0xd8, 0x3b, 0xd9, 0x8c, 0x62, 0xef, 0x4c, 0x77, 0xc6, 0x41, 0x16, 0xaa, - 0x40, 0xfd, 0x00, 0x08, 0x89, 0xaf, 0xc0, 0x85, 0x2f, 0xc0, 0x11, 0x71, 0xe4, 0x46, 0x25, 0x24, - 0xe8, 0x11, 0x25, 0x7c, 0x0a, 0x4e, 0x68, 0x67, 0x66, 0xed, 0xdd, 0x26, 0xa9, 0xed, 0x1c, 0x90, - 0x38, 0x79, 0xf6, 0xbd, 0xf7, 0x7b, 0xf3, 0xfb, 0xbd, 0x79, 0x33, 0x7e, 0xf0, 0x5a, 0x9b, 0x89, - 0x2e, 0x13, 0x5e, 0x0b, 0x0b, 0xe2, 0x49, 0x12, 0x87, 0x24, 0xe9, 0xd2, 0x58, 0x7a, 0xc7, 0x8d, - 0x16, 0x91, 0xb8, 0xe1, 0x3d, 0xea, 0x91, 0xa4, 0xef, 0xf2, 0x84, 0x49, 0x86, 0xea, 0x3a, 0xd6, - 0x4d, 0x63, 0xdd, 0x61, 0xac, 0x6b, 0x62, 0x6b, 0xd7, 0x23, 0x16, 0x31, 0x15, 0xea, 0xa5, 0x2b, - 0x8d, 0xaa, 0x2d, 0x45, 0x8c, 0x45, 0x1d, 0xe2, 0x61, 0x4e, 0x3d, 0x1c, 0xc7, 0x4c, 0x62, 0x49, - 0x59, 0x2c, 0x8c, 0xb7, 0x96, 0xdb, 0x93, 0x37, 0xb9, 0x27, 0xfb, 0x9c, 0x64, 0xbe, 0xa5, 0x9c, - 0x4f, 0xd9, 0xbd, 0x56, 0x87, 0xb5, 0x8f, 0x2e, 0xf4, 0xe6, 0xb1, 0x05, 0x5d, 0x4a, 0xc4, 0x40, - 0x12, 0xc7, 0x11, 0x8d, 0x15, 0x09, 0x1d, 0xeb, 0x7c, 0x63, 0x41, 0x7d, 0x87, 0xc8, 0xcf, 0x70, - 0x87, 0x86, 0x58, 0xb2, 0x64, 0x9f, 0xc8, 0xad, 0xfe, 0x7d, 0x42, 0xa3, 0x43, 0xe9, 0x93, 0x47, - 0x3d, 0x22, 0x24, 0xba, 0x01, 0x33, 0x87, 0xca, 0x60, 0x5b, 0xcb, 0xd6, 0x4a, 0xc9, 0x37, 0x5f, - 0xe8, 0x23, 0x80, 0x61, 0x3a, 0x7b, 0x7a, 0xd9, 0x5a, 0xa9, 0x36, 0xef, 0xb8, 0xf9, 0x3a, 0xe9, - 0x02, 0x9a, 0xbd, 0xdd, 0x3d, 0x1c, 0x11, 0x93, 0xd3, 0xcf, 0x21, 0x9d, 0x67, 0x16, 0xdc, 0xbe, - 0x90, 0x82, 0xe0, 0x2c, 0x16, 0x04, 0xbd, 0x0a, 0xd7, 0x94, 0xfe, 0xa0, 0xc0, 0xa4, 0xaa, 0x6c, - 0x3a, 0x14, 0xed, 0x02, 0x1c, 0x67, 0x29, 0x84, 0x3d, 0xbd, 0x5c, 0x5a, 0xa9, 0x36, 0x57, 0xdd, - 0x17, 0x1f, 0x9b, 0x3b, 0xd8, 0xd4, 0xcf, 0x81, 0xd1, 0x4e, 0x41, 0x59, 0x49, 0x29, 0xbb, 0x3b, - 0x52, 0x99, 0xa6, 0x5a, 0x90, 0x76, 0x00, 0x4b, 0x3b, 0x44, 0x7e, 0x8c, 0x25, 0x11, 0x05, 0x7d, - 0x59, 0x69, 0x8b, 0x25, 0xb4, 0x2e, 0x5d, 0xc2, 0x3f, 0x2c, 0xb8, 0x75, 0xc1, 0x46, 0xff, 0xef, - 0x02, 0x7e, 0x6b, 0x41, 0x65, 0xb0, 0x05, 0xb2, 0xa1, 0x8c, 0xc3, 0x30, 0x21, 0x42, 0x28, 0xfe, - 0xd7, 0xfc, 0xec, 0x13, 0x2d, 0x42, 0x99, 0xf7, 0x5a, 0xc1, 0x11, 0xe9, 0xab, 0x46, 0xac, 0xf8, - 0x33, 0xbc, 0xd7, 0x7a, 0x40, 0xfa, 0xa9, 0xee, 0x63, 0x26, 0x69, 0x1c, 0x05, 0x9c, 0x7d, 0x49, - 0x12, 0xc5, 0xa5, 0xe4, 0x57, 0xb5, 0x6d, 0x2f, 0x35, 0xa1, 0x35, 0x98, 0xe7, 0x09, 0xe3, 0x4c, - 0x90, 0x24, 0xe0, 0x09, 0x65, 0x09, 0x95, 0x7d, 0xfb, 0x8a, 0x8a, 0x9b, 0xcb, 0x1c, 0x7b, 0xc6, - 0xee, 0x34, 0x60, 0x71, 0x87, 
0xc8, 0xad, 0xb4, 0x6c, 0x63, 0xde, 0x13, 0xe7, 0x6b, 0xb0, 0xcf, - 0x42, 0xcc, 0xb1, 0xbc, 0x0e, 0xb3, 0xfa, 0x58, 0x68, 0x68, 0x8e, 0xff, 0x66, 0xbe, 0xca, 0xfa, - 0x56, 0x2b, 0xe8, 0xee, 0xb6, 0x5f, 0x56, 0xa1, 0xbb, 0x21, 0x5a, 0x87, 0xab, 0x6a, 0x69, 0x2e, - 0xdd, 0xe2, 0x05, 0x10, 0x5f, 0x47, 0x39, 0x8b, 0xf0, 0xca, 0xa0, 0x39, 0xb4, 0x43, 0x33, 0x76, - 0x1e, 0xc3, 0x8d, 0xe7, 0x1d, 0xff, 0x25, 0xaf, 0x05, 0x98, 0xdf, 0x21, 0x72, 0xbf, 0x1f, 0xb7, - 0x69, 0x1c, 0x65, 0x9c, 0x5c, 0x40, 0x79, 0xa3, 0xe1, 0x63, 0x43, 0x59, 0x68, 0x93, 0xa2, 0x33, - 0xeb, 0x67, 0x9f, 0xce, 0x75, 0x15, 0xff, 0x90, 0x85, 0x64, 0x37, 0x3e, 0x60, 0x59, 0x96, 0x5f, - 0x2c, 0x58, 0x28, 0x98, 0x4d, 0x9e, 0x07, 0x30, 0x1f, 0x92, 0x03, 0xdc, 0xeb, 0xc8, 0x20, 0x66, - 0x21, 0x09, 0x68, 0x7c, 0xc0, 0x8c, 0xc0, 0xdb, 0x79, 0xb6, 0xbc, 0xc9, 0xdd, 0x6d, 0x1d, 0x38, - 0xc8, 0xf1, 0x72, 0x58, 0x34, 0xa0, 0x2f, 0x60, 0x01, 0x73, 0xde, 0xa1, 0x6d, 0xd5, 0xab, 0xc1, - 0x31, 0x49, 0xc4, 0xf0, 0x25, 0x5c, 0x1b, 0x79, 0x73, 0x74, 0xb8, 0x4a, 0x8d, 0x72, 0x79, 0x8c, - 0xdd, 0xf9, 0xc7, 0x82, 0x6a, 0x2e, 0x06, 0x21, 0xb8, 0x12, 0xe3, 0x2e, 0x51, 0x6c, 0x2b, 0xbe, - 0x5a, 0xa3, 0x9b, 0x30, 0x8b, 0x39, 0x0f, 0x94, 0x5d, 0xf7, 0x7d, 0x19, 0x73, 0xfe, 0x30, 0x75, - 0xd9, 0x50, 0xce, 0x08, 0x95, 0xb4, 0xc7, 0x7c, 0xa2, 0x5b, 0x00, 0x11, 0x95, 0x41, 0x9b, 0x75, - 0xbb, 0x54, 0xaa, 0x46, 0xaf, 0xf8, 0x95, 0x88, 0xca, 0x0f, 0x94, 0x21, 0x75, 0xb7, 0x7a, 0xb4, - 0x13, 0x06, 0x12, 0x47, 0xc2, 0xbe, 0xaa, 0xdd, 0xca, 0xf2, 0x29, 0x8e, 0x84, 0x42, 0xb3, 0x81, - 0xd6, 0x19, 0x83, 0x66, 0x86, 0x29, 0xfa, 0x30, 0x43, 0x87, 0x84, 0x0b, 0xbb, 0xac, 0x1e, 0x91, - 0x3b, 0xa3, 0x4a, 0xf1, 0x09, 0x0b, 0x7b, 0x1d, 0x62, 0x76, 0xd9, 0x26, 0x5c, 0x38, 0xf7, 0x61, - 0x46, 0x1b, 0x53, 0xd9, 0x1c, 0xcb, 0xc3, 0x4c, 0x76, 0xba, 0xce, 0x6b, 0x9b, 0x2e, 0x6a, 0x9b, - 0x83, 0x92, 0xe8, 0x75, 0x8d, 0xe2, 0x74, 0xd9, 0x7c, 0x52, 0x81, 0xf2, 0x3e, 0x49, 0x8e, 0x69, - 0x9b, 0xa0, 0x1f, 0x2d, 0xa8, 0xe6, 0xba, 0x02, 0x35, 0x47, 0x11, 0x3b, 0xdb, 0x59, 0xb5, 0x8d, - 0x89, 0x30, 0xba, 0xed, 0x9c, 0xc6, 0x93, 0xdf, 0xff, 0xfe, 0x7e, 0x7a, 0x0d, 0xad, 0x7a, 0x23, - 0x46, 0x8e, 0x41, 0x53, 0xa2, 0x1f, 0x2c, 0x80, 0xe1, 0x45, 0x40, 0x8d, 0x31, 0xb6, 0x2d, 0xde, - 0xa4, 0x5a, 0x73, 0x12, 0x88, 0x21, 0xea, 0x29, 0xa2, 0xab, 0xe8, 0xee, 0x28, 0xa2, 0xe6, 0xfa, - 0xa1, 0x9f, 0x2c, 0x78, 0xa9, 0xf8, 0x86, 0xa0, 0xcd, 0x31, 0xf6, 0x3d, 0xfb, 0x18, 0xd5, 0xde, - 0x98, 0x14, 0x66, 0x28, 0x6f, 0x2a, 0xca, 0x1e, 0x5a, 0x1f, 0x45, 0x59, 0x3d, 0x3a, 0xc2, 0xeb, - 0xa8, 0x1c, 0xe8, 0x67, 0x0b, 0xe6, 0x9e, 0x7f, 0x96, 0xd1, 0xbd, 0x31, 0x38, 0x9c, 0xf7, 0xf6, - 0xd7, 0xde, 0x9c, 0x1c, 0x68, 0xe8, 0xdf, 0x53, 0xf4, 0x1b, 0xc8, 0x1b, 0x93, 0xfe, 0x57, 0xfa, - 0x5f, 0xe5, 0x31, 0xfa, 0xcd, 0xca, 0x3d, 0xeb, 0xf9, 0xff, 0x7c, 0xf4, 0xce, 0xd8, 0x95, 0x3c, - 0x67, 0x26, 0xa9, 0xbd, 0x7b, 0x49, 0xb4, 0xd1, 0xf3, 0x96, 0xd2, 0xb3, 0x81, 0x1a, 0xa3, 0xf4, - 0x0c, 0xc7, 0x85, 0xec, 0x48, 0xfe, 0xb4, 0xd4, 0x9f, 0xeb, 0x79, 0x83, 0x20, 0x7a, 0x6f, 0x0c, - 0x56, 0x2f, 0x18, 0x62, 0x6b, 0xef, 0x5f, 0x1a, 0x6f, 0x74, 0xbd, 0xad, 0x74, 0x6d, 0xa2, 0x8d, - 0x09, 0x74, 0x65, 0x67, 0xb5, 0xb5, 0xf5, 0xeb, 0x49, 0xdd, 0x7a, 0x7a, 0x52, 0xb7, 0xfe, 0x3a, - 0xa9, 0x5b, 0xdf, 0x9d, 0xd6, 0xa7, 0x9e, 0x9e, 0xd6, 0xa7, 0x9e, 0x9d, 0xd6, 0xa7, 0x3e, 0x5f, - 0x89, 0xa8, 0x3c, 0xec, 0xb5, 0xdc, 0x36, 0xeb, 0x66, 0x89, 0xf5, 0xcf, 0xba, 0x08, 0x8f, 0xcc, - 0x6c, 0xaf, 0x66, 0xa5, 0xd6, 0x8c, 0x1a, 0xd8, 0x37, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x20, - 0x61, 0xbc, 0x54, 0xb6, 0x0c, 0x00, 0x00, + 0x14, 0xcf, 
0xc6, 0xad, 0x1d, 0x3f, 0x57, 0x90, 0x4c, 0x4a, 0xb3, 0xb5, 0x52, 0x37, 0xec, 0xa1, + 0x4d, 0x88, 0xb2, 0x2b, 0x3b, 0x84, 0x82, 0xf8, 0x27, 0x85, 0x80, 0x1b, 0xb5, 0x54, 0xd1, 0x06, + 0x71, 0x40, 0x48, 0xab, 0xb5, 0x77, 0xb2, 0x19, 0xc5, 0xde, 0x99, 0xee, 0x8c, 0x8d, 0x2c, 0x54, + 0x81, 0xfa, 0x01, 0x10, 0x12, 0x5f, 0x81, 0x0b, 0x5f, 0x80, 0x23, 0xe2, 0xc8, 0x8d, 0x4a, 0x48, + 0xd0, 0x23, 0x4a, 0xf8, 0x14, 0x9c, 0xd0, 0xce, 0xcc, 0xda, 0xbb, 0xcd, 0x1f, 0xdb, 0x39, 0x20, + 0xf5, 0xe4, 0xd9, 0xf7, 0xde, 0xef, 0xcd, 0xef, 0xf7, 0xe6, 0xcd, 0xf3, 0xc0, 0x1b, 0x6d, 0xca, + 0xbb, 0x94, 0x3b, 0x2d, 0x9f, 0x63, 0x47, 0xe0, 0x28, 0xc0, 0x71, 0x97, 0x44, 0xc2, 0xe9, 0xd7, + 0x5b, 0x58, 0xf8, 0x75, 0xe7, 0x71, 0x0f, 0xc7, 0x03, 0x9b, 0xc5, 0x54, 0x50, 0x54, 0x53, 0xb1, + 0x76, 0x12, 0x6b, 0x8f, 0x62, 0x6d, 0x1d, 0x5b, 0xbd, 0x1e, 0xd2, 0x90, 0xca, 0x50, 0x27, 0x59, + 0x29, 0x54, 0x75, 0x39, 0xa4, 0x34, 0xec, 0x60, 0xc7, 0x67, 0xc4, 0xf1, 0xa3, 0x88, 0x0a, 0x5f, + 0x10, 0x1a, 0x71, 0xed, 0xad, 0x66, 0xf6, 0x64, 0x0d, 0xe6, 0x88, 0x01, 0xc3, 0xa9, 0x6f, 0x39, + 0xe3, 0x93, 0x76, 0xa7, 0xd5, 0xa1, 0xed, 0xa3, 0x73, 0xbd, 0x59, 0x6c, 0x4e, 0x97, 0x14, 0x31, + 0x94, 0xc4, 0xfc, 0x90, 0x44, 0x92, 0x84, 0x8a, 0xb5, 0xbe, 0x35, 0xa0, 0xd6, 0xc4, 0xe2, 0x73, + 0xbf, 0x43, 0x02, 0x5f, 0xd0, 0x78, 0x1f, 0x8b, 0xed, 0xc1, 0x7d, 0x4c, 0xc2, 0x43, 0xe1, 0xe2, + 0xc7, 0x3d, 0xcc, 0x05, 0xba, 0x01, 0xc5, 0x43, 0x69, 0x30, 0x8d, 0x15, 0x63, 0xb5, 0xe0, 0xea, + 0x2f, 0xf4, 0x09, 0xc0, 0x28, 0x9d, 0x39, 0xbb, 0x62, 0xac, 0x56, 0x1a, 0x77, 0xec, 0x6c, 0x9d, + 0x54, 0x01, 0xf5, 0xde, 0xf6, 0x9e, 0x1f, 0x62, 0x9d, 0xd3, 0xcd, 0x20, 0xad, 0xe7, 0x06, 0xdc, + 0x3e, 0x97, 0x02, 0x67, 0x34, 0xe2, 0x18, 0xbd, 0x0e, 0xd7, 0xa4, 0x7e, 0x2f, 0xc7, 0xa4, 0x22, + 0x6d, 0x2a, 0x14, 0xed, 0x02, 0xf4, 0xd3, 0x14, 0xdc, 0x9c, 0x5d, 0x29, 0xac, 0x56, 0x1a, 0x6b, + 0xf6, 0xc5, 0xc7, 0x66, 0x0f, 0x37, 0x75, 0x33, 0x60, 0xd4, 0xcc, 0x29, 0x2b, 0x48, 0x65, 0x77, + 0xc7, 0x2a, 0x53, 0x54, 0x73, 0xd2, 0x0e, 0x60, 0xb9, 0x89, 0xc5, 0x43, 0x5f, 0x60, 0x9e, 0xd3, + 0x97, 0x96, 0x36, 0x5f, 0x42, 0xe3, 0xd2, 0x25, 0xfc, 0xd3, 0x80, 0x5b, 0xe7, 0x6c, 0xf4, 0x72, + 0x17, 0xf0, 0x3b, 0x03, 0xca, 0xc3, 0x2d, 0x90, 0x09, 0x25, 0x3f, 0x08, 0x62, 0xcc, 0xb9, 0xe4, + 0x7f, 0xcd, 0x4d, 0x3f, 0xd1, 0x12, 0x94, 0x58, 0xaf, 0xe5, 0x1d, 0xe1, 0x81, 0x6c, 0xc4, 0xb2, + 0x5b, 0x64, 0xbd, 0xd6, 0x03, 0x3c, 0x48, 0x74, 0xf7, 0xa9, 0x20, 0x51, 0xe8, 0x31, 0xfa, 0x15, + 0x8e, 0x25, 0x97, 0x82, 0x5b, 0x51, 0xb6, 0xbd, 0xc4, 0x84, 0xd6, 0x61, 0x81, 0xc5, 0x94, 0x51, + 0x8e, 0x63, 0x8f, 0xc5, 0x84, 0xc6, 0x44, 0x0c, 0xcc, 0x2b, 0x32, 0x6e, 0x3e, 0x75, 0xec, 0x69, + 0xbb, 0x55, 0x87, 0xa5, 0x26, 0x16, 0xdb, 0x49, 0xd9, 0x26, 0xbc, 0x27, 0xd6, 0x37, 0x60, 0x9e, + 0x86, 0xe8, 0x63, 0x79, 0x13, 0xe6, 0xd4, 0xb1, 0x90, 0x40, 0x1f, 0xff, 0xcd, 0x6c, 0x95, 0xd5, + 0xad, 0x96, 0xd0, 0xdd, 0x1d, 0xb7, 0x24, 0x43, 0x77, 0x03, 0xb4, 0x01, 0x57, 0xe5, 0x52, 0x5f, + 0xba, 0xa5, 0x73, 0x20, 0xae, 0x8a, 0xb2, 0x96, 0xe0, 0xb5, 0x61, 0x73, 0x28, 0x87, 0x62, 0x6c, + 0x3d, 0x81, 0x1b, 0x2f, 0x3a, 0xfe, 0x4f, 0x5e, 0x8b, 0xb0, 0xd0, 0xc4, 0x62, 0x7f, 0x10, 0xb5, + 0x49, 0x14, 0xa6, 0x9c, 0x6c, 0x40, 0x59, 0xa3, 0xe6, 0x63, 0x42, 0x89, 0x2b, 0x93, 0xa4, 0x33, + 0xe7, 0xa6, 0x9f, 0xd6, 0x75, 0x19, 0xff, 0x88, 0x06, 0x78, 0x37, 0x3a, 0xa0, 0x69, 0x96, 0x5f, + 0x0d, 0x58, 0xcc, 0x99, 0x75, 0x9e, 0x07, 0xb0, 0x10, 0xe0, 0x03, 0xbf, 0xd7, 0x11, 0x5e, 0x44, + 0x03, 0xec, 0x91, 0xe8, 0x80, 0x6a, 0x81, 0xb7, 0xb3, 0x6c, 0x59, 0x83, 0xd9, 0x3b, 0x2a, 0x70, + 0x98, 0xe3, 0xd5, 0x20, 0x6f, 0x40, 
0x5f, 0xc2, 0xa2, 0xcf, 0x58, 0x87, 0xb4, 0x65, 0xaf, 0x7a, + 0x7d, 0x1c, 0xf3, 0xd1, 0x24, 0x5c, 0x1f, 0x7b, 0x73, 0x54, 0xb8, 0x4c, 0x8d, 0x32, 0x79, 0xb4, + 0xdd, 0xfa, 0xd7, 0x80, 0x4a, 0x26, 0x06, 0x21, 0xb8, 0x12, 0xf9, 0x5d, 0x2c, 0xd9, 0x96, 0x5d, + 0xb9, 0x46, 0x37, 0x61, 0xce, 0x67, 0xcc, 0x93, 0x76, 0xd5, 0xf7, 0x25, 0x9f, 0xb1, 0x47, 0x89, + 0xcb, 0x84, 0x52, 0x4a, 0xa8, 0xa0, 0x3c, 0xfa, 0x13, 0xdd, 0x02, 0x08, 0x89, 0xf0, 0xda, 0xb4, + 0xdb, 0x25, 0x42, 0x36, 0x7a, 0xd9, 0x2d, 0x87, 0x44, 0x7c, 0x24, 0x0d, 0x89, 0xbb, 0xd5, 0x23, + 0x9d, 0xc0, 0x13, 0x7e, 0xc8, 0xcd, 0xab, 0xca, 0x2d, 0x2d, 0x9f, 0xf9, 0x21, 0x97, 0x68, 0x3a, + 0xd4, 0x5a, 0xd4, 0x68, 0xaa, 0x99, 0xa2, 0x8f, 0x53, 0x74, 0x80, 0x19, 0x37, 0x4b, 0x72, 0x88, + 0xdc, 0x19, 0x57, 0x8a, 0x4f, 0x69, 0xd0, 0xeb, 0x60, 0xbd, 0xcb, 0x0e, 0x66, 0xdc, 0xba, 0x0f, + 0x45, 0x65, 0x4c, 0x64, 0x33, 0x5f, 0x1c, 0xa6, 0xb2, 0x93, 0x75, 0x56, 0xdb, 0x6c, 0x5e, 0xdb, + 0x3c, 0x14, 0x78, 0xaf, 0xab, 0x15, 0x27, 0xcb, 0xc6, 0xd3, 0x32, 0x94, 0xf6, 0x71, 0xdc, 0x27, + 0x6d, 0x8c, 0x7e, 0x32, 0xa0, 0x92, 0xe9, 0x0a, 0xd4, 0x18, 0x47, 0xec, 0x74, 0x67, 0x55, 0x37, + 0xa7, 0xc2, 0xa8, 0xb6, 0xb3, 0xea, 0x4f, 0xff, 0xf8, 0xe7, 0x87, 0xd9, 0x75, 0xb4, 0xe6, 0x8c, + 0x79, 0x72, 0x0c, 0x9b, 0x12, 0xfd, 0x68, 0x00, 0x8c, 0x2e, 0x02, 0xaa, 0x4f, 0xb0, 0x6d, 0xfe, + 0x26, 0x55, 0x1b, 0xd3, 0x40, 0x34, 0x51, 0x47, 0x12, 0x5d, 0x43, 0x77, 0xc7, 0x11, 0xd5, 0xd7, + 0x0f, 0xfd, 0x6c, 0xc0, 0x2b, 0xf9, 0x19, 0x82, 0xb6, 0x26, 0xd8, 0xf7, 0xf4, 0x30, 0xaa, 0xbe, + 0x35, 0x2d, 0x4c, 0x53, 0xde, 0x92, 0x94, 0x1d, 0xb4, 0x31, 0x8e, 0xb2, 0x1c, 0x3a, 0xdc, 0xe9, + 0xc8, 0x1c, 0xe8, 0x17, 0x03, 0xe6, 0x5f, 0x1c, 0xcb, 0xe8, 0xde, 0x04, 0x1c, 0xce, 0x9a, 0xfd, + 0xd5, 0xb7, 0xa7, 0x07, 0x6a, 0xfa, 0xf7, 0x24, 0xfd, 0x3a, 0x72, 0x26, 0xa4, 0xff, 0xb5, 0xfa, + 0x57, 0x79, 0x82, 0x7e, 0x37, 0x32, 0x63, 0x3d, 0xfb, 0x9f, 0x8f, 0xde, 0x9b, 0xb8, 0x92, 0x67, + 0xbc, 0x49, 0xaa, 0xef, 0x5f, 0x12, 0xad, 0xf5, 0xbc, 0x23, 0xf5, 0x6c, 0xa2, 0xfa, 0x38, 0x3d, + 0xa3, 0xe7, 0x42, 0x7a, 0x24, 0x7f, 0x19, 0xf2, 0xcf, 0xf5, 0xac, 0x87, 0x20, 0xfa, 0x60, 0x02, + 0x56, 0x17, 0x3c, 0x62, 0xab, 0x1f, 0x5e, 0x1a, 0xaf, 0x75, 0xbd, 0x2b, 0x75, 0x6d, 0xa1, 0xcd, + 0x29, 0x74, 0xa5, 0x67, 0xb5, 0xfd, 0xf0, 0xb7, 0xe3, 0x9a, 0xf1, 0xec, 0xb8, 0x66, 0xfc, 0x7d, + 0x5c, 0x33, 0xbe, 0x3f, 0xa9, 0xcd, 0x3c, 0x3b, 0xa9, 0xcd, 0x3c, 0x3f, 0xa9, 0xcd, 0x7c, 0xd1, + 0x08, 0x89, 0x38, 0xec, 0xb5, 0xec, 0x36, 0xed, 0xa6, 0x89, 0xd5, 0xcf, 0x06, 0x0f, 0x8e, 0x9c, + 0x76, 0x87, 0xe0, 0x48, 0x38, 0x61, 0xcc, 0xda, 0x8e, 0xe8, 0x72, 0x35, 0xc6, 0x5a, 0x45, 0xf9, + 0x74, 0xdf, 0xfc, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x82, 0x6b, 0xb0, 0xbd, 0xc0, 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
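A practical consequence of this relocated generated code: the Tendermint query service types now live in `client/grpc/tmservice`, while the pagination messages stay in `types/query` and are referenced through the `query` import above. A hedged sketch of a caller that uses both packages; `conn` can be a `client.Context` or any other implementation of the gogo gRPC `ClientConn` interface, and the page size is an arbitrary example value:

```go
package example

import (
	"context"
	"fmt"

	gogogrpc "github.com/gogo/protobuf/grpc"

	"github.com/cosmos/cosmos-sdk/client/grpc/tmservice"
	qtypes "github.com/cosmos/cosmos-sdk/types/query"
)

// latestValidators pages through the latest validator set using the relocated
// tmservice types together with the shared pagination messages.
func latestValidators(ctx context.Context, conn gogogrpc.ClientConn) error {
	svc := tmservice.NewServiceClient(conn)

	res, err := svc.GetLatestValidatorSet(ctx, &tmservice.GetLatestValidatorSetRequest{
		Pagination: &qtypes.PageRequest{Offset: 0, Limit: 10},
	})
	if err != nil {
		return err
	}

	fmt.Printf("height %d: %d validators returned\n", res.BlockHeight, len(res.Validators))
	return nil
}
```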
@@ -2181,7 +2182,7 @@ func (m *GetValidatorSetByHeightRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageRequest{} + m.Pagination = &query.PageRequest{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -2323,7 +2324,7 @@ func (m *GetValidatorSetByHeightResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageResponse{} + m.Pagination = &query.PageResponse{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -2412,7 +2413,7 @@ func (m *GetLatestValidatorSetRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageRequest{} + m.Pagination = &query.PageRequest{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -2554,7 +2555,7 @@ func (m *GetLatestValidatorSetResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageResponse{} + m.Pagination = &query.PageResponse{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err diff --git a/types/query/query.pb.gw.go b/client/grpc/tmservice/query.pb.gw.go similarity index 99% rename from types/query/query.pb.gw.go rename to client/grpc/tmservice/query.pb.gw.go index 01b943030..1fcd85268 100644 --- a/types/query/query.pb.gw.go +++ b/client/grpc/tmservice/query.pb.gw.go @@ -2,11 +2,11 @@ // source: cosmos/base/tendermint/v1beta1/query.proto /* -Package query is a reverse proxy. +Package tmservice is a reverse proxy. It translates gRPC into RESTful JSON APIs. */ -package query +package tmservice import ( "context" diff --git a/client/grpc/tmservice/service.go b/client/grpc/tmservice/service.go index b1198eb67..b4d9f8c95 100644 --- a/client/grpc/tmservice/service.go +++ b/client/grpc/tmservice/service.go @@ -21,10 +21,10 @@ type queryServer struct { interfaceRegistry codectypes.InterfaceRegistry } -var _ qtypes.ServiceServer = queryServer{} +var _ ServiceServer = queryServer{} // NewQueryServer creates a new tendermint query server. 
-func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry) qtypes.ServiceServer { +func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry) ServiceServer { return queryServer{ clientCtx: clientCtx, interfaceRegistry: interfaceRegistry, @@ -32,18 +32,18 @@ func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.Inter } // GetSyncing implements ServiceServer.GetSyncing -func (s queryServer) GetSyncing(_ context.Context, _ *qtypes.GetSyncingRequest) (*qtypes.GetSyncingResponse, error) { +func (s queryServer) GetSyncing(_ context.Context, _ *GetSyncingRequest) (*GetSyncingResponse, error) { status, err := getNodeStatus(s.clientCtx) if err != nil { return nil, err } - return &qtypes.GetSyncingResponse{ + return &GetSyncingResponse{ Syncing: status.SyncInfo.CatchingUp, }, nil } // GetLatestBlock implements ServiceServer.GetLatestBlock -func (s queryServer) GetLatestBlock(context.Context, *qtypes.GetLatestBlockRequest) (*qtypes.GetLatestBlockResponse, error) { +func (s queryServer) GetLatestBlock(context.Context, *GetLatestBlockRequest) (*GetLatestBlockResponse, error) { status, err := getBlock(s.clientCtx, nil) if err != nil { return nil, err @@ -55,14 +55,14 @@ func (s queryServer) GetLatestBlock(context.Context, *qtypes.GetLatestBlockReque return nil, err } - return &qtypes.GetLatestBlockResponse{ + return &GetLatestBlockResponse{ BlockId: &protoBlockID, Block: protoBlock, }, nil } // GetBlockByHeight implements ServiceServer.GetBlockByHeight -func (s queryServer) GetBlockByHeight(_ context.Context, req *qtypes.GetBlockByHeightRequest) (*qtypes.GetBlockByHeightResponse, error) { +func (s queryServer) GetBlockByHeight(_ context.Context, req *GetBlockByHeightRequest) (*GetBlockByHeightResponse, error) { chainHeight, err := rpc.GetChainHeight(s.clientCtx) if err != nil { return nil, err @@ -81,14 +81,14 @@ func (s queryServer) GetBlockByHeight(_ context.Context, req *qtypes.GetBlockByH if err != nil { return nil, err } - return &qtypes.GetBlockByHeightResponse{ + return &GetBlockByHeightResponse{ BlockId: &protoBlockID, Block: protoBlock, }, nil } // GetLatestValidatorSet implements ServiceServer.GetLatestValidatorSet -func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *qtypes.GetLatestValidatorSetRequest) (*qtypes.GetLatestValidatorSetResponse, error) { +func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *GetLatestValidatorSetRequest) (*GetLatestValidatorSetResponse, error) { page, limit, err := qtypes.ParsePagination(req.Pagination) if err != nil { return nil, err @@ -99,13 +99,13 @@ func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *qtypes.GetL return nil, err } - outputValidatorsRes := &qtypes.GetLatestValidatorSetResponse{ + outputValidatorsRes := &GetLatestValidatorSetResponse{ BlockHeight: validatorsRes.BlockHeight, - Validators: make([]*qtypes.Validator, len(validatorsRes.Validators)), + Validators: make([]*Validator, len(validatorsRes.Validators)), } for i, validator := range validatorsRes.Validators { - outputValidatorsRes.Validators[i] = &qtypes.Validator{ + outputValidatorsRes.Validators[i] = &Validator{ Address: validator.Address, ProposerPriority: validator.ProposerPriority, PubKey: validator.PubKey, @@ -116,7 +116,7 @@ func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *qtypes.GetL } // GetValidatorSetByHeight implements ServiceServer.GetValidatorSetByHeight -func (s queryServer) GetValidatorSetByHeight(ctx context.Context, 
req *qtypes.GetValidatorSetByHeightRequest) (*qtypes.GetValidatorSetByHeightResponse, error) { +func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *GetValidatorSetByHeightRequest) (*GetValidatorSetByHeightResponse, error) { page, limit, err := qtypes.ParsePagination(req.Pagination) if err != nil { return nil, err @@ -136,13 +136,13 @@ func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *qtypes.Ge return nil, err } - outputValidatorsRes := &qtypes.GetValidatorSetByHeightResponse{ + outputValidatorsRes := &GetValidatorSetByHeightResponse{ BlockHeight: validatorsRes.BlockHeight, - Validators: make([]*qtypes.Validator, len(validatorsRes.Validators)), + Validators: make([]*Validator, len(validatorsRes.Validators)), } for i, validator := range validatorsRes.Validators { - outputValidatorsRes.Validators[i] = &qtypes.Validator{ + outputValidatorsRes.Validators[i] = &Validator{ Address: validator.Address, ProposerPriority: validator.ProposerPriority, PubKey: validator.PubKey, @@ -153,7 +153,7 @@ func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *qtypes.Ge } // GetNodeInfo implements ServiceServer.GetNodeInfo -func (s queryServer) GetNodeInfo(ctx context.Context, req *qtypes.GetNodeInfoRequest) (*qtypes.GetNodeInfoResponse, error) { +func (s queryServer) GetNodeInfo(ctx context.Context, req *GetNodeInfoRequest) (*GetNodeInfoResponse, error) { status, err := getNodeStatus(s.clientCtx) if err != nil { return nil, err @@ -162,19 +162,19 @@ func (s queryServer) GetNodeInfo(ctx context.Context, req *qtypes.GetNodeInfoReq protoNodeInfo := status.NodeInfo.ToProto() nodeInfo := version.NewInfo() - deps := make([]*qtypes.Module, len(nodeInfo.BuildDeps)) + deps := make([]*Module, len(nodeInfo.BuildDeps)) for i, dep := range nodeInfo.BuildDeps { - deps[i] = &qtypes.Module{ + deps[i] = &Module{ Path: dep.Path, Sum: dep.Sum, Version: dep.Version, } } - resp := qtypes.GetNodeInfoResponse{ + resp := GetNodeInfoResponse{ DefaultNodeInfo: protoNodeInfo, - ApplicationVersion: &qtypes.VersionInfo{ + ApplicationVersion: &VersionInfo{ AppName: nodeInfo.AppName, Name: nodeInfo.Name, GitCommit: nodeInfo.GitCommit, @@ -193,7 +193,7 @@ func RegisterTendermintService( clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry, ) { - qtypes.RegisterServiceServer( + RegisterServiceServer( qrt, NewQueryServer(clientCtx, interfaceRegistry), ) @@ -202,5 +202,5 @@ func RegisterTendermintService( // RegisterGRPCGatewayRoutes mounts the tendermint service's GRPC-gateway routes on the // given Mux. 
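Before the `RegisterGRPCGatewayRoutes` definition that follows, a short sketch of how an application is expected to mount these routes. This is an assumption-laden illustration: `clientCtx` is taken to satisfy the gogo gRPC `ClientConn` interface, as it does when passed to `tmservice.NewServiceClient` in the tests further below.

```go
package example

import (
	"github.com/grpc-ecosystem/grpc-gateway/runtime"

	"github.com/cosmos/cosmos-sdk/client"
	"github.com/cosmos/cosmos-sdk/client/grpc/tmservice"
)

// mountTendermintRoutes wires the tendermint query endpoints into an existing
// grpc-gateway mux, using the client context as the underlying connection.
func mountTendermintRoutes(clientCtx client.Context, mux *runtime.ServeMux) {
	tmservice.RegisterGRPCGatewayRoutes(clientCtx, mux)
}
```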
func RegisterGRPCGatewayRoutes(clientConn gogogrpc.ClientConn, mux *runtime.ServeMux) { - qtypes.RegisterServiceHandlerClient(context.Background(), mux, qtypes.NewServiceClient(clientConn)) + RegisterServiceHandlerClient(context.Background(), mux, NewServiceClient(clientConn)) } diff --git a/client/grpc/tmservice/service_test.go b/client/grpc/tmservice/service_test.go index bb145e6e2..2210486f4 100644 --- a/client/grpc/tmservice/service_test.go +++ b/client/grpc/tmservice/service_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/suite" + "github.com/cosmos/cosmos-sdk/client/grpc/tmservice" "github.com/cosmos/cosmos-sdk/testutil/network" qtypes "github.com/cosmos/cosmos-sdk/types/query" "github.com/cosmos/cosmos-sdk/types/rest" @@ -19,7 +20,7 @@ type IntegrationTestSuite struct { cfg network.Config network *network.Network - queryClient qtypes.ServiceClient + queryClient tmservice.ServiceClient } func (s *IntegrationTestSuite) SetupSuite() { @@ -36,7 +37,7 @@ func (s *IntegrationTestSuite) SetupSuite() { _, err := s.network.WaitForHeight(1) s.Require().NoError(err) - s.queryClient = qtypes.NewServiceClient(s.network.Validators[0].ClientCtx) + s.queryClient = tmservice.NewServiceClient(s.network.Validators[0].ClientCtx) } func (s *IntegrationTestSuite) TearDownSuite() { @@ -47,13 +48,13 @@ func (s *IntegrationTestSuite) TearDownSuite() { func (s IntegrationTestSuite) TestQueryNodeInfo() { val := s.network.Validators[0] - res, err := s.queryClient.GetNodeInfo(context.Background(), &qtypes.GetNodeInfoRequest{}) + res, err := s.queryClient.GetNodeInfo(context.Background(), &tmservice.GetNodeInfoRequest{}) s.Require().NoError(err) s.Require().Equal(res.ApplicationVersion.AppName, version.NewInfo().AppName) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/node_info", val.APIAddress)) s.Require().NoError(err) - var getInfoRes qtypes.GetNodeInfoResponse + var getInfoRes tmservice.GetNodeInfoResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getInfoRes)) s.Require().Equal(getInfoRes.ApplicationVersion.AppName, version.NewInfo().AppName) } @@ -61,35 +62,35 @@ func (s IntegrationTestSuite) TestQueryNodeInfo() { func (s IntegrationTestSuite) TestQuerySyncing() { val := s.network.Validators[0] - _, err := s.queryClient.GetSyncing(context.Background(), &qtypes.GetSyncingRequest{}) + _, err := s.queryClient.GetSyncing(context.Background(), &tmservice.GetSyncingRequest{}) s.Require().NoError(err) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/syncing", val.APIAddress)) s.Require().NoError(err) - var syncingRes qtypes.GetSyncingResponse + var syncingRes tmservice.GetSyncingResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &syncingRes)) } func (s IntegrationTestSuite) TestQueryLatestBlock() { val := s.network.Validators[0] - _, err := s.queryClient.GetLatestBlock(context.Background(), &qtypes.GetLatestBlockRequest{}) + _, err := s.queryClient.GetLatestBlock(context.Background(), &tmservice.GetLatestBlockRequest{}) s.Require().NoError(err) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/blocks/latest", val.APIAddress)) s.Require().NoError(err) - var blockInfoRes qtypes.GetLatestBlockResponse + var blockInfoRes tmservice.GetLatestBlockResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &blockInfoRes)) } func (s IntegrationTestSuite) TestQueryBlockByHeight() { val := s.network.Validators[0] - _, err := 
s.queryClient.GetBlockByHeight(context.Background(), &qtypes.GetBlockByHeightRequest{Height: 1}) + _, err := s.queryClient.GetBlockByHeight(context.Background(), &tmservice.GetBlockByHeightRequest{Height: 1}) s.Require().NoError(err) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/blocks/%d", val.APIAddress, 1)) s.Require().NoError(err) - var blockInfoRes qtypes.GetBlockByHeightResponse + var blockInfoRes tmservice.GetBlockByHeightResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &blockInfoRes)) } @@ -97,13 +98,13 @@ func (s IntegrationTestSuite) TestQueryLatestValidatorSet() { val := s.network.Validators[0] // nil pagination - _, err := s.queryClient.GetLatestValidatorSet(context.Background(), &qtypes.GetLatestValidatorSetRequest{ + _, err := s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{ Pagination: nil, }) s.Require().NoError(err) //with pagination - _, err = s.queryClient.GetLatestValidatorSet(context.Background(), &qtypes.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{ + _, err = s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{ Offset: 0, Limit: 10, }}) @@ -116,7 +117,7 @@ func (s IntegrationTestSuite) TestQueryLatestValidatorSet() { // rest request with pagination restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validators/latest?pagination.offset=%d&pagination.limit=%d", val.APIAddress, 0, 1)) s.Require().NoError(err) - var validatorSetRes qtypes.GetLatestValidatorSetResponse + var validatorSetRes tmservice.GetLatestValidatorSetResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes)) } @@ -124,13 +125,13 @@ func (s IntegrationTestSuite) TestQueryValidatorSetByHeight() { val := s.network.Validators[0] // nil pagination - _, err := s.queryClient.GetValidatorSetByHeight(context.Background(), &qtypes.GetValidatorSetByHeightRequest{ + _, err := s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{ Height: 1, Pagination: nil, }) s.Require().NoError(err) - _, err = s.queryClient.GetValidatorSetByHeight(context.Background(), &qtypes.GetValidatorSetByHeightRequest{ + _, err = s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{ Height: 1, Pagination: &qtypes.PageRequest{ Offset: 0, @@ -144,7 +145,7 @@ func (s IntegrationTestSuite) TestQueryValidatorSetByHeight() { // rest query with pagination restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validators/%d?pagination.offset=%d&pagination.limit=%d", val.APIAddress, 1, 0, 1)) - var validatorSetRes qtypes.GetValidatorSetByHeightResponse + var validatorSetRes tmservice.GetValidatorSetByHeightResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes)) } diff --git a/go.mod b/go.mod index 39e280424..8163ebbfb 100644 --- a/go.mod +++ b/go.mod @@ -23,6 +23,7 @@ require ( github.com/golang/snappy v0.0.2 // indirect github.com/gorilla/handlers v1.5.1 github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/go-grpc-middleware v1.2.2 github.com/grpc-ecosystem/grpc-gateway v1.16.0 github.com/hashicorp/golang-lru v0.5.4 github.com/magiconair/properties v1.8.4 diff --git a/go.sum b/go.sum index da0703bd6..b6ae47006 100644 --- a/go.sum +++ b/go.sum @@ -247,6 +247,7 @@ github.com/gorilla/websocket v1.4.2/go.mod 
h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/ad github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.2.1/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2 h1:FlFbCRLd5Jr4iYXZufAvgWN6Ao0JrI5chLINnUXDDr0= github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= diff --git a/proto/cosmos/base/tendermint/v1beta1/query.proto b/proto/cosmos/base/tendermint/v1beta1/query.proto index b5220231a..640522e7a 100644 --- a/proto/cosmos/base/tendermint/v1beta1/query.proto +++ b/proto/cosmos/base/tendermint/v1beta1/query.proto @@ -8,7 +8,7 @@ import "tendermint/types/block.proto"; import "tendermint/types/types.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -option go_package = "github.com/cosmos/cosmos-sdk/types/query"; +option go_package = "github.com/cosmos/cosmos-sdk/client/grpc/tmservice"; // Service defines the gRPC querier service for tendermint queries. service Service { diff --git a/server/grpc/server_test.go b/server/grpc/server_test.go index bfa39d818..9afa0b750 100644 --- a/server/grpc/server_test.go +++ b/server/grpc/server_test.go @@ -11,13 +11,14 @@ import ( "github.com/stretchr/testify/suite" "google.golang.org/grpc" "google.golang.org/grpc/metadata" - rpb "google.golang.org/grpc/reflection/grpc_reflection_v1alpha" "github.com/cosmos/cosmos-sdk/testutil/network" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" grpctypes "github.com/cosmos/cosmos-sdk/types/grpc" + "github.com/cosmos/cosmos-sdk/types/tx" + txtypes "github.com/cosmos/cosmos-sdk/types/tx" banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" ) @@ -99,6 +100,19 @@ func (s *IntegrationTestSuite) TestGRPCServer() { } // Make sure the following services are present s.Require().True(servicesMap["cosmos.bank.v1beta1.Query"]) + + // Query the tx via gRPC without pagination. This used to panic, see + // https://github.com/cosmos/cosmos-sdk/issues/8038. + txServiceClient := txtypes.NewServiceClient(conn) + _, err = txServiceClient.GetTxsEvent( + context.Background(), + &tx.GetTxsEventRequest{ + Event: "message.action=send", + }, + ) + // TODO Once https://github.com/cosmos/cosmos-sdk/pull/8029 is merged, this + // should not error anymore. + s.Require().Error(err) } // Test and enforce that we upfront reject any connections to baseapp containing diff --git a/types/errors/abci.go b/types/errors/abci.go index f297e4ca6..df85f6bc8 100644 --- a/types/errors/abci.go +++ b/types/errors/abci.go @@ -151,12 +151,14 @@ func errIsNil(err error) bool { return false } +var errPanicWithMsg = Wrapf(ErrPanic, "panic message redacted to hide potentially sensitive system info") + // Redact replaces an error that is not initialized as an ABCI Error with a // generic internal error instance. If the error is an ABCI Error, that error is // simply returned. 
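Before the `Redact` implementation that follows, a hedged illustration of the behaviour change: any error wrapping `ErrPanic` is now replaced by `errPanicWithMsg`, so the original panic message (which may contain stack or system details) never reaches ABCI clients. The `sdkerrors` alias is the usual convention and is assumed for this sketch.

```go
package example

import (
	"fmt"

	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)

// showRedaction demonstrates that a panic-derived error loses its original
// message once redacted, keeping internal details out of query responses.
func showRedaction() {
	err := sdkerrors.Wrap(sdkerrors.ErrPanic, "some secret stack trace")

	fmt.Println(sdkerrors.Redact(err))
	// Expected output, per the updated tests:
	// panic message redacted to hide potentially sensitive system info: panic
}
```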
func Redact(err error) error { if ErrPanic.Is(err) { - return ErrPanic + return errPanicWithMsg } if abciCode(err) == internalABCICode { return errInternal diff --git a/types/errors/abci_test.go b/types/errors/abci_test.go index 18d6b1f2e..02c12e7bb 100644 --- a/types/errors/abci_test.go +++ b/types/errors/abci_test.go @@ -171,7 +171,7 @@ func (s *abciTestSuite) TestRedact() { }{ "panic looses message": { err: Wrap(ErrPanic, "some secret stack trace"), - changed: ErrPanic, + changed: errPanicWithMsg, }, "sdk errors untouched": { err: Wrap(ErrUnauthorized, "cannot drop db"), @@ -233,7 +233,7 @@ func (s *abciTestSuite) TestABCIInfoSerializeErr() { }, "redact in default encoder": { src: myPanic, - exp: "panic", + exp: "panic message redacted to hide potentially sensitive system info: panic", }, "do not redact in debug encoder": { src: myPanic, diff --git a/x/auth/tx/service_test.go b/x/auth/tx/service_test.go index a1bacb55c..54fe14fd7 100644 --- a/x/auth/tx/service_test.go +++ b/x/auth/tx/service_test.go @@ -127,7 +127,8 @@ func (s IntegrationTestSuite) TestGetTxEvents() { // Query the tx via gRPC. grpcRes, err := s.queryClient.GetTxsEvent( context.Background(), - &tx.GetTxsEventRequest{Event: "message.action=send", + &tx.GetTxsEventRequest{ + Event: "message.action=send", Pagination: &query.PageRequest{ CountTotal: false, Offset: 0, @@ -139,14 +140,26 @@ func (s IntegrationTestSuite) TestGetTxEvents() { s.Require().Equal(len(grpcRes.Txs), 1) s.Require().Equal("foobar", grpcRes.Txs[0].Body.Memo) + // Query the tx via gRPC without pagination. This used to panic, see + // https://github.com/cosmos/cosmos-sdk/issues/8038. + grpcRes, err = s.queryClient.GetTxsEvent( + context.Background(), + &tx.GetTxsEventRequest{ + Event: "message.action=send", + }, + ) + // TODO Once https://github.com/cosmos/cosmos-sdk/pull/8029 is merged, this + // should not error anymore. + s.Require().Error(err) + // Query the tx via grpc-gateway. restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?event=%s&pagination.offset=%d&pagination.limit=%d", val.APIAddress, "message.action=send", 0, 1)) s.Require().NoError(err) var getTxRes tx.GetTxsEventResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getTxRes)) - s.Require().Equal(len(grpcRes.Txs), 1) + s.Require().Equal(len(getTxRes.Txs), 1) s.Require().Equal("foobar", getTxRes.Txs[0].Body.Memo) - s.Require().NotZero(grpcRes.TxResponses[0].Height) + s.Require().NotZero(getTxRes.TxResponses[0].Height) } func (s IntegrationTestSuite) TestGetTx() { From 6476b09b64d20a160766b24ab424acdbc275eba8 Mon Sep 17 00:00:00 2001 From: Aleksandr Bezobchuk Date: Mon, 30 Nov 2020 12:22:05 -0500 Subject: [PATCH 20/40] multistore: fix SetInitialVersion (#8048) --- store/rootmulti/store.go | 9 ++++++--- store/rootmulti/store_test.go | 7 +++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/store/rootmulti/store.go b/store/rootmulti/store.go index 72238cbb1..61378a366 100644 --- a/store/rootmulti/store.go +++ b/store/rootmulti/store.go @@ -549,9 +549,12 @@ func (rs *Store) SetInitialVersion(version int64) error { // Loop through all the stores, if it's an IAVL store, then set initial // version on it. 
- for _, commitKVStore := range rs.stores { - if storeWithVersion, ok := commitKVStore.(types.StoreWithInitialVersion); ok { - storeWithVersion.SetInitialVersion(version) + for key, store := range rs.stores { + if store.GetStoreType() == types.StoreTypeIAVL { + // If the store is wrapped with an inter-block cache, we must first unwrap + // it to get the underlying IAVL store. + store = rs.GetCommitKVStore(key) + store.(*iavl.Store).SetInitialVersion(version) } } diff --git a/store/rootmulti/store_test.go b/store/rootmulti/store_test.go index efe261877..7934b514d 100644 --- a/store/rootmulti/store_test.go +++ b/store/rootmulti/store_test.go @@ -657,11 +657,18 @@ func TestSetInitialVersion(t *testing.T) { db := dbm.NewMemDB() multi := newMultiStoreWithMounts(db, types.PruneNothing) + require.NoError(t, multi.LoadLatestVersion()) + multi.SetInitialVersion(5) require.Equal(t, int64(5), multi.initialVersion) multi.Commit() require.Equal(t, int64(5), multi.LastCommitID().Version) + + ckvs := multi.GetCommitKVStore(multi.keysByName["store1"]) + iavlStore, ok := ckvs.(*iavl.Store) + require.True(t, ok) + require.True(t, iavlStore.VersionExists(5)) } func BenchmarkMultistoreSnapshot100K(b *testing.B) { From 513dabaec83c04ad5a7191480b5f46fed03e42e5 Mon Sep 17 00:00:00 2001 From: Alessio Treglia Date: Mon, 30 Nov 2020 20:55:39 +0000 Subject: [PATCH 21/40] client/keys: support export of unarmored private key (#8043) The --unarmored-hex and --unsafe flags are added to the keys export command. Users must use both to export private keys material. The output would be in hexadecimal format and unarmored. See #8042 for scope and motivations. introduce new UnsafeKeyring interface. Unsafe operations are supported by UnsafeKeyring types. By doing so, we try to make the client developer aware of the risks. Co-authored-by: Sunny Aggarwal --- client/keys/export.go | 52 ++++++++++++++++++++++++++++++++-- client/keys/export_test.go | 25 ++++++++++++++-- crypto/keyring/keyring.go | 38 +++++++++++++++++++++++++ crypto/keyring/keyring_test.go | 24 ++++++++++++++++ 4 files changed, 133 insertions(+), 6 deletions(-) diff --git a/client/keys/export.go b/client/keys/export.go index 3f75ee2e5..ac2b6b476 100644 --- a/client/keys/export.go +++ b/client/keys/export.go @@ -2,23 +2,45 @@ package keys import ( "bufio" + "fmt" "github.com/spf13/cobra" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/input" + "github.com/cosmos/cosmos-sdk/crypto/keyring" +) + +const ( + flagUnarmoredHex = "unarmored-hex" + flagUnsafe = "unsafe" ) // ExportKeyCommand exports private keys from the key store. func ExportKeyCommand() *cobra.Command { - return &cobra.Command{ + cmd := &cobra.Command{ Use: "export ", Short: "Export private keys", - Long: `Export a private key from the local keybase in ASCII-armored encrypted format.`, - Args: cobra.ExactArgs(1), + Long: `Export a private key from the local keyring in ASCII-armored encrypted format. + +When both the --unarmored-hex and --unsafe flags are selected, cryptographic +private key material is exported in an INSECURE fashion that is designed to +allow users to import their keys in hot wallets. This feature is for advanced +users only that are confident about how to handle private keys work and are +FULLY AWARE OF THE RISKS. 
If you are unsure, you may want to do some research +and export your keys in ASCII-armored encrypted format.`, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { buf := bufio.NewReader(cmd.InOrStdin()) clientCtx := client.GetClientContextFromCmd(cmd) + unarmored, _ := cmd.Flags().GetBool(flagUnarmoredHex) + unsafe, _ := cmd.Flags().GetBool(flagUnsafe) + + if unarmored && unsafe { + return exportUnsafeUnarmored(cmd, args[0], buf, clientCtx.Keyring) + } else if unarmored || unsafe { + return fmt.Errorf("the flags %s and %s must be used together", flagUnsafe, flagUnarmoredHex) + } encryptPassword, err := input.GetPassword("Enter passphrase to encrypt the exported key:", buf) if err != nil { @@ -31,7 +53,31 @@ func ExportKeyCommand() *cobra.Command { } cmd.Println(armored) + return nil }, } + + cmd.Flags().Bool(flagUnarmoredHex, false, "Export unarmored hex privkey. Requires --unsafe.") + cmd.Flags().Bool(flagUnsafe, false, "Enable unsafe operations. This flag must be switched on along with all unsafe operation-specific options.") + + return cmd +} + +func exportUnsafeUnarmored(cmd *cobra.Command, uid string, buf *bufio.Reader, kr keyring.Keyring) error { + // confirm deletion, unless -y is passed + if yes, err := input.GetConfirmation("WARNING: The private key will be exported as an unarmored hexadecimal string. USE AT YOUR OWN RISK. Continue?", buf, cmd.ErrOrStderr()); err != nil { + return err + } else if !yes { + return nil + } + + hexPrivKey, err := keyring.NewUnsafe(kr).UnsafeExportPrivKeyHex(uid) + if err != nil { + return err + } + + cmd.Println(hexPrivKey) + + return nil } diff --git a/client/keys/export_test.go b/client/keys/export_test.go index 4276db1c1..b01bbf823 100644 --- a/client/keys/export_test.go +++ b/client/keys/export_test.go @@ -36,15 +36,34 @@ func Test_runExportCmd(t *testing.T) { require.NoError(t, err) // Now enter password - mockIn.Reset("123456789\n123456789\n") - cmd.SetArgs([]string{ + args := []string{ "keyname1", fmt.Sprintf("--%s=%s", flags.FlagHome, kbHome), fmt.Sprintf("--%s=%s", flags.FlagKeyringBackend, keyring.BackendTest), - }) + } + + mockIn.Reset("123456789\n123456789\n") + cmd.SetArgs(args) clientCtx := client.Context{}.WithKeyring(kb) ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx) require.NoError(t, cmd.ExecuteContext(ctx)) + + argsUnsafeOnly := append(args, "--unsafe") + cmd.SetArgs(argsUnsafeOnly) + require.Error(t, cmd.ExecuteContext(ctx)) + + argsUnarmoredHexOnly := append(args, "--unarmored-hex") + cmd.SetArgs(argsUnarmoredHexOnly) + require.Error(t, cmd.ExecuteContext(ctx)) + + argsUnsafeUnarmoredHex := append(args, "--unsafe", "--unarmored-hex") + cmd.SetArgs(argsUnsafeUnarmoredHex) + require.Error(t, cmd.ExecuteContext(ctx)) + + mockIn, mockOut := testutil.ApplyMockIO(cmd) + mockIn.Reset("y\n") + require.NoError(t, cmd.ExecuteContext(ctx)) + require.Equal(t, "2485e33678db4175dc0ecef2d6e1fc493d4a0d7f7ce83324b6ed70afe77f3485\n", mockOut.String()) } diff --git a/crypto/keyring/keyring.go b/crypto/keyring/keyring.go index d3220b5b4..f26a06b93 100644 --- a/crypto/keyring/keyring.go +++ b/crypto/keyring/keyring.go @@ -88,6 +88,13 @@ type Keyring interface { Exporter } +// UnsafeKeyring exposes unsafe operations such as unsafe unarmored export in +// addition to those that are made available by the Keyring interface. +type UnsafeKeyring interface { + Keyring + UnsafeExporter +} + // Signer is implemented by key stores that want to provide signing capabilities. 
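The `UnsafeKeyring` additions in this file are meant to be used roughly as follows. This is a hedged sketch: the app name, backend, directory, and key uid are illustrative values, and error handling is kept minimal.

```go
package example

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/crypto/keyring"
)

// exportPrivKeyHex shows the opt-in unsafe export path: the plain Keyring
// interface still only offers armored exports, so the caller must wrap the
// keyring with NewUnsafe first.
func exportPrivKeyHex() error {
	// "myapp", BackendTest, and the directory are placeholder values.
	kr, err := keyring.New("myapp", keyring.BackendTest, "/tmp/example-keys", nil)
	if err != nil {
		return err
	}

	unsafeKr := keyring.NewUnsafe(kr)

	hexPriv, err := unsafeKr.UnsafeExportPrivKeyHex("demo")
	if err != nil {
		return err // e.g. the key "demo" does not exist
	}

	fmt.Println(hexPriv) // 64 hex characters -- handle with extreme care
	return nil
}
```

On the command line the same path is exposed as `keys export <name> --unsafe --unarmored-hex`, which additionally asks for an interactive confirmation before printing the key.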
type Signer interface { // Sign sign byte messages with a user key. @@ -110,12 +117,20 @@ type Exporter interface { // Export public key ExportPubKeyArmor(uid string) (string, error) ExportPubKeyArmorByAddress(address sdk.Address) (string, error) + // ExportPrivKey returns a private key in ASCII armored format. // It returns an error if the key does not exist or a wrong encryption passphrase is supplied. ExportPrivKeyArmor(uid, encryptPassphrase string) (armor string, err error) ExportPrivKeyArmorByAddress(address sdk.Address, encryptPassphrase string) (armor string, err error) } +// UnsafeExporter is implemented by key stores that support unsafe export +// of private keys' material. +type UnsafeExporter interface { + // UnsafeExportPrivKeyHex returns a private key in unarmored hex format + UnsafeExportPrivKeyHex(uid string) (string, error) +} + // Option overrides keyring configuration options. type Option func(options *Options) @@ -774,6 +789,29 @@ func (ks keystore) writeMultisigKey(name string, pub types.PubKey) (Info, error) return info, nil } +type unsafeKeystore struct { + keystore +} + +// NewUnsafe returns a new keyring that provides support for unsafe operations. +func NewUnsafe(kr Keyring) UnsafeKeyring { + // The type assertion is against the only keystore + // implementation that is currently provided. + ks := kr.(keystore) + + return unsafeKeystore{ks} +} + +// UnsafeExportPrivKeyHex exports private keys in unarmored hexadecimal format. +func (ks unsafeKeystore) UnsafeExportPrivKeyHex(uid string) (privkey string, err error) { + priv, err := ks.ExportPrivateKeyObject(uid) + if err != nil { + return "", err + } + + return hex.EncodeToString(priv.Bytes()), nil +} + func addrHexKeyAsString(address sdk.Address) string { return fmt.Sprintf("%s.%s", hex.EncodeToString(address.Bytes()), addressSuffix) } diff --git a/crypto/keyring/keyring_test.go b/crypto/keyring/keyring_test.go index 540ea57bf..bcd7eddf3 100644 --- a/crypto/keyring/keyring_test.go +++ b/crypto/keyring/keyring_test.go @@ -1,6 +1,7 @@ package keyring import ( + "encoding/hex" "fmt" "strings" "testing" @@ -1092,6 +1093,29 @@ func TestAltKeyring_ImportExportPubKey_ByAddress(t *testing.T) { require.EqualError(t, err, fmt.Sprintf("cannot overwrite key: %s", newUID)) } +func TestAltKeyring_UnsafeExportPrivKeyHex(t *testing.T) { + keyring, err := New(t.Name(), BackendTest, t.TempDir(), nil) + require.NoError(t, err) + + uid := theID + + _, _, err = keyring.NewMnemonic(uid, English, sdk.FullFundraiserPath, hd.Secp256k1) + require.NoError(t, err) + + unsafeKeyring := NewUnsafe(keyring) + privKey, err := unsafeKeyring.UnsafeExportPrivKeyHex(uid) + + require.NoError(t, err) + require.Equal(t, 64, len(privKey)) + + _, err = hex.DecodeString(privKey) + require.NoError(t, err) + + // test error on non existing key + _, err = unsafeKeyring.UnsafeExportPrivKeyHex("non-existing") + require.Error(t, err) +} + func TestAltKeyring_ConstructorSupportedAlgos(t *testing.T) { keyring, err := New(t.Name(), BackendTest, t.TempDir(), nil) require.NoError(t, err) From a51eac4f15a7f8110e529df9460fa7126c427aa3 Mon Sep 17 00:00:00 2001 From: MD Aleem <72057206+aleem1413@users.noreply.github.com> Date: Tue, 1 Dec 2020 09:48:35 +0530 Subject: [PATCH 22/40] Fix QueryByEvents with multiple events and empty pagination request (#8029) * fix pagination issue * Fix querying with multiple events * reuse ParsePagination * update tests * change event type to array * fix test * review changes * add test case for escape params * review changes * resolve 
conflicts * fix test * fix test Co-authored-by: anilCSE Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- proto/cosmos/tx/v1beta1/service.proto | 4 +- server/grpc/server_test.go | 4 +- types/tx/service.pb.go | 104 +++++++++++++------------- x/auth/tx/service.go | 29 ++----- x/auth/tx/service_test.go | 73 +++++++++++++++--- 5 files changed, 127 insertions(+), 87 deletions(-) diff --git a/proto/cosmos/tx/v1beta1/service.proto b/proto/cosmos/tx/v1beta1/service.proto index d78216d03..68eaaae83 100644 --- a/proto/cosmos/tx/v1beta1/service.proto +++ b/proto/cosmos/tx/v1beta1/service.proto @@ -28,8 +28,8 @@ service Service { // GetTxsEventRequest is the request type for the Service.TxsByEvents // RPC method. message GetTxsEventRequest { - // event is the transaction event type. - string event = 1; + // events is the list of transaction event type. + repeated string events = 1; // pagination defines an pagination for the request. cosmos.base.query.v1beta1.PageRequest pagination = 2; } diff --git a/server/grpc/server_test.go b/server/grpc/server_test.go index 9afa0b750..59c726c6a 100644 --- a/server/grpc/server_test.go +++ b/server/grpc/server_test.go @@ -107,12 +107,12 @@ func (s *IntegrationTestSuite) TestGRPCServer() { _, err = txServiceClient.GetTxsEvent( context.Background(), &tx.GetTxsEventRequest{ - Event: "message.action=send", + Events: []string{"message.action=send"}, }, ) // TODO Once https://github.com/cosmos/cosmos-sdk/pull/8029 is merged, this // should not error anymore. - s.Require().Error(err) + s.Require().NoError(err) } // Test and enforce that we upfront reject any connections to baseapp containing diff --git a/types/tx/service.pb.go b/types/tx/service.pb.go index 2302593c5..44242b031 100644 --- a/types/tx/service.pb.go +++ b/types/tx/service.pb.go @@ -33,8 +33,8 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // GetTxsEventRequest is the request type for the Service.TxsByEvents // RPC method. type GetTxsEventRequest struct { - // event is the transaction event type. - Event string `protobuf:"bytes,1,opt,name=event,proto3" json:"event,omitempty"` + // events is the list of transaction event type. + Events []string `protobuf:"bytes,1,rep,name=events,proto3" json:"events,omitempty"` // pagination defines an pagination for the request. 
Pagination *query.PageRequest `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` } @@ -72,11 +72,11 @@ func (m *GetTxsEventRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetTxsEventRequest proto.InternalMessageInfo -func (m *GetTxsEventRequest) GetEvent() string { +func (m *GetTxsEventRequest) GetEvents() []string { if m != nil { - return m.Event + return m.Events } - return "" + return nil } func (m *GetTxsEventRequest) GetPagination() *query.PageRequest { @@ -370,40 +370,40 @@ func init() { proto.RegisterFile("cosmos/tx/v1beta1/service.proto", fileDescript var fileDescriptor_e0b00a618705eca7 = []byte{ // 563 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcd, 0x6e, 0xd3, 0x40, - 0x10, 0xae, 0x1d, 0xfa, 0xc3, 0xa4, 0x08, 0x58, 0x7e, 0x14, 0x99, 0xe2, 0x06, 0xa7, 0x69, 0x23, - 0x24, 0xbc, 0x6a, 0xb8, 0xf4, 0x80, 0x84, 0x84, 0x54, 0x22, 0x6e, 0xc8, 0xed, 0x89, 0x4b, 0xb5, - 0x09, 0x5b, 0xc7, 0x22, 0xf1, 0xba, 0xde, 0x4d, 0xb4, 0x15, 0xf4, 0xc2, 0x91, 0x13, 0x12, 0x2f, - 0xc5, 0x31, 0x12, 0x17, 0x8e, 0x28, 0xe1, 0x0d, 0x78, 0x01, 0xe4, 0xf5, 0x3a, 0x71, 0xa8, 0x4d, - 0x7b, 0xf2, 0xae, 0xfc, 0xfd, 0xcc, 0x37, 0xe3, 0x31, 0x6c, 0xf7, 0x18, 0x1f, 0x32, 0x8e, 0x85, - 0xc4, 0xe3, 0xfd, 0x2e, 0x15, 0x64, 0x1f, 0x73, 0x1a, 0x8f, 0x83, 0x1e, 0x75, 0xa3, 0x98, 0x09, - 0x86, 0xee, 0xa6, 0x00, 0x57, 0x48, 0x57, 0x03, 0xac, 0x2d, 0x9f, 0x31, 0x7f, 0x40, 0x31, 0x89, - 0x02, 0x4c, 0xc2, 0x90, 0x09, 0x22, 0x02, 0x16, 0xf2, 0x94, 0x60, 0x35, 0xb4, 0x62, 0x97, 0x70, - 0x8a, 0x49, 0xb7, 0x17, 0xcc, 0x85, 0x93, 0x8b, 0x06, 0x59, 0x97, 0x6d, 0x85, 0xd4, 0xef, 0x9e, - 0xe6, 0x05, 0xce, 0x46, 0x34, 0x3e, 0x9f, 0x63, 0x22, 0xe2, 0x07, 0xa1, 0x72, 0x4b, 0xb1, 0x4e, - 0x0c, 0xa8, 0x43, 0xc5, 0xb1, 0xe4, 0x87, 0x63, 0x1a, 0x0a, 0x8f, 0x9e, 0x8d, 0x28, 0x17, 0xe8, - 0x3e, 0xac, 0xd2, 0xe4, 0x5e, 0x33, 0xea, 0x46, 0xeb, 0xa6, 0x97, 0x5e, 0xd0, 0x6b, 0x80, 0x05, - 0xbf, 0x66, 0xd6, 0x8d, 0x56, 0xb5, 0xbd, 0xeb, 0xea, 0x78, 0x89, 0x99, 0xab, 0xcc, 0xb2, 0x98, - 0xee, 0x5b, 0xe2, 0x53, 0xad, 0xe8, 0xe5, 0x98, 0xce, 0xc4, 0x80, 0x7b, 0x4b, 0xa6, 0x3c, 0x62, - 0x21, 0xa7, 0x68, 0x0f, 0x2a, 0x42, 0xf2, 0x9a, 0x51, 0xaf, 0xb4, 0xaa, 0xed, 0x07, 0xee, 0xa5, - 0xbe, 0xb9, 0xc7, 0xd2, 0x4b, 0x10, 0xa8, 0x03, 0x9b, 0x42, 0x9e, 0xc4, 0x9a, 0xc7, 0x6b, 0xa6, - 0x62, 0xec, 0x2c, 0x95, 0xa2, 0x7a, 0x95, 0x23, 0x6a, 0xb0, 0x57, 0x15, 0xf3, 0x73, 0x22, 0x94, - 0x4f, 0x54, 0x51, 0x89, 0xf6, 0xae, 0x4c, 0xa4, 0x95, 0xf2, 0x91, 0x0e, 0xe0, 0xf6, 0x51, 0x30, - 0x1c, 0x0d, 0x88, 0xc8, 0x12, 0xa3, 0x26, 0x98, 0x42, 0xaa, 0x06, 0x96, 0x86, 0x31, 0x85, 0x74, - 0xbe, 0x18, 0x70, 0x67, 0x41, 0xd5, 0x9d, 0x78, 0x01, 0x1b, 0x3e, 0xe1, 0x27, 0x41, 0x78, 0xca, - 0xb4, 0xc2, 0x93, 0xf2, 0x70, 0x1d, 0xc2, 0xdf, 0x84, 0xa7, 0xcc, 0x5b, 0xf7, 0xd3, 0x03, 0x3a, - 0x80, 0xb5, 0x98, 0xf2, 0xd1, 0x40, 0xe8, 0x19, 0xd5, 0xcb, 0xb9, 0x9e, 0xc2, 0x79, 0x1a, 0xef, - 0x38, 0xb0, 0xa9, 0x06, 0x93, 0x65, 0x40, 0x70, 0xa3, 0x4f, 0x78, 0x5f, 0x7f, 0x06, 0xea, 0xec, - 0x5c, 0xc0, 0x2d, 0x8d, 0xd1, 0xc5, 0x5e, 0x2f, 0x28, 0x3a, 0x84, 0x6a, 0x6e, 0x68, 0xba, 0xb4, - 0xeb, 0xcd, 0x0c, 0x16, 0x33, 0x6b, 0xff, 0x31, 0x61, 0xfd, 0x28, 0x5d, 0x30, 0x24, 0x61, 0x23, - 0x6b, 0x1d, 0x72, 0x0a, 0x9c, 0xff, 0x19, 0x89, 0xd5, 0xf8, 0x2f, 0x26, 0x35, 0x70, 0x1a, 0x9f, - 0x7f, 0xfc, 0xfe, 0x66, 0x3e, 0x76, 0x1e, 0xe1, 0x82, 0xcd, 0xce, 0xdc, 0x22, 0x58, 0x55, 0x4d, - 0x40, 0xdb, 0x05, 0x92, 0xf9, 0x16, 0x5a, 0xf5, 0x72, 0x80, 0x36, 0xdc, 0x51, 0x86, 0x36, 0xda, - 0xc2, 0x45, 0x3b, 0x8d, 
0x3f, 0x26, 0x5d, 0xbf, 0x40, 0x9f, 0xa0, 0x9a, 0xdb, 0x19, 0xd4, 0x2c, - 0x93, 0x5d, 0x5a, 0x64, 0x6b, 0xf7, 0x2a, 0x98, 0xae, 0xc1, 0x56, 0x35, 0xd4, 0xd0, 0xc3, 0xc2, - 0x1a, 0xf8, 0xab, 0x97, 0xdf, 0xa7, 0xb6, 0x31, 0x99, 0xda, 0xc6, 0xaf, 0xa9, 0x6d, 0x7c, 0x9d, - 0xd9, 0x2b, 0x93, 0x99, 0xbd, 0xf2, 0x73, 0x66, 0xaf, 0xbc, 0x6b, 0xfa, 0x81, 0xe8, 0x8f, 0xba, - 0x6e, 0x8f, 0x0d, 0x33, 0x6e, 0xfa, 0x78, 0xc6, 0xdf, 0x7f, 0xc0, 0xe2, 0x3c, 0xa2, 0x89, 0x58, - 0x77, 0x4d, 0xfd, 0x6e, 0x9e, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xae, 0x63, 0xf9, 0x38, 0x2f, + 0x10, 0xae, 0x1d, 0x48, 0xdb, 0x49, 0x11, 0xb0, 0x88, 0x2a, 0x32, 0xc5, 0x0d, 0x4e, 0xd3, 0x56, + 0x48, 0xd8, 0x6a, 0xb8, 0xf4, 0x80, 0x84, 0x84, 0x54, 0x22, 0x6e, 0xc8, 0xed, 0x89, 0x4b, 0xb5, + 0x09, 0x5b, 0xc7, 0x22, 0xf1, 0xba, 0xd9, 0x49, 0xb4, 0x15, 0xf4, 0xc2, 0x91, 0x13, 0x12, 0x2f, + 0xc5, 0x31, 0x12, 0x17, 0x8e, 0x28, 0xe1, 0x0d, 0x78, 0x01, 0xe4, 0xf5, 0x3a, 0x71, 0x68, 0x4c, + 0x7b, 0xca, 0x8e, 0xf2, 0xfd, 0xcc, 0x37, 0xa3, 0x31, 0x6c, 0x77, 0xb8, 0xe8, 0x73, 0xe1, 0xa1, + 0xf4, 0x46, 0x07, 0x6d, 0x86, 0xf4, 0xc0, 0x13, 0x6c, 0x30, 0x0a, 0x3b, 0xcc, 0x8d, 0x07, 0x1c, + 0x39, 0xb9, 0x9f, 0x02, 0x5c, 0x94, 0xae, 0x06, 0x58, 0x5b, 0x01, 0xe7, 0x41, 0x8f, 0x79, 0x34, + 0x0e, 0x3d, 0x1a, 0x45, 0x1c, 0x29, 0x86, 0x3c, 0x12, 0x29, 0xc1, 0xaa, 0x6b, 0xc5, 0x36, 0x15, + 0xcc, 0xa3, 0xed, 0x4e, 0x38, 0x13, 0x4e, 0x0a, 0x0d, 0xb2, 0xae, 0xda, 0xa2, 0xd4, 0xff, 0x3d, + 0xcd, 0x0b, 0x9c, 0x0f, 0xd9, 0xe0, 0x62, 0x86, 0x89, 0x69, 0x10, 0x46, 0xca, 0x2d, 0xc5, 0x3a, + 0x08, 0xa4, 0xc5, 0xf0, 0x44, 0x8a, 0xa3, 0x11, 0x8b, 0xd0, 0x67, 0xe7, 0x43, 0x26, 0x90, 0x6c, + 0x42, 0x99, 0x25, 0xb5, 0xa8, 0x1a, 0xb5, 0xd2, 0xfe, 0xba, 0xaf, 0x2b, 0xf2, 0x1a, 0x60, 0xae, + 0x50, 0x35, 0x6b, 0xc6, 0x7e, 0xa5, 0xb9, 0xeb, 0xea, 0x80, 0x89, 0x9d, 0xab, 0xec, 0xb2, 0xa0, + 0xee, 0x5b, 0x1a, 0x30, 0xad, 0xe9, 0xe7, 0x98, 0xce, 0xd8, 0x80, 0x07, 0x0b, 0xb6, 0x22, 0xe6, + 0x91, 0x60, 0x64, 0x0f, 0x4a, 0x28, 0x53, 0xd3, 0x4a, 0xf3, 0xa1, 0x7b, 0x65, 0x72, 0xee, 0x89, + 0xf4, 0x13, 0x04, 0x69, 0xc1, 0x06, 0xca, 0xd3, 0x81, 0xe6, 0x89, 0xaa, 0xa9, 0x18, 0x3b, 0x0b, + 0xad, 0xa8, 0x69, 0xe5, 0x88, 0x1a, 0xec, 0x57, 0x70, 0xf6, 0x4e, 0x84, 0xf2, 0x89, 0x4a, 0x2a, + 0xd1, 0xde, 0xb5, 0x89, 0xb4, 0x52, 0x3e, 0xd2, 0x21, 0xdc, 0x3d, 0x0e, 0xfb, 0xc3, 0x1e, 0xc5, + 0x2c, 0x31, 0x69, 0x80, 0x89, 0xb2, 0x6a, 0x28, 0xcd, 0x82, 0x30, 0x26, 0x4a, 0xe7, 0x8b, 0x01, + 0xf7, 0xe6, 0x54, 0x3d, 0x89, 0x17, 0xb0, 0x16, 0x50, 0x71, 0x1a, 0x46, 0x67, 0x5c, 0x2b, 0x3c, + 0x29, 0x0e, 0xd7, 0xa2, 0xe2, 0x4d, 0x74, 0xc6, 0xfd, 0xd5, 0x20, 0x7d, 0x90, 0x43, 0x28, 0x0f, + 0x98, 0x18, 0xf6, 0x50, 0xef, 0xa8, 0x56, 0xcc, 0xf5, 0x15, 0xce, 0xd7, 0x78, 0xc7, 0x81, 0x0d, + 0xb5, 0x98, 0x2c, 0x03, 0x81, 0x5b, 0x5d, 0x2a, 0xba, 0xaa, 0x87, 0x75, 0x5f, 0xbd, 0x9d, 0x4b, + 0xb8, 0xa3, 0x31, 0xba, 0xd9, 0x9b, 0x05, 0x25, 0x47, 0x50, 0xc9, 0x2d, 0x4d, 0xb7, 0x76, 0xb3, + 0x9d, 0xc1, 0x7c, 0x67, 0xcd, 0x3f, 0x26, 0xac, 0x1e, 0xa7, 0x27, 0x46, 0x24, 0xac, 0x65, 0xa3, + 0x23, 0xce, 0x12, 0xe7, 0x7f, 0x56, 0x62, 0xd5, 0xff, 0x8b, 0x49, 0x0d, 0x9c, 0xfa, 0xe7, 0x1f, + 0xbf, 0xbf, 0x99, 0x8f, 0x9d, 0x47, 0xde, 0x92, 0xdb, 0xce, 0xdc, 0x62, 0xb8, 0xad, 0x86, 0x40, + 0xb6, 0x97, 0x48, 0xe6, 0x47, 0x68, 0xd5, 0x8a, 0x01, 0xda, 0x70, 0x47, 0x19, 0xda, 0x64, 0xcb, + 0x5b, 0x76, 0xd5, 0xde, 0xc7, 0x64, 0xea, 0x97, 0xe4, 0x13, 0x54, 0x72, 0x37, 0x43, 0x1a, 0x45, + 0xb2, 0x0b, 0xa7, 0x6c, 0xed, 0x5e, 0x07, 0xd3, 0x3d, 0xd8, 0xaa, 0x87, 0x2a, 0xd9, 0x5c, 0xda, + 0x83, 0x78, 0xf5, 0xf2, 0xfb, 0xc4, 0x36, 0xc6, 
0x13, 0xdb, 0xf8, 0x35, 0xb1, 0x8d, 0xaf, 0x53, + 0x7b, 0x65, 0x3c, 0xb5, 0x57, 0x7e, 0x4e, 0xed, 0x95, 0x77, 0x8d, 0x20, 0xc4, 0xee, 0xb0, 0xed, + 0x76, 0x78, 0x3f, 0xe3, 0xa6, 0x3f, 0xcf, 0xc4, 0xfb, 0x0f, 0x1e, 0x5e, 0xc4, 0x2c, 0x11, 0x6b, + 0x97, 0xd5, 0x07, 0xe7, 0xf9, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x62, 0x59, 0xdc, 0x70, 0x31, 0x05, 0x00, 0x00, } @@ -597,12 +597,14 @@ func (m *GetTxsEventRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x12 } - if len(m.Event) > 0 { - i -= len(m.Event) - copy(dAtA[i:], m.Event) - i = encodeVarintService(dAtA, i, uint64(len(m.Event))) - i-- - dAtA[i] = 0xa + if len(m.Events) > 0 { + for iNdEx := len(m.Events) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Events[iNdEx]) + copy(dAtA[i:], m.Events[iNdEx]) + i = encodeVarintService(dAtA, i, uint64(len(m.Events[iNdEx]))) + i-- + dAtA[i] = 0xa + } } return len(dAtA) - i, nil } @@ -846,9 +848,11 @@ func (m *GetTxsEventRequest) Size() (n int) { } var l int _ = l - l = len(m.Event) - if l > 0 { - n += 1 + l + sovService(uint64(l)) + if len(m.Events) > 0 { + for _, s := range m.Events { + l = len(s) + n += 1 + l + sovService(uint64(l)) + } } if m.Pagination != nil { l = m.Pagination.Size() @@ -979,7 +983,7 @@ func (m *GetTxsEventRequest) Unmarshal(dAtA []byte) error { switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Event", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Events", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -1007,7 +1011,7 @@ func (m *GetTxsEventRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Event = string(dAtA[iNdEx:postIndex]) + m.Events = append(m.Events, string(dAtA[iNdEx:postIndex])) iNdEx = postIndex case 2: if wireType != 2 { diff --git a/x/auth/tx/service.go b/x/auth/tx/service.go index b12808a16..751c8fbb2 100644 --- a/x/auth/tx/service.go +++ b/x/auth/tx/service.go @@ -49,32 +49,17 @@ const ( // TxsByEvents implements the ServiceServer.TxsByEvents RPC method. 
func (s txServer) GetTxsEvent(ctx context.Context, req *txtypes.GetTxsEventRequest) (*txtypes.GetTxsEventResponse, error) { - offset := int(req.Pagination.Offset) - limit := int(req.Pagination.Limit) - if offset < 0 { - return nil, status.Error(codes.InvalidArgument, "offset must greater than 0") + page, limit, err := pagination.ParsePagination(req.Pagination) + if err != nil { + return nil, err } - if len(req.Event) == 0 { + + if len(req.Events) == 0 { return nil, status.Error(codes.InvalidArgument, "must declare at least one event to search") } - if limit < 0 { - return nil, status.Error(codes.InvalidArgument, "limit must greater than 0") - } else if limit == 0 { - limit = pagination.DefaultLimit - } - - page := offset/limit + 1 - - var events []string - - if strings.Contains(req.Event, "&") { - events = strings.Split(req.Event, "&") - } else { - events = append(events, req.Event) - } - tmEvents := make([]string, len(events)) - for i, event := range events { + tmEvents := make([]string, len(req.Events)) + for i, event := range req.Events { if !strings.Contains(event, "=") { return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("invalid event; event %s should be of the format: %s", event, eventFormat)) } else if strings.Count(event, "=") > 1 { diff --git a/x/auth/tx/service_test.go b/x/auth/tx/service_test.go index 54fe14fd7..480529541 100644 --- a/x/auth/tx/service_test.go +++ b/x/auth/tx/service_test.go @@ -124,11 +124,18 @@ func (s IntegrationTestSuite) TestGetTxEvents() { s.Require().NoError(s.network.WaitForNextBlock()) + // Query the tx via gRPC empty params. + _, err = s.queryClient.GetTxsEvent( + context.Background(), + &tx.GetTxsEventRequest{}, + ) + s.Require().Error(err) + // Query the tx via gRPC. grpcRes, err := s.queryClient.GetTxsEvent( context.Background(), &tx.GetTxsEventRequest{ - Event: "message.action=send", + Events: []string{"message.action=send"}, Pagination: &query.PageRequest{ CountTotal: false, Offset: 0, @@ -145,21 +152,65 @@ func (s IntegrationTestSuite) TestGetTxEvents() { grpcRes, err = s.queryClient.GetTxsEvent( context.Background(), &tx.GetTxsEventRequest{ - Event: "message.action=send", + Events: []string{"message.action=send"}, }, ) // TODO Once https://github.com/cosmos/cosmos-sdk/pull/8029 is merged, this // should not error anymore. - s.Require().Error(err) - - // Query the tx via grpc-gateway. 
- restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?event=%s&pagination.offset=%d&pagination.limit=%d", val.APIAddress, "message.action=send", 0, 1)) s.Require().NoError(err) - var getTxRes tx.GetTxsEventResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getTxRes)) - s.Require().Equal(len(getTxRes.Txs), 1) - s.Require().Equal("foobar", getTxRes.Txs[0].Body.Memo) - s.Require().NotZero(getTxRes.TxResponses[0].Height) + + rpcTests := []struct { + name string + url string + expectErr bool + expErrMsg string + }{ + { + "empty params", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs", val.APIAddress), + true, + "must declare at least one event to search", + }, + { + "without pagination", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s", val.APIAddress, "message.action=send"), + false, + "", + }, + { + "with pagination", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s&pagination.offset=%d&pagination.limit=%d", val.APIAddress, "message.action=send", 0, 10), + false, + "", + }, + { + "expect pass with multiple-events", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s&events=%s", val.APIAddress, "message.action=send", "message.module=bank"), + false, + "", + }, + { + "expect pass with escape event", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s", val.APIAddress, "message.action%3Dsend"), + false, + "", + }, + } + for _, tc := range rpcTests { + s.Run(tc.name, func() { + res, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + if tc.expectErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.GetTxsEventResponse + val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().GreaterOrEqual(len(result.Txs), 1) + s.Require().Equal("foobar", result.Txs[0].Body.Memo) + s.Require().NotZero(result.TxResponses[0].Height) + } + }) + } } func (s IntegrationTestSuite) TestGetTx() { From f57828c0919ed098437cdd8ae5b53c86f29bc8ca Mon Sep 17 00:00:00 2001 From: vincent Date: Wed, 2 Dec 2020 21:50:50 +0800 Subject: [PATCH 23/40] Fix CryptoCdc inconsistent (#7987) * simple fix * refactor crypto * just use codec/legacy.Cdc * revert armor * add changelog entry Co-authored-by: Aleksandr Bezobchuk --- CHANGELOG.md | 1 + codec/legacy/codec.go | 14 +++++++++++++- codec/types/any.go | 3 ++- crypto/armor.go | 5 ++--- crypto/armor_test.go | 5 ++--- crypto/codec/amino.go | 19 ------------------- crypto/keyring/codec.go | 10 ++-------- crypto/keyring/info.go | 7 ++++--- crypto/keyring/keyring.go | 12 ++++++------ crypto/keyring/types_test.go | 3 ++- crypto/ledger/ledger_test.go | 8 ++++---- types/address.go | 3 +-- 12 files changed, 39 insertions(+), 51 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b2c97295..d4cf410ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,7 @@ Ref: https://keepachangelog.com/en/1.0.0/ ## [Unreleased] ### Improvements +* (crypto) [\#7987](https://github.com/cosmos/cosmos-sdk/pull/7987) Fix the inconsistency of CryptoCdc, only use `codec/legacy.Cdc`. 
* (SDK) [\#7925](https://github.com/cosmos/cosmos-sdk/pull/7925) Updated dependencies to use gRPC v1.33.2 * Updated gRPC dependency to v1.33.2 * Updated iavl dependency to v0.15-rc2 diff --git a/codec/legacy/codec.go b/codec/legacy/codec.go index 09225998e..5ec6b2976 100644 --- a/codec/legacy/codec.go +++ b/codec/legacy/codec.go @@ -3,6 +3,7 @@ package legacy import ( "github.com/cosmos/cosmos-sdk/codec" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" ) // Cdc defines a global generic sealed Amino codec to be used throughout sdk. It @@ -15,5 +16,16 @@ func init() { Cdc = codec.NewLegacyAmino() cryptocodec.RegisterCrypto(Cdc) codec.RegisterEvidences(Cdc) - Cdc.Seal() +} + +// PrivKeyFromBytes unmarshals private key bytes and returns a PrivKey +func PrivKeyFromBytes(privKeyBytes []byte) (privKey cryptotypes.PrivKey, err error) { + err = Cdc.UnmarshalBinaryBare(privKeyBytes, &privKey) + return +} + +// PubKeyFromBytes unmarshals public key bytes and returns a PubKey +func PubKeyFromBytes(pubKeyBytes []byte) (pubKey cryptotypes.PubKey, err error) { + err = Cdc.UnmarshalBinaryBare(pubKeyBytes, &pubKey) + return } diff --git a/codec/types/any.go b/codec/types/any.go index 38fe4b42a..4a30ddad4 100644 --- a/codec/types/any.go +++ b/codec/types/any.go @@ -1,8 +1,9 @@ package types import ( - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/gogo/protobuf/proto" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) type Any struct { diff --git a/crypto/armor.go b/crypto/armor.go index 3ee472707..35deb1077 100644 --- a/crypto/armor.go +++ b/crypto/armor.go @@ -10,7 +10,6 @@ import ( "github.com/tendermint/tendermint/crypto/xsalsa20symmetric" "github.com/cosmos/cosmos-sdk/codec/legacy" - cryptoAmino "github.com/cosmos/cosmos-sdk/crypto/codec" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) @@ -153,7 +152,7 @@ func encryptPrivKey(privKey cryptotypes.PrivKey, passphrase string) (saltBytes [ } key = crypto.Sha256(key) // get 32 bytes - privKeyBytes := legacy.Cdc.Amino.MustMarshalBinaryBare(privKey) + privKeyBytes := legacy.Cdc.MustMarshalBinaryBare(privKey) return saltBytes, xsalsa20symmetric.EncryptSymmetric(privKeyBytes, key) } @@ -206,5 +205,5 @@ func decryptPrivKey(saltBytes []byte, encBytes []byte, passphrase string) (privK return privKey, err } - return cryptoAmino.PrivKeyFromBytes(privKeyBytes) + return legacy.PrivKeyFromBytes(privKeyBytes) } diff --git a/crypto/armor_test.go b/crypto/armor_test.go index 3267ca98a..abbc7870a 100644 --- a/crypto/armor_test.go +++ b/crypto/armor_test.go @@ -15,7 +15,6 @@ import ( "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto" - cryptoAmino "github.com/cosmos/cosmos-sdk/crypto/codec" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/keyring" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" @@ -79,7 +78,7 @@ func TestArmorUnarmorPubKey(t *testing.T) { armored := crypto.ArmorPubKeyBytes(legacy.Cdc.Amino.MustMarshalBinaryBare(info.GetPubKey()), "") pubBytes, algo, err := crypto.UnarmorPubKeyBytes(armored) require.NoError(t, err) - pub, err := cryptoAmino.PubKeyFromBytes(pubBytes) + pub, err := legacy.PubKeyFromBytes(pubBytes) require.NoError(t, err) require.Equal(t, string(hd.Secp256k1Type), algo) require.True(t, pub.Equals(info.GetPubKey())) @@ -87,7 +86,7 @@ func TestArmorUnarmorPubKey(t *testing.T) { armored = 
crypto.ArmorPubKeyBytes(legacy.Cdc.Amino.MustMarshalBinaryBare(info.GetPubKey()), "unknown") pubBytes, algo, err = crypto.UnarmorPubKeyBytes(armored) require.NoError(t, err) - pub, err = cryptoAmino.PubKeyFromBytes(pubBytes) + pub, err = legacy.PubKeyFromBytes(pubBytes) require.NoError(t, err) require.Equal(t, "unknown", algo) require.True(t, pub.Equals(info.GetPubKey())) diff --git a/crypto/codec/amino.go b/crypto/codec/amino.go index 5cd61c3dd..d50a08864 100644 --- a/crypto/codec/amino.go +++ b/crypto/codec/amino.go @@ -10,13 +10,6 @@ import ( cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" ) -var amino *codec.LegacyAmino - -func init() { - amino = codec.NewLegacyAmino() - RegisterCrypto(amino) -} - // RegisterCrypto registers all crypto dependency types with the provided Amino // codec. func RegisterCrypto(cdc *codec.LegacyAmino) { @@ -38,15 +31,3 @@ func RegisterCrypto(cdc *codec.LegacyAmino) { cdc.RegisterConcrete(&secp256k1.PrivKey{}, secp256k1.PrivKeyName, nil) } - -// PrivKeyFromBytes unmarshals private key bytes and returns a PrivKey -func PrivKeyFromBytes(privKeyBytes []byte) (privKey cryptotypes.PrivKey, err error) { - err = amino.UnmarshalBinaryBare(privKeyBytes, &privKey) - return -} - -// PubKeyFromBytes unmarshals public key bytes and returns a PubKey -func PubKeyFromBytes(pubKeyBytes []byte) (pubKey cryptotypes.PubKey, err error) { - err = amino.UnmarshalBinaryBare(pubKeyBytes, &pubKey) - return -} diff --git a/crypto/keyring/codec.go b/crypto/keyring/codec.go index 6e6a254c8..558f37775 100644 --- a/crypto/keyring/codec.go +++ b/crypto/keyring/codec.go @@ -2,18 +2,12 @@ package keyring import ( "github.com/cosmos/cosmos-sdk/codec" - cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto/hd" ) -// CryptoCdc defines the codec required for keys and info -var CryptoCdc *codec.LegacyAmino - func init() { - CryptoCdc = codec.NewLegacyAmino() - cryptocodec.RegisterCrypto(CryptoCdc) - RegisterLegacyAminoCodec(CryptoCdc) - CryptoCdc.Seal() + RegisterLegacyAminoCodec(legacy.Cdc) } // RegisterLegacyAminoCodec registers concrete types and interfaces on the given codec. 
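The hunks above replace the keyring's package-local, sealed `CryptoCdc` with the shared `codec/legacy.Cdc`; this is also why the `Cdc.Seal()` call is dropped in `codec/legacy/codec.go`, so that `RegisterLegacyAminoCodec(legacy.Cdc)` can still add the keyring types in `init`. A minimal round-trip sketch of the resulting pattern (not part of the patch; it assumes only the `legacy.Cdc` registrations and the `PrivKeyFromBytes` helper introduced above):

```go
package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/codec/legacy"
	"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
)

func main() {
	// Amino-encode a throwaway secp256k1 private key with the shared
	// legacy codec, as the keyring now does when persisting local keys.
	priv := secp256k1.GenPrivKey()
	bz := legacy.Cdc.MustMarshalBinaryBare(priv)

	// Decode it back through the helper added in codec/legacy/codec.go.
	decoded, err := legacy.PrivKeyFromBytes(bz)
	if err != nil {
		panic(err)
	}

	fmt.Println(decoded.Equals(priv)) // true
}
```

Because every caller now funnels through this single codec instance, the concrete key and `Info` types only need to be registered once, in the `init` functions shown above.
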
diff --git a/crypto/keyring/info.go b/crypto/keyring/info.go index f05e2f0dd..24024599b 100644 --- a/crypto/keyring/info.go +++ b/crypto/keyring/info.go @@ -3,6 +3,7 @@ package keyring import ( "fmt" + "github.com/cosmos/cosmos-sdk/codec/legacy" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/keys/multisig" @@ -246,12 +247,12 @@ func (i multiInfo) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error { // encoding info func marshalInfo(i Info) []byte { - return CryptoCdc.MustMarshalBinaryLengthPrefixed(i) + return legacy.Cdc.MustMarshalBinaryLengthPrefixed(i) } // decoding info func unmarshalInfo(bz []byte) (info Info, err error) { - err = CryptoCdc.UnmarshalBinaryLengthPrefixed(bz, &info) + err = legacy.Cdc.UnmarshalBinaryLengthPrefixed(bz, &info) if err != nil { return nil, err } @@ -266,7 +267,7 @@ func unmarshalInfo(bz []byte) (info Info, err error) { _, ok := info.(multiInfo) if ok { var multi multiInfo - err = CryptoCdc.UnmarshalBinaryLengthPrefixed(bz, &multi) + err = legacy.Cdc.UnmarshalBinaryLengthPrefixed(bz, &multi) return multi, err } diff --git a/crypto/keyring/keyring.go b/crypto/keyring/keyring.go index f26a06b93..f88ffdc37 100644 --- a/crypto/keyring/keyring.go +++ b/crypto/keyring/keyring.go @@ -18,8 +18,8 @@ import ( tmcrypto "github.com/tendermint/tendermint/crypto" "github.com/cosmos/cosmos-sdk/client/input" + "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto" - cryptoamino "github.com/cosmos/cosmos-sdk/crypto/codec" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/ledger" "github.com/cosmos/cosmos-sdk/crypto/types" @@ -213,7 +213,7 @@ func (ks keystore) ExportPubKeyArmor(uid string) (string, error) { return "", fmt.Errorf("no key to export with name: %s", uid) } - return crypto.ArmorPubKeyBytes(CryptoCdc.MustMarshalBinaryBare(bz.GetPubKey()), string(bz.GetAlgo())), nil + return crypto.ArmorPubKeyBytes(legacy.Cdc.MustMarshalBinaryBare(bz.GetPubKey()), string(bz.GetAlgo())), nil } func (ks keystore) ExportPubKeyArmorByAddress(address sdk.Address) (string, error) { @@ -255,7 +255,7 @@ func (ks keystore) ExportPrivateKeyObject(uid string) (types.PrivKey, error) { return nil, err } - priv, err = cryptoamino.PrivKeyFromBytes([]byte(linfo.PrivKeyArmor)) + priv, err = legacy.PrivKeyFromBytes([]byte(linfo.PrivKeyArmor)) if err != nil { return nil, err } @@ -304,7 +304,7 @@ func (ks keystore) ImportPubKey(uid string, armor string) error { return err } - pubKey, err := cryptoamino.PubKeyFromBytes(pubBytes) + pubKey, err := legacy.PubKeyFromBytes(pubBytes) if err != nil { return err } @@ -331,7 +331,7 @@ func (ks keystore) Sign(uid string, msg []byte) ([]byte, types.PubKey, error) { return nil, nil, fmt.Errorf("private key not available") } - priv, err = cryptoamino.PrivKeyFromBytes([]byte(i.PrivKeyArmor)) + priv, err = legacy.PrivKeyFromBytes([]byte(i.PrivKeyArmor)) if err != nil { return nil, nil, err } @@ -711,7 +711,7 @@ func (ks keystore) writeLocalKey(name string, priv types.PrivKey, algo hd.PubKey // encrypt private key using keyring pub := priv.PubKey() - info := newLocalInfo(name, pub, string(CryptoCdc.MustMarshalBinaryBare(priv)), algo) + info := newLocalInfo(name, pub, string(legacy.Cdc.MustMarshalBinaryBare(priv)), algo) if err := ks.writeInfo(info); err != nil { return nil, err } diff --git a/crypto/keyring/types_test.go b/crypto/keyring/types_test.go index 8c68bce67..b04aa4547 100644 --- a/crypto/keyring/types_test.go +++ 
b/crypto/keyring/types_test.go @@ -4,10 +4,11 @@ import ( "encoding/hex" "testing" + "github.com/stretchr/testify/require" + "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/stretchr/testify/require" ) func Test_writeReadLedgerInfo(t *testing.T) { diff --git a/crypto/ledger/ledger_test.go b/crypto/ledger/ledger_test.go index 5fe9da8f5..ec1b8dbed 100644 --- a/crypto/ledger/ledger_test.go +++ b/crypto/ledger/ledger_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - cryptoAmino "github.com/cosmos/cosmos-sdk/crypto/codec" + "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/testutil" @@ -238,7 +238,7 @@ func TestRealDeviceSecp256k1(t *testing.T) { // now, let's serialize the public key and make sure it still works bs := cdc.Amino.MustMarshalBinaryBare(priv.PubKey()) - pub2, err := cryptoAmino.PubKeyFromBytes(bs) + pub2, err := legacy.PubKeyFromBytes(bs) require.Nil(t, err, "%+v", err) // make sure we get the same pubkey when we load from disk @@ -251,8 +251,8 @@ func TestRealDeviceSecp256k1(t *testing.T) { require.True(t, valid) // make sure pubkeys serialize properly as well - bs = cdc.Amino.MustMarshalBinaryBare(pub) - bpub, err := cryptoAmino.PubKeyFromBytes(bs) + bs = legacy.Cdc.MustMarshalBinaryBare(pub) + bpub, err := legacy.PubKeyFromBytes(bs) require.NoError(t, err) require.Equal(t, pub, bpub) } diff --git a/types/address.go b/types/address.go index 7e0408a84..ba9e5b303 100644 --- a/types/address.go +++ b/types/address.go @@ -11,7 +11,6 @@ import ( yaml "gopkg.in/yaml.v2" "github.com/cosmos/cosmos-sdk/codec/legacy" - cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/types/bech32" ) @@ -663,7 +662,7 @@ func GetPubKeyFromBech32(pkt Bech32PubKeyType, pubkeyStr string) (cryptotypes.Pu return nil, err } - return cryptocodec.PubKeyFromBytes(bz) + return legacy.PubKeyFromBytes(bz) } // MustGetPubKeyFromBech32 calls GetPubKeyFromBech32 except it panics on error. 
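The `types/address.go` hunk above likewise routes Bech32 public-key decoding through `legacy.PubKeyFromBytes`. A small usage sketch of that path (not part of the patch; `Bech32ifyPubKey` and `Bech32PubKeyTypeAccPub` are assumed to be the pre-existing helpers in `types/address.go`):

```go
package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
	sdk "github.com/cosmos/cosmos-sdk/types"
)

func main() {
	pub := secp256k1.GenPrivKey().PubKey()

	// Encode the account public key to Bech32...
	bech, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeAccPub, pub)
	if err != nil {
		panic(err)
	}

	// ...and decode it again; GetPubKeyFromBech32 now unmarshals the
	// amino bytes via legacy.PubKeyFromBytes instead of crypto/codec.
	decoded, err := sdk.GetPubKeyFromBech32(sdk.Bech32PubKeyTypeAccPub, bech)
	if err != nil {
		panic(err)
	}

	fmt.Println(decoded.Equals(pub)) // true
}
```
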
From b6c8d5ea9fa6d2c952be7d06bd2db8e6bf75c69c Mon Sep 17 00:00:00 2001 From: MD Aleem <72057206+aleem1413@users.noreply.github.com> Date: Wed, 2 Dec 2020 23:20:40 +0530 Subject: [PATCH 24/40] Add tx broadcast gRPC endpoint (#7852) * WIP tx/broadcast grpc endpoint * fix lint * fix proto lint * Update service.proto * resolve conflicts * update service.proto * Update service.proto * review changes * proto lint * Switch to txraw * Add check breaking at the end * Fix broadcast * Send Msg on SetupSuite * Remove proto-check-breaking * 1 validator in test * Add grpc server tests for broadcast * Fix grpc server tests * Add some changes * Add ress comments * Add table tests for tx service * Add test for mode * Add simulate tests * Add build flag back * Revert custom stringer for enum * Remove stray logs * Use /txs/{hash} Co-authored-by: Amaury Martiny Co-authored-by: Aleksandr Bezobchuk --- Makefile | 2 +- client/broadcast.go | 36 ++ client/grpc_query.go | 41 +- docs/migrations/rest.md | 2 +- proto/cosmos/tx/v1beta1/service.proto | 46 ++- server/grpc/server_test.go | 88 +++- types/tx/service.pb.go | 570 ++++++++++++++++++++++++-- types/tx/service.pb.gw.go | 98 ++++- x/auth/client/rest/query.go | 2 +- x/auth/client/rest/rest_test.go | 4 +- x/auth/tx/service.go | 14 +- x/auth/tx/service_test.go | 449 ++++++++++++++------ 12 files changed, 1146 insertions(+), 206 deletions(-) diff --git a/Makefile b/Makefile index baace1ee1..f9011d13d 100644 --- a/Makefile +++ b/Makefile @@ -357,7 +357,7 @@ devdoc-update: ### Protobuf ### ############################################################################### -proto-all: proto-format proto-lint proto-check-breaking proto-gen +proto-all: proto-format proto-lint proto-gen proto-gen: @echo "Generating Protobuf files" diff --git a/client/broadcast.go b/client/broadcast.go index e28d76c54..e21bc080c 100644 --- a/client/broadcast.go +++ b/client/broadcast.go @@ -7,10 +7,13 @@ import ( "github.com/tendermint/tendermint/crypto/tmhash" "github.com/tendermint/tendermint/mempool" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" "github.com/cosmos/cosmos-sdk/client/flags" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/cosmos/cosmos-sdk/types/tx" ) // BroadcastTx broadcasts a transactions either synchronously or asynchronously @@ -142,3 +145,36 @@ func (ctx Context) BroadcastTxAsync(txBytes []byte) (*sdk.TxResponse, error) { return sdk.NewResponseFormatBroadcastTx(res), err } + +// TxServiceBroadcast is a helper function to broadcast a Tx with the correct gRPC types +// from the tx service. Calls `clientCtx.BroadcastTx` under the hood. +func TxServiceBroadcast(grpcCtx context.Context, clientCtx Context, req *tx.BroadcastTxRequest) (*tx.BroadcastTxResponse, error) { + if req == nil || req.TxBytes == nil { + return nil, status.Error(codes.InvalidArgument, "invalid empty tx") + } + + clientCtx = clientCtx.WithBroadcastMode(normalizeBroadcastMode(req.Mode)) + resp, err := clientCtx.BroadcastTx(req.TxBytes) + if err != nil { + return nil, err + } + + return &tx.BroadcastTxResponse{ + TxResponse: resp, + }, nil +} + +// normalizeBroadcastMode converts a broadcast mode into a normalized string +// to be passed into the clientCtx. 
+func normalizeBroadcastMode(mode tx.BroadcastMode) string { + switch mode { + case tx.BroadcastMode_BROADCAST_MODE_ASYNC: + return "async" + case tx.BroadcastMode_BROADCAST_MODE_BLOCK: + return "block" + case tx.BroadcastMode_BROADCAST_MODE_SYNC: + return "sync" + default: + return "unspecified" + } +} diff --git a/client/grpc_query.go b/client/grpc_query.go index 979333f64..fc8cdeeb8 100644 --- a/client/grpc_query.go +++ b/client/grpc_query.go @@ -3,6 +3,7 @@ package client import ( gocontext "context" "fmt" + "reflect" "strconv" gogogrpc "github.com/gogo/protobuf/grpc" @@ -12,10 +13,10 @@ import ( "google.golang.org/grpc/encoding/proto" "google.golang.org/grpc/metadata" - grpctypes "github.com/cosmos/cosmos-sdk/types/grpc" - "github.com/cosmos/cosmos-sdk/codec/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + grpctypes "github.com/cosmos/cosmos-sdk/types/grpc" + "github.com/cosmos/cosmos-sdk/types/tx" ) var _ gogogrpc.ClientConn = Context{} @@ -23,7 +24,37 @@ var _ gogogrpc.ClientConn = Context{} var protoCodec = encoding.GetCodec(proto.Name) // Invoke implements the grpc ClientConn.Invoke method -func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply interface{}, opts ...grpc.CallOption) error { +func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply interface{}, opts ...grpc.CallOption) (err error) { + // Two things can happen here: + // 1. either we're broadcasting a Tx, in which call we call Tendermint's broadcast endpoint directly, + // 2. or we are querying for state, in which case we call ABCI's Query. + + // In both cases, we don't allow empty request args (it will panic unexpectedly). + if reflect.ValueOf(args).IsNil() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "request cannot be nil") + } + + // Case 1. Broadcasting a Tx. + if isBroadcast(method) { + req, ok := args.(*tx.BroadcastTxRequest) + if !ok { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxRequest)(nil), args) + } + res, ok := reply.(*tx.BroadcastTxResponse) + if !ok { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxResponse)(nil), args) + } + + broadcastRes, err := TxServiceBroadcast(grpcCtx, ctx, req) + if err != nil { + return err + } + *res = *broadcastRes + + return err + } + + // Case 2. Querying state. 
reqBz, err := protoCodec.Marshal(args) if err != nil { return err @@ -86,3 +117,7 @@ func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply func (Context) NewStream(gocontext.Context, *grpc.StreamDesc, string, ...grpc.CallOption) (grpc.ClientStream, error) { return nil, fmt.Errorf("streaming rpc not supported") } + +func isBroadcast(method string) bool { + return method == "/cosmos.tx.v1beta1.Service/BroadcastTx" +} diff --git a/docs/migrations/rest.md b/docs/migrations/rest.md index d418587d0..f27635bf3 100644 --- a/docs/migrations/rest.md +++ b/docs/migrations/rest.md @@ -31,7 +31,7 @@ Some modules expose legacy `POST` endpoints to generate unsigned transactions fo | Legacy REST Endpoint | Description | New gGPC-gateway REST Endpoint | | ------------------------------------------------------------------------------- | ------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | -| `GET /txs/{hash}` | Query tx by hash | `GET /cosmos/tx/v1beta1/tx/{hash}` | +| `GET /txs/{hash}` | Query tx by hash | `GET /cosmos/tx/v1beta1/txs/{hash}` | | `GET /txs` | Query tx by events | `GET /cosmos/tx/v1beta1/txs` | | `POST /txs` | Broadcast tx | `POST /cosmos/tx/v1beta1/txs` | | `POST /txs/encode` | Encodes an Amino JSON tx to an Amino binary tx | N/A, use Protobuf directly | diff --git a/proto/cosmos/tx/v1beta1/service.proto b/proto/cosmos/tx/v1beta1/service.proto index 68eaaae83..59df75bab 100644 --- a/proto/cosmos/tx/v1beta1/service.proto +++ b/proto/cosmos/tx/v1beta1/service.proto @@ -4,6 +4,7 @@ package cosmos.tx.v1beta1; import "google/api/annotations.proto"; import "cosmos/base/abci/v1beta1/abci.proto"; import "cosmos/tx/v1beta1/tx.proto"; +import "gogoproto/gogo.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; option go_package = "github.com/cosmos/cosmos-sdk/types/tx"; @@ -12,13 +13,22 @@ option go_package = "github.com/cosmos/cosmos-sdk/types/tx"; service Service { // Simulate simulates executing a transaction for estimating gas usage. rpc Simulate(SimulateRequest) returns (SimulateResponse) { - option (google.api.http).post = "/cosmos/tx/v1beta1/simulate"; + option (google.api.http) = { + post: "/cosmos/tx/v1beta1/simulate" + body: "*" + }; } // GetTx fetches a tx by hash. rpc GetTx(GetTxRequest) returns (GetTxResponse) { - option (google.api.http).get = "/cosmos/tx/v1beta1/tx/{hash}"; + option (google.api.http).get = "/cosmos/tx/v1beta1/txs/{hash}"; + } + // BroadcastTx broadcast transaction. + rpc BroadcastTx(BroadcastTxRequest) returns (BroadcastTxResponse) { + option (google.api.http) = { + post: "/cosmos/tx/v1beta1/txs" + body: "*" + }; } - // GetTxsEvent fetches txs by event. rpc GetTxsEvent(GetTxsEventRequest) returns (GetTxsEventResponse) { option (google.api.http).get = "/cosmos/tx/v1beta1/txs"; @@ -45,6 +55,36 @@ message GetTxsEventResponse { cosmos.base.query.v1beta1.PageResponse pagination = 3; } +// BroadcastTxRequest is the request type for the Service.BroadcastTxRequest +// RPC method. +message BroadcastTxRequest { + // tx_bytes is the raw transaction. + bytes tx_bytes = 1; + BroadcastMode mode = 2; +} + +// BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. +enum BroadcastMode { + // zero-value for mode ordering + BROADCAST_MODE_UNSPECIFIED = 0; + // BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for + // the tx to be committed in a block. 
+ BROADCAST_MODE_BLOCK = 1; + // BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + // a CheckTx execution response only. + BROADCAST_MODE_SYNC = 2; + // BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + // immediately. + BROADCAST_MODE_ASYNC = 3; +} + +// BroadcastTxResponse is the response type for the +// Service.BroadcastTx method. +message BroadcastTxResponse { + // tx_response is the queried TxResponses. + cosmos.base.abci.v1beta1.TxResponse tx_response = 1; +} + // SimulateRequest is the request type for the Service.Simulate // RPC method. message SimulateRequest { diff --git a/server/grpc/server_test.go b/server/grpc/server_test.go index 59c726c6a..5648233f8 100644 --- a/server/grpc/server_test.go +++ b/server/grpc/server_test.go @@ -13,54 +13,64 @@ import ( "google.golang.org/grpc/metadata" rpb "google.golang.org/grpc/reflection/grpc_reflection_v1alpha" + clienttx "github.com/cosmos/cosmos-sdk/client/tx" "github.com/cosmos/cosmos-sdk/testutil/network" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" grpctypes "github.com/cosmos/cosmos-sdk/types/grpc" "github.com/cosmos/cosmos-sdk/types/tx" txtypes "github.com/cosmos/cosmos-sdk/types/tx" + "github.com/cosmos/cosmos-sdk/types/tx/signing" + authclient "github.com/cosmos/cosmos-sdk/x/auth/client" banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" ) type IntegrationTestSuite struct { suite.Suite + cfg network.Config network *network.Network + conn *grpc.ClientConn } func (s *IntegrationTestSuite) SetupSuite() { s.T().Log("setting up integration test suite") - s.network = network.New(s.T(), network.DefaultConfig()) + s.cfg = network.DefaultConfig() + s.network = network.New(s.T(), s.cfg) s.Require().NotNil(s.network) _, err := s.network.WaitForHeight(2) s.Require().NoError(err) + + val0 := s.network.Validators[0] + s.conn, err = grpc.Dial( + val0.AppConfig.GRPC.Address, + grpc.WithInsecure(), // Or else we get "no transport security set" + ) + s.Require().NoError(err) } func (s *IntegrationTestSuite) TearDownSuite() { s.T().Log("tearing down integration test suite") + s.conn.Close() s.network.Cleanup() } -func (s *IntegrationTestSuite) TestGRPCServer() { - val0 := s.network.Validators[0] - conn, err := grpc.Dial( - val0.AppConfig.GRPC.Address, - grpc.WithInsecure(), // Or else we get "no transport security set" - ) - s.Require().NoError(err) - defer conn.Close() - +func (s *IntegrationTestSuite) TestGRPCServer_TestService() { // gRPC query to test service should work - testClient := testdata.NewQueryClient(conn) + testClient := testdata.NewQueryClient(s.conn) testRes, err := testClient.Echo(context.Background(), &testdata.EchoRequest{Message: "hello"}) s.Require().NoError(err) s.Require().Equal("hello", testRes.Message) +} + +func (s *IntegrationTestSuite) TestGRPCServer_BankBalance() { + val0 := s.network.Validators[0] // gRPC query to bank service should work denom := fmt.Sprintf("%stoken", val0.Moniker) - bankClient := banktypes.NewQueryClient(conn) + bankClient := banktypes.NewQueryClient(s.conn) var header metadata.MD bankRes, err := bankClient.Balance( context.Background(), @@ -83,9 +93,11 @@ func (s *IntegrationTestSuite) TestGRPCServer() { ) blockHeight = header.Get(grpctypes.GRPCBlockHeightHeader) s.Require().Equal([]string{"1"}, blockHeight) +} +func (s *IntegrationTestSuite) TestGRPCServer_Reflection() { // Test server reflection - reflectClient := rpb.NewServerReflectionClient(conn) + reflectClient := 
rpb.NewServerReflectionClient(s.conn) stream, err := reflectClient.ServerReflectionInfo(context.Background(), grpc.WaitForReady(true)) s.Require().NoError(err) s.Require().NoError(stream.Send(&rpb.ServerReflectionRequest{ @@ -100,11 +112,13 @@ func (s *IntegrationTestSuite) TestGRPCServer() { } // Make sure the following services are present s.Require().True(servicesMap["cosmos.bank.v1beta1.Query"]) +} +func (s *IntegrationTestSuite) TestGRPCServer_GetTxsEvent() { // Query the tx via gRPC without pagination. This used to panic, see // https://github.com/cosmos/cosmos-sdk/issues/8038. - txServiceClient := txtypes.NewServiceClient(conn) - _, err = txServiceClient.GetTxsEvent( + txServiceClient := txtypes.NewServiceClient(s.conn) + _, err := txServiceClient.GetTxsEvent( context.Background(), &tx.GetTxsEventRequest{ Events: []string{"message.action=send"}, @@ -115,6 +129,50 @@ func (s *IntegrationTestSuite) TestGRPCServer() { s.Require().NoError(err) } +func (s *IntegrationTestSuite) TestGRPCServer_BroadcastTx() { + val0 := s.network.Validators[0] + + // prepare txBuilder with msg + txBuilder := val0.ClientCtx.TxConfig.NewTxBuilder() + feeAmount := sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)} + gasLimit := testdata.NewTestGasLimit() + s.Require().NoError( + txBuilder.SetMsgs(&banktypes.MsgSend{ + FromAddress: val0.Address.String(), + ToAddress: val0.Address.String(), + Amount: sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)}, + }), + ) + txBuilder.SetFeeAmount(feeAmount) + txBuilder.SetGasLimit(gasLimit) + + // setup txFactory + txFactory := clienttx.Factory{}. + WithChainID(val0.ClientCtx.ChainID). + WithKeybase(val0.ClientCtx.Keyring). + WithTxConfig(val0.ClientCtx.TxConfig). + WithSignMode(signing.SignMode_SIGN_MODE_DIRECT) + + // Sign Tx. + err := authclient.SignTx(txFactory, val0.ClientCtx, val0.Moniker, txBuilder, false) + s.Require().NoError(err) + + txBytes, err := val0.ClientCtx.TxConfig.TxEncoder()(txBuilder.GetTx()) + s.Require().NoError(err) + + // Broadcast the tx via gRPC. + queryClient := tx.NewServiceClient(s.conn) + grpcRes, err := queryClient.BroadcastTx( + context.Background(), + &tx.BroadcastTxRequest{ + Mode: tx.BroadcastMode_BROADCAST_MODE_SYNC, + TxBytes: txBytes, + }, + ) + s.Require().NoError(err) + s.Require().Equal(uint32(0), grpcRes.TxResponse.Code) +} + // Test and enforce that we upfront reject any connections to baseapp containing // invalid initial x-cosmos-block-height that aren't positive and in the range [0, max(int64)] // See issue https://github.com/cosmos/cosmos-sdk/issues/7662. diff --git a/types/tx/service.pb.go b/types/tx/service.pb.go index 44242b031..57f1f6276 100644 --- a/types/tx/service.pb.go +++ b/types/tx/service.pb.go @@ -8,6 +8,7 @@ import ( fmt "fmt" types "github.com/cosmos/cosmos-sdk/types" query "github.com/cosmos/cosmos-sdk/types/query" + _ "github.com/gogo/protobuf/gogoproto" grpc1 "github.com/gogo/protobuf/grpc" proto "github.com/gogo/protobuf/proto" _ "google.golang.org/genproto/googleapis/api/annotations" @@ -30,6 +31,45 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package +// BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. +type BroadcastMode int32 + +const ( + // zero-value for mode ordering + BroadcastMode_BROADCAST_MODE_UNSPECIFIED BroadcastMode = 0 + // BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for + // the tx to be committed in a block. 
+ BroadcastMode_BROADCAST_MODE_BLOCK BroadcastMode = 1 + // BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + // a CheckTx execution response only. + BroadcastMode_BROADCAST_MODE_SYNC BroadcastMode = 2 + // BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + // immediately. + BroadcastMode_BROADCAST_MODE_ASYNC BroadcastMode = 3 +) + +var BroadcastMode_name = map[int32]string{ + 0: "BROADCAST_MODE_UNSPECIFIED", + 1: "BROADCAST_MODE_BLOCK", + 2: "BROADCAST_MODE_SYNC", + 3: "BROADCAST_MODE_ASYNC", +} + +var BroadcastMode_value = map[string]int32{ + "BROADCAST_MODE_UNSPECIFIED": 0, + "BROADCAST_MODE_BLOCK": 1, + "BROADCAST_MODE_SYNC": 2, + "BROADCAST_MODE_ASYNC": 3, +} + +func (x BroadcastMode) String() string { + return proto.EnumName(BroadcastMode_name, int32(x)) +} + +func (BroadcastMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e0b00a618705eca7, []int{0} +} + // GetTxsEventRequest is the request type for the Service.TxsByEvents // RPC method. type GetTxsEventRequest struct { @@ -151,6 +191,108 @@ func (m *GetTxsEventResponse) GetPagination() *query.PageResponse { return nil } +// BroadcastTxRequest is the request type for the Service.BroadcastTxRequest +// RPC method. +type BroadcastTxRequest struct { + // tx_bytes is the raw transaction. + TxBytes []byte `protobuf:"bytes,1,opt,name=tx_bytes,json=txBytes,proto3" json:"tx_bytes,omitempty"` + Mode BroadcastMode `protobuf:"varint,2,opt,name=mode,proto3,enum=cosmos.tx.v1beta1.BroadcastMode" json:"mode,omitempty"` +} + +func (m *BroadcastTxRequest) Reset() { *m = BroadcastTxRequest{} } +func (m *BroadcastTxRequest) String() string { return proto.CompactTextString(m) } +func (*BroadcastTxRequest) ProtoMessage() {} +func (*BroadcastTxRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_e0b00a618705eca7, []int{2} +} +func (m *BroadcastTxRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *BroadcastTxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_BroadcastTxRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *BroadcastTxRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BroadcastTxRequest.Merge(m, src) +} +func (m *BroadcastTxRequest) XXX_Size() int { + return m.Size() +} +func (m *BroadcastTxRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BroadcastTxRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BroadcastTxRequest proto.InternalMessageInfo + +func (m *BroadcastTxRequest) GetTxBytes() []byte { + if m != nil { + return m.TxBytes + } + return nil +} + +func (m *BroadcastTxRequest) GetMode() BroadcastMode { + if m != nil { + return m.Mode + } + return BroadcastMode_BROADCAST_MODE_UNSPECIFIED +} + +// BroadcastTxResponse is the response type for the +// Service.BroadcastTx method. +type BroadcastTxResponse struct { + // tx_response is the queried TxResponses. 
+ TxResponse *types.TxResponse `protobuf:"bytes,1,opt,name=tx_response,json=txResponse,proto3" json:"tx_response,omitempty"` +} + +func (m *BroadcastTxResponse) Reset() { *m = BroadcastTxResponse{} } +func (m *BroadcastTxResponse) String() string { return proto.CompactTextString(m) } +func (*BroadcastTxResponse) ProtoMessage() {} +func (*BroadcastTxResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_e0b00a618705eca7, []int{3} +} +func (m *BroadcastTxResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *BroadcastTxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_BroadcastTxResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *BroadcastTxResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BroadcastTxResponse.Merge(m, src) +} +func (m *BroadcastTxResponse) XXX_Size() int { + return m.Size() +} +func (m *BroadcastTxResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BroadcastTxResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BroadcastTxResponse proto.InternalMessageInfo + +func (m *BroadcastTxResponse) GetTxResponse() *types.TxResponse { + if m != nil { + return m.TxResponse + } + return nil +} + // SimulateRequest is the request type for the Service.Simulate // RPC method. type SimulateRequest struct { @@ -162,7 +304,7 @@ func (m *SimulateRequest) Reset() { *m = SimulateRequest{} } func (m *SimulateRequest) String() string { return proto.CompactTextString(m) } func (*SimulateRequest) ProtoMessage() {} func (*SimulateRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{2} + return fileDescriptor_e0b00a618705eca7, []int{4} } func (m *SimulateRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -211,7 +353,7 @@ func (m *SimulateResponse) Reset() { *m = SimulateResponse{} } func (m *SimulateResponse) String() string { return proto.CompactTextString(m) } func (*SimulateResponse) ProtoMessage() {} func (*SimulateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{3} + return fileDescriptor_e0b00a618705eca7, []int{5} } func (m *SimulateResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -265,7 +407,7 @@ func (m *GetTxRequest) Reset() { *m = GetTxRequest{} } func (m *GetTxRequest) String() string { return proto.CompactTextString(m) } func (*GetTxRequest) ProtoMessage() {} func (*GetTxRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{4} + return fileDescriptor_e0b00a618705eca7, []int{6} } func (m *GetTxRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -313,7 +455,7 @@ func (m *GetTxResponse) Reset() { *m = GetTxResponse{} } func (m *GetTxResponse) String() string { return proto.CompactTextString(m) } func (*GetTxResponse) ProtoMessage() {} func (*GetTxResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{5} + return fileDescriptor_e0b00a618705eca7, []int{7} } func (m *GetTxResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -357,8 +499,11 @@ func (m *GetTxResponse) GetTxResponse() *types.TxResponse { } func init() { + proto.RegisterEnum("cosmos.tx.v1beta1.BroadcastMode", BroadcastMode_name, BroadcastMode_value) proto.RegisterType((*GetTxsEventRequest)(nil), "cosmos.tx.v1beta1.GetTxsEventRequest") proto.RegisterType((*GetTxsEventResponse)(nil), 
"cosmos.tx.v1beta1.GetTxsEventResponse") + proto.RegisterType((*BroadcastTxRequest)(nil), "cosmos.tx.v1beta1.BroadcastTxRequest") + proto.RegisterType((*BroadcastTxResponse)(nil), "cosmos.tx.v1beta1.BroadcastTxResponse") proto.RegisterType((*SimulateRequest)(nil), "cosmos.tx.v1beta1.SimulateRequest") proto.RegisterType((*SimulateResponse)(nil), "cosmos.tx.v1beta1.SimulateResponse") proto.RegisterType((*GetTxRequest)(nil), "cosmos.tx.v1beta1.GetTxRequest") @@ -368,43 +513,54 @@ func init() { func init() { proto.RegisterFile("cosmos/tx/v1beta1/service.proto", fileDescriptor_e0b00a618705eca7) } var fileDescriptor_e0b00a618705eca7 = []byte{ - // 563 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcd, 0x6e, 0xd3, 0x40, - 0x10, 0xae, 0x1d, 0x48, 0xdb, 0x49, 0x11, 0xb0, 0x88, 0x2a, 0x32, 0xc5, 0x0d, 0x4e, 0xd3, 0x56, - 0x48, 0xd8, 0x6a, 0xb8, 0xf4, 0x80, 0x84, 0x84, 0x54, 0x22, 0x6e, 0xc8, 0xed, 0x89, 0x4b, 0xb5, - 0x09, 0x5b, 0xc7, 0x22, 0xf1, 0xba, 0xd9, 0x49, 0xb4, 0x15, 0xf4, 0xc2, 0x91, 0x13, 0x12, 0x2f, - 0xc5, 0x31, 0x12, 0x17, 0x8e, 0x28, 0xe1, 0x0d, 0x78, 0x01, 0xe4, 0xf5, 0x3a, 0x71, 0x68, 0x4c, - 0x7b, 0xca, 0x8e, 0xf2, 0xfd, 0xcc, 0x37, 0xa3, 0x31, 0x6c, 0x77, 0xb8, 0xe8, 0x73, 0xe1, 0xa1, - 0xf4, 0x46, 0x07, 0x6d, 0x86, 0xf4, 0xc0, 0x13, 0x6c, 0x30, 0x0a, 0x3b, 0xcc, 0x8d, 0x07, 0x1c, - 0x39, 0xb9, 0x9f, 0x02, 0x5c, 0x94, 0xae, 0x06, 0x58, 0x5b, 0x01, 0xe7, 0x41, 0x8f, 0x79, 0x34, - 0x0e, 0x3d, 0x1a, 0x45, 0x1c, 0x29, 0x86, 0x3c, 0x12, 0x29, 0xc1, 0xaa, 0x6b, 0xc5, 0x36, 0x15, - 0xcc, 0xa3, 0xed, 0x4e, 0x38, 0x13, 0x4e, 0x0a, 0x0d, 0xb2, 0xae, 0xda, 0xa2, 0xd4, 0xff, 0x3d, - 0xcd, 0x0b, 0x9c, 0x0f, 0xd9, 0xe0, 0x62, 0x86, 0x89, 0x69, 0x10, 0x46, 0xca, 0x2d, 0xc5, 0x3a, - 0x08, 0xa4, 0xc5, 0xf0, 0x44, 0x8a, 0xa3, 0x11, 0x8b, 0xd0, 0x67, 0xe7, 0x43, 0x26, 0x90, 0x6c, - 0x42, 0x99, 0x25, 0xb5, 0xa8, 0x1a, 0xb5, 0xd2, 0xfe, 0xba, 0xaf, 0x2b, 0xf2, 0x1a, 0x60, 0xae, - 0x50, 0x35, 0x6b, 0xc6, 0x7e, 0xa5, 0xb9, 0xeb, 0xea, 0x80, 0x89, 0x9d, 0xab, 0xec, 0xb2, 0xa0, - 0xee, 0x5b, 0x1a, 0x30, 0xad, 0xe9, 0xe7, 0x98, 0xce, 0xd8, 0x80, 0x07, 0x0b, 0xb6, 0x22, 0xe6, - 0x91, 0x60, 0x64, 0x0f, 0x4a, 0x28, 0x53, 0xd3, 0x4a, 0xf3, 0xa1, 0x7b, 0x65, 0x72, 0xee, 0x89, - 0xf4, 0x13, 0x04, 0x69, 0xc1, 0x06, 0xca, 0xd3, 0x81, 0xe6, 0x89, 0xaa, 0xa9, 0x18, 0x3b, 0x0b, - 0xad, 0xa8, 0x69, 0xe5, 0x88, 0x1a, 0xec, 0x57, 0x70, 0xf6, 0x4e, 0x84, 0xf2, 0x89, 0x4a, 0x2a, - 0xd1, 0xde, 0xb5, 0x89, 0xb4, 0x52, 0x3e, 0xd2, 0x21, 0xdc, 0x3d, 0x0e, 0xfb, 0xc3, 0x1e, 0xc5, - 0x2c, 0x31, 0x69, 0x80, 0x89, 0xb2, 0x6a, 0x28, 0xcd, 0x82, 0x30, 0x26, 0x4a, 0xe7, 0x8b, 0x01, - 0xf7, 0xe6, 0x54, 0x3d, 0x89, 0x17, 0xb0, 0x16, 0x50, 0x71, 0x1a, 0x46, 0x67, 0x5c, 0x2b, 0x3c, - 0x29, 0x0e, 0xd7, 0xa2, 0xe2, 0x4d, 0x74, 0xc6, 0xfd, 0xd5, 0x20, 0x7d, 0x90, 0x43, 0x28, 0x0f, - 0x98, 0x18, 0xf6, 0x50, 0xef, 0xa8, 0x56, 0xcc, 0xf5, 0x15, 0xce, 0xd7, 0x78, 0xc7, 0x81, 0x0d, - 0xb5, 0x98, 0x2c, 0x03, 0x81, 0x5b, 0x5d, 0x2a, 0xba, 0xaa, 0x87, 0x75, 0x5f, 0xbd, 0x9d, 0x4b, - 0xb8, 0xa3, 0x31, 0xba, 0xd9, 0x9b, 0x05, 0x25, 0x47, 0x50, 0xc9, 0x2d, 0x4d, 0xb7, 0x76, 0xb3, - 0x9d, 0xc1, 0x7c, 0x67, 0xcd, 0x3f, 0x26, 0xac, 0x1e, 0xa7, 0x27, 0x46, 0x24, 0xac, 0x65, 0xa3, - 0x23, 0xce, 0x12, 0xe7, 0x7f, 0x56, 0x62, 0xd5, 0xff, 0x8b, 0x49, 0x0d, 0x9c, 0xfa, 0xe7, 0x1f, - 0xbf, 0xbf, 0x99, 0x8f, 0x9d, 0x47, 0xde, 0x92, 0xdb, 0xce, 0xdc, 0x62, 0xb8, 0xad, 0x86, 0x40, - 0xb6, 0x97, 0x48, 0xe6, 0x47, 0x68, 0xd5, 0x8a, 0x01, 0xda, 0x70, 0x47, 0x19, 0xda, 0x64, 0xcb, - 0x5b, 0x76, 0xd5, 
0xde, 0xc7, 0x64, 0xea, 0x97, 0xe4, 0x13, 0x54, 0x72, 0x37, 0x43, 0x1a, 0x45, - 0xb2, 0x0b, 0xa7, 0x6c, 0xed, 0x5e, 0x07, 0xd3, 0x3d, 0xd8, 0xaa, 0x87, 0x2a, 0xd9, 0x5c, 0xda, - 0x83, 0x78, 0xf5, 0xf2, 0xfb, 0xc4, 0x36, 0xc6, 0x13, 0xdb, 0xf8, 0x35, 0xb1, 0x8d, 0xaf, 0x53, - 0x7b, 0x65, 0x3c, 0xb5, 0x57, 0x7e, 0x4e, 0xed, 0x95, 0x77, 0x8d, 0x20, 0xc4, 0xee, 0xb0, 0xed, - 0x76, 0x78, 0x3f, 0xe3, 0xa6, 0x3f, 0xcf, 0xc4, 0xfb, 0x0f, 0x1e, 0x5e, 0xc4, 0x2c, 0x11, 0x6b, - 0x97, 0xd5, 0x07, 0xe7, 0xf9, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x62, 0x59, 0xdc, 0x70, 0x31, - 0x05, 0x00, 0x00, + // 737 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xcd, 0x4f, 0x13, 0x41, + 0x14, 0xef, 0xb6, 0xc8, 0xc7, 0x2b, 0x68, 0x1d, 0x10, 0x6b, 0xd1, 0xa5, 0x2c, 0x16, 0x08, 0x89, + 0xbb, 0xa1, 0x7a, 0x20, 0xc6, 0xc4, 0xd0, 0x52, 0x08, 0x51, 0x3e, 0xb2, 0xc5, 0x83, 0xc6, 0xa4, + 0x99, 0xb6, 0xc3, 0xb2, 0x91, 0xee, 0x94, 0xce, 0x94, 0x2c, 0x01, 0x62, 0xe2, 0xd1, 0x93, 0x89, + 0xff, 0x94, 0x47, 0x12, 0x2f, 0x1e, 0x0d, 0xf8, 0x47, 0x78, 0x34, 0x3b, 0x3b, 0x6d, 0xb7, 0x65, + 0x0b, 0xc4, 0x13, 0x33, 0xcc, 0xef, 0xfd, 0x3e, 0xde, 0x9b, 0x9d, 0xc2, 0x74, 0x85, 0xb2, 0x1a, + 0x65, 0x06, 0x77, 0x8d, 0xa3, 0xa5, 0x32, 0xe1, 0x78, 0xc9, 0x60, 0xa4, 0x71, 0x64, 0x57, 0x88, + 0x5e, 0x6f, 0x50, 0x4e, 0xd1, 0x7d, 0x1f, 0xa0, 0x73, 0x57, 0x97, 0x80, 0xd4, 0x63, 0x8b, 0x52, + 0xeb, 0x80, 0x18, 0xb8, 0x6e, 0x1b, 0xd8, 0x71, 0x28, 0xc7, 0xdc, 0xa6, 0x0e, 0xf3, 0x0b, 0x52, + 0xb3, 0x92, 0xb1, 0x8c, 0x19, 0x31, 0x70, 0xb9, 0x62, 0xb7, 0x89, 0xbd, 0x8d, 0x04, 0xa5, 0xae, + 0xca, 0x72, 0x57, 0x9e, 0x4d, 0x58, 0xd4, 0xa2, 0x62, 0x69, 0x78, 0x2b, 0xf9, 0xdf, 0xc5, 0x20, + 0xed, 0x61, 0x93, 0x34, 0x8e, 0xdb, 0x95, 0x75, 0x6c, 0xd9, 0x8e, 0xf0, 0xe0, 0x63, 0x35, 0x0e, + 0x68, 0x9d, 0xf0, 0x5d, 0x97, 0x15, 0x8e, 0x88, 0xc3, 0x4d, 0x72, 0xd8, 0x24, 0x8c, 0xa3, 0x49, + 0x18, 0x24, 0xde, 0x9e, 0x25, 0x95, 0x74, 0x6c, 0x61, 0xc4, 0x94, 0x3b, 0xb4, 0x06, 0xd0, 0x61, + 0x48, 0x46, 0xd3, 0xca, 0x42, 0x3c, 0x3b, 0xa7, 0xcb, 0xd8, 0x9e, 0x9c, 0x2e, 0xe4, 0x5a, 0xf1, + 0xf5, 0x1d, 0x6c, 0x11, 0xc9, 0x69, 0x06, 0x2a, 0xb5, 0x73, 0x05, 0xc6, 0xbb, 0x64, 0x59, 0x9d, + 0x3a, 0x8c, 0xa0, 0x79, 0x88, 0x71, 0xd7, 0x17, 0x8d, 0x67, 0x1f, 0xe8, 0x57, 0xfa, 0xa9, 0xef, + 0xba, 0xa6, 0x87, 0x40, 0xeb, 0x30, 0xca, 0xdd, 0x52, 0x43, 0xd6, 0xb1, 0x64, 0x54, 0x54, 0x3c, + 0xed, 0xb2, 0x22, 0x7a, 0x18, 0x28, 0x94, 0x60, 0x33, 0xce, 0xdb, 0x6b, 0x8f, 0x28, 0x98, 0x28, + 0x26, 0x12, 0xcd, 0xdf, 0x98, 0x48, 0x32, 0x05, 0x23, 0x11, 0x40, 0xb9, 0x06, 0xc5, 0xd5, 0x0a, + 0x66, 0xdc, 0x13, 0xf3, 0x1b, 0xf9, 0x08, 0x86, 0xb9, 0x5b, 0x2a, 0x1f, 0x73, 0xe2, 0xa5, 0x52, + 0x16, 0x46, 0xcd, 0x21, 0xee, 0xe6, 0xbc, 0x2d, 0x7a, 0x01, 0x03, 0x35, 0x5a, 0x25, 0xa2, 0x8b, + 0x77, 0xb3, 0xe9, 0x90, 0xb0, 0x6d, 0xbe, 0x4d, 0x5a, 0x25, 0xa6, 0x40, 0x6b, 0x1f, 0x61, 0xbc, + 0x4b, 0x46, 0x36, 0xae, 0x00, 0xf1, 0x40, 0x3f, 0x84, 0xd4, 0x6d, 0xdb, 0x01, 0x9d, 0x76, 0x68, + 0xcb, 0x70, 0xaf, 0x68, 0xd7, 0x9a, 0x07, 0x98, 0xb7, 0xc6, 0x86, 0x32, 0x10, 0xe5, 0xae, 0x24, + 0xec, 0x33, 0x91, 0x28, 0x77, 0xb5, 0xaf, 0x0a, 0x24, 0x3a, 0xa5, 0xd2, 0xd5, 0x2b, 0x18, 0xb6, + 0x30, 0x2b, 0xd9, 0xce, 0x1e, 0x95, 0x0c, 0x33, 0xfd, 0x2d, 0xad, 0x63, 0xb6, 0xe1, 0xec, 0x51, + 0x73, 0xc8, 0xf2, 0x17, 0x68, 0x19, 0x06, 0x1b, 0x84, 0x35, 0x0f, 0xb8, 0xbc, 0x68, 0xe9, 0xfe, + 0xb5, 0xa6, 0xc0, 0x99, 0x12, 0xaf, 0x69, 0x30, 0x2a, 0x6e, 0x57, 0x2b, 0x03, 0x82, 0x81, 0x7d, + 0xcc, 0xf6, 0x85, 0x87, 0x11, 0x53, 0xac, 0xb5, 0x33, 0x18, 0x93, 0x18, 
0x69, 0xf6, 0x76, 0x41, + 0x7b, 0x3b, 0x1d, 0xfd, 0xbf, 0x4e, 0x2f, 0x9e, 0xc2, 0x58, 0xd7, 0x78, 0x91, 0x0a, 0xa9, 0x9c, + 0xb9, 0xbd, 0xb2, 0x9a, 0x5f, 0x29, 0xee, 0x96, 0x36, 0xb7, 0x57, 0x0b, 0xa5, 0x77, 0x5b, 0xc5, + 0x9d, 0x42, 0x7e, 0x63, 0x6d, 0xa3, 0xb0, 0x9a, 0x88, 0xa0, 0x24, 0x4c, 0xf4, 0x9c, 0xe7, 0xde, + 0x6e, 0xe7, 0xdf, 0x24, 0x14, 0xf4, 0x10, 0xc6, 0x7b, 0x4e, 0x8a, 0xef, 0xb7, 0xf2, 0x89, 0x68, + 0x48, 0xc9, 0x8a, 0x38, 0x89, 0x65, 0xff, 0xc6, 0x60, 0xa8, 0xe8, 0xbf, 0x5d, 0xe8, 0x04, 0x86, + 0x5b, 0x83, 0x43, 0x5a, 0x48, 0xee, 0x9e, 0x0b, 0x91, 0x9a, 0xbd, 0x16, 0x23, 0x2f, 0xd2, 0xdc, + 0x97, 0x9f, 0x7f, 0xbe, 0x47, 0xd3, 0xda, 0x94, 0x11, 0xf2, 0x68, 0x4a, 0xf0, 0x4b, 0x65, 0x11, + 0x1d, 0xc2, 0x1d, 0x31, 0x05, 0x34, 0x1d, 0xc2, 0x1a, 0x9c, 0x61, 0x2a, 0xdd, 0x1f, 0x20, 0x35, + 0x33, 0x42, 0x73, 0x1a, 0x3d, 0x31, 0xc2, 0x5e, 0x4c, 0x66, 0x9c, 0x78, 0x73, 0x3f, 0x43, 0x9f, + 0x21, 0x1e, 0xf8, 0x82, 0x50, 0xe6, 0xba, 0x0f, 0xaf, 0x23, 0x3f, 0x77, 0x13, 0x4c, 0x9a, 0x98, + 0x11, 0x26, 0xa6, 0xb4, 0xc9, 0x70, 0x13, 0x5e, 0xe6, 0x53, 0x88, 0x07, 0xde, 0xbe, 0x50, 0x03, + 0x57, 0x9f, 0xe4, 0x50, 0x03, 0x21, 0x4f, 0xa8, 0xa6, 0x0a, 0x03, 0x49, 0xd4, 0xc7, 0x40, 0xee, + 0xf5, 0x8f, 0x0b, 0x55, 0x39, 0xbf, 0x50, 0x95, 0xdf, 0x17, 0xaa, 0xf2, 0xed, 0x52, 0x8d, 0x9c, + 0x5f, 0xaa, 0x91, 0x5f, 0x97, 0x6a, 0xe4, 0x43, 0xc6, 0xb2, 0xf9, 0x7e, 0xb3, 0xac, 0x57, 0x68, + 0xad, 0x55, 0xeb, 0xff, 0x79, 0xc6, 0xaa, 0x9f, 0x0c, 0x7e, 0x5c, 0x27, 0x1e, 0x59, 0x79, 0x50, + 0xfc, 0x70, 0x3c, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x96, 0xba, 0xfb, 0xcb, 0x0f, 0x07, 0x00, + 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -423,6 +579,8 @@ type ServiceClient interface { Simulate(ctx context.Context, in *SimulateRequest, opts ...grpc.CallOption) (*SimulateResponse, error) // GetTx fetches a tx by hash. GetTx(ctx context.Context, in *GetTxRequest, opts ...grpc.CallOption) (*GetTxResponse, error) + // BroadcastTx broadcast transaction. + BroadcastTx(ctx context.Context, in *BroadcastTxRequest, opts ...grpc.CallOption) (*BroadcastTxResponse, error) // GetTxsEvent fetches txs by event. GetTxsEvent(ctx context.Context, in *GetTxsEventRequest, opts ...grpc.CallOption) (*GetTxsEventResponse, error) } @@ -453,6 +611,15 @@ func (c *serviceClient) GetTx(ctx context.Context, in *GetTxRequest, opts ...grp return out, nil } +func (c *serviceClient) BroadcastTx(ctx context.Context, in *BroadcastTxRequest, opts ...grpc.CallOption) (*BroadcastTxResponse, error) { + out := new(BroadcastTxResponse) + err := c.cc.Invoke(ctx, "/cosmos.tx.v1beta1.Service/BroadcastTx", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + func (c *serviceClient) GetTxsEvent(ctx context.Context, in *GetTxsEventRequest, opts ...grpc.CallOption) (*GetTxsEventResponse, error) { out := new(GetTxsEventResponse) err := c.cc.Invoke(ctx, "/cosmos.tx.v1beta1.Service/GetTxsEvent", in, out, opts...) @@ -468,6 +635,8 @@ type ServiceServer interface { Simulate(context.Context, *SimulateRequest) (*SimulateResponse, error) // GetTx fetches a tx by hash. GetTx(context.Context, *GetTxRequest) (*GetTxResponse, error) + // BroadcastTx broadcast transaction. + BroadcastTx(context.Context, *BroadcastTxRequest) (*BroadcastTxResponse, error) // GetTxsEvent fetches txs by event. 
GetTxsEvent(context.Context, *GetTxsEventRequest) (*GetTxsEventResponse, error) } @@ -482,6 +651,9 @@ func (*UnimplementedServiceServer) Simulate(ctx context.Context, req *SimulateRe func (*UnimplementedServiceServer) GetTx(ctx context.Context, req *GetTxRequest) (*GetTxResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetTx not implemented") } +func (*UnimplementedServiceServer) BroadcastTx(ctx context.Context, req *BroadcastTxRequest) (*BroadcastTxResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method BroadcastTx not implemented") +} func (*UnimplementedServiceServer) GetTxsEvent(ctx context.Context, req *GetTxsEventRequest) (*GetTxsEventResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetTxsEvent not implemented") } @@ -526,6 +698,24 @@ func _Service_GetTx_Handler(srv interface{}, ctx context.Context, dec func(inter return interceptor(ctx, in, info, handler) } +func _Service_BroadcastTx_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BroadcastTxRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).BroadcastTx(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/cosmos.tx.v1beta1.Service/BroadcastTx", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).BroadcastTx(ctx, req.(*BroadcastTxRequest)) + } + return interceptor(ctx, in, info, handler) +} + func _Service_GetTxsEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetTxsEventRequest) if err := dec(in); err != nil { @@ -556,6 +746,10 @@ var _Service_serviceDesc = grpc.ServiceDesc{ MethodName: "GetTx", Handler: _Service_GetTx_Handler, }, + { + MethodName: "BroadcastTx", + Handler: _Service_BroadcastTx_Handler, + }, { MethodName: "GetTxsEvent", Handler: _Service_GetTxsEvent_Handler, @@ -672,6 +866,76 @@ func (m *GetTxsEventResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *BroadcastTxRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *BroadcastTxRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *BroadcastTxRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Mode != 0 { + i = encodeVarintService(dAtA, i, uint64(m.Mode)) + i-- + dAtA[i] = 0x10 + } + if len(m.TxBytes) > 0 { + i -= len(m.TxBytes) + copy(dAtA[i:], m.TxBytes) + i = encodeVarintService(dAtA, i, uint64(len(m.TxBytes))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *BroadcastTxResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *BroadcastTxResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *BroadcastTxResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.TxResponse != nil { + { + size, err := 
m.TxResponse.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintService(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *SimulateRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -886,6 +1150,35 @@ func (m *GetTxsEventResponse) Size() (n int) { return n } +func (m *BroadcastTxRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.TxBytes) + if l > 0 { + n += 1 + l + sovService(uint64(l)) + } + if m.Mode != 0 { + n += 1 + sovService(uint64(m.Mode)) + } + return n +} + +func (m *BroadcastTxResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.TxResponse != nil { + l = m.TxResponse.Size() + n += 1 + l + sovService(uint64(l)) + } + return n +} + func (m *SimulateRequest) Size() (n int) { if m == nil { return 0 @@ -1230,6 +1523,201 @@ func (m *GetTxsEventResponse) Unmarshal(dAtA []byte) error { } return nil } +func (m *BroadcastTxRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: BroadcastTxRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: BroadcastTxRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field TxBytes", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthService + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthService + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.TxBytes = append(m.TxBytes[:0], dAtA[iNdEx:postIndex]...) 
+ if m.TxBytes == nil { + m.TxBytes = []byte{} + } + iNdEx = postIndex + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Mode", wireType) + } + m.Mode = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Mode |= BroadcastMode(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipService(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *BroadcastTxResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: BroadcastTxResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: BroadcastTxResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field TxResponse", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthService + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthService + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.TxResponse == nil { + m.TxResponse = &types.TxResponse{} + } + if err := m.TxResponse.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipService(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *SimulateRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 diff --git a/types/tx/service.pb.gw.go b/types/tx/service.pb.gw.go index a7d5b56c5..6d94c423e 100644 --- a/types/tx/service.pb.gw.go +++ b/types/tx/service.pb.gw.go @@ -31,18 +31,15 @@ var _ = runtime.String var _ = utilities.NewDoubleArray var _ = descriptor.ForMessage -var ( - filter_Service_Simulate_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) - func request_Service_Simulate_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq SimulateRequest var metadata runtime.ServerMetadata - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + newReader, berr := 
utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_Simulate_0); err != nil { + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -55,10 +52,11 @@ func local_request_Service_Simulate_0(ctx context.Context, marshaler runtime.Mar var protoReq SimulateRequest var metadata runtime.ServerMetadata - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_Simulate_0); err != nil { + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -121,6 +119,40 @@ func local_request_Service_GetTx_0(ctx context.Context, marshaler runtime.Marsha } +func request_Service_BroadcastTx_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq BroadcastTxRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.BroadcastTx(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_Service_BroadcastTx_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq BroadcastTxRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.BroadcastTx(ctx, &protoReq) + return msg, metadata, err + +} + var ( filter_Service_GetTxsEvent_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} ) @@ -203,6 +235,26 @@ func RegisterServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, se }) + mux.Handle("POST", pattern_Service_BroadcastTx_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_Service_BroadcastTx_0(rctx, inboundMarshaler, server, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, 
req, err) + return + } + + forward_Service_BroadcastTx_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + mux.Handle("GET", pattern_Service_GetTxsEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() @@ -304,6 +356,26 @@ func RegisterServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, cl }) + mux.Handle("POST", pattern_Service_BroadcastTx_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Service_BroadcastTx_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Service_BroadcastTx_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + mux.Handle("GET", pattern_Service_GetTxsEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() @@ -330,7 +402,9 @@ func RegisterServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, cl var ( pattern_Service_Simulate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"cosmos", "tx", "v1beta1", "simulate"}, "", runtime.AssumeColonVerbOpt(true))) - pattern_Service_GetTx_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 1, 1, 0, 4, 1, 5, 3}, []string{"cosmos", "tx", "v1beta1", "hash"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Service_GetTx_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"cosmos", "tx", "v1beta1", "txs", "hash"}, "", runtime.AssumeColonVerbOpt(true))) + + pattern_Service_BroadcastTx_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"cosmos", "tx", "v1beta1", "txs"}, "", runtime.AssumeColonVerbOpt(true))) pattern_Service_GetTxsEvent_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"cosmos", "tx", "v1beta1", "txs"}, "", runtime.AssumeColonVerbOpt(true))) ) @@ -340,5 +414,7 @@ var ( forward_Service_GetTx_0 = runtime.ForwardResponseMessage + forward_Service_BroadcastTx_0 = runtime.ForwardResponseMessage + forward_Service_GetTxsEvent_0 = runtime.ForwardResponseMessage ) diff --git a/x/auth/client/rest/query.go b/x/auth/client/rest/query.go index f9abae6b7..6fe54404b 100644 --- a/x/auth/client/rest/query.go +++ b/x/auth/client/rest/query.go @@ -151,7 +151,7 @@ func QueryTxRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { rest.WriteErrorResponse(w, http.StatusNotFound, fmt.Sprintf("no transaction found with hash %s", hashHexStr)) } - err = checkSignModeError(clientCtx, output, "/cosmos/tx/v1beta1/tx/{txhash}") + err = checkSignModeError(clientCtx, output, "/cosmos/tx/v1beta1/txs/{txhash}") if err != nil { rest.WriteErrorResponse(w, http.StatusInternalServerError, err.Error()) diff --git a/x/auth/client/rest/rest_test.go b/x/auth/client/rest/rest_test.go index 671df87a8..67c3cfc3f 100644 --- a/x/auth/client/rest/rest_test.go +++ b/x/auth/client/rest/rest_test.go @@ 
-345,7 +345,7 @@ func (s *IntegrationTestSuite) broadcastReq(stdTx legacytx.StdTx, mode string) ( // testQueryIBCTx is a helper function to test querying txs which: // - show an error message on legacy REST endpoints // - succeed using gRPC -// In practise, we call this function on IBC txs. +// In practice, we call this function on IBC txs. func (s *IntegrationTestSuite) testQueryIBCTx(txRes sdk.TxResponse, cmd *cobra.Command, args []string) { val := s.network.Validators[0] @@ -381,7 +381,7 @@ func (s *IntegrationTestSuite) testQueryIBCTx(txRes sdk.TxResponse, cmd *cobra.C } // try fetching the txn using gRPC req, it will fetch info since it has proto codec. - grpcJSON, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/tx/%s", val.APIAddress, txRes.TxHash)) + grpcJSON, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/%s", val.APIAddress, txRes.TxHash)) s.Require().NoError(err) var getTxRes txtypes.GetTxResponse diff --git a/x/auth/tx/service.go b/x/auth/tx/service.go index 751c8fbb2..853d401be 100644 --- a/x/auth/tx/service.go +++ b/x/auth/tx/service.go @@ -49,6 +49,10 @@ const ( // TxsByEvents implements the ServiceServer.TxsByEvents RPC method. func (s txServer) GetTxsEvent(ctx context.Context, req *txtypes.GetTxsEventRequest) (*txtypes.GetTxsEventResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "request cannot be nil") + } + page, limit, err := pagination.ParsePagination(req.Pagination) if err != nil { return nil, err @@ -113,7 +117,7 @@ func (s txServer) GetTxsEvent(ctx context.Context, req *txtypes.GetTxsEventReque // Simulate implements the ServiceServer.Simulate RPC method. func (s txServer) Simulate(ctx context.Context, req *txtypes.SimulateRequest) (*txtypes.SimulateResponse, error) { - if req.Tx == nil { + if req == nil || req.Tx == nil { return nil, status.Error(codes.InvalidArgument, "invalid empty tx") } @@ -139,6 +143,10 @@ func (s txServer) Simulate(ctx context.Context, req *txtypes.SimulateRequest) (* // GetTx implements the ServiceServer.GetTx RPC method. func (s txServer) GetTx(ctx context.Context, req *txtypes.GetTxRequest) (*txtypes.GetTxResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "request cannot be nil") + } + // We get hash as a hex string in the request, convert it to bytes. hash, err := hex.DecodeString(req.Hash) if err != nil { @@ -170,6 +178,10 @@ func (s txServer) GetTx(ctx context.Context, req *txtypes.GetTxRequest) (*txtype }, nil } +func (s txServer) BroadcastTx(ctx context.Context, req *txtypes.BroadcastTxRequest) (*txtypes.BroadcastTxResponse, error) { + return client.TxServiceBroadcast(ctx, s.clientCtx, req) +} + // RegisterTxService registers the tx service on the gRPC router. 
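The new `txServer.BroadcastTx` handler, together with the `POST /cosmos/tx/v1beta1/txs` gateway route registered earlier in this patch, gives clients a gRPC path for submitting signed transactions. A minimal client-side sketch, assuming a locally reachable gRPC endpoint and already-signed `txBytes` (the dial target and the `broadcastRaw` helper name are illustrative assumptions, not part of this patch):

```go
package txexample

import (
	"context"
	"fmt"

	"google.golang.org/grpc"

	txtypes "github.com/cosmos/cosmos-sdk/types/tx"
)

// broadcastRaw submits already-signed, protobuf-encoded transaction bytes
// through the new Service/BroadcastTx gRPC endpoint. The dial target is an
// assumption for illustration; nodes expose gRPC on a configurable address.
func broadcastRaw(txBytes []byte) error {
	conn, err := grpc.Dial("localhost:9090", grpc.WithInsecure())
	if err != nil {
		return err
	}
	defer conn.Close()

	client := txtypes.NewServiceClient(conn)
	res, err := client.BroadcastTx(context.Background(), &txtypes.BroadcastTxRequest{
		Mode:    txtypes.BroadcastMode_BROADCAST_MODE_SYNC,
		TxBytes: txBytes,
	})
	if err != nil {
		return err
	}

	fmt.Println("code:", res.TxResponse.Code, "txhash:", res.TxResponse.TxHash)
	return nil
}
```

The integration tests later in this patch reach the same handler without dialing a separate connection, by constructing the client from the validator's context with `tx.NewServiceClient(val.ClientCtx)`.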
func RegisterTxService( qrt gogogrpc.Server, diff --git a/x/auth/tx/service_test.go b/x/auth/tx/service_test.go index 480529541..341684acb 100644 --- a/x/auth/tx/service_test.go +++ b/x/auth/tx/service_test.go @@ -31,6 +31,7 @@ type IntegrationTestSuite struct { network *network.Network queryClient tx.ServiceClient + txRes sdk.TxResponse } func (s *IntegrationTestSuite) SetupSuite() { @@ -41,67 +42,14 @@ func (s *IntegrationTestSuite) SetupSuite() { s.cfg = cfg s.network = network.New(s.T(), cfg) - s.Require().NotNil(s.network) + val := s.network.Validators[0] + _, err := s.network.WaitForHeight(1) s.Require().NoError(err) - s.queryClient = tx.NewServiceClient(s.network.Validators[0].ClientCtx) -} - -func (s *IntegrationTestSuite) TearDownSuite() { - s.T().Log("tearing down integration test suite") - s.network.Cleanup() -} - -func (s IntegrationTestSuite) TestSimulate() { - val := s.network.Validators[0] - - // prepare txBuilder with msg - txBuilder := val.ClientCtx.TxConfig.NewTxBuilder() - feeAmount := sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)} - gasLimit := testdata.NewTestGasLimit() - s.Require().NoError( - txBuilder.SetMsgs(&banktypes.MsgSend{ - FromAddress: val.Address.String(), - ToAddress: val.Address.String(), - Amount: sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)}, - }), - ) - txBuilder.SetFeeAmount(feeAmount) - txBuilder.SetGasLimit(gasLimit) - txBuilder.SetMemo("foobar") - - // setup txFactory - txFactory := clienttx.Factory{}. - WithChainID(val.ClientCtx.ChainID). - WithKeybase(val.ClientCtx.Keyring). - WithTxConfig(val.ClientCtx.TxConfig). - WithSignMode(signing.SignMode_SIGN_MODE_DIRECT) - - // Sign Tx. - err := authclient.SignTx(txFactory, val.ClientCtx, val.Moniker, txBuilder, false) - s.Require().NoError(err) - - // Convert the txBuilder to a tx.Tx. - protoTx, err := txBuilderToProtoTx(txBuilder) - s.Require().NoError(err) - - // Run the simulate gRPC query. - res, err := s.queryClient.Simulate( - context.Background(), - &tx.SimulateRequest{Tx: protoTx}, - ) - s.Require().NoError(err) - - // Check the result and gas used are correct. - s.Require().Equal(len(res.GetResult().GetEvents()), 4) // 1 transfer, 3 messages. - s.Require().True(res.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes change, just check it's not empty. -} - -func (s IntegrationTestSuite) TestGetTxEvents() { - val := s.network.Validators[0] + s.queryClient = tx.NewServiceClient(val.ClientCtx) // Create a new MsgSend tx from val to itself. out, err := bankcli.MsgSendExec( @@ -118,51 +66,161 @@ func (s IntegrationTestSuite) TestGetTxEvents() { fmt.Sprintf("--%s=foobar", flags.FlagMemo), ) s.Require().NoError(err) - var txRes sdk.TxResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(out.Bytes(), &txRes)) - s.Require().Equal(uint32(0), txRes.Code) + s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(out.Bytes(), &s.txRes)) + s.Require().Equal(uint32(0), s.txRes.Code) s.Require().NoError(s.network.WaitForNextBlock()) +} - // Query the tx via gRPC empty params. - _, err = s.queryClient.GetTxsEvent( - context.Background(), - &tx.GetTxsEventRequest{}, - ) - s.Require().Error(err) +func (s *IntegrationTestSuite) TearDownSuite() { + s.T().Log("tearing down integration test suite") + s.network.Cleanup() +} - // Query the tx via gRPC. 
- grpcRes, err := s.queryClient.GetTxsEvent( - context.Background(), - &tx.GetTxsEventRequest{ - Events: []string{"message.action=send"}, - Pagination: &query.PageRequest{ - CountTotal: false, - Offset: 0, - Limit: 1, +func (s IntegrationTestSuite) TestSimulateTx_GRPC() { + txBuilder := s.mkTxBuilder() + // Convert the txBuilder to a tx.Tx. + protoTx, err := txBuilderToProtoTx(txBuilder) + s.Require().NoError(err) + + testCases := []struct { + name string + req *tx.SimulateRequest + expErr bool + expErrMsg string + }{ + {"nil request", nil, true, "request cannot be nil"}, + {"empty request", &tx.SimulateRequest{}, true, "invalid empty tx"}, + {"valid request", &tx.SimulateRequest{Tx: protoTx}, false, ""}, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + // Broadcast the tx via gRPC via the validator's clientCtx (which goes + // through Tendermint). + res, err := s.queryClient.Simulate(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + // Check the result and gas used are correct. + s.Require().Equal(len(res.GetResult().GetEvents()), 4) // 1 transfer, 3 messages. + s.Require().True(res.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes change, just check it's not empty. + } + }) + } +} + +func (s IntegrationTestSuite) TestSimulateTx_GRPCGateway() { + val := s.network.Validators[0] + txBuilder := s.mkTxBuilder() + // Convert the txBuilder to a tx.Tx. + protoTx, err := txBuilderToProtoTx(txBuilder) + s.Require().NoError(err) + + testCases := []struct { + name string + req *tx.SimulateRequest + expErr bool + expErrMsg string + }{ + {"empty request", &tx.SimulateRequest{}, true, "invalid empty tx"}, + {"valid request", &tx.SimulateRequest{Tx: protoTx}, false, ""}, + } + + for _, tc := range testCases { + s.Run(tc.name, func() { + req, err := val.ClientCtx.JSONMarshaler.MarshalJSON(tc.req) + s.Require().NoError(err) + res, err := rest.PostRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/simulate", val.APIAddress), "application/json", req) + s.Require().NoError(err) + if tc.expErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.SimulateResponse + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) + // Check the result and gas used are correct. + s.Require().Equal(len(result.GetResult().GetEvents()), 4) // 1 transfer, 3 messages. + s.Require().True(result.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes change, just check it's not empty. + } + }) + } +} + +func (s IntegrationTestSuite) TestGetTxEvents_GRPC() { + testCases := []struct { + name string + req *tx.GetTxsEventRequest + expErr bool + expErrMsg string + }{ + { + "nil request", + nil, + true, "request cannot be nil", + }, + { + "empty request", + &tx.GetTxsEventRequest{}, + true, "must declare at least one event to search", + }, + { + "request with dummy event", + &tx.GetTxsEventRequest{Events: []string{"foobar"}}, + true, "event foobar should be of the format: {eventType}.{eventAttribute}={value}", + }, + { + "without pagination", + &tx.GetTxsEventRequest{ + Events: []string{"message.action=send"}, }, + false, "", }, - ) - s.Require().NoError(err) - s.Require().Equal(len(grpcRes.Txs), 1) - s.Require().Equal("foobar", grpcRes.Txs[0].Body.Memo) - - // Query the tx via gRPC without pagination. This used to panic, see - // https://github.com/cosmos/cosmos-sdk/issues/8038. 
- grpcRes, err = s.queryClient.GetTxsEvent( - context.Background(), - &tx.GetTxsEventRequest{ - Events: []string{"message.action=send"}, + { + "with pagination", + &tx.GetTxsEventRequest{ + Events: []string{"message.action=send"}, + Pagination: &query.PageRequest{ + CountTotal: false, + Offset: 0, + Limit: 1, + }, + }, + false, "", }, - ) - // TODO Once https://github.com/cosmos/cosmos-sdk/pull/8029 is merged, this - // should not error anymore. - s.Require().NoError(err) + { + "with multi events", + &tx.GetTxsEventRequest{ + Events: []string{"message.action=send", "message.module=bank"}, + }, + false, "", + }, + } + for _, tc := range testCases { + s.Run(tc.name, func() { + // Query the tx via gRPC. + grpcRes, err := s.queryClient.GetTxsEvent(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + s.Require().GreaterOrEqual(len(grpcRes.Txs), 1) + s.Require().Equal("foobar", grpcRes.Txs[0].Body.Memo) + } + }) + } +} - rpcTests := []struct { +func (s IntegrationTestSuite) TestGetTxEvents_GRPCGateway() { + val := s.network.Validators[0] + testCases := []struct { name string url string - expectErr bool + expErr bool expErrMsg string }{ { @@ -196,15 +254,16 @@ func (s IntegrationTestSuite) TestGetTxEvents() { "", }, } - for _, tc := range rpcTests { + for _, tc := range testCases { s.Run(tc.name, func() { res, err := rest.GetRequest(tc.url) s.Require().NoError(err) - if tc.expectErr { + if tc.expErr { s.Require().Contains(string(res), tc.expErrMsg) } else { var result tx.GetTxsEventResponse - val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) s.Require().GreaterOrEqual(len(result.Txs), 1) s.Require().Equal("foobar", result.Txs[0].Body.Memo) s.Require().NotZero(result.TxResponses[0].Height) @@ -213,52 +272,188 @@ func (s IntegrationTestSuite) TestGetTxEvents() { } } -func (s IntegrationTestSuite) TestGetTx() { +func (s IntegrationTestSuite) TestGetTx_GRPC() { + testCases := []struct { + name string + req *tx.GetTxRequest + expErr bool + expErrMsg string + }{ + {"nil request", nil, true, "request cannot be nil"}, + {"empty request", &tx.GetTxRequest{}, true, "transaction hash cannot be empty"}, + {"request with dummy hash", &tx.GetTxRequest{Hash: "deadbeef"}, true, "tx (DEADBEEF) not found"}, + {"good request", &tx.GetTxRequest{Hash: s.txRes.TxHash}, false, ""}, + } + for _, tc := range testCases { + s.Run(tc.name, func() { + // Query the tx via gRPC. 
+ grpcRes, err := s.queryClient.GetTx(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + s.Require().Equal("foobar", grpcRes.Tx.Body.Memo) + } + }) + } +} + +func (s IntegrationTestSuite) TestGetTx_GRPCGateway() { val := s.network.Validators[0] + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "empty params", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/", val.APIAddress), + true, "transaction hash cannot be empty", + }, + { + "dummy hash", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/%s", val.APIAddress, "deadbeef"), + true, "tx (DEADBEEF) not found", + }, + { + "good hash", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/%s", val.APIAddress, s.txRes.TxHash), + false, "", + }, + } + for _, tc := range testCases { + s.Run(tc.name, func() { + res, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + if tc.expErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.GetTxResponse + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) + s.Require().Equal("foobar", result.Tx.Body.Memo) + s.Require().NotZero(result.TxResponse.Height) + } + }) + } +} - // Create a new MsgSend tx from val to itself. - out, err := bankcli.MsgSendExec( - val.ClientCtx, - val.Address, - val.Address, - sdk.NewCoins( - sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10)), - ), - fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), - fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), - fmt.Sprintf("--%s=%s", flags.FlagFees, sdk.NewCoins(sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10))).String()), - fmt.Sprintf("--gas=%d", flags.DefaultGasLimit), - fmt.Sprintf("--%s=foobar", flags.FlagMemo), - ) +func (s IntegrationTestSuite) TestBroadcastTx_GRPC() { + val := s.network.Validators[0] + txBuilder := s.mkTxBuilder() + txBytes, err := val.ClientCtx.TxConfig.TxEncoder()(txBuilder.GetTx()) s.Require().NoError(err) - var txRes sdk.TxResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(out.Bytes(), &txRes)) - s.Require().Equal(uint32(0), txRes.Code) - s.Require().NoError(s.network.WaitForNextBlock()) + testCases := []struct { + name string + req *tx.BroadcastTxRequest + expErr bool + expErrMsg string + }{ + {"nil request", nil, true, "request cannot be nil"}, + {"empty request", &tx.BroadcastTxRequest{}, true, "invalid empty tx"}, + {"no mode", &tx.BroadcastTxRequest{ + TxBytes: txBytes, + }, true, "supported types: sync, async, block"}, + {"valid request", &tx.BroadcastTxRequest{ + Mode: tx.BroadcastMode_BROADCAST_MODE_SYNC, + TxBytes: txBytes, + }, false, ""}, + } - // Query the tx via gRPC. - grpcRes, err := s.queryClient.GetTx( - context.Background(), - &tx.GetTxRequest{Hash: txRes.TxHash}, - ) + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + // Broadcast the tx via gRPC via the validator's clientCtx (which goes + // through Tendermint). 
+ grpcRes, err := s.queryClient.BroadcastTx(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + s.Require().Equal(uint32(0), grpcRes.TxResponse.Code) + } + }) + } +} + +func (s IntegrationTestSuite) TestBroadcastTx_GRPCGateway() { + val := s.network.Validators[0] + txBuilder := s.mkTxBuilder() + txBytes, err := val.ClientCtx.TxConfig.TxEncoder()(txBuilder.GetTx()) s.Require().NoError(err) - s.Require().Equal("foobar", grpcRes.Tx.Body.Memo) - s.Require().NotZero(grpcRes.TxResponse.Height) - // Query the tx via grpc-gateway. - restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/tx/%s", val.APIAddress, txRes.TxHash)) - s.Require().NoError(err) - var getTxRes tx.GetTxResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getTxRes)) - s.Require().Equal("foobar", grpcRes.Tx.Body.Memo) - s.Require().NotZero(grpcRes.TxResponse.Height) + testCases := []struct { + name string + req *tx.BroadcastTxRequest + expErr bool + expErrMsg string + }{ + {"empty request", &tx.BroadcastTxRequest{}, true, "invalid empty tx"}, + {"no mode", &tx.BroadcastTxRequest{TxBytes: txBytes}, true, "supported types: sync, async, block"}, + {"valid request", &tx.BroadcastTxRequest{ + Mode: tx.BroadcastMode_BROADCAST_MODE_SYNC, + TxBytes: txBytes, + }, false, ""}, + } + + for _, tc := range testCases { + s.Run(tc.name, func() { + req, err := val.ClientCtx.JSONMarshaler.MarshalJSON(tc.req) + s.Require().NoError(err) + res, err := rest.PostRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/txs", val.APIAddress), "application/json", req) + s.Require().NoError(err) + if tc.expErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.BroadcastTxResponse + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) + s.Require().Equal(uint32(0), result.TxResponse.Code) + } + }) + } } func TestIntegrationTestSuite(t *testing.T) { suite.Run(t, new(IntegrationTestSuite)) } +func (s IntegrationTestSuite) mkTxBuilder() client.TxBuilder { + val := s.network.Validators[0] + s.Require().NoError(s.network.WaitForNextBlock()) + + // prepare txBuilder with msg + txBuilder := val.ClientCtx.TxConfig.NewTxBuilder() + feeAmount := sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)} + gasLimit := testdata.NewTestGasLimit() + s.Require().NoError( + txBuilder.SetMsgs(&banktypes.MsgSend{ + FromAddress: val.Address.String(), + ToAddress: val.Address.String(), + Amount: sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)}, + }), + ) + txBuilder.SetFeeAmount(feeAmount) + txBuilder.SetGasLimit(gasLimit) + + // setup txFactory + txFactory := clienttx.Factory{}. + WithChainID(val.ClientCtx.ChainID). + WithKeybase(val.ClientCtx.Keyring). + WithTxConfig(val.ClientCtx.TxConfig). + WithSignMode(signing.SignMode_SIGN_MODE_DIRECT) + + // Sign Tx. + err := authclient.SignTx(txFactory, val.ClientCtx, val.Moniker, txBuilder, false) + s.Require().NoError(err) + + return txBuilder +} + // txBuilderToProtoTx converts a txBuilder into a proto tx.Tx. 
func txBuilderToProtoTx(txBuilder client.TxBuilder) (*tx.Tx, error) { // nolint intoAnyTx, ok := txBuilder.(codectypes.IntoAny) From bac31becb3deb8f0a59c6484084e89bab978b2a8 Mon Sep 17 00:00:00 2001 From: Aleksandr Bezobchuk Date: Wed, 2 Dec 2020 13:08:16 -0500 Subject: [PATCH 25/40] evidence: fix query command (#8066) * evidence: fix query command * evidence: add CLI tests Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- x/evidence/client/cli/cli_test.go | 80 +++++++++++++++++++++++++++++++ x/evidence/client/cli/query.go | 8 +--- 2 files changed, 82 insertions(+), 6 deletions(-) create mode 100644 x/evidence/client/cli/cli_test.go diff --git a/x/evidence/client/cli/cli_test.go b/x/evidence/client/cli/cli_test.go new file mode 100644 index 000000000..50ebfaa7a --- /dev/null +++ b/x/evidence/client/cli/cli_test.go @@ -0,0 +1,80 @@ +package cli_test + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/suite" + + clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" + testnet "github.com/cosmos/cosmos-sdk/testutil/network" + "github.com/cosmos/cosmos-sdk/x/evidence/client/cli" +) + +type IntegrationTestSuite struct { + suite.Suite + + cfg testnet.Config + network *testnet.Network +} + +func (s *IntegrationTestSuite) SetupSuite() { + s.T().Log("setting up integration test suite") + + cfg := testnet.DefaultConfig() + cfg.NumValidators = 1 + + s.cfg = cfg + s.network = testnet.New(s.T(), cfg) + + _, err := s.network.WaitForHeight(1) + s.Require().NoError(err) +} + +func (s *IntegrationTestSuite) TearDownSuite() { + s.T().Log("tearing down integration test suite") + s.network.Cleanup() +} + +func TestIntegrationTestSuite(t *testing.T) { + suite.Run(t, new(IntegrationTestSuite)) +} + +func (s *IntegrationTestSuite) TestGetQueryCmd() { + val := s.network.Validators[0] + + testCases := map[string]struct { + args []string + expectedOutput string + expectErr bool + }{ + "non-existant evidence": { + []string{"DF0C23E8634E480F84B9D5674A7CDC9816466DEC28A3358F73260F68D28D7660"}, + "evidence DF0C23E8634E480F84B9D5674A7CDC9816466DEC28A3358F73260F68D28D7660 not found", + true, + }, + "all evidence (default pagination)": { + []string{}, + "evidence: []\npagination:\n next_key: null\n total: \"0\"", + false, + }, + } + + for name, tc := range testCases { + tc := tc + + s.Run(name, func() { + cmd := cli.GetQueryCmd() + clientCtx := val.ClientCtx + + out, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, tc.args) + if tc.expectErr { + s.Require().Error(err) + } else { + s.Require().NoError(err) + } + + s.Require().Contains(strings.TrimSpace(out.String()), tc.expectedOutput) + }) + } +} diff --git a/x/evidence/client/cli/query.go b/x/evidence/client/cli/query.go index 313656485..c6ec9f517 100644 --- a/x/evidence/client/cli/query.go +++ b/x/evidence/client/cli/query.go @@ -47,18 +47,14 @@ $ %s query %s --page=2 --limit=50 // can be queried for by hash or paginated evidence can be returned. 
func QueryEvidenceCmd() func(*cobra.Command, []string) error { return func(cmd *cobra.Command, args []string) error { - if err := client.ValidateCmd(cmd, args); err != nil { - return err - } - clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadQueryCommandFlags(clientCtx, cmd.Flags()) if err != nil { return err } - if hash := args[0]; hash != "" { - return queryEvidence(clientCtx, hash) + if len(args) > 0 { + return queryEvidence(clientCtx, args[0]) } pageReq, err := client.ReadPageRequest(cmd.Flags()) From 6040d1b88b648375eb5c93de27c897ce71d3f231 Mon Sep 17 00:00:00 2001 From: Robert Zaremba Date: Thu, 3 Dec 2020 09:39:11 +0100 Subject: [PATCH 26/40] fix: Metadata is not initialized in x/bank InitGenesis #7951 (#8065) * fix: Metadata is not initialized in x/bank InitGenesis #7951 * remove commented code Co-authored-by: Aleksandr Bezobchuk --- x/bank/keeper/genesis.go | 6 +++++- x/bank/keeper/genesis_test.go | 16 ++++++++++++++-- x/bank/keeper/keeper.go | 3 +-- x/bank/module.go | 6 +++--- 4 files changed, 23 insertions(+), 8 deletions(-) diff --git a/x/bank/keeper/genesis.go b/x/bank/keeper/genesis.go index 7397d8d5b..d30415c6a 100644 --- a/x/bank/keeper/genesis.go +++ b/x/bank/keeper/genesis.go @@ -8,7 +8,7 @@ import ( ) // InitGenesis initializes the bank module's state from a given genesis state. -func (k BaseKeeper) InitGenesis(ctx sdk.Context, genState types.GenesisState) { +func (k BaseKeeper) InitGenesis(ctx sdk.Context, genState *types.GenesisState) { k.SetParams(ctx, genState.Params) var totalSupply sdk.Coins @@ -32,6 +32,10 @@ func (k BaseKeeper) InitGenesis(ctx sdk.Context, genState types.GenesisState) { } k.SetSupply(ctx, types.NewSupply(genState.Supply)) + + for _, meta := range genState.DenomMetadata { + k.SetDenomMetaData(ctx, meta) + } } // ExportGenesis returns the bank module's genesis state. 
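With the loop over `genState.DenomMetadata` added above, denom metadata supplied at genesis is now written to state during `InitGenesis` rather than silently dropped. A small sketch of building such a genesis state, using illustrative metadata values and only the fields exercised by the accompanying test:

```go
package genexample

import (
	sdk "github.com/cosmos/cosmos-sdk/types"
	banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
)

// exampleBankGenesis returns a bank genesis state carrying denom metadata.
// After this patch, InitGenesis persists each Metadata entry via
// SetDenomMetaData instead of ignoring it. The values below are illustrative.
func exampleBankGenesis() *banktypes.GenesisState {
	genState := banktypes.DefaultGenesisState()
	genState.DenomMetadata = []banktypes.Metadata{
		{
			Description: "the staking token",
			Base:        sdk.DefaultBondDenom,
			Display:     sdk.DefaultBondDenom,
		},
	}

	return genState
}
```

Each entry is persisted through `SetDenomMetaData`, which is exactly what the new `TestInitGenesis` below verifies by reading it back with `GetDenomMetaData`.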
diff --git a/x/bank/keeper/genesis_test.go b/x/bank/keeper/genesis_test.go index 8bd46d681..bdb8cdc30 100644 --- a/x/bank/keeper/genesis_test.go +++ b/x/bank/keeper/genesis_test.go @@ -37,8 +37,20 @@ func (suite *IntegrationTestSuite) getTestBalances() []types.Balance { addr2, _ := sdk.AccAddressFromBech32("cosmos1f9xjhxm0plzrh9cskf4qee4pc2xwp0n0556gh0") addr1, _ := sdk.AccAddressFromBech32("cosmos1fl48vsnmsdzcv85q5d2q4z5ajdha8yu34mf0eh") return []types.Balance{ - {addr2.String(), sdk.Coins{sdk.NewInt64Coin("testcoin1", 32), sdk.NewInt64Coin("testcoin2", 34)}}, - {addr1.String(), sdk.Coins{sdk.NewInt64Coin("testcoin3", 10)}}, + {Address: addr2.String(), Coins: sdk.Coins{sdk.NewInt64Coin("testcoin1", 32), sdk.NewInt64Coin("testcoin2", 34)}}, + {Address: addr1.String(), Coins: sdk.Coins{sdk.NewInt64Coin("testcoin3", 10)}}, } } + +func (suite *IntegrationTestSuite) TestInitGenesis() { + require := suite.Require() + m := types.Metadata{Description: sdk.DefaultBondDenom, Base: sdk.DefaultBondDenom, Display: sdk.DefaultBondDenom} + g := types.DefaultGenesisState() + g.DenomMetadata = []types.Metadata{m} + bk := suite.app.BankKeeper + bk.InitGenesis(suite.ctx, g) + + m2 := bk.GetDenomMetaData(suite.ctx, m.Base) + require.Equal(m, m2) +} diff --git a/x/bank/keeper/keeper.go b/x/bank/keeper/keeper.go index 6594398aa..2fd76ea58 100644 --- a/x/bank/keeper/keeper.go +++ b/x/bank/keeper/keeper.go @@ -22,7 +22,7 @@ var _ Keeper = (*BaseKeeper)(nil) type Keeper interface { SendKeeper - InitGenesis(sdk.Context, types.GenesisState) + InitGenesis(sdk.Context, *types.GenesisState) ExportGenesis(sdk.Context) *types.GenesisState GetSupply(ctx sdk.Context) exported.SupplyI @@ -186,7 +186,6 @@ func (k BaseKeeper) GetDenomMetaData(ctx sdk.Context, denom string) types.Metada store = prefix.NewStore(store, types.DenomMetadataKey(denom)) bz := store.Get([]byte(denom)) - var metadata types.Metadata k.cdc.MustUnmarshalBinaryBare(bz, &metadata) diff --git a/x/bank/module.go b/x/bank/module.go index 32f7431f2..bc998634c 100644 --- a/x/bank/module.go +++ b/x/bank/module.go @@ -52,7 +52,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONMarshaler) json.RawMessage { } // ValidateGenesis performs genesis state validation for the bank module. -func (AppModuleBasic) ValidateGenesis(cdc codec.JSONMarshaler, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONMarshaler, _ client.TxEncodingConfig, bz json.RawMessage) error { var data types.GenesisState if err := cdc.UnmarshalJSON(bz, &data); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -140,7 +140,7 @@ func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONMarshaler, data j cdc.MustUnmarshalJSON(data, &genesisState) telemetry.MeasureSince(start, "InitGenesis", "crisis", "unmarshal") - am.keeper.InitGenesis(ctx, genesisState) + am.keeper.InitGenesis(ctx, &genesisState) return []abci.ValidatorUpdate{} } @@ -170,7 +170,7 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // ProposalContents doesn't return any content functions for governance proposals. 
-func (AppModule) ProposalContents(simState module.SimulationState) []simtypes.WeightedProposalContent { +func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { return nil } From 183593ff6d35a78afecb10781547ddd6258dd48d Mon Sep 17 00:00:00 2001 From: SaReN Date: Thu, 3 Dec 2020 15:18:17 +0530 Subject: [PATCH 27/40] Update signbatch multisig to work online (#7801) * add test for signbatch multisig * update test * fix sign batch multisig * update offline usage * address comments Co-authored-by: Alessio Treglia --- x/auth/client/cli/cli_test.go | 65 +++++++++++++++++++++++++++++++++++ x/auth/client/cli/tx_sign.go | 5 ++- 2 files changed, 69 insertions(+), 1 deletion(-) diff --git a/x/auth/client/cli/cli_test.go b/x/auth/client/cli/cli_test.go index 4ba383139..e6cc5230e 100644 --- a/x/auth/client/cli/cli_test.go +++ b/x/auth/client/cli/cli_test.go @@ -735,6 +735,71 @@ func (s *IntegrationTestSuite) TestCLIMultisign() { s.Require().NoError(s.network.WaitForNextBlock()) } +func (s *IntegrationTestSuite) TestSignBatchMultisig() { + val := s.network.Validators[0] + + // Fetch 2 accounts and a multisig. + account1, err := val.ClientCtx.Keyring.Key("newAccount1") + s.Require().NoError(err) + account2, err := val.ClientCtx.Keyring.Key("newAccount2") + s.Require().NoError(err) + multisigInfo, err := val.ClientCtx.Keyring.Key("multi") + + // Send coins from validator to multisig. + sendTokens := sdk.NewInt64Coin(s.cfg.BondDenom, 10) + _, err = bankcli.MsgSendExec( + val.ClientCtx, + val.Address, + multisigInfo.GetAddress(), + sdk.NewCoins(sendTokens), + fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagFees, sdk.NewCoins(sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10))).String()), + fmt.Sprintf("--gas=%d", flags.DefaultGasLimit), + ) + s.Require().NoError(err) + s.Require().NoError(s.network.WaitForNextBlock()) + + generatedStd, err := bankcli.MsgSendExec( + val.ClientCtx, + multisigInfo.GetAddress(), + val.Address, + sdk.NewCoins( + sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(1)), + ), + fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagFees, sdk.NewCoins(sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10))).String()), + fmt.Sprintf("--%s=true", flags.FlagGenerateOnly), + ) + s.Require().NoError(err) + + // Write the output to disk + filename, cleanup1 := testutil.WriteToNewTempFile(s.T(), strings.Repeat(generatedStd.String(), 1)) + defer cleanup1() + + val.ClientCtx.HomeDir = strings.Replace(val.ClientCtx.HomeDir, "simd", "simcli", 1) + + // sign-batch file + res, err := authtest.TxSignBatchExec(val.ClientCtx, account1.GetAddress(), filename.Name(), fmt.Sprintf("--%s=%s", flags.FlagChainID, val.ClientCtx.ChainID), "--multisig", multisigInfo.GetAddress().String()) + s.Require().NoError(err) + s.Require().Equal(1, len(strings.Split(strings.Trim(res.String(), "\n"), "\n"))) + // write sigs to file + file1, cleanup2 := testutil.WriteToNewTempFile(s.T(), res.String()) + defer cleanup2() + + // sign-batch file with account2 + res, err = authtest.TxSignBatchExec(val.ClientCtx, account2.GetAddress(), filename.Name(), fmt.Sprintf("--%s=%s", flags.FlagChainID, val.ClientCtx.ChainID), "--multisig", multisigInfo.GetAddress().String()) + s.Require().NoError(err) + s.Require().Equal(1, len(strings.Split(strings.Trim(res.String(), "\n"), "\n"))) + + // write sigs to 
file2 + file2, cleanup3 := testutil.WriteToNewTempFile(s.T(), res.String()) + defer cleanup3() + res, err = authtest.TxMultiSignExec(val.ClientCtx, multisigInfo.GetName(), filename.Name(), file1.Name(), file2.Name()) + s.Require().NoError(err) +} + func (s *IntegrationTestSuite) TestGetAccountCmd() { val := s.network.Validators[0] _, _, addr1 := testdata.KeyTestPubAddr() diff --git a/x/auth/client/cli/tx_sign.go b/x/auth/client/cli/tx_sign.go index 7046c495d..a9c698078 100644 --- a/x/auth/client/cli/tx_sign.go +++ b/x/auth/client/cli/tx_sign.go @@ -119,7 +119,10 @@ func makeSignBatchCmd() func(cmd *cobra.Command, args []string) error { return err } } else { - err = authclient.SignTxWithSignerAddress(txFactory, clientCtx, multisigAddr, clientCtx.GetFromName(), txBuilder, true) + if txFactory.SignMode() == signing.SignMode_SIGN_MODE_UNSPECIFIED { + txFactory = txFactory.WithSignMode(signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON) + } + err = authclient.SignTxWithSignerAddress(txFactory, clientCtx, multisigAddr, clientCtx.GetFromName(), txBuilder, clientCtx.Offline) } if err != nil { From 5291a8ff31212e1a91a98aaf5d865a1d9cfeddd1 Mon Sep 17 00:00:00 2001 From: Aleksandr Bezobchuk Date: Thu, 3 Dec 2020 18:17:21 -0500 Subject: [PATCH 28/40] Refactor Logging using Zerolog (#8072) * init commit * server: use flags * server: godoc++ * updates * baseapp: update logging * logging updates * x/bank: update logging * logging updates * lint++ * logging updates * logging updates * logging updates * logging updates * cl++ --- CHANGELOG.md | 13 +++- baseapp/abci.go | 63 ++++++++++++++------ client/flags/flags.go | 4 ++ contrib/images/simd-env/Dockerfile | 2 +- go.mod | 1 + go.sum | 4 ++ server/logger.go | 55 +++++++++++++++++ server/start.go | 6 +- server/util.go | 42 +++++++------ simapp/simd/cmd/root.go | 5 +- x/auth/keeper/keeper.go | 2 +- x/bank/keeper/keeper.go | 5 +- x/bank/keeper/view.go | 2 +- x/crisis/keeper/keeper.go | 4 +- x/distribution/keeper/delegation.go | 10 +++- x/distribution/keeper/keeper.go | 2 +- x/distribution/keeper/proposal_handler.go | 7 ++- x/evidence/keeper/keeper.go | 2 +- x/gov/abci.go | 20 +++---- x/gov/keeper/keeper.go | 2 +- x/ibc/applications/transfer/keeper/keeper.go | 4 +- x/ibc/core/02-client/keeper/keeper.go | 2 +- x/ibc/core/03-connection/keeper/keeper.go | 4 +- x/ibc/core/04-channel/keeper/keeper.go | 3 +- x/ibc/core/05-port/keeper/keeper.go | 2 +- x/mint/keeper/keeper.go | 4 +- x/params/keeper/keeper.go | 4 +- x/slashing/keeper/infractions.go | 32 +++++++--- x/slashing/keeper/keeper.go | 2 +- x/staking/keeper/keeper.go | 2 +- x/staking/keeper/slash.go | 32 +++++----- x/upgrade/keeper/keeper.go | 3 +- 32 files changed, 231 insertions(+), 114 deletions(-) create mode 100644 server/logger.go diff --git a/CHANGELOG.md b/CHANGELOG.md index d4cf410ec..5be5c9bdd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,13 @@ Ref: https://keepachangelog.com/en/1.0.0/ ## [Unreleased] ### Improvements + +* (logging) [\#8072](https://github.com/cosmos/cosmos-sdk/pull/8072) Refactor logging: + * Use [zerolog](https://github.com/rs/zerolog) over Tendermint's go-kit logging wrapper. + * Introduce Tendermint's `--log_format=plain|json` flag. Using format `json` allows for emitting structured JSON + logs which can be consumed by an external logging facility (e.g. Loggly). Both formats log to STDERR. + * The existing `--log_level` flag and it's default value now solely relates to the global logging + level (e.g. `info`, `debug`, etc...) instead of `:`. 
* (crypto) [\#7987](https://github.com/cosmos/cosmos-sdk/pull/7987) Fix the inconsistency of CryptoCdc, only use `codec/legacy.Cdc`. * (SDK) [\#7925](https://github.com/cosmos/cosmos-sdk/pull/7925) Updated dependencies to use gRPC v1.33.2 * Updated gRPC dependency to v1.33.2 @@ -44,6 +51,7 @@ Ref: https://keepachangelog.com/en/1.0.0/ * (version) [\#7848](https://github.com/cosmos/cosmos-sdk/pull/7848) [\#7941](https://github.com/cosmos/cosmos-sdk/pull/7941) `version --long` output now shows the list of build dependencies and replaced build dependencies. ### State Machine Breaking Changes + * (x/upgrade) [\#7979](https://github.com/cosmos/cosmos-sdk/pull/7979) keeper pubkey storage serialization migration from bech32 to protobuf. ### Bug Fixes @@ -57,6 +65,7 @@ Ref: https://keepachangelog.com/en/1.0.0/ * (x/staking) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The `TmConsPubKey` method on ValidatorI has been removed and replaced instead by `ConsPubKey` (which returns a SDK `cryptotypes.PubKey`) and `TmConsPublicKey` (which returns a Tendermint proto PublicKey). ### Improvements + * (tendermint) [\#7828](https://github.com/cosmos/cosmos-sdk/pull/7828) Update tendermint dependency to v0.34.0-rc6 ## [v0.40.0-rc2](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc2) - 2020-11-02 @@ -89,18 +98,16 @@ Ref: https://keepachangelog.com/en/1.0.0/ * __Modules__ * `x/crisis` has a new function: `AddModuleInitFlags`, which will register optional crisis module flags for the start command. - ### Bug Fixes * (client) [\#7699](https://github.com/cosmos/cosmos-sdk/pull/7699) Fix panic in context when setting invalid nodeURI. `WithNodeURI` does not set the `Client` in the context. * (x/gov) [#7641](https://github.com/cosmos/cosmos-sdk/pull/7641) Fix tally calculation precision error. -### Improvements +### Improvements * (rest) [#7649](https://github.com/cosmos/cosmos-sdk/pull/7649) Return an unsigned tx in legacy GET /tx endpoint when signature conversion fails * (cli) [#7764](https://github.com/cosmos/cosmos-sdk/pull/7764) Update x/banking and x/crisis InitChain to improve node startup time - ## [v0.40.0-rc1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc1) - 2020-10-19 ### Client Breaking Changes diff --git a/baseapp/abci.go b/baseapp/abci.go index b46afd6c7..aaa48bb8c 100644 --- a/baseapp/abci.go +++ b/baseapp/abci.go @@ -291,7 +291,7 @@ func (app *BaseApp) Commit() (res abci.ResponseCommit) { // MultiStore (app.cms) so when Commit() is called is persists those values. app.deliverState.ms.Write() commitID := app.cms.Commit() - app.logger.Debug("Commit synced", "commit", fmt.Sprintf("%X", commitID)) + app.logger.Info("commit synced", "commit", fmt.Sprintf("%X", commitID)) // Reset the Check state to the latest committed. 
// @@ -358,22 +358,27 @@ func (app *BaseApp) snapshot(height int64) { app.logger.Info("snapshot manager not configured") return } - app.logger.Info("Creating state snapshot", "height", height) + + app.logger.Info("creating state snapshot", "height", height) + snapshot, err := app.snapshotManager.Create(uint64(height)) if err != nil { - app.logger.Error("Failed to create state snapshot", "height", height, "err", err) + app.logger.Error("failed to create state snapshot", "height", height, "err", err) return } - app.logger.Info("Completed state snapshot", "height", height, "format", snapshot.Format) + + app.logger.Info("completed state snapshot", "height", height, "format", snapshot.Format) if app.snapshotKeepRecent > 0 { - app.logger.Debug("Pruning state snapshots") + app.logger.Debug("pruning state snapshots") + pruned, err := app.snapshotManager.Prune(app.snapshotKeepRecent) if err != nil { app.logger.Error("Failed to prune state snapshots", "err", err) return } - app.logger.Debug("Pruned state snapshots", "pruned", pruned) + + app.logger.Debug("pruned state snapshots", "pruned", pruned) } } @@ -433,13 +438,14 @@ func (app *BaseApp) ListSnapshots(req abci.RequestListSnapshots) abci.ResponseLi snapshots, err := app.snapshotManager.List() if err != nil { - app.logger.Error("Failed to list snapshots", "err", err) + app.logger.Error("failed to list snapshots", "err", err) return resp } + for _, snapshot := range snapshots { abciSnapshot, err := snapshot.ToABCI() if err != nil { - app.logger.Error("Failed to list snapshots", "err", err) + app.logger.Error("failed to list snapshots", "err", err) return resp } resp.Snapshots = append(resp.Snapshots, &abciSnapshot) @@ -455,8 +461,13 @@ func (app *BaseApp) LoadSnapshotChunk(req abci.RequestLoadSnapshotChunk) abci.Re } chunk, err := app.snapshotManager.LoadChunk(req.Height, req.Format, req.Chunk) if err != nil { - app.logger.Error("Failed to load snapshot chunk", "height", req.Height, "format", req.Format, - "chunk", req.Chunk, "err") + app.logger.Error( + "failed to load snapshot chunk", + "height", req.Height, + "format", req.Format, + "chunk", req.Chunk, + "err", err, + ) return abci.ResponseLoadSnapshotChunk{} } return abci.ResponseLoadSnapshotChunk{Chunk: chunk} @@ -470,15 +481,16 @@ func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOf } if req.Snapshot == nil { - app.logger.Error("Received nil snapshot") + app.logger.Error("received nil snapshot") return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT} } snapshot, err := snapshottypes.SnapshotFromABCI(req.Snapshot) if err != nil { - app.logger.Error("Failed to decode snapshot metadata", "err", err) + app.logger.Error("failed to decode snapshot metadata", "err", err) return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT} } + err = app.snapshotManager.Restore(snapshot) switch { case err == nil: @@ -488,13 +500,22 @@ func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOf return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT_FORMAT} case errors.Is(err, snapshottypes.ErrInvalidMetadata): - app.logger.Error("Rejecting invalid snapshot", "height", req.Snapshot.Height, - "format", req.Snapshot.Format, "err", err) + app.logger.Error( + "rejecting invalid snapshot", + "height", req.Snapshot.Height, + "format", req.Snapshot.Format, + "err", err, + ) return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT} default: - app.logger.Error("Failed to restore snapshot", 
"height", req.Snapshot.Height, - "format", req.Snapshot.Format, "err", err) + app.logger.Error( + "failed to restore snapshot", + "height", req.Snapshot.Height, + "format", req.Snapshot.Format, + "err", err, + ) + // We currently don't support resetting the IAVL stores and retrying a different snapshot, // so we ask Tendermint to abort all snapshot restoration. return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_ABORT} @@ -514,8 +535,12 @@ func (app *BaseApp) ApplySnapshotChunk(req abci.RequestApplySnapshotChunk) abci. return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ACCEPT} case errors.Is(err, snapshottypes.ErrChunkHashMismatch): - app.logger.Error("Chunk checksum mismatch, rejecting sender and requesting refetch", - "chunk", req.Index, "sender", req.Sender, "err", err) + app.logger.Error( + "chunk checksum mismatch; rejecting sender and requesting refetch", + "chunk", req.Index, + "sender", req.Sender, + "err", err, + ) return abci.ResponseApplySnapshotChunk{ Result: abci.ResponseApplySnapshotChunk_RETRY, RefetchChunks: []uint32{req.Index}, @@ -523,7 +548,7 @@ func (app *BaseApp) ApplySnapshotChunk(req abci.RequestApplySnapshotChunk) abci. } default: - app.logger.Error("Failed to restore snapshot", "err", err) + app.logger.Error("failed to restore snapshot", "err", err) return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ABORT} } } diff --git a/client/flags/flags.go b/client/flags/flags.go index 2131c3763..72bcabcd6 100644 --- a/client/flags/flags.go +++ b/client/flags/flags.go @@ -65,6 +65,10 @@ const ( FlagCountTotal = "count-total" FlagTimeoutHeight = "timeout-height" FlagKeyAlgorithm = "algo" + + // Tendermint logging flags + FlagLogLevel = "log_level" + FlagLogFormat = "log_format" ) // LineBreak can be included in a command list to provide a blank line diff --git a/contrib/images/simd-env/Dockerfile b/contrib/images/simd-env/Dockerfile index 3be7eb5c8..1ccaeac65 100644 --- a/contrib/images/simd-env/Dockerfile +++ b/contrib/images/simd-env/Dockerfile @@ -12,7 +12,7 @@ VOLUME [ /simd ] WORKDIR /simd EXPOSE 26656 26657 ENTRYPOINT ["/usr/bin/wrapper.sh"] -CMD ["start"] +CMD ["start", "--log_format", "plain"] STOPSIGNAL SIGTERM COPY wrapper.sh /usr/bin/wrapper.sh diff --git a/go.mod b/go.mod index 8163ebbfb..b437a1006 100644 --- a/go.mod +++ b/go.mod @@ -35,6 +35,7 @@ require ( github.com/prometheus/common v0.15.0 github.com/rakyll/statik v0.1.7 github.com/regen-network/cosmos-proto v0.3.0 + github.com/rs/zerolog v1.20.0 github.com/spf13/afero v1.2.2 // indirect github.com/spf13/cast v1.3.1 github.com/spf13/cobra v1.1.1 diff --git a/go.sum b/go.sum index b6ae47006..4542327b0 100644 --- a/go.sum +++ b/go.sum @@ -481,6 +481,9 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.20.0 h1:38k9hgtUBdxFwE34yS8rTHmHBa4eN16E4DJlv177LNs= +github.com/rs/zerolog v1.20.0/go.mod h1:IzD0RJ65iWH0w97OQQebJEvTZYvsCUm9WVLWBQrJRjo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize 
v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= @@ -726,6 +729,7 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= diff --git a/server/logger.go b/server/logger.go new file mode 100644 index 000000000..e6f6f8c11 --- /dev/null +++ b/server/logger.go @@ -0,0 +1,55 @@ +package server + +import ( + "github.com/rs/zerolog" + tmlog "github.com/tendermint/tendermint/libs/log" +) + +var _ tmlog.Logger = (*ZeroLogWrapper)(nil) + +// ZeroLogWrapper provides a wrapper around a zerolog.Logger instance. It implements +// Tendermint's Logger interface. +type ZeroLogWrapper struct { + zerolog.Logger +} + +// Info implements Tendermint's Logger interface and logs with level INFO. A set +// of key/value tuples may be provided to add context to the log. The number of +// tuples must be even and the key of the tuple must be a string. +func (z ZeroLogWrapper) Info(msg string, keyVals ...interface{}) { + z.Logger.Info().Fields(getLogFields(keyVals...)).Msg(msg) +} + +// Error implements Tendermint's Logger interface and logs with level ERR. A set +// of key/value tuples may be provided to add context to the log. The number of +// tuples must be even and the key of the tuple must be a string. +func (z ZeroLogWrapper) Error(msg string, keyVals ...interface{}) { + z.Logger.Error().Fields(getLogFields(keyVals...)).Msg(msg) +} + +// Debug implements Tendermint's Logger interface and logs with level DEBUG. A set +// of key/value tuples may be provided to add context to the log. The number of +// tuples must be even and the key of the tuple must be a string. +func (z ZeroLogWrapper) Debug(msg string, keyVals ...interface{}) { + z.Logger.Debug().Fields(getLogFields(keyVals...)).Msg(msg) +} + +// With returns a new wrapped logger with additional context provided by a set +// of key/value tuples. The number of tuples must be even and the key of the +// tuple must be a string. 
+func (z ZeroLogWrapper) With(keyVals ...interface{}) tmlog.Logger { + return ZeroLogWrapper{z.Logger.With().Fields(getLogFields(keyVals...)).Logger()} +} + +func getLogFields(keyVals ...interface{}) map[string]interface{} { + if len(keyVals)%2 != 0 { + return nil + } + + fields := make(map[string]interface{}) + for i := 0; i < len(keyVals); i += 2 { + fields[keyVals[i].(string)] = keyVals[i+1] + } + + return fields +} diff --git a/server/start.go b/server/start.go index e4ecd47de..d3af7db42 100644 --- a/server/start.go +++ b/server/start.go @@ -240,17 +240,17 @@ func startInProcess(ctx *Context, clientCtx client.Context, appCreator types.App genDocProvider, node.DefaultDBProvider, node.DefaultMetricsProvider(cfg.Instrumentation), - ctx.Logger.With("module", "node"), + ctx.Logger, ) if err != nil { return err } - ctx.Logger.Debug("Initialization: tmNode created") + ctx.Logger.Debug("initialization: tmNode created") if err := tmNode.Start(); err != nil { return err } - ctx.Logger.Debug("Initialization: tmNode started") + ctx.Logger.Debug("initialization: tmNode started") config := config.GetConfig(ctx.Viper) diff --git a/server/util.go b/server/util.go index 7118d5b8d..e461a12fb 100644 --- a/server/util.go +++ b/server/util.go @@ -14,12 +14,12 @@ import ( "syscall" "time" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" "github.com/spf13/cobra" "github.com/spf13/viper" tmcfg "github.com/tendermint/tendermint/config" - tmcli "github.com/tendermint/tendermint/libs/cli" - tmflags "github.com/tendermint/tendermint/libs/cli/flags" - "github.com/tendermint/tendermint/libs/log" + tmlog "github.com/tendermint/tendermint/libs/log" dbm "github.com/tendermint/tm-db" "github.com/cosmos/cosmos-sdk/client/flags" @@ -39,7 +39,7 @@ const ServerContextKey = sdk.ContextKey("server.context") type Context struct { Viper *viper.Viper Config *tmcfg.Config - Logger log.Logger + Logger tmlog.Logger } // ErrorCode contains the exit code for server exit. 
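To make the effect of the new `--log_level` and `--log_format` handling concrete, here is a condensed sketch of how those flag values turn into the `ZeroLogWrapper` that `InterceptConfigsPreRunHandler` now builds. The hard-coded flag values and the `newLogger` helper are assumptions for illustration only; in the SDK the values come from Viper.

```go
package main

import (
	"io"
	"os"

	"github.com/rs/zerolog"

	"github.com/cosmos/cosmos-sdk/server"
)

// newLogger mirrors the logger construction now done by the server package:
// --log_format selects the writer, --log_level selects the zerolog level.
func newLogger(level, format string) (server.ZeroLogWrapper, error) {
	var out io.Writer = os.Stderr
	if format == "plain" {
		out = zerolog.ConsoleWriter{Out: os.Stderr}
	}

	lvl, err := zerolog.ParseLevel(level)
	if err != nil {
		return server.ZeroLogWrapper{}, err
	}

	return server.ZeroLogWrapper{
		Logger: zerolog.New(out).Level(lvl).With().Timestamp().Logger(),
	}, nil
}

func main() {
	logger, err := newLogger("info", "json")
	if err != nil {
		panic(err)
	}

	// Key/value pairs become structured fields via getLogFields.
	logger.Info("minted coins from module account", "module", "x/bank", "amount", "10stake")
}
```

With `--log_format=json` each call emits one structured JSON line on STDERR; with `plain`, zerolog's `ConsoleWriter` renders the same fields in a human-readable form.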
@@ -52,10 +52,14 @@ func (e ErrorCode) Error() string {
 }
 
 func NewDefaultContext() *Context {
-	return NewContext(viper.New(), tmcfg.DefaultConfig(), log.NewTMLogger(log.NewSyncWriter(os.Stdout)))
+	return NewContext(
+		viper.New(),
+		tmcfg.DefaultConfig(),
+		ZeroLogWrapper{log.Logger},
+	)
 }
 
-func NewContext(v *viper.Viper, config *tmcfg.Config, logger log.Logger) *Context {
+func NewContext(v *viper.Viper, config *tmcfg.Config, logger tmlog.Logger) *Context {
 	return &Context{v, config, logger}
 }
 
@@ -86,27 +90,29 @@ func InterceptConfigsPreRunHandler(cmd *cobra.Command) error {
 	serverCtx.Viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_", "-", "_"))
 	serverCtx.Viper.AutomaticEnv()
 
-	// Intercept configuration files, using both Viper instances separately
+	// intercept configuration files, using both Viper instances separately
 	config, err := interceptConfigs(serverCtx.Viper)
 	if err != nil {
 		return err
 	}
-	// Return value is a tendermint configuration object
+
+	// return value is a tendermint configuration object
 	serverCtx.Config = config
 
-	logger := log.NewTMLogger(log.NewSyncWriter(os.Stdout))
-	logger, err = tmflags.ParseLogLevel(config.LogLevel, logger, tmcfg.DefaultLogLevel())
+	var logWriter io.Writer
+	if strings.ToLower(serverCtx.Viper.GetString(flags.FlagLogFormat)) == tmcfg.LogFormatPlain {
+		logWriter = zerolog.ConsoleWriter{Out: os.Stderr}
+	} else {
+		logWriter = os.Stderr
+	}
+
+	logLvlStr := serverCtx.Viper.GetString(flags.FlagLogLevel)
+	logLvl, err := zerolog.ParseLevel(logLvlStr)
 	if err != nil {
-		return err
+		return fmt.Errorf("failed to parse log level (%s): %w", logLvlStr, err)
 	}
 
-	// Check if the tendermint flag for trace logging is set
-	// if it is then setup a tracing logger in this app as well
-	if serverCtx.Viper.GetBool(tmcli.TraceFlag) {
-		logger = log.NewTracingLogger(logger)
-	}
-
-	serverCtx.Logger = logger.With("module", "main")
+	serverCtx.Logger = ZeroLogWrapper{zerolog.New(logWriter).Level(logLvl).With().Timestamp().Logger()}
 
 	return SetCmdServerContext(cmd, serverCtx)
 }
diff --git a/simapp/simd/cmd/root.go b/simapp/simd/cmd/root.go
index dd6132095..847a31f21 100644
--- a/simapp/simd/cmd/root.go
+++ b/simapp/simd/cmd/root.go
@@ -6,8 +6,10 @@ import (
 	"os"
 	"path/filepath"
 
+	"github.com/rs/zerolog"
 	"github.com/spf13/cast"
 	"github.com/spf13/cobra"
+	tmcfg "github.com/tendermint/tendermint/config"
 	tmcli "github.com/tendermint/tendermint/libs/cli"
 	"github.com/tendermint/tendermint/libs/log"
 	dbm "github.com/tendermint/tm-db"
@@ -79,7 +81,8 @@ func Execute(rootCmd *cobra.Command) error {
 	ctx = context.WithValue(ctx, client.ClientContextKey, &client.Context{})
 	ctx = context.WithValue(ctx, server.ServerContextKey, srvCtx)
 
-	rootCmd.PersistentFlags().String("log_level", srvCtx.Config.LogLevel, "The logging level in the format of <module>:<level>,...")
+	rootCmd.PersistentFlags().String(flags.FlagLogLevel, zerolog.InfoLevel.String(), "The logging level (trace|debug|info|warn|error|fatal|panic)")
+	rootCmd.PersistentFlags().String(flags.FlagLogFormat, tmcfg.LogFormatJSON, "The logging format (json|plain)")
 
 	executor := tmcli.PrepareBaseCmd(rootCmd, "", simapp.DefaultNodeHome)
 	return executor.ExecuteContext(ctx)
diff --git a/x/auth/keeper/keeper.go b/x/auth/keeper/keeper.go
index 75724f7d3..abc1cdabf 100644
--- a/x/auth/keeper/keeper.go
+++ b/x/auth/keeper/keeper.go
@@ -86,7 +86,7 @@ func NewAccountKeeper(
 
 // Logger returns a module-specific logger.
 func (ak AccountKeeper) Logger(ctx sdk.Context) log.Logger {
-	return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+	return ctx.Logger().With("module", "x/"+types.ModuleName)
 }
 
 // GetPubKey Returns the PubKey of the account at address
diff --git a/x/bank/keeper/keeper.go b/x/bank/keeper/keeper.go
index 2fd76ea58..7661c1d31 100644
--- a/x/bank/keeper/keeper.go
+++ b/x/bank/keeper/keeper.go
@@ -1,7 +1,6 @@
 package keeper
 
 import (
-	"fmt"
 	"time"
 
 	"github.com/cosmos/cosmos-sdk/codec"
@@ -341,7 +340,7 @@ func (k BaseKeeper) MintCoins(ctx sdk.Context, moduleName string, amt sdk.Coins)
 	k.SetSupply(ctx, supply)
 
 	logger := k.Logger(ctx)
-	logger.Info(fmt.Sprintf("minted %s from %s module account", amt.String(), moduleName))
+	logger.Info("minted coins from module account", "amount", amt.String(), "from", moduleName)
 
 	return nil
 }
@@ -369,7 +368,7 @@ func (k BaseKeeper) BurnCoins(ctx sdk.Context, moduleName string, amt sdk.Coins)
 	k.SetSupply(ctx, supply)
 
 	logger := k.Logger(ctx)
-	logger.Info(fmt.Sprintf("burned %s from %s module account", amt.String(), moduleName))
+	logger.Info("burned tokens from module account", "amount", amt.String(), "from", moduleName)
 
 	return nil
 }
diff --git a/x/bank/keeper/view.go b/x/bank/keeper/view.go
index 2c4c4239b..d4bcabad2 100644
--- a/x/bank/keeper/view.go
+++ b/x/bank/keeper/view.go
@@ -49,7 +49,7 @@ func NewBaseViewKeeper(cdc codec.BinaryMarshaler, storeKey sdk.StoreKey, ak type
 
 // Logger returns a module-specific logger.
 func (k BaseViewKeeper) Logger(ctx sdk.Context) log.Logger {
-	return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+	return ctx.Logger().With("module", "x/"+types.ModuleName)
 }
 
 // HasBalance returns whether or not an account has at least amt balance.
diff --git a/x/crisis/keeper/keeper.go b/x/crisis/keeper/keeper.go
index 8a78c936c..818e272d9 100644
--- a/x/crisis/keeper/keeper.go
+++ b/x/crisis/keeper/keeper.go
@@ -44,7 +44,7 @@ func NewKeeper(
 
 // Logger returns a module-specific logger.
 func (k Keeper) Logger(ctx sdk.Context) log.Logger {
-	return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+	return ctx.Logger().With("module", "x/"+types.ModuleName)
 }
 
 // RegisterRoute register the routes for each of the invariants
@@ -76,7 +76,7 @@ func (k Keeper) AssertInvariants(ctx sdk.Context) {
 	invarRoutes := k.Routes()
 	n := len(invarRoutes)
 	for i, ir := range invarRoutes {
-		logger.Debug("Asserting cirisis invariants", "inv", fmt.Sprint(i, "/", n))
+		logger.Info("asserting crisis invariants", "inv", fmt.Sprint(i, "/", n))
 		if res, stop := ir.Invar(ctx); stop {
 			// TODO: Include app name as part of context to allow for this to be
 			// variable.
diff --git a/x/distribution/keeper/delegation.go b/x/distribution/keeper/delegation.go index 7d1e66611..61631da5d 100644 --- a/x/distribution/keeper/delegation.go +++ b/x/distribution/keeper/delegation.go @@ -152,9 +152,13 @@ func (k Keeper) withdrawDelegationRewards(ctx sdk.Context, val stakingtypes.Vali rewards := rewardsRaw.Intersect(outstanding) if !rewards.IsEqual(rewardsRaw) { logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("missing rewards rounding error, delegator %v"+ - "withdrawing rewards from validator %v, should have received %v, got %v", - val.GetOperator(), del.GetDelegatorAddr(), rewardsRaw, rewards)) + logger.Info( + "rounding error withdrawing rewards from validator", + "delegator", del.GetDelegatorAddr().String(), + "validator", val.GetOperator().String(), + "got", rewards.String(), + "expected", rewardsRaw.String(), + ) } // truncate coins, return remainder to community pool diff --git a/x/distribution/keeper/keeper.go b/x/distribution/keeper/keeper.go index 0c2f25c83..0d5c32f78 100644 --- a/x/distribution/keeper/keeper.go +++ b/x/distribution/keeper/keeper.go @@ -57,7 +57,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // SetWithdrawAddr sets a new address that will receive the rewards upon withdrawal diff --git a/x/distribution/keeper/proposal_handler.go b/x/distribution/keeper/proposal_handler.go index 9f9c4ec9e..d96bfc649 100644 --- a/x/distribution/keeper/proposal_handler.go +++ b/x/distribution/keeper/proposal_handler.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/cosmos/cosmos-sdk/x/distribution/types" @@ -13,16 +11,19 @@ func HandleCommunityPoolSpendProposal(ctx sdk.Context, k Keeper, p *types.Commun if k.blockedAddrs[p.Recipient] { return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, "%s is not allowed to receive external funds", p.Recipient) } + recipient, addrErr := sdk.AccAddressFromBech32(p.Recipient) if addrErr != nil { return addrErr } + err := k.DistributeFromFeePool(ctx, p.Amount, recipient) if err != nil { return err } logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("transferred %s from the community pool to recipient %s", p.Amount, p.Recipient)) + logger.Info("transferred from the community pool to recipient", "amount", p.Amount.String(), "recipient", p.Recipient) + return nil } diff --git a/x/evidence/keeper/keeper.go b/x/evidence/keeper/keeper.go index e15cd74c8..8a2bfa686 100644 --- a/x/evidence/keeper/keeper.go +++ b/x/evidence/keeper/keeper.go @@ -40,7 +40,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // SetRouter sets the Evidence Handler router for the x/evidence module. 
Note, diff --git a/x/gov/abci.go b/x/gov/abci.go index f31e15062..f9815e0fb 100644 --- a/x/gov/abci.go +++ b/x/gov/abci.go @@ -30,13 +30,13 @@ func EndBlocker(ctx sdk.Context, keeper keeper.Keeper) { ) logger.Info( - fmt.Sprintf("proposal %d (%s) didn't meet minimum deposit of %s (had only %s); deleted", - proposal.ProposalId, - proposal.GetTitle(), - keeper.GetDepositParams(ctx).MinDeposit, - proposal.TotalDeposit, - ), + "proposal did not meet minimum deposit; deleted", + "proposal", proposal.ProposalId, + "title", proposal.GetTitle(), + "min_deposit", keeper.GetDepositParams(ctx).MinDeposit.String(), + "total_deposit", proposal.TotalDeposit.String(), ) + return false }) @@ -90,10 +90,10 @@ func EndBlocker(ctx sdk.Context, keeper keeper.Keeper) { keeper.RemoveFromActiveProposalQueue(ctx, proposal.ProposalId, proposal.VotingEndTime) logger.Info( - fmt.Sprintf( - "proposal %d (%s) tallied; result: %s", - proposal.ProposalId, proposal.GetTitle(), logMsg, - ), + "proposal tallied", + "proposal", proposal.ProposalId, + "title", proposal.GetTitle(), + "result", logMsg, ) ctx.EventManager().EmitEvent( diff --git a/x/gov/keeper/keeper.go b/x/gov/keeper/keeper.go index 0e999d7f2..60db99d32 100644 --- a/x/gov/keeper/keeper.go +++ b/x/gov/keeper/keeper.go @@ -68,7 +68,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (keeper Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // Router returns the gov Keeper's Router diff --git a/x/ibc/applications/transfer/keeper/keeper.go b/x/ibc/applications/transfer/keeper/keeper.go index e1149ef9b..a2eebb55e 100644 --- a/x/ibc/applications/transfer/keeper/keeper.go +++ b/x/ibc/applications/transfer/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - tmbytes "github.com/tendermint/tendermint/libs/bytes" "github.com/tendermint/tendermint/libs/log" @@ -63,7 +61,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s-%s", host.ModuleName, types.ModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"-"+types.ModuleName) } // GetTransferAccount returns the ICS20 - transfers ModuleAccount diff --git a/x/ibc/core/02-client/keeper/keeper.go b/x/ibc/core/02-client/keeper/keeper.go index 18cb4afd6..bae7d3627 100644 --- a/x/ibc/core/02-client/keeper/keeper.go +++ b/x/ibc/core/02-client/keeper/keeper.go @@ -47,7 +47,7 @@ func NewKeeper(cdc codec.BinaryMarshaler, key sdk.StoreKey, paramSpace paramtype // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) } // GenerateClientIdentifier returns the next client identifier. diff --git a/x/ibc/core/03-connection/keeper/keeper.go b/x/ibc/core/03-connection/keeper/keeper.go index 05d110fec..663726868 100644 --- a/x/ibc/core/03-connection/keeper/keeper.go +++ b/x/ibc/core/03-connection/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - "github.com/tendermint/tendermint/libs/log" "github.com/cosmos/cosmos-sdk/codec" @@ -36,7 +34,7 @@ func NewKeeper(cdc codec.BinaryMarshaler, key sdk.StoreKey, ck types.ClientKeepe // Logger returns a module-specific logger. 
func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) } // GetCommitmentPrefix returns the IBC connection store prefix as a commitment diff --git a/x/ibc/core/04-channel/keeper/keeper.go b/x/ibc/core/04-channel/keeper/keeper.go index 2390fb41c..1651eec5e 100644 --- a/x/ibc/core/04-channel/keeper/keeper.go +++ b/x/ibc/core/04-channel/keeper/keeper.go @@ -1,7 +1,6 @@ package keeper import ( - "fmt" "strconv" "strings" @@ -52,7 +51,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) } // GenerateChannelIdentifier returns the next channel identifier. diff --git a/x/ibc/core/05-port/keeper/keeper.go b/x/ibc/core/05-port/keeper/keeper.go index 5321aeb66..8a4b2300a 100644 --- a/x/ibc/core/05-port/keeper/keeper.go +++ b/x/ibc/core/05-port/keeper/keeper.go @@ -26,7 +26,7 @@ func NewKeeper(sck capabilitykeeper.ScopedKeeper) Keeper { // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) } // isBounded checks a given port ID is already bounded. diff --git a/x/mint/keeper/keeper.go b/x/mint/keeper/keeper.go index 485ac961b..de0a31222 100644 --- a/x/mint/keeper/keeper.go +++ b/x/mint/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - "github.com/tendermint/tendermint/libs/log" "github.com/cosmos/cosmos-sdk/codec" @@ -51,7 +49,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // get the minter diff --git a/x/params/keeper/keeper.go b/x/params/keeper/keeper.go index a2fdeaa5c..b3d649a2e 100644 --- a/x/params/keeper/keeper.go +++ b/x/params/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - "github.com/tendermint/tendermint/libs/log" "github.com/cosmos/cosmos-sdk/codec" @@ -33,7 +31,7 @@ func NewKeeper(cdc codec.BinaryMarshaler, legacyAmino *codec.LegacyAmino, key, t // Logger returns a module-specific logger. 
func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", proposal.ModuleName)) + return ctx.Logger().With("module", "x/"+proposal.ModuleName) } // Allocate subspace used for keepers diff --git a/x/slashing/keeper/infractions.go b/x/slashing/keeper/infractions.go index 168b97b23..0c93717d1 100644 --- a/x/slashing/keeper/infractions.go +++ b/x/slashing/keeper/infractions.go @@ -48,6 +48,8 @@ func (k Keeper) HandleValidatorSignature(ctx sdk.Context, addr cryptotypes.Addre // Array value at this index has not changed, no need to update counter } + minSignedPerWindow := k.MinSignedPerWindow(ctx) + if missed { ctx.EventManager().EmitEvent( sdk.NewEvent( @@ -59,21 +61,22 @@ func (k Keeper) HandleValidatorSignature(ctx sdk.Context, addr cryptotypes.Addre ) logger.Info( - fmt.Sprintf("Absent validator %s at height %d, %d missed, threshold %d", consAddr, height, signInfo.MissedBlocksCounter, k.MinSignedPerWindow(ctx))) + "absent validator", + "height", height, + "validator", consAddr.String(), + "missed", signInfo.MissedBlocksCounter, + "threshold", minSignedPerWindow, + ) } minHeight := signInfo.StartHeight + k.SignedBlocksWindow(ctx) - maxMissed := k.SignedBlocksWindow(ctx) - k.MinSignedPerWindow(ctx) + maxMissed := k.SignedBlocksWindow(ctx) - minSignedPerWindow // if we are past the minimum height and the validator has missed too many blocks, punish them if height > minHeight && signInfo.MissedBlocksCounter > maxMissed { validator := k.sk.ValidatorByConsAddr(ctx, consAddr) if validator != nil && !validator.IsJailed() { - // Downtime confirmed: slash and jail the validator - logger.Info(fmt.Sprintf("Validator %s past min height of %d and below signed blocks threshold of %d", - consAddr, minHeight, k.MinSignedPerWindow(ctx))) - // We need to retrieve the stake distribution which signed the block, so we subtract ValidatorUpdateDelay from the evidence height, // and subtract an additional 1 since this is the LastCommit. // Note that this *can* result in a negative "distributionHeight" up to -ValidatorUpdateDelay-1, @@ -99,10 +102,21 @@ func (k Keeper) HandleValidatorSignature(ctx sdk.Context, addr cryptotypes.Addre signInfo.MissedBlocksCounter = 0 signInfo.IndexOffset = 0 k.clearValidatorMissedBlockBitArray(ctx, consAddr) - } else { - // Validator was (a) not found or (b) already jailed, don't slash + logger.Info( - fmt.Sprintf("Validator %s would have been slashed for downtime, but was either not found in store or already jailed", consAddr), + "slashing and jailing validator due to liveness fault", + "height", height, + "validator", consAddr.String(), + "min_height", minHeight, + "threshold", minSignedPerWindow, + "slashed", k.SlashFractionDowntime(ctx).String(), + "jailed_until", signInfo.JailedUntil, + ) + } else { + // validator was (a) not found or (b) already jailed so we do not slash + logger.Info( + "validator would have been slashed for downtime, but was either not found in store or already jailed", + "validator", consAddr.String(), ) } } diff --git a/x/slashing/keeper/keeper.go b/x/slashing/keeper/keeper.go index b5304a6c5..cbed27e9a 100644 --- a/x/slashing/keeper/keeper.go +++ b/x/slashing/keeper/keeper.go @@ -36,7 +36,7 @@ func NewKeeper(cdc codec.BinaryMarshaler, key sdk.StoreKey, sk types.StakingKeep // Logger returns a module-specific logger. 
func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // AddPubkey sets a address-pubkey relation diff --git a/x/staking/keeper/keeper.go b/x/staking/keeper/keeper.go index 86e32665d..74d85a645 100644 --- a/x/staking/keeper/keeper.go +++ b/x/staking/keeper/keeper.go @@ -65,7 +65,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // Set the validator hooks diff --git a/x/staking/keeper/slash.go b/x/staking/keeper/slash.go index 7bef3220f..a36f6f2e6 100644 --- a/x/staking/keeper/slash.go +++ b/x/staking/keeper/slash.go @@ -41,10 +41,10 @@ func (k Keeper) Slash(ctx sdk.Context, consAddr sdk.ConsAddress, infractionHeigh // NOTE: Correctness dependent on invariant that unbonding delegations / redelegations must also have been completely // slashed in this case - which we don't explicitly check, but should be true. // Log the slash attempt for future reference (maybe we should tag it too) - logger.Error(fmt.Sprintf( - "WARNING: Ignored attempt to slash a nonexistent validator with address %s, we recommend you investigate immediately", - consAddr)) - + logger.Error( + "WARNING: ignored attempt to slash a nonexistent validator; we recommend you investigate immediately", + "validator", consAddr.String(), + ) return } @@ -71,10 +71,12 @@ func (k Keeper) Slash(ctx sdk.Context, consAddr sdk.ConsAddress, infractionHeigh infractionHeight, ctx.BlockHeight())) case infractionHeight == ctx.BlockHeight(): - // Special-case slash at current height for efficiency - we don't need to look through unbonding delegations or redelegations - logger.Info(fmt.Sprintf( - "slashing at current height %d, not scanning unbonding delegations & redelegations", - infractionHeight)) + // Special-case slash at current height for efficiency - we don't need to + // look through unbonding delegations or redelegations. + logger.Info( + "slashing at current height; not scanning unbonding delegations & redelegations", + "height", infractionHeight, + ) case infractionHeight < ctx.BlockHeight(): // Iterate through unbonding delegations from slashed validator @@ -132,10 +134,12 @@ func (k Keeper) Slash(ctx sdk.Context, consAddr sdk.ConsAddress, infractionHeigh panic("invalid validator status") } - // Log that a slash occurred! 
- logger.Info(fmt.Sprintf( - "validator %s slashed by slash factor of %s; burned %v tokens", - validator.GetOperator(), slashFactor.String(), tokensToBurn)) + logger.Info( + "validator slashed by slash factor", + "validator", validator.GetOperator().String(), + "slash_factor", slashFactor.String(), + "burned", tokensToBurn, + ) } // jail a validator @@ -143,7 +147,7 @@ func (k Keeper) Jail(ctx sdk.Context, consAddr sdk.ConsAddress) { validator := k.mustGetValidatorByConsAddr(ctx, consAddr) k.jailValidator(ctx, validator) logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("validator %s jailed", consAddr)) + logger.Info("validator jailed", "validator", consAddr) } // unjail a validator @@ -151,7 +155,7 @@ func (k Keeper) Unjail(ctx sdk.Context, consAddr sdk.ConsAddress) { validator := k.mustGetValidatorByConsAddr(ctx, consAddr) k.unjailValidator(ctx, validator) logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("validator %s unjailed", consAddr)) + logger.Info("validator un-jailed", "validator", consAddr) } // slash an unbonding delegation and update the pool diff --git a/x/upgrade/keeper/keeper.go b/x/upgrade/keeper/keeper.go index 861fd0393..05fe6bd0f 100644 --- a/x/upgrade/keeper/keeper.go +++ b/x/upgrade/keeper/keeper.go @@ -3,7 +3,6 @@ package keeper import ( "encoding/binary" "encoding/json" - "fmt" "io/ioutil" "os" "path" @@ -183,7 +182,7 @@ func (k Keeper) ClearUpgradePlan(ctx sdk.Context) { // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // GetUpgradePlan returns the currently scheduled Plan if any, setting havePlan to true if there is a scheduled From 379c2ad630003f7337a7c9b579d534a81c10adeb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 3 Dec 2020 23:24:53 +0000 Subject: [PATCH 29/40] build(deps): bump vuepress-theme-cosmos from 1.0.175 to 1.0.176 in /docs (#8052) Bumps [vuepress-theme-cosmos](https://github.com/cosmos/vuepress-theme-cosmos) from 1.0.175 to 1.0.176. 
- [Release notes](https://github.com/cosmos/vuepress-theme-cosmos/releases) - [Commits](https://github.com/cosmos/vuepress-theme-cosmos/commits) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Federico Kunze <31522760+fedekunze@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- docs/package-lock.json | 1044 ++++++++++++++++++++++++---------------- docs/package.json | 2 +- 2 files changed, 624 insertions(+), 422 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 98bf43081..935f8aedc 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -5,118 +5,118 @@ "requires": true, "dependencies": { "@algolia/cache-browser-local-storage": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.6.0.tgz", - "integrity": "sha512-3ObeNwZ5gfDvKPp9NXdtbBrCtz/yR1oyDu/AReG73Oanua3y30Y11p7VQzzpLe2R/gDCLOGdRgr17h11lGy1Hg==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.8.2.tgz", + "integrity": "sha512-X2528jVZk+iPmsA4gF2AxH7RnREF10O98yV8QWwXcXcEYD7qjCsidPUGXcRsZCWOkCdZPA2IMJBiPDxZqfrQqA==", "requires": { - "@algolia/cache-common": "4.6.0" + "@algolia/cache-common": "4.8.2" } }, "@algolia/cache-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.6.0.tgz", - "integrity": "sha512-mEedrPb2O3WwtiIHggFoIhTbHVCMNikxMiiN9kqmwZkdDfClfxm435OUGZfAl67rBZfc0DNs/jmPM2mUoefM9A==" + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.8.2.tgz", + "integrity": "sha512-ER3QxHH2vmatfO4rRv504ByAiqqoj6kg0RcoBEetQflxRcRznmX7uFBXI3Zo42OoPKM3NMzFted50YO0Um5VLA==" }, "@algolia/cache-in-memory": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.6.0.tgz", - "integrity": "sha512-J7ayGokVWEFkuLxzgrIsPS4k1/ZndyGVpG/qPrG9RHVrs7ZogEhUSY1tbEyUlW3mGy7diIh+/52dtohDL/nbGQ==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.8.2.tgz", + "integrity": "sha512-CYse8/ZNPr/pMo6inQ0Uu+HWFFN9OcfJw67YCvU+1yz8NaS3rQ2HxU+zu1M/BCKMA89/dYF0jjBMT5rm6E4cdw==", "requires": { - "@algolia/cache-common": "4.6.0" + "@algolia/cache-common": "4.8.2" } }, "@algolia/client-account": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.6.0.tgz", - "integrity": "sha512-0t2yU6wNBNJgAmrARHrM1llhANyPT4Q/1wu6yEzv2WfPXlfsHwMhtKYNti4/k8eswwUt9wAri10WFV6TJI48rg==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.8.2.tgz", + "integrity": "sha512-cRtZ2xiLUfsanrpjYkxyNwE+4SbyUvbe8CL9HwpTJPsP0Jsv69H4H71lL7v0pQY5OWkFxKMsqVxCMH7Px3740w==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/client-search": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/client-search": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-analytics": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.6.0.tgz", - "integrity": "sha512-7yfn9pabA21Uw2iZjW1MNN4IJUT5y/YSg+ZJ+3HqBB6SgzOOqY0N3fATsPeGuN9EqSfVnqvnIrJMS8mI0b5FzQ==", + "version": "4.8.2", + "resolved": 
"https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.8.2.tgz", + "integrity": "sha512-+vnFokDGxi0vAaumbAgvDuvXWs0VvLk3gDkjkegXD8MMUTs3ByTZApCM4NPnIdbcUroFAJxbyzZQT9/CRZHgcA==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/client-search": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/client-search": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.6.0.tgz", - "integrity": "sha512-60jK0LK5H+6q6HyyMyoBBD0fIs8zZzJt6BiyJGQG90o3gUV/SnjiNxO9Bx0RRlqdkE5s0OYFu1L7P9Y5TX7oAw==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.8.2.tgz", + "integrity": "sha512-jO9RvC0FPxxhe/nynGxVEYmNltE5xgYV1Y6zviwl/80PwsrGfWp/rVDh4CVZaBOntmsOp+y0aqQwNYjLMVWXBg==", "requires": { - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-recommendation": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-recommendation/-/client-recommendation-4.6.0.tgz", - "integrity": "sha512-j+Yb1z5QeIRDCCO+9hS9oZS3KNqRogPHDbJJsLTt6pkrs4CG2UVLVV67M977B1nzJ9OzaEki3VbpGQhRhPGNfQ==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-recommendation/-/client-recommendation-4.8.2.tgz", + "integrity": "sha512-evngF6Odrw93gXkXrOYPXxTWwDQ2K01sadB3Xpa1hQb+vjiBwcA/54w6nKyE4aiII1loT5q+Uj+G1f8HwBuksw==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-search": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.6.0.tgz", - "integrity": "sha512-+qA1NA88YnXvuCKifegfgts1RQs8IzcwccQqyurz8ins4hypZL1tXN2BkrOqqDIgvYIrUvFyhv+gLO6U9PpDUA==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.8.2.tgz", + "integrity": "sha512-JtmhdBKsA3Ll9ITvBfvMjsfuOY5oOPlaS9ahBGeb2OFfC1Myb6kbjXl73VtSVh4Bh0MpTsT4SdBdYCJFctRsQg==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/logger-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.6.0.tgz", - "integrity": "sha512-F+0HTGSQzJfWsX/cJq2l4eG2Y5JA6pqZ0YETyo5XJhZX4JaDrGszVKuOqp8kovZF/Ifebywxb8JdCiSUskmbig==" + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.8.2.tgz", + "integrity": "sha512-Sse29WFBZH4CSCnbMTh8t6uAFaJtNyRRcpDjFfvkSNdPAN/pxLAY9GYUzJmP4J+ILdJn6ZWMNpvwhNQ8p2I+mg==" }, "@algolia/logger-console": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.6.0.tgz", - "integrity": "sha512-ers7OhfU6qBQl6s7MOe5gNUkcpa7LGrhEzDWnD0cUwLSd5BvWt7zEN69O2CZVbvAUZYlZ5zJTzMMa49s0VXrKQ==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.8.2.tgz", + "integrity": 
"sha512-hpZvy708iOeX6tcgy9qXVzlH8Avd3UA7AMwd1wAK5dG8PwAcrhO9wRQuE1AemvuVIEhshbWGQl9pDGXsejO+4g==", "requires": { - "@algolia/logger-common": "4.6.0" + "@algolia/logger-common": "4.8.2" } }, "@algolia/requester-browser-xhr": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.6.0.tgz", - "integrity": "sha512-ugrJT25VUkoKrl5vJVFclMdogbhTiDZ38Gss4xfTiSsP/SGE/0ei5VEOMEcj/bjkurJjPky1HfJZ3ykJhIsfCA==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.8.2.tgz", + "integrity": "sha512-Vdv38BtgwAeVPThwOVRVrR8mDiRLADwqXt1c87dnHHL1Rs3/FMRQ9ogKMKnaJMAH+OeXf+yzNxh+QCISPKaMkQ==", "requires": { - "@algolia/requester-common": "4.6.0" + "@algolia/requester-common": "4.8.2" } }, "@algolia/requester-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.6.0.tgz", - "integrity": "sha512-DJ5iIGBGrRudimaaFnpBFM19pv8SsXiMYuukn9q1GgQh2mPPBCBBJiezKc7+OzE1UyCVrAFBpR/hrJnflZnRdQ==" + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.8.2.tgz", + "integrity": "sha512-dN6MuKQQTp7+IBZNIRC9KUCrWVQRM3LaSLLB9lM7evjt++2jJTlhUu2Vncd78VbSy2kviojelxZ/mXTITRRxoA==" }, "@algolia/requester-node-http": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.6.0.tgz", - "integrity": "sha512-MPZK3oZz0jSBsqrGiPxv7LOKMUNknlaRNyRDy0v/ASIYG+GvLhGTdEzG5Eyw5tgSvBr8CWrWM5tDC31EH40Ndw==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.8.2.tgz", + "integrity": "sha512-pnpDRzIfibJ67rPQvq1me+bqhfflS2w9MlbVMhKdPsSuO8GKAZQ4GJgvIphvpSmhVnB7drdbZZ3J0KVP/y7jeg==", "requires": { - "@algolia/requester-common": "4.6.0" + "@algolia/requester-common": "4.8.2" } }, "@algolia/transporter": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.6.0.tgz", - "integrity": "sha512-xp+HI8sB8gLCvP00scaOVPQEk5H7nboWUxrwLKyVUvtUO4o003bOfFPsH86NRyu5Dv7fzX9b8EH3rVxcLOhjqg==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.8.2.tgz", + "integrity": "sha512-r3ecEn+4GWW8ntydDmGGlZ5Iqds080bt2RtAUVNbPPwyuXAs9HUqwkYQiTIHSmeYtAlQ6YOYVnX3W6W8FhbhaA==", "requires": { - "@algolia/cache-common": "4.6.0", - "@algolia/logger-common": "4.6.0", - "@algolia/requester-common": "4.6.0" + "@algolia/cache-common": "4.8.2", + "@algolia/logger-common": "4.8.2", + "@algolia/requester-common": "4.8.2" } }, "@babel/code-frame": { @@ -128,23 +128,23 @@ } }, "@babel/compat-data": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.12.5.tgz", - "integrity": "sha512-DTsS7cxrsH3by8nqQSpFSyjSfSYl57D6Cf4q8dW3LK83tBKBDCkfcay1nYkXq1nIHXnpX8WMMb/O25HOy3h1zg==" + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.12.7.tgz", + "integrity": "sha512-YaxPMGs/XIWtYqrdEOZOCPsVWfEoriXopnsz3/i7apYPXQ3698UFhS6dVT1KN5qOsWmVgw/FOrmQgpRaZayGsw==" }, "@babel/core": { - "version": "7.12.3", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.3.tgz", - "integrity": "sha512-0qXcZYKZp3/6N2jKYVxZv0aNCsxTSVCiK72DTiTYZAu7sjg73W0/aynWjMbiGd87EQL4WyA8reiJVh92AVla9g==", + "version": "7.12.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", + "integrity": 
"sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "requires": { "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.12.1", + "@babel/generator": "^7.12.5", "@babel/helper-module-transforms": "^7.12.1", - "@babel/helpers": "^7.12.1", - "@babel/parser": "^7.12.3", - "@babel/template": "^7.10.4", - "@babel/traverse": "^7.12.1", - "@babel/types": "^7.12.1", + "@babel/helpers": "^7.12.5", + "@babel/parser": "^7.12.7", + "@babel/template": "^7.12.7", + "@babel/traverse": "^7.12.9", + "@babel/types": "^7.12.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.1", @@ -156,9 +156,9 @@ }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -253,12 +253,11 @@ } }, "@babel/helper-create-regexp-features-plugin": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.12.1.tgz", - "integrity": "sha512-rsZ4LGvFTZnzdNZR5HZdmJVuXK8834R5QkF3WvcnBhrlVtF0HSIUC6zbreL9MgjTywhKokn8RIYRiq99+DLAxA==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.12.7.tgz", + "integrity": "sha512-idnutvQPdpbduutvi3JVfEgcVIHooQnhvhx0Nk9isOINOIGYkZea1Pk2JlJRiUnMefrlvr0vkByATBY/mB4vjQ==", "requires": { "@babel/helper-annotate-as-pure": "^7.10.4", - "@babel/helper-regex": "^7.10.4", "regexpu-core": "^4.7.1" } }, @@ -307,11 +306,11 @@ } }, "@babel/helper-member-expression-to-functions": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.1.tgz", - "integrity": "sha512-k0CIe3tXUKTRSoEx1LQEPFU9vRQfqHtl+kf8eNnDqb4AUJEy5pz6aIiog+YWtVm2jpggjS1laH68bPsR+KWWPQ==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.7.tgz", + "integrity": "sha512-DCsuPyeWxeHgh1Dus7APn7iza42i/qXqiFPWyBDdOFtvS581JQePsc1F/nD+fHrcswhLlRc2UpYS1NwERxZhHw==", "requires": { - "@babel/types": "^7.12.1" + "@babel/types": "^7.12.7" } }, "@babel/helper-module-imports": { @@ -339,11 +338,11 @@ } }, "@babel/helper-optimise-call-expression": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.10.4.tgz", - "integrity": "sha512-n3UGKY4VXwXThEiKrgRAoVPBMqeoPgHVqiHZOanAJCG9nQUL2pLRQirUzl0ioKclHGpGqRgIOkgcIJaIWLpygg==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.7.tgz", + "integrity": "sha512-I5xc9oSJ2h59OwyUqjv95HRyzxj53DAubUERgQMrpcCEYQyToeHA+NEcUEsVWB4j53RDeskeBJ0SgRAYHDBckw==", "requires": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.7" } }, "@babel/helper-plugin-utils": { @@ -351,14 +350,6 @@ "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": 
"sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, - "@babel/helper-regex": { - "version": "7.10.5", - "resolved": "https://registry.npmjs.org/@babel/helper-regex/-/helper-regex-7.10.5.tgz", - "integrity": "sha512-68kdUAzDrljqBrio7DYAEgCoJHxppJOERHOgOrDN7WjOzP0ZQ1LsSDRXcemzVZaLvjaJsJEESb6qt+znNuENDg==", - "requires": { - "lodash": "^4.17.19" - } - }, "@babel/helper-remap-async-to-generator": { "version": "7.12.1", "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.12.1.tgz", @@ -446,9 +437,9 @@ } }, "@babel/parser": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.5.tgz", - "integrity": "sha512-FVM6RZQ0mn2KCf1VUED7KepYeUWoVShczewOCfm3nzoBybaih51h+sYVVGthW9M6lPByEPTQf+xm27PBdlpwmQ==" + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.7.tgz", + "integrity": "sha512-oWR02Ubp4xTLCAqPRiNIuMVgNO5Aif/xpXtabhzW2HWUD47XJsAB4Zd/Rg30+XeQA3juXigV7hlquOTmwqLiwg==" }, "@babel/plugin-proposal-async-generator-functions": { "version": "7.12.1", @@ -525,9 +516,9 @@ } }, "@babel/plugin-proposal-numeric-separator": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.5.tgz", - "integrity": "sha512-UiAnkKuOrCyjZ3sYNHlRlfuZJbBHknMQ9VMwVeX97Ofwx7RpD6gS2HfqTCh8KNUQgcOm8IKt103oR4KIjh7Q8g==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.7.tgz", + "integrity": "sha512-8c+uy0qmnRTeukiGsjLGy6uVs/TFjJchGXUeBqlG4VWYOdJWkhhVPdQ3uHwbmalfJwv2JsV0qffXP4asRfL2SQ==", "requires": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/plugin-syntax-numeric-separator": "^7.10.4" @@ -553,9 +544,9 @@ } }, "@babel/plugin-proposal-optional-chaining": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.1.tgz", - "integrity": "sha512-c2uRpY6WzaVDzynVY9liyykS+kVU+WRZPMPYpkelXH8KBt1oXoI89kPbZKKG/jDT5UK92FTW2fZkZaJhdiBabw==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.7.tgz", + "integrity": "sha512-4ovylXZ0PWmwoOvhU2vhnzVNnm88/Sm9nx7V8BPgMvAzn5zDou3/Awy0EjglyubVHasJj+XCEkr/r1X3P5elCA==", "requires": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/helper-skip-transparent-expression-wrappers": "^7.12.1", @@ -951,12 +942,11 @@ } }, "@babel/plugin-transform-sticky-regex": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.12.1.tgz", - "integrity": "sha512-CiUgKQ3AGVk7kveIaPEET1jNDhZZEl1RPMWdTBE1799bdz++SwqDHStmxfCtDfBhQgCl38YRiSnrMuUMZIWSUQ==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.12.7.tgz", + "integrity": "sha512-VEiqZL5N/QvDbdjfYQBhruN0HYjSPjC4XkeqW4ny/jNtH9gcbgaqBIXYEZCNnESMAGs0/K/R7oFGMhOyu/eIxg==", "requires": { - "@babel/helper-plugin-utils": "^7.10.4", - "@babel/helper-regex": "^7.10.4" + "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-transform-template-literals": { @@ -993,13 +983,13 @@ } }, "@babel/preset-env": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.12.1.tgz", - "integrity": 
"sha512-H8kxXmtPaAGT7TyBvSSkoSTUK6RHh61So05SyEbpmr0MCZrsNYn7mGMzzeYoOUCdHzww61k8XBft2TaES+xPLg==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.12.7.tgz", + "integrity": "sha512-OnNdfAr1FUQg7ksb7bmbKoby4qFOHw6DKWWUNB9KqnnCldxhxJlP+21dpyaWFmf2h0rTbOkXJtAGevY3XW1eew==", "requires": { - "@babel/compat-data": "^7.12.1", - "@babel/helper-compilation-targets": "^7.12.1", - "@babel/helper-module-imports": "^7.12.1", + "@babel/compat-data": "^7.12.7", + "@babel/helper-compilation-targets": "^7.12.5", + "@babel/helper-module-imports": "^7.12.5", "@babel/helper-plugin-utils": "^7.10.4", "@babel/helper-validator-option": "^7.12.1", "@babel/plugin-proposal-async-generator-functions": "^7.12.1", @@ -1009,10 +999,10 @@ "@babel/plugin-proposal-json-strings": "^7.12.1", "@babel/plugin-proposal-logical-assignment-operators": "^7.12.1", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.12.1", - "@babel/plugin-proposal-numeric-separator": "^7.12.1", + "@babel/plugin-proposal-numeric-separator": "^7.12.7", "@babel/plugin-proposal-object-rest-spread": "^7.12.1", "@babel/plugin-proposal-optional-catch-binding": "^7.12.1", - "@babel/plugin-proposal-optional-chaining": "^7.12.1", + "@babel/plugin-proposal-optional-chaining": "^7.12.7", "@babel/plugin-proposal-private-methods": "^7.12.1", "@babel/plugin-proposal-unicode-property-regex": "^7.12.1", "@babel/plugin-syntax-async-generators": "^7.8.0", @@ -1054,14 +1044,14 @@ "@babel/plugin-transform-reserved-words": "^7.12.1", "@babel/plugin-transform-shorthand-properties": "^7.12.1", "@babel/plugin-transform-spread": "^7.12.1", - "@babel/plugin-transform-sticky-regex": "^7.12.1", + "@babel/plugin-transform-sticky-regex": "^7.12.7", "@babel/plugin-transform-template-literals": "^7.12.1", "@babel/plugin-transform-typeof-symbol": "^7.12.1", "@babel/plugin-transform-unicode-escapes": "^7.12.1", "@babel/plugin-transform-unicode-regex": "^7.12.1", "@babel/preset-modules": "^0.1.3", - "@babel/types": "^7.12.1", - "core-js-compat": "^3.6.2", + "@babel/types": "^7.12.7", + "core-js-compat": "^3.7.0", "semver": "^5.5.0" }, "dependencies": { @@ -1100,35 +1090,35 @@ } }, "@babel/template": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.10.4.tgz", - "integrity": "sha512-ZCjD27cGJFUB6nmCB1Enki3r+L5kJveX9pq1SvAUKoICy6CZ9yD8xO086YXdYhvNjBdnekm4ZnaP5yC8Cs/1tA==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.12.7.tgz", + "integrity": "sha512-GkDzmHS6GV7ZeXfJZ0tLRBhZcMcY0/Lnb+eEbXDBfCAcZCjrZKe6p3J4we/D24O9Y8enxWAg1cWwof59yLh2ow==", "requires": { "@babel/code-frame": "^7.10.4", - "@babel/parser": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/parser": "^7.12.7", + "@babel/types": "^7.12.7" } }, "@babel/traverse": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.5.tgz", - "integrity": "sha512-xa15FbQnias7z9a62LwYAA5SZZPkHIXpd42C6uW68o8uTuua96FHZy1y61Va5P/i83FAAcMpW8+A/QayntzuqA==", + "version": "7.12.9", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.9.tgz", + "integrity": "sha512-iX9ajqnLdoU1s1nHt36JDI9KG4k+vmI8WgjK5d+aDTwQbL2fUnzedNedssA645Ede3PM2ma1n8Q4h2ohwXgMXw==", "requires": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", "@babel/helper-function-name": "^7.10.4", "@babel/helper-split-export-declaration": "^7.11.0", - "@babel/parser": "^7.12.5", - "@babel/types": "^7.12.5", + "@babel/parser": "^7.12.7", + "@babel/types": "^7.12.7", 
"debug": "^4.1.0", "globals": "^11.1.0", "lodash": "^4.17.19" }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -1141,9 +1131,9 @@ } }, "@babel/types": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.5.tgz", - "integrity": "sha512-gyTcvz7JFa4V45C0Zklv//GmFOAal5fL23OWpBLqc4nZ4Yrz67s4kCNwSK1Gu0MXGTU8mRY3zJYtacLdKXlzig==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.7.tgz", + "integrity": "sha512-MNyI92qZq6jrQkXvtIiykvl4WtoRrVV9MPn+ZfsoEENjiWcBQ3ZSHrkxnJWgWtLX3XXqX5hrSQ+X69wkmesXuQ==", "requires": { "@babel/helper-validator-identifier": "^7.10.4", "lodash": "^4.17.19", @@ -1263,9 +1253,9 @@ "integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==" }, "@types/node": { - "version": "14.14.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.6.tgz", - "integrity": "sha512-6QlRuqsQ/Ox/aJEQWBEJG7A9+u7oSYl3mem/K8IzxXG/kAGbV1YPD9Bg9Zw3vyxC/YP+zONKwy8hGkSt1jxFMw==" + "version": "14.14.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.10.tgz", + "integrity": "sha512-J32dgx2hw8vXrSbu4ZlVhn1Nm3GbeCFNw2FWL8S5QKucHGY0cyNwjdQdO+KMBZ4wpmC7KhLCiNsdk1RFRIYUQQ==" }, "@types/q": { "version": "1.5.4", @@ -1325,9 +1315,9 @@ } }, "@vue/babel-preset-app": { - "version": "4.5.8", - "resolved": "https://registry.npmjs.org/@vue/babel-preset-app/-/babel-preset-app-4.5.8.tgz", - "integrity": "sha512-efCBo2HY8Jcs6+SyCnvWl8jGeF1Fl38reFL35AjO4SBcro0ol/qjPkeeJLjzvXUxrHAsM9DMfL/DvPa/hBmZwQ==", + "version": "4.5.9", + "resolved": "https://registry.npmjs.org/@vue/babel-preset-app/-/babel-preset-app-4.5.9.tgz", + "integrity": "sha512-d2H4hFnJsGnZtJAAZIbo1dmQJ2SI1MYix1Tc9/etlnJtCDPRHeCNodCSeuLgDwnoAyT3unzyHmTtaO56KRDuOQ==", "requires": { "@babel/core": "^7.11.0", "@babel/helper-compilation-targets": "^7.9.6", @@ -1348,9 +1338,9 @@ }, "dependencies": { "core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==" + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.0.tgz", + "integrity": "sha512-W2VYNB0nwQQE7tKS7HzXd7r2y/y2SVJl4ga6oH/dnaLFzM0o2lB2P3zCkWj5Wc/zyMYjtgd5Hmhk0ObkQFZOIA==" } } }, @@ -1521,9 +1511,9 @@ }, "dependencies": { "core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==" + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.0.tgz", + "integrity": "sha512-W2VYNB0nwQQE7tKS7HzXd7r2y/y2SVJl4ga6oH/dnaLFzM0o2lB2P3zCkWj5Wc/zyMYjtgd5Hmhk0ObkQFZOIA==" } } }, @@ -1872,24 +1862,24 @@ "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==" }, "algoliasearch": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.6.0.tgz", - "integrity": 
"sha512-f4QVfUYnWIGZwOupZh0RAqW8zEfpZAcZG6ZT0p6wDMztEyKBrjjbTXBk9p9uEaJqoIhFUm6TtApOxodTdHbqvw==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.8.2.tgz", + "integrity": "sha512-wQg1UpiXO6iXMXXyrmhKopjd3K4GGq5N/0qEjPB5OYzdvj4ju9rDIW8bYL9ghv9jD5IDrcyFsqCzlSKqn/RVXw==", "requires": { - "@algolia/cache-browser-local-storage": "4.6.0", - "@algolia/cache-common": "4.6.0", - "@algolia/cache-in-memory": "4.6.0", - "@algolia/client-account": "4.6.0", - "@algolia/client-analytics": "4.6.0", - "@algolia/client-common": "4.6.0", - "@algolia/client-recommendation": "4.6.0", - "@algolia/client-search": "4.6.0", - "@algolia/logger-common": "4.6.0", - "@algolia/logger-console": "4.6.0", - "@algolia/requester-browser-xhr": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/requester-node-http": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/cache-browser-local-storage": "4.8.2", + "@algolia/cache-common": "4.8.2", + "@algolia/cache-in-memory": "4.8.2", + "@algolia/client-account": "4.8.2", + "@algolia/client-analytics": "4.8.2", + "@algolia/client-common": "4.8.2", + "@algolia/client-recommendation": "4.8.2", + "@algolia/client-search": "4.8.2", + "@algolia/logger-common": "4.8.2", + "@algolia/logger-console": "4.8.2", + "@algolia/requester-browser-xhr": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/requester-node-http": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "align-text": { @@ -2167,25 +2157,14 @@ } }, "babel-loader": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.1.0.tgz", - "integrity": "sha512-7q7nC1tYOrqvUrN3LQK4GwSk/TQorZSOlO9C+RZDZpODgyN4ZlCqE5q9cDsyWOliN+aU9B4JX01xK9eJXowJLw==", + "version": "8.2.2", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.2.tgz", + "integrity": "sha512-JvTd0/D889PQBtUXJ2PXaKU/pjZDMtHA9V2ecm+eNRmmBCMR09a+fmpGTNwnJtFmFl5Ei7Vy47LjBb+L0wQ99g==", "requires": { - "find-cache-dir": "^2.1.0", + "find-cache-dir": "^3.3.1", "loader-utils": "^1.4.0", - "mkdirp": "^0.5.3", - "pify": "^4.0.1", + "make-dir": "^3.1.0", "schema-utils": "^2.6.5" - }, - "dependencies": { - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "requires": { - "minimist": "^1.2.5" - } - } } }, "babel-plugin-dynamic-import-node": { @@ -2277,9 +2256,9 @@ } }, "base64-js": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", - "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, "batch": { "version": "0.6.1", @@ -2542,19 +2521,12 @@ } }, "browserify-rsa": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", - "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", + "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", "requires": { - "bn.js": "^4.1.0", + "bn.js": "^5.0.0", "randombytes": "^2.0.1" - }, - "dependencies": { - "bn.js": { - "version": "4.11.9", - 
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", - "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" - } } }, "browserify-sign": { @@ -2582,14 +2554,15 @@ } }, "browserslist": { - "version": "4.14.6", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.14.6.tgz", - "integrity": "sha512-zeFYcUo85ENhc/zxHbiIp0LGzzTrE2Pv2JhxvS7kpUb9Q9D38kUX6Bie7pGutJ/5iF5rOxE7CepAuWD56xJ33A==", + "version": "4.14.7", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.14.7.tgz", + "integrity": "sha512-BSVRLCeG3Xt/j/1cCGj1019Wbty0H+Yvu2AOuZSuoaUWn3RatbL33Cxk+Q4jRMRAbOm0p7SLravLjpnT6s0vzQ==", "requires": { - "caniuse-lite": "^1.0.30001154", - "electron-to-chromium": "^1.3.585", + "caniuse-lite": "^1.0.30001157", + "colorette": "^1.2.1", + "electron-to-chromium": "^1.3.591", "escalade": "^3.1.1", - "node-releases": "^1.1.65" + "node-releases": "^1.1.66" } }, "buffer": { @@ -2698,6 +2671,42 @@ "schema-utils": "^1.0.0" }, "dependencies": { + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, "mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -2706,6 +2715,27 @@ "minimist": "^1.2.5" } }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } + }, "schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -2715,6 +2745,11 @@ "ajv-errors": "^1.0.0", "ajv-keywords": "^3.1.0" } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" } } 
}, @@ -2813,9 +2848,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001154", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001154.tgz", - "integrity": "sha512-y9DvdSti8NnYB9Be92ddMZQrcOe04kcQtcxtBx4NkB04+qZ+JUWotnXBJTmxlKudhxNTQ3RRknMwNU2YQl/Org==" + "version": "1.0.30001163", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001163.tgz", + "integrity": "sha512-QQbOGkHWnvhn3Dlf4scPlXTZVhGOK+2qCOP5gPxqzXHhtn3tZHwNdH9qNcQRWN0f3tDYrsyXFJCFiP/GLzI5Vg==" }, "caseless": { "version": "0.12.0", @@ -3154,16 +3189,6 @@ "unique-string": "^2.0.0", "write-file-atomic": "^3.0.0", "xdg-basedir": "^4.0.0" - }, - "dependencies": { - "make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "requires": { - "semver": "^6.0.0" - } - } } }, "connect-history-api-fallback": { @@ -3297,6 +3322,24 @@ "webpack-log": "^2.0.0" }, "dependencies": { + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, "globby": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/globby/-/globby-7.1.1.tgz", @@ -3308,6 +3351,13 @@ "ignore": "^3.3.5", "pify": "^3.0.0", "slash": "^1.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } } }, "ignore": { @@ -3315,10 +3365,44 @@ "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" }, - "pify": { + "locate-path": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + 
"integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } }, "schema-utils": { "version": "1.0.0", @@ -3330,6 +3414,11 @@ "ajv-keywords": "^3.1.0" } }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + }, "slash": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", @@ -3338,16 +3427,16 @@ } }, "core-js": { - "version": "2.6.11", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.11.tgz", - "integrity": "sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg==" + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", + "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==" }, "core-js-compat": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.6.5.tgz", - "integrity": "sha512-7ItTKOhOZbznhXAQ2g/slGg1PJV5zDO/WdkTwi7UEOJmkvsE32PWvx6mKtDjiMpjnR2CNf6BAD6sSxIlv7ptng==", + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.8.0.tgz", + "integrity": "sha512-o9QKelQSxQMYWHXc/Gc4L8bx/4F7TTraE5rhuN8I7mKBt5dBIUpXpIR3omv70ebr8ST5R3PqbDQr+ZI3+Tt1FQ==", "requires": { - "browserslist": "^4.8.5", + "browserslist": "^4.14.7", "semver": "7.0.0" }, "dependencies": { @@ -3637,26 +3726,26 @@ "integrity": "sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q==" }, "csso": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/csso/-/csso-4.1.0.tgz", - "integrity": "sha512-h+6w/W1WqXaJA4tb1dk7r5tVbOm97MsKxzwnvOR04UQ6GILroryjMWu3pmCCtL2mLaEStQ0fZgeGiy99mo7iyg==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", + "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", "requires": { - "css-tree": "^1.0.0" + "css-tree": "^1.1.2" }, "dependencies": { "css-tree": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0.tgz", - "integrity": "sha512-CdVYz/Yuqw0VdKhXPBIgi8DO3NicJVYZNWeX9XcIuSp9ZoFT5IcleVRW07O5rMjdcx1mb+MEJPknTTEW7DdsYw==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.2.tgz", + "integrity": "sha512-wCoWush5Aeo48GLhfHPbmvZs59Z+M7k5+B1xDnXbdWNcEF423DoFdqSWE0PM5aNk5nI5cp1q7ms36zGApY/sKQ==", "requires": { - "mdn-data": "2.0.12", + "mdn-data": "2.0.14", "source-map": "^0.6.1" } }, "mdn-data": { - "version": "2.0.12", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.12.tgz", - "integrity": "sha512-ULbAlgzVb8IqZ0Hsxm6hHSlQl3Jckst2YEQS7fODu9ilNWy2LvcoSY7TRFIktABP2mdppBioc66va90T+NUs8Q==" + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", + "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" } } }, @@ -4093,9 +4182,9 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "electron-to-chromium": { - "version": "1.3.587", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.587.tgz", - "integrity": "sha512-8XFNxzNj0R8HpTQslWAw6UWpGSuOKSP3srhyFHVbGUGb8vTHckZGCyWi+iQlaXJx5DNeTQTQLd6xN11WSckkmA==" + 
"version": "1.3.612", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.612.tgz", + "integrity": "sha512-CdrdX1B6mQqxfw+51MPWB5qA6TKWjza9f5voBtUlRfEZEwZiFaxJLrhFI8zHE9SBAuGt4h84rQU6Ho9Bauo1LA==" }, "elliptic": { "version": "6.5.3", @@ -4225,15 +4314,16 @@ } }, "es-abstract": { - "version": "1.17.7", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", - "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.1", "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", "is-regex": "^1.1.1", "object-inspect": "^1.8.0", "object-keys": "^1.1.1", @@ -4632,21 +4722,22 @@ } }, "find-cache-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", - "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", + "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", "requires": { "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" } }, "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "requires": { - "locate-path": "^3.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" } }, "flush-write-stream": { @@ -5413,6 +5504,46 @@ "requires": { "pkg-dir": "^3.0.0", "resolve-cwd": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", 
+ "requires": { + "find-up": "^3.0.0" + } + } } }, "imurmurhash": { @@ -5553,9 +5684,9 @@ } }, "is-core-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.0.0.tgz", - "integrity": "sha512-jq1AH6C8MuteOoBPwkxHafmByhL9j5q4OaPGdbuD+ZtQJVzH+i6E3BJDQcBA09k57i2Hh2yQbEG8yObZ0jdlWw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", + "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", "requires": { "has": "^1.0.3" } @@ -5998,12 +6129,11 @@ } }, "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" } }, "lodash": { @@ -6074,9 +6204,9 @@ "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" }, "loglevel": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.7.0.tgz", - "integrity": "sha512-i2sY04nal5jDcagM3FMfG++T69GEEM8CYuOfeOIvmXzOIcwE9a/CJPR0MFM97pYMj/u10lzz7/zd7+qwhrBTqQ==" + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.7.1.tgz", + "integrity": "sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw==" }, "longest": { "version": "1.0.1", @@ -6102,19 +6232,11 @@ } }, "make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "requires": { - "pify": "^4.0.1", - "semver": "^5.6.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } + "semver": "^6.0.0" } }, "map-cache": { @@ -6645,9 +6767,9 @@ } }, "node-releases": { - "version": "1.1.65", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.65.tgz", - "integrity": "sha512-YpzJOe2WFIW0V4ZkJQd/DGR/zdVwc/pI4Nl1CZrBO19FdRcSTmsuhdttw9rsTzzJLrNcSloLiBbEYx1C4f6gpA==" + "version": "1.1.67", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.67.tgz", + "integrity": "sha512-V5QF9noGFl3EymEwUYzO+3NTDpGfQB4ve6Qfnzf3UNydMhjQRVPR1DZTuvWiLzaFJYw2fmDwAfnRNEVb64hSIg==" }, "nopt": { "version": "1.0.10", @@ -6747,33 +6869,12 @@ "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==" }, "object-is": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.3.tgz", - "integrity": "sha512-teyqLvFWzLkq5B9ki8FVWA902UER2qkxmdA4nLf+wjOLAWgxzCWZNCxpDq9MvE8MmhWNr+I8w3BN49Vx36Y6Xg==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.4.tgz", + "integrity": 
"sha512-1ZvAZ4wlF7IyPVOcE1Omikt7UpaFlOQq0HlSti+ZvDH3UiD2brwGMwDbyV43jao2bKJ+4+WdPJHSd7kgzKYVqg==", "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - }, - "dependencies": { - "es-abstract": { - "version": "1.18.0-next.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", - "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", - "is-negative-zero": "^2.0.0", - "is-regex": "^1.1.1", - "object-inspect": "^1.8.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.1", - "string.prototype.trimend": "^1.0.1", - "string.prototype.trimstart": "^1.0.1" - } - } + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" } }, "object-keys": { @@ -6801,12 +6902,13 @@ } }, "object.getownpropertydescriptors": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz", - "integrity": "sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz", + "integrity": "sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng==", "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" + "es-abstract": "^1.18.0-next.1" } }, "object.pick": { @@ -6818,13 +6920,13 @@ } }, "object.values": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", - "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.2.tgz", + "integrity": "sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag==", "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "function-bind": "^1.1.1", + "es-abstract": "^1.18.0-next.1", "has": "^1.0.3" } }, @@ -6908,11 +7010,11 @@ } }, "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "requires": { - "p-limit": "^2.0.0" + "p-limit": "^2.2.0" } }, "p-map": { @@ -7046,9 +7148,9 @@ "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=" }, "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" }, "path-is-absolute": { "version": "1.0.1", @@ -7132,11 +7234,11 @@ } }, "pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": 
"sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "requires": { - "find-up": "^3.0.0" + "find-up": "^4.0.0" } }, "portfinder": { @@ -7150,9 +7252,9 @@ }, "dependencies": { "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "requires": { "ms": "^2.1.1" } @@ -8187,6 +8289,26 @@ "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.0-next.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } } }, "regexpu-core": { @@ -8203,9 +8325,9 @@ } }, "registry-auth-token": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.0.tgz", - "integrity": "sha512-P+lWzPrsgfN+UEpDS3U8AQKg/UjZX6mQSJueZj3EK+vNESoqBSpBUD3gmu4sF9lOsjXWjF11dQKUqemf3veq1w==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", + "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", "requires": { "rc": "^1.2.8" } @@ -8320,11 +8442,11 @@ "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=" }, "resolve": { - "version": "1.18.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.18.1.tgz", - "integrity": "sha512-lDfCPaMKfOJXjy0dPayzPdF1phampNWr3qFCjAu+rw/qbQmr5jWH5xN2hwh9QKfw9E5v4hwV7A+jrCmL8yjjqA==", + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.19.0.tgz", + "integrity": "sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==", "requires": { - "is-core-module": "^2.0.0", + "is-core-module": "^2.1.0", "path-parse": "^1.0.6" } }, @@ -8807,9 +8929,9 @@ }, "dependencies": { "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "requires": { "ms": "^2.1.1" } @@ -8886,9 +9008,9 @@ }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + 
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -8914,9 +9036,9 @@ }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -8990,9 +9112,19 @@ "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" }, "stack-utils": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", - "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.4.tgz", + "integrity": "sha512-IPDJfugEGbfizBwBZRZ3xpccMdRyP5lqsBWXGQWimVjua/ccLCeMOAVjlc1R7LxFjo5sEDhyNIXd8mo/AiDS9w==", + "requires": { + "escape-string-regexp": "^2.0.0" + }, + "dependencies": { + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==" + } + } }, "static-extend": { "version": "0.1.2", @@ -9150,63 +9282,21 @@ } }, "string.prototype.trimend": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.2.tgz", - "integrity": "sha512-8oAG/hi14Z4nOVP0z6mdiVZ/wqjDtWSLygMigTzAb+7aPEDTleeFf+WrF+alzecxIRkckkJVn+dTlwzJXORATw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz", + "integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==", "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - }, - "dependencies": { - "es-abstract": { - "version": "1.18.0-next.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", - "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", - "is-negative-zero": "^2.0.0", - "is-regex": "^1.1.1", - "object-inspect": "^1.8.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.1", - "string.prototype.trimend": "^1.0.1", - "string.prototype.trimstart": "^1.0.1" - } - } + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" } }, "string.prototype.trimstart": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.2.tgz", - "integrity": "sha512-7F6CdBTl5zyu30BJFdzSTlSlLPwODC23Od+iLoVH8X6+3fvDPPuBVVj9iaB1GOsSTSIgVfsfm27R2FGrAPznWg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz", + "integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==", "requires": { 
- "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - }, - "dependencies": { - "es-abstract": { - "version": "1.18.0-next.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", - "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", - "is-negative-zero": "^2.0.0", - "is-regex": "^1.1.1", - "object-inspect": "^1.8.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.1", - "string.prototype.trimend": "^1.0.1", - "string.prototype.trimstart": "^1.0.1" - } - } + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" } }, "string_decoder": { @@ -9405,6 +9495,63 @@ "worker-farm": "^1.7.0" }, "dependencies": { + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } + }, "schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -9414,6 +9561,11 @@ "ajv-errors": "^1.0.0", "ajv-keywords": "^3.1.0" } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" } } }, @@ -9983,6 +10135,26 @@ "es-abstract": "^1.17.2", "has-symbols": "^1.0.1", "object.getownpropertydescriptors": "^2.1.0" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": 
"sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } } }, "utila": { @@ -10058,9 +10230,9 @@ } }, "vue-router": { - "version": "3.4.8", - "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-3.4.8.tgz", - "integrity": "sha512-3BsR84AqarcmweXjItxw3jwQsiYNssYg090yi4rlzTnCJxmHtkyCvhNz9Z7qRSOkmiV485KkUCReTp5AjNY4wg==" + "version": "3.4.9", + "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-3.4.9.tgz", + "integrity": "sha512-CGAKWN44RqXW06oC+u4mPgHLQQi2t6vLD/JbGRDAXm0YpMv0bgpKuU5bBd7AvMgfTz9kXVRIWKHqRwGEb8xFkA==" }, "vue-server-renderer": { "version": "2.6.12", @@ -10227,9 +10399,9 @@ } }, "vuepress-theme-cosmos": { - "version": "1.0.175", - "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.175.tgz", - "integrity": "sha512-QwVVaU1cMEl+j11trOEp2Vw+C3TAU+DQQIK4rcezHwMCsIYm9Wj4yDhz6rZVYd/Rg+KaCgZ1OCiZlcH/CXdu2A==", + "version": "1.0.176", + "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.176.tgz", + "integrity": "sha512-9PNjU+AjI0FAzPthKeaEpTuoNbKfm8p5mzchkF8gXZoLCZPAN5fPaDRN3rfuE4oXHgn1INaZA7fNGaU4MgZzxg==", "requires": { "@cosmos-ui/vue": "^0.35.0", "@vuepress/plugin-google-analytics": "1.7.1", @@ -10244,7 +10416,7 @@ "jsonp": "^0.2.1", "markdown-it": "^12.0.0", "markdown-it-attrs": "^3.0.3", - "prismjs": "^1.21.0", + "prismjs": "^1.22.0", "pug": "^2.0.4", "pug-plain-loader": "^1.0.0", "stylus": "^0.54.8", @@ -10256,14 +10428,14 @@ } }, "watchpack": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.4.tgz", - "integrity": "sha512-aWAgTW4MoSJzZPAicljkO1hsi1oKj/RRq/OJQh2PKI2UKL04c2Bs+MBOB+BBABHTXJpf9mCwHN7ANCvYsvY2sg==", + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", + "integrity": "sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==", "requires": { "chokidar": "^3.4.1", "graceful-fs": "^4.1.2", "neo-async": "^2.5.0", - "watchpack-chokidar2": "^2.0.0" + "watchpack-chokidar2": "^2.0.1" }, "dependencies": { "anymatch": { @@ -10367,9 +10539,9 @@ } }, "watchpack-chokidar2": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/watchpack-chokidar2/-/watchpack-chokidar2-2.0.0.tgz", - "integrity": "sha512-9TyfOyN/zLUbA288wZ8IsMZ+6cbzvsNyEzSBp6e/zkifi6xxbl8SmQ/CxQq32k8NNqrdVEVUVSEf56L4rQ/ZxA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/watchpack-chokidar2/-/watchpack-chokidar2-2.0.1.tgz", + "integrity": "sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww==", "optional": true, "requires": { "chokidar": "^2.1.8" @@ -10547,23 +10719,53 @@ } }, "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } }, + "find-up": { + 
"version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, "is-absolute-url": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==" }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, "schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -10818,9 +11020,9 @@ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", + "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==" }, "yallist": { "version": "3.1.1", diff --git a/docs/package.json b/docs/package.json index 3b3fa4b1f..a9221ed06 100644 --- a/docs/package.json +++ b/docs/package.json @@ -14,6 +14,6 @@ "author": "", "license": "ISC", "dependencies": { - "vuepress-theme-cosmos": "^1.0.175" + "vuepress-theme-cosmos": "^1.0.176" } } From 1b00c01b70c45caad6ac7681504a7c61c2422664 Mon Sep 17 00:00:00 2001 From: Amaury Date: Fri, 4 Dec 2020 00:33:32 +0100 Subject: [PATCH 30/40] Audit through legacy endpoints to find breaking changes (#8037) * Add error msg on staking * Add tests for legacy staking and gov * Add test for encode * Fix broadcast too * Add comments * update changelog Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + docs/migrations/rest.md | 15 ++-- x/auth/client/rest/broadcast.go | 13 +++- x/auth/client/rest/decode.go | 2 +- x/auth/client/rest/encode.go | 19 ++++- x/auth/client/rest/query.go | 8 +- x/auth/client/rest/rest_test.go | 78 ++++++++++++++++--- x/gov/client/rest/grpc_query_test.go | 8 +- x/gov/client/rest/rest_test.go | 110 +++++++++++++++++++++++++++ x/staking/client/rest/query.go | 14 ++++ x/staking/client/rest/rest_test.go | 62 +++++++++++++++ 11 files changed, 300 insertions(+), 30 deletions(-) create mode 100644 x/gov/client/rest/rest_test.go create mode 100644 x/staking/client/rest/rest_test.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 
5be5c9bdd..25a486f96 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -62,6 +62,7 @@ Ref: https://keepachangelog.com/en/1.0.0/ ### Client Breaking +* (crypto) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The SDK doesn't use Tendermint's `crypto.PubKey` interface anymore, and uses instead it's own `PubKey` interface, defined in `crypto/types`. Replace all instances of `crypto.PubKey` by `cryptotypes.Pubkey`. * (x/staking) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The `TmConsPubKey` method on ValidatorI has been removed and replaced instead by `ConsPubKey` (which returns a SDK `cryptotypes.PubKey`) and `TmConsPublicKey` (which returns a Tendermint proto PublicKey). ### Improvements diff --git a/docs/migrations/rest.md b/docs/migrations/rest.md index f27635bf3..828816695 100644 --- a/docs/migrations/rest.md +++ b/docs/migrations/rest.md @@ -14,12 +14,15 @@ Some important information concerning all legacy REST endpoints: ## Breaking Changes in Legacy REST Endpoints -| Legacy REST Endpoint | Description | Breaking Change | -| ------------------------- | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `POST /txs` | Query tx by hash | Endpoint will error when trying to broadcast transactions that don't support Amino serialization (e.g. IBC txs)1. | -| `GET /txs/{hash}` | Query tx by hash | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. | -| `GET /txs` | Query tx by events | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. | -| `GET /staking/validators` | Get all validators | BondStatus is now a protobuf enum instead of an int32, and JSON serialized using its protobuf name, so expect query parameters like `?status=BOND_STATUS_{BONDED,UNBONDED,UNBONDING}` as opposed to `?status={bonded,unbonded,unbonding}`. | +| Legacy REST Endpoint | Description | Breaking Change | +| ------------------------------------------------------------------------ | ------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `POST /txs` | Broadcast tx | Endpoint will error when trying to broadcast transactions that don't support Amino serialization (e.g. IBC txs)1. | +| `POST /txs/encode`, `POST /txs/decode` | Encode/decode Amino txs from JSON to binary | Endpoint will error when trying to encode/decode transactions that don't support Amino serialization (e.g. IBC txs)1. | +| `GET /txs/{hash}` | Query tx by hash | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. | +| `GET /txs` | Query tx by events | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. 
| +| `GET /gov/proposals/{id}/votes`, `GET /gov/proposals/{id}/votes/{voter}` | Gov endpoints for querying votes | All gov endpoints which return votes return int32 in the `option` field instead of string: `1=VOTE_OPTION_YES, 2=VOTE_OPTION_ABSTAIN, 3=VOTE_OPTION_NO, 4=VOTE_OPTION_NO_WITH_VETO`. | +| `GET /staking/*` | Staking query endpoints | All staking endpoints which return validators have two breaking changes. First, the validator's `consensus_pubkey` field returns an Amino-encoded struct representing an `Any` instead of a bech32-encoded string representing the pubkey. The `value` field of the `Any` is the pubkey's raw key as base64-encoded bytes. Second, the validator's `status` field now returns an int32 instead of string: `1=BOND_STATUS_UNBONDED`, `2=BOND_STATUS_UNBONDING`, `3=BOND_STATUS_BONDED`. | +| `GET /staking/validators` | Get all validators | BondStatus is now a protobuf enum instead of an int32, and JSON serialized using its protobuf name, so expect query parameters like `?status=BOND_STATUS_{BONDED,UNBONDED,UNBONDING}` as opposed to `?status={bonded,unbonded,unbonding}`. | 1: Transactions that don't support Amino serialization are the ones that contain one or more `Msg`s that are not registered with the Amino codec. Currently in the SDK, only IBC `Msg`s fall into this case. diff --git a/x/auth/client/rest/broadcast.go b/x/auth/client/rest/broadcast.go index e0020515d..4fd4dcb1a 100644 --- a/x/auth/client/rest/broadcast.go +++ b/x/auth/client/rest/broadcast.go @@ -1,9 +1,11 @@ package rest import ( + "fmt" "io/ioutil" "net/http" + clientrest "github.com/cosmos/cosmos-sdk/client/rest" "github.com/cosmos/cosmos-sdk/client/tx" "github.com/cosmos/cosmos-sdk/client" @@ -30,8 +32,15 @@ func BroadcastTxRequest(clientCtx client.Context) http.HandlerFunc { } // NOTE: amino is used intentionally here, don't migrate it! - if err := clientCtx.LegacyAmino.UnmarshalJSON(body, &req); rest.CheckBadRequestError(w, err) { - return + err = clientCtx.LegacyAmino.UnmarshalJSON(body, &req) + if err != nil { + err := fmt.Errorf("this transaction cannot be broadcasted via legacy REST endpoints, because it does not support"+ + " Amino serialization. Please either use CLI, gRPC, gRPC-gateway, or directly query the Tendermint RPC"+ + " endpoint to broadcast this transaction. 
The new REST endpoint (via gRPC-gateway) is POST /cosmos/tx/v1beta1/txs."+ + " Please also see the REST endpoints migration guide at %s for more info", clientrest.DeprecationURL) + if rest.CheckBadRequestError(w, err) { + return + } } txBytes, err := tx.ConvertAndEncodeStdTx(clientCtx.TxConfig, req.Tx) diff --git a/x/auth/client/rest/decode.go b/x/auth/client/rest/decode.go index 3061d71cf..5b732fa0a 100644 --- a/x/auth/client/rest/decode.go +++ b/x/auth/client/rest/decode.go @@ -55,7 +55,7 @@ func DecodeTxRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { response := DecodeResp(stdTx) - err = checkSignModeError(clientCtx, response, "/cosmos/tx/v1beta1/txs/decode") + err = checkAminoMarshalError(clientCtx, response, "/cosmos/tx/v1beta1/txs/decode") if err != nil { rest.WriteErrorResponse(w, http.StatusInternalServerError, err.Error()) diff --git a/x/auth/client/rest/encode.go b/x/auth/client/rest/encode.go index 1fbc3b94f..638817801 100644 --- a/x/auth/client/rest/encode.go +++ b/x/auth/client/rest/encode.go @@ -2,12 +2,13 @@ package rest import ( "encoding/base64" + "fmt" "io/ioutil" "net/http" - "github.com/cosmos/cosmos-sdk/client/tx" - "github.com/cosmos/cosmos-sdk/client" + clientrest "github.com/cosmos/cosmos-sdk/client/rest" + "github.com/cosmos/cosmos-sdk/client/tx" "github.com/cosmos/cosmos-sdk/types/rest" "github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx" ) @@ -17,6 +18,12 @@ type EncodeResp struct { Tx string `json:"tx" yaml:"tx"` } +// ErrEncodeDecode is the error to show when encoding/decoding txs that are not +// amino-serializable (e.g. IBC txs). +var ErrEncodeDecode error = fmt.Errorf("this endpoint does not support txs that are not serializable"+ + " via Amino, such as txs that contain IBC `Msg`s. For more info, please refer to our"+ + " REST migration guide at %s", clientrest.DeprecationURL) + // EncodeTxRequestHandlerFn returns the encode tx REST handler. In particular, // it takes a json-formatted transaction, encodes it to the Amino wire protocol, // and responds with base64-encoded bytes. @@ -31,8 +38,12 @@ func EncodeTxRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { // NOTE: amino is used intentionally here, don't migrate it err = clientCtx.LegacyAmino.UnmarshalJSON(body, &req) - if rest.CheckBadRequestError(w, err) { - return + // If there's an unmarshalling error, we assume that it's because we're + // using amino to unmarshal a non-amino tx. 
+ if err != nil { + if rest.CheckBadRequestError(w, ErrEncodeDecode) { + return + } } // re-encode it in the chain's native binary format diff --git a/x/auth/client/rest/query.go b/x/auth/client/rest/query.go index 6fe54404b..d11d4b341 100644 --- a/x/auth/client/rest/query.go +++ b/x/auth/client/rest/query.go @@ -108,7 +108,7 @@ func QueryTxsRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { packStdTxResponse(w, clientCtx, txRes) } - err = checkSignModeError(clientCtx, searchResult, "/cosmos/tx/v1beta1/txs") + err = checkAminoMarshalError(clientCtx, searchResult, "/cosmos/tx/v1beta1/txs") if err != nil { rest.WriteErrorResponse(w, http.StatusInternalServerError, err.Error()) @@ -151,7 +151,7 @@ func QueryTxRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { rest.WriteErrorResponse(w, http.StatusNotFound, fmt.Sprintf("no transaction found with hash %s", hashHexStr)) } - err = checkSignModeError(clientCtx, output, "/cosmos/tx/v1beta1/txs/{txhash}") + err = checkAminoMarshalError(clientCtx, output, "/cosmos/tx/v1beta1/txs/{txhash}") if err != nil { rest.WriteErrorResponse(w, http.StatusInternalServerError, err.Error()) @@ -198,9 +198,9 @@ func packStdTxResponse(w http.ResponseWriter, clientCtx client.Context, txRes *s return nil } -// checkSignModeError checks if there are errors with marshalling non-amino +// checkAminoMarshalError checks if there are errors with marshalling non-amino // txs with amino. -func checkSignModeError(ctx client.Context, resp interface{}, grpcEndPoint string) error { +func checkAminoMarshalError(ctx client.Context, resp interface{}, grpcEndPoint string) error { // LegacyAmino used intentionally here to handle the SignMode errors marshaler := ctx.LegacyAmino diff --git a/x/auth/client/rest/rest_test.go b/x/auth/client/rest/rest_test.go index 67c3cfc3f..fb9dd73d7 100644 --- a/x/auth/client/rest/rest_test.go +++ b/x/auth/client/rest/rest_test.go @@ -21,7 +21,7 @@ import ( "github.com/cosmos/cosmos-sdk/types/tx/signing" authclient "github.com/cosmos/cosmos-sdk/x/auth/client" authcli "github.com/cosmos/cosmos-sdk/x/auth/client/cli" - rest2 "github.com/cosmos/cosmos-sdk/x/auth/client/rest" + authrest "github.com/cosmos/cosmos-sdk/x/auth/client/rest" "github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx" bankcli "github.com/cosmos/cosmos-sdk/x/bank/client/testutil" "github.com/cosmos/cosmos-sdk/x/bank/types" @@ -72,7 +72,7 @@ func (s *IntegrationTestSuite) TearDownSuite() { s.network.Cleanup() } -func mkTx() legacytx.StdTx { +func mkStdTx() legacytx.StdTx { // NOTE: this uses StdTx explicitly, don't migrate it! return legacytx.StdTx{ Msgs: []sdk.Msg{&types.MsgSend{}}, @@ -84,10 +84,49 @@ func mkTx() legacytx.StdTx { } } +// Create an IBC tx that's encoded as amino-JSON. Since we can't amino-marshal +// a tx with "cosmos-sdk/MsgTransfer" using the SDK, we just hardcode the tx +// here. But external clients might, see https://github.com/cosmos/cosmos-sdk/issues/8022. 
+func mkIBCStdTx() []byte { + ibcTx := `{ + "account_number": "68", + "chain_id": "stargate-4", + "fee": { + "amount": [ + { + "amount": "3500", + "denom": "umuon" + } + ], + "gas": "350000" + }, + "memo": "", + "msg": [ + { + "type": "cosmos-sdk/MsgTransfer", + "value": { + "receiver": "cosmos1q9wtnlwdjrhwtcjmt2uq77jrgx7z3usrq2yz7z", + "sender": "cosmos1q9wtnlwdjrhwtcjmt2uq77jrgx7z3usrq2yz7z", + "source_channel": "THEslipperCHANNEL", + "source_port": "transfer", + "token": { + "amount": "1000000", + "denom": "umuon" + } + } + } + ], + "sequence": "24" + }` + req := fmt.Sprintf(`{"tx":%s,"mode":"async"}`, ibcTx) + + return []byte(req) +} + func (s *IntegrationTestSuite) TestEncodeDecode() { var require = s.Require() val := s.network.Validators[0] - stdTx := mkTx() + stdTx := mkStdTx() // NOTE: this uses amino explicitly, don't migrate it! cdc := val.ClientCtx.LegacyAmino @@ -98,11 +137,11 @@ func (s *IntegrationTestSuite) TestEncodeDecode() { res, err := rest.PostRequest(fmt.Sprintf("%s/txs/encode", val.APIAddress), "application/json", bz) require.NoError(err) - var encodeResp rest2.EncodeResp + var encodeResp authrest.EncodeResp err = cdc.UnmarshalJSON(res, &encodeResp) require.NoError(err) - bz, err = cdc.MarshalJSON(rest2.DecodeReq{Tx: encodeResp.Tx}) + bz, err = cdc.MarshalJSON(authrest.DecodeReq{Tx: encodeResp.Tx}) require.NoError(err) res, err = rest.PostRequest(fmt.Sprintf("%s/txs/decode", val.APIAddress), "application/json", bz) @@ -111,14 +150,24 @@ func (s *IntegrationTestSuite) TestEncodeDecode() { var respWithHeight rest.ResponseWithHeight err = cdc.UnmarshalJSON(res, &respWithHeight) require.NoError(err) - var decodeResp rest2.DecodeResp + var decodeResp authrest.DecodeResp err = cdc.UnmarshalJSON(respWithHeight.Result, &decodeResp) require.NoError(err) require.Equal(stdTx, legacytx.StdTx(decodeResp)) } +func (s *IntegrationTestSuite) TestEncodeIBCTx() { + val := s.network.Validators[0] + + req := mkIBCStdTx() + res, err := rest.PostRequest(fmt.Sprintf("%s/txs/encode", val.APIAddress), "application/json", []byte(req)) + s.Require().NoError(err) + + s.Require().Contains(string(res), authrest.ErrEncodeDecode.Error()) +} + func (s *IntegrationTestSuite) TestBroadcastTxRequest() { - stdTx := mkTx() + stdTx := mkStdTx() // we just test with async mode because this tx will fail - all we care about is that it got encoded and broadcast correctly res, err := s.broadcastReq(stdTx, "async") @@ -130,6 +179,17 @@ func (s *IntegrationTestSuite) TestBroadcastTxRequest() { s.Require().NotEmpty(txRes.TxHash) } +func (s *IntegrationTestSuite) TestBroadcastIBCTxRequest() { + val := s.network.Validators[0] + + req := mkIBCStdTx() + res, err := rest.PostRequest(fmt.Sprintf("%s/txs", val.APIAddress), "application/json", []byte(req)) + s.Require().NoError(err) + + // Make sure the error message is correct. + s.Require().Contains(string(res), "this transaction cannot be broadcasted via legacy REST endpoints") +} + // Helper function to test querying txs. We will use it to query StdTx and service `Msg`s. func (s *IntegrationTestSuite) testQueryTx(txHeight int64, txHash, txRecipient string) { val0 := s.network.Validators[0] @@ -332,7 +392,7 @@ func (s *IntegrationTestSuite) broadcastReq(stdTx legacytx.StdTx, mode string) ( // NOTE: this uses amino explicitly, don't migrate it! 
cdc := val.ClientCtx.LegacyAmino - req := rest2.BroadcastReq{ + req := authrest.BroadcastReq{ Tx: stdTx, Mode: mode, } @@ -401,7 +461,7 @@ func (s *IntegrationTestSuite) testQueryIBCTx(txRes sdk.TxResponse, cmd *cobra.C out, err = clitestutil.ExecTestCLICmd(val.ClientCtx, authcli.GetEncodeCommand(), []string{txFileName}) s.Require().NoError(err) - bz, err := val.ClientCtx.LegacyAmino.MarshalJSON(rest2.DecodeReq{Tx: string(out.Bytes())}) + bz, err := val.ClientCtx.LegacyAmino.MarshalJSON(authrest.DecodeReq{Tx: string(out.Bytes())}) s.Require().NoError(err) // try to decode the txn using legacy rest, it fails. diff --git a/x/gov/client/rest/grpc_query_test.go b/x/gov/client/rest/grpc_query_test.go index 23a282d5d..8e2d4efa9 100644 --- a/x/gov/client/rest/grpc_query_test.go +++ b/x/gov/client/rest/grpc_query_test.go @@ -150,7 +150,7 @@ func (s *IntegrationTestSuite) TestGetProposalsGRPC() { func (s *IntegrationTestSuite) TestGetProposalVoteGRPC() { val := s.network.Validators[0] - voterAddressBase64 := val.Address.String() + voterAddressBech32 := val.Address.String() testCases := []struct { name string @@ -159,12 +159,12 @@ func (s *IntegrationTestSuite) TestGetProposalVoteGRPC() { }{ { "empty proposal", - fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "", voterAddressBase64), + fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "", voterAddressBech32), true, }, { "get non existing proposal", - fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "10", voterAddressBase64), + fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "10", voterAddressBech32), true, }, { @@ -174,7 +174,7 @@ func (s *IntegrationTestSuite) TestGetProposalVoteGRPC() { }, { "get proposal with id", - fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "1", voterAddressBase64), + fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "1", voterAddressBech32), false, }, } diff --git a/x/gov/client/rest/rest_test.go b/x/gov/client/rest/rest_test.go new file mode 100644 index 000000000..0b1a27402 --- /dev/null +++ b/x/gov/client/rest/rest_test.go @@ -0,0 +1,110 @@ +// +build norace + +package rest_test + +import ( + "fmt" + + "github.com/cosmos/cosmos-sdk/types/rest" + "github.com/cosmos/cosmos-sdk/x/gov/types" +) + +func (s *IntegrationTestSuite) TestLegacyGetVote() { + val := s.network.Validators[0] + voterAddressBech32 := val.Address.String() + + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "get non existing proposal", + fmt.Sprintf("%s/gov/proposals/%s/votes/%s", val.APIAddress, "10", voterAddressBech32), + true, "proposalID 10 does not exist", + }, + { + "get proposal with wrong voter address", + fmt.Sprintf("%s/gov/proposals/%s/votes/%s", val.APIAddress, "1", "wrongVoterAddress"), + true, "decoding bech32 failed: string not all lowercase or all uppercase", + }, + { + "get proposal with id", + fmt.Sprintf("%s/gov/proposals/%s/votes/%s", val.APIAddress, "1", voterAddressBech32), + false, "", + }, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + respJSON, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + + if tc.expErr { + var errResp rest.ErrorResponse + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &errResp)) + + s.Require().Equal(errResp.Error, tc.expErrMsg) + } else { + var resp = rest.ResponseWithHeight{} + err = val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &resp) 
+ s.Require().NoError(err) + + // Check result is not empty. + var vote types.Vote + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(resp.Result, &vote)) + s.Require().Equal(val.Address.String(), vote.Voter) + // Note that option is now an int. + s.Require().Equal(types.VoteOption(1), vote.Option) + } + }) + } +} + +func (s *IntegrationTestSuite) TestLegacyGetVotes() { + val := s.network.Validators[0] + + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "votes with empty proposal id", + fmt.Sprintf("%s/gov/proposals/%s/votes", val.APIAddress, ""), + true, "'votes' is not a valid uint64", + }, + { + "get votes with valid id", + fmt.Sprintf("%s/gov/proposals/%s/votes", val.APIAddress, "1"), + false, "", + }, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + respJSON, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + + if tc.expErr { + var errResp rest.ErrorResponse + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &errResp)) + + s.Require().Equal(errResp.Error, tc.expErrMsg) + } else { + var resp = rest.ResponseWithHeight{} + err = val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &resp) + s.Require().NoError(err) + + // Check result is not empty. + var votes []types.Vote + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(resp.Result, &votes)) + s.Require().Greater(len(votes), 0) + } + }) + } +} diff --git a/x/staking/client/rest/query.go b/x/staking/client/rest/query.go index cbe0813c3..d515d5dfe 100644 --- a/x/staking/client/rest/query.go +++ b/x/staking/client/rest/query.go @@ -9,6 +9,7 @@ import ( "github.com/gorilla/mux" "github.com/cosmos/cosmos-sdk/client" + clientrest "github.com/cosmos/cosmos-sdk/client/rest" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/rest" "github.com/cosmos/cosmos-sdk/x/staking/types" @@ -278,6 +279,19 @@ func validatorsHandlerFn(clientCtx client.Context) http.HandlerFunc { } status := r.FormValue("status") + // These are query params that were available in =<0.39. We show a nice + // error message for this breaking change. + if status == "bonded" || status == "unbonding" || status == "unbonded" { + err := fmt.Errorf("cosmos sdk v0.40 introduces a breaking change on this endpoint:"+ + " instead of querying using `?status=%s`, please use `status=BOND_STATUS_%s`. For more"+ + " info, please see our REST endpoint migration guide at %s", status, strings.ToUpper(status), clientrest.DeprecationURL) + + if rest.CheckBadRequestError(w, err) { + return + } + + } + if status == "" { status = types.BondStatusBonded } diff --git a/x/staking/client/rest/rest_test.go b/x/staking/client/rest/rest_test.go new file mode 100644 index 000000000..43b9afdb6 --- /dev/null +++ b/x/staking/client/rest/rest_test.go @@ -0,0 +1,62 @@ +// +build norace + +package rest_test + +import ( + "fmt" + + "github.com/cosmos/cosmos-sdk/types/rest" + "github.com/cosmos/cosmos-sdk/x/staking/types" +) + +func (s *IntegrationTestSuite) TestLegacyGetValidators() { + val := s.network.Validators[0] + baseURL := val.APIAddress + + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "old status should show error message", + fmt.Sprintf("%s/staking/validators?status=bonded", baseURL), + true, "cosmos sdk v0.40 introduces a breaking change on this endpoint: instead of" + + " querying using `?status=bonded`, please use `status=BOND_STATUS_BONDED`. 
For more" + + " info, please see our REST endpoint migration guide at https://docs.cosmos.network/master/migrations/rest.html", + }, + { + "new status should work", + fmt.Sprintf("%s/staking/validators?status=BOND_STATUS_BONDED", baseURL), + false, "", + }, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + respJSON, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + + if tc.expErr { + var errResp rest.ErrorResponse + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &errResp)) + + s.Require().Equal(errResp.Error, tc.expErrMsg) + } else { + var resp = rest.ResponseWithHeight{} + err = val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &resp) + s.Require().NoError(err) + + // Check result is not empty. + var validators []types.Validator + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(resp.Result, &validators)) + s.Require().Greater(len(validators), 0) + // While we're at it, also check that the consensus_pubkey is + // an Any, and not bech32 anymore. + s.Require().Contains(string(resp.Result), "\"consensus_pubkey\": {\n \"type\": \"tendermint/PubKeyEd25519\",") + } + }) + } +} From 7ec3bcd23a7a62b578796669bdec0b3d29036c76 Mon Sep 17 00:00:00 2001 From: Frojdi Dymylja <33157909+fdymylja@users.noreply.github.com> Date: Fri, 4 Dec 2020 13:19:29 +0100 Subject: [PATCH 31/40] fix: bank gRPC error status codes (#7814) Co-authored-by: SaReN Co-authored-by: Alessio Treglia --- x/bank/keeper/grpc_query.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/x/bank/keeper/grpc_query.go b/x/bank/keeper/grpc_query.go index bb0a5b176..a71d0d72c 100644 --- a/x/bank/keeper/grpc_query.go +++ b/x/bank/keeper/grpc_query.go @@ -31,7 +31,7 @@ func (k BaseKeeper) Balance(ctx context.Context, req *types.QueryBalanceRequest) sdkCtx := sdk.UnwrapSDKContext(ctx) address, err := sdk.AccAddressFromBech32(req.Address) if err != nil { - return nil, err + return nil, status.Errorf(codes.InvalidArgument, "invalid address: %s", err.Error()) } balance := k.GetBalance(sdkCtx, address, req.Denom) @@ -45,9 +45,13 @@ func (k BaseKeeper) AllBalances(ctx context.Context, req *types.QueryAllBalances return nil, status.Error(codes.InvalidArgument, "empty request") } + if req.Address == "" { + return nil, status.Error(codes.InvalidArgument, "address cannot be empty") + } + addr, err := sdk.AccAddressFromBech32(req.Address) if err != nil { - return nil, err + return nil, status.Errorf(codes.InvalidArgument, "invalid address: %s", err.Error()) } sdkCtx := sdk.UnwrapSDKContext(ctx) From 4a233b8dcf734d5bd8ca4a6938320f2ed876aaae Mon Sep 17 00:00:00 2001 From: Anil Kumar Kammari Date: Fri, 4 Dec 2020 20:36:50 +0530 Subject: [PATCH 32/40] Rename GRPCRouter (#8079) * rename GRPCRouter * Update store/types/store.go Co-authored-by: Robert Zaremba * Fix gofmt Co-authored-by: Robert Zaremba --- baseapp/grpcrouter_test.go | 2 +- server/api/server.go | 10 ++++---- server/util_test.go | 3 ++- simapp/app.go | 6 ++--- store/types/store.go | 2 +- types/events.go | 3 ++- types/tx_msg.go | 3 ++- x/bank/client/rest/rest.go | 3 ++- x/distribution/keeper/msg_server.go | 1 + x/evidence/client/cli/cli_test.go | 2 +- x/gov/keeper/common_test.go | 3 ++- x/gov/keeper/msg_server.go | 1 + .../core/02-client/keeper/grpc_query_test.go | 10 ++++---- x/ibc/core/02-client/types/keys_test.go | 3 ++- x/ibc/core/02-client/types/params_test.go | 3 ++- .../03-connection/keeper/grpc_query_test.go | 8 +++---- x/ibc/core/03-connection/types/keys_test.go | 3 ++- 
.../core/04-channel/keeper/grpc_query_test.go | 24 +++++++++---------- x/ibc/core/04-channel/types/keys_test.go | 3 ++- x/ibc/core/23-commitment/types/utils.go | 3 ++- x/ibc/core/23-commitment/types/utils_test.go | 3 ++- x/ibc/core/24-host/parse_test.go | 5 ++-- x/ibc/core/keeper/msg_server.go | 1 + x/slashing/client/rest/rest.go | 3 ++- x/staking/client/rest/rest.go | 3 ++- x/staking/handler_test.go | 3 ++- x/staking/keeper/grpc_query_test.go | 3 ++- x/staking/legacy/v036/migrate_test.go | 3 ++- x/staking/types/historical_info_test.go | 3 ++- x/staking/types/params_test.go | 3 ++- x/upgrade/client/rest/rest.go | 3 ++- 31 files changed, 76 insertions(+), 53 deletions(-) diff --git a/baseapp/grpcrouter_test.go b/baseapp/grpcrouter_test.go index 6a83939b4..64b2a97b9 100644 --- a/baseapp/grpcrouter_test.go +++ b/baseapp/grpcrouter_test.go @@ -16,7 +16,7 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" ) -func TestGRPCRouter(t *testing.T) { +func TestGRPCGatewayRouter(t *testing.T) { qr := baseapp.NewGRPCQueryRouter() interfaceRegistry := testdata.NewTestInterfaceRegistry() qr.SetInterfaceRegistry(interfaceRegistry) diff --git a/server/api/server.go b/server/api/server.go index b73b95556..c2987f860 100644 --- a/server/api/server.go +++ b/server/api/server.go @@ -26,9 +26,9 @@ import ( // Server defines the server's API interface. type Server struct { - Router *mux.Router - GRPCRouter *runtime.ServeMux - ClientCtx client.Context + Router *mux.Router + GRPCGatewayRouter *runtime.ServeMux + ClientCtx client.Context logger log.Logger metrics *telemetry.Metrics @@ -63,7 +63,7 @@ func New(clientCtx client.Context, logger log.Logger) *Server { Router: mux.NewRouter(), ClientCtx: clientCtx, logger: logger, - GRPCRouter: runtime.NewServeMux( + GRPCGatewayRouter: runtime.NewServeMux( // Custom marshaler option is required for gogo proto runtime.WithMarshalerOption(runtime.MIMEWildcard, marshalerOption), @@ -124,7 +124,7 @@ func (s *Server) Close() error { } func (s *Server) registerGRPCGatewayRoutes() { - s.Router.PathPrefix("/").Handler(s.GRPCRouter) + s.Router.PathPrefix("/").Handler(s.GRPCGatewayRouter) } func (s *Server) registerMetrics() { diff --git a/server/util_test.go b/server/util_test.go index 582fb4c08..a0ff6479b 100644 --- a/server/util_test.go +++ b/server/util_test.go @@ -10,8 +10,9 @@ import ( "strings" "testing" - "github.com/cosmos/cosmos-sdk/client/flags" "github.com/spf13/cobra" + + "github.com/cosmos/cosmos-sdk/client/flags" ) var CancelledInPreRun = errors.New("Canelled in prerun") diff --git a/simapp/app.go b/simapp/app.go index f8f5b40b4..02aaa2297 100644 --- a/simapp/app.go +++ b/simapp/app.go @@ -560,13 +560,13 @@ func (app *SimApp) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APICon // Register legacy tx routes. authrest.RegisterTxRoutes(clientCtx, apiSvr.Router) // Register new tx routes from grpc-gateway. - authtx.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCRouter) + authtx.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) // Register new tendermint queries routes from grpc-gateway. - tmservice.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCRouter) + tmservice.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) // Register legacy and grpc-gateway routes for all modules. 
ModuleBasics.RegisterRESTRoutes(clientCtx, apiSvr.Router) - ModuleBasics.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCRouter) + ModuleBasics.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) // register swagger API from root so that other applications can override easily if apiConfig.Swagger { diff --git a/store/types/store.go b/store/types/store.go index f78fcad3f..68b231dfc 100644 --- a/store/types/store.go +++ b/store/types/store.go @@ -5,11 +5,11 @@ import ( "io" abci "github.com/tendermint/tendermint/abci/types" + tmstrings "github.com/tendermint/tendermint/libs/strings" dbm "github.com/tendermint/tm-db" snapshottypes "github.com/cosmos/cosmos-sdk/snapshots/types" "github.com/cosmos/cosmos-sdk/types/kv" - tmstrings "github.com/tendermint/tendermint/libs/strings" ) type Store interface { diff --git a/types/events.go b/types/events.go index 44d48d9cd..5a2bf3af4 100644 --- a/types/events.go +++ b/types/events.go @@ -7,10 +7,11 @@ import ( "sort" "strings" - "github.com/cosmos/cosmos-sdk/codec" "github.com/gogo/protobuf/jsonpb" proto "github.com/gogo/protobuf/proto" abci "github.com/tendermint/tendermint/abci/types" + + "github.com/cosmos/cosmos-sdk/codec" ) // ---------------------------------------------------------------------------- diff --git a/types/tx_msg.go b/types/tx_msg.go index ddc07fa16..b8a602d88 100644 --- a/types/tx_msg.go +++ b/types/tx_msg.go @@ -1,8 +1,9 @@ package types import ( - cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/gogo/protobuf/proto" + + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" ) type ( diff --git a/x/bank/client/rest/rest.go b/x/bank/client/rest/rest.go index 4325386d4..db4909c53 100644 --- a/x/bank/client/rest/rest.go +++ b/x/bank/client/rest/rest.go @@ -1,9 +1,10 @@ package rest import ( - "github.com/cosmos/cosmos-sdk/client/rest" "github.com/gorilla/mux" + "github.com/cosmos/cosmos-sdk/client/rest" + "github.com/cosmos/cosmos-sdk/client" ) diff --git a/x/distribution/keeper/msg_server.go b/x/distribution/keeper/msg_server.go index a6bbe9ea0..4cad3c948 100644 --- a/x/distribution/keeper/msg_server.go +++ b/x/distribution/keeper/msg_server.go @@ -4,6 +4,7 @@ import ( "context" "github.com/armon/go-metrics" + "github.com/cosmos/cosmos-sdk/telemetry" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/x/distribution/types" diff --git a/x/evidence/client/cli/cli_test.go b/x/evidence/client/cli/cli_test.go index 50ebfaa7a..84b3697b4 100644 --- a/x/evidence/client/cli/cli_test.go +++ b/x/evidence/client/cli/cli_test.go @@ -48,7 +48,7 @@ func (s *IntegrationTestSuite) TestGetQueryCmd() { expectedOutput string expectErr bool }{ - "non-existant evidence": { + "non-existent evidence": { []string{"DF0C23E8634E480F84B9D5674A7CDC9816466DEC28A3358F73260F68D28D7660"}, "evidence DF0C23E8634E480F84B9D5674A7CDC9816466DEC28A3358F73260F68D28D7660 not found", true, diff --git a/x/gov/keeper/common_test.go b/x/gov/keeper/common_test.go index 60eb8be59..7ba9c5827 100644 --- a/x/gov/keeper/common_test.go +++ b/x/gov/keeper/common_test.go @@ -3,13 +3,14 @@ package keeper_test import ( "testing" + "github.com/stretchr/testify/require" + "github.com/cosmos/cosmos-sdk/simapp" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/x/gov/types" "github.com/cosmos/cosmos-sdk/x/staking" stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" - "github.com/stretchr/testify/require" ) var ( diff --git a/x/gov/keeper/msg_server.go 
b/x/gov/keeper/msg_server.go index aee3d3c38..39ddf3ab4 100644 --- a/x/gov/keeper/msg_server.go +++ b/x/gov/keeper/msg_server.go @@ -6,6 +6,7 @@ import ( "strconv" "github.com/armon/go-metrics" + "github.com/cosmos/cosmos-sdk/telemetry" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/x/gov/types" diff --git a/x/ibc/core/02-client/keeper/grpc_query_test.go b/x/ibc/core/02-client/keeper/grpc_query_test.go index d8125198e..e1e2c9d8b 100644 --- a/x/ibc/core/02-client/keeper/grpc_query_test.go +++ b/x/ibc/core/02-client/keeper/grpc_query_test.go @@ -43,7 +43,7 @@ func (suite *KeeperTestSuite) TestQueryClientState() { { "success", func() { - clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, types.ZeroHeight(), commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) + clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, types.ZeroHeight(), commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) suite.keeper.SetClientState(suite.ctx, testClientID, clientState) var err error @@ -183,10 +183,10 @@ func (suite *KeeperTestSuite) TestQueryConsensusState() { "invalid height", func() { req = &types.QueryConsensusStateRequest{ - ClientId: testClientID, + ClientId: testClientID, RevisionNumber: 0, RevisionHeight: 0, - LatestHeight: false, + LatestHeight: false, } }, false, @@ -204,7 +204,7 @@ func (suite *KeeperTestSuite) TestQueryConsensusState() { { "success latest height", func() { - clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) + clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) cs := ibctmtypes.NewConsensusState( suite.consensusState.Timestamp, commitmenttypes.NewMerkleRoot([]byte("hash1")), nil, ) @@ -235,7 +235,7 @@ func (suite *KeeperTestSuite) TestQueryConsensusState() { suite.Require().NoError(err) req = &types.QueryConsensusStateRequest{ - ClientId: testClientID, + ClientId: testClientID, RevisionNumber: 0, RevisionHeight: height, } diff --git a/x/ibc/core/02-client/types/keys_test.go b/x/ibc/core/02-client/types/keys_test.go index dbe56657a..493814523 100644 --- a/x/ibc/core/02-client/types/keys_test.go +++ b/x/ibc/core/02-client/types/keys_test.go @@ -4,8 +4,9 @@ import ( "math" "testing" - "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" ) // tests ParseClientIdentifier and IsValidClientID diff --git a/x/ibc/core/02-client/types/params_test.go b/x/ibc/core/02-client/types/params_test.go index 9484e48b4..dac80a4b4 100644 --- a/x/ibc/core/02-client/types/params_test.go +++ b/x/ibc/core/02-client/types/params_test.go @@ -3,8 +3,9 @@ package types import ( "testing" - "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" ) func TestValidateParams(t *testing.T) { diff --git a/x/ibc/core/03-connection/keeper/grpc_query_test.go b/x/ibc/core/03-connection/keeper/grpc_query_test.go index 553033810..6a3497667 100644 --- a/x/ibc/core/03-connection/keeper/grpc_query_test.go +++ 
b/x/ibc/core/03-connection/keeper/grpc_query_test.go @@ -338,7 +338,7 @@ func (suite *KeeperTestSuite) TestQueryConnectionConsensusState() { "invalid connection ID", func() { req = &types.QueryConnectionConsensusStateRequest{ - ConnectionId: "", + ConnectionId: "", RevisionNumber: 0, RevisionHeight: 1, } @@ -349,7 +349,7 @@ func (suite *KeeperTestSuite) TestQueryConnectionConsensusState() { "connection not found", func() { req = &types.QueryConnectionConsensusStateRequest{ - ConnectionId: "test-connection-id", + ConnectionId: "test-connection-id", RevisionNumber: 0, RevisionHeight: 1, } @@ -362,7 +362,7 @@ func (suite *KeeperTestSuite) TestQueryConnectionConsensusState() { _, _, connA, _, _, _ := suite.coordinator.Setup(suite.chainA, suite.chainB, channeltypes.UNORDERED) req = &types.QueryConnectionConsensusStateRequest{ - ConnectionId: connA.ID, + ConnectionId: connA.ID, RevisionNumber: 0, RevisionHeight: uint64(suite.chainA.GetContext().BlockHeight()), // use current height } @@ -379,7 +379,7 @@ func (suite *KeeperTestSuite) TestQueryConnectionConsensusState() { expClientID = clientA req = &types.QueryConnectionConsensusStateRequest{ - ConnectionId: connA.ID, + ConnectionId: connA.ID, RevisionNumber: clientState.GetLatestHeight().GetRevisionNumber(), RevisionHeight: clientState.GetLatestHeight().GetRevisionHeight(), } diff --git a/x/ibc/core/03-connection/types/keys_test.go b/x/ibc/core/03-connection/types/keys_test.go index c899dc3c5..6adb8090f 100644 --- a/x/ibc/core/03-connection/types/keys_test.go +++ b/x/ibc/core/03-connection/types/keys_test.go @@ -4,8 +4,9 @@ import ( "math" "testing" - "github.com/cosmos/cosmos-sdk/x/ibc/core/03-connection/types" "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/x/ibc/core/03-connection/types" ) // tests ParseConnectionSequence and IsValidConnectionID diff --git a/x/ibc/core/04-channel/keeper/grpc_query_test.go b/x/ibc/core/04-channel/keeper/grpc_query_test.go index cd894520e..94f07bc06 100644 --- a/x/ibc/core/04-channel/keeper/grpc_query_test.go +++ b/x/ibc/core/04-channel/keeper/grpc_query_test.go @@ -437,8 +437,8 @@ func (suite *KeeperTestSuite) TestQueryChannelConsensusState() { "invalid port ID", func() { req = &types.QueryChannelConsensusStateRequest{ - PortId: "", - ChannelId: "test-channel-id", + PortId: "", + ChannelId: "test-channel-id", RevisionNumber: 0, RevisionHeight: 1, } @@ -449,8 +449,8 @@ func (suite *KeeperTestSuite) TestQueryChannelConsensusState() { "invalid channel ID", func() { req = &types.QueryChannelConsensusStateRequest{ - PortId: "test-port-id", - ChannelId: "", + PortId: "test-port-id", + ChannelId: "", RevisionNumber: 0, RevisionHeight: 1, } @@ -461,8 +461,8 @@ func (suite *KeeperTestSuite) TestQueryChannelConsensusState() { "channel not found", func() { req = &types.QueryChannelConsensusStateRequest{ - PortId: "test-port-id", - ChannelId: "test-channel-id", + PortId: "test-port-id", + ChannelId: "test-channel-id", RevisionNumber: 0, RevisionHeight: 1, } @@ -482,8 +482,8 @@ func (suite *KeeperTestSuite) TestQueryChannelConsensusState() { suite.chainA.App.IBCKeeper.ChannelKeeper.SetChannel(suite.chainA.GetContext(), channelA.PortID, channelA.ID, channel) req = &types.QueryChannelConsensusStateRequest{ - PortId: channelA.PortID, - ChannelId: channelA.ID, + PortId: channelA.PortID, + ChannelId: channelA.ID, RevisionNumber: 0, RevisionHeight: 1, } @@ -495,8 +495,8 @@ func (suite *KeeperTestSuite) TestQueryChannelConsensusState() { _, _, _, _, channelA, _ := suite.coordinator.Setup(suite.chainA, 
suite.chainB, types.UNORDERED) req = &types.QueryChannelConsensusStateRequest{ - PortId: channelA.PortID, - ChannelId: channelA.ID, + PortId: channelA.PortID, + ChannelId: channelA.ID, RevisionNumber: 0, RevisionHeight: uint64(suite.chainA.GetContext().BlockHeight()), // use current height } @@ -516,8 +516,8 @@ func (suite *KeeperTestSuite) TestQueryChannelConsensusState() { expClientID = clientA req = &types.QueryChannelConsensusStateRequest{ - PortId: channelA.PortID, - ChannelId: channelA.ID, + PortId: channelA.PortID, + ChannelId: channelA.ID, RevisionNumber: clientState.GetLatestHeight().GetRevisionNumber(), RevisionHeight: clientState.GetLatestHeight().GetRevisionHeight(), } diff --git a/x/ibc/core/04-channel/types/keys_test.go b/x/ibc/core/04-channel/types/keys_test.go index 86e4e61aa..9bc6500b9 100644 --- a/x/ibc/core/04-channel/types/keys_test.go +++ b/x/ibc/core/04-channel/types/keys_test.go @@ -3,8 +3,9 @@ package types_test import ( "testing" - "github.com/cosmos/cosmos-sdk/x/ibc/core/04-channel/types" "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/x/ibc/core/04-channel/types" ) // tests ParseChannelSequence and IsValidChannelID diff --git a/x/ibc/core/23-commitment/types/utils.go b/x/ibc/core/23-commitment/types/utils.go index 7d2937f0f..e662f7726 100644 --- a/x/ibc/core/23-commitment/types/utils.go +++ b/x/ibc/core/23-commitment/types/utils.go @@ -2,8 +2,9 @@ package types import ( ics23 "github.com/confio/ics23/go" - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" crypto "github.com/tendermint/tendermint/proto/tendermint/crypto" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) // ConvertProofs converts crypto.ProofOps into MerkleProof diff --git a/x/ibc/core/23-commitment/types/utils_test.go b/x/ibc/core/23-commitment/types/utils_test.go index a4c08d13f..f852fb6c2 100644 --- a/x/ibc/core/23-commitment/types/utils_test.go +++ b/x/ibc/core/23-commitment/types/utils_test.go @@ -3,10 +3,11 @@ package types_test import ( "fmt" - "github.com/cosmos/cosmos-sdk/x/ibc/core/23-commitment/types" "github.com/stretchr/testify/require" abci "github.com/tendermint/tendermint/abci/types" crypto "github.com/tendermint/tendermint/proto/tendermint/crypto" + + "github.com/cosmos/cosmos-sdk/x/ibc/core/23-commitment/types" ) func (suite *MerkleTestSuite) TestConvertProofs() { diff --git a/x/ibc/core/24-host/parse_test.go b/x/ibc/core/24-host/parse_test.go index cbee37ddb..9f74bf5f6 100644 --- a/x/ibc/core/24-host/parse_test.go +++ b/x/ibc/core/24-host/parse_test.go @@ -4,9 +4,10 @@ import ( "math" "testing" + "github.com/stretchr/testify/require" + connectiontypes "github.com/cosmos/cosmos-sdk/x/ibc/core/03-connection/types" host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" - "github.com/stretchr/testify/require" ) func TestParseIdentifier(t *testing.T) { @@ -23,7 +24,7 @@ func TestParseIdentifier(t *testing.T) { // one above uint64 max {"invalid uint64", "connection-18446744073709551616", "connection-", 0, false}, // uint64 == 20 characters - {"invalid large sequence", "connection-2345682193567182931243", "conenction-", 0, false}, + {"invalid large sequence", "connection-2345682193567182931243", "connection-", 0, false}, {"capital prefix", "Connection-0", "connection-", 0, false}, {"double prefix", "connection-connection-0", "connection-", 0, false}, {"doesn't have prefix", "connection-0", "prefix", 0, false}, diff --git a/x/ibc/core/keeper/msg_server.go b/x/ibc/core/keeper/msg_server.go index 78f822e4d..8815a7e45 100644 --- 
a/x/ibc/core/keeper/msg_server.go +++ b/x/ibc/core/keeper/msg_server.go @@ -4,6 +4,7 @@ import ( "context" "github.com/armon/go-metrics" + "github.com/cosmos/cosmos-sdk/telemetry" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" diff --git a/x/slashing/client/rest/rest.go b/x/slashing/client/rest/rest.go index 21d73a5e8..a3a885459 100644 --- a/x/slashing/client/rest/rest.go +++ b/x/slashing/client/rest/rest.go @@ -1,9 +1,10 @@ package rest import ( - "github.com/cosmos/cosmos-sdk/client/rest" "github.com/gorilla/mux" + "github.com/cosmos/cosmos-sdk/client/rest" + "github.com/cosmos/cosmos-sdk/client" ) diff --git a/x/staking/client/rest/rest.go b/x/staking/client/rest/rest.go index bb4e82917..35bb8da68 100644 --- a/x/staking/client/rest/rest.go +++ b/x/staking/client/rest/rest.go @@ -1,9 +1,10 @@ package rest import ( - "github.com/cosmos/cosmos-sdk/client/rest" "github.com/gorilla/mux" + "github.com/cosmos/cosmos-sdk/client/rest" + "github.com/cosmos/cosmos-sdk/client" ) diff --git a/x/staking/handler_test.go b/x/staking/handler_test.go index 0f5a64105..4b48f0942 100644 --- a/x/staking/handler_test.go +++ b/x/staking/handler_test.go @@ -11,6 +11,8 @@ import ( tmproto "github.com/tendermint/tendermint/proto/tendermint/types" tmtypes "github.com/tendermint/tendermint/types" + "github.com/golang/protobuf/proto" + cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" @@ -23,7 +25,6 @@ import ( "github.com/cosmos/cosmos-sdk/x/staking/keeper" "github.com/cosmos/cosmos-sdk/x/staking/teststaking" "github.com/cosmos/cosmos-sdk/x/staking/types" - "github.com/golang/protobuf/proto" ) func bootstrapHandlerGenesisTest(t *testing.T, power int64, numAddrs int, accAmount sdk.Int) (*simapp.SimApp, sdk.Context, []sdk.AccAddress, []sdk.ValAddress) { diff --git a/x/staking/keeper/grpc_query_test.go b/x/staking/keeper/grpc_query_test.go index cf3c9e3fe..acf6f9cb5 100644 --- a/x/staking/keeper/grpc_query_test.go +++ b/x/staking/keeper/grpc_query_test.go @@ -5,13 +5,14 @@ import ( "fmt" "testing" + "github.com/stretchr/testify/require" + "github.com/cosmos/cosmos-sdk/simapp" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/query" "github.com/cosmos/cosmos-sdk/x/staking/keeper" "github.com/cosmos/cosmos-sdk/x/staking/teststaking" "github.com/cosmos/cosmos-sdk/x/staking/types" - "github.com/stretchr/testify/require" ) func (suite *KeeperTestSuite) TestGRPCQueryValidators() { diff --git a/x/staking/legacy/v036/migrate_test.go b/x/staking/legacy/v036/migrate_test.go index 7278774bf..375404278 100644 --- a/x/staking/legacy/v036/migrate_test.go +++ b/x/staking/legacy/v036/migrate_test.go @@ -3,12 +3,13 @@ package v036_test import ( "testing" + "github.com/stretchr/testify/require" + "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" v034staking "github.com/cosmos/cosmos-sdk/x/staking/legacy/v034" v036staking "github.com/cosmos/cosmos-sdk/x/staking/legacy/v036" - "github.com/stretchr/testify/require" ) func TestMigrate(t *testing.T) { diff --git a/x/staking/types/historical_info_test.go b/x/staking/types/historical_info_test.go index ca3e48951..d8a25fa92 100644 --- a/x/staking/types/historical_info_test.go +++ b/x/staking/types/historical_info_test.go @@ -5,9 +5,10 @@ import ( "sort" "testing" - "github.com/cosmos/cosmos-sdk/x/staking/types" 
"github.com/stretchr/testify/require" tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + + "github.com/cosmos/cosmos-sdk/x/staking/types" ) var header = tmproto.Header{ diff --git a/x/staking/types/params_test.go b/x/staking/types/params_test.go index 05a7ea956..6218091e0 100644 --- a/x/staking/types/params_test.go +++ b/x/staking/types/params_test.go @@ -3,8 +3,9 @@ package types_test import ( "testing" - "github.com/cosmos/cosmos-sdk/x/staking/types" "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/x/staking/types" ) func TestParamsEqual(t *testing.T) { diff --git a/x/upgrade/client/rest/rest.go b/x/upgrade/client/rest/rest.go index 83577c0cb..3192083f8 100644 --- a/x/upgrade/client/rest/rest.go +++ b/x/upgrade/client/rest/rest.go @@ -1,9 +1,10 @@ package rest import ( - "github.com/cosmos/cosmos-sdk/client/rest" "github.com/gorilla/mux" + "github.com/cosmos/cosmos-sdk/client/rest" + "github.com/cosmos/cosmos-sdk/client" ) From c6b8e5f7093b9c0e126cfb9069a06f58c36bce29 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Dec 2020 16:40:50 +0000 Subject: [PATCH 33/40] build(deps): bump vuepress-theme-cosmos from 1.0.176 to 1.0.177 in /docs (#8075) Bumps [vuepress-theme-cosmos](https://github.com/cosmos/vuepress-theme-cosmos) from 1.0.176 to 1.0.177. - [Release notes](https://github.com/cosmos/vuepress-theme-cosmos/releases) - [Commits](https://github.com/cosmos/vuepress-theme-cosmos/commits) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- docs/package-lock.json | 200 ++++++++++++++++++++--------------------- docs/package.json | 2 +- 2 files changed, 101 insertions(+), 101 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 935f8aedc..73740f8cd 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -5,118 +5,118 @@ "requires": true, "dependencies": { "@algolia/cache-browser-local-storage": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.8.2.tgz", - "integrity": "sha512-X2528jVZk+iPmsA4gF2AxH7RnREF10O98yV8QWwXcXcEYD7qjCsidPUGXcRsZCWOkCdZPA2IMJBiPDxZqfrQqA==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.8.3.tgz", + "integrity": "sha512-Cwc03hikHSUI+xvgUdN+H+f6jFyoDsC9fegzXzJ2nPn1YSN9EXzDMBnbrgl0sbl9iLGXe0EIGMYqR2giCv1wMQ==", "requires": { - "@algolia/cache-common": "4.8.2" + "@algolia/cache-common": "4.8.3" } }, "@algolia/cache-common": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.8.2.tgz", - "integrity": "sha512-ER3QxHH2vmatfO4rRv504ByAiqqoj6kg0RcoBEetQflxRcRznmX7uFBXI3Zo42OoPKM3NMzFted50YO0Um5VLA==" + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.8.3.tgz", + "integrity": "sha512-Cf7zZ2i6H+tLSBTkFePHhYvlgc9fnMPKsF9qTmiU38kFIGORy/TN2Fx5n1GBuRLIzaSXvcf+oHv1HvU0u1gE1g==" }, "@algolia/cache-in-memory": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.8.2.tgz", - "integrity": "sha512-CYse8/ZNPr/pMo6inQ0Uu+HWFFN9OcfJw67YCvU+1yz8NaS3rQ2HxU+zu1M/BCKMA89/dYF0jjBMT5rm6E4cdw==", + "version": "4.8.3", + "resolved": 
"https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.8.3.tgz", + "integrity": "sha512-+N7tkvmijXiDy2E7u1mM73AGEgGPWFmEmPeJS96oT46I98KXAwVPNYbcAqBE79YlixdXpkYJk41cFcORzNh+Iw==", "requires": { - "@algolia/cache-common": "4.8.2" + "@algolia/cache-common": "4.8.3" } }, "@algolia/client-account": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.8.2.tgz", - "integrity": "sha512-cRtZ2xiLUfsanrpjYkxyNwE+4SbyUvbe8CL9HwpTJPsP0Jsv69H4H71lL7v0pQY5OWkFxKMsqVxCMH7Px3740w==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.8.3.tgz", + "integrity": "sha512-Uku8LqnXBwfDCtsTCDYTUOz2/2oqcAQCKgaO0uGdIR8DTQENBXFQvzziambHdn9KuFuY+6Et9k1+cjpTPBDTBg==", "requires": { - "@algolia/client-common": "4.8.2", - "@algolia/client-search": "4.8.2", - "@algolia/transporter": "4.8.2" + "@algolia/client-common": "4.8.3", + "@algolia/client-search": "4.8.3", + "@algolia/transporter": "4.8.3" } }, "@algolia/client-analytics": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.8.2.tgz", - "integrity": "sha512-+vnFokDGxi0vAaumbAgvDuvXWs0VvLk3gDkjkegXD8MMUTs3ByTZApCM4NPnIdbcUroFAJxbyzZQT9/CRZHgcA==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.8.3.tgz", + "integrity": "sha512-9ensIWmjYJprZ+YjAVSZdWUG05xEnbytENXp508X59tf34IMIX8BR2xl0RjAQODtxBdAteGxuKt5THX6U9tQLA==", "requires": { - "@algolia/client-common": "4.8.2", - "@algolia/client-search": "4.8.2", - "@algolia/requester-common": "4.8.2", - "@algolia/transporter": "4.8.2" + "@algolia/client-common": "4.8.3", + "@algolia/client-search": "4.8.3", + "@algolia/requester-common": "4.8.3", + "@algolia/transporter": "4.8.3" } }, "@algolia/client-common": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.8.2.tgz", - "integrity": "sha512-jO9RvC0FPxxhe/nynGxVEYmNltE5xgYV1Y6zviwl/80PwsrGfWp/rVDh4CVZaBOntmsOp+y0aqQwNYjLMVWXBg==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.8.3.tgz", + "integrity": "sha512-TU3623AEFAWUQlDTznkgAMSYo8lfS9pNs5QYDQzkvzWdqK0GBDWthwdRfo9iIsfxiR9qdCMHqwEu+AlZMVhNSA==", "requires": { - "@algolia/requester-common": "4.8.2", - "@algolia/transporter": "4.8.2" + "@algolia/requester-common": "4.8.3", + "@algolia/transporter": "4.8.3" } }, "@algolia/client-recommendation": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/client-recommendation/-/client-recommendation-4.8.2.tgz", - "integrity": "sha512-evngF6Odrw93gXkXrOYPXxTWwDQ2K01sadB3Xpa1hQb+vjiBwcA/54w6nKyE4aiII1loT5q+Uj+G1f8HwBuksw==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/client-recommendation/-/client-recommendation-4.8.3.tgz", + "integrity": "sha512-qysGbmkcc6Agt29E38KWJq9JuxjGsyEYoKuX9K+P5HyQh08yR/BlRYrA8mB7vT/OIUHRGFToGO6Vq/rcg0NIOQ==", "requires": { - "@algolia/client-common": "4.8.2", - "@algolia/requester-common": "4.8.2", - "@algolia/transporter": "4.8.2" + "@algolia/client-common": "4.8.3", + "@algolia/requester-common": "4.8.3", + "@algolia/transporter": "4.8.3" } }, "@algolia/client-search": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.8.2.tgz", - "integrity": "sha512-JtmhdBKsA3Ll9ITvBfvMjsfuOY5oOPlaS9ahBGeb2OFfC1Myb6kbjXl73VtSVh4Bh0MpTsT4SdBdYCJFctRsQg==", + "version": "4.8.3", + "resolved": 
"https://registry.npmjs.org/@algolia/client-search/-/client-search-4.8.3.tgz", + "integrity": "sha512-rAnvoy3GAhbzOQVniFcKVn1eM2NX77LearzYNCbtFrFYavG+hJI187bNVmajToiuGZ10FfJvK99X2OB1AzzezQ==", "requires": { - "@algolia/client-common": "4.8.2", - "@algolia/requester-common": "4.8.2", - "@algolia/transporter": "4.8.2" + "@algolia/client-common": "4.8.3", + "@algolia/requester-common": "4.8.3", + "@algolia/transporter": "4.8.3" } }, "@algolia/logger-common": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.8.2.tgz", - "integrity": "sha512-Sse29WFBZH4CSCnbMTh8t6uAFaJtNyRRcpDjFfvkSNdPAN/pxLAY9GYUzJmP4J+ILdJn6ZWMNpvwhNQ8p2I+mg==" + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.8.3.tgz", + "integrity": "sha512-03wksHRbhl2DouEKnqWuUb64s1lV6kDAAabMCQ2Du1fb8X/WhDmxHC4UXMzypeOGlH5BZBsgVwSB7vsZLP3MZg==" }, "@algolia/logger-console": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.8.2.tgz", - "integrity": "sha512-hpZvy708iOeX6tcgy9qXVzlH8Avd3UA7AMwd1wAK5dG8PwAcrhO9wRQuE1AemvuVIEhshbWGQl9pDGXsejO+4g==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.8.3.tgz", + "integrity": "sha512-Npt+hI4UF8t3TLMluL5utr9Gc11BjL5kDnGZOhDOAz5jYiSO2nrHMFmnpLT4Cy/u7a5t7EB5dlypuC4/AGStkA==", "requires": { - "@algolia/logger-common": "4.8.2" + "@algolia/logger-common": "4.8.3" } }, "@algolia/requester-browser-xhr": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.8.2.tgz", - "integrity": "sha512-Vdv38BtgwAeVPThwOVRVrR8mDiRLADwqXt1c87dnHHL1Rs3/FMRQ9ogKMKnaJMAH+OeXf+yzNxh+QCISPKaMkQ==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.8.3.tgz", + "integrity": "sha512-/LTTIpgEmEwkyhn8yXxDdBWqXqzlgw5w2PtTpIwkSlP2/jDwdR/9w1TkFzhNbJ81ki6LAEQM5mSwoTTnbIIecg==", "requires": { - "@algolia/requester-common": "4.8.2" + "@algolia/requester-common": "4.8.3" } }, "@algolia/requester-common": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.8.2.tgz", - "integrity": "sha512-dN6MuKQQTp7+IBZNIRC9KUCrWVQRM3LaSLLB9lM7evjt++2jJTlhUu2Vncd78VbSy2kviojelxZ/mXTITRRxoA==" + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.8.3.tgz", + "integrity": "sha512-+Yo9vBkofoKR1SCqqtMnmnfq9yt/BiaDewY/6bYSMNxSYCnu2Fw1JKSIaf/4zos09PMSsxGpLohZwGas3+0GDQ==" }, "@algolia/requester-node-http": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.8.2.tgz", - "integrity": "sha512-pnpDRzIfibJ67rPQvq1me+bqhfflS2w9MlbVMhKdPsSuO8GKAZQ4GJgvIphvpSmhVnB7drdbZZ3J0KVP/y7jeg==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.8.3.tgz", + "integrity": "sha512-k2fiKIeMIFqgC01FnzII6kqC2GQBAfbNaUX4k7QCPa6P8t4sp2xE6fImOUiztLnnL3C9X9ZX6Fw3L+cudi7jvQ==", "requires": { - "@algolia/requester-common": "4.8.2" + "@algolia/requester-common": "4.8.3" } }, "@algolia/transporter": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.8.2.tgz", - "integrity": "sha512-r3ecEn+4GWW8ntydDmGGlZ5Iqds080bt2RtAUVNbPPwyuXAs9HUqwkYQiTIHSmeYtAlQ6YOYVnX3W6W8FhbhaA==", + "version": "4.8.3", + "resolved": 
"https://registry.npmjs.org/@algolia/transporter/-/transporter-4.8.3.tgz", + "integrity": "sha512-nU7fy2iU8snxATlsks0MjMyv97QJWQmOVwTjDc+KZ4+nue8CLcgm4LA4dsTBqvxeCQIoEtt3n72GwXcaqiJSjQ==", "requires": { - "@algolia/cache-common": "4.8.2", - "@algolia/logger-common": "4.8.2", - "@algolia/requester-common": "4.8.2" + "@algolia/cache-common": "4.8.3", + "@algolia/logger-common": "4.8.3", + "@algolia/requester-common": "4.8.3" } }, "@babel/code-frame": { @@ -1862,24 +1862,24 @@ "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==" }, "algoliasearch": { - "version": "4.8.2", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.8.2.tgz", - "integrity": "sha512-wQg1UpiXO6iXMXXyrmhKopjd3K4GGq5N/0qEjPB5OYzdvj4ju9rDIW8bYL9ghv9jD5IDrcyFsqCzlSKqn/RVXw==", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.8.3.tgz", + "integrity": "sha512-pljX9jEE2TQ3i1JayhG8afNdE8UuJg3O9c7unW6QO67yRWCKr6b0t5aKC3hSVtjt7pA2TQXLKoAISb4SHx9ozQ==", "requires": { - "@algolia/cache-browser-local-storage": "4.8.2", - "@algolia/cache-common": "4.8.2", - "@algolia/cache-in-memory": "4.8.2", - "@algolia/client-account": "4.8.2", - "@algolia/client-analytics": "4.8.2", - "@algolia/client-common": "4.8.2", - "@algolia/client-recommendation": "4.8.2", - "@algolia/client-search": "4.8.2", - "@algolia/logger-common": "4.8.2", - "@algolia/logger-console": "4.8.2", - "@algolia/requester-browser-xhr": "4.8.2", - "@algolia/requester-common": "4.8.2", - "@algolia/requester-node-http": "4.8.2", - "@algolia/transporter": "4.8.2" + "@algolia/cache-browser-local-storage": "4.8.3", + "@algolia/cache-common": "4.8.3", + "@algolia/cache-in-memory": "4.8.3", + "@algolia/client-account": "4.8.3", + "@algolia/client-analytics": "4.8.3", + "@algolia/client-common": "4.8.3", + "@algolia/client-recommendation": "4.8.3", + "@algolia/client-search": "4.8.3", + "@algolia/logger-common": "4.8.3", + "@algolia/logger-console": "4.8.3", + "@algolia/requester-browser-xhr": "4.8.3", + "@algolia/requester-common": "4.8.3", + "@algolia/requester-node-http": "4.8.3", + "@algolia/transporter": "4.8.3" } }, "align-text": { @@ -2554,15 +2554,15 @@ } }, "browserslist": { - "version": "4.14.7", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.14.7.tgz", - "integrity": "sha512-BSVRLCeG3Xt/j/1cCGj1019Wbty0H+Yvu2AOuZSuoaUWn3RatbL33Cxk+Q4jRMRAbOm0p7SLravLjpnT6s0vzQ==", + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.15.0.tgz", + "integrity": "sha512-IJ1iysdMkGmjjYeRlDU8PQejVwxvVO5QOfXH7ylW31GO6LwNRSmm/SgRXtNsEXqMLl2e+2H5eEJ7sfynF8TCaQ==", "requires": { - "caniuse-lite": "^1.0.30001157", + "caniuse-lite": "^1.0.30001164", "colorette": "^1.2.1", - "electron-to-chromium": "^1.3.591", + "electron-to-chromium": "^1.3.612", "escalade": "^3.1.1", - "node-releases": "^1.1.66" + "node-releases": "^1.1.67" } }, "buffer": { @@ -2848,9 +2848,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001163", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001163.tgz", - "integrity": "sha512-QQbOGkHWnvhn3Dlf4scPlXTZVhGOK+2qCOP5gPxqzXHhtn3tZHwNdH9qNcQRWN0f3tDYrsyXFJCFiP/GLzI5Vg==" + "version": "1.0.30001164", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001164.tgz", + "integrity": "sha512-G+A/tkf4bu0dSp9+duNiXc7bGds35DioCyC6vgK2m/rjA4Krpy5WeZgZyfH2f0wj2kI6yAWWucyap6oOwmY1mg==" }, "caseless": { "version": "0.12.0", @@ -4182,9 +4182,9 @@ "integrity": 
"sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "electron-to-chromium": { - "version": "1.3.612", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.612.tgz", - "integrity": "sha512-CdrdX1B6mQqxfw+51MPWB5qA6TKWjza9f5voBtUlRfEZEwZiFaxJLrhFI8zHE9SBAuGt4h84rQU6Ho9Bauo1LA==" + "version": "1.3.615", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.615.tgz", + "integrity": "sha512-fNYTQXoUhNc6RmHDlGN4dgcLURSBIqQCN7ls6MuQ741+NJyLNRz8DxAC+pZpOKfRs6cfY0lv2kWdy8Oxf9j4+A==" }, "elliptic": { "version": "6.5.3", @@ -6864,9 +6864,9 @@ } }, "object-inspect": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", - "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==" + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz", + "integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==" }, "object-is": { "version": "1.1.4", @@ -10399,9 +10399,9 @@ } }, "vuepress-theme-cosmos": { - "version": "1.0.176", - "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.176.tgz", - "integrity": "sha512-9PNjU+AjI0FAzPthKeaEpTuoNbKfm8p5mzchkF8gXZoLCZPAN5fPaDRN3rfuE4oXHgn1INaZA7fNGaU4MgZzxg==", + "version": "1.0.177", + "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.177.tgz", + "integrity": "sha512-PCTY/5g9I3+4CyJBdPg7hoGqSPsf+noB8qWd/ZmBmGTg7ST2zAawmkEf/lYlCmMEHPRKF+lDKvQD+cTEbZR74g==", "requires": { "@cosmos-ui/vue": "^0.35.0", "@vuepress/plugin-google-analytics": "1.7.1", diff --git a/docs/package.json b/docs/package.json index a9221ed06..ab813e4d7 100644 --- a/docs/package.json +++ b/docs/package.json @@ -14,6 +14,6 @@ "author": "", "license": "ISC", "dependencies": { - "vuepress-theme-cosmos": "^1.0.176" + "vuepress-theme-cosmos": "^1.0.177" } } From c8a9da1d279bd11a192dbfb624f41cd95450139f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?colin=20axn=C3=A9r?= <25233464+colin-axner@users.noreply.github.com> Date: Sat, 5 Dec 2020 22:10:15 +0100 Subject: [PATCH 34/40] increase ibc basic genesis validation (#8081) * increase client basic genesis validation * add connection/channel genesis validation * fix tests * apply @AdityaSripal suggestion Co-authored-by: Christopher Goes --- x/ibc/core/02-client/types/genesis.go | 39 ++++- x/ibc/core/02-client/types/genesis_test.go | 138 +++++++++++++++--- x/ibc/core/03-connection/types/genesis.go | 17 +++ .../core/03-connection/types/genesis_test.go | 26 ++++ x/ibc/core/04-channel/types/genesis.go | 17 +++ x/ibc/core/04-channel/types/genesis_test.go | 80 +++++++++- x/ibc/core/genesis_test.go | 21 +-- 7 files changed, 298 insertions(+), 40 deletions(-) diff --git a/x/ibc/core/02-client/types/genesis.go b/x/ibc/core/02-client/types/genesis.go index f7939c478..18f33160c 100644 --- a/x/ibc/core/02-client/types/genesis.go +++ b/x/ibc/core/02-client/types/genesis.go @@ -103,11 +103,15 @@ func (gs GenesisState) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error { // Validate performs basic genesis state validation returning an error upon any // failure. func (gs GenesisState) Validate() error { + // keep track of the max sequence to ensure it is less than + // the next sequence used in creating client identifers. 
+ var maxSequence uint64 = 0 + if err := gs.Params.Validate(); err != nil { return err } - validClients := make(map[string]bool) + validClients := make(map[string]string) for i, client := range gs.Clients { if err := host.ClientIdentifierValidator(client.ClientId); err != nil { @@ -126,13 +130,31 @@ func (gs GenesisState) Validate() error { return fmt.Errorf("invalid client %v index %d: %w", client, i, err) } + clientType, sequence, err := ParseClientIdentifier(client.ClientId) + if err != nil { + return err + } + + if clientType != clientState.ClientType() { + return fmt.Errorf("client state type %s does not equal client type in client identifier %s", clientState.ClientType(), clientType) + } + + if err := ValidateClientType(clientType); err != nil { + return err + } + + if sequence > maxSequence { + maxSequence = sequence + } + // add client id to validClients map - validClients[client.ClientId] = true + validClients[client.ClientId] = clientState.ClientType() } for i, cc := range gs.ClientsConsensus { // check that consensus state is for a client in the genesis clients list - if !validClients[cc.ClientId] { + clientType, ok := validClients[cc.ClientId] + if !ok { return fmt.Errorf("consensus state in genesis has a client id %s that does not map to a genesis client", cc.ClientId) } @@ -149,12 +171,23 @@ func (gs GenesisState) Validate() error { if err := cs.ValidateBasic(); err != nil { return fmt.Errorf("invalid client consensus state %v index %d: %w", cs, i, err) } + + // ensure consensus state type matches client state type + if clientType != cs.ClientType() { + return fmt.Errorf("consensus state client type %s does not equal client state client type %s", cs.ClientType(), clientType) + } + } + } if gs.CreateLocalhost && !gs.Params.IsAllowedClient(exported.Localhost) { return fmt.Errorf("localhost client is not registered on the allowlist") } + if maxSequence != 0 && maxSequence >= gs.NextClientSequence { + return fmt.Errorf("next client identifier sequence %d must be greater than the maximum sequence used in the provided client identifiers %d", gs.NextClientSequence, maxSequence) + } + return nil } diff --git a/x/ibc/core/02-client/types/genesis_test.go b/x/ibc/core/02-client/types/genesis_test.go index 81eff7833..12cfba69d 100644 --- a/x/ibc/core/02-client/types/genesis_test.go +++ b/x/ibc/core/02-client/types/genesis_test.go @@ -17,8 +17,11 @@ import ( ) const ( - chainID = "chainID" - clientID = "ethbridge" + chainID = "chainID" + tmClientID0 = "07-tendermint-0" + tmClientID1 = "07-tendermint-1" + invalidClientID = "myclient-0" + clientID = tmClientID0 height = 10 ) @@ -69,15 +72,15 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID0, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( - exported.Localhost, localhosttypes.NewClientState("chainID", clientHeight), + exported.Localhost+"-1", localhosttypes.NewClientState("chainID", clientHeight), ), }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - clientID, 
+ tmClientID0, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( header.GetHeight().(types.Height), @@ -90,7 +93,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint, exported.Localhost), false, - 0, + 2, ), expPass: true, }, @@ -99,7 +102,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - "/~@$*", ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + invalidClientID, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( exported.Localhost, localhosttypes.NewClientState("chainID", clientHeight), @@ -107,7 +110,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - "/~@$*", + invalidClientID, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( header.GetHeight().(types.Height), @@ -129,7 +132,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID0, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState(exported.Localhost, localhosttypes.NewClientState("chaindID", types.ZeroHeight())), }, @@ -145,7 +148,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID0, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( exported.Localhost, localhosttypes.NewClientState("chaindID", clientHeight), @@ -153,7 +156,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - "wrongclientid", + tmClientID1, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( types.ZeroHeight(), @@ -175,7 +178,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + 
tmClientID0, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( exported.Localhost, localhosttypes.NewClientState("chaindID", clientHeight), @@ -183,7 +186,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - clientID, + tmClientID0, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( types.ZeroHeight(), @@ -205,7 +208,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID0, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( exported.Localhost, localhosttypes.NewClientState("chaindID", clientHeight), @@ -213,7 +216,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - clientID, + tmClientID0, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( types.NewHeight(0, 1), @@ -235,7 +238,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID0, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( exported.Localhost, localhosttypes.NewClientState("chainID", clientHeight), @@ -243,7 +246,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - clientID, + tmClientID0, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( header.GetHeight().(types.Height), @@ -265,7 +268,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID0, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( exported.Localhost, localhosttypes.NewClientState("chainID", clientHeight), @@ -273,7 +276,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, []types.ClientConsensusStates{ 
types.NewClientConsensusStates( - clientID, + tmClientID0, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( header.GetHeight().(types.Height), @@ -295,7 +298,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID0, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( exported.Localhost, localhosttypes.NewClientState("chainID", clientHeight), @@ -303,7 +306,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - clientID, + tmClientID0, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( header.GetHeight().(types.Height), @@ -325,15 +328,15 @@ func (suite *TypesTestSuite) TestValidateGenesis() { genState: types.NewGenesisState( []types.IdentifiedClientState{ types.NewIdentifiedClientState( - clientID, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + tmClientID1, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), types.NewIdentifiedClientState( - exported.Localhost, localhosttypes.NewClientState("chainID", clientHeight), + exported.Localhost+"-0", localhosttypes.NewClientState("chainID", clientHeight), ), }, []types.ClientConsensusStates{ types.NewClientConsensusStates( - clientID, + tmClientID1, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( header.GetHeight().(types.Height), @@ -346,10 +349,97 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), true, + 2, + ), + expPass: false, + }, + { + name: "next sequence too small", + genState: types.NewGenesisState( + []types.IdentifiedClientState{ + types.NewIdentifiedClientState( + tmClientID0, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + ), + types.NewIdentifiedClientState( + exported.Localhost+"-1", localhosttypes.NewClientState("chainID", clientHeight), + ), + }, + []types.ClientConsensusStates{ + types.NewClientConsensusStates( + tmClientID0, + []types.ConsensusStateWithHeight{ + types.NewConsensusStateWithHeight( + header.GetHeight().(types.Height), + ibctmtypes.NewConsensusState( + header.GetTime(), commitmenttypes.NewMerkleRoot(header.Header.GetAppHash()), header.Header.NextValidatorsHash, + ), + ), + }, + ), + }, + types.NewParams(exported.Tendermint, exported.Localhost), + false, 0, ), expPass: false, }, + { + name: "failed to parse client identifier in client state loop", + genState: types.NewGenesisState( + []types.IdentifiedClientState{ + types.NewIdentifiedClientState( 
+ "my-client", ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + ), + types.NewIdentifiedClientState( + exported.Localhost+"-1", localhosttypes.NewClientState("chainID", clientHeight), + ), + }, + []types.ClientConsensusStates{ + types.NewClientConsensusStates( + tmClientID0, + []types.ConsensusStateWithHeight{ + types.NewConsensusStateWithHeight( + header.GetHeight().(types.Height), + ibctmtypes.NewConsensusState( + header.GetTime(), commitmenttypes.NewMerkleRoot(header.Header.GetAppHash()), header.Header.NextValidatorsHash, + ), + ), + }, + ), + }, + types.NewParams(exported.Tendermint, exported.Localhost), + false, + 5, + ), + expPass: false, + }, + { + name: "consensus state different than client state type", + genState: types.NewGenesisState( + []types.IdentifiedClientState{ + types.NewIdentifiedClientState( + exported.Localhost+"-1", localhosttypes.NewClientState("chainID", clientHeight), + ), + }, + []types.ClientConsensusStates{ + types.NewClientConsensusStates( + exported.Localhost+"-1", + []types.ConsensusStateWithHeight{ + types.NewConsensusStateWithHeight( + header.GetHeight().(types.Height), + ibctmtypes.NewConsensusState( + header.GetTime(), commitmenttypes.NewMerkleRoot(header.Header.GetAppHash()), header.Header.NextValidatorsHash, + ), + ), + }, + ), + }, + types.NewParams(exported.Tendermint, exported.Localhost), + false, + 5, + ), + expPass: false, + }, } for _, tc := range testCases { diff --git a/x/ibc/core/03-connection/types/genesis.go b/x/ibc/core/03-connection/types/genesis.go index f5af9b848..b10c300a8 100644 --- a/x/ibc/core/03-connection/types/genesis.go +++ b/x/ibc/core/03-connection/types/genesis.go @@ -38,7 +38,20 @@ func DefaultGenesisState() GenesisState { // Validate performs basic genesis state validation returning an error upon any // failure. func (gs GenesisState) Validate() error { + // keep track of the max sequence to ensure it is less than + // the next sequence used in creating connection identifers. 
+ var maxSequence uint64 = 0 + for i, conn := range gs.Connections { + sequence, err := ParseConnectionSequence(conn.Id) + if err != nil { + return err + } + + if sequence > maxSequence { + maxSequence = sequence + } + if err := conn.ValidateBasic(); err != nil { return fmt.Errorf("invalid connection %v index %d: %w", conn, i, err) } @@ -55,5 +68,9 @@ func (gs GenesisState) Validate() error { } } + if maxSequence != 0 && maxSequence >= gs.NextConnectionSequence { + return fmt.Errorf("next connection sequence %d must be greater than maximum sequence used in connection identifier %d", gs.NextConnectionSequence, maxSequence) + } + return nil } diff --git a/x/ibc/core/03-connection/types/genesis_test.go b/x/ibc/core/03-connection/types/genesis_test.go index 343d69b08..8d343d1f0 100644 --- a/x/ibc/core/03-connection/types/genesis_test.go +++ b/x/ibc/core/03-connection/types/genesis_test.go @@ -74,6 +74,32 @@ func TestValidateGenesis(t *testing.T) { ), expPass: false, }, + { + name: "invalid connection identifier", + genState: types.NewGenesisState( + []types.IdentifiedConnection{ + types.NewIdentifiedConnection("conn-0", types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + }, + []types.ConnectionPaths{ + {clientID, []string{connectionID}}, + }, + 0, + ), + expPass: false, + }, + { + name: "next connection sequence is not greater than maximum connection identifier sequence provided", + genState: types.NewGenesisState( + []types.IdentifiedConnection{ + types.NewIdentifiedConnection(types.FormatConnectionIdentifier(10), types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + }, + []types.ConnectionPaths{ + {clientID, []string{connectionID}}, + }, + 0, + ), + expPass: false, + }, } for _, tc := range testCases { diff --git a/x/ibc/core/04-channel/types/genesis.go b/x/ibc/core/04-channel/types/genesis.go index 28ab32f21..2c431e97b 100644 --- a/x/ibc/core/04-channel/types/genesis.go +++ b/x/ibc/core/04-channel/types/genesis.go @@ -74,12 +74,29 @@ func DefaultGenesisState() GenesisState { // Validate performs basic genesis state validation returning an error upon any // failure. func (gs GenesisState) Validate() error { + // keep track of the max sequence to ensure it is less than + // the next sequence used in creating connection identifers. 
+ var maxSequence uint64 = 0 + for i, channel := range gs.Channels { + sequence, err := ParseChannelSequence(channel.ChannelId) + if err != nil { + return err + } + + if sequence > maxSequence { + maxSequence = sequence + } + if err := channel.ValidateBasic(); err != nil { return fmt.Errorf("invalid channel %v channel index %d: %w", channel, i, err) } } + if maxSequence != 0 && maxSequence >= gs.NextChannelSequence { + return fmt.Errorf("next channel sequence %d must be greater than maximum sequence used in channel identifier %d", gs.NextChannelSequence, maxSequence) + } + for i, ack := range gs.Acknowledgements { if err := ack.Validate(); err != nil { return fmt.Errorf("invalid acknowledgement %v ack index %d: %w", ack, i, err) diff --git a/x/ibc/core/04-channel/types/genesis_test.go b/x/ibc/core/04-channel/types/genesis_test.go index 05e154296..a0d21007a 100644 --- a/x/ibc/core/04-channel/types/genesis_test.go +++ b/x/ibc/core/04-channel/types/genesis_test.go @@ -13,8 +13,8 @@ const ( testPort2 = "secondport" testConnectionIDA = "connectionidatob" - testChannel1 = "firstchannel" - testChannel2 = "secondchannel" + testChannel1 = "channel-0" + testChannel2 = "channel-1" testChannelOrder = types.ORDERED testChannelVersion = "1.0" @@ -66,7 +66,7 @@ func TestValidateGenesis(t *testing.T) { []types.PacketSequence{ types.NewPacketSequence(testPort2, testChannel2, 1), }, - 0, + 2, ), expPass: true, }, @@ -137,6 +137,80 @@ func TestValidateGenesis(t *testing.T) { }, expPass: false, }, + { + name: "invalid channel identifier", + genState: types.NewGenesisState( + []types.IdentifiedChannel{ + types.NewIdentifiedChannel( + testPort1, "chan-0", types.NewChannel( + types.INIT, testChannelOrder, counterparty2, []string{testConnectionIDA}, testChannelVersion, + ), + ), + types.NewIdentifiedChannel( + testPort2, testChannel2, types.NewChannel( + types.INIT, testChannelOrder, counterparty1, []string{testConnectionIDA}, testChannelVersion, + ), + ), + }, + []types.PacketState{ + types.NewPacketState(testPort2, testChannel2, 1, []byte("ack")), + }, + []types.PacketState{ + types.NewPacketState(testPort2, testChannel2, 1, []byte("")), + }, + []types.PacketState{ + types.NewPacketState(testPort1, testChannel1, 1, []byte("commit_hash")), + }, + []types.PacketSequence{ + types.NewPacketSequence(testPort1, testChannel1, 1), + }, + []types.PacketSequence{ + types.NewPacketSequence(testPort2, testChannel2, 1), + }, + []types.PacketSequence{ + types.NewPacketSequence(testPort2, testChannel2, 1), + }, + 0, + ), + expPass: false, + }, + { + name: "next channel sequence is less than maximum channel identifier sequence used", + genState: types.NewGenesisState( + []types.IdentifiedChannel{ + types.NewIdentifiedChannel( + testPort1, "channel-10", types.NewChannel( + types.INIT, testChannelOrder, counterparty2, []string{testConnectionIDA}, testChannelVersion, + ), + ), + types.NewIdentifiedChannel( + testPort2, testChannel2, types.NewChannel( + types.INIT, testChannelOrder, counterparty1, []string{testConnectionIDA}, testChannelVersion, + ), + ), + }, + []types.PacketState{ + types.NewPacketState(testPort2, testChannel2, 1, []byte("ack")), + }, + []types.PacketState{ + types.NewPacketState(testPort2, testChannel2, 1, []byte("")), + }, + []types.PacketState{ + types.NewPacketState(testPort1, testChannel1, 1, []byte("commit_hash")), + }, + []types.PacketSequence{ + types.NewPacketSequence(testPort1, testChannel1, 1), + }, + []types.PacketSequence{ + types.NewPacketSequence(testPort2, testChannel2, 1), + }, + 
[]types.PacketSequence{ + types.NewPacketSequence(testPort2, testChannel2, 1), + }, + 0, + ), + expPass: false, + }, } for _, tc := range testCases { diff --git a/x/ibc/core/genesis_test.go b/x/ibc/core/genesis_test.go index bc4c5834d..fa0200de7 100644 --- a/x/ibc/core/genesis_test.go +++ b/x/ibc/core/genesis_test.go @@ -22,16 +22,17 @@ import ( ) const ( - connectionID = "connectionidone" - clientID = "clientidone" - connectionID2 = "connectionidtwo" - clientID2 = "clientidtwo" + connectionID = "connection-0" + clientID = "07-tendermint-0" + connectionID2 = "connection-1" + clientID2 = "07-tendermin-1" + localhostID = exported.Localhost + "-1" port1 = "firstport" port2 = "secondport" - channel1 = "firstchannel" - channel2 = "secondchannel" + channel1 = "channel-0" + channel2 = "channel-1" ) var clientHeight = clienttypes.NewHeight(0, 10) @@ -79,7 +80,7 @@ func (suite *IBCTestSuite) TestValidateGenesis() { clientID, ibctmtypes.NewClientState(suite.chainA.ChainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), clienttypes.NewIdentifiedClientState( - exported.Localhost, localhosttypes.NewClientState("chaindID", clientHeight), + localhostID, localhosttypes.NewClientState("chaindID", clientHeight), ), }, []clienttypes.ClientConsensusStates{ @@ -97,7 +98,7 @@ func (suite *IBCTestSuite) TestValidateGenesis() { }, clienttypes.NewParams(exported.Tendermint, exported.Localhost), true, - 0, + 2, ), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ @@ -149,13 +150,13 @@ func (suite *IBCTestSuite) TestValidateGenesis() { clientID, ibctmtypes.NewClientState(suite.chainA.ChainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), ), clienttypes.NewIdentifiedClientState( - exported.Localhost, localhosttypes.NewClientState("(chaindID)", clienttypes.ZeroHeight()), + localhostID, localhosttypes.NewClientState("(chaindID)", clienttypes.ZeroHeight()), ), }, nil, clienttypes.NewParams(exported.Tendermint), false, - 0, + 2, ), ConnectionGenesis: connectiontypes.DefaultGenesisState(), }, From a8c3b4414de6eb0b88f5a6730d56f453556ad195 Mon Sep 17 00:00:00 2001 From: Emmanuel T Odeke Date: Mon, 7 Dec 2020 01:32:59 -0800 Subject: [PATCH 35/40] x/bank/keeper: fix a missing gRPC error in AllBalances (#8085) * x/bank/keeper: fix a missing gRPC error in AllBalances Fixes an incoherent return which had: (non-nil value, non-nil-non-grpc error) to (nil value, non-nil-gRPC error) and it is a follow-up of PR #7814. 
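As an illustration of what this buys callers (a sketch only — the helper name is
illustrative, and it assumes the standard google.golang.org/grpc `status` and
`codes` packages plus the generated QueryClient in x/bank/types), errors can now
be matched by gRPC code rather than by string comparison:

```go
import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
)

// isInvalidPaginationErr reports whether an AllBalances query failed with
// codes.InvalidArgument, e.g. because both an offset and a key were supplied.
func isInvalidPaginationErr(ctx context.Context, qc banktypes.QueryClient, req *banktypes.QueryAllBalancesRequest) bool {
	if _, err := qc.AllBalances(ctx, req); err != nil {
		st, ok := status.FromError(err)
		return ok && st.Code() == codes.InvalidArgument
	}
	return false
}
```

The pagination test is adjusted accordingly to assert on the full gRPC error
string, as shown in the diff below.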
* types/query: fix pagination test (#8096) Co-authored-by: Alessio Treglia --- types/query/pagination_test.go | 2 +- x/bank/keeper/grpc_query.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/types/query/pagination_test.go b/types/query/pagination_test.go index e24d7c489..f0e1377a1 100644 --- a/types/query/pagination_test.go +++ b/types/query/pagination_test.go @@ -159,7 +159,7 @@ func (s *paginationTestSuite) TestPagination() { request = types.NewQueryAllBalancesRequest(addr1, pageReq) res, err = queryClient.AllBalances(gocontext.Background(), request) s.Require().Error(err) - s.Require().Equal(err.Error(), "invalid request, either offset or key is expected, got both") + s.Require().Equal("rpc error: code = InvalidArgument desc = paginate: invalid request, either offset or key is expected, got both", err.Error()) s.T().Log("verify paginate with offset greater than total results") pageReq = &query.PageRequest{Offset: 300, Limit: defaultLimit, CountTotal: false} diff --git a/x/bank/keeper/grpc_query.go b/x/bank/keeper/grpc_query.go index a71d0d72c..bcf143b8a 100644 --- a/x/bank/keeper/grpc_query.go +++ b/x/bank/keeper/grpc_query.go @@ -72,7 +72,7 @@ func (k BaseKeeper) AllBalances(ctx context.Context, req *types.QueryAllBalances }) if err != nil { - return &types.QueryAllBalancesResponse{}, err + return nil, status.Errorf(codes.InvalidArgument, "paginate: %v", err) } return &types.QueryAllBalancesResponse{Balances: balances, Pagination: pageRes}, nil From 29091c14e838b3f2282e212be8b0b8bb5ae50c17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Dec 2020 11:17:09 +0000 Subject: [PATCH 36/40] build(deps): bump vuepress-theme-cosmos from 1.0.177 to 1.0.178 in /docs (#8098) Bumps [vuepress-theme-cosmos](https://github.com/cosmos/vuepress-theme-cosmos) from 1.0.177 to 1.0.178. 
- [Release notes](https://github.com/cosmos/vuepress-theme-cosmos/releases) - [Commits](https://github.com/cosmos/vuepress-theme-cosmos/commits) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Federico Kunze <31522760+fedekunze@users.noreply.github.com> --- docs/package-lock.json | 53 ++++++++++++++++++++++-------------------- docs/package.json | 2 +- 2 files changed, 29 insertions(+), 26 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 73740f8cd..df8920eee 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -1338,9 +1338,9 @@ }, "dependencies": { "core-js": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.0.tgz", - "integrity": "sha512-W2VYNB0nwQQE7tKS7HzXd7r2y/y2SVJl4ga6oH/dnaLFzM0o2lB2P3zCkWj5Wc/zyMYjtgd5Hmhk0ObkQFZOIA==" + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.1.tgz", + "integrity": "sha512-9Id2xHY1W7m8hCl8NkhQn5CufmF/WuR30BTRewvCXc1aZd3kMECwNZ69ndLbekKfakw9Rf2Xyc+QR6E7Gg+obg==" } } }, @@ -1511,9 +1511,9 @@ }, "dependencies": { "core-js": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.0.tgz", - "integrity": "sha512-W2VYNB0nwQQE7tKS7HzXd7r2y/y2SVJl4ga6oH/dnaLFzM0o2lB2P3zCkWj5Wc/zyMYjtgd5Hmhk0ObkQFZOIA==" + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.1.tgz", + "integrity": "sha512-9Id2xHY1W7m8hCl8NkhQn5CufmF/WuR30BTRewvCXc1aZd3kMECwNZ69ndLbekKfakw9Rf2Xyc+QR6E7Gg+obg==" } } }, @@ -2848,9 +2848,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001164", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001164.tgz", - "integrity": "sha512-G+A/tkf4bu0dSp9+duNiXc7bGds35DioCyC6vgK2m/rjA4Krpy5WeZgZyfH2f0wj2kI6yAWWucyap6oOwmY1mg==" + "version": "1.0.30001165", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001165.tgz", + "integrity": "sha512-8cEsSMwXfx7lWSUMA2s08z9dIgsnR5NAqjXP23stdsU3AUWkCr/rr4s4OFtHXn5XXr6+7kam3QFVoYyXNPdJPA==" }, "caseless": { "version": "0.12.0", @@ -3432,11 +3432,11 @@ "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==" }, "core-js-compat": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.8.0.tgz", - "integrity": "sha512-o9QKelQSxQMYWHXc/Gc4L8bx/4F7TTraE5rhuN8I7mKBt5dBIUpXpIR3omv70ebr8ST5R3PqbDQr+ZI3+Tt1FQ==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.8.1.tgz", + "integrity": "sha512-a16TLmy9NVD1rkjUGbwuyWkiDoN0FDpAwrfLONvHFQx0D9k7J9y0srwMT8QP/Z6HE3MIFaVynEeYwZwPX1o5RQ==", "requires": { - "browserslist": "^4.14.7", + "browserslist": "^4.15.0", "semver": "7.0.0" }, "dependencies": { @@ -4182,9 +4182,9 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "electron-to-chromium": { - "version": "1.3.615", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.615.tgz", - "integrity": "sha512-fNYTQXoUhNc6RmHDlGN4dgcLURSBIqQCN7ls6MuQ741+NJyLNRz8DxAC+pZpOKfRs6cfY0lv2kWdy8Oxf9j4+A==" + "version": "1.3.616", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.616.tgz", + "integrity": "sha512-CI8L38UN2BEnqXw3/oRIQTmde0LiSeqWSRlPA42ZTYgJQ8fYenzAM2Z3ni+jtILTcrs5aiXZCGJ96Pm+3/yGyQ==" }, "elliptic": { "version": "6.5.3", @@ -5628,9 +5628,12 @@ } }, "is-arguments": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", - "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.0.tgz", + "integrity": "sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg==", + "requires": { + "call-bind": "^1.0.0" + } }, "is-arrayish": { "version": "0.2.1", @@ -5792,9 +5795,9 @@ } }, "is-negative-zero": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.0.tgz", - "integrity": "sha1-lVOxIbD6wohp2p7UWeIMdUN4hGE=" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", + "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==" }, "is-npm": { "version": "4.0.0", @@ -10399,9 +10402,9 @@ } }, "vuepress-theme-cosmos": { - "version": "1.0.177", - "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.177.tgz", - "integrity": "sha512-PCTY/5g9I3+4CyJBdPg7hoGqSPsf+noB8qWd/ZmBmGTg7ST2zAawmkEf/lYlCmMEHPRKF+lDKvQD+cTEbZR74g==", + "version": "1.0.178", + "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.178.tgz", + "integrity": "sha512-RkCO3BbS9WuS4VwZtu2e2EHJIHpP6dwyKr3OYbFvdBLwybk64TKHf3gwOiQ8Vfpm5pij5YCd0WKGQOHU8duMKQ==", "requires": { "@cosmos-ui/vue": "^0.35.0", "@vuepress/plugin-google-analytics": "1.7.1", diff --git a/docs/package.json b/docs/package.json index ab813e4d7..518f685a5 100644 --- a/docs/package.json +++ b/docs/package.json @@ -14,6 +14,6 @@ "author": "", "license": "ISC", "dependencies": { - "vuepress-theme-cosmos": "^1.0.177" + "vuepress-theme-cosmos": "^1.0.178" } } From de114773eeb5419aa3a12261ce56e6b3e3df15a3 Mon Sep 17 00:00:00 2001 From: Mark Jackson Date: Mon, 7 Dec 2020 04:26:09 -0700 Subject: [PATCH 37/40] fix typo (#8097) Co-authored-by: Jonathan Gimeno Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- x/crisis/keeper/keeper.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x/crisis/keeper/keeper.go b/x/crisis/keeper/keeper.go index 818e272d9..1cfa29d5e 100644 --- a/x/crisis/keeper/keeper.go +++ b/x/crisis/keeper/keeper.go @@ -76,7 +76,7 @@ func (k Keeper) AssertInvariants(ctx sdk.Context) { invarRoutes := k.Routes() n := len(invarRoutes) for i, ir := range invarRoutes { - logger.Info("asserting cirisis invariants", "inv", fmt.Sprint(i, "/", n)) + logger.Info("asserting crisis invariants", "inv", fmt.Sprint(i, "/", n)) if res, stop := ir.Invar(ctx); stop { // TODO: Include app name as part of context to allow for this to be // variable. 
From 971d253214ba3aec8064e6260874a860cbfb3d99 Mon Sep 17 00:00:00 2001 From: Aditya Date: Mon, 7 Dec 2020 12:13:06 +0000 Subject: [PATCH 38/40] Implement Connection-specified delay (#8069) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * start with initialization and metadata * start by fixing delay period during connection handshake * fix connection handshake tests * add delay period logic to packet verification * proto format * fix err issue * appease linter * document upgrade special case * Update x/ibc/light-clients/07-tendermint/types/client_state.go Co-authored-by: colin axnér <25233464+colin-axner@users.noreply.github.com> * Update x/ibc/light-clients/07-tendermint/types/store.go * add sanity check * fix build * fix tests Co-authored-by: colin axnér <25233464+colin-axner@users.noreply.github.com> Co-authored-by: Federico Kunze <31522760+fedekunze@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> Co-authored-by: Colin Axner --- proto/ibc/core/client/v1/genesis.proto | 27 +- proto/ibc/core/connection/v1/connection.proto | 12 +- proto/ibc/core/connection/v1/tx.proto | 18 +- x/ibc/core/02-client/genesis.go | 14 +- x/ibc/core/02-client/keeper/client.go | 12 +- x/ibc/core/02-client/keeper/keeper.go | 43 ++ x/ibc/core/02-client/keeper/keeper_test.go | 31 + x/ibc/core/02-client/types/errors.go | 39 +- x/ibc/core/02-client/types/genesis.go | 67 +- x/ibc/core/02-client/types/genesis.pb.go | 594 +++++++++++++++++- x/ibc/core/02-client/types/genesis_test.go | 97 ++- x/ibc/core/03-connection/client/cli/tx.go | 19 +- x/ibc/core/03-connection/genesis.go | 2 +- .../03-connection/keeper/grpc_query_test.go | 8 +- x/ibc/core/03-connection/keeper/handshake.go | 17 +- .../03-connection/keeper/handshake_test.go | 30 +- .../core/03-connection/keeper/keeper_test.go | 4 +- x/ibc/core/03-connection/keeper/verify.go | 4 + .../core/03-connection/keeper/verify_test.go | 64 +- x/ibc/core/03-connection/types/connection.go | 11 +- .../core/03-connection/types/connection.pb.go | 148 +++-- .../03-connection/types/connection_test.go | 14 +- .../core/03-connection/types/genesis_test.go | 12 +- x/ibc/core/03-connection/types/msgs.go | 8 +- x/ibc/core/03-connection/types/msgs_test.go | 50 +- x/ibc/core/03-connection/types/tx.pb.go | 215 ++++--- x/ibc/core/exported/client.go | 25 + x/ibc/core/exported/connection.go | 1 + x/ibc/core/genesis_test.go | 33 +- x/ibc/core/keeper/msg_server.go | 4 +- .../06-solomachine/types/client_state.go | 24 + .../06-solomachine/types/client_state_test.go | 60 +- .../06-solomachine/types/codec_test.go | 4 +- .../07-tendermint/types/client_state.go | 57 ++ .../07-tendermint/types/client_state_test.go | 109 +++- .../07-tendermint/types/consensus_state.go | 7 +- .../07-tendermint/types/errors.go | 10 +- .../07-tendermint/types/genesis.go | 21 + .../07-tendermint/types/genesis_test.go | 38 ++ .../07-tendermint/types/proposal_handle.go | 8 +- .../07-tendermint/types/store.go | 52 ++ .../07-tendermint/types/store_test.go | 37 ++ .../07-tendermint/types/update.go | 10 +- .../07-tendermint/types/upgrade.go | 2 + .../09-localhost/types/client_state.go | 21 + .../09-localhost/types/client_state_test.go | 47 +- x/ibc/testing/chain.go | 11 +- x/ibc/testing/coordinator.go | 29 +- 48 files changed, 1852 insertions(+), 318 deletions(-) create mode 100644 x/ibc/light-clients/07-tendermint/types/genesis.go create mode 100644 x/ibc/light-clients/07-tendermint/types/genesis_test.go diff --git 
a/proto/ibc/core/client/v1/genesis.proto b/proto/ibc/core/client/v1/genesis.proto index 06b4bbd06..16febbcee 100644 --- a/proto/ibc/core/client/v1/genesis.proto +++ b/proto/ibc/core/client/v1/genesis.proto @@ -17,9 +17,30 @@ message GenesisState { (gogoproto.castrepeated) = "ClientsConsensusStates", (gogoproto.moretags) = "yaml:\"clients_consensus\"" ]; - Params params = 3 [(gogoproto.nullable) = false]; + // metadata from each client + repeated IdentifiedGenesisMetadata clients_metadata = 3 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"clients_metadata\""]; + Params params = 4 [(gogoproto.nullable) = false]; // create localhost on initialization - bool create_localhost = 4 [(gogoproto.moretags) = "yaml:\"create_localhost\""]; + bool create_localhost = 5 [(gogoproto.moretags) = "yaml:\"create_localhost\""]; // the sequence for the next generated client identifier - uint64 next_client_sequence = 5 [(gogoproto.moretags) = "yaml:\"next_client_sequence\""]; + uint64 next_client_sequence = 6 [(gogoproto.moretags) = "yaml:\"next_client_sequence\""]; } + +// GenesisMetadata defines the genesis type for metadata that clients may return +// with ExportMetadata +message GenesisMetadata { + option (gogoproto.goproto_getters) = false; + + // store key of metadata without clientID-prefix + bytes key = 1; + // metadata value + bytes value = 2; +} + +// IdentifiedGenesisMetadata has the client metadata with the corresponding client id. +message IdentifiedGenesisMetadata { + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + repeated GenesisMetadata client_metadata = 2 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"client_metadata\""]; +} \ No newline at end of file diff --git a/proto/ibc/core/connection/v1/connection.proto b/proto/ibc/core/connection/v1/connection.proto index 0cf4f2092..d21e59516 100644 --- a/proto/ibc/core/connection/v1/connection.proto +++ b/proto/ibc/core/connection/v1/connection.proto @@ -10,19 +10,23 @@ import "ibc/core/commitment/v1/commitment.proto"; // https://github.com/cosmos/ics/tree/master/spec/ics-003-connection-semantics#data-structures // ConnectionEnd defines a stateful object on a chain connected to another -// separate one. NOTE: there must only be 2 defined ConnectionEnds to establish +// separate one. +// NOTE: there must only be 2 defined ConnectionEnds to establish // a connection between two chains. message ConnectionEnd { option (gogoproto.goproto_getters) = false; // client associated with this connection. string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; // IBC version which can be utilised to determine encodings or protocols for - // channels or packets utilising this connection + // channels or packets utilising this connection. repeated Version versions = 2; // current state of the connection end. State state = 3; // counterparty chain associated with this connection. Counterparty counterparty = 4 [(gogoproto.nullable) = false]; + // delay period that must pass before a consensus state can be used for packet-verification + // NOTE: delay period logic is only implemented by some clients. + uint64 delay_period = 5 [(gogoproto.moretags) = "yaml:\"delay_period\""]; } // IdentifiedConnection defines a connection with additional connection @@ -40,6 +44,8 @@ message IdentifiedConnection { State state = 4; // counterparty chain associated with this connection. Counterparty counterparty = 5 [(gogoproto.nullable) = false]; + // delay period associated with this connection. 
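+  // For example, with a 30 second delay period, packet proofs against a given
+  // consensus state are only accepted once 30 seconds have passed since that
+  // consensus state was stored on this chain (its processed time).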
+ uint64 delay_period = 6 [(gogoproto.moretags) = "yaml:\"delay_period\""]; } // State defines if a connection is in one of the following states: @@ -68,7 +74,7 @@ message Counterparty { // identifies the connection end on the counterparty chain associated with a // given connection. string connection_id = 2 [(gogoproto.moretags) = "yaml:\"connection_id\""]; - // commitment merkle prefix of the counterparty chain + // commitment merkle prefix of the counterparty chain. ibc.core.commitment.v1.MerklePrefix prefix = 3 [(gogoproto.nullable) = false]; } diff --git a/proto/ibc/core/connection/v1/tx.proto b/proto/ibc/core/connection/v1/tx.proto index 21c283545..19b40c69c 100644 --- a/proto/ibc/core/connection/v1/tx.proto +++ b/proto/ibc/core/connection/v1/tx.proto @@ -32,7 +32,8 @@ message MsgConnectionOpenInit { string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; Counterparty counterparty = 2 [(gogoproto.nullable) = false]; Version version = 3; - string signer = 4; + uint64 delay_period = 4 [(gogoproto.moretags) = "yaml:\"delay_period\""]; + string signer = 5; } // MsgConnectionOpenInitResponse defines the Msg/ConnectionOpenInit response type. @@ -50,19 +51,20 @@ message MsgConnectionOpenTry { string previous_connection_id = 2 [(gogoproto.moretags) = "yaml:\"previous_connection_id\""]; google.protobuf.Any client_state = 3 [(gogoproto.moretags) = "yaml:\"client_state\""]; Counterparty counterparty = 4 [(gogoproto.nullable) = false]; - repeated Version counterparty_versions = 5 [(gogoproto.moretags) = "yaml:\"counterparty_versions\""]; - ibc.core.client.v1.Height proof_height = 6 + uint64 delay_period = 5 [(gogoproto.moretags) = "yaml:\"delay_period\""]; + repeated Version counterparty_versions = 6 [(gogoproto.moretags) = "yaml:\"counterparty_versions\""]; + ibc.core.client.v1.Height proof_height = 7 [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; // proof of the initialization the connection on Chain A: `UNITIALIZED -> // INIT` - bytes proof_init = 7 [(gogoproto.moretags) = "yaml:\"proof_init\""]; + bytes proof_init = 8 [(gogoproto.moretags) = "yaml:\"proof_init\""]; // proof of client state included in message - bytes proof_client = 8 [(gogoproto.moretags) = "yaml:\"proof_client\""]; + bytes proof_client = 9 [(gogoproto.moretags) = "yaml:\"proof_client\""]; // proof of client consensus state - bytes proof_consensus = 9 [(gogoproto.moretags) = "yaml:\"proof_consensus\""]; - ibc.core.client.v1.Height consensus_height = 10 + bytes proof_consensus = 10 [(gogoproto.moretags) = "yaml:\"proof_consensus\""]; + ibc.core.client.v1.Height consensus_height = 11 [(gogoproto.moretags) = "yaml:\"consensus_height\"", (gogoproto.nullable) = false]; - string signer = 11; + string signer = 12; } // MsgConnectionOpenTryResponse defines the Msg/ConnectionOpenTry response type. diff --git a/x/ibc/core/02-client/genesis.go b/x/ibc/core/02-client/genesis.go index 6e77b20e3..26635f078 100644 --- a/x/ibc/core/02-client/genesis.go +++ b/x/ibc/core/02-client/genesis.go @@ -14,6 +14,12 @@ import ( func InitGenesis(ctx sdk.Context, k keeper.Keeper, gs types.GenesisState) { k.SetParams(ctx, gs.Params) + // Set all client metadata first. This will allow client keeper to overwrite client and consensus state keys + // if clients accidentally write to ClientKeeper reserved keys. 
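+	// For example, imported Tendermint ProcessedTime entries are written here first,
+	// and the client and consensus states set below take precedence for any
+	// conflicting keys.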
+ if len(gs.ClientsMetadata) != 0 { + k.SetAllClientMetadata(ctx, gs.ClientsMetadata) + } + for _, client := range gs.Clients { cs, ok := client.ClientState.GetCachedValue().(exported.ClientState) if !ok { @@ -48,8 +54,14 @@ func InitGenesis(ctx sdk.Context, k keeper.Keeper, gs types.GenesisState) { // NOTE: CreateLocalhost should always be false on export since a // created localhost will be included in the exported clients. func ExportGenesis(ctx sdk.Context, k keeper.Keeper) types.GenesisState { + genClients := k.GetAllGenesisClients(ctx) + clientsMetadata, err := k.GetAllClientMetadata(ctx, genClients) + if err != nil { + panic(err) + } return types.GenesisState{ - Clients: k.GetAllGenesisClients(ctx), + Clients: genClients, + ClientsMetadata: clientsMetadata, ClientsConsensus: k.GetAllConsensusStates(ctx), Params: k.GetParams(ctx), CreateLocalhost: false, diff --git a/x/ibc/core/02-client/keeper/client.go b/x/ibc/core/02-client/keeper/client.go index 49d5a04eb..672dcf5d7 100644 --- a/x/ibc/core/02-client/keeper/client.go +++ b/x/ibc/core/02-client/keeper/client.go @@ -12,8 +12,6 @@ import ( // CreateClient creates a new client state and populates it with a given consensus // state as defined in https://github.com/cosmos/ics/tree/master/spec/ics-002-client-semantics#create -// -// CONTRACT: ClientState was constructed correctly from given initial consensusState func (k Keeper) CreateClient( ctx sdk.Context, clientState exported.ClientState, consensusState exported.ConsensusState, ) (string, error) { @@ -27,12 +25,20 @@ func (k Keeper) CreateClient( clientID := k.GenerateClientIdentifier(ctx, clientState.ClientType()) + k.SetClientState(ctx, clientID, clientState) + k.Logger(ctx).Info("client created at height", "client-id", clientID, "height", clientState.GetLatestHeight().String()) + + // verifies initial consensus state against client state and initializes client store with any client-specific metadata + // e.g. set ProcessedTime in Tendermint clients + if err := clientState.Initialize(ctx, k.cdc, k.ClientStore(ctx, clientID), consensusState); err != nil { + return "", err + } + // check if consensus state is nil in case the created client is Localhost if consensusState != nil { k.SetClientConsensusState(ctx, clientID, clientState.GetLatestHeight(), consensusState) } - k.SetClientState(ctx, clientID, clientState) k.Logger(ctx).Info("client created at height", "client-id", clientID, "height", clientState.GetLatestHeight().String()) defer func() { diff --git a/x/ibc/core/02-client/keeper/keeper.go b/x/ibc/core/02-client/keeper/keeper.go index bae7d3627..67c5c0658 100644 --- a/x/ibc/core/02-client/keeper/keeper.go +++ b/x/ibc/core/02-client/keeper/keeper.go @@ -152,6 +152,49 @@ func (k Keeper) GetAllGenesisClients(ctx sdk.Context) types.IdentifiedClientStat return genClients.Sort() } +// GetAllClientMetadata will take a list of IdentifiedClientState and return a list +// of IdentifiedGenesisMetadata necessary for exporting and importing client metadata +// into the client store. 
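+// For example, a Tendermint client exports its ProcessedTime entries (keyed by
+// consensus height) via ExportMetadata so that they survive a genesis
+// export/import round trip.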
+func (k Keeper) GetAllClientMetadata(ctx sdk.Context, genClients []types.IdentifiedClientState) ([]types.IdentifiedGenesisMetadata, error) { + genMetadata := make([]types.IdentifiedGenesisMetadata, 0) + for _, ic := range genClients { + cs, err := types.UnpackClientState(ic.ClientState) + if err != nil { + return nil, err + } + gms := cs.ExportMetadata(k.ClientStore(ctx, ic.ClientId)) + if len(gms) == 0 { + continue + } + clientMetadata := make([]types.GenesisMetadata, len(gms)) + for i, metadata := range gms { + cmd, ok := metadata.(types.GenesisMetadata) + if !ok { + return nil, sdkerrors.Wrapf(types.ErrInvalidClientMetadata, "expected metadata type: %T, got: %T", + types.GenesisMetadata{}, cmd) + } + clientMetadata[i] = cmd + } + genMetadata = append(genMetadata, types.NewIdentifiedGenesisMetadata( + ic.ClientId, + clientMetadata, + )) + } + return genMetadata, nil +} + +// SetAllClientMetadata takes a list of IdentifiedGenesisMetadata and stores all of the metadata in the client store at the appropriate paths. +func (k Keeper) SetAllClientMetadata(ctx sdk.Context, genMetadata []types.IdentifiedGenesisMetadata) { + for _, igm := range genMetadata { + // create client store + store := k.ClientStore(ctx, igm.ClientId) + // set all metadata kv pairs in client store + for _, md := range igm.ClientMetadata { + store.Set(md.GetKey(), md.GetValue()) + } + } +} + // GetAllConsensusStates returns all stored client consensus states. func (k Keeper) GetAllConsensusStates(ctx sdk.Context) types.ClientsConsensusStates { clientConsStates := make(types.ClientsConsensusStates, 0) diff --git a/x/ibc/core/02-client/keeper/keeper_test.go b/x/ibc/core/02-client/keeper/keeper_test.go index ec3c0229c..c22e80cc9 100644 --- a/x/ibc/core/02-client/keeper/keeper_test.go +++ b/x/ibc/core/02-client/keeper/keeper_test.go @@ -258,6 +258,37 @@ func (suite KeeperTestSuite) TestGetAllGenesisClients() { suite.Require().Equal(expGenClients.Sort(), genClients) } +func (suite KeeperTestSuite) TestGetAllGenesisMetadata() { + expectedGenMetadata := []types.IdentifiedGenesisMetadata{ + types.NewIdentifiedGenesisMetadata( + "clientA", + []types.GenesisMetadata{ + types.NewGenesisMetadata(ibctmtypes.ProcessedTimeKey(types.NewHeight(0, 1)), []byte("foo")), + types.NewGenesisMetadata(ibctmtypes.ProcessedTimeKey(types.NewHeight(0, 2)), []byte("bar")), + types.NewGenesisMetadata(ibctmtypes.ProcessedTimeKey(types.NewHeight(0, 3)), []byte("baz")), + }, + ), + types.NewIdentifiedGenesisMetadata( + "clientB", + []types.GenesisMetadata{ + types.NewGenesisMetadata(ibctmtypes.ProcessedTimeKey(types.NewHeight(1, 100)), []byte("val1")), + types.NewGenesisMetadata(ibctmtypes.ProcessedTimeKey(types.NewHeight(2, 300)), []byte("val2")), + }, + ), + } + + genClients := []types.IdentifiedClientState{ + types.NewIdentifiedClientState("clientA", &ibctmtypes.ClientState{}), types.NewIdentifiedClientState("clientB", &ibctmtypes.ClientState{}), + types.NewIdentifiedClientState("clientC", &ibctmtypes.ClientState{}), types.NewIdentifiedClientState("clientD", &localhosttypes.ClientState{}), + } + + suite.chainA.App.IBCKeeper.ClientKeeper.SetAllClientMetadata(suite.chainA.GetContext(), expectedGenMetadata) + + actualGenMetadata, err := suite.chainA.App.IBCKeeper.ClientKeeper.GetAllClientMetadata(suite.chainA.GetContext(), genClients) + suite.Require().NoError(err, "get client metadata returned error unexpectedly") + suite.Require().Equal(expectedGenMetadata, actualGenMetadata, "retrieved metadata is unexpected") +} + func (suite KeeperTestSuite) 
TestGetConsensusState() { suite.ctx = suite.ctx.WithBlockHeight(10) cases := []struct { diff --git a/x/ibc/core/02-client/types/errors.go b/x/ibc/core/02-client/types/errors.go index ca0131823..09dc92959 100644 --- a/x/ibc/core/02-client/types/errors.go +++ b/x/ibc/core/02-client/types/errors.go @@ -10,23 +10,24 @@ var ( ErrInvalidClient = sdkerrors.Register(SubModuleName, 3, "light client is invalid") ErrClientNotFound = sdkerrors.Register(SubModuleName, 4, "light client not found") ErrClientFrozen = sdkerrors.Register(SubModuleName, 5, "light client is frozen due to misbehaviour") - ErrConsensusStateNotFound = sdkerrors.Register(SubModuleName, 6, "consensus state not found") - ErrInvalidConsensus = sdkerrors.Register(SubModuleName, 7, "invalid consensus state") - ErrClientTypeNotFound = sdkerrors.Register(SubModuleName, 8, "client type not found") - ErrInvalidClientType = sdkerrors.Register(SubModuleName, 9, "invalid client type") - ErrRootNotFound = sdkerrors.Register(SubModuleName, 10, "commitment root not found") - ErrInvalidHeader = sdkerrors.Register(SubModuleName, 11, "invalid client header") - ErrInvalidMisbehaviour = sdkerrors.Register(SubModuleName, 12, "invalid light client misbehaviour") - ErrFailedClientStateVerification = sdkerrors.Register(SubModuleName, 13, "client state verification failed") - ErrFailedClientConsensusStateVerification = sdkerrors.Register(SubModuleName, 14, "client consensus state verification failed") - ErrFailedConnectionStateVerification = sdkerrors.Register(SubModuleName, 15, "connection state verification failed") - ErrFailedChannelStateVerification = sdkerrors.Register(SubModuleName, 16, "channel state verification failed") - ErrFailedPacketCommitmentVerification = sdkerrors.Register(SubModuleName, 17, "packet commitment verification failed") - ErrFailedPacketAckVerification = sdkerrors.Register(SubModuleName, 18, "packet acknowledgement verification failed") - ErrFailedPacketReceiptVerification = sdkerrors.Register(SubModuleName, 19, "packet receipt verification failed") - ErrFailedNextSeqRecvVerification = sdkerrors.Register(SubModuleName, 20, "next sequence receive verification failed") - ErrSelfConsensusStateNotFound = sdkerrors.Register(SubModuleName, 21, "self consensus state not found") - ErrUpdateClientFailed = sdkerrors.Register(SubModuleName, 22, "unable to update light client") - ErrInvalidUpdateClientProposal = sdkerrors.Register(SubModuleName, 23, "invalid update client proposal") - ErrInvalidUpgradeClient = sdkerrors.Register(SubModuleName, 24, "invalid client upgrade") + ErrInvalidClientMetadata = sdkerrors.Register(SubModuleName, 6, "invalid client metadata") + ErrConsensusStateNotFound = sdkerrors.Register(SubModuleName, 7, "consensus state not found") + ErrInvalidConsensus = sdkerrors.Register(SubModuleName, 8, "invalid consensus state") + ErrClientTypeNotFound = sdkerrors.Register(SubModuleName, 9, "client type not found") + ErrInvalidClientType = sdkerrors.Register(SubModuleName, 10, "invalid client type") + ErrRootNotFound = sdkerrors.Register(SubModuleName, 11, "commitment root not found") + ErrInvalidHeader = sdkerrors.Register(SubModuleName, 12, "invalid client header") + ErrInvalidMisbehaviour = sdkerrors.Register(SubModuleName, 13, "invalid light client misbehaviour") + ErrFailedClientStateVerification = sdkerrors.Register(SubModuleName, 14, "client state verification failed") + ErrFailedClientConsensusStateVerification = sdkerrors.Register(SubModuleName, 15, "client consensus state verification failed") + 
ErrFailedConnectionStateVerification = sdkerrors.Register(SubModuleName, 16, "connection state verification failed") + ErrFailedChannelStateVerification = sdkerrors.Register(SubModuleName, 17, "channel state verification failed") + ErrFailedPacketCommitmentVerification = sdkerrors.Register(SubModuleName, 18, "packet commitment verification failed") + ErrFailedPacketAckVerification = sdkerrors.Register(SubModuleName, 19, "packet acknowledgement verification failed") + ErrFailedPacketReceiptVerification = sdkerrors.Register(SubModuleName, 20, "packet receipt verification failed") + ErrFailedNextSeqRecvVerification = sdkerrors.Register(SubModuleName, 21, "next sequence receive verification failed") + ErrSelfConsensusStateNotFound = sdkerrors.Register(SubModuleName, 22, "self consensus state not found") + ErrUpdateClientFailed = sdkerrors.Register(SubModuleName, 23, "unable to update light client") + ErrInvalidUpdateClientProposal = sdkerrors.Register(SubModuleName, 24, "invalid update client proposal") + ErrInvalidUpgradeClient = sdkerrors.Register(SubModuleName, 25, "invalid client upgrade") ) diff --git a/x/ibc/core/02-client/types/genesis.go b/x/ibc/core/02-client/types/genesis.go index 18f33160c..3f197208e 100644 --- a/x/ibc/core/02-client/types/genesis.go +++ b/x/ibc/core/02-client/types/genesis.go @@ -16,7 +16,10 @@ var ( _ codectypes.UnpackInterfacesMessage = GenesisState{} ) -var _ sort.Interface = ClientsConsensusStates{} +var ( + _ sort.Interface = ClientsConsensusStates{} + _ exported.GenesisMetadata = GenesisMetadata{} +) // ClientsConsensusStates defines a slice of ClientConsensusStates that supports the sort interface type ClientsConsensusStates []ClientConsensusStates @@ -66,12 +69,13 @@ func (ccs ClientConsensusStates) UnpackInterfaces(unpacker codectypes.AnyUnpacke // NewGenesisState creates a GenesisState instance. 
func NewGenesisState( - clients []IdentifiedClientState, clientsConsensus ClientsConsensusStates, + clients []IdentifiedClientState, clientsConsensus ClientsConsensusStates, clientsMetadata []IdentifiedGenesisMetadata, params Params, createLocalhost bool, nextClientSequence uint64, ) GenesisState { return GenesisState{ Clients: clients, ClientsConsensus: clientsConsensus, + ClientsMetadata: clientsMetadata, Params: params, CreateLocalhost: createLocalhost, NextClientSequence: nextClientSequence, @@ -151,14 +155,14 @@ func (gs GenesisState) Validate() error { validClients[client.ClientId] = clientState.ClientType() } - for i, cc := range gs.ClientsConsensus { + for _, cc := range gs.ClientsConsensus { // check that consensus state is for a client in the genesis clients list clientType, ok := validClients[cc.ClientId] if !ok { return fmt.Errorf("consensus state in genesis has a client id %s that does not map to a genesis client", cc.ClientId) } - for _, consensusState := range cc.ConsensusStates { + for i, consensusState := range cc.ConsensusStates { if consensusState.Height.IsZero() { return fmt.Errorf("consensus state height cannot be zero") } @@ -169,7 +173,7 @@ func (gs GenesisState) Validate() error { } if err := cs.ValidateBasic(); err != nil { - return fmt.Errorf("invalid client consensus state %v index %d: %w", cs, i, err) + return fmt.Errorf("invalid client consensus state %v clientID %s index %d: %w", cs, cc.ClientId, i, err) } // ensure consensus state type matches client state type @@ -178,6 +182,21 @@ func (gs GenesisState) Validate() error { } } + } + + for _, clientMetadata := range gs.ClientsMetadata { + // check that metadata is for a client in the genesis clients list + _, ok := validClients[clientMetadata.ClientId] + if !ok { + return fmt.Errorf("metadata in genesis has a client id %s that does not map to a genesis client", clientMetadata.ClientId) + } + + for i, gm := range clientMetadata.ClientMetadata { + if err := gm.Validate(); err != nil { + return fmt.Errorf("invalid client metadata %v clientID %s index %d: %w", gm, clientMetadata.ClientId, i, err) + } + + } } @@ -191,3 +210,41 @@ func (gs GenesisState) Validate() error { return nil } + +// NewGenesisMetadata is a constructor for GenesisMetadata +func NewGenesisMetadata(key, val []byte) GenesisMetadata { + return GenesisMetadata{ + Key: key, + Value: val, + } +} + +// GetKey returns the key of metadata. Implements exported.GenesisMetadata interface. +func (gm GenesisMetadata) GetKey() []byte { + return gm.Key +} + +// GetValue returns the value of metadata. Implements exported.GenesisMetadata interface. +func (gm GenesisMetadata) GetValue() []byte { + return gm.Value +} + +// Validate ensures key and value of metadata are not empty +func (gm GenesisMetadata) Validate() error { + if len(gm.Key) == 0 { + return fmt.Errorf("genesis metadata key cannot be empty") + } + if len(gm.Value) == 0 { + return fmt.Errorf("genesis metadata value cannot be empty") + } + return nil +} + +// NewIdentifiedGenesisMetadata takes in a client ID and list of genesis metadata for that client +// and constructs a new IdentifiedGenesisMetadata. 
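+//
+// For example:
+//
+//	NewIdentifiedGenesisMetadata("07-tendermint-0", []GenesisMetadata{
+//		NewGenesisMetadata([]byte("key"), []byte("value")),
+//	})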
+func NewIdentifiedGenesisMetadata(clientID string, gms []GenesisMetadata) IdentifiedGenesisMetadata { + return IdentifiedGenesisMetadata{ + ClientId: clientID, + ClientMetadata: gms, + } +} diff --git a/x/ibc/core/02-client/types/genesis.pb.go b/x/ibc/core/02-client/types/genesis.pb.go index becfa1bab..07f996ac2 100644 --- a/x/ibc/core/02-client/types/genesis.pb.go +++ b/x/ibc/core/02-client/types/genesis.pb.go @@ -29,11 +29,13 @@ type GenesisState struct { Clients IdentifiedClientStates `protobuf:"bytes,1,rep,name=clients,proto3,castrepeated=IdentifiedClientStates" json:"clients"` // consensus states from each client ClientsConsensus ClientsConsensusStates `protobuf:"bytes,2,rep,name=clients_consensus,json=clientsConsensus,proto3,castrepeated=ClientsConsensusStates" json:"clients_consensus" yaml:"clients_consensus"` - Params Params `protobuf:"bytes,3,opt,name=params,proto3" json:"params"` + // metadata from each client + ClientsMetadata []IdentifiedGenesisMetadata `protobuf:"bytes,3,rep,name=clients_metadata,json=clientsMetadata,proto3" json:"clients_metadata" yaml:"clients_metadata"` + Params Params `protobuf:"bytes,4,opt,name=params,proto3" json:"params"` // create localhost on initialization - CreateLocalhost bool `protobuf:"varint,4,opt,name=create_localhost,json=createLocalhost,proto3" json:"create_localhost,omitempty" yaml:"create_localhost"` + CreateLocalhost bool `protobuf:"varint,5,opt,name=create_localhost,json=createLocalhost,proto3" json:"create_localhost,omitempty" yaml:"create_localhost"` // the sequence for the next generated client identifier - NextClientSequence uint64 `protobuf:"varint,5,opt,name=next_client_sequence,json=nextClientSequence,proto3" json:"next_client_sequence,omitempty" yaml:"next_client_sequence"` + NextClientSequence uint64 `protobuf:"varint,6,opt,name=next_client_sequence,json=nextClientSequence,proto3" json:"next_client_sequence,omitempty" yaml:"next_client_sequence"` } func (m *GenesisState) Reset() { *m = GenesisState{} } @@ -83,6 +85,13 @@ func (m *GenesisState) GetClientsConsensus() ClientsConsensusStates { return nil } +func (m *GenesisState) GetClientsMetadata() []IdentifiedGenesisMetadata { + if m != nil { + return m.ClientsMetadata + } + return nil +} + func (m *GenesisState) GetParams() Params { if m != nil { return m.Params @@ -104,39 +113,145 @@ func (m *GenesisState) GetNextClientSequence() uint64 { return 0 } +// GenesisMetadata defines the genesis type for metadata that clients may return +// with ExportMetadata +type GenesisMetadata struct { + // store key of metadata without clientID-prefix + Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // metadata value + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (m *GenesisMetadata) Reset() { *m = GenesisMetadata{} } +func (m *GenesisMetadata) String() string { return proto.CompactTextString(m) } +func (*GenesisMetadata) ProtoMessage() {} +func (*GenesisMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bcd0c0f1f2e6a91a, []int{1} +} +func (m *GenesisMetadata) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *GenesisMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_GenesisMetadata.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *GenesisMetadata) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_GenesisMetadata.Merge(m, src) +} +func (m *GenesisMetadata) XXX_Size() int { + return m.Size() +} +func (m *GenesisMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_GenesisMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_GenesisMetadata proto.InternalMessageInfo + +// IdentifiedGenesisMetadata has the client metadata with the corresponding client id. +type IdentifiedGenesisMetadata struct { + ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty" yaml:"client_id"` + ClientMetadata []GenesisMetadata `protobuf:"bytes,2,rep,name=client_metadata,json=clientMetadata,proto3" json:"client_metadata" yaml:"client_metadata"` +} + +func (m *IdentifiedGenesisMetadata) Reset() { *m = IdentifiedGenesisMetadata{} } +func (m *IdentifiedGenesisMetadata) String() string { return proto.CompactTextString(m) } +func (*IdentifiedGenesisMetadata) ProtoMessage() {} +func (*IdentifiedGenesisMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bcd0c0f1f2e6a91a, []int{2} +} +func (m *IdentifiedGenesisMetadata) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *IdentifiedGenesisMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_IdentifiedGenesisMetadata.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *IdentifiedGenesisMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_IdentifiedGenesisMetadata.Merge(m, src) +} +func (m *IdentifiedGenesisMetadata) XXX_Size() int { + return m.Size() +} +func (m *IdentifiedGenesisMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_IdentifiedGenesisMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_IdentifiedGenesisMetadata proto.InternalMessageInfo + +func (m *IdentifiedGenesisMetadata) GetClientId() string { + if m != nil { + return m.ClientId + } + return "" +} + +func (m *IdentifiedGenesisMetadata) GetClientMetadata() []GenesisMetadata { + if m != nil { + return m.ClientMetadata + } + return nil +} + func init() { proto.RegisterType((*GenesisState)(nil), "ibc.core.client.v1.GenesisState") + proto.RegisterType((*GenesisMetadata)(nil), "ibc.core.client.v1.GenesisMetadata") + proto.RegisterType((*IdentifiedGenesisMetadata)(nil), "ibc.core.client.v1.IdentifiedGenesisMetadata") } func init() { proto.RegisterFile("ibc/core/client/v1/genesis.proto", fileDescriptor_bcd0c0f1f2e6a91a) } var fileDescriptor_bcd0c0f1f2e6a91a = []byte{ - // 400 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xc1, 0x8e, 0x9a, 0x40, - 0x1c, 0xc6, 0x99, 0x6a, 0x6d, 0x83, 0x4d, 0x6a, 0x27, 0xa6, 0x25, 0x9a, 0x00, 0xe1, 0x44, 0x0f, - 0x32, 0xd5, 0x1e, 0xda, 0x78, 0xc4, 0xa4, 0x4d, 0x93, 0x1e, 0x2a, 0xbd, 0xf5, 0x42, 0x60, 0x9c, - 0x22, 0x29, 0x30, 0x96, 0x19, 0x8d, 0xbe, 0xc5, 0x66, 0x1f, 0x63, 0x9f, 0xc4, 0xa3, 0xc7, 0xbd, - 0x2c, 0xbb, 0xd1, 0x37, 0xf0, 0x09, 0x36, 0x30, 0xe3, 0x1e, 0x94, 0x3d, 0xf1, 0xcf, 0x37, 0xbf, - 0xef, 0xfb, 0xfe, 0x21, 0x7f, 0xd5, 0x8c, 0x43, 0x8c, 0x30, 0xcd, 0x09, 0xc2, 0x49, 0x4c, 0x32, - 0x8e, 0x56, 0x43, 0x14, 0x91, 0x8c, 0xb0, 0x98, 0x39, 0x8b, 0x9c, 0x72, 0x0a, 0x61, 0x1c, 0x62, - 0xa7, 0x24, 0x1c, 0x41, 0x38, 0xab, 0x61, 0xcf, 0xa8, 0x71, 0xc9, 0xd7, 0xca, 0xd4, 0xeb, 0x46, - 0x34, 0xa2, 0xd5, 0x88, 0xca, 0x49, 0xa8, 0xd6, 0x5d, 0x43, 0x7d, 0xf3, 0x5d, 0x84, 0xff, 0xe6, - 0x01, 0x27, 0x10, 0xab, 0xaf, 0x84, 0x8d, 0x69, 0xc0, 
0x6c, 0xd8, 0xed, 0xd1, 0x47, 0xe7, 0xb2, - 0xcd, 0xf9, 0x31, 0x23, 0x19, 0x8f, 0xff, 0xc6, 0x64, 0x36, 0xa9, 0xb4, 0xca, 0xeb, 0xea, 0xdb, - 0xc2, 0x50, 0x6e, 0xee, 0x8d, 0xf7, 0xb5, 0xcf, 0xcc, 0x3b, 0x25, 0xc3, 0x6b, 0xa0, 0xbe, 0x93, - 0xb3, 0x8f, 0x69, 0xc6, 0x48, 0xc6, 0x96, 0x4c, 0x7b, 0xf1, 0x7c, 0x9f, 0x88, 0x99, 0x9c, 0x50, - 0x91, 0xe7, 0x8e, 0xcb, 0xbe, 0x63, 0x61, 0x68, 0x9b, 0x20, 0x4d, 0xc6, 0xd6, 0x45, 0xa2, 0x55, - 0xee, 0x22, 0xac, 0xec, 0xcc, 0xeb, 0x75, 0xf0, 0x99, 0x0e, 0xbf, 0xaa, 0xad, 0x45, 0x90, 0x07, - 0x29, 0xd3, 0x1a, 0x26, 0xb0, 0xdb, 0xa3, 0x5e, 0xdd, 0x22, 0xbf, 0x2a, 0xc2, 0x6d, 0x96, 0xcd, - 0x9e, 0xe4, 0xe1, 0x37, 0xb5, 0x83, 0x73, 0x12, 0x70, 0xe2, 0x27, 0x14, 0x07, 0xc9, 0x9c, 0x32, - 0xae, 0x35, 0x4d, 0x60, 0xbf, 0x76, 0xfb, 0xc7, 0xc2, 0xf8, 0x20, 0xb7, 0x3b, 0x23, 0x2c, 0xef, - 0xad, 0x90, 0x7e, 0x9e, 0x14, 0x38, 0x55, 0xbb, 0x19, 0x59, 0x73, 0x5f, 0xd4, 0xf9, 0x8c, 0xfc, - 0x5f, 0x92, 0x0c, 0x13, 0xed, 0xa5, 0x09, 0xec, 0xa6, 0x6b, 0x1c, 0x0b, 0xa3, 0x2f, 0xb2, 0xea, - 0x28, 0xcb, 0x83, 0xa5, 0x2c, 0x7f, 0xb8, 0x14, 0xdd, 0xe9, 0x76, 0xaf, 0x83, 0xdd, 0x5e, 0x07, - 0x0f, 0x7b, 0x1d, 0x5c, 0x1d, 0x74, 0x65, 0x77, 0xd0, 0x95, 0xdb, 0x83, 0xae, 0xfc, 0xf9, 0x12, - 0xc5, 0x7c, 0xbe, 0x0c, 0x1d, 0x4c, 0x53, 0x84, 0x29, 0x4b, 0x29, 0x93, 0x9f, 0x01, 0x9b, 0xfd, - 0x43, 0x6b, 0xf4, 0x74, 0x4f, 0x9f, 0x46, 0x03, 0x79, 0x52, 0x7c, 0xb3, 0x20, 0x2c, 0x6c, 0x55, - 0x97, 0xf3, 0xf9, 0x31, 0x00, 0x00, 0xff, 0xff, 0x7c, 0xcd, 0xe7, 0x85, 0xa8, 0x02, 0x00, 0x00, + // 535 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x53, 0x4d, 0x6e, 0xd3, 0x40, + 0x14, 0xce, 0x34, 0x69, 0x68, 0xa7, 0x15, 0x0d, 0xa3, 0xa8, 0x98, 0x54, 0xb2, 0x2d, 0xb3, 0x09, + 0x8b, 0xd8, 0x24, 0x2c, 0x40, 0xd9, 0x20, 0xb9, 0x12, 0xa8, 0x12, 0x48, 0xd4, 0xec, 0xd8, 0x58, + 0x93, 0xf1, 0x90, 0x5a, 0x75, 0x3c, 0x21, 0x33, 0x89, 0x9a, 0x1b, 0xb0, 0x44, 0x9c, 0x80, 0x35, + 0x67, 0xe0, 0x00, 0x5d, 0x76, 0xd9, 0x55, 0x40, 0xc9, 0x0d, 0x72, 0x02, 0xe4, 0x99, 0x71, 0x7f, + 0x5c, 0xb7, 0xab, 0xbc, 0x7c, 0xf3, 0x7d, 0xdf, 0x7b, 0xfa, 0x9e, 0x1f, 0xb4, 0xe3, 0x01, 0xf1, + 0x08, 0x9b, 0x50, 0x8f, 0x24, 0x31, 0x4d, 0x85, 0x37, 0xeb, 0x7a, 0x43, 0x9a, 0x52, 0x1e, 0x73, + 0x77, 0x3c, 0x61, 0x82, 0x21, 0x14, 0x0f, 0x88, 0x9b, 0x31, 0x5c, 0xc5, 0x70, 0x67, 0xdd, 0x96, + 0x55, 0xa2, 0xd2, 0xaf, 0x52, 0xd4, 0x6a, 0x0e, 0xd9, 0x90, 0xc9, 0xd2, 0xcb, 0x2a, 0x85, 0x3a, + 0x97, 0x35, 0xb8, 0xfb, 0x5e, 0x99, 0x7f, 0x16, 0x58, 0x50, 0x44, 0xe0, 0x23, 0x25, 0xe3, 0x06, + 0xb0, 0xab, 0xed, 0x9d, 0xde, 0x0b, 0xf7, 0x6e, 0x37, 0xf7, 0x28, 0xa2, 0xa9, 0x88, 0xbf, 0xc6, + 0x34, 0x3a, 0x94, 0x98, 0xd4, 0xfa, 0xe6, 0xf9, 0xc2, 0xaa, 0xfc, 0xfe, 0x6b, 0xed, 0x97, 0x3e, + 0xf3, 0x20, 0x77, 0x46, 0x3f, 0x01, 0x7c, 0xa2, 0xeb, 0x90, 0xb0, 0x94, 0xd3, 0x94, 0x4f, 0xb9, + 0xb1, 0x71, 0x7f, 0x3f, 0x65, 0x73, 0x98, 0x53, 0x95, 0x9f, 0xdf, 0xcf, 0xfa, 0xad, 0x17, 0x96, + 0x31, 0xc7, 0xa3, 0xa4, 0xef, 0xdc, 0x71, 0x74, 0xb2, 0x59, 0x94, 0x94, 0x17, 0xb4, 0x41, 0x83, + 0x14, 0x70, 0x34, 0x87, 0x39, 0x16, 0x8e, 0xa8, 0xc0, 0x11, 0x16, 0xd8, 0xa8, 0xca, 0x91, 0x3a, + 0x0f, 0x47, 0xa0, 0xf3, 0xfb, 0xa8, 0x45, 0xbe, 0xa5, 0xc7, 0x7a, 0x7a, 0x7b, 0xac, 0xdc, 0xd4, + 0x09, 0xf6, 0x34, 0x94, 0x2b, 0xd0, 0x1b, 0x58, 0x1f, 0xe3, 0x09, 0x1e, 0x71, 0xa3, 0x66, 0x83, + 0xf6, 0x4e, 0xaf, 0x55, 0xd6, 0xf0, 0x93, 0x64, 0xf8, 0xb5, 0xcc, 0x3d, 0xd0, 0x7c, 0xf4, 0x0e, + 0x36, 0xc8, 0x84, 0x62, 0x41, 0xc3, 0x84, 0x11, 0x9c, 0x9c, 0x30, 0x2e, 0x8c, 0x4d, 0x1b, 0xb4, + 0xb7, 0xfc, 0x83, 0x1b, 0x13, 
0x14, 0x18, 0xd9, 0x04, 0x12, 0xfa, 0x90, 0x23, 0xe8, 0x18, 0x36, + 0x53, 0x7a, 0x26, 0x42, 0xd5, 0x2e, 0xe4, 0xf4, 0xdb, 0x94, 0xa6, 0x84, 0x1a, 0x75, 0x1b, 0xb4, + 0x6b, 0xbe, 0xb5, 0x5e, 0x58, 0x07, 0xca, 0xab, 0x8c, 0xe5, 0x04, 0x28, 0x83, 0xf5, 0xae, 0x73, + 0xf0, 0x2d, 0xdc, 0x2b, 0x24, 0x83, 0x1a, 0xb0, 0x7a, 0x4a, 0xe7, 0x06, 0xb0, 0x41, 0x7b, 0x37, + 0xc8, 0x4a, 0xd4, 0x84, 0x9b, 0x33, 0x9c, 0x4c, 0xa9, 0xb1, 0x21, 0x31, 0xf5, 0xa7, 0x5f, 0xfb, + 0xfe, 0xcb, 0xaa, 0x38, 0x7f, 0x00, 0x7c, 0x76, 0x6f, 0xca, 0xa8, 0x0b, 0xb7, 0xf5, 0x18, 0x71, + 0x24, 0x1d, 0xb7, 0xfd, 0xe6, 0x7a, 0x61, 0x35, 0x6e, 0x86, 0x1e, 0xc6, 0x91, 0x13, 0x6c, 0xa9, + 0xfa, 0x28, 0x42, 0x09, 0xd4, 0xc9, 0x5f, 0x2f, 0x58, 0x7d, 0x73, 0xcf, 0xcb, 0xf2, 0x2e, 0xae, + 0xd5, 0xd4, 0x6b, 0xdd, 0xbf, 0xd5, 0xe1, 0x7a, 0xab, 0x8f, 0x15, 0x72, 0xc5, 0x3f, 0x3e, 0x5f, + 0x9a, 0xe0, 0x62, 0x69, 0x82, 0x7f, 0x4b, 0x13, 0xfc, 0x58, 0x99, 0x95, 0x8b, 0x95, 0x59, 0xb9, + 0x5c, 0x99, 0x95, 0x2f, 0xaf, 0x87, 0xb1, 0x38, 0x99, 0x0e, 0x5c, 0xc2, 0x46, 0x1e, 0x61, 0x7c, + 0xc4, 0xb8, 0xfe, 0xe9, 0xf0, 0xe8, 0xd4, 0x3b, 0xf3, 0xae, 0x4e, 0xf9, 0x65, 0xaf, 0xa3, 0xaf, + 0x59, 0xcc, 0xc7, 0x94, 0x0f, 0xea, 0xf2, 0x68, 0x5f, 0xfd, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x61, + 0x6f, 0x94, 0xed, 0x23, 0x04, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -162,7 +277,7 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { if m.NextClientSequence != 0 { i = encodeVarintGenesis(dAtA, i, uint64(m.NextClientSequence)) i-- - dAtA[i] = 0x28 + dAtA[i] = 0x30 } if m.CreateLocalhost { i-- @@ -172,7 +287,7 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0 } i-- - dAtA[i] = 0x20 + dAtA[i] = 0x28 } { size, err := m.Params.MarshalToSizedBuffer(dAtA[:i]) @@ -183,7 +298,21 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintGenesis(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x1a + dAtA[i] = 0x22 + if len(m.ClientsMetadata) > 0 { + for iNdEx := len(m.ClientsMetadata) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.ClientsMetadata[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + } if len(m.ClientsConsensus) > 0 { for iNdEx := len(m.ClientsConsensus) - 1; iNdEx >= 0; iNdEx-- { { @@ -215,6 +344,87 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *GenesisMetadata) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *GenesisMetadata) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *GenesisMetadata) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Value) > 0 { + i -= len(m.Value) + copy(dAtA[i:], m.Value) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.Value))) + i-- + dAtA[i] = 0x12 + } + if len(m.Key) > 0 { + i -= len(m.Key) + copy(dAtA[i:], m.Key) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.Key))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *IdentifiedGenesisMetadata) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], 
nil +} + +func (m *IdentifiedGenesisMetadata) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *IdentifiedGenesisMetadata) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.ClientMetadata) > 0 { + for iNdEx := len(m.ClientMetadata) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.ClientMetadata[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if len(m.ClientId) > 0 { + i -= len(m.ClientId) + copy(dAtA[i:], m.ClientId) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.ClientId))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func encodeVarintGenesis(dAtA []byte, offset int, v uint64) int { offset -= sovGenesis(v) base := offset @@ -244,6 +454,12 @@ func (m *GenesisState) Size() (n int) { n += 1 + l + sovGenesis(uint64(l)) } } + if len(m.ClientsMetadata) > 0 { + for _, e := range m.ClientsMetadata { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } l = m.Params.Size() n += 1 + l + sovGenesis(uint64(l)) if m.CreateLocalhost { @@ -255,6 +471,42 @@ func (m *GenesisState) Size() (n int) { return n } +func (m *GenesisMetadata) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Key) + if l > 0 { + n += 1 + l + sovGenesis(uint64(l)) + } + l = len(m.Value) + if l > 0 { + n += 1 + l + sovGenesis(uint64(l)) + } + return n +} + +func (m *IdentifiedGenesisMetadata) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.ClientId) + if l > 0 { + n += 1 + l + sovGenesis(uint64(l)) + } + if len(m.ClientMetadata) > 0 { + for _, e := range m.ClientMetadata { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + return n +} + func sovGenesis(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -359,6 +611,40 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { } iNdEx = postIndex case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientsMetadata", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientsMetadata = append(m.ClientsMetadata, IdentifiedGenesisMetadata{}) + if err := m.ClientsMetadata[len(m.ClientsMetadata)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Params", wireType) } @@ -391,7 +677,7 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 4: + case 5: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field CreateLocalhost", wireType) } @@ -411,7 +697,7 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { } } m.CreateLocalhost = bool(v != 0) - case 5: + case 6: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field NextClientSequence", wireType) } @@ -454,6 +740,246 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { } return nil } +func (m *GenesisMetadata) Unmarshal(dAtA []byte) error { + l := 
len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: GenesisMetadata: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: GenesisMetadata: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Key = append(m.Key[:0], dAtA[iNdEx:postIndex]...) + if m.Key == nil { + m.Key = []byte{} + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Value = append(m.Value[:0], dAtA[iNdEx:postIndex]...) 
+ if m.Value == nil { + m.Value = []byte{} + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *IdentifiedGenesisMetadata) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: IdentifiedGenesisMetadata: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: IdentifiedGenesisMetadata: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientId", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientId = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientMetadata", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientMetadata = append(m.ClientMetadata, GenesisMetadata{}) + if err := m.ClientMetadata[len(m.ClientMetadata)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipGenesis(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/x/ibc/core/02-client/types/genesis_test.go b/x/ibc/core/02-client/types/genesis_test.go index 12cfba69d..d57b8d1ba 100644 --- a/x/ibc/core/02-client/types/genesis_test.go +++ b/x/ibc/core/02-client/types/genesis_test.go @@ -91,6 +91,15 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + []types.IdentifiedGenesisMetadata{ + types.NewIdentifiedGenesisMetadata( + clientID, + []types.GenesisMetadata{ + 
types.NewGenesisMetadata([]byte("key1"), []byte("val1")), + types.NewGenesisMetadata([]byte("key2"), []byte("val2")), + }, + ), + }, types.NewParams(exported.Tendermint, exported.Localhost), false, 2, @@ -121,6 +130,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint), false, 0, @@ -137,6 +147,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { types.NewIdentifiedClientState(exported.Localhost, localhosttypes.NewClientState("chaindID", types.ZeroHeight())), }, nil, + nil, types.NewParams(exported.Tendermint), false, 0, @@ -159,7 +170,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { tmClientID1, []types.ConsensusStateWithHeight{ types.NewConsensusStateWithHeight( - types.ZeroHeight(), + types.NewHeight(0, 1), ibctmtypes.NewConsensusState( header.GetTime(), commitmenttypes.NewMerkleRoot(header.Header.GetAppHash()), header.Header.NextValidatorsHash, ), @@ -167,6 +178,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint), false, 0, @@ -197,6 +209,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint), false, 0, @@ -227,6 +240,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint), false, 0, @@ -257,12 +271,87 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Solomachine), false, 0, ), expPass: false, }, + { + name: "metadata client-id does not match a genesis client", + genState: types.NewGenesisState( + []types.IdentifiedClientState{ + types.NewIdentifiedClientState( + clientID, ibctmtypes.NewClientState(chainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + ), + types.NewIdentifiedClientState( + exported.Localhost, localhosttypes.NewClientState("chainID", clientHeight), + ), + }, + []types.ClientConsensusStates{ + types.NewClientConsensusStates( + clientID, + []types.ConsensusStateWithHeight{ + types.NewConsensusStateWithHeight( + header.GetHeight().(types.Height), + ibctmtypes.NewConsensusState( + header.GetTime(), commitmenttypes.NewMerkleRoot(header.Header.GetAppHash()), header.Header.NextValidatorsHash, + ), + ), + }, + ), + }, + []types.IdentifiedGenesisMetadata{ + types.NewIdentifiedGenesisMetadata( + "wrongclientid", + []types.GenesisMetadata{ + types.NewGenesisMetadata([]byte("key1"), []byte("val1")), + types.NewGenesisMetadata([]byte("key2"), []byte("val2")), + }, + ), + }, + types.NewParams(exported.Tendermint, exported.Localhost), + false, + 0, + ), + expPass: false, + }, + { + name: "invalid metadata", + genState: types.NewGenesisState( + []types.IdentifiedClientState{ + types.NewIdentifiedClientState( + clientID, ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), + ), + }, + []types.ClientConsensusStates{ + types.NewClientConsensusStates( + clientID, + []types.ConsensusStateWithHeight{ + types.NewConsensusStateWithHeight( + header.GetHeight().(types.Height), + ibctmtypes.NewConsensusState( + header.GetTime(), commitmenttypes.NewMerkleRoot(header.Header.GetAppHash()), header.Header.NextValidatorsHash, + ), + ), + }, + ), + }, + []types.IdentifiedGenesisMetadata{ + 
types.NewIdentifiedGenesisMetadata( + clientID, + []types.GenesisMetadata{ + types.NewGenesisMetadata([]byte(""), []byte("val1")), + types.NewGenesisMetadata([]byte("key2"), []byte("val2")), + }, + ), + }, + types.NewParams(exported.Tendermint), + false, + 0, + ), + }, { name: "invalid params", genState: types.NewGenesisState( @@ -287,6 +376,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(" "), false, 0, @@ -317,6 +407,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(" "), true, 0, @@ -347,6 +438,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint), true, 2, @@ -377,6 +469,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint, exported.Localhost), false, 0, @@ -407,6 +500,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint, exported.Localhost), false, 5, @@ -434,6 +528,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, ), }, + nil, types.NewParams(exported.Tendermint, exported.Localhost), false, 5, diff --git a/x/ibc/core/03-connection/client/cli/tx.go b/x/ibc/core/03-connection/client/cli/tx.go index bff4c5da9..5044f003d 100644 --- a/x/ibc/core/03-connection/client/cli/tx.go +++ b/x/ibc/core/03-connection/client/cli/tx.go @@ -22,6 +22,7 @@ import ( const ( flagVersionIdentifier = "version-identifier" flagVersionFeatures = "version-features" + flagDelayPeriod = "delay-period" ) // NewConnectionOpenInitCmd defines the command to initialize a connection on @@ -34,7 +35,7 @@ func NewConnectionOpenInitCmd() *cobra.Command { - 'version-identifier' flag can be a single pre-selected version identifier to be used in the handshake. - 'version-features' flag can be a list of features separated by commas to accompany the version identifier.`, Example: fmt.Sprintf( - "%s tx %s %s open-init [client-id] [counterparty-client-id] [path/to/counterparty_prefix.json] --version-identifier=\"1.0\" --version-features=\"ORDER_UNORDERED\"", + "%s tx %s %s open-init [client-id] [counterparty-client-id] [path/to/counterparty_prefix.json] --version-identifier=\"1.0\" --version-features=\"ORDER_UNORDERED\" --delay-period=500", version.AppName, host.ModuleName, types.SubModuleName, ), Args: cobra.ExactArgs(3), @@ -67,9 +68,14 @@ func NewConnectionOpenInitCmd() *cobra.Command { version = types.NewVersion(versionIdentifier, features) } + delayPeriod, err := cmd.Flags().GetUint64(flagDelayPeriod) + if err != nil { + return err + } + msg := types.NewMsgConnectionOpenInit( clientID, counterpartyClientID, - counterpartyPrefix, version, clientCtx.GetFromAddress(), + counterpartyPrefix, version, delayPeriod, clientCtx.GetFromAddress(), ) if err := msg.ValidateBasic(); err != nil { @@ -84,6 +90,7 @@ func NewConnectionOpenInitCmd() *cobra.Command { // at this step in the handshake. cmd.Flags().String(flagVersionIdentifier, "", "version identifier to be used in the connection handshake version negotiation") cmd.Flags().String(flagVersionFeatures, "", "version features list separated by commas without spaces. 
The features must function with the version identifier.") + cmd.Flags().Uint64(flagDelayPeriod, 0, "delay period that must pass before packet verification can pass against a consensus state") flags.AddTxFlagsToCmd(cmd) return cmd @@ -174,9 +181,14 @@ func NewConnectionOpenTryCmd() *cobra.Command { return err } + delayPeriod, err := cmd.Flags().GetUint64(flagDelayPeriod) + if err != nil { + return err + } + msg := types.NewMsgConnectionOpenTry( connectionID, clientID, counterpartyConnectionID, counterpartyClientID, - counterpartyClient, counterpartyPrefix, counterpartyVersions, + counterpartyClient, counterpartyPrefix, counterpartyVersions, delayPeriod, proofInit, proofClient, proofConsensus, proofHeight, consensusHeight, clientCtx.GetFromAddress(), ) @@ -189,6 +201,7 @@ func NewConnectionOpenTryCmd() *cobra.Command { }, } + cmd.Flags().Uint64(flagDelayPeriod, 0, "delay period that must pass before packet verification can pass against a consensus state") flags.AddTxFlagsToCmd(cmd) return cmd diff --git a/x/ibc/core/03-connection/genesis.go b/x/ibc/core/03-connection/genesis.go index 4f97ed37b..a1bb30f1f 100644 --- a/x/ibc/core/03-connection/genesis.go +++ b/x/ibc/core/03-connection/genesis.go @@ -10,7 +10,7 @@ import ( // state. func InitGenesis(ctx sdk.Context, k keeper.Keeper, gs types.GenesisState) { for _, connection := range gs.Connections { - conn := types.NewConnectionEnd(connection.State, connection.ClientId, connection.Counterparty, connection.Versions) + conn := types.NewConnectionEnd(connection.State, connection.ClientId, connection.Counterparty, connection.Versions, connection.DelayPeriod) k.SetConnection(ctx, connection.Id, conn) } for _, connPaths := range gs.ClientConnectionPaths { diff --git a/x/ibc/core/03-connection/keeper/grpc_query_test.go b/x/ibc/core/03-connection/keeper/grpc_query_test.go index 6a3497667..faa77b3c3 100644 --- a/x/ibc/core/03-connection/keeper/grpc_query_test.go +++ b/x/ibc/core/03-connection/keeper/grpc_query_test.go @@ -52,7 +52,7 @@ func (suite *KeeperTestSuite) TestQueryConnection() { connB := suite.chainB.GetFirstTestConnection(clientB, clientA) counterparty := types.NewCounterparty(clientB, connB.ID, suite.chainB.GetPrefix()) - expConnection = types.NewConnectionEnd(types.INIT, clientA, counterparty, types.ExportedVersionsToProto(types.GetCompatibleVersions())) + expConnection = types.NewConnectionEnd(types.INIT, clientA, counterparty, types.ExportedVersionsToProto(types.GetCompatibleVersions()), 500) suite.chainA.App.IBCKeeper.ConnectionKeeper.SetConnection(suite.chainA.GetContext(), connA.ID, expConnection) req = &types.QueryConnectionRequest{ @@ -121,9 +121,9 @@ func (suite *KeeperTestSuite) TestQueryConnections() { // counterparty connection id is blank after open init counterparty3 := types.NewCounterparty(clientB, "", suite.chainB.GetPrefix()) - conn1 := types.NewConnectionEnd(types.OPEN, clientA, counterparty1, types.ExportedVersionsToProto(types.GetCompatibleVersions())) - conn2 := types.NewConnectionEnd(types.OPEN, clientA1, counterparty2, types.ExportedVersionsToProto(types.GetCompatibleVersions())) - conn3 := types.NewConnectionEnd(types.INIT, clientA, counterparty3, types.ExportedVersionsToProto(types.GetCompatibleVersions())) + conn1 := types.NewConnectionEnd(types.OPEN, clientA, counterparty1, types.ExportedVersionsToProto(types.GetCompatibleVersions()), 0) + conn2 := types.NewConnectionEnd(types.OPEN, clientA1, counterparty2, types.ExportedVersionsToProto(types.GetCompatibleVersions()), 0) + conn3 := 
types.NewConnectionEnd(types.INIT, clientA, counterparty3, types.ExportedVersionsToProto(types.GetCompatibleVersions()), 0) iconn1 := types.NewIdentifiedConnection(connA0.ID, conn1) iconn2 := types.NewIdentifiedConnection(connA1.ID, conn2) diff --git a/x/ibc/core/03-connection/keeper/handshake.go b/x/ibc/core/03-connection/keeper/handshake.go index 7a7d70057..b8f7466f1 100644 --- a/x/ibc/core/03-connection/keeper/handshake.go +++ b/x/ibc/core/03-connection/keeper/handshake.go @@ -24,6 +24,7 @@ func (k Keeper) ConnOpenInit( clientID string, counterparty types.Counterparty, // counterpartyPrefix, counterpartyClientIdentifier version *types.Version, + delayPeriod uint64, ) (string, error) { versions := types.GetCompatibleVersions() if version != nil { @@ -36,7 +37,7 @@ func (k Keeper) ConnOpenInit( // connection defines chain A's ConnectionEnd connectionID := k.GenerateConnectionIdentifier(ctx) - connection := types.NewConnectionEnd(types.INIT, clientID, counterparty, types.ExportedVersionsToProto(versions)) + connection := types.NewConnectionEnd(types.INIT, clientID, counterparty, types.ExportedVersionsToProto(versions), delayPeriod) k.SetConnection(ctx, connectionID, connection) if err := k.addConnectionToClient(ctx, clientID, connectionID); err != nil { @@ -62,6 +63,7 @@ func (k Keeper) ConnOpenTry( ctx sdk.Context, previousConnectionID string, // previousIdentifier counterparty types.Counterparty, // counterpartyConnectionIdentifier, counterpartyPrefix and counterpartyClientIdentifier + delayPeriod uint64, clientID string, // clientID of chainA clientState exported.ClientState, // clientState that chainA has for chainB counterpartyVersions []exported.Version, // supported versions of chain A @@ -89,10 +91,12 @@ func (k Keeper) ConnOpenTry( // counterparty is chainA and connection is on INIT stage. // Check that existing connection versions for initialized connection is equal to compatible // versions for this chain. + // ensure that existing connection's delay period is the same as desired delay period. 
if !(previousConnection.Counterparty.ConnectionId == "" && bytes.Equal(previousConnection.Counterparty.Prefix.Bytes(), counterparty.Prefix.Bytes()) && previousConnection.ClientId == clientID && - previousConnection.Counterparty.ClientId == counterparty.ClientId) { + previousConnection.Counterparty.ClientId == counterparty.ClientId && + previousConnection.DelayPeriod == delayPeriod) { return "", sdkerrors.Wrap(types.ErrInvalidConnection, "connection fields mismatch previous connection fields") } @@ -128,9 +132,10 @@ func (k Keeper) ConnOpenTry( // expectedConnection defines Chain A's ConnectionEnd // NOTE: chain A's counterparty is chain B (i.e where this code is executed) + // NOTE: chainA and chainB must have the same delay period prefix := k.GetCommitmentPrefix() expectedCounterparty := types.NewCounterparty(clientID, "", commitmenttypes.NewMerklePrefix(prefix.Bytes())) - expectedConnection := types.NewConnectionEnd(types.INIT, counterparty.ClientId, expectedCounterparty, types.ExportedVersionsToProto(counterpartyVersions)) + expectedConnection := types.NewConnectionEnd(types.INIT, counterparty.ClientId, expectedCounterparty, types.ExportedVersionsToProto(counterpartyVersions), delayPeriod) supportedVersions := types.GetCompatibleVersions() if len(previousConnection.Versions) != 0 { @@ -146,7 +151,7 @@ func (k Keeper) ConnOpenTry( } // connection defines chain B's ConnectionEnd - connection := types.NewConnectionEnd(types.TRYOPEN, clientID, counterparty, []*types.Version{version}) + connection := types.NewConnectionEnd(types.TRYOPEN, clientID, counterparty, []*types.Version{version}, delayPeriod) // Check that ChainA committed expectedConnectionEnd to its state if err := k.VerifyConnectionState( @@ -252,7 +257,7 @@ func (k Keeper) ConnOpenAck( prefix := k.GetCommitmentPrefix() expectedCounterparty := types.NewCounterparty(connection.ClientId, connectionID, commitmenttypes.NewMerklePrefix(prefix.Bytes())) - expectedConnection := types.NewConnectionEnd(types.TRYOPEN, connection.Counterparty.ClientId, expectedCounterparty, []*types.Version{version}) + expectedConnection := types.NewConnectionEnd(types.TRYOPEN, connection.Counterparty.ClientId, expectedCounterparty, []*types.Version{version}, connection.DelayPeriod) // Ensure that ChainB stored expected connectionEnd in its state during ConnOpenTry if err := k.VerifyConnectionState( @@ -314,7 +319,7 @@ func (k Keeper) ConnOpenConfirm( prefix := k.GetCommitmentPrefix() expectedCounterparty := types.NewCounterparty(connection.ClientId, connectionID, commitmenttypes.NewMerklePrefix(prefix.Bytes())) - expectedConnection := types.NewConnectionEnd(types.OPEN, connection.Counterparty.ClientId, expectedCounterparty, connection.Versions) + expectedConnection := types.NewConnectionEnd(types.OPEN, connection.Counterparty.ClientId, expectedCounterparty, connection.Versions, connection.DelayPeriod) // Check that connection on ChainA is open if err := k.VerifyConnectionState( diff --git a/x/ibc/core/03-connection/keeper/handshake_test.go b/x/ibc/core/03-connection/keeper/handshake_test.go index d70fd013d..01c0f68ae 100644 --- a/x/ibc/core/03-connection/keeper/handshake_test.go +++ b/x/ibc/core/03-connection/keeper/handshake_test.go @@ -17,6 +17,7 @@ func (suite *KeeperTestSuite) TestConnOpenInit() { clientA string clientB string version *types.Version + delayPeriod uint64 emptyConnBID bool ) @@ -36,6 +37,11 @@ func (suite *KeeperTestSuite) TestConnOpenInit() { clientA, clientB = suite.coordinator.SetupClients(suite.chainA, suite.chainB, 
exported.Tendermint) version = types.ExportedVersionsToProto(types.GetCompatibleVersions())[0] }, true}, + {"success with non zero delayPeriod", func() { + clientA, clientB = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) + delayPeriod = uint64(time.Hour.Nanoseconds()) + }, true}, + {"invalid version", func() { clientA, clientB = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) version = &types.Version{} @@ -62,7 +68,7 @@ func (suite *KeeperTestSuite) TestConnOpenInit() { } counterparty := types.NewCounterparty(clientB, connB.ID, suite.chainB.GetPrefix()) - connectionID, err := suite.chainA.App.IBCKeeper.ConnectionKeeper.ConnOpenInit(suite.chainA.GetContext(), clientA, counterparty, version) + connectionID, err := suite.chainA.App.IBCKeeper.ConnectionKeeper.ConnOpenInit(suite.chainA.GetContext(), clientA, counterparty, version, delayPeriod) if tc.expPass { suite.Require().NoError(err) @@ -81,6 +87,7 @@ func (suite *KeeperTestSuite) TestConnOpenTry() { var ( clientA string clientB string + delayPeriod uint64 previousConnectionID string versions []exported.Version consensusHeight exported.Height @@ -110,6 +117,25 @@ func (suite *KeeperTestSuite) TestConnOpenTry() { previousConnectionID = connB.ID }, true}, + {"success with delay period", func() { + clientA, clientB = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) + connA, _, err := suite.coordinator.ConnOpenInit(suite.chainA, suite.chainB, clientA, clientB) + suite.Require().NoError(err) + + delayPeriod = uint64(time.Hour.Nanoseconds()) + + // set delay period on counterparty to non-zero value + conn := suite.chainA.GetConnection(connA) + conn.DelayPeriod = delayPeriod + suite.chainA.App.IBCKeeper.ConnectionKeeper.SetConnection(suite.chainA.GetContext(), connA.ID, conn) + + // commit in order for proof to return correct value + suite.coordinator.CommitBlock(suite.chainA) + suite.coordinator.UpdateClient(suite.chainB, suite.chainA, clientB, exported.Tendermint) + + // retrieve client state of chainA to pass as counterpartyClient + counterpartyClient = suite.chainA.GetClientState(clientA) + }, true}, {"invalid counterparty client", func() { clientA, clientB = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) _, _, err := suite.coordinator.ConnOpenInit(suite.chainA, suite.chainB, clientA, clientB) @@ -283,7 +309,7 @@ func (suite *KeeperTestSuite) TestConnOpenTry() { proofClient, _ := suite.chainA.QueryProof(clientKey) connectionID, err := suite.chainB.App.IBCKeeper.ConnectionKeeper.ConnOpenTry( - suite.chainB.GetContext(), previousConnectionID, counterparty, clientB, counterpartyClient, + suite.chainB.GetContext(), previousConnectionID, counterparty, delayPeriod, clientB, counterpartyClient, versions, proofInit, proofClient, proofConsensus, proofHeight, consensusHeight, ) diff --git a/x/ibc/core/03-connection/keeper/keeper_test.go b/x/ibc/core/03-connection/keeper/keeper_test.go index cf8f44717..f2a1124b5 100644 --- a/x/ibc/core/03-connection/keeper/keeper_test.go +++ b/x/ibc/core/03-connection/keeper/keeper_test.go @@ -62,8 +62,8 @@ func (suite KeeperTestSuite) TestGetAllConnections() { counterpartyB0 := types.NewCounterparty(clientB, connB0.ID, suite.chainB.GetPrefix()) // connection B0 counterpartyB1 := types.NewCounterparty(clientB, connB1.ID, suite.chainB.GetPrefix()) // connection B1 - conn1 := types.NewConnectionEnd(types.OPEN, clientA, counterpartyB0, types.ExportedVersionsToProto(types.GetCompatibleVersions())) 
// A0 - B0 - conn2 := types.NewConnectionEnd(types.OPEN, clientA, counterpartyB1, types.ExportedVersionsToProto(types.GetCompatibleVersions())) // A1 - B1 + conn1 := types.NewConnectionEnd(types.OPEN, clientA, counterpartyB0, types.ExportedVersionsToProto(types.GetCompatibleVersions()), 0) // A0 - B0 + conn2 := types.NewConnectionEnd(types.OPEN, clientA, counterpartyB1, types.ExportedVersionsToProto(types.GetCompatibleVersions()), 0) // A1 - B1 iconn1 := types.NewIdentifiedConnection(connA0.ID, conn1) iconn2 := types.NewIdentifiedConnection(connA1.ID, conn2) diff --git a/x/ibc/core/03-connection/keeper/verify.go b/x/ibc/core/03-connection/keeper/verify.go index 2b9e00706..ddb1ea6b9 100644 --- a/x/ibc/core/03-connection/keeper/verify.go +++ b/x/ibc/core/03-connection/keeper/verify.go @@ -128,6 +128,7 @@ func (k Keeper) VerifyPacketCommitment( if err := clientState.VerifyPacketCommitment( k.clientKeeper.ClientStore(ctx, connection.GetClientID()), k.cdc, height, + uint64(ctx.BlockTime().UnixNano()), connection.GetDelayPeriod(), connection.GetCounterparty().GetPrefix(), proof, portID, channelID, sequence, commitmentBytes, ); err != nil { @@ -156,6 +157,7 @@ func (k Keeper) VerifyPacketAcknowledgement( if err := clientState.VerifyPacketAcknowledgement( k.clientKeeper.ClientStore(ctx, connection.GetClientID()), k.cdc, height, + uint64(ctx.BlockTime().UnixNano()), connection.GetDelayPeriod(), connection.GetCounterparty().GetPrefix(), proof, portID, channelID, sequence, acknowledgement, ); err != nil { @@ -184,6 +186,7 @@ func (k Keeper) VerifyPacketReceiptAbsence( if err := clientState.VerifyPacketReceiptAbsence( k.clientKeeper.ClientStore(ctx, connection.GetClientID()), k.cdc, height, + uint64(ctx.BlockTime().UnixNano()), connection.GetDelayPeriod(), connection.GetCounterparty().GetPrefix(), proof, portID, channelID, sequence, ); err != nil { @@ -211,6 +214,7 @@ func (k Keeper) VerifyNextSequenceRecv( if err := clientState.VerifyNextSequenceRecv( k.clientKeeper.ClientStore(ctx, connection.GetClientID()), k.cdc, height, + uint64(ctx.BlockTime().UnixNano()), connection.GetDelayPeriod(), connection.GetCounterparty().GetPrefix(), proof, portID, channelID, nextSequenceRecv, ); err != nil { diff --git a/x/ibc/core/03-connection/keeper/verify_test.go b/x/ibc/core/03-connection/keeper/verify_test.go index 4bf1bfb9d..2d94955d8 100644 --- a/x/ibc/core/03-connection/keeper/verify_test.go +++ b/x/ibc/core/03-connection/keeper/verify_test.go @@ -258,12 +258,15 @@ func (suite *KeeperTestSuite) TestVerifyPacketCommitment() { changeClientID bool changePacketCommitmentState bool heightDiff uint64 + delayPeriod uint64 expPass bool }{ - {"verification success", false, false, 0, true}, - {"client state not found- changed client ID", true, false, 0, false}, - {"consensus state not found - increased proof height", false, false, 5, false}, - {"verification failed - changed packet commitment state", false, true, 0, false}, + {"verification success", false, false, 0, 0, true}, + {"verification success: delay period passed", false, false, 0, uint64(1 * time.Second.Nanoseconds()), true}, + {"delay period has not passed", false, false, 0, uint64(1 * time.Hour.Nanoseconds()), false}, + {"client state not found- changed client ID", true, false, 0, 0, false}, + {"consensus state not found - increased proof height", false, false, 5, 0, false}, + {"verification failed - changed packet commitment state", false, true, 0, 0, false}, } for _, tc := range cases { @@ -273,7 +276,9 @@ func (suite *KeeperTestSuite) 
TestVerifyPacketCommitment() { suite.SetupTest() // reset _, clientB, _, connB, channelA, channelB := suite.coordinator.Setup(suite.chainA, suite.chainB, channeltypes.UNORDERED) + connection := suite.chainB.GetConnection(connB) + connection.DelayPeriod = tc.delayPeriod if tc.changeClientID { connection.ClientId = ibctesting.InvalidID } @@ -313,12 +318,15 @@ func (suite *KeeperTestSuite) TestVerifyPacketAcknowledgement() { changeClientID bool changeAcknowledgement bool heightDiff uint64 + delayPeriod uint64 expPass bool }{ - {"verification success", false, false, 0, true}, - {"client state not found- changed client ID", true, false, 0, false}, - {"consensus state not found - increased proof height", false, false, 5, false}, - {"verification failed - changed acknowledgement", false, true, 0, false}, + {"verification success", false, false, 0, 0, true}, + {"verification success: delay period passed", false, false, 0, uint64(1 * time.Second.Nanoseconds()), true}, + {"delay period has not passed", false, false, 0, uint64(1 * time.Hour.Nanoseconds()), false}, + {"client state not found- changed client ID", true, false, 0, 0, false}, + {"consensus state not found - increased proof height", false, false, 5, 0, false}, + {"verification failed - changed acknowledgement", false, true, 0, 0, false}, } for _, tc := range cases { @@ -328,7 +336,9 @@ func (suite *KeeperTestSuite) TestVerifyPacketAcknowledgement() { suite.SetupTest() // reset clientA, clientB, connA, _, channelA, channelB := suite.coordinator.Setup(suite.chainA, suite.chainB, channeltypes.UNORDERED) + connection := suite.chainA.GetConnection(connA) + connection.DelayPeriod = tc.delayPeriod if tc.changeClientID { connection.ClientId = ibctesting.InvalidID } @@ -338,6 +348,10 @@ func (suite *KeeperTestSuite) TestVerifyPacketAcknowledgement() { err := suite.coordinator.SendPacket(suite.chainA, suite.chainB, packet, clientB) suite.Require().NoError(err) + // increment receiving chain's (chainB) time by 2 hour to always pass receive + suite.coordinator.IncrementTimeBy(time.Hour * 2) + suite.coordinator.CommitBlock(suite.chainB) + err = suite.coordinator.RecvPacket(suite.chainA, suite.chainB, clientA, packet) suite.Require().NoError(err) @@ -372,12 +386,15 @@ func (suite *KeeperTestSuite) TestVerifyPacketReceiptAbsence() { changeClientID bool recvAck bool heightDiff uint64 + delayPeriod uint64 expPass bool }{ - {"verification success", false, false, 0, true}, - {"client state not found - changed client ID", true, false, 0, false}, - {"consensus state not found - increased proof height", false, false, 5, false}, - {"verification failed - acknowledgement was received", false, true, 0, false}, + {"verification success", false, false, 0, 0, true}, + {"verification success: delay period passed", false, false, 0, uint64(1 * time.Second.Nanoseconds()), true}, + {"delay period has not passed", false, false, 0, uint64(1 * time.Hour.Nanoseconds()), false}, + {"client state not found - changed client ID", true, false, 0, 0, false}, + {"consensus state not found - increased proof height", false, false, 5, 0, false}, + {"verification failed - acknowledgement was received", false, true, 0, 0, false}, } for _, tc := range cases { @@ -387,7 +404,9 @@ func (suite *KeeperTestSuite) TestVerifyPacketReceiptAbsence() { suite.SetupTest() // reset clientA, clientB, connA, _, channelA, channelB := suite.coordinator.Setup(suite.chainA, suite.chainB, channeltypes.UNORDERED) + connection := suite.chainA.GetConnection(connA) + connection.DelayPeriod = tc.delayPeriod if 
tc.changeClientID { connection.ClientId = ibctesting.InvalidID } @@ -398,6 +417,10 @@ func (suite *KeeperTestSuite) TestVerifyPacketReceiptAbsence() { suite.Require().NoError(err) if tc.recvAck { + // increment receiving chain's (chainB) time by 2 hour to always pass receive + suite.coordinator.IncrementTimeBy(time.Hour * 2) + suite.coordinator.CommitBlock(suite.chainB) + err = suite.coordinator.RecvPacket(suite.chainA, suite.chainB, clientA, packet) suite.Require().NoError(err) } else { @@ -432,12 +455,15 @@ func (suite *KeeperTestSuite) TestVerifyNextSequenceRecv() { changeClientID bool offsetSeq uint64 heightDiff uint64 + delayPeriod uint64 expPass bool }{ - {"verification success", false, 0, 0, true}, - {"client state not found- changed client ID", true, 0, 0, false}, - {"consensus state not found - increased proof height", false, 0, 5, false}, - {"verification failed - wrong expected next seq recv", false, 1, 0, false}, + {"verification success", false, 0, 0, 0, true}, + {"verification success: delay period passed", false, 0, 0, uint64(1 * time.Second.Nanoseconds()), true}, + {"delay period has not passed", false, 0, 0, uint64(1 * time.Hour.Nanoseconds()), false}, + {"client state not found- changed client ID", true, 0, 0, 0, false}, + {"consensus state not found - increased proof height", false, 0, 5, 0, false}, + {"verification failed - wrong expected next seq recv", false, 1, 0, 0, false}, } for _, tc := range cases { @@ -447,7 +473,9 @@ func (suite *KeeperTestSuite) TestVerifyNextSequenceRecv() { suite.SetupTest() // reset clientA, clientB, connA, _, channelA, channelB := suite.coordinator.Setup(suite.chainA, suite.chainB, channeltypes.UNORDERED) + connection := suite.chainA.GetConnection(connA) + connection.DelayPeriod = tc.delayPeriod if tc.changeClientID { connection.ClientId = ibctesting.InvalidID } @@ -457,6 +485,10 @@ func (suite *KeeperTestSuite) TestVerifyNextSequenceRecv() { err := suite.coordinator.SendPacket(suite.chainA, suite.chainB, packet, clientB) suite.Require().NoError(err) + // increment receiving chain's (chainB) time by 2 hour to always pass receive + suite.coordinator.IncrementTimeBy(time.Hour * 2) + suite.coordinator.CommitBlock(suite.chainB) + err = suite.coordinator.RecvPacket(suite.chainA, suite.chainB, clientA, packet) suite.Require().NoError(err) diff --git a/x/ibc/core/03-connection/types/connection.go b/x/ibc/core/03-connection/types/connection.go index 224998c6a..197af83ca 100644 --- a/x/ibc/core/03-connection/types/connection.go +++ b/x/ibc/core/03-connection/types/connection.go @@ -10,12 +10,13 @@ import ( var _ exported.ConnectionI = (*ConnectionEnd)(nil) // NewConnectionEnd creates a new ConnectionEnd instance. -func NewConnectionEnd(state State, clientID string, counterparty Counterparty, versions []*Version) ConnectionEnd { +func NewConnectionEnd(state State, clientID string, counterparty Counterparty, versions []*Version, delayPeriod uint64) ConnectionEnd { return ConnectionEnd{ ClientId: clientID, Versions: versions, State: state, Counterparty: counterparty, + DelayPeriod: delayPeriod, } } @@ -39,6 +40,11 @@ func (c ConnectionEnd) GetVersions() []exported.Version { return ProtoVersionsToExported(c.Versions) } +// GetDelayPeriod implements the Connection interface +func (c ConnectionEnd) GetDelayPeriod() uint64 { + return c.DelayPeriod +} + // ValidateBasic implements the Connection interface. // NOTE: the protocol supports that the connection and client IDs match the // counterparty's. 
@@ -107,6 +113,7 @@ func NewIdentifiedConnection(connectionID string, conn ConnectionEnd) Identified Versions: conn.Versions, State: conn.State, Counterparty: conn.Counterparty, + DelayPeriod: conn.DelayPeriod, } } @@ -115,6 +122,6 @@ func (ic IdentifiedConnection) ValidateBasic() error { if err := host.ConnectionIdentifierValidator(ic.Id); err != nil { return sdkerrors.Wrap(err, "invalid connection ID") } - connection := NewConnectionEnd(ic.State, ic.ClientId, ic.Counterparty, ic.Versions) + connection := NewConnectionEnd(ic.State, ic.ClientId, ic.Counterparty, ic.Versions, ic.DelayPeriod) return connection.ValidateBasic() } diff --git a/x/ibc/core/03-connection/types/connection.pb.go b/x/ibc/core/03-connection/types/connection.pb.go index ff0f6db1f..cdfb752b9 100644 --- a/x/ibc/core/03-connection/types/connection.pb.go +++ b/x/ibc/core/03-connection/types/connection.pb.go @@ -63,18 +63,22 @@ func (State) EnumDescriptor() ([]byte, []int) { } // ConnectionEnd defines a stateful object on a chain connected to another -// separate one. NOTE: there must only be 2 defined ConnectionEnds to establish +// separate one. +// NOTE: there must only be 2 defined ConnectionEnds to establish // a connection between two chains. type ConnectionEnd struct { // client associated with this connection. ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty" yaml:"client_id"` // IBC version which can be utilised to determine encodings or protocols for - // channels or packets utilising this connection + // channels or packets utilising this connection. Versions []*Version `protobuf:"bytes,2,rep,name=versions,proto3" json:"versions,omitempty"` // current state of the connection end. State State `protobuf:"varint,3,opt,name=state,proto3,enum=ibc.core.connection.v1.State" json:"state,omitempty"` // counterparty chain associated with this connection. Counterparty Counterparty `protobuf:"bytes,4,opt,name=counterparty,proto3" json:"counterparty"` + // delay period that must pass before a consensus state can be used for packet-verification + // NOTE: delay period logic is only implemented by some clients. + DelayPeriod uint64 `protobuf:"varint,5,opt,name=delay_period,json=delayPeriod,proto3" json:"delay_period,omitempty" yaml:"delay_period"` } func (m *ConnectionEnd) Reset() { *m = ConnectionEnd{} } @@ -124,6 +128,8 @@ type IdentifiedConnection struct { State State `protobuf:"varint,4,opt,name=state,proto3,enum=ibc.core.connection.v1.State" json:"state,omitempty"` // counterparty chain associated with this connection. Counterparty Counterparty `protobuf:"bytes,5,opt,name=counterparty,proto3" json:"counterparty"` + // delay period associated with this connection. + DelayPeriod uint64 `protobuf:"varint,6,opt,name=delay_period,json=delayPeriod,proto3" json:"delay_period,omitempty" yaml:"delay_period"` } func (m *IdentifiedConnection) Reset() { *m = IdentifiedConnection{} } @@ -167,7 +173,7 @@ type Counterparty struct { // identifies the connection end on the counterparty chain associated with a // given connection. ConnectionId string `protobuf:"bytes,2,opt,name=connection_id,json=connectionId,proto3" json:"connection_id,omitempty" yaml:"connection_id"` - // commitment merkle prefix of the counterparty chain + // commitment merkle prefix of the counterparty chain. 
Prefix types.MerklePrefix `protobuf:"bytes,3,opt,name=prefix,proto3" json:"prefix"` } @@ -362,46 +368,48 @@ func init() { } var fileDescriptor_90572467c054e43a = []byte{ - // 617 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0xc1, 0x6a, 0xdb, 0x4c, - 0x10, 0x96, 0x64, 0x39, 0xb1, 0xd7, 0xf1, 0xff, 0xbb, 0x8b, 0x29, 0x42, 0x10, 0x49, 0xa8, 0x85, - 0x9a, 0x42, 0xa4, 0x3a, 0x81, 0x1e, 0x12, 0x7a, 0x88, 0x1d, 0x15, 0x44, 0x5b, 0xd7, 0x28, 0x4e, - 0xa1, 0xb9, 0x04, 0x5b, 0xda, 0x24, 0x4b, 0x62, 0xc9, 0x48, 0x6b, 0x13, 0xbf, 0x41, 0xf0, 0xa9, - 0xd7, 0x1e, 0x0c, 0x85, 0xbe, 0x40, 0x1f, 0x23, 0xf4, 0x94, 0x63, 0x4f, 0xa6, 0xd8, 0x6f, 0xe0, - 0x27, 0x28, 0xd2, 0xca, 0xb2, 0x12, 0x9a, 0x43, 0xdd, 0x9e, 0x34, 0xb3, 0xf3, 0x7d, 0x9f, 0x76, - 0xbe, 0x1d, 0x06, 0x3c, 0xc3, 0x1d, 0x5b, 0xb7, 0x3d, 0x1f, 0xe9, 0xb6, 0xe7, 0xba, 0xc8, 0x26, - 0xd8, 0x73, 0xf5, 0x41, 0x35, 0x95, 0x69, 0x3d, 0xdf, 0x23, 0x1e, 0x7c, 0x8c, 0x3b, 0xb6, 0x16, - 0x02, 0xb5, 0x54, 0x69, 0x50, 0x15, 0xcb, 0x67, 0xde, 0x99, 0x17, 0x41, 0xf4, 0x30, 0xa2, 0x68, - 0x31, 0x2d, 0xdb, 0xed, 0x62, 0xd2, 0x45, 0x2e, 0xa1, 0xb2, 0x8b, 0x8c, 0x02, 0xd5, 0x11, 0x07, - 0x8a, 0xf5, 0x44, 0xd0, 0x70, 0x1d, 0x58, 0x05, 0x79, 0xfb, 0x12, 0x23, 0x97, 0x9c, 0x60, 0x47, - 0x60, 0x15, 0xb6, 0x92, 0xaf, 0x95, 0xe7, 0x13, 0xb9, 0x34, 0x6c, 0x77, 0x2f, 0x77, 0xd5, 0xa4, - 0xa4, 0x5a, 0x39, 0x1a, 0x9b, 0x0e, 0xdc, 0x03, 0xb9, 0x01, 0xf2, 0x03, 0xec, 0xb9, 0x81, 0xc0, - 0x29, 0x99, 0x4a, 0x61, 0x5b, 0xd6, 0x7e, 0x7f, 0x5d, 0xed, 0x03, 0xc5, 0x59, 0x09, 0x01, 0xee, - 0x80, 0x6c, 0x40, 0xda, 0x04, 0x09, 0x19, 0x85, 0xad, 0xfc, 0xb7, 0xbd, 0xf9, 0x10, 0xf3, 0x30, - 0x04, 0x59, 0x14, 0x0b, 0x1b, 0x60, 0xc3, 0xf6, 0xfa, 0x2e, 0x41, 0x7e, 0xaf, 0xed, 0x93, 0xa1, - 0xc0, 0x2b, 0x6c, 0xa5, 0xb0, 0xfd, 0xf4, 0x21, 0x6e, 0x3d, 0x85, 0xad, 0xf1, 0x37, 0x13, 0x99, - 0xb1, 0xee, 0xf0, 0x77, 0xf9, 0xeb, 0x2f, 0x32, 0xa3, 0x7e, 0xe3, 0x40, 0xd9, 0x74, 0x90, 0x4b, - 0xf0, 0x29, 0x46, 0xce, 0xd2, 0x16, 0xb8, 0x09, 0xb8, 0xc4, 0x8c, 0xe2, 0x7c, 0x22, 0xe7, 0xa9, - 0x19, 0xa1, 0x0b, 0x1c, 0xbe, 0x67, 0x19, 0xf7, 0xc7, 0x96, 0x65, 0x56, 0xb6, 0x8c, 0xff, 0x0b, - 0xcb, 0xb2, 0xff, 0xc4, 0xb2, 0xef, 0x2c, 0xd8, 0x48, 0x43, 0x57, 0x19, 0x9f, 0x57, 0xa0, 0xb8, - 0xfc, 0xf7, 0xd2, 0x42, 0x61, 0x3e, 0x91, 0xcb, 0x31, 0x2d, 0x5d, 0x56, 0xc3, 0x8b, 0x2c, 0x72, - 0xd3, 0x81, 0x35, 0xb0, 0xd6, 0xf3, 0xd1, 0x29, 0xbe, 0x8a, 0x26, 0xe8, 0x5e, 0x4b, 0xc9, 0xb8, - 0x0f, 0xaa, 0xda, 0x3b, 0xe4, 0x5f, 0x5c, 0xa2, 0x66, 0x84, 0x8d, 0x5b, 0x8a, 0x99, 0x71, 0x33, - 0x4f, 0x40, 0xa1, 0x1e, 0x5d, 0xaa, 0xd9, 0x26, 0xe7, 0x01, 0x2c, 0x83, 0x6c, 0x2f, 0x0c, 0x04, - 0x56, 0xc9, 0x54, 0xf2, 0x16, 0x4d, 0xd4, 0x63, 0xf0, 0xff, 0x72, 0x32, 0x28, 0x70, 0x85, 0x9e, - 0x13, 0x6d, 0x2e, 0xad, 0xfd, 0x06, 0xac, 0xc7, 0xaf, 0x0d, 0x25, 0x00, 0xf0, 0x62, 0x14, 0x7d, - 0x2a, 0x6a, 0xa5, 0x4e, 0xa0, 0x08, 0x72, 0xa7, 0xa8, 0x4d, 0xfa, 0x3e, 0x5a, 0x68, 0x24, 0x39, - 0xed, 0xe6, 0xf9, 0x67, 0x16, 0x64, 0xa3, 0x09, 0x80, 0x2f, 0x81, 0x7c, 0xd8, 0xda, 0x6f, 0x19, - 0x27, 0x47, 0x0d, 0xb3, 0x61, 0xb6, 0xcc, 0xfd, 0xb7, 0xe6, 0xb1, 0x71, 0x70, 0x72, 0xd4, 0x38, - 0x6c, 0x1a, 0x75, 0xf3, 0xb5, 0x69, 0x1c, 0x94, 0x18, 0xf1, 0xd1, 0x68, 0xac, 0x14, 0xef, 0x00, - 0xa0, 0x00, 0x00, 0xe5, 0x85, 0x87, 0x25, 0x56, 0xcc, 0x8d, 0xc6, 0x0a, 0x1f, 0xc6, 0x50, 0x02, - 0x45, 0x5a, 0x69, 0x59, 0x1f, 0xdf, 0x37, 0x8d, 0x46, 0x89, 0x13, 0x0b, 0xa3, 0xb1, 0xb2, 0x1e, - 0xa7, 0x4b, 0x66, 0x54, 0xcc, 0x50, 0x66, 0x18, 0x8b, 0xfc, 0xf5, 0x57, 0x89, 0xa9, 0x1d, 0xdd, - 
0x4c, 0x25, 0xf6, 0x76, 0x2a, 0xb1, 0x3f, 0xa7, 0x12, 0xfb, 0x69, 0x26, 0x31, 0xb7, 0x33, 0x89, - 0xf9, 0x31, 0x93, 0x98, 0xe3, 0xbd, 0x33, 0x4c, 0xce, 0xfb, 0x9d, 0xf0, 0xe9, 0x74, 0xdb, 0x0b, - 0xba, 0x5e, 0x10, 0x7f, 0xb6, 0x02, 0xe7, 0x42, 0xbf, 0xd2, 0x93, 0xc5, 0xf6, 0x62, 0x67, 0x2b, - 0xb5, 0x32, 0xc9, 0xb0, 0x87, 0x82, 0xce, 0x5a, 0xb4, 0xd4, 0x76, 0x7e, 0x05, 0x00, 0x00, 0xff, - 0xff, 0x6d, 0xfb, 0xee, 0xb6, 0x56, 0x05, 0x00, 0x00, + // 654 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x41, 0x6b, 0xdb, 0x4c, + 0x14, 0x94, 0x64, 0x39, 0xb1, 0xd7, 0xf1, 0xf7, 0xb9, 0x5b, 0xd3, 0x0a, 0x41, 0x24, 0xa1, 0x16, + 0x6a, 0x0a, 0xb1, 0xea, 0x04, 0x7a, 0x48, 0xe8, 0x21, 0x76, 0x5c, 0x10, 0x6d, 0x5d, 0xa3, 0x38, + 0x85, 0xe6, 0x62, 0x6c, 0x69, 0x93, 0x2c, 0xb1, 0xb5, 0x42, 0xda, 0x98, 0xf8, 0x1f, 0x84, 0x9c, + 0x7a, 0xed, 0x21, 0x50, 0xe8, 0x7f, 0x29, 0xa1, 0xa7, 0x1c, 0x7b, 0x32, 0x25, 0xb9, 0xf6, 0xe4, + 0x5f, 0x50, 0xa4, 0x95, 0x65, 0x25, 0x34, 0x87, 0xa4, 0x3d, 0xf9, 0xcd, 0xbe, 0x99, 0xf1, 0xbe, + 0xf1, 0xf3, 0x82, 0x67, 0xb8, 0x6f, 0x1b, 0x36, 0xf1, 0x91, 0x61, 0x13, 0xd7, 0x45, 0x36, 0xc5, + 0xc4, 0x35, 0x46, 0xb5, 0x14, 0xaa, 0x7a, 0x3e, 0xa1, 0x04, 0x3e, 0xc2, 0x7d, 0xbb, 0x1a, 0x12, + 0xab, 0xa9, 0xd6, 0xa8, 0x26, 0x97, 0xf7, 0xc9, 0x3e, 0x89, 0x28, 0x46, 0x58, 0x31, 0xb6, 0x9c, + 0xb6, 0x1d, 0x0e, 0x31, 0x1d, 0x22, 0x97, 0x32, 0xdb, 0x19, 0x62, 0x44, 0xfd, 0x9b, 0x00, 0x8a, + 0x8d, 0xc4, 0xb0, 0xe9, 0x3a, 0xb0, 0x06, 0xf2, 0xf6, 0x00, 0x23, 0x97, 0x76, 0xb1, 0x23, 0xf1, + 0x1a, 0x5f, 0xc9, 0xd7, 0xcb, 0xd3, 0x89, 0x5a, 0x1a, 0xf7, 0x86, 0x83, 0x75, 0x3d, 0x69, 0xe9, + 0x56, 0x8e, 0xd5, 0xa6, 0x03, 0x37, 0x40, 0x6e, 0x84, 0xfc, 0x00, 0x13, 0x37, 0x90, 0x04, 0x2d, + 0x53, 0x29, 0xac, 0xaa, 0xd5, 0x3f, 0x5f, 0xb7, 0xfa, 0x81, 0xf1, 0xac, 0x44, 0x00, 0xd7, 0x40, + 0x36, 0xa0, 0x3d, 0x8a, 0xa4, 0x8c, 0xc6, 0x57, 0xfe, 0x5b, 0x5d, 0xbe, 0x4d, 0xb9, 0x1d, 0x92, + 0x2c, 0xc6, 0x85, 0x2d, 0xb0, 0x64, 0x93, 0x23, 0x97, 0x22, 0xdf, 0xeb, 0xf9, 0x74, 0x2c, 0x89, + 0x1a, 0x5f, 0x29, 0xac, 0x3e, 0xbd, 0x4d, 0xdb, 0x48, 0x71, 0xeb, 0xe2, 0xf9, 0x44, 0xe5, 0xac, + 0x6b, 0x7a, 0xb8, 0x0e, 0x96, 0x1c, 0x34, 0xe8, 0x8d, 0xbb, 0x1e, 0xf2, 0x31, 0x71, 0xa4, 0xac, + 0xc6, 0x57, 0xc4, 0xfa, 0xe3, 0xe9, 0x44, 0x7d, 0xc8, 0xe6, 0x4e, 0x77, 0x75, 0xab, 0x10, 0xc1, + 0x76, 0x84, 0xd6, 0xc5, 0x93, 0x2f, 0x2a, 0xa7, 0xff, 0x12, 0x40, 0xd9, 0x74, 0x90, 0x4b, 0xf1, + 0x1e, 0x46, 0xce, 0x3c, 0x52, 0xb8, 0x0c, 0x84, 0x24, 0xc8, 0xe2, 0x74, 0xa2, 0xe6, 0x99, 0x61, + 0x98, 0xa0, 0x80, 0x6f, 0xc4, 0x2d, 0xdc, 0x39, 0xee, 0xcc, 0xbd, 0xe3, 0x16, 0xff, 0x22, 0xee, + 0xec, 0x3f, 0x8e, 0x7b, 0xe1, 0xce, 0x71, 0x7f, 0xe7, 0xc1, 0x52, 0xfa, 0x6b, 0xee, 0xb3, 0xb6, + 0xaf, 0x40, 0x71, 0x7e, 0xef, 0x79, 0xfc, 0xd2, 0x74, 0xa2, 0x96, 0x63, 0x59, 0xba, 0xad, 0x87, + 0x43, 0xcc, 0xb0, 0xe9, 0xc0, 0x3a, 0x58, 0xf0, 0x7c, 0xb4, 0x87, 0x8f, 0xa3, 0xcd, 0xbd, 0x11, + 0x47, 0xf2, 0x37, 0x1b, 0xd5, 0xaa, 0xef, 0x90, 0x7f, 0x38, 0x40, 0xed, 0x88, 0x1b, 0xc7, 0x11, + 0x2b, 0xe3, 0x61, 0x9e, 0x80, 0x42, 0x23, 0xba, 0x54, 0xbb, 0x47, 0x0f, 0x02, 0x58, 0x06, 0x59, + 0x2f, 0x2c, 0x24, 0x5e, 0xcb, 0x54, 0xf2, 0x16, 0x03, 0xfa, 0x2e, 0xf8, 0x7f, 0xbe, 0x55, 0x8c, + 0x78, 0x8f, 0x99, 0x13, 0x6f, 0x21, 0xed, 0xfd, 0x06, 0x2c, 0xc6, 0x9b, 0x02, 0x15, 0x00, 0xf0, + 0x6c, 0x8d, 0x7d, 0x66, 0x6a, 0xa5, 0x4e, 0xa0, 0x0c, 0x72, 0x7b, 0xa8, 0x47, 0x8f, 0x7c, 0x34, + 0xf3, 0x48, 0x30, 0x9b, 0xe6, 0xf9, 0x67, 0x1e, 0x64, 0xa3, 0xed, 0x81, 0x2f, 0x81, 0xba, 0xdd, + 0xd9, 0xec, 0x34, 
0xbb, 0x3b, 0x2d, 0xb3, 0x65, 0x76, 0xcc, 0xcd, 0xb7, 0xe6, 0x6e, 0x73, 0xab, + 0xbb, 0xd3, 0xda, 0x6e, 0x37, 0x1b, 0xe6, 0x6b, 0xb3, 0xb9, 0x55, 0xe2, 0xe4, 0x07, 0xa7, 0x67, + 0x5a, 0xf1, 0x1a, 0x01, 0x4a, 0x00, 0x30, 0x5d, 0x78, 0x58, 0xe2, 0xe5, 0xdc, 0xe9, 0x99, 0x26, + 0x86, 0x35, 0x54, 0x40, 0x91, 0x75, 0x3a, 0xd6, 0xc7, 0xf7, 0xed, 0x66, 0xab, 0x24, 0xc8, 0x85, + 0xd3, 0x33, 0x6d, 0x31, 0x86, 0x73, 0x65, 0xd4, 0xcc, 0x30, 0x65, 0x58, 0xcb, 0xe2, 0xc9, 0x57, + 0x85, 0xab, 0xef, 0x9c, 0x5f, 0x2a, 0xfc, 0xc5, 0xa5, 0xc2, 0xff, 0xbc, 0x54, 0xf8, 0x4f, 0x57, + 0x0a, 0x77, 0x71, 0xa5, 0x70, 0x3f, 0xae, 0x14, 0x6e, 0x77, 0x63, 0x1f, 0xd3, 0x83, 0xa3, 0x7e, + 0xf8, 0xd3, 0x19, 0x36, 0x09, 0x86, 0x24, 0x88, 0x3f, 0x56, 0x02, 0xe7, 0xd0, 0x38, 0x36, 0x92, + 0x07, 0xf5, 0xc5, 0xda, 0x4a, 0xea, 0xa9, 0xa6, 0x63, 0x0f, 0x05, 0xfd, 0x85, 0xe8, 0x31, 0x5d, + 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x48, 0x0f, 0xf2, 0xaa, 0xce, 0x05, 0x00, 0x00, } func (m *ConnectionEnd) Marshal() (dAtA []byte, err error) { @@ -424,6 +432,11 @@ func (m *ConnectionEnd) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.DelayPeriod != 0 { + i = encodeVarintConnection(dAtA, i, uint64(m.DelayPeriod)) + i-- + dAtA[i] = 0x28 + } { size, err := m.Counterparty.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -483,6 +496,11 @@ func (m *IdentifiedConnection) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.DelayPeriod != 0 { + i = encodeVarintConnection(dAtA, i, uint64(m.DelayPeriod)) + i-- + dAtA[i] = 0x30 + } { size, err := m.Counterparty.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -718,6 +736,9 @@ func (m *ConnectionEnd) Size() (n int) { } l = m.Counterparty.Size() n += 1 + l + sovConnection(uint64(l)) + if m.DelayPeriod != 0 { + n += 1 + sovConnection(uint64(m.DelayPeriod)) + } return n } @@ -746,6 +767,9 @@ func (m *IdentifiedConnection) Size() (n int) { } l = m.Counterparty.Size() n += 1 + l + sovConnection(uint64(l)) + if m.DelayPeriod != 0 { + n += 1 + sovConnection(uint64(m.DelayPeriod)) + } return n } @@ -974,6 +998,25 @@ func (m *ConnectionEnd) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field DelayPeriod", wireType) + } + m.DelayPeriod = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowConnection + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.DelayPeriod |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipConnection(dAtA[iNdEx:]) @@ -1177,6 +1220,25 @@ func (m *IdentifiedConnection) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 6: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field DelayPeriod", wireType) + } + m.DelayPeriod = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowConnection + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.DelayPeriod |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipConnection(dAtA[iNdEx:]) diff --git a/x/ibc/core/03-connection/types/connection_test.go b/x/ibc/core/03-connection/types/connection_test.go index 133bb1e5c..e7e91538c 100644 --- a/x/ibc/core/03-connection/types/connection_test.go +++ b/x/ibc/core/03-connection/types/connection_test.go @@ -29,27 +29,27 @@ func TestConnectionValidateBasic(t *testing.T) { }{ { "valid 
connection", - types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}}, + types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, 500}, true, }, { "invalid client id", - types.ConnectionEnd{"(clientID1)", []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}}, + types.ConnectionEnd{"(clientID1)", []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, 500}, false, }, { "empty versions", - types.ConnectionEnd{clientID, nil, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}}, + types.ConnectionEnd{clientID, nil, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, 500}, false, }, { "invalid version", - types.ConnectionEnd{clientID, []*types.Version{{}}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}}, + types.ConnectionEnd{clientID, []*types.Version{{}}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, 500}, false, }, { "invalid counterparty", - types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, emptyPrefix}}, + types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, emptyPrefix}, 500}, false, }, } @@ -98,12 +98,12 @@ func TestIdentifiedConnectionValidateBasic(t *testing.T) { }{ { "valid connection", - types.NewIdentifiedConnection(clientID, types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}}), + types.NewIdentifiedConnection(clientID, types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, 500}), true, }, { "invalid connection id", - types.NewIdentifiedConnection("(connectionIDONE)", types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}}), + types.NewIdentifiedConnection("(connectionIDONE)", types.ConnectionEnd{clientID, []*types.Version{ibctesting.ConnectionVersion}, types.INIT, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, 500}), false, }, } diff --git a/x/ibc/core/03-connection/types/genesis_test.go b/x/ibc/core/03-connection/types/genesis_test.go index 8d343d1f0..846837f9a 100644 --- a/x/ibc/core/03-connection/types/genesis_test.go +++ b/x/ibc/core/03-connection/types/genesis_test.go @@ -26,7 +26,7 @@ func TestValidateGenesis(t *testing.T) { name: "valid genesis", genState: types.NewGenesisState( []types.IdentifiedConnection{ - types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, 
commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion}, 500)), }, []types.ConnectionPaths{ {clientID, []string{connectionID}}, @@ -39,7 +39,7 @@ func TestValidateGenesis(t *testing.T) { name: "invalid connection", genState: types.NewGenesisState( []types.IdentifiedConnection{ - types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, "(CLIENTIDONE)", types.Counterparty{clientID, connectionID, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, "(CLIENTIDONE)", types.Counterparty{clientID, connectionID, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion}, 500)), }, []types.ConnectionPaths{ {clientID, []string{connectionID}}, @@ -52,7 +52,7 @@ func TestValidateGenesis(t *testing.T) { name: "invalid client id", genState: types.NewGenesisState( []types.IdentifiedConnection{ - types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion}, 500)), }, []types.ConnectionPaths{ {"(CLIENTIDONE)", []string{connectionID}}, @@ -65,7 +65,7 @@ func TestValidateGenesis(t *testing.T) { name: "invalid path", genState: types.NewGenesisState( []types.IdentifiedConnection{ - types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + types.NewIdentifiedConnection(connectionID, types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion}, 500)), }, []types.ConnectionPaths{ {clientID, []string{invalidConnectionID}}, @@ -78,7 +78,7 @@ func TestValidateGenesis(t *testing.T) { name: "invalid connection identifier", genState: types.NewGenesisState( []types.IdentifiedConnection{ - types.NewIdentifiedConnection("conn-0", types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + types.NewIdentifiedConnection("conn-0", types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion}, 500)), }, []types.ConnectionPaths{ {clientID, []string{connectionID}}, @@ -91,7 +91,7 @@ func TestValidateGenesis(t *testing.T) { name: "next connection sequence is not greater than maximum connection identifier sequence provided", genState: types.NewGenesisState( []types.IdentifiedConnection{ - types.NewIdentifiedConnection(types.FormatConnectionIdentifier(10), types.NewConnectionEnd(types.INIT, clientID, 
types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion})), + types.NewIdentifiedConnection(types.FormatConnectionIdentifier(10), types.NewConnectionEnd(types.INIT, clientID, types.Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, []*types.Version{ibctesting.ConnectionVersion}, 500)), }, []types.ConnectionPaths{ {clientID, []string{connectionID}}, diff --git a/x/ibc/core/03-connection/types/msgs.go b/x/ibc/core/03-connection/types/msgs.go index a734f9860..34a4b9d63 100644 --- a/x/ibc/core/03-connection/types/msgs.go +++ b/x/ibc/core/03-connection/types/msgs.go @@ -18,13 +18,15 @@ var _ sdk.Msg = &MsgConnectionOpenInit{} func NewMsgConnectionOpenInit( clientID, counterpartyClientID string, counterpartyPrefix commitmenttypes.MerklePrefix, - version *Version, signer sdk.AccAddress, + version *Version, delayPeriod uint64, signer sdk.AccAddress, ) *MsgConnectionOpenInit { + // counterparty must have the same delay period counterparty := NewCounterparty(counterpartyClientID, "", counterpartyPrefix) return &MsgConnectionOpenInit{ ClientId: clientID, Counterparty: counterparty, Version: version, + DelayPeriod: delayPeriod, Signer: signer.String(), } } @@ -83,7 +85,8 @@ var _ sdk.Msg = &MsgConnectionOpenTry{} func NewMsgConnectionOpenTry( previousConnectionID, clientID, counterpartyConnectionID, counterpartyClientID string, counterpartyClient exported.ClientState, - counterpartyPrefix commitmenttypes.MerklePrefix, counterpartyVersions []*Version, + counterpartyPrefix commitmenttypes.MerklePrefix, + counterpartyVersions []*Version, delayPeriod uint64, proofInit, proofClient, proofConsensus []byte, proofHeight, consensusHeight clienttypes.Height, signer sdk.AccAddress, ) *MsgConnectionOpenTry { @@ -95,6 +98,7 @@ func NewMsgConnectionOpenTry( ClientState: csAny, Counterparty: counterparty, CounterpartyVersions: counterpartyVersions, + DelayPeriod: delayPeriod, ProofInit: proofInit, ProofClient: proofClient, ProofConsensus: proofConsensus, diff --git a/x/ibc/core/03-connection/types/msgs_test.go b/x/ibc/core/03-connection/types/msgs_test.go index 9c09ec1f7..57c1925f6 100644 --- a/x/ibc/core/03-connection/types/msgs_test.go +++ b/x/ibc/core/03-connection/types/msgs_test.go @@ -87,13 +87,13 @@ func (suite *MsgTestSuite) TestNewMsgConnectionOpenInit() { msg *types.MsgConnectionOpenInit expPass bool }{ - {"invalid client ID", types.NewMsgConnectionOpenInit("test/iris", "clienttotest", prefix, version, signer), false}, - {"invalid counterparty client ID", types.NewMsgConnectionOpenInit("clienttotest", "(clienttotest)", prefix, version, signer), false}, - {"invalid counterparty connection ID", &types.MsgConnectionOpenInit{connectionID, types.NewCounterparty("clienttotest", "connectiontotest", prefix), version, signer.String()}, false}, - {"empty counterparty prefix", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", emptyPrefix, version, signer), false}, - {"supplied version fails basic validation", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", prefix, &types.Version{}, signer), false}, - {"empty singer", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", prefix, version, nil), false}, - {"success", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", prefix, version, signer), true}, + {"invalid client ID", types.NewMsgConnectionOpenInit("test/iris", "clienttotest", prefix, version, 500, signer), false}, + {"invalid counterparty 
client ID", types.NewMsgConnectionOpenInit("clienttotest", "(clienttotest)", prefix, version, 500, signer), false}, + {"invalid counterparty connection ID", &types.MsgConnectionOpenInit{connectionID, types.NewCounterparty("clienttotest", "connectiontotest", prefix), version, 500, signer.String()}, false}, + {"empty counterparty prefix", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", emptyPrefix, version, 500, signer), false}, + {"supplied version fails basic validation", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", prefix, &types.Version{}, 500, signer), false}, + {"empty singer", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", prefix, version, 500, nil), false}, + {"success", types.NewMsgConnectionOpenInit("clienttotest", "clienttotest", prefix, version, 500, signer), true}, } for _, tc := range testCases { @@ -131,24 +131,24 @@ func (suite *MsgTestSuite) TestNewMsgConnectionOpenTry() { msg *types.MsgConnectionOpenTry expPass bool }{ - {"invalid connection ID", types.NewMsgConnectionOpenTry("test/conn1", "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"invalid connection ID", types.NewMsgConnectionOpenTry("(invalidconnection)", "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"invalid client ID", types.NewMsgConnectionOpenTry(connectionID, "test/iris", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"invalid counterparty connection ID", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "ibc/test", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"invalid counterparty client ID", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "test/conn1", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"invalid nil counterparty client", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", nil, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"invalid client unpacking", &types.MsgConnectionOpenTry{connectionID, "clienttotesta", invalidAny, counterparty, []*types.Version{ibctesting.ConnectionVersion}, clientHeight, suite.proof, suite.proof, suite.proof, clientHeight, signer.String()}, false}, - {"counterparty failed Validate", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", invalidClient, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"empty counterparty prefix", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, emptyPrefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"empty counterpartyVersions", 
types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"empty proofInit", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, emptyProof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, - {"empty proofClient", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, emptyProof, suite.proof, clientHeight, clientHeight, signer), false}, - {"empty proofConsensus", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, emptyProof, clientHeight, clientHeight, signer), false}, - {"invalid proofHeight", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clienttypes.ZeroHeight(), clientHeight, signer), false}, - {"invalid consensusHeight", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clienttypes.ZeroHeight(), signer), false}, - {"empty singer", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, nil), false}, - {"success", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), true}, - {"invalid version", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{{}}, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"invalid connection ID", types.NewMsgConnectionOpenTry("test/conn1", "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"invalid connection ID", types.NewMsgConnectionOpenTry("(invalidconnection)", "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"invalid client ID", types.NewMsgConnectionOpenTry(connectionID, "test/iris", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"invalid counterparty connection ID", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "ibc/test", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"invalid 
counterparty client ID", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "test/conn1", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"invalid nil counterparty client", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", nil, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"invalid client unpacking", &types.MsgConnectionOpenTry{connectionID, "clienttotesta", invalidAny, counterparty, 500, []*types.Version{ibctesting.ConnectionVersion}, clientHeight, suite.proof, suite.proof, suite.proof, clientHeight, signer.String()}, false}, + {"counterparty failed Validate", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", invalidClient, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"empty counterparty prefix", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, emptyPrefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"empty counterpartyVersions", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"empty proofInit", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, emptyProof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, + {"empty proofClient", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, emptyProof, suite.proof, clientHeight, clientHeight, signer), false}, + {"empty proofConsensus", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, emptyProof, clientHeight, clientHeight, signer), false}, + {"invalid proofHeight", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clienttypes.ZeroHeight(), clientHeight, signer), false}, + {"invalid consensusHeight", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clienttypes.ZeroHeight(), signer), false}, + {"empty singer", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, nil), false}, + {"success", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{ibctesting.ConnectionVersion}, 500, 
suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), true}, + {"invalid version", types.NewMsgConnectionOpenTry(connectionID, "clienttotesta", "connectiontotest", "clienttotest", clientState, prefix, []*types.Version{{}}, 500, suite.proof, suite.proof, suite.proof, clientHeight, clientHeight, signer), false}, } for _, tc := range testCases { diff --git a/x/ibc/core/03-connection/types/tx.pb.go b/x/ibc/core/03-connection/types/tx.pb.go index 05e60cf3e..5b1ce7746 100644 --- a/x/ibc/core/03-connection/types/tx.pb.go +++ b/x/ibc/core/03-connection/types/tx.pb.go @@ -36,7 +36,8 @@ type MsgConnectionOpenInit struct { ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty" yaml:"client_id"` Counterparty Counterparty `protobuf:"bytes,2,opt,name=counterparty,proto3" json:"counterparty"` Version *Version `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"` - Signer string `protobuf:"bytes,4,opt,name=signer,proto3" json:"signer,omitempty"` + DelayPeriod uint64 `protobuf:"varint,4,opt,name=delay_period,json=delayPeriod,proto3" json:"delay_period,omitempty" yaml:"delay_period"` + Signer string `protobuf:"bytes,5,opt,name=signer,proto3" json:"signer,omitempty"` } func (m *MsgConnectionOpenInit) Reset() { *m = MsgConnectionOpenInit{} } @@ -118,17 +119,18 @@ type MsgConnectionOpenTry struct { PreviousConnectionId string `protobuf:"bytes,2,opt,name=previous_connection_id,json=previousConnectionId,proto3" json:"previous_connection_id,omitempty" yaml:"previous_connection_id"` ClientState *types.Any `protobuf:"bytes,3,opt,name=client_state,json=clientState,proto3" json:"client_state,omitempty" yaml:"client_state"` Counterparty Counterparty `protobuf:"bytes,4,opt,name=counterparty,proto3" json:"counterparty"` - CounterpartyVersions []*Version `protobuf:"bytes,5,rep,name=counterparty_versions,json=counterpartyVersions,proto3" json:"counterparty_versions,omitempty" yaml:"counterparty_versions"` - ProofHeight types1.Height `protobuf:"bytes,6,opt,name=proof_height,json=proofHeight,proto3" json:"proof_height" yaml:"proof_height"` + DelayPeriod uint64 `protobuf:"varint,5,opt,name=delay_period,json=delayPeriod,proto3" json:"delay_period,omitempty" yaml:"delay_period"` + CounterpartyVersions []*Version `protobuf:"bytes,6,rep,name=counterparty_versions,json=counterpartyVersions,proto3" json:"counterparty_versions,omitempty" yaml:"counterparty_versions"` + ProofHeight types1.Height `protobuf:"bytes,7,opt,name=proof_height,json=proofHeight,proto3" json:"proof_height" yaml:"proof_height"` // proof of the initialization the connection on Chain A: `UNITIALIZED -> // INIT` - ProofInit []byte `protobuf:"bytes,7,opt,name=proof_init,json=proofInit,proto3" json:"proof_init,omitempty" yaml:"proof_init"` + ProofInit []byte `protobuf:"bytes,8,opt,name=proof_init,json=proofInit,proto3" json:"proof_init,omitempty" yaml:"proof_init"` // proof of client state included in message - ProofClient []byte `protobuf:"bytes,8,opt,name=proof_client,json=proofClient,proto3" json:"proof_client,omitempty" yaml:"proof_client"` + ProofClient []byte `protobuf:"bytes,9,opt,name=proof_client,json=proofClient,proto3" json:"proof_client,omitempty" yaml:"proof_client"` // proof of client consensus state - ProofConsensus []byte `protobuf:"bytes,9,opt,name=proof_consensus,json=proofConsensus,proto3" json:"proof_consensus,omitempty" yaml:"proof_consensus"` - ConsensusHeight types1.Height `protobuf:"bytes,10,opt,name=consensus_height,json=consensusHeight,proto3" 
json:"consensus_height" yaml:"consensus_height"` - Signer string `protobuf:"bytes,11,opt,name=signer,proto3" json:"signer,omitempty"` + ProofConsensus []byte `protobuf:"bytes,10,opt,name=proof_consensus,json=proofConsensus,proto3" json:"proof_consensus,omitempty" yaml:"proof_consensus"` + ConsensusHeight types1.Height `protobuf:"bytes,11,opt,name=consensus_height,json=consensusHeight,proto3" json:"consensus_height" yaml:"consensus_height"` + Signer string `protobuf:"bytes,12,opt,name=signer,proto3" json:"signer,omitempty"` } func (m *MsgConnectionOpenTry) Reset() { *m = MsgConnectionOpenTry{} } @@ -384,62 +386,65 @@ func init() { func init() { proto.RegisterFile("ibc/core/connection/v1/tx.proto", fileDescriptor_5d00fde5fc97399e) } var fileDescriptor_5d00fde5fc97399e = []byte{ - // 880 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0x31, 0x73, 0xda, 0x58, - 0x10, 0x46, 0x06, 0x63, 0x78, 0x70, 0x67, 0x5b, 0x07, 0x58, 0xa7, 0xb3, 0x11, 0xd6, 0xdc, 0xcd, - 0xb9, 0x38, 0x4b, 0xc6, 0xf6, 0xcd, 0xdc, 0xf9, 0xe6, 0x0a, 0xa0, 0x39, 0x17, 0xbe, 0x64, 0x14, - 0x27, 0x99, 0x71, 0xc3, 0x80, 0x10, 0xb2, 0x06, 0xa3, 0xc7, 0xe8, 0x09, 0x62, 0xa5, 0x4d, 0x93, - 0x49, 0x95, 0x9f, 0xe0, 0x9f, 0xe3, 0xd2, 0x65, 0xd2, 0x68, 0x12, 0xbb, 0x49, 0xad, 0x26, 0x93, - 0x2e, 0xa3, 0xf7, 0x24, 0x21, 0x40, 0x8c, 0x21, 0x38, 0x15, 0x5a, 0xed, 0xf7, 0xed, 0xae, 0x76, - 0xf7, 0x7b, 0x3c, 0xc0, 0x69, 0x4d, 0x59, 0x94, 0xa1, 0xa1, 0x88, 0x32, 0xd4, 0x75, 0x45, 0x36, - 0x35, 0xa8, 0x8b, 0x83, 0xb2, 0x68, 0x5e, 0x0a, 0x3d, 0x03, 0x9a, 0x90, 0x2e, 0x68, 0x4d, 0x59, - 0x70, 0x01, 0xc2, 0x10, 0x20, 0x0c, 0xca, 0x6c, 0x4e, 0x85, 0x2a, 0xc4, 0x10, 0xd1, 0x7d, 0x22, - 0x68, 0xf6, 0x67, 0x15, 0x42, 0xf5, 0x42, 0x11, 0xb1, 0xd5, 0xec, 0xb7, 0xc5, 0x86, 0x6e, 0x79, - 0xae, 0x50, 0xa6, 0x0b, 0x4d, 0xd1, 0x4d, 0x37, 0x0b, 0x79, 0xf2, 0x00, 0xbf, 0x4f, 0x29, 0x25, - 0x94, 0x17, 0x03, 0xf9, 0xcf, 0x14, 0xc8, 0x9f, 0x20, 0xb5, 0x16, 0xbc, 0x7f, 0xd4, 0x53, 0xf4, - 0x63, 0x5d, 0x33, 0xe9, 0x32, 0x48, 0x93, 0x90, 0x75, 0xad, 0xc5, 0x50, 0x25, 0x6a, 0x27, 0x5d, - 0xcd, 0x39, 0x36, 0xb7, 0x66, 0x35, 0xba, 0x17, 0x47, 0x7c, 0xe0, 0xe2, 0xa5, 0x14, 0x79, 0x3e, - 0x6e, 0xd1, 0xff, 0x83, 0xac, 0x0c, 0xfb, 0xba, 0xa9, 0x18, 0xbd, 0x86, 0x61, 0x5a, 0xcc, 0x52, - 0x89, 0xda, 0xc9, 0xec, 0xff, 0x2a, 0x44, 0x7f, 0xb6, 0x50, 0x0b, 0x61, 0xab, 0x89, 0x6b, 0x9b, - 0x8b, 0x49, 0x23, 0x7c, 0xfa, 0x6f, 0xb0, 0x32, 0x50, 0x0c, 0xa4, 0x41, 0x9d, 0x89, 0xe3, 0x50, - 0xdc, 0xb4, 0x50, 0xcf, 0x08, 0x4c, 0xf2, 0xf1, 0x74, 0x01, 0x24, 0x91, 0xa6, 0xea, 0x8a, 0xc1, - 0x24, 0xdc, 0xd2, 0x25, 0xcf, 0x3a, 0x4a, 0xbd, 0xbe, 0xe2, 0x62, 0x9f, 0xae, 0xb8, 0x18, 0xcf, - 0x81, 0xad, 0xc8, 0x0f, 0x97, 0x14, 0xd4, 0x83, 0x3a, 0x52, 0xf8, 0xf7, 0x49, 0x90, 0x9b, 0x40, - 0x9c, 0x1a, 0xd6, 0xb7, 0x74, 0xe6, 0x39, 0x28, 0xf4, 0x0c, 0x65, 0xa0, 0xc1, 0x3e, 0xaa, 0x0f, - 0x2b, 0x77, 0xf9, 0x4b, 0x98, 0xbf, 0xed, 0xd8, 0xdc, 0x16, 0xe1, 0x47, 0xe3, 0x78, 0x29, 0xe7, - 0x3b, 0x86, 0x05, 0x1d, 0xb7, 0xe8, 0xc7, 0x20, 0xeb, 0x25, 0x44, 0x66, 0xc3, 0x54, 0xbc, 0x3e, - 0xe5, 0x04, 0xb2, 0x3b, 0x82, 0xbf, 0x3b, 0x42, 0x45, 0xb7, 0xaa, 0x1b, 0x8e, 0xcd, 0xfd, 0x34, - 0x52, 0x24, 0xe6, 0xf0, 0x52, 0x86, 0x98, 0x4f, 0x5c, 0x6b, 0x62, 0x88, 0x89, 0x05, 0x87, 0x38, - 0x00, 0xf9, 0xb0, 0x5d, 0xf7, 0x26, 0x84, 0x98, 0xe5, 0x52, 0x7c, 0x86, 0x91, 0x56, 0x4b, 0x8e, - 0xcd, 0x6d, 0x7a, 0x55, 0x47, 0xc5, 0xe1, 0xa5, 0x5c, 0xf8, 0xbd, 0x47, 0x43, 0xf4, 0x19, 0xc8, - 0xf6, 0x0c, 0x08, 0xdb, 0xf5, 0x73, 0x45, 0x53, 0xcf, 0x4d, 0x26, 0x89, 
0xbf, 0x83, 0x0d, 0xa5, - 0x23, 0x82, 0x19, 0x94, 0x85, 0xff, 0x30, 0xa2, 0xfa, 0x8b, 0x5b, 0xfd, 0xb0, 0x47, 0x61, 0x36, - 0x2f, 0x65, 0xb0, 0x49, 0x90, 0xf4, 0x21, 0x00, 0xc4, 0xab, 0xe9, 0x9a, 0xc9, 0xac, 0x94, 0xa8, - 0x9d, 0x6c, 0x35, 0xef, 0xd8, 0xdc, 0x7a, 0x98, 0xe9, 0xfa, 0x78, 0x29, 0x8d, 0x0d, 0xac, 0xa8, - 0x23, 0xbf, 0x22, 0x92, 0x99, 0x49, 0x61, 0xde, 0xc6, 0x78, 0x46, 0xe2, 0xf5, 0x33, 0xd6, 0xb0, - 0x45, 0xd7, 0xc0, 0xaa, 0xe7, 0x75, 0x77, 0x53, 0x47, 0x7d, 0xc4, 0xa4, 0x31, 0x9d, 0x75, 0x6c, - 0xae, 0x30, 0x42, 0xf7, 0x01, 0xbc, 0xf4, 0x23, 0x89, 0xe0, 0xbf, 0xa0, 0xdb, 0x60, 0x2d, 0xf0, - 0xfa, 0x6d, 0x01, 0xf7, 0xb6, 0x85, 0xf3, 0xda, 0xb2, 0xe1, 0x0f, 0x61, 0x34, 0x02, 0x2f, 0xad, - 0x06, 0xaf, 0xbc, 0xf6, 0x0c, 0xc5, 0x97, 0x99, 0x22, 0xbe, 0x22, 0xd8, 0x8c, 0x92, 0x56, 0xa0, - 0xbd, 0x8f, 0xcb, 0x11, 0xda, 0xab, 0xc8, 0x1d, 0xfa, 0x5f, 0xf0, 0xc3, 0xa8, 0x7e, 0x88, 0xfe, - 0x18, 0xc7, 0xe6, 0x72, 0x41, 0x7d, 0x61, 0xd9, 0x64, 0xe5, 0xb0, 0x5c, 0x64, 0xc0, 0x8e, 0x2c, - 0x51, 0x94, 0x16, 0x7f, 0x73, 0x6c, 0x6e, 0x3b, 0x62, 0xe1, 0xc6, 0x02, 0x33, 0x61, 0xe7, 0x88, - 0x26, 0x17, 0x38, 0xb6, 0xc6, 0xe5, 0x9c, 0x58, 0x58, 0xce, 0xe3, 0x32, 0x58, 0x7e, 0x40, 0x19, - 0x94, 0x01, 0xd9, 0xee, 0xba, 0x69, 0x58, 0x58, 0x5f, 0xd9, 0xf0, 0x41, 0x18, 0xb8, 0x78, 0x29, - 0x85, 0x9f, 0xdd, 0xb3, 0x73, 0x5c, 0x03, 0x2b, 0x8b, 0x69, 0x20, 0xf5, 0x20, 0x1a, 0x48, 0x7f, - 0x57, 0x0d, 0x80, 0x39, 0x34, 0x50, 0x91, 0x3b, 0x81, 0x06, 0xde, 0x2c, 0x01, 0x66, 0x02, 0x50, - 0x83, 0x7a, 0x5b, 0x33, 0xba, 0x8b, 0xea, 0x20, 0x98, 0x5c, 0x43, 0xee, 0xe0, 0xb5, 0x8f, 0x98, - 0x5c, 0x43, 0xee, 0xf8, 0x93, 0x73, 0x95, 0x37, 0xbe, 0x48, 0xf1, 0x07, 0x5c, 0xa4, 0xfb, 0xff, - 0xad, 0x79, 0x50, 0x9a, 0xd6, 0x0b, 0xbf, 0x61, 0xfb, 0x5f, 0xe2, 0x20, 0x7e, 0x82, 0x54, 0xfa, - 0x25, 0xa0, 0x23, 0xee, 0x33, 0xbb, 0xd3, 0x44, 0x18, 0x79, 0x0b, 0x60, 0xff, 0x9c, 0x0b, 0xee, - 0xd7, 0x40, 0xbf, 0x00, 0xeb, 0x93, 0x17, 0x86, 0x3f, 0x66, 0x8e, 0x75, 0x6a, 0x58, 0xec, 0xe1, - 0x3c, 0xe8, 0xe9, 0x89, 0xdd, 0x99, 0xcd, 0x9e, 0xb8, 0x22, 0x77, 0xe6, 0x48, 0x1c, 0x5a, 0x53, - 0xfa, 0x15, 0x05, 0xf2, 0xd1, 0x3b, 0xba, 0x37, 0x73, 0x3c, 0x8f, 0xc1, 0xfe, 0x35, 0x2f, 0xc3, - 0xaf, 0xa2, 0xfa, 0xf4, 0xfa, 0xb6, 0x48, 0xdd, 0xdc, 0x16, 0xa9, 0x0f, 0xb7, 0x45, 0xea, 0xed, - 0x5d, 0x31, 0x76, 0x73, 0x57, 0x8c, 0xbd, 0xbb, 0x2b, 0xc6, 0xce, 0xfe, 0x51, 0x35, 0xf3, 0xbc, - 0xdf, 0x14, 0x64, 0xd8, 0x15, 0x65, 0x88, 0xba, 0x10, 0x79, 0x3f, 0xbb, 0xa8, 0xd5, 0x11, 0x2f, - 0xc5, 0xe0, 0xa6, 0xbc, 0x77, 0xb0, 0x1b, 0xba, 0x2c, 0x9b, 0x56, 0x4f, 0x41, 0xcd, 0x24, 0x3e, - 0x71, 0x0f, 0xbe, 0x06, 0x00, 0x00, 0xff, 0xff, 0x1a, 0x4a, 0xeb, 0x3e, 0xdb, 0x0b, 0x00, 0x00, + // 921 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0x31, 0x93, 0xdb, 0x44, + 0x14, 0xb6, 0xce, 0xbe, 0x3b, 0x7b, 0x6d, 0x48, 0xb2, 0xf8, 0xee, 0x84, 0x48, 0x2c, 0x47, 0x03, + 0x83, 0x0b, 0x4e, 0x8a, 0x93, 0x30, 0x03, 0x66, 0x28, 0x6c, 0x37, 0x5c, 0x11, 0xc8, 0x88, 0x00, + 0x33, 0x69, 0x3c, 0xb6, 0xbc, 0xd6, 0x69, 0x6c, 0x6b, 0x35, 0x5a, 0xd9, 0x44, 0xb4, 0x34, 0x0c, + 0x15, 0x0d, 0x7d, 0xfe, 0x03, 0x7f, 0x22, 0xe5, 0x95, 0x54, 0x1a, 0xb8, 0x6b, 0xa8, 0xd5, 0xd1, + 0x31, 0xda, 0x95, 0xe4, 0xb5, 0x2d, 0x0f, 0x36, 0x3e, 0x2a, 0xe9, 0xed, 0xfb, 0xde, 0x7b, 0xbb, + 0xef, 0x7d, 0xdf, 0xce, 0x02, 0xd9, 0x1a, 0x18, 0x9a, 0x81, 0x5d, 0xa4, 0x19, 0xd8, 0xb6, 0x91, + 0xe1, 0x59, 0xd8, 0xd6, 0xe6, 0x4d, 0xcd, 0x7b, 0xa5, 0x3a, 0x2e, 0xf6, 0x30, 0x3c, 0xb5, 0x06, + 0x86, 0x1a, 0x01, 0xd4, 0x05, 0x40, 0x9d, 0x37, 
0xa5, 0xaa, 0x89, 0x4d, 0x4c, 0x21, 0x5a, 0xf4, + 0xc7, 0xd0, 0xd2, 0xbb, 0x26, 0xc6, 0xe6, 0x04, 0x69, 0xd4, 0x1a, 0xcc, 0x46, 0x5a, 0xdf, 0xf6, + 0x63, 0x17, 0x57, 0x69, 0x62, 0x21, 0xdb, 0x8b, 0xaa, 0xb0, 0xbf, 0x18, 0xf0, 0xe1, 0x86, 0xad, + 0x70, 0x75, 0x29, 0x50, 0xf9, 0xed, 0x00, 0x9c, 0x3c, 0x23, 0x66, 0x37, 0x5d, 0xff, 0xca, 0x41, + 0xf6, 0x85, 0x6d, 0x79, 0xb0, 0x09, 0x4a, 0x2c, 0x65, 0xcf, 0x1a, 0x8a, 0x42, 0x5d, 0x68, 0x94, + 0x3a, 0xd5, 0x30, 0x90, 0xef, 0xfa, 0xfd, 0xe9, 0xa4, 0xa5, 0xa4, 0x2e, 0x45, 0x2f, 0xb2, 0xff, + 0x8b, 0x21, 0xfc, 0x12, 0x54, 0x0c, 0x3c, 0xb3, 0x3d, 0xe4, 0x3a, 0x7d, 0xd7, 0xf3, 0xc5, 0x83, + 0xba, 0xd0, 0x28, 0x3f, 0x7e, 0x5f, 0xcd, 0x3e, 0xb6, 0xda, 0xe5, 0xb0, 0x9d, 0xc2, 0x9b, 0x40, + 0xce, 0xe9, 0x4b, 0xf1, 0xf0, 0x53, 0x70, 0x3c, 0x47, 0x2e, 0xb1, 0xb0, 0x2d, 0xe6, 0x69, 0x2a, + 0x79, 0x53, 0xaa, 0x6f, 0x19, 0x4c, 0x4f, 0xf0, 0xb0, 0x05, 0x2a, 0x43, 0x34, 0xe9, 0xfb, 0x3d, + 0x07, 0xb9, 0x16, 0x1e, 0x8a, 0x85, 0xba, 0xd0, 0x28, 0x74, 0xce, 0xc2, 0x40, 0x7e, 0x87, 0x1d, + 0x80, 0xf7, 0x2a, 0x7a, 0x99, 0x9a, 0xcf, 0xa9, 0x05, 0x4f, 0xc1, 0x11, 0xb1, 0x4c, 0x1b, 0xb9, + 0xe2, 0x61, 0x74, 0x6c, 0x3d, 0xb6, 0x5a, 0xc5, 0x9f, 0x5e, 0xcb, 0xb9, 0xbf, 0x5e, 0xcb, 0x39, + 0x45, 0x06, 0x0f, 0x32, 0x9b, 0xa6, 0x23, 0xe2, 0x60, 0x9b, 0x20, 0xe5, 0xd7, 0x63, 0x50, 0x5d, + 0x43, 0xbc, 0x70, 0xfd, 0xff, 0xd2, 0xd5, 0xef, 0xc0, 0xa9, 0xe3, 0xa2, 0xb9, 0x85, 0x67, 0xa4, + 0xb7, 0x38, 0x75, 0x14, 0x7f, 0x40, 0xe3, 0x1f, 0x86, 0x81, 0xfc, 0x80, 0xc5, 0x67, 0xe3, 0x14, + 0xbd, 0x9a, 0x38, 0x16, 0x1b, 0xba, 0x18, 0xc2, 0xe7, 0xa0, 0x12, 0x17, 0x24, 0x5e, 0xdf, 0x43, + 0x71, 0x8f, 0xab, 0x2a, 0xe3, 0x9d, 0x9a, 0xf0, 0x4e, 0x6d, 0xdb, 0x3e, 0xdf, 0x39, 0x3e, 0x46, + 0xd1, 0xcb, 0xcc, 0xfc, 0x3a, 0xb2, 0xd6, 0x08, 0x50, 0xd8, 0x93, 0x00, 0xab, 0x53, 0x3c, 0xdc, + 0x61, 0x8a, 0x73, 0x70, 0xc2, 0xe7, 0xea, 0xc5, 0xcc, 0x20, 0xe2, 0x51, 0x3d, 0xbf, 0x05, 0x95, + 0x3a, 0xf5, 0x30, 0x90, 0xef, 0xc7, 0x27, 0xce, 0xca, 0xa3, 0xe8, 0x55, 0x7e, 0x3d, 0x0e, 0x23, + 0xf0, 0x25, 0xa8, 0x38, 0x2e, 0xc6, 0xa3, 0xde, 0x25, 0xb2, 0xcc, 0x4b, 0x4f, 0x3c, 0xa6, 0x3d, + 0x90, 0xb8, 0x72, 0x4c, 0xa8, 0xf3, 0xa6, 0xfa, 0x05, 0x45, 0x74, 0xde, 0x8b, 0x4e, 0xbe, 0x38, + 0x13, 0x1f, 0xad, 0xe8, 0x65, 0x6a, 0x32, 0x24, 0x7c, 0x0a, 0x00, 0xf3, 0x5a, 0xb6, 0xe5, 0x89, + 0xc5, 0xba, 0xd0, 0xa8, 0x74, 0x4e, 0xc2, 0x40, 0xbe, 0xc7, 0x47, 0x46, 0x3e, 0x45, 0x2f, 0x51, + 0x83, 0x2a, 0xb9, 0x95, 0xec, 0x88, 0x55, 0x16, 0x4b, 0x34, 0xee, 0x6c, 0xb5, 0x22, 0xf3, 0x26, + 0x15, 0xbb, 0xd4, 0x82, 0x5d, 0x70, 0x27, 0xf6, 0x46, 0xbc, 0xb6, 0xc9, 0x8c, 0x88, 0x80, 0x86, + 0x4b, 0x61, 0x20, 0x9f, 0x2e, 0x85, 0x27, 0x00, 0x45, 0x7f, 0x9b, 0x65, 0x48, 0x16, 0xe0, 0x08, + 0xdc, 0x4d, 0xbd, 0x49, 0x5b, 0xca, 0xff, 0xda, 0x16, 0x39, 0x6e, 0xcb, 0x59, 0x32, 0x84, 0xe5, + 0x0c, 0x8a, 0x7e, 0x27, 0x5d, 0x8a, 0xdb, 0xb3, 0x10, 0x6e, 0x65, 0x83, 0x70, 0x6b, 0xe0, 0x7e, + 0x96, 0x2c, 0x53, 0xdd, 0xfe, 0x79, 0x98, 0xa1, 0xdb, 0xb6, 0x31, 0x86, 0x9f, 0x83, 0xb7, 0x96, + 0xb5, 0xc7, 0xb4, 0x2b, 0x86, 0x81, 0x5c, 0x4d, 0xf7, 0xc7, 0x4b, 0xae, 0x62, 0xf0, 0x52, 0x33, + 0x80, 0xb4, 0x44, 0xa2, 0x2c, 0x1d, 0x7f, 0x10, 0x06, 0xf2, 0xc3, 0x0c, 0xc2, 0xad, 0x24, 0x16, + 0x79, 0xe7, 0x92, 0x9e, 0xf7, 0xb8, 0x2e, 0x57, 0xaf, 0x82, 0xc2, 0xde, 0x57, 0xc1, 0xaa, 0x0c, + 0x0e, 0x6f, 0x51, 0x06, 0x4d, 0xc0, 0xd8, 0xdd, 0xf3, 0x5c, 0x5f, 0x3c, 0xa2, 0x74, 0xe4, 0x2e, + 0xd1, 0xd4, 0xa5, 0xe8, 0x45, 0xfa, 0x1f, 0xdd, 0xbb, 0xab, 0x1a, 0x38, 0xde, 0x4f, 0x03, 0xc5, + 0x5b, 0xd1, 0x40, 0xe9, 0x7f, 0xd5, 0x00, 0xd8, 0x41, 0x03, 0x6d, 0x63, 
0x9c, 0x6a, 0xe0, 0xe7, + 0x03, 0x20, 0xae, 0x01, 0xba, 0xd8, 0x1e, 0x59, 0xee, 0x74, 0x5f, 0x1d, 0xa4, 0x93, 0xeb, 0x1b, + 0x63, 0x4a, 0xfb, 0x8c, 0xc9, 0xf5, 0x8d, 0x71, 0x32, 0xb9, 0x48, 0x79, 0xab, 0x44, 0xca, 0xdf, + 0x22, 0x91, 0x16, 0xcd, 0x2a, 0x6c, 0x68, 0x96, 0x02, 0xea, 0x9b, 0x7a, 0x91, 0x34, 0xec, 0xf1, + 0xdf, 0x79, 0x90, 0x7f, 0x46, 0x4c, 0xf8, 0x03, 0x80, 0x19, 0xef, 0xa8, 0xf3, 0x4d, 0x22, 0xcc, + 0x7c, 0x41, 0x48, 0x1f, 0xef, 0x04, 0x4f, 0xf6, 0x00, 0xbf, 0x07, 0xf7, 0xd6, 0x1f, 0x1b, 0x1f, + 0x6d, 0x9d, 0xeb, 0x85, 0xeb, 0x4b, 0x4f, 0x77, 0x41, 0x6f, 0x2e, 0x1c, 0xcd, 0x6c, 0xfb, 0xc2, + 0x6d, 0x63, 0xbc, 0x43, 0x61, 0x8e, 0xa6, 0xf0, 0x47, 0x01, 0x9c, 0x64, 0x73, 0xf4, 0xd1, 0xd6, + 0xf9, 0xe2, 0x08, 0xe9, 0x93, 0x5d, 0x23, 0x92, 0x5d, 0x74, 0xbe, 0x79, 0x73, 0x5d, 0x13, 0xae, + 0xae, 0x6b, 0xc2, 0x1f, 0xd7, 0x35, 0xe1, 0x97, 0x9b, 0x5a, 0xee, 0xea, 0xa6, 0x96, 0xfb, 0xfd, + 0xa6, 0x96, 0x7b, 0xf9, 0x99, 0x69, 0x79, 0x97, 0xb3, 0x81, 0x6a, 0xe0, 0xa9, 0x66, 0x60, 0x32, + 0xc5, 0x24, 0xfe, 0x9c, 0x93, 0xe1, 0x58, 0x7b, 0xa5, 0xa5, 0x2f, 0xf4, 0x47, 0x4f, 0xce, 0xb9, + 0x47, 0xba, 0xe7, 0x3b, 0x88, 0x0c, 0x8e, 0xe8, 0x8d, 0xfb, 0xe4, 0x9f, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xc4, 0x4d, 0xc5, 0x58, 0x53, 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -663,7 +668,12 @@ func (m *MsgConnectionOpenInit) MarshalToSizedBuffer(dAtA []byte) (int, error) { copy(dAtA[i:], m.Signer) i = encodeVarintTx(dAtA, i, uint64(len(m.Signer))) i-- - dAtA[i] = 0x22 + dAtA[i] = 0x2a + } + if m.DelayPeriod != 0 { + i = encodeVarintTx(dAtA, i, uint64(m.DelayPeriod)) + i-- + dAtA[i] = 0x20 } if m.Version != nil { { @@ -745,7 +755,7 @@ func (m *MsgConnectionOpenTry) MarshalToSizedBuffer(dAtA []byte) (int, error) { copy(dAtA[i:], m.Signer) i = encodeVarintTx(dAtA, i, uint64(len(m.Signer))) i-- - dAtA[i] = 0x5a + dAtA[i] = 0x62 } { size, err := m.ConsensusHeight.MarshalToSizedBuffer(dAtA[:i]) @@ -756,27 +766,27 @@ func (m *MsgConnectionOpenTry) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintTx(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x52 + dAtA[i] = 0x5a if len(m.ProofConsensus) > 0 { i -= len(m.ProofConsensus) copy(dAtA[i:], m.ProofConsensus) i = encodeVarintTx(dAtA, i, uint64(len(m.ProofConsensus))) i-- - dAtA[i] = 0x4a + dAtA[i] = 0x52 } if len(m.ProofClient) > 0 { i -= len(m.ProofClient) copy(dAtA[i:], m.ProofClient) i = encodeVarintTx(dAtA, i, uint64(len(m.ProofClient))) i-- - dAtA[i] = 0x42 + dAtA[i] = 0x4a } if len(m.ProofInit) > 0 { i -= len(m.ProofInit) copy(dAtA[i:], m.ProofInit) i = encodeVarintTx(dAtA, i, uint64(len(m.ProofInit))) i-- - dAtA[i] = 0x3a + dAtA[i] = 0x42 } { size, err := m.ProofHeight.MarshalToSizedBuffer(dAtA[:i]) @@ -787,7 +797,7 @@ func (m *MsgConnectionOpenTry) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintTx(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x32 + dAtA[i] = 0x3a if len(m.CounterpartyVersions) > 0 { for iNdEx := len(m.CounterpartyVersions) - 1; iNdEx >= 0; iNdEx-- { { @@ -799,9 +809,14 @@ func (m *MsgConnectionOpenTry) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintTx(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x2a + dAtA[i] = 0x32 } } + if m.DelayPeriod != 0 { + i = encodeVarintTx(dAtA, i, uint64(m.DelayPeriod)) + i-- + dAtA[i] = 0x28 + } { size, err := m.Counterparty.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -1100,6 +1115,9 @@ func (m *MsgConnectionOpenInit) Size() (n int) { l = m.Version.Size() n += 1 + l + sovTx(uint64(l)) } + if m.DelayPeriod != 0 { + n 
+= 1 + sovTx(uint64(m.DelayPeriod)) + } l = len(m.Signer) if l > 0 { n += 1 + l + sovTx(uint64(l)) @@ -1136,6 +1154,9 @@ func (m *MsgConnectionOpenTry) Size() (n int) { } l = m.Counterparty.Size() n += 1 + l + sovTx(uint64(l)) + if m.DelayPeriod != 0 { + n += 1 + sovTx(uint64(m.DelayPeriod)) + } if len(m.CounterpartyVersions) > 0 { for _, e := range m.CounterpartyVersions { l = e.Size() @@ -1397,6 +1418,25 @@ func (m *MsgConnectionOpenInit) Unmarshal(dAtA []byte) error { } iNdEx = postIndex case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field DelayPeriod", wireType) + } + m.DelayPeriod = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.DelayPeriod |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Signer", wireType) } @@ -1668,6 +1708,25 @@ func (m *MsgConnectionOpenTry) Unmarshal(dAtA []byte) error { } iNdEx = postIndex case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field DelayPeriod", wireType) + } + m.DelayPeriod = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.DelayPeriod |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 6: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field CounterpartyVersions", wireType) } @@ -1701,7 +1760,7 @@ func (m *MsgConnectionOpenTry) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 6: + case 7: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ProofHeight", wireType) } @@ -1734,7 +1793,7 @@ func (m *MsgConnectionOpenTry) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 7: + case 8: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ProofInit", wireType) } @@ -1768,7 +1827,7 @@ func (m *MsgConnectionOpenTry) Unmarshal(dAtA []byte) error { m.ProofInit = []byte{} } iNdEx = postIndex - case 8: + case 9: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ProofClient", wireType) } @@ -1802,7 +1861,7 @@ func (m *MsgConnectionOpenTry) Unmarshal(dAtA []byte) error { m.ProofClient = []byte{} } iNdEx = postIndex - case 9: + case 10: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ProofConsensus", wireType) } @@ -1836,7 +1895,7 @@ func (m *MsgConnectionOpenTry) Unmarshal(dAtA []byte) error { m.ProofConsensus = []byte{} } iNdEx = postIndex - case 10: + case 11: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ConsensusHeight", wireType) } @@ -1869,7 +1928,7 @@ func (m *MsgConnectionOpenTry) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 11: + case 12: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Signer", wireType) } diff --git a/x/ibc/core/exported/client.go b/x/ibc/core/exported/client.go index 67c71526d..0fe26d9f5 100644 --- a/x/ibc/core/exported/client.go +++ b/x/ibc/core/exported/client.go @@ -32,6 +32,14 @@ type ClientState interface { Validate() error GetProofSpecs() []*ics23.ProofSpec + // Initialization function + // Clients must validate the initial consensus state, and may store any client-specific metadata + // necessary for correct light client operation + Initialize(sdk.Context, codec.BinaryMarshaler, 
sdk.KVStore, ConsensusState) error + + // Genesis function + ExportMetadata(sdk.KVStore) []GenesisMetadata + // Update and Misbehaviour functions CheckHeaderAndUpdateState(sdk.Context, codec.BinaryMarshaler, sdk.KVStore, Header) (ClientState, ConsensusState, error) @@ -102,6 +110,8 @@ type ClientState interface { store sdk.KVStore, cdc codec.BinaryMarshaler, height Height, + currentTimestamp uint64, + delayPeriod uint64, prefix Prefix, proof []byte, portID, @@ -113,6 +123,8 @@ type ClientState interface { store sdk.KVStore, cdc codec.BinaryMarshaler, height Height, + currentTimestamp uint64, + delayPeriod uint64, prefix Prefix, proof []byte, portID, @@ -124,6 +136,8 @@ type ClientState interface { store sdk.KVStore, cdc codec.BinaryMarshaler, height Height, + currentTimestamp uint64, + delayPeriod uint64, prefix Prefix, proof []byte, portID, @@ -134,6 +148,8 @@ type ClientState interface { store sdk.KVStore, cdc codec.BinaryMarshaler, height Height, + currentTimestamp uint64, + delayPeriod uint64, prefix Prefix, proof []byte, portID, @@ -187,3 +203,12 @@ type Height interface { Decrement() (Height, bool) String() string } + +// GenesisMetadata is a wrapper interface over clienttypes.GenesisMetadata +// all clients must use the concrete implementation in types +type GenesisMetadata interface { + // return store key that contains metadata without clientID-prefix + GetKey() []byte + // returns metadata value + GetValue() []byte +} diff --git a/x/ibc/core/exported/connection.go b/x/ibc/core/exported/connection.go index a21c5a536..8f705daff 100644 --- a/x/ibc/core/exported/connection.go +++ b/x/ibc/core/exported/connection.go @@ -6,6 +6,7 @@ type ConnectionI interface { GetState() int32 GetCounterparty() CounterpartyConnectionI GetVersions() []Version + GetDelayPeriod() uint64 ValidateBasic() error } diff --git a/x/ibc/core/genesis_test.go b/x/ibc/core/genesis_test.go index fa0200de7..c29feef7f 100644 --- a/x/ibc/core/genesis_test.go +++ b/x/ibc/core/genesis_test.go @@ -96,13 +96,22 @@ func (suite *IBCTestSuite) TestValidateGenesis() { }, ), }, + []clienttypes.IdentifiedGenesisMetadata{ + clienttypes.NewIdentifiedGenesisMetadata( + clientID, + []clienttypes.GenesisMetadata{ + clienttypes.NewGenesisMetadata([]byte("key1"), []byte("val1")), + clienttypes.NewGenesisMetadata([]byte("key2"), []byte("val2")), + }, + ), + }, clienttypes.NewParams(exported.Tendermint, exported.Localhost), true, 2, ), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ - connectiontypes.NewIdentifiedConnection(connectionID, connectiontypes.NewConnectionEnd(connectiontypes.INIT, clientID, connectiontypes.NewCounterparty(clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))), []*connectiontypes.Version{ibctesting.ConnectionVersion})), + connectiontypes.NewIdentifiedConnection(connectionID, connectiontypes.NewConnectionEnd(connectiontypes.INIT, clientID, connectiontypes.NewCounterparty(clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))), []*connectiontypes.Version{ibctesting.ConnectionVersion}, 0)), }, []connectiontypes.ConnectionPaths{ connectiontypes.NewConnectionPaths(clientID, []string{connectionID}), @@ -154,6 +163,15 @@ func (suite *IBCTestSuite) TestValidateGenesis() { ), }, nil, + []clienttypes.IdentifiedGenesisMetadata{ + clienttypes.NewIdentifiedGenesisMetadata( + clientID, + []clienttypes.GenesisMetadata{ + clienttypes.NewGenesisMetadata([]byte(""), []byte("val1")), + clienttypes.NewGenesisMetadata([]byte("key2"), []byte("")), 
+ }, + ), + }, clienttypes.NewParams(exported.Tendermint), false, 2, @@ -168,7 +186,7 @@ func (suite *IBCTestSuite) TestValidateGenesis() { ClientGenesis: clienttypes.DefaultGenesisState(), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ - connectiontypes.NewIdentifiedConnection(connectionID, connectiontypes.NewConnectionEnd(connectiontypes.INIT, "(CLIENTIDONE)", connectiontypes.NewCounterparty(clientID, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))), []*connectiontypes.Version{connectiontypes.NewVersion("1.1", nil)})), + connectiontypes.NewIdentifiedConnection(connectionID, connectiontypes.NewConnectionEnd(connectiontypes.INIT, "(CLIENTIDONE)", connectiontypes.NewCounterparty(clientID, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))), []*connectiontypes.Version{connectiontypes.NewVersion("1.1", nil)}, 0)), }, []connectiontypes.ConnectionPaths{ connectiontypes.NewConnectionPaths(clientID, []string{connectionID}), @@ -240,13 +258,22 @@ func (suite *IBCTestSuite) TestInitGenesis() { }, ), }, + []clienttypes.IdentifiedGenesisMetadata{ + clienttypes.NewIdentifiedGenesisMetadata( + clientID, + []clienttypes.GenesisMetadata{ + clienttypes.NewGenesisMetadata([]byte("key1"), []byte("val1")), + clienttypes.NewGenesisMetadata([]byte("key2"), []byte("val2")), + }, + ), + }, clienttypes.NewParams(exported.Tendermint, exported.Localhost), true, 0, ), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ - connectiontypes.NewIdentifiedConnection(connectionID, connectiontypes.NewConnectionEnd(connectiontypes.INIT, clientID, connectiontypes.NewCounterparty(clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))), []*connectiontypes.Version{ibctesting.ConnectionVersion})), + connectiontypes.NewIdentifiedConnection(connectionID, connectiontypes.NewConnectionEnd(connectiontypes.INIT, clientID, connectiontypes.NewCounterparty(clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))), []*connectiontypes.Version{ibctesting.ConnectionVersion}, 0)), }, []connectiontypes.ConnectionPaths{ connectiontypes.NewConnectionPaths(clientID, []string{connectionID}), diff --git a/x/ibc/core/keeper/msg_server.go b/x/ibc/core/keeper/msg_server.go index 8815a7e45..5ea590fea 100644 --- a/x/ibc/core/keeper/msg_server.go +++ b/x/ibc/core/keeper/msg_server.go @@ -134,7 +134,7 @@ func (k Keeper) SubmitMisbehaviour(goCtx context.Context, msg *clienttypes.MsgSu func (k Keeper) ConnectionOpenInit(goCtx context.Context, msg *connectiontypes.MsgConnectionOpenInit) (*connectiontypes.MsgConnectionOpenInitResponse, error) { ctx := sdk.UnwrapSDKContext(goCtx) - connectionID, err := k.ConnectionKeeper.ConnOpenInit(ctx, msg.ClientId, msg.Counterparty, msg.Version) + connectionID, err := k.ConnectionKeeper.ConnOpenInit(ctx, msg.ClientId, msg.Counterparty, msg.Version, msg.DelayPeriod) if err != nil { return nil, sdkerrors.Wrap(err, "connection handshake open init failed") } @@ -166,7 +166,7 @@ func (k Keeper) ConnectionOpenTry(goCtx context.Context, msg *connectiontypes.Ms } connectionID, err := k.ConnectionKeeper.ConnOpenTry( - ctx, msg.PreviousConnectionId, msg.Counterparty, msg.ClientId, targetClient, + ctx, msg.PreviousConnectionId, msg.Counterparty, msg.DelayPeriod, msg.ClientId, targetClient, connectiontypes.ProtoVersionsToExported(msg.CounterpartyVersions), msg.ProofInit, msg.ProofClient, msg.ProofConsensus, msg.ProofHeight, msg.ConsensusHeight, ) diff --git 
a/x/ibc/light-clients/06-solomachine/types/client_state.go b/x/ibc/light-clients/06-solomachine/types/client_state.go index 38004b292..24a6582f0 100644 --- a/x/ibc/light-clients/06-solomachine/types/client_state.go +++ b/x/ibc/light-clients/06-solomachine/types/client_state.go @@ -1,6 +1,8 @@ package types import ( + "reflect" + ics23 "github.com/confio/ics23/go" "github.com/cosmos/cosmos-sdk/codec" @@ -74,6 +76,20 @@ func (cs ClientState) ZeroCustomFields() exported.ClientState { ) } +// Initialize will check that initial consensus state is equal to the latest consensus state of the initial client. +func (cs ClientState) Initialize(_ sdk.Context, _ codec.BinaryMarshaler, _ sdk.KVStore, consState exported.ConsensusState) error { + if !reflect.DeepEqual(cs.ConsensusState, consState) { + return sdkerrors.Wrapf(clienttypes.ErrInvalidConsensus, "consensus state in initial client does not equal initial consensus state. expected: %s, got: %s", + cs.ConsensusState, consState) + } + return nil +} + +// ExportMetadata is a no-op since solomachine does not store any metadata in client store +func (cs ClientState) ExportMetadata(_ sdk.KVStore) []exported.GenesisMetadata { + return nil +} + // VerifyUpgradeAndUpdateState returns an error since solomachine client does not support upgrades func (cs ClientState) VerifyUpgradeAndUpdateState( _ sdk.Context, _ codec.BinaryMarshaler, _ sdk.KVStore, @@ -238,6 +254,8 @@ func (cs ClientState) VerifyPacketCommitment( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + _ uint64, + _ uint64, prefix exported.Prefix, proof []byte, portID, @@ -277,6 +295,8 @@ func (cs ClientState) VerifyPacketAcknowledgement( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + _ uint64, + _ uint64, prefix exported.Prefix, proof []byte, portID, @@ -317,6 +337,8 @@ func (cs ClientState) VerifyPacketReceiptAbsence( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + _ uint64, + _ uint64, prefix exported.Prefix, proof []byte, portID, @@ -355,6 +377,8 @@ func (cs ClientState) VerifyNextSequenceRecv( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + _ uint64, + _ uint64, prefix exported.Prefix, proof []byte, portID, diff --git a/x/ibc/light-clients/06-solomachine/types/client_state_test.go b/x/ibc/light-clients/06-solomachine/types/client_state_test.go index 597adff34..4f6c195c8 100644 --- a/x/ibc/light-clients/06-solomachine/types/client_state_test.go +++ b/x/ibc/light-clients/06-solomachine/types/client_state_test.go @@ -7,6 +7,7 @@ import ( commitmenttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/23-commitment/types" "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/06-solomachine/types" + ibctmtypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/07-tendermint/types" ibctesting "github.com/cosmos/cosmos-sdk/x/ibc/testing" ) @@ -80,6 +81,55 @@ func (suite *SoloMachineTestSuite) TestClientStateValidateBasic() { } } +func (suite *SoloMachineTestSuite) TestInitialize() { + // test singlesig and multisig public keys + for _, solomachine := range []*ibctesting.Solomachine{suite.solomachine, suite.solomachineMulti} { + malleatedConsensus := solomachine.ClientState().ConsensusState + malleatedConsensus.Timestamp = malleatedConsensus.Timestamp + 10 + + testCases := []struct { + name string + consState exported.ConsensusState + expPass bool + }{ + { + "valid consensus state", + solomachine.ConsensusState(), + true, + }, + { + "nil consensus state", + nil, + 
false, + }, + { + "invalid consensus state: Tendermint consensus state", + &ibctmtypes.ConsensusState{}, + false, + }, + { + "invalid consensus state: consensus state does not match consensus state in client", + malleatedConsensus, + false, + }, + } + + for _, tc := range testCases { + err := solomachine.ClientState().Initialize( + suite.chainA.GetContext(), suite.chainA.Codec, + suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), "solomachine"), + tc.consState, + ) + + if tc.expPass { + suite.Require().NoError(err, "valid testcase: %s failed", tc.name) + } else { + suite.Require().Error(err, "invalid testcase: %s passed", tc.name) + } + } + } +} + func (suite *SoloMachineTestSuite) TestVerifyClientState() { // create client for tendermint so we can use client state for verification clientA, _ := suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) @@ -336,7 +386,7 @@ func (suite *SoloMachineTestSuite) TestVerifyClientConsensusState() { func (suite *SoloMachineTestSuite) TestVerifyConnectionState() { counterparty := connectiontypes.NewCounterparty("clientB", testConnectionID, prefix) - conn := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, connectiontypes.ExportedVersionsToProto(connectiontypes.GetCompatibleVersions())) + conn := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, connectiontypes.ExportedVersionsToProto(connectiontypes.GetCompatibleVersions()), 0) path := suite.solomachine.GetConnectionStatePath(testConnectionID) @@ -587,7 +637,7 @@ func (suite *SoloMachineTestSuite) TestVerifyPacketCommitment() { expSeq := tc.clientState.Sequence + 1 err := tc.clientState.VerifyPacketCommitment( - suite.store, suite.chainA.Codec, solomachine.GetHeight(), tc.prefix, tc.proof, testPortID, testChannelID, solomachine.Sequence, commitmentBytes, + suite.store, suite.chainA.Codec, solomachine.GetHeight(), 0, 0, tc.prefix, tc.proof, testPortID, testChannelID, solomachine.Sequence, commitmentBytes, ) if tc.expPass { @@ -674,7 +724,7 @@ func (suite *SoloMachineTestSuite) TestVerifyPacketAcknowledgement() { expSeq := tc.clientState.Sequence + 1 err := tc.clientState.VerifyPacketAcknowledgement( - suite.store, suite.chainA.Codec, solomachine.GetHeight(), tc.prefix, tc.proof, testPortID, testChannelID, solomachine.Sequence, ack, + suite.store, suite.chainA.Codec, solomachine.GetHeight(), 0, 0, tc.prefix, tc.proof, testPortID, testChannelID, solomachine.Sequence, ack, ) if tc.expPass { @@ -761,7 +811,7 @@ func (suite *SoloMachineTestSuite) TestVerifyPacketReceiptAbsence() { expSeq := tc.clientState.Sequence + 1 err := tc.clientState.VerifyPacketReceiptAbsence( - suite.store, suite.chainA.Codec, solomachine.GetHeight(), tc.prefix, tc.proof, testPortID, testChannelID, solomachine.Sequence, + suite.store, suite.chainA.Codec, solomachine.GetHeight(), 0, 0, tc.prefix, tc.proof, testPortID, testChannelID, solomachine.Sequence, ) if tc.expPass { @@ -848,7 +898,7 @@ func (suite *SoloMachineTestSuite) TestVerifyNextSeqRecv() { expSeq := tc.clientState.Sequence + 1 err := tc.clientState.VerifyNextSequenceRecv( - suite.store, suite.chainA.Codec, solomachine.GetHeight(), tc.prefix, tc.proof, testPortID, testChannelID, nextSeqRecv, + suite.store, suite.chainA.Codec, solomachine.GetHeight(), 0, 0, tc.prefix, tc.proof, testPortID, testChannelID, nextSeqRecv, ) if tc.expPass { diff --git a/x/ibc/light-clients/06-solomachine/types/codec_test.go b/x/ibc/light-clients/06-solomachine/types/codec_test.go index 
eb304bb28..70be186a1 100644 --- a/x/ibc/light-clients/06-solomachine/types/codec_test.go +++ b/x/ibc/light-clients/06-solomachine/types/codec_test.go @@ -68,7 +68,7 @@ func (suite SoloMachineTestSuite) TestUnmarshalDataByType() { { "connection", types.CONNECTION, func() { counterparty := connectiontypes.NewCounterparty("clientB", testConnectionID, prefix) - conn := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, connectiontypes.ExportedVersionsToProto(connectiontypes.GetCompatibleVersions())) + conn := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, connectiontypes.ExportedVersionsToProto(connectiontypes.GetCompatibleVersions()), 0) path := solomachine.GetConnectionStatePath("connectionID") data, err = types.ConnectionStateDataBytes(cdc, path, conn) @@ -99,7 +99,7 @@ func (suite SoloMachineTestSuite) TestUnmarshalDataByType() { { "bad channel (uses connection data)", types.CHANNEL, func() { counterparty := connectiontypes.NewCounterparty("clientB", testConnectionID, prefix) - conn := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, connectiontypes.ExportedVersionsToProto(connectiontypes.GetCompatibleVersions())) + conn := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, connectiontypes.ExportedVersionsToProto(connectiontypes.GetCompatibleVersions()), 0) path := solomachine.GetConnectionStatePath("connectionID") data, err = types.ConnectionStateDataBytes(cdc, path, conn) diff --git a/x/ibc/light-clients/07-tendermint/types/client_state.go b/x/ibc/light-clients/07-tendermint/types/client_state.go index 10f3bc3ba..8425b7419 100644 --- a/x/ibc/light-clients/07-tendermint/types/client_state.go +++ b/x/ibc/light-clients/07-tendermint/types/client_state.go @@ -140,6 +140,18 @@ func (cs ClientState) ZeroCustomFields() exported.ClientState { } } +// Initialize will check that initial consensus state is a Tendermint consensus state +// and will store ProcessedTime for initial consensus state as ctx.BlockTime() +func (cs ClientState) Initialize(ctx sdk.Context, _ codec.BinaryMarshaler, clientStore sdk.KVStore, consState exported.ConsensusState) error { + if _, ok := consState.(*ConsensusState); !ok { + return sdkerrors.Wrapf(clienttypes.ErrInvalidConsensus, "invalid initial consensus state. 
expected type: %T, got: %T", + &ConsensusState{}, consState) + } + // set processed time with initial consensus state height equal to initial client state's latest height + SetProcessedTime(clientStore, cs.GetLatestHeight(), uint64(ctx.BlockTime().UnixNano())) + return nil +} + // VerifyClientState verifies a proof of the client state of the running chain // stored on the target machine func (cs ClientState) VerifyClientState( @@ -308,6 +320,8 @@ func (cs ClientState) VerifyPacketCommitment( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + currentTimestamp uint64, + delayPeriod uint64, prefix exported.Prefix, proof []byte, portID, @@ -320,6 +334,11 @@ func (cs ClientState) VerifyPacketCommitment( return err } + // check delay period has passed + if err := verifyDelayPeriodPassed(store, height, currentTimestamp, delayPeriod); err != nil { + return err + } + commitmentPath := commitmenttypes.NewMerklePath(host.PacketCommitmentPath(portID, channelID, sequence)) path, err := commitmenttypes.ApplyPrefix(prefix, commitmentPath) if err != nil { @@ -339,6 +358,8 @@ func (cs ClientState) VerifyPacketAcknowledgement( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + currentTimestamp uint64, + delayPeriod uint64, prefix exported.Prefix, proof []byte, portID, @@ -351,6 +372,11 @@ func (cs ClientState) VerifyPacketAcknowledgement( return err } + // check delay period has passed + if err := verifyDelayPeriodPassed(store, height, currentTimestamp, delayPeriod); err != nil { + return err + } + ackPath := commitmenttypes.NewMerklePath(host.PacketAcknowledgementPath(portID, channelID, sequence)) path, err := commitmenttypes.ApplyPrefix(prefix, ackPath) if err != nil { @@ -371,6 +397,8 @@ func (cs ClientState) VerifyPacketReceiptAbsence( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + currentTimestamp uint64, + delayPeriod uint64, prefix exported.Prefix, proof []byte, portID, @@ -382,6 +410,11 @@ func (cs ClientState) VerifyPacketReceiptAbsence( return err } + // check delay period has passed + if err := verifyDelayPeriodPassed(store, height, currentTimestamp, delayPeriod); err != nil { + return err + } + receiptPath := commitmenttypes.NewMerklePath(host.PacketReceiptPath(portID, channelID, sequence)) path, err := commitmenttypes.ApplyPrefix(prefix, receiptPath) if err != nil { @@ -401,6 +434,8 @@ func (cs ClientState) VerifyNextSequenceRecv( store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height, + currentTimestamp uint64, + delayPeriod uint64, prefix exported.Prefix, proof []byte, portID, @@ -412,6 +447,11 @@ func (cs ClientState) VerifyNextSequenceRecv( return err } + // check delay period has passed + if err := verifyDelayPeriodPassed(store, height, currentTimestamp, delayPeriod); err != nil { + return err + } + nextSequenceRecvPath := commitmenttypes.NewMerklePath(host.NextSequenceRecvPath(portID, channelID)) path, err := commitmenttypes.ApplyPrefix(prefix, nextSequenceRecvPath) if err != nil { @@ -427,6 +467,23 @@ func (cs ClientState) VerifyNextSequenceRecv( return nil } +// verifyDelayPeriodPassed will ensure that at least delayPeriod amount of time has passed since consensus state was submitted +// before allowing verification to continue. 
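Stripped of the store lookup, the rule that the helper defined next enforces is plain nanosecond arithmetic: a proof against a given consensus state only verifies once the host chain's current time has reached that state's recorded processed time plus the connection's delay period, and the boundary itself is allowed. The following self-contained sketch restates that comparison with bare `uint64` values; the `delayElapsed` name and the concrete timestamps are illustrative only.

```go
package main

import (
	"fmt"
	"time"
)

// delayElapsed restates the timing rule: verification may proceed only once
// the executing chain's current time has reached processedTime + delayPeriod.
// The comparison is inclusive, so a current timestamp exactly equal to
// validTime passes.
func delayElapsed(processedTime, currentTimestamp, delayPeriod uint64) error {
	validTime := processedTime + delayPeriod
	if validTime > currentTimestamp {
		return fmt.Errorf("cannot verify until time %d, current time %d", validTime, currentTimestamp)
	}
	return nil
}

func main() {
	processed := uint64(time.Date(2020, 10, 8, 12, 0, 0, 0, time.UTC).UnixNano())
	delay := uint64(time.Hour.Nanoseconds())

	tooEarly := processed + uint64(time.Second.Nanoseconds())
	fmt.Println(delayElapsed(processed, tooEarly, delay) != nil) // true: one second in is too early

	onTime := processed + delay
	fmt.Println(delayElapsed(processed, onTime, delay) == nil) // true: exactly delayPeriod later passes
}
```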
+func verifyDelayPeriodPassed(store sdk.KVStore, proofHeight exported.Height, currentTimestamp, delayPeriod uint64) error { + // check that executing chain's timestamp has passed consensusState's processed time + delay period + processedTime, ok := GetProcessedTime(store, proofHeight) + if !ok { + return sdkerrors.Wrapf(ErrProcessedTimeNotFound, "processed time not found for height: %s", proofHeight) + } + validTime := processedTime + delayPeriod + // NOTE: delay period is inclusive, so if currentTimestamp is validTime, then we return no error + if validTime > currentTimestamp { + return sdkerrors.Wrapf(ErrDelayPeriodNotPassed, "cannot verify packet until time: %d, current time: %d", + validTime, currentTimestamp) + } + return nil +} + // produceVerificationArgs perfoms the basic checks on the arguments that are // shared between the verification functions and returns the unmarshalled // merkle proof, the consensus state and an error if one occurred. diff --git a/x/ibc/light-clients/07-tendermint/types/client_state_test.go b/x/ibc/light-clients/07-tendermint/types/client_state_test.go index 80be1472f..d342c6f74 100644 --- a/x/ibc/light-clients/07-tendermint/types/client_state_test.go +++ b/x/ibc/light-clients/07-tendermint/types/client_state_test.go @@ -1,6 +1,8 @@ package types_test import ( + "time" + ics23 "github.com/confio/ics23/go" clienttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" @@ -98,6 +100,41 @@ func (suite *TendermintTestSuite) TestValidate() { } } +func (suite *TendermintTestSuite) TestInitialize() { + + testCases := []struct { + name string + consensusState exported.ConsensusState + expPass bool + }{ + { + name: "valid consensus", + consensusState: &types.ConsensusState{}, + expPass: true, + }, + { + name: "invalid consensus: consensus state is solomachine consensus", + consensusState: ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2).ConsensusState(), + expPass: false, + }, + } + + clientA, err := suite.coordinator.CreateClient(suite.chainA, suite.chainB, exported.Tendermint) + suite.Require().NoError(err) + + clientState := suite.chainA.GetClientState(clientA) + store := suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), clientA) + + for _, tc := range testCases { + err := clientState.Initialize(suite.chainA.GetContext(), suite.chainA.Codec, store, tc.consensusState) + if tc.expPass { + suite.Require().NoError(err, "valid case returned an error") + } else { + suite.Require().Error(err, "invalid case didn't return an error") + } + } +} + func (suite *TendermintTestSuite) TestVerifyClientConsensusState() { testCases := []struct { name string @@ -335,6 +372,7 @@ func (suite *TendermintTestSuite) TestVerifyPacketCommitment() { var ( clientState *types.ClientState proof []byte + delayPeriod uint64 proofHeight exported.Height prefix commitmenttypes.MerklePrefix ) @@ -347,6 +385,20 @@ func (suite *TendermintTestSuite) TestVerifyPacketCommitment() { { "successful verification", func() {}, true, }, + { + name: "delay period has passed", + malleate: func() { + delayPeriod = uint64(time.Second.Nanoseconds()) + }, + expPass: true, + }, + { + name: "delay period has not passed", + malleate: func() { + delayPeriod = uint64(time.Hour.Nanoseconds()) + }, + expPass: false, + }, { "ApplyPrefix failed", func() { prefix = commitmenttypes.MerklePrefix{} @@ -396,9 +448,10 @@ func (suite *TendermintTestSuite) TestVerifyPacketCommitment() { store := 
suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), clientA) + currentTime := uint64(suite.chainA.GetContext().BlockTime().UnixNano()) commitment := channeltypes.CommitPacket(suite.chainA.App.IBCKeeper.Codec(), packet) err = clientState.VerifyPacketCommitment( - store, suite.chainA.Codec, proofHeight, &prefix, proof, + store, suite.chainA.Codec, proofHeight, currentTime, delayPeriod, &prefix, proof, packet.GetSourcePort(), packet.GetSourceChannel(), packet.GetSequence(), commitment, ) @@ -418,6 +471,7 @@ func (suite *TendermintTestSuite) TestVerifyPacketAcknowledgement() { var ( clientState *types.ClientState proof []byte + delayPeriod uint64 proofHeight exported.Height prefix commitmenttypes.MerklePrefix ) @@ -430,6 +484,20 @@ func (suite *TendermintTestSuite) TestVerifyPacketAcknowledgement() { { "successful verification", func() {}, true, }, + { + name: "delay period has passed", + malleate: func() { + delayPeriod = uint64(time.Second.Nanoseconds()) + }, + expPass: true, + }, + { + name: "delay period has not passed", + malleate: func() { + delayPeriod = uint64(time.Hour.Nanoseconds()) + }, + expPass: false, + }, { "ApplyPrefix failed", func() { prefix = commitmenttypes.MerklePrefix{} @@ -485,8 +553,9 @@ func (suite *TendermintTestSuite) TestVerifyPacketAcknowledgement() { store := suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), clientA) + currentTime := uint64(suite.chainA.GetContext().BlockTime().UnixNano()) err = clientState.VerifyPacketAcknowledgement( - store, suite.chainA.Codec, proofHeight, &prefix, proof, + store, suite.chainA.Codec, proofHeight, currentTime, delayPeriod, &prefix, proof, packet.GetDestPort(), packet.GetDestChannel(), packet.GetSequence(), ibcmock.MockAcknowledgement, ) @@ -506,6 +575,7 @@ func (suite *TendermintTestSuite) TestVerifyPacketReceiptAbsence() { var ( clientState *types.ClientState proof []byte + delayPeriod uint64 proofHeight exported.Height prefix commitmenttypes.MerklePrefix ) @@ -518,6 +588,20 @@ func (suite *TendermintTestSuite) TestVerifyPacketReceiptAbsence() { { "successful verification", func() {}, true, }, + { + name: "delay period has passed", + malleate: func() { + delayPeriod = uint64(time.Second.Nanoseconds()) + }, + expPass: true, + }, + { + name: "delay period has not passed", + malleate: func() { + delayPeriod = uint64(time.Hour.Nanoseconds()) + }, + expPass: false, + }, { "ApplyPrefix failed", func() { prefix = commitmenttypes.MerklePrefix{} @@ -572,8 +656,9 @@ func (suite *TendermintTestSuite) TestVerifyPacketReceiptAbsence() { store := suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), clientA) + currentTime := uint64(suite.chainA.GetContext().BlockTime().UnixNano()) err = clientState.VerifyPacketReceiptAbsence( - store, suite.chainA.Codec, proofHeight, &prefix, proof, + store, suite.chainA.Codec, proofHeight, currentTime, delayPeriod, &prefix, proof, packet.GetDestPort(), packet.GetDestChannel(), packet.GetSequence(), ) @@ -593,6 +678,7 @@ func (suite *TendermintTestSuite) TestVerifyNextSeqRecv() { var ( clientState *types.ClientState proof []byte + delayPeriod uint64 proofHeight exported.Height prefix commitmenttypes.MerklePrefix ) @@ -605,6 +691,20 @@ func (suite *TendermintTestSuite) TestVerifyNextSeqRecv() { { "successful verification", func() {}, true, }, + { + name: "delay period has passed", + malleate: func() { + delayPeriod = uint64(time.Second.Nanoseconds()) + }, + expPass: true, + }, + { + name: "delay period has not passed", + 
malleate: func() { + delayPeriod = uint64(time.Hour.Nanoseconds()) + }, + expPass: false, + }, { "ApplyPrefix failed", func() { prefix = commitmenttypes.MerklePrefix{} @@ -663,8 +763,9 @@ func (suite *TendermintTestSuite) TestVerifyNextSeqRecv() { store := suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), clientA) + currentTime := uint64(suite.chainA.GetContext().BlockTime().UnixNano()) err = clientState.VerifyNextSequenceRecv( - store, suite.chainA.Codec, proofHeight, &prefix, proof, + store, suite.chainA.Codec, proofHeight, currentTime, delayPeriod, &prefix, proof, packet.GetDestPort(), packet.GetDestChannel(), packet.GetSequence()+1, ) diff --git a/x/ibc/light-clients/07-tendermint/types/consensus_state.go b/x/ibc/light-clients/07-tendermint/types/consensus_state.go index d55bc1130..b58198394 100644 --- a/x/ibc/light-clients/07-tendermint/types/consensus_state.go +++ b/x/ibc/light-clients/07-tendermint/types/consensus_state.go @@ -14,8 +14,7 @@ import ( // NewConsensusState creates a new ConsensusState instance. func NewConsensusState( - timestamp time.Time, root commitmenttypes.MerkleRoot, - nextValsHash tmbytes.HexBytes, + timestamp time.Time, root commitmenttypes.MerkleRoot, nextValsHash tmbytes.HexBytes, ) *ConsensusState { return &ConsensusState{ Timestamp: timestamp, @@ -34,12 +33,14 @@ func (cs ConsensusState) GetRoot() exported.Root { return cs.Root } -// GetTimestamp returns block time in nanoseconds at which the consensus state was stored +// GetTimestamp returns block time in nanoseconds of the header that created consensus state func (cs ConsensusState) GetTimestamp() uint64 { return uint64(cs.Timestamp.UnixNano()) } // ValidateBasic defines a basic validation for the tendermint consensus state. +// NOTE: ProcessedTimestamp may be zero if this is an initial consensus state passed in by relayer +// as opposed to a consensus state constructed by the chain. 
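// Illustrative sketch, not part of this patch: how the delay-period check in
// verifyDelayPeriodPassed above behaves at the boundary. All values are
// nanosecond timestamps; the check is inclusive, so a currentTimestamp equal to
// processedTime + delayPeriod already passes.
package main

import "fmt"

func delayPassed(processedTime, delayPeriod, currentTimestamp uint64) bool {
	validTime := processedTime + delayPeriod
	// mirrors `if validTime > currentTimestamp { return err }`: equality passes
	return currentTimestamp >= validTime
}

func main() {
	processed := uint64(1_000_000_000) // header processed at t = 1s
	delay := uint64(5_000_000_000)     // delay period of 5s
	fmt.Println(delayPassed(processed, delay, 5_999_999_999)) // false: one nanosecond early
	fmt.Println(delayPassed(processed, delay, 6_000_000_000)) // true: exactly at validTime
	fmt.Println(delayPassed(processed, delay, 6_000_000_001)) // true
}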
func (cs ConsensusState) ValidateBasic() error { if cs.Root.Empty() { return sdkerrors.Wrap(clienttypes.ErrInvalidConsensus, "root cannot be empty") diff --git a/x/ibc/light-clients/07-tendermint/types/errors.go b/x/ibc/light-clients/07-tendermint/types/errors.go index 9683dbf3c..276c225b7 100644 --- a/x/ibc/light-clients/07-tendermint/types/errors.go +++ b/x/ibc/light-clients/07-tendermint/types/errors.go @@ -16,8 +16,10 @@ var ( ErrInvalidHeaderHeight = sdkerrors.Register(SubModuleName, 5, "invalid header height") ErrInvalidHeader = sdkerrors.Register(SubModuleName, 6, "invalid header") ErrInvalidMaxClockDrift = sdkerrors.Register(SubModuleName, 7, "invalid max clock drift") - ErrTrustingPeriodExpired = sdkerrors.Register(SubModuleName, 8, "time since latest trusted state has passed the trusting period") - ErrUnbondingPeriodExpired = sdkerrors.Register(SubModuleName, 9, "time since latest trusted state has passed the unbonding period") - ErrInvalidProofSpecs = sdkerrors.Register(SubModuleName, 10, "invalid proof specs") - ErrInvalidValidatorSet = sdkerrors.Register(SubModuleName, 11, "invalid validator set") + ErrProcessedTimeNotFound = sdkerrors.Register(SubModuleName, 8, "processed time not found") + ErrDelayPeriodNotPassed = sdkerrors.Register(SubModuleName, 9, "packet-specified delay period has not been reached") + ErrTrustingPeriodExpired = sdkerrors.Register(SubModuleName, 10, "time since latest trusted state has passed the trusting period") + ErrUnbondingPeriodExpired = sdkerrors.Register(SubModuleName, 11, "time since latest trusted state has passed the unbonding period") + ErrInvalidProofSpecs = sdkerrors.Register(SubModuleName, 12, "invalid proof specs") + ErrInvalidValidatorSet = sdkerrors.Register(SubModuleName, 13, "invalid validator set") ) diff --git a/x/ibc/light-clients/07-tendermint/types/genesis.go b/x/ibc/light-clients/07-tendermint/types/genesis.go new file mode 100644 index 000000000..7124643b5 --- /dev/null +++ b/x/ibc/light-clients/07-tendermint/types/genesis.go @@ -0,0 +1,21 @@ +package types + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + clienttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" + "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" +) + +// ExportMetadata exports all the processed times in the client store so they can be included in clients genesis +// and imported by a ClientKeeper +func (cs ClientState) ExportMetadata(store sdk.KVStore) []exported.GenesisMetadata { + gm := make([]exported.GenesisMetadata, 0) + IterateProcessedTime(store, func(key, val []byte) bool { + gm = append(gm, clienttypes.NewGenesisMetadata(key, val)) + return false + }) + if len(gm) == 0 { + return nil + } + return gm +} diff --git a/x/ibc/light-clients/07-tendermint/types/genesis_test.go b/x/ibc/light-clients/07-tendermint/types/genesis_test.go new file mode 100644 index 000000000..5732151e6 --- /dev/null +++ b/x/ibc/light-clients/07-tendermint/types/genesis_test.go @@ -0,0 +1,38 @@ +package types_test + +import ( + "time" + + sdk "github.com/cosmos/cosmos-sdk/types" + clienttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" + commitmenttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/23-commitment/types" + "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/07-tendermint/types" +) + +func (suite *TendermintTestSuite) TestExportMetadata() { + clientState := types.NewClientState(chainID, types.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, height, commitmenttypes.GetSDKSpecs(), upgradePath, false, false) + 
suite.chainA.App.IBCKeeper.ClientKeeper.SetClientState(suite.chainA.GetContext(), "clientA", clientState) + + gm := clientState.ExportMetadata(suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), "clientA")) + suite.Require().Nil(gm, "client with no metadata returned non-nil exported metadata") + + clientStore := suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), "clientA") + + // set some processed times + timestamp1 := uint64(time.Now().UnixNano()) + timestamp2 := uint64(time.Now().Add(time.Minute).UnixNano()) + timestampBz1 := sdk.Uint64ToBigEndian(timestamp1) + timestampBz2 := sdk.Uint64ToBigEndian(timestamp2) + types.SetProcessedTime(clientStore, clienttypes.NewHeight(0, 1), timestamp1) + types.SetProcessedTime(clientStore, clienttypes.NewHeight(0, 2), timestamp2) + + gm = clientState.ExportMetadata(suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), "clientA")) + suite.Require().NotNil(gm, "client with metadata returned nil exported metadata") + suite.Require().Len(gm, 2, "exported metadata has unexpected length") + + suite.Require().Equal(types.ProcessedTimeKey(clienttypes.NewHeight(0, 1)), gm[0].GetKey(), "metadata has unexpected key") + suite.Require().Equal(timestampBz1, gm[0].GetValue(), "metadata has unexpected value") + + suite.Require().Equal(types.ProcessedTimeKey(clienttypes.NewHeight(0, 2)), gm[1].GetKey(), "metadata has unexpected key") + suite.Require().Equal(timestampBz2, gm[1].GetValue(), "metadata has unexpected value") +} diff --git a/x/ibc/light-clients/07-tendermint/types/proposal_handle.go b/x/ibc/light-clients/07-tendermint/types/proposal_handle.go index 7e27f543a..4cd3eb376 100644 --- a/x/ibc/light-clients/07-tendermint/types/proposal_handle.go +++ b/x/ibc/light-clients/07-tendermint/types/proposal_handle.go @@ -53,7 +53,7 @@ func (cs ClientState) CheckProposedHeaderAndUpdateState( // if the client is expired we unexpire the client using softer validation, otherwise // full validation on the header is performed. if cs.IsExpired(consensusState.Timestamp, ctx.BlockTime()) { - return cs.unexpireClient(consensusState, tmHeader, ctx.BlockTime()) + return cs.unexpireClient(ctx, clientStore, consensusState, tmHeader, ctx.BlockTime()) } // NOTE: the client may be frozen again since the misbehaviour evidence may @@ -61,7 +61,7 @@ func (cs ClientState) CheckProposedHeaderAndUpdateState( return cs.CheckHeaderAndUpdateState(ctx, cdc, clientStore, header) case cs.AllowUpdateAfterExpiry && cs.IsExpired(consensusState.Timestamp, ctx.BlockTime()): - return cs.unexpireClient(consensusState, tmHeader, ctx.BlockTime()) + return cs.unexpireClient(ctx, clientStore, consensusState, tmHeader, ctx.BlockTime()) default: return nil, nil, sdkerrors.Wrap(clienttypes.ErrUpdateClientFailed, "client cannot be updated with proposal") @@ -72,7 +72,7 @@ func (cs ClientState) CheckProposedHeaderAndUpdateState( // unexpireClient checks if the proposed header is sufficient to update an expired client. // The client is updated if no error occurs. 
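// ctx and clientStore are threaded through so that the shared update() helper
// (see update.go further below) can also record a processed time for the header
// that unexpires the client, keeping the delay-period metadata consistent with
// the regular CheckHeaderAndUpdateState path.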
func (cs ClientState) unexpireClient( - consensusState *ConsensusState, header *Header, currentTimestamp time.Time, + ctx sdk.Context, clientStore sdk.KVStore, consensusState *ConsensusState, header *Header, currentTimestamp time.Time, ) (exported.ClientState, exported.ConsensusState, error) { // the client is expired and either AllowUpdateAfterMisbehaviour or AllowUpdateAfterExpiry @@ -81,7 +81,7 @@ func (cs ClientState) unexpireClient( return nil, nil, err } - newClientState, consensusState := update(&cs, header) + newClientState, consensusState := update(ctx, clientStore, &cs, header) return newClientState, consensusState, nil } diff --git a/x/ibc/light-clients/07-tendermint/types/store.go b/x/ibc/light-clients/07-tendermint/types/store.go index 50f8d2a1d..8b2720c5a 100644 --- a/x/ibc/light-clients/07-tendermint/types/store.go +++ b/x/ibc/light-clients/07-tendermint/types/store.go @@ -1,6 +1,8 @@ package types import ( + "strings" + "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" @@ -9,6 +11,9 @@ import ( "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" ) +// KeyProcessedTime is appended to consensus state key to store the processed time +var KeyProcessedTime = []byte("/processedTime") + // GetConsensusState retrieves the consensus state from the client prefixed // store. An error is returned if the consensus state does not exist. func GetConsensusState(store sdk.KVStore, cdc codec.BinaryMarshaler, height exported.Height) (*ConsensusState, error) { @@ -35,3 +40,50 @@ func GetConsensusState(store sdk.KVStore, cdc codec.BinaryMarshaler, height expo return consensusState, nil } + +// IterateProcessedTime iterates through the prefix store and applies the callback. +// If the cb returns true, then iterator will close and stop. +func IterateProcessedTime(store sdk.KVStore, cb func(key, val []byte) bool) { + iterator := sdk.KVStorePrefixIterator(store, []byte(host.KeyConsensusStatePrefix)) + + defer iterator.Close() + for ; iterator.Valid(); iterator.Next() { + keySplit := strings.Split(string(iterator.Key()), "/") + // processed time key in prefix store has format: "consensusState//processedTime" + if len(keySplit) != 3 || keySplit[2] != "processedTime" { + // ignore all consensus state keys + continue + } + + if cb(iterator.Key(), iterator.Value()) { + break + } + } +} + +// ProcessedTime Store code + +// ProcessedTimeKey returns the key under which the processed time will be stored in the client store. +func ProcessedTimeKey(height exported.Height) []byte { + return append(host.ConsensusStateKey(height), KeyProcessedTime...) +} + +// SetProcessedTime stores the time at which a header was processed and the corresponding consensus state was created. +// This is useful when validating whether a packet has reached the specified delay period in the tendermint client's +// verification functions +func SetProcessedTime(clientStore sdk.KVStore, height exported.Height, timeNs uint64) { + key := ProcessedTimeKey(height) + val := sdk.Uint64ToBigEndian(timeNs) + clientStore.Set(key, val) +} + +// GetProcessedTime gets the time (in nanoseconds) at which this chain received and processed a tendermint header. +// This is used to validate that a received packet has passed the delay period. 
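// Illustrative sketch, not part of this patch: why the length-3 split in
// IterateProcessedTime above distinguishes processed-time entries from plain
// consensus state entries. It assumes host.ConsensusStateKey(height) yields keys
// of the form "consensusStates/<revision>-<height>"; only the "/processedTime"
// suffix appended by ProcessedTimeKey adds a third path segment.
package main

import (
	"fmt"
	"strings"
)

func main() {
	keys := []string{
		"consensusStates/0-2",               // plain consensus state key: skipped by the iterator
		"consensusStates/0-2/processedTime", // processed time key: passed to the callback
	}
	for _, key := range keys {
		parts := strings.Split(key, "/")
		matched := len(parts) == 3 && parts[2] == "processedTime"
		fmt.Printf("%-36s processed time entry: %v\n", key, matched)
	}
}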
+func GetProcessedTime(clientStore sdk.KVStore, height exported.Height) (uint64, bool) { + key := ProcessedTimeKey(height) + bz := clientStore.Get(key) + if bz == nil { + return 0, false + } + return sdk.BigEndianToUint64(bz), true +} diff --git a/x/ibc/light-clients/07-tendermint/types/store_test.go b/x/ibc/light-clients/07-tendermint/types/store_test.go index 3bb267b0f..b8badc094 100644 --- a/x/ibc/light-clients/07-tendermint/types/store_test.go +++ b/x/ibc/light-clients/07-tendermint/types/store_test.go @@ -7,6 +7,7 @@ import ( "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" solomachinetypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/06-solomachine/types" "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/07-tendermint/types" + ibctesting "github.com/cosmos/cosmos-sdk/x/ibc/testing" ) func (suite *TendermintTestSuite) TestGetConsensusState() { @@ -74,3 +75,39 @@ func (suite *TendermintTestSuite) TestGetConsensusState() { }) } } + +func (suite *TendermintTestSuite) TestGetProcessedTime() { + // Verify ProcessedTime on CreateClient + // coordinator increments time before creating client + expectedTime := suite.chainA.CurrentHeader.Time.Add(ibctesting.TimeIncrement) + + clientA, err := suite.coordinator.CreateClient(suite.chainA, suite.chainB, exported.Tendermint) + suite.Require().NoError(err) + + clientState := suite.chainA.GetClientState(clientA) + height := clientState.GetLatestHeight() + + store := suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), clientA) + actualTime, ok := types.GetProcessedTime(store, height) + suite.Require().True(ok, "could not retrieve processed time for stored consensus state") + suite.Require().Equal(uint64(expectedTime.UnixNano()), actualTime, "retrieved processed time is not expected value") + + // Verify ProcessedTime on UpdateClient + // coordinator increments time before updating client + expectedTime = suite.chainA.CurrentHeader.Time.Add(ibctesting.TimeIncrement) + + err = suite.coordinator.UpdateClient(suite.chainA, suite.chainB, clientA, exported.Tendermint) + suite.Require().NoError(err) + + clientState = suite.chainA.GetClientState(clientA) + height = clientState.GetLatestHeight() + + store = suite.chainA.App.IBCKeeper.ClientKeeper.ClientStore(suite.chainA.GetContext(), clientA) + actualTime, ok = types.GetProcessedTime(store, height) + suite.Require().True(ok, "could not retrieve processed time for stored consensus state") + suite.Require().Equal(uint64(expectedTime.UnixNano()), actualTime, "retrieved processed time is not expected value") + + // try to get processed time for height that doesn't exist in store + _, ok = types.GetProcessedTime(store, clienttypes.NewHeight(1, 1)) + suite.Require().False(ok, "retrieved processed time for a non-existent consensus state") +} diff --git a/x/ibc/light-clients/07-tendermint/types/update.go b/x/ibc/light-clients/07-tendermint/types/update.go index cee0015a1..e692e7466 100644 --- a/x/ibc/light-clients/07-tendermint/types/update.go +++ b/x/ibc/light-clients/07-tendermint/types/update.go @@ -60,7 +60,7 @@ func (cs ClientState) CheckHeaderAndUpdateState( return nil, nil, err } - newClientState, consensusState := update(&cs, tmHeader) + newClientState, consensusState := update(ctx, clientStore, &cs, tmHeader) return newClientState, consensusState, nil } @@ -166,8 +166,8 @@ func checkValidity( return nil } -// update the consensus state from a new header -func update(clientState *ClientState, header *Header) (*ClientState, *ConsensusState) { +// update the consensus state 
from a new header and set processed time metadata +func update(ctx sdk.Context, clientStore sdk.KVStore, clientState *ClientState, header *Header) (*ClientState, *ConsensusState) { height := header.GetHeight().(clienttypes.Height) if height.GT(clientState.LatestHeight) { clientState.LatestHeight = height @@ -178,5 +178,9 @@ func update(clientState *ClientState, header *Header) (*ClientState, *ConsensusS NextValidatorsHash: header.Header.NextValidatorsHash, } + // set context time as processed time as this is state internal to tendermint client logic. + // client state and consensus state will be set by client keeper + SetProcessedTime(clientStore, header.GetHeight(), uint64(ctx.BlockTime().UnixNano())) + return clientState, consensusState } diff --git a/x/ibc/light-clients/07-tendermint/types/upgrade.go b/x/ibc/light-clients/07-tendermint/types/upgrade.go index e3f230bbb..074ccc2db 100644 --- a/x/ibc/light-clients/07-tendermint/types/upgrade.go +++ b/x/ibc/light-clients/07-tendermint/types/upgrade.go @@ -116,6 +116,8 @@ func (cs ClientState) VerifyUpgradeAndUpdateState( // The timestamp and the NextValidatorsHash of the consensus state is the blocktime and NextValidatorsHash // of the last block committed by the old chain. This will allow the first block of the new chain to be verified against // the last validators of the old chain so long as it is submitted within the TrustingPeriod of this client. + // NOTE: We do not set processed time for this consensus state since this consensus state should not be used for packet verification + // as the root is empty. The next consensus state submitted using update will be usable for packet-verification. newConsState := NewConsensusState( tmUpgradeConsState.Timestamp, commitmenttypes.MerkleRoot{}, tmUpgradeConsState.NextValidatorsHash, ) diff --git a/x/ibc/light-clients/09-localhost/types/client_state.go b/x/ibc/light-clients/09-localhost/types/client_state.go index b828aa029..e0ba7a2f0 100644 --- a/x/ibc/light-clients/09-localhost/types/client_state.go +++ b/x/ibc/light-clients/09-localhost/types/client_state.go @@ -74,6 +74,19 @@ func (cs ClientState) ZeroCustomFields() exported.ClientState { return &cs } +// Initialize ensures that initial consensus state for localhost is nil +func (cs ClientState) Initialize(_ sdk.Context, _ codec.BinaryMarshaler, _ sdk.KVStore, consState exported.ConsensusState) error { + if consState != nil { + return sdkerrors.Wrap(clienttypes.ErrInvalidConsensus, "initial consensus state for localhost must be nil.") + } + return nil +} + +// ExportMetadata is a no-op for localhost client +func (cs ClientState) ExportMetadata(_ sdk.KVStore) []exported.GenesisMetadata { + return nil +} + // CheckHeaderAndUpdateState updates the localhost client. 
It only needs access to the context func (cs *ClientState) CheckHeaderAndUpdateState( ctx sdk.Context, _ codec.BinaryMarshaler, _ sdk.KVStore, _ exported.Header, @@ -216,6 +229,8 @@ func (cs ClientState) VerifyPacketCommitment( store sdk.KVStore, _ codec.BinaryMarshaler, _ exported.Height, + _ uint64, + _ uint64, _ exported.Prefix, _ []byte, portID, @@ -246,6 +261,8 @@ func (cs ClientState) VerifyPacketAcknowledgement( store sdk.KVStore, _ codec.BinaryMarshaler, _ exported.Height, + _ uint64, + _ uint64, _ exported.Prefix, _ []byte, portID, @@ -277,6 +294,8 @@ func (cs ClientState) VerifyPacketReceiptAbsence( store sdk.KVStore, _ codec.BinaryMarshaler, _ exported.Height, + _ uint64, + _ uint64, _ exported.Prefix, _ []byte, portID, @@ -299,6 +318,8 @@ func (cs ClientState) VerifyNextSequenceRecv( store sdk.KVStore, _ codec.BinaryMarshaler, _ exported.Height, + _ uint64, + _ uint64, _ exported.Prefix, _ []byte, portID, diff --git a/x/ibc/light-clients/09-localhost/types/client_state_test.go b/x/ibc/light-clients/09-localhost/types/client_state_test.go index 17555b945..13a1367d5 100644 --- a/x/ibc/light-clients/09-localhost/types/client_state_test.go +++ b/x/ibc/light-clients/09-localhost/types/client_state_test.go @@ -7,6 +7,8 @@ import ( channeltypes "github.com/cosmos/cosmos-sdk/x/ibc/core/04-channel/types" commitmenttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/23-commitment/types" host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" + ibctmtypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/07-tendermint/types" "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/09-localhost/types" ) @@ -50,6 +52,37 @@ func (suite *LocalhostTestSuite) TestValidate() { } } +func (suite *LocalhostTestSuite) TestInitialize() { + testCases := []struct { + name string + consState exported.ConsensusState + expPass bool + }{ + { + "valid initialization", + nil, + true, + }, + { + "invalid consenus state", + &ibctmtypes.ConsensusState{}, + false, + }, + } + + clientState := types.NewClientState("chainID", clienttypes.NewHeight(3, 10)) + + for _, tc := range testCases { + err := clientState.Initialize(suite.ctx, suite.cdc, suite.store, tc.consState) + + if tc.expPass { + suite.Require().NoError(err, "valid testcase: %s failed", tc.name) + } else { + suite.Require().Error(err, "invalid testcase: %s passed", tc.name) + } + } +} + func (suite *LocalhostTestSuite) TestVerifyClientState() { clientState := types.NewClientState("chainID", clientHeight) invalidClient := types.NewClientState("chainID", clienttypes.NewHeight(0, 12)) @@ -145,8 +178,8 @@ func (suite *LocalhostTestSuite) TestProposedHeaderAndUpdateState() { func (suite *LocalhostTestSuite) TestVerifyConnectionState() { counterparty := connectiontypes.NewCounterparty("clientB", testConnectionID, commitmenttypes.NewMerklePrefix([]byte("ibc"))) - conn1 := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, []*connectiontypes.Version{connectiontypes.NewVersion("1", nil)}) - conn2 := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, []*connectiontypes.Version{connectiontypes.NewVersion("2", nil)}) + conn1 := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, []*connectiontypes.Version{connectiontypes.NewVersion("1", nil)}, 0) + conn2 := connectiontypes.NewConnectionEnd(connectiontypes.OPEN, "clientA", counterparty, []*connectiontypes.Version{connectiontypes.NewVersion("2", nil)}, 0) testCases := []struct { name string @@ 
-336,7 +369,7 @@ func (suite *LocalhostTestSuite) TestVerifyPacketCommitment() { tc.malleate() err := tc.clientState.VerifyPacketCommitment( - suite.store, suite.cdc, clientHeight, nil, []byte{}, testPortID, testChannelID, testSequence, tc.commitment, + suite.store, suite.cdc, clientHeight, 0, 0, nil, []byte{}, testPortID, testChannelID, testSequence, tc.commitment, ) if tc.expPass { @@ -395,7 +428,7 @@ func (suite *LocalhostTestSuite) TestVerifyPacketAcknowledgement() { tc.malleate() err := tc.clientState.VerifyPacketAcknowledgement( - suite.store, suite.cdc, clientHeight, nil, []byte{}, testPortID, testChannelID, testSequence, tc.ack, + suite.store, suite.cdc, clientHeight, 0, 0, nil, []byte{}, testPortID, testChannelID, testSequence, tc.ack, ) if tc.expPass { @@ -411,7 +444,7 @@ func (suite *LocalhostTestSuite) TestVerifyPacketReceiptAbsence() { clientState := types.NewClientState("chainID", clientHeight) err := clientState.VerifyPacketReceiptAbsence( - suite.store, suite.cdc, clientHeight, nil, nil, testPortID, testChannelID, testSequence, + suite.store, suite.cdc, clientHeight, 0, 0, nil, nil, testPortID, testChannelID, testSequence, ) suite.Require().NoError(err, "receipt absence failed") @@ -419,7 +452,7 @@ func (suite *LocalhostTestSuite) TestVerifyPacketReceiptAbsence() { suite.store.Set(host.PacketReceiptKey(testPortID, testChannelID, testSequence), []byte("receipt")) err = clientState.VerifyPacketReceiptAbsence( - suite.store, suite.cdc, clientHeight, nil, nil, testPortID, testChannelID, testSequence, + suite.store, suite.cdc, clientHeight, 0, 0, nil, nil, testPortID, testChannelID, testSequence, ) suite.Require().Error(err, "receipt exists in store") } @@ -475,7 +508,7 @@ func (suite *LocalhostTestSuite) TestVerifyNextSeqRecv() { tc.malleate() err := tc.clientState.VerifyNextSequenceRecv( - suite.store, suite.cdc, clientHeight, nil, []byte{}, testPortID, testChannelID, nextSeqRecv, + suite.store, suite.cdc, clientHeight, 0, 0, nil, []byte{}, testPortID, testChannelID, nextSeqRecv, ) if tc.expPass { diff --git a/x/ibc/testing/chain.go b/x/ibc/testing/chain.go index 4dc8ec4f2..0534066d8 100644 --- a/x/ibc/testing/chain.go +++ b/x/ibc/testing/chain.go @@ -41,9 +41,10 @@ import ( const ( // Default params constants used to create a TM client - TrustingPeriod time.Duration = time.Hour * 24 * 7 * 2 - UnbondingPeriod time.Duration = time.Hour * 24 * 7 * 3 - MaxClockDrift time.Duration = time.Second * 10 + TrustingPeriod time.Duration = time.Hour * 24 * 7 * 2 + UnbondingPeriod time.Duration = time.Hour * 24 * 7 * 3 + MaxClockDrift time.Duration = time.Second * 10 + DefaultDelayPeriod uint64 = 0 DefaultChannelVersion = ibctransfertypes.Version InvalidID = "IDisInvalid" @@ -643,7 +644,7 @@ func (chain *TestChain) ConnectionOpenInit( msg := connectiontypes.NewMsgConnectionOpenInit( connection.ClientID, connection.CounterpartyClientID, - counterparty.GetPrefix(), DefaultOpenInitVersion, + counterparty.GetPrefix(), DefaultOpenInitVersion, DefaultDelayPeriod, chain.SenderAccount.GetAddress(), ) return chain.sendMsgs(msg) @@ -664,7 +665,7 @@ func (chain *TestChain) ConnectionOpenTry( msg := connectiontypes.NewMsgConnectionOpenTry( "", connection.ClientID, // does not support handshake continuation counterpartyConnection.ID, counterpartyConnection.ClientID, - counterpartyClient, counterparty.GetPrefix(), []*connectiontypes.Version{ConnectionVersion}, + counterpartyClient, counterparty.GetPrefix(), []*connectiontypes.Version{ConnectionVersion}, DefaultDelayPeriod, proofInit, proofClient, 
proofConsensus, proofHeight, consensusHeight, chain.SenderAccount.GetAddress(), diff --git a/x/ibc/testing/coordinator.go b/x/ibc/testing/coordinator.go index 95b59a1db..ade28b4df 100644 --- a/x/ibc/testing/coordinator.go +++ b/x/ibc/testing/coordinator.go @@ -18,7 +18,7 @@ import ( var ( ChainIDPrefix = "testchain" globalStartTime = time.Date(2020, 1, 2, 0, 0, 0, 0, time.UTC) - timeIncrement = time.Second * 5 + TimeIncrement = time.Second * 5 ) // Coordinator is a testing struct which contains N TestChain's. It handles keeping all chains @@ -241,6 +241,10 @@ func (coord *Coordinator) RecvPacket( packetKey := host.PacketCommitmentKey(packet.GetSourcePort(), packet.GetSourceChannel(), packet.GetSequence()) proof, proofHeight := source.QueryProof(packetKey) + // Increment time and commit block so that 5 second delay period passes between send and receive + coord.IncrementTime() + coord.CommitBlock(source, counterparty) + recvMsg := channeltypes.NewMsgRecvPacket(packet, proof, proofHeight, counterparty.SenderAccount.GetAddress()) // receive on counterparty and update source client @@ -280,6 +284,10 @@ func (coord *Coordinator) AcknowledgePacket( packetKey := host.PacketAcknowledgementKey(packet.GetDestPort(), packet.GetDestChannel(), packet.GetSequence()) proof, proofHeight := counterparty.QueryProof(packetKey) + // Increment time and commit block so that 5 second delay period passes between send and receive + coord.IncrementTime() + coord.CommitBlock(source, counterparty) + ackMsg := channeltypes.NewMsgAcknowledgement(packet, ack, proof, proofHeight, source.SenderAccount.GetAddress()) return coord.SendMsgs(source, counterparty, counterpartyClient, []sdk.Msg{ackMsg}) } @@ -291,10 +299,18 @@ func (coord *Coordinator) RelayPacket( sourceClient, counterpartyClient string, packet channeltypes.Packet, ack []byte, ) error { + // Increment time and commit block so that 5 second delay period passes between send and receive + coord.IncrementTime() + coord.CommitBlock(counterparty) + if err := coord.RecvPacket(source, counterparty, sourceClient, packet); err != nil { return err } + // Increment time and commit block so that 5 second delay period passes between send and receive + coord.IncrementTime() + coord.CommitBlock(source) + return coord.AcknowledgePacket(source, counterparty, counterpartyClient, packet, ack) } @@ -304,7 +320,16 @@ func (coord *Coordinator) RelayPacket( // CONTRACT: this function must be called after every commit on any TestChain. func (coord *Coordinator) IncrementTime() { for _, chain := range coord.Chains { - chain.CurrentHeader.Time = chain.CurrentHeader.Time.Add(timeIncrement) + chain.CurrentHeader.Time = chain.CurrentHeader.Time.Add(TimeIncrement) + chain.App.BeginBlock(abci.RequestBeginBlock{Header: chain.CurrentHeader}) + } +} + +// IncrementTimeBy iterates through all the TestChain's and increments their current header time +// by specified time. 
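// Context for the delay-period test cases earlier in this patch: TimeIncrement is
// 5 seconds, and RecvPacket, AcknowledgePacket and RelayPacket now increment time
// and commit a block before submitting their messages. That is why a one-second
// packet delay period passes in the client state tests while a one-hour delay
// period does not. IncrementTimeBy below lets a test advance all chains by an
// arbitrary duration when a longer delay must elapse.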
+func (coord *Coordinator) IncrementTimeBy(increment time.Duration) { + for _, chain := range coord.Chains { + chain.CurrentHeader.Time = chain.CurrentHeader.Time.Add(increment) chain.App.BeginBlock(abci.RequestBeginBlock{Header: chain.CurrentHeader}) } } From d4a919b7143dc2b5233518c8d1fc299fda66ac77 Mon Sep 17 00:00:00 2001 From: MD Aleem <72057206+aleem1413@users.noreply.github.com> Date: Mon, 7 Dec 2020 19:21:38 +0530 Subject: [PATCH 39/40] change TmService's validator pubkey type to any (#8060) * change validator pubkey type to any * add test for vslidator pubkey * change address to string Co-authored-by: Jonathan Gimeno --- client/grpc/tmservice/query.pb.go | 266 +++++++++--------- client/grpc/tmservice/service.go | 29 +- client/grpc/tmservice/service_test.go | 12 +- client/rpc/validators.go | 19 +- .../base/tendermint/v1beta1/query.proto | 9 +- 5 files changed, 187 insertions(+), 148 deletions(-) diff --git a/client/grpc/tmservice/query.pb.go b/client/grpc/tmservice/query.pb.go index c08d33dc2..20f3ee090 100644 --- a/client/grpc/tmservice/query.pb.go +++ b/client/grpc/tmservice/query.pb.go @@ -6,12 +6,13 @@ package tmservice import ( context "context" fmt "fmt" + types "github.com/cosmos/cosmos-sdk/codec/types" query "github.com/cosmos/cosmos-sdk/types/query" _ "github.com/gogo/protobuf/gogoproto" grpc1 "github.com/gogo/protobuf/grpc" proto "github.com/gogo/protobuf/proto" p2p "github.com/tendermint/tendermint/proto/tendermint/p2p" - types "github.com/tendermint/tendermint/proto/tendermint/types" + types1 "github.com/tendermint/tendermint/proto/tendermint/types" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -258,10 +259,10 @@ func (m *GetLatestValidatorSetResponse) GetPagination() *query.PageResponse { // Validator is the type for the validator-set. type Validator struct { - Address []byte `protobuf:"bytes,1,opt,name=address,proto3" json:"address,omitempty"` - PubKey string `protobuf:"bytes,2,opt,name=pub_key,json=pubKey,proto3" json:"pub_key,omitempty"` - VotingPower int64 `protobuf:"varint,3,opt,name=voting_power,json=votingPower,proto3" json:"voting_power,omitempty"` - ProposerPriority int64 `protobuf:"varint,4,opt,name=proposer_priority,json=proposerPriority,proto3" json:"proposer_priority,omitempty"` + Address string `protobuf:"bytes,1,opt,name=address,proto3" json:"address,omitempty"` + PubKey *types.Any `protobuf:"bytes,2,opt,name=pub_key,json=pubKey,proto3" json:"pub_key,omitempty"` + VotingPower int64 `protobuf:"varint,3,opt,name=voting_power,json=votingPower,proto3" json:"voting_power,omitempty"` + ProposerPriority int64 `protobuf:"varint,4,opt,name=proposer_priority,json=proposerPriority,proto3" json:"proposer_priority,omitempty"` } func (m *Validator) Reset() { *m = Validator{} } @@ -297,18 +298,18 @@ func (m *Validator) XXX_DiscardUnknown() { var xxx_messageInfo_Validator proto.InternalMessageInfo -func (m *Validator) GetAddress() []byte { +func (m *Validator) GetAddress() string { if m != nil { return m.Address } - return nil + return "" } -func (m *Validator) GetPubKey() string { +func (m *Validator) GetPubKey() *types.Any { if m != nil { return m.PubKey } - return "" + return nil } func (m *Validator) GetVotingPower() int64 { @@ -372,8 +373,8 @@ func (m *GetBlockByHeightRequest) GetHeight() int64 { // GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. 
type GetBlockByHeightResponse struct { - BlockId *types.BlockID `protobuf:"bytes,1,opt,name=block_id,json=blockId,proto3" json:"block_id,omitempty"` - Block *types.Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"` + BlockId *types1.BlockID `protobuf:"bytes,1,opt,name=block_id,json=blockId,proto3" json:"block_id,omitempty"` + Block *types1.Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"` } func (m *GetBlockByHeightResponse) Reset() { *m = GetBlockByHeightResponse{} } @@ -409,14 +410,14 @@ func (m *GetBlockByHeightResponse) XXX_DiscardUnknown() { var xxx_messageInfo_GetBlockByHeightResponse proto.InternalMessageInfo -func (m *GetBlockByHeightResponse) GetBlockId() *types.BlockID { +func (m *GetBlockByHeightResponse) GetBlockId() *types1.BlockID { if m != nil { return m.BlockId } return nil } -func (m *GetBlockByHeightResponse) GetBlock() *types.Block { +func (m *GetBlockByHeightResponse) GetBlock() *types1.Block { if m != nil { return m.Block } @@ -462,8 +463,8 @@ var xxx_messageInfo_GetLatestBlockRequest proto.InternalMessageInfo // GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. type GetLatestBlockResponse struct { - BlockId *types.BlockID `protobuf:"bytes,1,opt,name=block_id,json=blockId,proto3" json:"block_id,omitempty"` - Block *types.Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"` + BlockId *types1.BlockID `protobuf:"bytes,1,opt,name=block_id,json=blockId,proto3" json:"block_id,omitempty"` + Block *types1.Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"` } func (m *GetLatestBlockResponse) Reset() { *m = GetLatestBlockResponse{} } @@ -499,14 +500,14 @@ func (m *GetLatestBlockResponse) XXX_DiscardUnknown() { var xxx_messageInfo_GetLatestBlockResponse proto.InternalMessageInfo -func (m *GetLatestBlockResponse) GetBlockId() *types.BlockID { +func (m *GetLatestBlockResponse) GetBlockId() *types1.BlockID { if m != nil { return m.BlockId } return nil } -func (m *GetLatestBlockResponse) GetBlock() *types.Block { +func (m *GetLatestBlockResponse) GetBlock() *types1.Block { if m != nil { return m.Block } @@ -865,72 +866,74 @@ func init() { } var fileDescriptor_40c93fb3ef485c5d = []byte{ - // 1040 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xcf, 0xc6, 0xad, 0x1d, 0x3f, 0x57, 0x90, 0x4c, 0x4a, 0xb3, 0xb5, 0x52, 0x37, 0xec, 0xa1, - 0x4d, 0x88, 0xb2, 0x2b, 0x3b, 0x84, 0x82, 0xf8, 0x27, 0x85, 0x80, 0x1b, 0xb5, 0x54, 0xd1, 0x06, - 0x71, 0x40, 0x48, 0xab, 0xb5, 0x77, 0xb2, 0x19, 0xc5, 0xde, 0x99, 0xee, 0x8c, 0x8d, 0x2c, 0x54, - 0x81, 0xfa, 0x01, 0x10, 0x12, 0x5f, 0x81, 0x0b, 0x5f, 0x80, 0x23, 0xe2, 0xc8, 0x8d, 0x4a, 0x48, - 0xd0, 0x23, 0x4a, 0xf8, 0x14, 0x9c, 0xd0, 0xce, 0xcc, 0xda, 0xbb, 0xcd, 0x1f, 0xdb, 0x39, 0x20, - 0xf5, 0xe4, 0xd9, 0xf7, 0xde, 0xef, 0xcd, 0xef, 0xf7, 0xe6, 0xcd, 0xf3, 0xc0, 0x1b, 0x6d, 0xca, - 0xbb, 0x94, 0x3b, 0x2d, 0x9f, 0x63, 0x47, 0xe0, 0x28, 0xc0, 0x71, 0x97, 0x44, 0xc2, 0xe9, 0xd7, - 0x5b, 0x58, 0xf8, 0x75, 0xe7, 0x71, 0x0f, 0xc7, 0x03, 0x9b, 0xc5, 0x54, 0x50, 0x54, 0x53, 0xb1, - 0x76, 0x12, 0x6b, 0x8f, 0x62, 0x6d, 0x1d, 0x5b, 0xbd, 0x1e, 0xd2, 0x90, 0xca, 0x50, 0x27, 0x59, - 0x29, 0x54, 0x75, 0x39, 0xa4, 0x34, 0xec, 0x60, 0xc7, 0x67, 0xc4, 0xf1, 0xa3, 0x88, 0x0a, 0x5f, - 0x10, 0x1a, 0x71, 0xed, 0xad, 0x66, 0xf6, 0x64, 0x0d, 0xe6, 0x88, 0x01, 0xc3, 0xa9, 0x6f, 0x39, - 0xe3, 0x93, 0x76, 0xa7, 0xd5, 0xa1, 0xed, 0xa3, 0x73, 0xbd, 0x59, 0x6c, 0x4e, 
0x97, 0x14, 0x31, - 0x94, 0xc4, 0xfc, 0x90, 0x44, 0x92, 0x84, 0x8a, 0xb5, 0xbe, 0x35, 0xa0, 0xd6, 0xc4, 0xe2, 0x73, - 0xbf, 0x43, 0x02, 0x5f, 0xd0, 0x78, 0x1f, 0x8b, 0xed, 0xc1, 0x7d, 0x4c, 0xc2, 0x43, 0xe1, 0xe2, - 0xc7, 0x3d, 0xcc, 0x05, 0xba, 0x01, 0xc5, 0x43, 0x69, 0x30, 0x8d, 0x15, 0x63, 0xb5, 0xe0, 0xea, - 0x2f, 0xf4, 0x09, 0xc0, 0x28, 0x9d, 0x39, 0xbb, 0x62, 0xac, 0x56, 0x1a, 0x77, 0xec, 0x6c, 0x9d, - 0x54, 0x01, 0xf5, 0xde, 0xf6, 0x9e, 0x1f, 0x62, 0x9d, 0xd3, 0xcd, 0x20, 0xad, 0xe7, 0x06, 0xdc, - 0x3e, 0x97, 0x02, 0x67, 0x34, 0xe2, 0x18, 0xbd, 0x0e, 0xd7, 0xa4, 0x7e, 0x2f, 0xc7, 0xa4, 0x22, - 0x6d, 0x2a, 0x14, 0xed, 0x02, 0xf4, 0xd3, 0x14, 0xdc, 0x9c, 0x5d, 0x29, 0xac, 0x56, 0x1a, 0x6b, - 0xf6, 0xc5, 0xc7, 0x66, 0x0f, 0x37, 0x75, 0x33, 0x60, 0xd4, 0xcc, 0x29, 0x2b, 0x48, 0x65, 0x77, - 0xc7, 0x2a, 0x53, 0x54, 0x73, 0xd2, 0x0e, 0x60, 0xb9, 0x89, 0xc5, 0x43, 0x5f, 0x60, 0x9e, 0xd3, - 0x97, 0x96, 0x36, 0x5f, 0x42, 0xe3, 0xd2, 0x25, 0xfc, 0xd3, 0x80, 0x5b, 0xe7, 0x6c, 0xf4, 0x72, - 0x17, 0xf0, 0x3b, 0x03, 0xca, 0xc3, 0x2d, 0x90, 0x09, 0x25, 0x3f, 0x08, 0x62, 0xcc, 0xb9, 0xe4, - 0x7f, 0xcd, 0x4d, 0x3f, 0xd1, 0x12, 0x94, 0x58, 0xaf, 0xe5, 0x1d, 0xe1, 0x81, 0x6c, 0xc4, 0xb2, - 0x5b, 0x64, 0xbd, 0xd6, 0x03, 0x3c, 0x48, 0x74, 0xf7, 0xa9, 0x20, 0x51, 0xe8, 0x31, 0xfa, 0x15, - 0x8e, 0x25, 0x97, 0x82, 0x5b, 0x51, 0xb6, 0xbd, 0xc4, 0x84, 0xd6, 0x61, 0x81, 0xc5, 0x94, 0x51, - 0x8e, 0x63, 0x8f, 0xc5, 0x84, 0xc6, 0x44, 0x0c, 0xcc, 0x2b, 0x32, 0x6e, 0x3e, 0x75, 0xec, 0x69, - 0xbb, 0x55, 0x87, 0xa5, 0x26, 0x16, 0xdb, 0x49, 0xd9, 0x26, 0xbc, 0x27, 0xd6, 0x37, 0x60, 0x9e, - 0x86, 0xe8, 0x63, 0x79, 0x13, 0xe6, 0xd4, 0xb1, 0x90, 0x40, 0x1f, 0xff, 0xcd, 0x6c, 0x95, 0xd5, - 0xad, 0x96, 0xd0, 0xdd, 0x1d, 0xb7, 0x24, 0x43, 0x77, 0x03, 0xb4, 0x01, 0x57, 0xe5, 0x52, 0x5f, - 0xba, 0xa5, 0x73, 0x20, 0xae, 0x8a, 0xb2, 0x96, 0xe0, 0xb5, 0x61, 0x73, 0x28, 0x87, 0x62, 0x6c, - 0x3d, 0x81, 0x1b, 0x2f, 0x3a, 0xfe, 0x4f, 0x5e, 0x8b, 0xb0, 0xd0, 0xc4, 0x62, 0x7f, 0x10, 0xb5, - 0x49, 0x14, 0xa6, 0x9c, 0x6c, 0x40, 0x59, 0xa3, 0xe6, 0x63, 0x42, 0x89, 0x2b, 0x93, 0xa4, 0x33, - 0xe7, 0xa6, 0x9f, 0xd6, 0x75, 0x19, 0xff, 0x88, 0x06, 0x78, 0x37, 0x3a, 0xa0, 0x69, 0x96, 0x5f, - 0x0d, 0x58, 0xcc, 0x99, 0x75, 0x9e, 0x07, 0xb0, 0x10, 0xe0, 0x03, 0xbf, 0xd7, 0x11, 0x5e, 0x44, - 0x03, 0xec, 0x91, 0xe8, 0x80, 0x6a, 0x81, 0xb7, 0xb3, 0x6c, 0x59, 0x83, 0xd9, 0x3b, 0x2a, 0x70, - 0x98, 0xe3, 0xd5, 0x20, 0x6f, 0x40, 0x5f, 0xc2, 0xa2, 0xcf, 0x58, 0x87, 0xb4, 0x65, 0xaf, 0x7a, - 0x7d, 0x1c, 0xf3, 0xd1, 0x24, 0x5c, 0x1f, 0x7b, 0x73, 0x54, 0xb8, 0x4c, 0x8d, 0x32, 0x79, 0xb4, - 0xdd, 0xfa, 0xd7, 0x80, 0x4a, 0x26, 0x06, 0x21, 0xb8, 0x12, 0xf9, 0x5d, 0x2c, 0xd9, 0x96, 0x5d, - 0xb9, 0x46, 0x37, 0x61, 0xce, 0x67, 0xcc, 0x93, 0x76, 0xd5, 0xf7, 0x25, 0x9f, 0xb1, 0x47, 0x89, - 0xcb, 0x84, 0x52, 0x4a, 0xa8, 0xa0, 0x3c, 0xfa, 0x13, 0xdd, 0x02, 0x08, 0x89, 0xf0, 0xda, 0xb4, - 0xdb, 0x25, 0x42, 0x36, 0x7a, 0xd9, 0x2d, 0x87, 0x44, 0x7c, 0x24, 0x0d, 0x89, 0xbb, 0xd5, 0x23, - 0x9d, 0xc0, 0x13, 0x7e, 0xc8, 0xcd, 0xab, 0xca, 0x2d, 0x2d, 0x9f, 0xf9, 0x21, 0x97, 0x68, 0x3a, - 0xd4, 0x5a, 0xd4, 0x68, 0xaa, 0x99, 0xa2, 0x8f, 0x53, 0x74, 0x80, 0x19, 0x37, 0x4b, 0x72, 0x88, - 0xdc, 0x19, 0x57, 0x8a, 0x4f, 0x69, 0xd0, 0xeb, 0x60, 0xbd, 0xcb, 0x0e, 0x66, 0xdc, 0xba, 0x0f, - 0x45, 0x65, 0x4c, 0x64, 0x33, 0x5f, 0x1c, 0xa6, 0xb2, 0x93, 0x75, 0x56, 0xdb, 0x6c, 0x5e, 0xdb, - 0x3c, 0x14, 0x78, 0xaf, 0xab, 0x15, 0x27, 0xcb, 0xc6, 0xd3, 0x32, 0x94, 0xf6, 0x71, 0xdc, 0x27, - 0x6d, 0x8c, 0x7e, 0x32, 0xa0, 0x92, 0xe9, 0x0a, 0xd4, 0x18, 0x47, 0xec, 0x74, 0x67, 0x55, 0x37, - 0xa7, 
0xc2, 0xa8, 0xb6, 0xb3, 0xea, 0x4f, 0xff, 0xf8, 0xe7, 0x87, 0xd9, 0x75, 0xb4, 0xe6, 0x8c, - 0x79, 0x72, 0x0c, 0x9b, 0x12, 0xfd, 0x68, 0x00, 0x8c, 0x2e, 0x02, 0xaa, 0x4f, 0xb0, 0x6d, 0xfe, - 0x26, 0x55, 0x1b, 0xd3, 0x40, 0x34, 0x51, 0x47, 0x12, 0x5d, 0x43, 0x77, 0xc7, 0x11, 0xd5, 0xd7, - 0x0f, 0xfd, 0x6c, 0xc0, 0x2b, 0xf9, 0x19, 0x82, 0xb6, 0x26, 0xd8, 0xf7, 0xf4, 0x30, 0xaa, 0xbe, - 0x35, 0x2d, 0x4c, 0x53, 0xde, 0x92, 0x94, 0x1d, 0xb4, 0x31, 0x8e, 0xb2, 0x1c, 0x3a, 0xdc, 0xe9, - 0xc8, 0x1c, 0xe8, 0x17, 0x03, 0xe6, 0x5f, 0x1c, 0xcb, 0xe8, 0xde, 0x04, 0x1c, 0xce, 0x9a, 0xfd, - 0xd5, 0xb7, 0xa7, 0x07, 0x6a, 0xfa, 0xf7, 0x24, 0xfd, 0x3a, 0x72, 0x26, 0xa4, 0xff, 0xb5, 0xfa, - 0x57, 0x79, 0x82, 0x7e, 0x37, 0x32, 0x63, 0x3d, 0xfb, 0x9f, 0x8f, 0xde, 0x9b, 0xb8, 0x92, 0x67, - 0xbc, 0x49, 0xaa, 0xef, 0x5f, 0x12, 0xad, 0xf5, 0xbc, 0x23, 0xf5, 0x6c, 0xa2, 0xfa, 0x38, 0x3d, - 0xa3, 0xe7, 0x42, 0x7a, 0x24, 0x7f, 0x19, 0xf2, 0xcf, 0xf5, 0xac, 0x87, 0x20, 0xfa, 0x60, 0x02, - 0x56, 0x17, 0x3c, 0x62, 0xab, 0x1f, 0x5e, 0x1a, 0xaf, 0x75, 0xbd, 0x2b, 0x75, 0x6d, 0xa1, 0xcd, - 0x29, 0x74, 0xa5, 0x67, 0xb5, 0xfd, 0xf0, 0xb7, 0xe3, 0x9a, 0xf1, 0xec, 0xb8, 0x66, 0xfc, 0x7d, - 0x5c, 0x33, 0xbe, 0x3f, 0xa9, 0xcd, 0x3c, 0x3b, 0xa9, 0xcd, 0x3c, 0x3f, 0xa9, 0xcd, 0x7c, 0xd1, - 0x08, 0x89, 0x38, 0xec, 0xb5, 0xec, 0x36, 0xed, 0xa6, 0x89, 0xd5, 0xcf, 0x06, 0x0f, 0x8e, 0x9c, - 0x76, 0x87, 0xe0, 0x48, 0x38, 0x61, 0xcc, 0xda, 0x8e, 0xe8, 0x72, 0x35, 0xc6, 0x5a, 0x45, 0xf9, - 0x74, 0xdf, 0xfc, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x82, 0x6b, 0xb0, 0xbd, 0xc0, 0x0c, 0x00, 0x00, + // 1059 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0xcf, 0x6f, 0xdc, 0xc4, + 0x17, 0x8f, 0xb3, 0x6d, 0x36, 0x79, 0xfb, 0xd5, 0x97, 0x64, 0x12, 0x1a, 0xc7, 0x4a, 0xb7, 0xc1, + 0x87, 0x36, 0x21, 0x8a, 0xad, 0xdd, 0x10, 0x0a, 0xe2, 0x97, 0x08, 0x81, 0x34, 0x6a, 0xa9, 0x22, + 0x07, 0x71, 0x40, 0x48, 0x96, 0x77, 0x3d, 0x71, 0x46, 0xd9, 0xf5, 0x4c, 0x3d, 0xe3, 0xa0, 0x15, + 0xaa, 0x40, 0xfd, 0x0b, 0x90, 0xf8, 0x17, 0x7a, 0xe1, 0x1f, 0xe0, 0x88, 0x38, 0x72, 0xa3, 0x12, + 0x12, 0xf4, 0x88, 0x12, 0xfe, 0x0a, 0x4e, 0xc8, 0x33, 0xe3, 0x5d, 0xbb, 0x49, 0xba, 0xbb, 0x39, + 0x20, 0x71, 0xf2, 0xcc, 0xfb, 0x35, 0x9f, 0xcf, 0x9b, 0xf7, 0x9e, 0x07, 0x5e, 0x6f, 0x53, 0xde, + 0xa5, 0xdc, 0x6d, 0x05, 0x1c, 0xbb, 0x02, 0xc7, 0x21, 0x4e, 0xba, 0x24, 0x16, 0xee, 0x49, 0xa3, + 0x85, 0x45, 0xd0, 0x70, 0x1f, 0xa5, 0x38, 0xe9, 0x39, 0x2c, 0xa1, 0x82, 0xa2, 0xba, 0xb2, 0x75, + 0x32, 0x5b, 0x67, 0x60, 0xeb, 0x68, 0x5b, 0x6b, 0x21, 0xa2, 0x11, 0x95, 0xa6, 0x6e, 0xb6, 0x52, + 0x5e, 0xd6, 0x52, 0x44, 0x69, 0xd4, 0xc1, 0xae, 0xdc, 0xb5, 0xd2, 0x43, 0x37, 0x88, 0x75, 0x40, + 0x6b, 0x59, 0xab, 0x02, 0x46, 0xdc, 0x20, 0x8e, 0xa9, 0x08, 0x04, 0xa1, 0x31, 0xd7, 0x5a, 0xab, + 0x00, 0x87, 0x35, 0x99, 0x2b, 0x7a, 0x0c, 0xe7, 0xba, 0xe5, 0x82, 0x4e, 0xca, 0xdd, 0x56, 0x87, + 0xb6, 0x8f, 0x2f, 0xd5, 0x16, 0x7d, 0x4b, 0x94, 0x25, 0xbf, 0x3e, 0x5b, 0x16, 0x44, 0x24, 0x96, + 0x20, 0x94, 0xad, 0xfd, 0xad, 0x01, 0xf5, 0x5d, 0x2c, 0x3e, 0x0f, 0x3a, 0x24, 0x0c, 0x04, 0x4d, + 0x0e, 0xb0, 0xd8, 0xee, 0xdd, 0xc3, 0x24, 0x3a, 0x12, 0x1e, 0x7e, 0x94, 0x62, 0x2e, 0xd0, 0x0d, + 0x98, 0x3a, 0x92, 0x02, 0xd3, 0x58, 0x31, 0x56, 0x2b, 0x9e, 0xde, 0xa1, 0x4f, 0x00, 0x06, 0xe1, + 0xcc, 0xc9, 0x15, 0x63, 0xb5, 0xd6, 0xbc, 0xed, 0x14, 0x53, 0xa8, 0x72, 0xab, 0xcf, 0x76, 0xf6, + 0x83, 0x08, 0xeb, 0x98, 0x5e, 0xc1, 0xd3, 0x7e, 0x6e, 0xc0, 0xad, 0x4b, 0x21, 0x70, 0x46, 0x63, + 0x8e, 0xd1, 0x6b, 0xf0, 0x3f, 0xc9, 0xdf, 0x2f, 0x21, 0xa9, 0x49, 0x99, 0x32, 
0x45, 0x7b, 0x00, + 0x27, 0x79, 0x08, 0x6e, 0x4e, 0xae, 0x54, 0x56, 0x6b, 0xcd, 0x35, 0xe7, 0xe5, 0x37, 0xea, 0xf4, + 0x0f, 0xf5, 0x0a, 0xce, 0x68, 0xb7, 0xc4, 0xac, 0x22, 0x99, 0xdd, 0x19, 0xca, 0x4c, 0x41, 0x2d, + 0x51, 0x3b, 0x84, 0xe5, 0x5d, 0x2c, 0x1e, 0x04, 0x02, 0xf3, 0x12, 0xbf, 0x3c, 0xb5, 0xe5, 0x14, + 0x1a, 0x57, 0x4e, 0xe1, 0xef, 0x06, 0xdc, 0xbc, 0xe4, 0xa0, 0xff, 0x76, 0x02, 0x9f, 0x1a, 0x30, + 0xd3, 0x3f, 0x02, 0x99, 0x50, 0x0d, 0xc2, 0x30, 0xc1, 0x9c, 0x4b, 0xfc, 0x33, 0x5e, 0xbe, 0x45, + 0x1b, 0x50, 0x65, 0x69, 0xcb, 0x3f, 0xc6, 0x3d, 0x5d, 0x88, 0x0b, 0x8e, 0x6a, 0x3d, 0x27, 0xef, + 0x4a, 0xe7, 0xc3, 0xb8, 0xe7, 0x4d, 0xb1, 0xb4, 0x75, 0x1f, 0xf7, 0xb2, 0x6c, 0x9c, 0x50, 0x41, + 0xe2, 0xc8, 0x67, 0xf4, 0x2b, 0x9c, 0x48, 0x84, 0x15, 0xaf, 0xa6, 0x64, 0xfb, 0x99, 0x08, 0xad, + 0xc3, 0x1c, 0x4b, 0x28, 0xa3, 0x1c, 0x27, 0x3e, 0x4b, 0x08, 0x4d, 0x88, 0xe8, 0x99, 0xd7, 0xa4, + 0xdd, 0x6c, 0xae, 0xd8, 0xd7, 0x72, 0xbb, 0x01, 0x8b, 0xbb, 0x58, 0x6c, 0x67, 0xc9, 0x1c, 0xb1, + 0x7b, 0xec, 0x6f, 0xc0, 0x3c, 0xef, 0xa2, 0x2f, 0xeb, 0x0d, 0x98, 0x56, 0x97, 0x45, 0x42, 0x5d, + 0x14, 0x4b, 0xc5, 0xdc, 0xab, 0x5e, 0x97, 0xae, 0x7b, 0x3b, 0x5e, 0x55, 0x9a, 0xee, 0x85, 0x68, + 0x03, 0xae, 0xcb, 0xa5, 0xce, 0xc0, 0xe2, 0x25, 0x2e, 0x9e, 0xb2, 0xb2, 0x17, 0xe1, 0xd5, 0x7e, + 0xc9, 0x28, 0x85, 0x42, 0x6c, 0x3f, 0x86, 0x1b, 0x2f, 0x2a, 0xfe, 0x4d, 0x5c, 0xf3, 0x30, 0xb7, + 0x8b, 0xc5, 0x41, 0x2f, 0x6e, 0x93, 0x38, 0xca, 0x31, 0x39, 0x80, 0x8a, 0x42, 0x8d, 0xc7, 0x84, + 0x2a, 0x57, 0x22, 0x09, 0x67, 0xda, 0xcb, 0xb7, 0xf6, 0x82, 0xb4, 0x7f, 0x48, 0x43, 0xbc, 0x17, + 0x1f, 0xd2, 0x3c, 0xca, 0xcf, 0x06, 0xcc, 0x97, 0xc4, 0x3a, 0xce, 0x7d, 0x98, 0x0b, 0xf1, 0x61, + 0x90, 0x76, 0x84, 0x1f, 0xd3, 0x10, 0xfb, 0x24, 0x3e, 0xa4, 0x9a, 0xe0, 0xad, 0x22, 0x5a, 0xd6, + 0x64, 0xce, 0x8e, 0x32, 0xec, 0xc7, 0x78, 0x25, 0x2c, 0x0b, 0xd0, 0x97, 0x30, 0x1f, 0x30, 0xd6, + 0x21, 0x6d, 0x59, 0xc1, 0xfe, 0x09, 0x4e, 0xf8, 0x60, 0x3e, 0xae, 0x0f, 0xed, 0x27, 0x65, 0x2e, + 0x43, 0xa3, 0x42, 0x1c, 0x2d, 0xb7, 0xff, 0x36, 0xa0, 0x56, 0xb0, 0x41, 0x08, 0xae, 0xc5, 0x41, + 0x17, 0xeb, 0x7e, 0x90, 0x6b, 0xb4, 0x04, 0xd3, 0x01, 0x63, 0xbe, 0x94, 0x4f, 0xea, 0x3e, 0x61, + 0xec, 0x61, 0xa6, 0x32, 0xa1, 0x9a, 0x03, 0xaa, 0x28, 0x8d, 0xde, 0xa2, 0x9b, 0x00, 0x11, 0x11, + 0x7e, 0x9b, 0x76, 0xbb, 0x44, 0xc8, 0x42, 0x9f, 0xf1, 0x66, 0x22, 0x22, 0x3e, 0x92, 0x82, 0x4c, + 0xdd, 0x4a, 0x49, 0x27, 0xf4, 0x45, 0x10, 0x71, 0xf3, 0xba, 0x52, 0x4b, 0xc9, 0x67, 0x41, 0xc4, + 0xa5, 0x37, 0xed, 0x73, 0x9d, 0xd2, 0xde, 0x54, 0x23, 0x45, 0x1f, 0xe7, 0xde, 0x21, 0x66, 0xdc, + 0xac, 0xca, 0xd1, 0x72, 0x7b, 0x58, 0x2a, 0x3e, 0xa5, 0x61, 0xda, 0xc1, 0xfa, 0x94, 0x1d, 0xcc, + 0xb8, 0x7d, 0x0f, 0xa6, 0x94, 0x30, 0xa3, 0xcd, 0x02, 0x71, 0x94, 0xd3, 0xce, 0xd6, 0x45, 0x6e, + 0x93, 0x65, 0x6e, 0xb3, 0x50, 0xe1, 0x69, 0x57, 0x33, 0xce, 0x96, 0xcd, 0x27, 0x33, 0x50, 0x3d, + 0xc0, 0xc9, 0x09, 0x69, 0x63, 0xf4, 0x83, 0x01, 0xb5, 0x42, 0x55, 0xa0, 0xe6, 0x30, 0x60, 0xe7, + 0x2b, 0xcb, 0xda, 0x1c, 0xcb, 0x47, 0x95, 0x9d, 0xdd, 0x78, 0xf2, 0xdb, 0x5f, 0xdf, 0x4f, 0xae, + 0xa3, 0x35, 0x77, 0xc8, 0x1b, 0xa5, 0x5f, 0x94, 0xe8, 0xa9, 0x01, 0x30, 0x68, 0x04, 0xd4, 0x18, + 0xe1, 0xd8, 0x72, 0x27, 0x59, 0xcd, 0x71, 0x5c, 0x34, 0x50, 0x57, 0x02, 0x5d, 0x43, 0x77, 0x86, + 0x01, 0xd5, 0xed, 0x87, 0x7e, 0x34, 0xe0, 0xff, 0xe5, 0x19, 0x82, 0xb6, 0x46, 0x38, 0xf7, 0xfc, + 0x30, 0xb2, 0xde, 0x1c, 0xd7, 0x4d, 0x43, 0xde, 0x92, 0x90, 0x5d, 0xb4, 0x31, 0x0c, 0xb2, 0x1c, + 0x3a, 0xdc, 0xed, 0xc8, 0x18, 0xe8, 0x27, 0x03, 0x66, 0x5f, 0x1c, 0xcb, 0xe8, 0xee, 0x08, 0x18, + 0x2e, 
0x9a, 0xfd, 0xd6, 0x5b, 0xe3, 0x3b, 0x6a, 0xf8, 0x77, 0x25, 0xfc, 0x06, 0x72, 0x47, 0x84, + 0xff, 0xb5, 0xfa, 0xab, 0x3c, 0x46, 0xbf, 0x1a, 0x85, 0xb1, 0x5e, 0x7c, 0x09, 0xa0, 0x77, 0x47, + 0xce, 0xe4, 0x05, 0x2f, 0x15, 0xeb, 0xbd, 0x2b, 0x7a, 0x6b, 0x3e, 0x6f, 0x4b, 0x3e, 0x9b, 0xa8, + 0x31, 0x8c, 0xcf, 0xe0, 0x11, 0x91, 0x5f, 0xc9, 0x1f, 0x86, 0xfc, 0xb9, 0x5e, 0xf4, 0x3c, 0x44, + 0xef, 0x8f, 0x80, 0xea, 0x25, 0x4f, 0x5b, 0xeb, 0x83, 0x2b, 0xfb, 0x6b, 0x5e, 0xef, 0x48, 0x5e, + 0x5b, 0x68, 0x73, 0x0c, 0x5e, 0xf9, 0x5d, 0x6d, 0x3f, 0xf8, 0xe5, 0xb4, 0x6e, 0x3c, 0x3b, 0xad, + 0x1b, 0x7f, 0x9e, 0xd6, 0x8d, 0xef, 0xce, 0xea, 0x13, 0xcf, 0xce, 0xea, 0x13, 0xcf, 0xcf, 0xea, + 0x13, 0x5f, 0x34, 0x23, 0x22, 0x8e, 0xd2, 0x96, 0xd3, 0xa6, 0xdd, 0x3c, 0xb0, 0xfa, 0x6c, 0xf0, + 0xf0, 0xd8, 0x6d, 0x77, 0x08, 0x8e, 0x85, 0x1b, 0x25, 0xac, 0xed, 0x8a, 0x2e, 0x57, 0x63, 0xac, + 0x35, 0x25, 0x5f, 0x3a, 0x9b, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0xf6, 0xf3, 0x0e, 0x76, 0xf1, + 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1418,10 +1421,15 @@ func (m *Validator) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x18 } - if len(m.PubKey) > 0 { - i -= len(m.PubKey) - copy(dAtA[i:], m.PubKey) - i = encodeVarintQuery(dAtA, i, uint64(len(m.PubKey))) + if m.PubKey != nil { + { + size, err := m.PubKey.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQuery(dAtA, i, uint64(size)) + } i-- dAtA[i] = 0x12 } @@ -1923,8 +1931,8 @@ func (m *Validator) Size() (n int) { if l > 0 { n += 1 + l + sovQuery(uint64(l)) } - l = len(m.PubKey) - if l > 0 { + if m.PubKey != nil { + l = m.PubKey.Size() n += 1 + l + sovQuery(uint64(l)) } if m.VotingPower != 0 { @@ -2618,40 +2626,6 @@ func (m *Validator) Unmarshal(dAtA []byte) error { if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQuery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthQuery - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthQuery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Address = append(m.Address[:0], dAtA[iNdEx:postIndex]...) 
- if m.Address == nil { - m.Address = []byte{} - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PubKey", wireType) - } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { @@ -2678,7 +2652,43 @@ func (m *Validator) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.PubKey = string(dAtA[iNdEx:postIndex]) + m.Address = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field PubKey", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.PubKey == nil { + m.PubKey = &types.Any{} + } + if err := m.PubKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex case 3: if wireType != 0 { @@ -2873,7 +2883,7 @@ func (m *GetBlockByHeightResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.BlockId == nil { - m.BlockId = &types.BlockID{} + m.BlockId = &types1.BlockID{} } if err := m.BlockId.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -2909,7 +2919,7 @@ func (m *GetBlockByHeightResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Block == nil { - m.Block = &types.Block{} + m.Block = &types1.Block{} } if err := m.Block.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -3051,7 +3061,7 @@ func (m *GetLatestBlockResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.BlockId == nil { - m.BlockId = &types.BlockID{} + m.BlockId = &types1.BlockID{} } if err := m.BlockId.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -3087,7 +3097,7 @@ func (m *GetLatestBlockResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Block == nil { - m.Block = &types.Block{} + m.Block = &types1.Block{} } if err := m.Block.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err diff --git a/client/grpc/tmservice/service.go b/client/grpc/tmservice/service.go index b4d9f8c95..58d44af57 100644 --- a/client/grpc/tmservice/service.go +++ b/client/grpc/tmservice/service.go @@ -11,6 +11,7 @@ import ( "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/rpc" codectypes "github.com/cosmos/cosmos-sdk/codec/types" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" qtypes "github.com/cosmos/cosmos-sdk/types/query" "github.com/cosmos/cosmos-sdk/version" ) @@ -22,6 +23,7 @@ type queryServer struct { } var _ ServiceServer = queryServer{} +var _ codectypes.UnpackInterfacesMessage = &GetLatestValidatorSetResponse{} // NewQueryServer creates a new tendermint query server. 
func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry) ServiceServer { @@ -105,16 +107,31 @@ func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *GetLatestVa } for i, validator := range validatorsRes.Validators { + anyPub, err := codectypes.NewAnyWithValue(validator.PubKey) + if err != nil { + return nil, err + } outputValidatorsRes.Validators[i] = &Validator{ - Address: validator.Address, + Address: validator.Address.String(), ProposerPriority: validator.ProposerPriority, - PubKey: validator.PubKey, + PubKey: anyPub, VotingPower: validator.VotingPower, } } return outputValidatorsRes, nil } +func (m *GetLatestValidatorSetResponse) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error { + var pubKey cryptotypes.PubKey + for _, val := range m.Validators { + err := unpacker.UnpackAny(val.PubKey, &pubKey) + if err != nil { + return err + } + } + return nil +} + // GetValidatorSetByHeight implements ServiceServer.GetValidatorSetByHeight func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *GetValidatorSetByHeightRequest) (*GetValidatorSetByHeightResponse, error) { page, limit, err := qtypes.ParsePagination(req.Pagination) @@ -142,10 +159,14 @@ func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *GetValida } for i, validator := range validatorsRes.Validators { + anyPub, err := codectypes.NewAnyWithValue(validator.PubKey) + if err != nil { + return nil, err + } outputValidatorsRes.Validators[i] = &Validator{ - Address: validator.Address, + Address: validator.Address.String(), ProposerPriority: validator.ProposerPriority, - PubKey: validator.PubKey, + PubKey: anyPub, VotingPower: validator.VotingPower, } } diff --git a/client/grpc/tmservice/service_test.go b/client/grpc/tmservice/service_test.go index 2210486f4..9724e002d 100644 --- a/client/grpc/tmservice/service_test.go +++ b/client/grpc/tmservice/service_test.go @@ -8,6 +8,8 @@ import ( "github.com/stretchr/testify/suite" "github.com/cosmos/cosmos-sdk/client/grpc/tmservice" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/testutil/network" qtypes "github.com/cosmos/cosmos-sdk/types/query" "github.com/cosmos/cosmos-sdk/types/rest" @@ -98,10 +100,14 @@ func (s IntegrationTestSuite) TestQueryLatestValidatorSet() { val := s.network.Validators[0] // nil pagination - _, err := s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{ + res, err := s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{ Pagination: nil, }) s.Require().NoError(err) + s.Require().Equal(1, len(res.Validators)) + content, ok := res.Validators[0].PubKey.GetCachedValue().(cryptotypes.PubKey) + s.Require().Equal(true, ok) + s.Require().Equal(content, val.PubKey) //with pagination _, err = s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{ @@ -119,6 +125,10 @@ func (s IntegrationTestSuite) TestQueryLatestValidatorSet() { s.Require().NoError(err) var validatorSetRes tmservice.GetLatestValidatorSetResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes)) + s.Require().Equal(1, len(validatorSetRes.Validators)) + anyPub, err := codectypes.NewAnyWithValue(val.PubKey) + s.Require().NoError(err) + s.Require().Equal(validatorSetRes.Validators[0].PubKey, anyPub) } func (s IntegrationTestSuite) 
TestQueryValidatorSetByHeight() { diff --git a/client/rpc/validators.go b/client/rpc/validators.go index 0a15f4bc9..dd0459b1e 100644 --- a/client/rpc/validators.go +++ b/client/rpc/validators.go @@ -15,6 +15,7 @@ import ( "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/rest" ) @@ -66,10 +67,10 @@ func ValidatorCommand() *cobra.Command { // Validator output in bech32 format type ValidatorOutput struct { - Address sdk.ConsAddress `json:"address"` - PubKey string `json:"pub_key"` - ProposerPriority int64 `json:"proposer_priority"` - VotingPower int64 `json:"voting_power"` + Address sdk.ConsAddress `json:"address"` + PubKey cryptotypes.PubKey `json:"pub_key"` + ProposerPriority int64 `json:"proposer_priority"` + VotingPower int64 `json:"voting_power"` } // Validators at a certain height output in bech32 format @@ -99,19 +100,15 @@ func (rvo ResultValidatorsOutput) String() string { return b.String() } -func bech32ValidatorOutput(validator *tmtypes.Validator) (ValidatorOutput, error) { +func validatorOutput(validator *tmtypes.Validator) (ValidatorOutput, error) { pk, err := cryptocodec.FromTmPubKeyInterface(validator.PubKey) if err != nil { return ValidatorOutput{}, err } - bechValPubkey, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeConsPub, pk) - if err != nil { - return ValidatorOutput{}, err - } return ValidatorOutput{ Address: sdk.ConsAddress(validator.Address), - PubKey: bechValPubkey, + PubKey: pk, ProposerPriority: validator.ProposerPriority, VotingPower: validator.VotingPower, }, nil @@ -136,7 +133,7 @@ func GetValidators(clientCtx client.Context, height *int64, page, limit *int) (R } for i := 0; i < len(validatorsRes.Validators); i++ { - outputValidatorsRes.Validators[i], err = bech32ValidatorOutput(validatorsRes.Validators[i]) + outputValidatorsRes.Validators[i], err = validatorOutput(validatorsRes.Validators[i]) if err != nil { return ResultValidatorsOutput{}, err } diff --git a/proto/cosmos/base/tendermint/v1beta1/query.proto b/proto/cosmos/base/tendermint/v1beta1/query.proto index 640522e7a..d50849cd6 100644 --- a/proto/cosmos/base/tendermint/v1beta1/query.proto +++ b/proto/cosmos/base/tendermint/v1beta1/query.proto @@ -2,6 +2,7 @@ syntax = "proto3"; package cosmos.base.tendermint.v1beta1; import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; import "google/api/annotations.proto"; import "tendermint/p2p/types.proto"; import "tendermint/types/block.proto"; @@ -70,10 +71,10 @@ message GetLatestValidatorSetResponse { // Validator is the type for the validator-set. message Validator { - bytes address = 1; - string pub_key = 2; - int64 voting_power = 3; - int64 proposer_priority = 4; + string address = 1; + google.protobuf.Any pub_key = 2; + int64 voting_power = 3; + int64 proposer_priority = 4; } // GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. 
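A minimal consumer-side sketch, not part of this patch, of reading the new `google.protobuf.Any` pub_key field back as a concrete key. It assumes the response was decoded by a codec whose interface registry knows the key types, so that `UnpackInterfaces` has cached the concrete value, mirroring the assertion in the integration test above.

```go
package example

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/client/grpc/tmservice"
	cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
)

// validatorPubKey extracts the concrete public key cached in the Any field.
func validatorPubKey(v *tmservice.Validator) (cryptotypes.PubKey, error) {
	pk, ok := v.PubKey.GetCachedValue().(cryptotypes.PubKey)
	if !ok {
		return nil, fmt.Errorf("unexpected pub_key type %T", v.PubKey.GetCachedValue())
	}
	return pk, nil
}
```

Encoding the key as an Any keeps the concrete key type self-describing rather than relying on the bech32 string the old field carried.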
From 291d966fe93eba49a669e3e397122f3256550f21 Mon Sep 17 00:00:00 2001 From: Emmanuel T Odeke Date: Mon, 7 Dec 2020 12:57:15 -0800 Subject: [PATCH 40/40] x/ibc/core/23-commitment/types: fix MerkleProof.Empty comparisons (#8092) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * x/ibc/core/23-commitment/types: fix MerkleProof.Empty comparisons Fixes invalid pointer creation, reduces on unnecessary allocations inside MerkleProof.Empty, and changes the method to a pointer; the invalid pointer creation was symptomatic of broad use of values. With this change we have improvements whose benchmarks produce: ```shell name old time/op new time/op delta Empty-8 311ns ± 5% 232ns ± 5% -25.49% (p=0.000 n=20+19) name old alloc/op new alloc/op delta Empty-8 56.0B ± 0% 8.0B ± 0% -85.71% (p=0.000 n=20+20) name old allocs/op new allocs/op delta Empty-8 3.00 ± 0% 1.00 ± 0% -66.67% (p=0.000 n=20+20) ``` Fixes #8091 * Move Empty godoc to right place + add comments for blank* --- x/ibc/core/23-commitment/types/bench_test.go | 15 +++++++++++++++ x/ibc/core/23-commitment/types/merkle.go | 9 +++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 x/ibc/core/23-commitment/types/bench_test.go diff --git a/x/ibc/core/23-commitment/types/bench_test.go b/x/ibc/core/23-commitment/types/bench_test.go new file mode 100644 index 000000000..83794fc6f --- /dev/null +++ b/x/ibc/core/23-commitment/types/bench_test.go @@ -0,0 +1,15 @@ +package types + +import ( + "testing" +) + +func BenchmarkMerkleProofEmpty(b *testing.B) { + b.ReportAllocs() + var mk MerkleProof + for i := 0; i < b.N; i++ { + if !mk.Empty() { + b.Fatal("supposed to be empty") + } + } +} diff --git a/x/ibc/core/23-commitment/types/merkle.go b/x/ibc/core/23-commitment/types/merkle.go index fe7d5c302..e90fccc34 100644 --- a/x/ibc/core/23-commitment/types/merkle.go +++ b/x/ibc/core/23-commitment/types/merkle.go @@ -269,9 +269,14 @@ func verifyChainedMembershipProof(root []byte, specs []*ics23.ProofSpec, proofs return nil } +// blankMerkleProof and blankProofOps will be used to compare against their zero values, +// and are declared as globals to avoid having to unnecessarily re-allocate on every comparison. +var blankMerkleProof = &MerkleProof{} +var blankProofOps = &tmcrypto.ProofOps{} + // Empty returns true if the root is empty -func (proof MerkleProof) Empty() bool { - return proto.Equal(&proof, nil) || proto.Equal(&proof, &MerkleProof{}) || proto.Equal(&proof, &tmcrypto.ProofOps{}) +func (proof *MerkleProof) Empty() bool { + return proof == nil || proto.Equal(proof, blankMerkleProof) || proto.Equal(proof, blankProofOps) } // ValidateBasic checks if the proof is empty.
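A small usage sketch, not part of this patch: with the pointer receiver, Empty can be called on a nil *MerkleProof (the nil check short-circuits) and on the zero value, which is compared against the pre-allocated blank messages instead of allocating fresh ones on every call.

```go
package main

import (
	"fmt"

	commitmenttypes "github.com/cosmos/cosmos-sdk/x/ibc/core/23-commitment/types"
)

func main() {
	var nilProof *commitmenttypes.MerkleProof
	fmt.Println(nilProof.Empty()) // true: nil is handled before proto.Equal runs

	zeroProof := commitmenttypes.MerkleProof{}
	fmt.Println(zeroProof.Empty()) // true: equal to the package-level blank proof
}
```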