Merge branch 'adr-epoched-staking' of github.com:sikkatech/cosmos-sdk into adr-epoched-staking

Commit f4d328cc27 by ValarDragon, 2021-02-15 13:24:55 -06:00
1195 changed files with 102984 additions and 24928 deletions

View File

@ -11,7 +11,7 @@ set -ue
# - DEBUG
# Source builder's functions library
. /usr/local/share/cosmos-sdk/buildlib.sh
. /usr/local/share/tendermint/buildlib.sh
# These variables are now available
# - BASEDIR

View File

@ -1,64 +0,0 @@
version: 2.1
executors:
docs:
docker:
- image: tendermintdev/docker-website-deployment
environment:
AWS_REGION: us-east-1
commands:
make:
parameters:
description:
type: string
target:
type: string
steps:
- attach_workspace:
at: /tmp/workspace
- restore_cache:
name: "Restore source code cache"
keys:
- go-src-v1-{{ .Revision }}
- checkout
- restore_cache:
name: "Restore go modules cache"
keys:
- go-mod-v2-{{ checksum "go.sum" }}
- run:
name: << parameters.description >>
command: |
make << parameters.target >>
jobs:
build-docs:
executor: docs
steps:
- checkout
- run:
name: "Build docs"
command: make build-docs LEDGER_ENABLED=false
- run:
name: "Upload docs to S3"
command: make sync-docs LEDGER_ENABLED=false
workflows:
version: 2
test-suite:
jobs:
- build-docs:
context: docs-deployment-master
filters:
branches:
only:
- docs-staging
- build-docs:
context: docs-deployment-release
filters:
branches:
only:
- master
tags:
only:
- /v.*/

View File

@ -15,12 +15,15 @@ coverage:
threshold: 1% # allow this much decrease on project
app:
target: 70%
flags: app
flags:
- app
modules:
target: 70%
flags: modules
flags:
- modules
client:
flags: client
flags:
- client
changes: false
comment:

View File

@ -42,6 +42,7 @@ jobs:
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}

.github/workflows/docs.yml (new file, 31 lines added)
View File

@ -0,0 +1,31 @@
name: Documentation
# This job builds and deploys documentation to github pages.
# It runs on every push to master.
on:
push:
branches:
- master
jobs:
build-and-deploy:
runs-on: ubuntu-latest
container:
image: tendermintdev/docker-website-deployment
steps:
- name: Checkout 🛎️
uses: actions/checkout@v2.3.1
with:
persist-credentials: false
fetch-depth: 0
- name: Install and Build 🔧
run: |
apk add rsync
make build-docs LEDGER_ENABLED=false
- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@4.0.0
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BRANCH: gh-pages
FOLDER: ~/output

View File

@ -7,6 +7,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: gaurav-nelson/github-action-markdown-link-check@1.0.8
- uses: gaurav-nelson/github-action-markdown-link-check@1.0.12
with:
folder-path: "docs"

.github/workflows/proto-docker.yml (new file, 46 lines added)
View File

@ -0,0 +1,46 @@
name: Build & Push SDK Proto Builder
on:
pull_request:
push:
branches:
- master
paths:
- "contrib/devtools/dockerfile"
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Prepare
id: prep
run: |
DOCKER_IMAGE=tendermintdev/sdk-proto-gen
VERSION=noop
if [[ $GITHUB_REF == refs/tags/* ]]; then
VERSION=${GITHUB_REF#refs/tags/}
elif [[ $GITHUB_REF == refs/heads/* ]]; then
VERSION=$(echo ${GITHUB_REF#refs/heads/} | sed -r 's#/+#-#g')
if [ "${{ github.event.repository.default_branch }}" = "$VERSION" ]; then
VERSION=latest
fi
fi
TAGS="${DOCKER_IMAGE}:${VERSION}"
echo ::set-output name=tags::${TAGS}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUBTM_USERNAME }}
password: ${{ secrets.DOCKERHUBTM_TOKEN }}
- name: Publish to Docker Hub
uses: docker/build-push-action@v2
with:
context: ./contrib/devtools
file: ./contrib/devtools/dockerfile
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.prep.outputs.tags }}

View File

@ -12,10 +12,10 @@ jobs:
steps:
- uses: actions/checkout@master
- name: lint
run: make proto-lint-docker
run: make proto-lint
breakage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: check-breakage
run: make proto-check-breaking-docker
run: make proto-check-breaking

View File

@ -30,7 +30,7 @@ jobs:
- name: install runsim
run: |
export GO111MODULE="on" && go get github.com/cosmos/tools/cmd/runsim@v1.0.0
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -40,7 +40,7 @@ jobs:
needs: [build, install-runsim]
steps:
- uses: actions/checkout@v2
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary

View File

@ -39,7 +39,7 @@ jobs:
run: go version
- name: Install runsim
run: export GO111MODULE="on" && go get github.com/cosmos/tools/cmd/runsim@v1.0.0
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -60,7 +60,7 @@ jobs:
**/**.go
go.mod
go.sum
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -88,7 +88,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -116,7 +116,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -144,7 +144,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary

View File

@ -26,11 +26,31 @@ jobs:
- name: install tparse
run: |
export GO111MODULE="on" && go get github.com/mfridman/tparse@v0.8.3
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-tparse-binary
build:
runs-on: ubuntu-latest
strategy:
matrix:
go-arch: ["amd64", "arm", "arm64"]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
- uses: technote-space/get-diff-action@v4
id: git_diff
with:
PATTERNS: |
**/**.go
go.mod
go.sum
- name: Build
run: GOARCH=${{ matrix.go-arch }} LEDGER_ENABLED=false make build
test-cosmovisor:
runs-on: ubuntu-latest
steps:
@ -144,6 +164,7 @@ jobs:
run: |
excludelist="$(find ./ -type f -name '*.go' | xargs grep -l 'DONTCOVER')"
excludelist+=" $(find ./ -type f -name '*.pb.go')"
excludelist+=" $(find ./ -type f -name '*.pb.gw.go')"
excludelist+=" $(find ./ -type f -path './tests/mocks/*.go')"
for filename in ${excludelist}; do
filename=$(echo $filename | sed 's/^./github.com\/cosmos\/cosmos-sdk/g')
@ -151,7 +172,7 @@ jobs:
sed -i.bak "/$(echo $filename | sed 's/\//\\\//g')/d" coverage.txt
done
if: env.GIT_DIFF
- uses: codecov/codecov-action@v1.0.14
- uses: codecov/codecov-action@v1.2.1
with:
file: ./coverage.txt
if: env.GIT_DIFF
@ -180,13 +201,30 @@ jobs:
if: env.GIT_DIFF
- name: test & coverage report creation
run: |
cat pkgs.txt.part.${{ matrix.part }} | xargs go test -mod=readonly -timeout 30m -race -tags='cgo ledger test_ledger_mock' > ${{ matrix.part }}-race-output.txt
cat pkgs.txt.part.${{ matrix.part }} | xargs go test -mod=readonly -json -timeout 30m -race -tags='cgo ledger test_ledger_mock' > ${{ matrix.part }}-race-output.txt
if: env.GIT_DIFF
- uses: actions/upload-artifact@v2
with:
name: "${{ github.sha }}-${{ matrix.part }}-race-output"
path: ./${{ matrix.part }}-race-output.txt
test-rosetta:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v2
- uses: technote-space/get-diff-action@v4
id: git_diff
with:
PATTERNS: |
**/**.go
go.mod
go.sum
- name: test rosetta
run: |
make test-rosetta
# if: env.GIT_DIFF
race-detector-report:
runs-on: ubuntu-latest
needs: [test-race, install-tparse]
@ -216,7 +254,7 @@ jobs:
with:
name: "${{ github.sha }}-03-race-output"
if: env.GIT_DIFF
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.4
with:
path: ~/go/bin
key: ${{ runner.os }}-go-tparse-binary
@ -230,6 +268,9 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
- uses: technote-space/get-diff-action@v4
id: git_diff
with:

.gitignore (1 line changed)
View File

@ -20,7 +20,6 @@ docs/node_modules
docs/modules
dist
tools-stamp
proto-tools-stamp
buf-stamp
artifacts

View File

@ -36,37 +36,63 @@ Ref: https://keepachangelog.com/en/1.0.0/
## [Unreleased]
### Client Breaking
### Client Breaking Changes
* (x/staking) [\#7499](https://github.com/cosmos/cosmos-sdk/pull/7499) `BondStatus` is now a protobuf `enum` instead of an `int32`, and JSON serialized using its protobuf name, so expect names like `BOND_STATUS_UNBONDING` as opposed to `Unbonding`.
* (x/evidence) [\#7538](https://github.com/cosmos/cosmos-sdk/pull/7538) The ABCI's `Result.Data` field of `MsgSubmitEvidence` does not contain the raw evidence's hash, but the encoded `MsgSubmitEvidenceResponse` struct.
* (x/upgrade) [#7697](https://github.com/cosmos/cosmos-sdk/pull/7697) Rename the "--time" flag to "--upgrade-time" and "--info" to "--upgrade-info", to keep them consistent with the help messages.
* [\#8363](https://github.com/cosmos/cosmos-sdk/issues/8363) Addresses no longer have a fixed 20-byte length. From the SDK modules' point of view, any 1-255 bytes-long byte array is a valid address.
### API Breaking
### API Breaking Changes
* (AppModule) [\#7518](https://github.com/cosmos/cosmos-sdk/pull/7518) [\#7584](https://github.com/cosmos/cosmos-sdk/pull/7584) Rename `AppModule.RegisterQueryServices` to `AppModule.RegisterServices`, as this method now registers multiple services (the gRPC query service and the protobuf Msg service). A `Configurator` struct is used to hold the different services.
* (x/staking/types) [\#7447](https://github.com/cosmos/cosmos-sdk/issues/7447) Remove bech32 PubKey support:
* `ValidatorI` interface update. `GetConsPubKey` renamed to `TmConsPubKey` (consensus public key must be a tendermint key). `TmConsPubKey`, `GetConsAddr` methods return error.
* `Validator` update. Methods changed in `ValidatorI` (as described above) and `ToTmValidator` return error.
* `Validator.ConsensusPubkey` type changed from `string` to `codectypes.Any`.
* `MsgCreateValidator.Pubkey` type changed from `string` to `codectypes.Any`.
* Deprecating and renaming `MakeEncodingConfig` to `MakeTestEncodingConfig` (both in `simapp` and `simapp/params` packages).
* (client/keys) [\#8500](https://github.com/cosmos/cosmos-sdk/pull/8500) `InfoImporter` interface is removed from legacy keybase.
### Features
### State Machine Breaking
* (codec) [\#7519](https://github.com/cosmos/cosmos-sdk/pull/7519) `InterfaceRegistry` now inherits `jsonpb.AnyResolver`, and has a `RegisterCustomTypeURL` method to support ADR 031 packing of `Any`s. `AnyResolver` is now a required parameter to `RejectUnknownFields`.
* (baseapp) [\#7519](https://github.com/cosmos/cosmos-sdk/pull/7519) Add `ServiceMsgRouter` to BaseApp to handle routing of protobuf service `Msg`s. The two new types defined in ADR 031, `sdk.ServiceMsg` and `sdk.MsgRequest` are introduced with this router.
* (x/{bank,distrib,gov,slashing,staking}) [\#8363](https://github.com/cosmos/cosmos-sdk/issues/8363) Store keys have been modified to allow for variable-length addresses.
* (x/ibc) [\#8266](https://github.com/cosmos/cosmos-sdk/issues/8266) Add amino JSON for IBC messages in order to support Ledger text signing.
* (x/evidence) [\#8502](https://github.com/cosmos/cosmos-sdk/pull/8502) `HandleEquivocationEvidence` persists the evidence to state.
### Improvements
* (x/ibc) [\#8458](https://github.com/cosmos/cosmos-sdk/pull/8458) Add `packet_connection` attribute to ibc events to enable relayer filtering
* (x/bank) [\#8479](https://github.com/cosmos/cosmos-sdk/pull/8479) Additional client denom metadata validation for `base` and `display` denoms.
* (x/ibc) [\#8404](https://github.com/cosmos/cosmos-sdk/pull/8404) Reorder IBC `ChanOpenAck` and `ChanOpenConfirm` handler execution to perform core handler first, followed by application callbacks.
* [\#8396](https://github.com/cosmos/cosmos-sdk/pull/8396) Add support for ARM platform
### Bug Fixes
* (kvstore) [\#7415](https://github.com/cosmos/cosmos-sdk/pull/7415) Allow new stores to be registered during on-chain upgrades.
* (client) [\#7699](https://github.com/cosmos/cosmos-sdk/pull/7699) Fix panic in context when setting invalid nodeURI. `WithNodeURI` does not set the `Client` in the context.
* (x/evidence) [#8461](https://github.com/cosmos/cosmos-sdk/pull/8461) Fix bech32 prefix in evidence validator address conversion
* (x/slashing) [\#8427](https://github.com/cosmos/cosmos-sdk/pull/8427) Fix query signing infos command
* (simapp) [\#8418](https://github.com/cosmos/cosmos-sdk/pull/8418) Add balance coin to supply when adding a new genesis account
* (x/bank) [\#8417](https://github.com/cosmos/cosmos-sdk/pull/8417) Validate balances and coin denom metadata on genesis
* (server) [\#8399](https://github.com/cosmos/cosmos-sdk/pull/8399) fix gRPC-web flag default value
* (client/keys) [\#8436](https://github.com/cosmos/cosmos-sdk/pull/8436) Fix key migration issue
* (server) [\#8481](https://github.com/cosmos/cosmos-sdk/pull/8481) Don't create
files when running `{appd} tendermint show-*` subcommands
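To illustrate the `{appd} tendermint show-*` entry above: a minimal sketch of the affected subcommands, using `simd` (the simulation app binary used elsewhere in this repo) as a stand-in for `{appd}`. After the fix, these commands print their output without creating any files as a side effect.
# Inspect node identity and validator keys read-only; no files are created.
simd tendermint show-node-id      # node's p2p ID
simd tendermint show-validator    # consensus public key
simd tendermint show-address      # validator consensus address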
## [v0.40.1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.1) - 2021-01-19
### Improvements
* (x/bank) [\#8302](https://github.com/cosmos/cosmos-sdk/issues/8302) Add gRPC and CLI queries for client denomination metadata.
* (tendermint) Bump Tendermint version to [v0.34.3](https://github.com/tendermint/tendermint/releases/tag/v0.34.3).
### Bug Fixes
* [\#8085](https://github.com/cosmos/cosmos-sdk/pull/8058) fix zero time checks
* [\#8280](https://github.com/cosmos/cosmos-sdk/pull/8280) fix GET /upgrade/current query
* (x/auth) [\#8287](https://github.com/cosmos/cosmos-sdk/pull/8287) Fix `tx sign --signature-only` to return correct sequence value in signature.
* (build) [\#8300](https://github.com/cosmos/cosmos-sdk/pull/8300), [\#8301](https://github.com/cosmos/cosmos-sdk/pull/8301) Fix reproducible builds
* (types/errors) [\#8355](https://github.com/cosmos/cosmos-sdk/pull/8355) Fix errorWrap `Is` method.
* (x/ibc) [\#8341](https://github.com/cosmos/cosmos-sdk/pull/8341) Fix query latest consensus state.
* (proto) [\#8350](https://github.com/cosmos/cosmos-sdk/pull/8350), [\#8361](https://github.com/cosmos/cosmos-sdk/pull/8361) Update gogo proto deps with v1.3.2 security fixes
* (x/ibc) [\#8359](https://github.com/cosmos/cosmos-sdk/pull/8359) Add missing UnpackInterfaces functions to IBC Query Responses. Fixes 'cannot unpack Any' error for IBC types.
* (x/bank) [\#8317](https://github.com/cosmos/cosmos-sdk/pull/8317) Fix panic when querying for a not found client denomination metadata.
## [v0.40.0-rc0](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc0) - 2020-10-13
## [v0.40.0](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0) - 2021-01-08
v0.40.0, known as the Stargate release of the Cosmos SDK, is one of the largest releases
of the Cosmos SDK since launch. Please read through this changelog and [release notes](./RELEASE_NOTES.md) to make sure you are aware of any relevant breaking changes.
of the Cosmos SDK since launch. Please read through this changelog and [release notes](https://github.com/cosmos/cosmos-sdk/blob/v0.40.0/RELEASE_NOTES.md) to make
sure you are aware of any relevant breaking changes.
### Client Breaking Changes
@ -74,8 +100,10 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (client/keys) [\#5889](https://github.com/cosmos/cosmos-sdk/pull/5889) remove `keys update` command.
* (x/auth) [\#5844](https://github.com/cosmos/cosmos-sdk/pull/5844) `tx sign` command now returns an error when signing is attempted with offline/multisig keys.
* (x/auth) [\#6108](https://github.com/cosmos/cosmos-sdk/pull/6108) `tx sign` command's `--validate-signatures` flag is migrated into a `tx validate-signatures` standalone command.
* (x/auth) [#7788](https://github.com/cosmos/cosmos-sdk/pull/7788) Remove `tx auth` subcommands; all auth subcommands now exist as `tx <subcommand>`
* (x/genutil) [\#6651](https://github.com/cosmos/cosmos-sdk/pull/6651) The `gentx` command has been improved. No longer are `--from` and `--name` flags required. Instead, a single argument, `name`, is required which refers to the key pair in the Keyring. In addition, an optional
`--moniker` flag can be provided to override the moniker found in `config.toml`.
* (x/upgrade) [#7697](https://github.com/cosmos/cosmos-sdk/pull/7697) Rename the "--time" flag to "--upgrade-time" and "--info" to "--upgrade-info", to keep them consistent with the help messages.
* __REST / Queriers__
* (api) [\#6426](https://github.com/cosmos/cosmos-sdk/pull/6426) The ability to start an out-of-process API REST server has now been removed. Instead, the API server is now started in-process along with the application and Tendermint. Configuration options have been added to `app.toml` to enable/disable the API server along with additional HTTP server options.
* (client) [\#7246](https://github.com/cosmos/cosmos-sdk/pull/7246) The rest server endpoint `/swagger-ui/` is replaced by `/swagger/`, and contains swagger documentation for gRPC Gateway routes in addition to legacy REST routes. Swagger API is exposed only if set in `app.toml`.
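As a rough illustration of the `gentx` and `x/upgrade` flag changes listed in this hunk, the sketch below assumes `simd` as the application binary and a keyring key named `validator`; the argument order and any flags not quoted in the entries above are assumptions, not part of this changelog.
# gentx now takes the key name as an argument (no --from/--name); --moniker
# optionally overrides the moniker from config.toml.
simd gentx validator 1000000stake --moniker my-node --chain-id my-chain
# Software-upgrade proposals use the renamed flags --upgrade-time / --upgrade-info.
simd tx gov submit-proposal software-upgrade v2 \
--title "v2 upgrade" --description "upgrade to v2" \
--upgrade-time "2021-06-01T00:00:00Z" \
--upgrade-info "https://example.com/upgrade-info.json" \
--deposit 10000000stake --from validator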
@ -86,6 +114,9 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* __General__
* (baseapp) [\#6384](https://github.com/cosmos/cosmos-sdk/pull/6384) The `Result.Data` is now a Protocol Buffer encoded binary blob of type `TxData`. The `TxData` contains `Data` which contains a list of Protocol Buffer encoded message data and the corresponding message type.
* (client) [\#5783](https://github.com/cosmos/cosmos-sdk/issues/5783) Unify all coins representations on JSON client requests for governance proposals.
* (crypto) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The SDK doesn't use Tendermint's `crypto.PubKey`
interface anymore, and instead uses its own `PubKey` interface, defined in `crypto/types`. Replace all instances of
`crypto.PubKey` by `cryptotypes.Pubkey`.
* (store/rootmulti) [\#6390](https://github.com/cosmos/cosmos-sdk/pull/6390) Proofs of empty stores are no longer supported.
* (store/types) [\#5730](https://github.com/cosmos/cosmos-sdk/pull/5730) store.types.Cp() is removed in favour of types.CopyBytes().
* (x/auth) [\#6054](https://github.com/cosmos/cosmos-sdk/pull/6054) Remove custom JSON marshaling for base accounts as multsigs cannot be bech32 decoded.
@ -93,14 +124,29 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (x/bank) [\#5785](https://github.com/cosmos/cosmos-sdk/issues/5785) In x/bank errors, JSON strings coerced to valid UTF-8 bytes at JSON marshalling time
are now replaced by human-readable expressions. This change can potentially break compatibility with all those client side tools
that parse log messages.
* (x/evidence) [\#7538](https://github.com/cosmos/cosmos-sdk/pull/7538) The ABCI's `Result.Data` field for
`MsgSubmitEvidence` responses does not contain the raw evidence's hash, but the protobuf encoded
`MsgSubmitEvidenceResponse` struct.
* (x/gov) [\#7533](https://github.com/cosmos/cosmos-sdk/pull/7533) The ABCI's `Result.Data` field for
`MsgSubmitProposal` responses does not contain a raw binary encoding of the `proposalID`, but the protobuf encoded
`MsgSubmitProposalResponse` struct.
* (x/gov) [\#6859](https://github.com/cosmos/cosmos-sdk/pull/6859) `ProposalStatus` and `VoteOption` are now JSON serialized using its protobuf name, so expect names like `PROPOSAL_STATUS_DEPOSIT_PERIOD` as opposed to `DepositPeriod`.
* (x/staking) [\#7499](https://github.com/cosmos/cosmos-sdk/pull/7499) `BondStatus` is now a protobuf `enum` instead
of an `int32`, and JSON serialized using its protobuf name, so expect names like `BOND_STATUS_UNBONDING` as opposed
to `Unbonding`.
* (x/staking) [\#7556](https://github.com/cosmos/cosmos-sdk/pull/7556) The ABCI's `Result.Data` field for
`MsgBeginRedelegate` and `MsgUndelegate` responses does not contain custom binary marshaled `completionTime`, but the
protobuf encoded `MsgBeginRedelegateResponse` and `MsgUndelegateResponse` structs, respectively.
### API Breaking Changes
* __Baseapp / Client__
* (AppModule) [\#7518](https://github.com/cosmos/cosmos-sdk/pull/7518) [\#7584](https://github.com/cosmos/cosmos-sdk/pull/7584) Rename `AppModule.RegisterQueryServices` to `AppModule.RegisterServices`, as this method now registers multiple services (the gRPC query service and the protobuf Msg service). A `Configurator` struct is used to hold the different services.
* (baseapp) [\#5865](https://github.com/cosmos/cosmos-sdk/pull/5865) The `SimulationResponse` returned from tx simulation is now JSON encoded instead of Amino binary.
* (client) [\#6290](https://github.com/cosmos/cosmos-sdk/pull/6290) `CLIContext` is renamed to `Context`. `Context` and all related methods have been moved from package context to client.
* (client) [\#6525](https://github.com/cosmos/cosmos-sdk/pull/6525) Removed support for `indent` in JSON responses. Clients should consider piping to an external tool such as `jq`.
* (client) [\#8107](https://github.com/cosmos/cosmos-sdk/pull/8107) Renamed `PrintOutput` and `PrintOutputLegacy`
methods of the `context.Client` object to `PrintProto` and `PrintObjectLegacy`.
* (client/flags) [\#6632](https://github.com/cosmos/cosmos-sdk/pull/6632) Remove NewCompletionCmd(), the function is now available in tendermint.
* (client/input) [\#5904](https://github.com/cosmos/cosmos-sdk/pull/5904) Removal of unnecessary `GetCheckPassword`, `PrintPrefixed` functions.
* (client/keys) [\#5889](https://github.com/cosmos/cosmos-sdk/pull/5889) Rename `NewKeyBaseFromDir()` -> `NewLegacyKeyBaseFromDir()`.
@ -108,6 +154,10 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (client/rpc) [\#6290](https://github.com/cosmos/cosmos-sdk/pull/6290) `client` package and subdirs reorganization.
* (client/lcd) [\#6290](https://github.com/cosmos/cosmos-sdk/pull/6290) `CliCtx` of struct `RestServer` in package client/lcd has been renamed to `ClientCtx`.
* (codec) [\#6330](https://github.com/cosmos/cosmos-sdk/pull/6330) `codec.RegisterCrypto` has been moved to the `crypto/codec` package and the global `codec.Cdc` Amino instance has been deprecated and moved to the `codec/legacy_global` package.
* (codec) [\#8080](https://github.com/cosmos/cosmos-sdk/pull/8080) Updated the `codec.Marshaler` interface
* Moved `MarshalAny` and `UnmarshalAny` helper functions to `codec.Marshaler` and renamed to `MarshalInterface` and
`UnmarshalInterface` respectively. These functions must take interface as a parameter (not a concrete type nor `Any`
object). Underneath they use `Any` wrapping for correct protobuf serialization.
* (crypto) [\#6780](https://github.com/cosmos/cosmos-sdk/issues/6780) Move ledger code to its own package.
* (crypto/types/multisig) [\#6373](https://github.com/cosmos/cosmos-sdk/pull/6373) `multisig.Multisignature` has been renamed to `AminoMultisignature`
* (codec) `*codec.LegacyAmino` is now a wrapper around Amino which provides backwards compatibility with protobuf `Any`. ALL legacy code should use `*codec.LegacyAmino` instead of `*amino.Codec` directly
@ -122,6 +172,7 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
information on how to implement the new `Keyring` interface.
* [\#5858](https://github.com/cosmos/cosmos-sdk/pull/5858) Make Keyring store keys by name and address's hexbytes representation.
* (export) [\#5952](https://github.com/cosmos/cosmos-sdk/pull/5952) `AppExporter` now returns ABCI consensus parameters to be included in marshaled exported state. These parameters must be returned from the application via the `BaseApp`.
* (simapp) Deprecating and renaming `MakeEncodingConfig` to `MakeTestEncodingConfig` (both in `simapp` and `simapp/params` packages).
* (store) [\#5803](https://github.com/cosmos/cosmos-sdk/pull/5803) The `store.CommitMultiStore` interface now includes the new `snapshots.Snapshotter` interface as well.
* (types) [\#5579](https://github.com/cosmos/cosmos-sdk/pull/5579) The `keepRecent` field has been removed from the `PruningOptions` type.
The `PruningOptions` type now only includes fields `KeepEvery` and `SnapshotEvery`, where `KeepEvery`
@ -146,16 +197,46 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (modules) [\#6447](https://github.com/cosmos/cosmos-sdk/issues/6447) Rename `blacklistedAddrs` to `blockedAddrs`.
* (modules) [\#6834](https://github.com/cosmos/cosmos-sdk/issues/6834) Add `RegisterInterfaces` method to `AppModuleBasic` to support registration of protobuf interface types.
* (modules) [\#6734](https://github.com/cosmos/cosmos-sdk/issues/6834) Add `TxEncodingConfig` parameter to `AppModuleBasic.ValidateGenesis` command to support JSON tx decoding in `genutil`.
* (modules) [#7764](https://github.com/cosmos/cosmos-sdk/pull/7764) Added module initialization options:
* `server/types.AppExporter` requires extra argument: `AppOptions`.
* `server.AddCommands` requires extra argument: `addStartFlags types.ModuleInitFlags`
* `x/crisis.NewAppModule` has a new attribute: `skipGenesisInvariants`. [PR](https://github.com/cosmos/cosmos-sdk/pull/7764)
* (types) [\#6327](https://github.com/cosmos/cosmos-sdk/pull/6327) `sdk.Msg` now inherits `proto.Message`, as a result all `sdk.Msg` types now use pointer semantics.
* (types) [\#7032](https://github.com/cosmos/cosmos-sdk/pull/7032) All types ending with `ID` (e.g. `ProposalID`) now end with `Id` (e.g. `ProposalId`), to match default Protobuf generated format. Also see [\#7033](https://github.com/cosmos/cosmos-sdk/pull/7033) for more details.
* (x/auth) [\#6029](https://github.com/cosmos/cosmos-sdk/pull/6029) Module accounts have been moved from `x/supply` to `x/auth`.
* (x/auth) [\#6443](https://github.com/cosmos/cosmos-sdk/issues/6443) Move `FeeTx` and `TxWithMemo` interfaces from `x/auth/ante` to `types`.
* (x/auth) [\#7006](https://github.com/cosmos/cosmos-sdk/pull/7006) All `AccountRetriever` methods now take `client.Context` as a parameter instead of as a struct member.
* (x/auth) [\#6270](https://github.com/cosmos/cosmos-sdk/pull/6270) The passphrase argument has been removed from the signature of the following functions and methods: `BuildAndSign`, ` MakeSignature`, ` SignStdTx`, `TxBuilder.BuildAndSign`, `TxBuilder.Sign`, `TxBuilder.SignStdTx`
* (x/auth) [\#6428](https://github.com/cosmos/cosmos-sdk/issues/6428):
* `NewAnteHandler` and `NewSigVerificationDecorator` both now take a `SignModeHandler` parameter.
* `SignatureVerificationGasConsumer` now has the signature: `func(meter sdk.GasMeter, sig signing.SignatureV2, params types.Params) error`.
* The `SigVerifiableTx` interface now has a `GetSignaturesV2() ([]signing.SignatureV2, error)` method and no longer has the `GetSignBytes` method.
* (x/auth/tx) [\#8106](https://github.com/cosmos/cosmos-sdk/pull/8106) change related to missing append functionality in
client transaction signing
+ added `overwriteSig` argument to `x/auth/client.SignTx` and `client/tx.Sign` functions.
+ removed `x/auth/tx.go:wrapper.GetSignatures`. The `wrapper` provides `TxBuilder` functionality, and it's a private
structure. That function was not used at all and it's not exposed through the `TxBuilder` interface.
* (x/bank) [\#7327](https://github.com/cosmos/cosmos-sdk/pull/7327) AddCoins and SubtractCoins no longer return a resultingValue and will only return an error.
* (x/capability) [#7918](https://github.com/cosmos/cosmos-sdk/pull/7918) Add x/capability safety checks:
* All outward facing APIs will now check that capability is not nil and name is not empty before performing any state-machine changes
* `SetIndex` has been renamed to `InitializeIndex`
* (x/evidence) [\#7251](https://github.com/cosmos/cosmos-sdk/pull/7251) New evidence types and light client evidence handling. The module function names changed.
* (x/evidence) [\#5952](https://github.com/cosmos/cosmos-sdk/pull/5952) Remove APIs for getting and setting `x/evidence` parameters. `BaseApp` now uses a `ParamStore` to manage Tendermint consensus parameters which is managed via the `x/params` `Substore` type.
* (x/gov) [\#6147](https://github.com/cosmos/cosmos-sdk/pull/6147) The `Content` field on `Proposal` and `MsgSubmitProposal`
is now `Any` in concordance with [ADR 019](docs/architecture/adr-019-protobuf-state-encoding.md) and `GetContent` should now
be used to retrieve the actual proposal `Content`. Also the `NewMsgSubmitProposal` constructor now may return an `error`
* (x/ibc) [\#6374](https://github.com/cosmos/cosmos-sdk/pull/6374) `VerifyMembership` and `VerifyNonMembership` now take a `specs []string` argument to specify the proof format used for verification. Most SDK chains can simply use `commitmenttypes.GetSDKSpecs()` for this argument.
* (x/params) [\#5619](https://github.com/cosmos/cosmos-sdk/pull/5619) The `x/params` keeper now accepts a `codec.Marshaller` instead of
a reference to an amino codec. Amino is still used for JSON serialization.
* (x/staking) [\#6451](https://github.com/cosmos/cosmos-sdk/pull/6451) `DefaultParamspace` and `ParamKeyTable` in staking module are moved from keeper to types to enforce consistency.
* (x/staking) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The `TmConsPubKey` method on ValidatorI has been
removed and replaced instead by `ConsPubKey` (which returns a SDK `cryptotypes.PubKey`) and `TmConsPublicKey` (which
returns a Tendermint proto PublicKey).
* (x/staking/types) [\#7447](https://github.com/cosmos/cosmos-sdk/issues/7447) Remove bech32 PubKey support:
* `ValidatorI` interface update. `GetConsPubKey` renamed to `TmConsPubKey` (consensus public key must be a tendermint key). `TmConsPubKey`, `GetConsAddr` methods return error.
* `Validator` update. Methods changed in `ValidatorI` (as described above) and `ToTmValidator` return error.
* `Validator.ConsensusPubkey` type changed from `string` to `codectypes.Any`.
* `MsgCreateValidator.Pubkey` type changed from `string` to `codectypes.Any`.
* (x/supply) [\#6010](https://github.com/cosmos/cosmos-sdk/pull/6010) All `x/supply` types and APIs have been moved to `x/bank`.
* [\#6409](https://github.com/cosmos/cosmos-sdk/pull/6409) Rename all IsEmpty methods to Empty across the codebase and enforce consistency.
* [\#6231](https://github.com/cosmos/cosmos-sdk/pull/6231) Simplify `AppModule` interface, `Route` and `NewHandler` methods become only `Route`
@ -163,16 +244,7 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (x/slashing) [\#6212](https://github.com/cosmos/cosmos-sdk/pull/6212) Remove `Get*` prefixes from key construction functions
* (server) [\#6079](https://github.com/cosmos/cosmos-sdk/pull/6079) Remove `UpgradeOldPrivValFile` (deprecated in Tendermint Core v0.28).
* [\#5719](https://github.com/cosmos/cosmos-sdk/pull/5719) Bump Go requirement to 1.14+
* (x/evidence) [\#5952](https://github.com/cosmos/cosmos-sdk/pull/5952) Remove APIs for getting and setting `x/evidence` parameters. `BaseApp` now uses a `ParamStore` to manage Tendermint consensus parameters which is managed via the `x/params` `Substore` type.
* (x/gov) [\#6147](https://github.com/cosmos/cosmos-sdk/pull/6147) The `Content` field on `Proposal` and `MsgSubmitProposal`
is now `Any` in concordance with [ADR 019](docs/architecture/adr-019-protobuf-state-encoding.md) and `GetContent` should now
be used to retrieve the actual proposal `Content`. Also the `NewMsgSubmitProposal` constructor now may return an `error`
* (x/auth) [\#7006](https://github.com/cosmos/cosmos-sdk/pull/7006) All `AccountRetriever` methods now take `client.Context` as a parameter instead of as a struct member.
* (x/auth) [\#6270](https://github.com/cosmos/cosmos-sdk/pull/6270) The passphrase argument has been removed from the signature of the following functions and methods: `BuildAndSign`, ` MakeSignature`, ` SignStdTx`, `TxBuilder.BuildAndSign`, `TxBuilder.Sign`, `TxBuilder.SignStdTx`
* (x/auth) [\#6428](https://github.com/cosmos/cosmos-sdk/issues/6428):
* `NewAnteHandler` and `NewSigVerificationDecorator` both now take a `SignModeHandler` parameter.
* `SignatureVerificationGasConsumer` now has the signature: `func(meter sdk.GasMeter, sig signing.SignatureV2, params types.Params) error`.
* The `SigVerifiableTx` interface now has a `GetSignaturesV2() ([]signing.SignatureV2, error)` method and no longer has the `GetSignBytes` method.
### State Machine Breaking
@ -241,6 +313,8 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* Every reference of `crypto.Pubkey` in context of a `Validator` is now of type string. `GetPubKeyFromBech32` must be used to get the `crypto.Pubkey`.
* The `Keeper` constructor now takes a `codec.Marshaler` instead of a concrete Amino codec. This exact type
provided is specified by `ModuleCdc`.
* (x/staking) [\#7979](https://github.com/cosmos/cosmos-sdk/pull/7979) keeper pubkey storage serialization migration
from bech32 to protobuf.
* (x/supply) [\#6010](https://github.com/cosmos/cosmos-sdk/pull/6010) Removed the `x/supply` module by merging the existing types and APIs into the `x/bank` module.
* (x/supply) [\#5533](https://github.com/cosmos/cosmos-sdk/pull/5533) Migrate the `x/supply` module to use Protocol Buffers for state
serialization instead of Amino.
@ -260,8 +334,11 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (x/auth) [\#6213](https://github.com/cosmos/cosmos-sdk/issues/6213) Introduce new protobuf based path for transaction signing, see [ADR020](https://github.com/cosmos/cosmos-sdk/blob/master/docs/architecture/adr-020-protobuf-transaction-encoding.md) for more details
* (x/auth) [\#6350](https://github.com/cosmos/cosmos-sdk/pull/6350) New sign-batch command to sign StdTx batch files.
* (baseapp) [\#5803](https://github.com/cosmos/cosmos-sdk/pull/5803) Added support for taking state snapshots at regular height intervals, via options `snapshot-interval` and `snapshot-keep-recent`.
* (baseapp) [\#7519](https://github.com/cosmos/cosmos-sdk/pull/7519) Add `ServiceMsgRouter` to BaseApp to handle routing of protobuf service `Msg`s. The two new types defined in ADR 031, `sdk.ServiceMsg` and `sdk.MsgRequest` are introduced with this router.
* (client) [\#5921](https://github.com/cosmos/cosmos-sdk/issues/5921) Introduce new gRPC and gRPC Gateway based APIs for querying app & module data. See [ADR021](https://github.com/cosmos/cosmos-sdk/blob/master/docs/architecture/adr-021-protobuf-query-encoding.md) for more details
* (cli) [\#7485](https://github.com/cosmos/cosmos-sdk/pull/7485) Introduce a new optional `--keyring-dir` flag that allows clients to specify a Keyring directory if it does not reside in the directory specified by `--home`.
* (cli) [\#7221](https://github.com/cosmos/cosmos-sdk/pull/7221) Add the option of emitting amino encoded json from the CLI
* (codec) [\#7519](https://github.com/cosmos/cosmos-sdk/pull/7519) `InterfaceRegistry` now inherits `jsonpb.AnyResolver`, and has a `RegisterCustomTypeURL` method to support ADR 031 packing of `Any`s. `AnyResolver` is now a required parameter to `RejectUnknownFields`.
* (coin) [\#6755](https://github.com/cosmos/cosmos-sdk/pull/6755) Add custom regex validation for `Coin` denom by overwriting `CoinDenomRegex` when using `/types/coin.go`.
* (config) [\#7265](https://github.com/cosmos/cosmos-sdk/pull/7265) Support Tendermint block pruning through a new `min-retain-blocks` configuration that can be set in either `app.toml` or via the CLI. This parameter is used in conjunction with other criteria to determine the height at which Tendermint should prune blocks.
* (events) [\#7121](https://github.com/cosmos/cosmos-sdk/pull/7121) The application now derives what events are indexed by Tendermint via the `index-events` configuration in `app.toml`, which is a list of events taking the form `{eventType}.{attributeKey}`.
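A hedged sketch of the client and node options mentioned just above, again assuming `simd` as the binary; the `--min-retain-blocks` flag spelling is an assumption based on the `min-retain-blocks` configuration name, and the values shown are illustrative only.
# Keep the keyring outside of --home (cli, #7485).
simd keys add validator --keyring-dir /secure/keyring
# Prune old Tendermint blocks (config, #7265); the same setting, like the
# index-events list from #7121, can also be placed in app.toml.
simd start --min-retain-blocks 362880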
@ -270,7 +347,13 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (genesis) [\#7089](https://github.com/cosmos/cosmos-sdk/pull/7089) The `export` command now adds a `initial_height` field in the exported JSON. Baseapp's `CommitMultiStore` now also has a `SetInitialVersion` setter, so it can set the initial store version inside `InitChain` and start a new chain from a given height.
* __General__
* (crypto/multisig) [\#6241](https://github.com/cosmos/cosmos-sdk/pull/6241) Add Multisig type directly to the repo. Previously this was in tendermint.
* (codec/types) [\#8106](https://github.com/cosmos/cosmos-sdk/pull/8106) Adding `NewAnyWithCustomTypeURL` to correctly
marshal Messages in TxBuilder.
* (tests) [\#6489](https://github.com/cosmos/cosmos-sdk/pull/6489) Introduce package `testutil`, new in-process testing network framework for use in integration and unit tests.
* (tx) Add new auth/tx gRPC & gRPC-Gateway endpoints for basic querying & broadcasting support
* [\#7842](https://github.com/cosmos/cosmos-sdk/pull/7842) Add TxsByEvent gRPC endpoint
* [\#7852](https://github.com/cosmos/cosmos-sdk/pull/7852) Add tx broadcast gRPC endpoint
* (tx) [\#7688](https://github.com/cosmos/cosmos-sdk/pull/7688) Add a new Tx gRPC service with methods `Simulate` and `GetTx` (by hash).
* (store) [\#5803](https://github.com/cosmos/cosmos-sdk/pull/5803) Added `rootmulti.Store` methods for taking and restoring snapshots, based on `iavl.Store` export/import.
* (store) [\#6324](https://github.com/cosmos/cosmos-sdk/pull/6324) IAVL store query proofs now return CommitmentOp which wraps an ics23 CommitmentProof
* (store) [\#6390](https://github.com/cosmos/cosmos-sdk/pull/6390) `RootMulti` store query proofs now return `CommitmentOp` which wraps `CommitmentProofs`
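To make the new tx gRPC endpoints noted above more concrete, here is a hedged sketch using `grpcurl` against a local node; the service name `cosmos.tx.v1beta1.Service`, the request field name, and the default gRPC port 9090 are assumptions not stated in these entries.
# List the methods exposed by the tx service (Simulate, GetTx, broadcast, ...).
grpcurl -plaintext localhost:9090 list cosmos.tx.v1beta1.Service
# Fetch a transaction by hash via gRPC instead of the legacy REST endpoint.
grpcurl -plaintext -d '{"hash":"<TX_HASH>"}' localhost:9090 cosmos.tx.v1beta1.Service/GetTx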
@ -278,8 +361,12 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* `ProofRuntime` only decodes and verifies `ics23.CommitmentProof`
* __Modules__
* (modules) [\#5921](https://github.com/cosmos/cosmos-sdk/issues/5921) Introduction of Query gRPC service definitions along with REST annotations for gRPC Gateway for each module
* (modules) [\#7540](https://github.com/cosmos/cosmos-sdk/issues/7540) Protobuf service definitions can now be used for
packing `Msg`s in transactions as defined in [ADR 031](./docs/architecture/adr-031-msg-service.md). All modules now
define a `Msg` protobuf service.
* (x/auth/vesting) [\#7209](https://github.com/cosmos/cosmos-sdk/pull/7209) Create new `MsgCreateVestingAccount` message type along with CLI handler that allows for the creation of delayed and continuous vesting types.
* (x/capability) [\#5828](https://github.com/cosmos/cosmos-sdk/pull/5828) Capability module integration as outlined in [ADR 3 - Dynamic Capability Store](https://github.com/cosmos/tree/master/docs/architecture/adr-003-dynamic-capability-store.md).
* (x/crisis) `x/crisis` has a new function: `AddModuleInitFlags`, which will register optional crisis module flags for the start command.
* (x/ibc) [\#5277](https://github.com/cosmos/cosmos-sdk/pull/5277) `x/ibc` changes from IBC alpha. For more details check the [`x/ibc/core/spec`](https://github.com/cosmos/cosmos-sdk/tree/master/x/ibc/core/spec) directory, or the ICS specs below:
* [ICS 002 - Client Semantics](https://github.com/cosmos/ics/tree/master/spec/ics-002-client-semantics) subpackage
* [ICS 003 - Connection Semantics](https://github.com/cosmos/ics/blob/master/spec/ics-003-connection-semantics) subpackage
@ -299,10 +386,14 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* __Baseapp / Client / REST__
* (client) [\#5964](https://github.com/cosmos/cosmos-sdk/issues/5964) `--trust-node` is now false by default - for real. Users must ensure it is set to true if they don't want to enable the verifier.
* (client) [\#6402](https://github.com/cosmos/cosmos-sdk/issues/6402) Fix `keys add` `--algo` flag which only worked for Tendermint's `secp256k1` default key signing algorithm.
* (client) [\#7699](https://github.com/cosmos/cosmos-sdk/pull/7699) Fix panic in context when setting invalid nodeURI. `WithNodeURI` does not set the `Client` in the context.
* (export) [\#6510](https://github.com/cosmos/cosmos-sdk/pull/6510/) Field TimeIotaMs now is included in genesis file while exporting.
* (rest) [\#5906](https://github.com/cosmos/cosmos-sdk/pull/5906) Fix an issue that make some REST calls panic when sending invalid or incomplete requests.
* (crypto) [\#7966](https://github.com/cosmos/cosmos-sdk/issues/7966) `Bip44Params` `String()` function now correctly
returns the absolute HD path by adding the `m/` prefix.
* (crypto/keyring) [\#5844](https://github.com/cosmos/cosmos-sdk/pull/5844) `Keyring.Sign()` methods no longer decode amino signatures when method receivers
are offline/multisig keys.
* (store) [\#7415](https://github.com/cosmos/cosmos-sdk/pull/7415) Allow new stores to be registered during on-chain upgrades.
* __Modules__
* (modules) [\#5569](https://github.com/cosmos/cosmos-sdk/issues/5569) `InitGenesis`, for the relevant modules, now ensures module accounts exist.
* (x/auth) [\#5892](https://github.com/cosmos/cosmos-sdk/pull/5892) Add `RegisterKeyTypeCodec` to register new
@ -310,6 +401,8 @@ of the Cosmos SDK since launch. Please read through this changelog and [release
* (x/bank) [\#6536](https://github.com/cosmos/cosmos-sdk/pull/6536) Fix bug in `WriteGeneratedTxResponse` function used by multiple
REST endpoints. Now it writes a Tx in StdTx format.
* (x/genutil) [\#5938](https://github.com/cosmos/cosmos-sdk/pull/5938) Fix `InitializeNodeValidatorFiles` error handling.
* (x/gentx) [\#8183](https://github.com/cosmos/cosmos-sdk/pull/8183) change gentx cmd amount to arg from flag
* (x/gov) [#7641](https://github.com/cosmos/cosmos-sdk/pull/7641) Fix tally calculation precision error.
* (x/staking) [\#6529](https://github.com/cosmos/cosmos-sdk/pull/6529) Export validator addresses (previously was empty).
* (x/staking) [\#5949](https://github.com/cosmos/cosmos-sdk/pull/5949) Skip staking `HistoricalInfoKey` in simulations as headers are not exported.
* (x/staking) [\#6061](https://github.com/cosmos/cosmos-sdk/pull/6061) Allow a validator to immediately unjail when no signing info is present due to
@ -328,8 +421,13 @@ falling below their minimum self-delegation and never having been bonded. The va
internet connection. Previously, `--generate-only` served this purpose in addition to only allowing txs to be generated. Now, `--generate-only` solely
allows txs to be generated without being broadcasted and disallows Keybase use and `--offline` allows the use of Keybase but does not allow any
functionality that requires an online connection.
* (cli) [#7764](https://github.com/cosmos/cosmos-sdk/pull/7764) Update x/bank and x/crisis InitChain to improve node startup time
* (client) [\#5856](https://github.com/cosmos/cosmos-sdk/pull/5856) Added the possibility to set `--offline` flag with config command.
* (client) [\#5895](https://github.com/cosmos/cosmos-sdk/issues/5895) show config options in the config command's help screen.
* (client/keys) [\#8043](https://github.com/cosmos/cosmos-sdk/pull/8043) Add support for export of unarmored private key
* (client/tx) [\#7801](https://github.com/cosmos/cosmos-sdk/pull/7801) Update sign-batch multisig to work online
* (x/genutil) [\#8099](https://github.com/cosmos/cosmos-sdk/pull/8099) `init` now supports a `--recover` flag to recover
the private validator key from a given mnemonic
* __Modules__
* (x/auth) [\#5702](https://github.com/cosmos/cosmos-sdk/pull/5702) Add parameter querying support for `x/auth`.
* (x/auth/ante) [\#6040](https://github.com/cosmos/cosmos-sdk/pull/6040) `AccountKeeper` interface used for `NewAnteHandler` and handler's decorators to add support of using custom `AccountKeeper` implementations.
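The CLI-facing improvements in this hunk can be sketched as follows, assuming `simd` and a key named `mykey`; the `--unarmored-hex`/`--unsafe` flag spellings and the account number/sequence values are assumptions for illustration.
# Recover the private validator key from an existing mnemonic at init time (#8099).
simd init my-node --chain-id my-chain --recover
# Export a key; #8043 adds support for unarmored private key export.
simd keys export mykey --unarmored-hex --unsafe
# --generate-only now only produces an unsigned tx (no keyring use, no broadcast);
# --offline permits keyring use while forbidding anything that needs a connection.
simd tx bank send mykey "$RECIPIENT_ADDR" 100stake --generate-only > unsigned.json
simd tx sign unsigned.json --from mykey --offline --chain-id my-chain \
--account-number 4 --sequence 12 > signed.json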
@ -341,11 +439,23 @@ falling below their minimum self-delegation and never having been bonded. The va
* (x/staking) [\#5584](https://github.com/cosmos/cosmos-sdk/pull/5584) Add util function `ToTmValidator` that converts a `staking.Validator` type to `*tmtypes.Validator`.
* (x/staking) [\#6163](https://github.com/cosmos/cosmos-sdk/pull/6163) CLI and REST call to unbonding delegations and delegations now accept
pagination.
* (x/staking) [\#8178](https://github.com/cosmos/cosmos-sdk/pull/8178) Update default historical header number for stargate
* __General__
* (tendermint) [\#6365](https://github.com/cosmos/cosmos-sdk/issues/6365) Update tendermint version to v0.34, and make necessary upgrades to the SDK
* (crypto) [\#7987](https://github.com/cosmos/cosmos-sdk/pull/7987) Fix the inconsistency of CryptoCdc, only use
`codec/legacy.Cdc`.
* (logging) [\#8072](https://github.com/cosmos/cosmos-sdk/pull/8072) Refactor logging:
* Use [zerolog](https://github.com/rs/zerolog) over Tendermint's go-kit logging wrapper.
* Introduce Tendermint's `--log_format=plain|json` flag. Using format `json` allows for emitting structured JSON
logs which can be consumed by an external logging facility (e.g. Loggly). Both formats log to STDERR.
* The existing `--log_level` flag and its default value now relate solely to the global logging
level (e.g. `info`, `debug`, etc.) instead of `<module>:<level>`.
* (rest) [#7649](https://github.com/cosmos/cosmos-sdk/pull/7649) Return an unsigned tx in legacy GET /tx endpoint when signature conversion fails
* (simulation) [\#6002](https://github.com/cosmos/cosmos-sdk/pull/6002) Add randomized consensus params into simulation.
* (store) [\#6481](https://github.com/cosmos/cosmos-sdk/pull/6481) Move `SimpleProofsFromMap` from Tendermint into the SDK.
* (store) [\#6719](https://github.com/cosmos/cosmos-sdk/6754) Add validity checks to stores for nil and empty keys.
* (SDK) Updated dependencies
* Updated iavl dependency to v0.15.3
* Update tendermint to v0.34.1
* (types) [\#7027](https://github.com/cosmos/cosmos-sdk/pull/7027) `Coin(s)` and `DecCoin(s)` updates:
* Bump denomination max length to 128
* Allow uppercase letters and numbers in denominations to support [ADR 001](./docs/architecture/adr-001-coin-source-tracing.md)
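A quick sketch of the logging flags introduced by the refactor noted above (#8072), assuming `simd` as the binary; both formats write to STDERR as described.
# Structured JSON logs for an external collector such as Loggly.
simd start --log_level info --log_format json
# Human-readable logs; --log_level is now a single global level, not <module>:<level>.
simd start --log_level debug --log_format plain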
@ -359,6 +469,8 @@ falling below their minimum self-delegation and never having been bonded. The va
* (types) [\#6128](https://github.com/cosmos/cosmos-sdk/pull/6137) Add `String()` method to `GasMeter`.
* (types) [\#6195](https://github.com/cosmos/cosmos-sdk/pull/6195) Add codespace to broadcast(sync/async) response.
* (types) \#6897 Add KV type from tendermint to `types` directory.
* (version) [\#7848](https://github.com/cosmos/cosmos-sdk/pull/7848) [\#7941](https://github.com/cosmos/cosmos-sdk/pull/7941)
`version --long` output now shows the list of build dependencies and replaced build dependencies.
## [v0.39.1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.39.1) - 2020-08-11
@ -651,6 +763,7 @@ generalized genesis accounts through the `GenesisAccount` interface.
* (sdk) [\#4758](https://github.com/cosmos/cosmos-sdk/issues/4758) update `x/genaccounts` to match module spec
* (simulation) [\#4824](https://github.com/cosmos/cosmos-sdk/issues/4824) `PrintAllInvariants` flag will print all failed invariants
* (simulation) [\#4490](https://github.com/cosmos/cosmos-sdk/issues/4490) add `InitialBlockHeight` flag to resume a simulation from a given block
* Support exporting the simulation stats to a given JSON file
* (simulation) [\#4847](https://github.com/cosmos/cosmos-sdk/issues/4847), [\#4838](https://github.com/cosmos/cosmos-sdk/pull/4838) and [\#4869](https://github.com/cosmos/cosmos-sdk/pull/4869) `SimApp` and simulation refactors:
* Implement `SimulationManager` for executing modules' simulation functionalities in a modularized way
@ -964,6 +1077,7 @@ that error is that the account doesn't exist.
* (simulation) PrintAllInvariants flag will print all failed invariants
* (simulation) Add `InitialBlockHeight` flag to resume a simulation from a given block
* (simulation) [\#4670](https://github.com/cosmos/cosmos-sdk/issues/4670) Update simulation statistics to JSON format
- Support exporting the simulation stats to a given JSON file
* [\#4775](https://github.com/cosmos/cosmos-sdk/issues/4775) Refactor CI config
* Upgrade IAVL to v0.12.4
@ -1569,7 +1683,8 @@ BREAKING CHANGES
FEATURES
* Gaia REST API
* [\#2358](https://github.com/cosmos/cosmos-sdk/issues/2358) Add distribution module REST interface
* [\#2358](https://github.com/cosmos/cosmos-sdk/issues/2358) Add distribution module REST interface
* Gaia CLI (`gaiacli`)
* [\#3429](https://github.com/cosmos/cosmos-sdk/issues/3429) Support querying

View File

@ -142,11 +142,15 @@ build, in which case we can fall back on `go mod tidy -v`.
We use [Protocol Buffers](https://developers.google.com/protocol-buffers) along with [gogoproto](https://github.com/gogo/protobuf) to generate code for use in Cosmos-SDK.
For deterministic behavior around Protobuf tooling, everything is containerized using Docker. Make sure to have Docker installed on your machine, or head to [Docker's website](https://docs.docker.com/get-docker/) to install it.
For formatting code in `.proto` files, you can run `make proto-format` command.
For linting and checking breaking changes, we use [buf](https://buf.build/). There are two options for linting and to check if your changes will cause a break. The first is that you can install [buf](https://buf.build/docs/installation) locally, the commands for running buf after installing are `make proto-lint` and the breaking changes check will be `make proto-check-breaking`. If you do not want to install buf and have docker installed already then you can use these commands `make proto-lint-docker` and `make proto-check-breaking-docker`.
For linting and checking breaking changes, we use [buf](https://buf.build/). You can use the commands `make proto-lint` and `make proto-check-breaking` to respectively lint your proto files and check for breaking changes.
To generate the protobuf stubs you must have `protoc` and `protoc-gen-gocosmos` installed. To install these tools run `make proto-tools`. After this step you will be able to run `make proto-gen` to generate the protobuf stubs.
To generate the protobuf stubs, you can run `make proto-gen`.
We also added the `make proto-all` command to run all the above commands sequentially.
In order for imports to properly compile in your IDE, you may need to manually set your protobuf path in your IDE's workspace settings/config.
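Putting the Protobuf tooling described above together, a typical local workflow looks like the following (run from the repository root with Docker available; these are exactly the make targets quoted above):
make proto-format          # format .proto files
make proto-lint            # lint with buf
make proto-check-breaking  # check for breaking changes against the base branch
make proto-gen             # regenerate the protobuf stubs (containerized via Docker)
make proto-all             # run all of the above sequentially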
@ -214,7 +218,7 @@ only pull requests targeted directly against master.
### Development Procedure
- the latest state of development is on `master`
- `master` must never fail `make test` or `make test_cli`
- `master` must never fail `make lint test test-race`
- `master` should not fail `make lint`
- no `--force` onto `master` (except when reverting a broken commit, which should seldom happen)
- create a development branch either on github.com/cosmos/cosmos-sdk, or your fork (using `git remote add origin`)
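For example, a minimal development loop following the procedure above might look like this (branch name illustrative):
git checkout master && git pull
git checkout -b feature/my-change   # development branch on cosmos/cosmos-sdk or your fork
# ...commit your work...
make lint test test-race            # master must never fail these targets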
@ -223,7 +227,7 @@ only pull requests targeted directly against master.
### Pull Merge Procedure
- ensure pull branch is rebased on `master`
- run `make test` and `make test_cli` to ensure that all tests pass
- run `make test` to ensure that all tests pass
- merge pull request
### Release Procedure
@ -268,8 +272,8 @@ and PRs are merged into `master`, if a contributor wishes the PR to be released
- **[Impact]** Explanation of how the bug affects users or developers.
- **[Test Case]** section with detailed instructions on how to reproduce the bug.
- **[Regression Potential]** section with a discussion how regressions are most likely to manifest, or might
manifest even if it's unlikely, as a result of the change. **It is assumed that any SRU candidate PR is
well-tested before it is merged in and has an overall low risk of regression**.
manifest even if it's unlikely, as a result of the change. **It is assumed that any SRU candidate PR is
well-tested before it is merged in and has an overall low risk of regression**.
It is the PR's author's responsibility to fix merge conflicts, update changelog entries, and
ensure CI passes. If a PR originates from an external contributor, it may be a core team member's
@ -280,7 +284,7 @@ Finally, when a point release is ready to be made:
1. Create `release/v0.38.N` branch
2. Ensure changelog entries are verified
2. Be sure changelog entries are added to `RELEASE_CHANGELOG.md`
1. Be sure changelog entries are added to `RELEASE_CHANGELOG.md`
3. Add release version date to the changelog
4. Push release branch along with the annotated tag: **git tag -a**
5. Create a PR into `master` containing ONLY `CHANGELOG.md` updates
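Expressed as a hedged shell sketch (version numbers illustrative), the point-release steps above are roughly:
git checkout -b release/v0.38.N                 # 1. create the release branch
# 2./3. verify changelog entries, copy them into RELEASE_CHANGELOG.md, add the release date
git tag -a v0.38.N -m "v0.38.N"                 # 4. annotated tag
git push origin release/v0.38.N v0.38.N
# 5. open a PR into master containing ONLY the CHANGELOG.md updates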
@ -302,9 +306,11 @@ new code owners is as follows: On a bi-monthly basis (or more frequently if
agreeable) all the existing code owners will privately convene to discuss
potential new candidates as well as the potential for existing code-owners to
exit or "pass on the torch". This private meeting is to be a held as a
phone/video meeting. Subsequently at the end of the meeting, one of the existing
code owners should open a PR modifying the `CODEOWNERS` file. The other code
owners should then all approve this PR to publicly display their support.
phone/video meeting.
Subsequently after the meeting, and pending final approval from the ICF,
one of the existing code owners should open a PR modifying the `CODEOWNERS` file.
The other code owners should then all approve this PR to publicly display their support.
Only if unanimous consensus is reached among all the existing code-owners will
an invitation be extended to a new potential-member. Likewise, when an existing
@ -314,6 +320,95 @@ should be taken. If however, a code-owner is demonstrably shown to intentionally
have had acted maliciously or grossly negligent, code-owner privileges may be
stripped with no prior warning or consent from the member in question.
Other potential removal criteria:
* Missing 3 scheduled meetings results in ICF evaluating whether the member should be
removed / replaced
* Violation of Code of Conduct
Earning this privilege should be considered to be no small feat and is by no
means guaranteed by any quantifiable metric. It is a symbol of great trust of
the community of this project.
## Concept & Release Approval Process
The process for how Cosmos SDK maintainers take features and ADRs from concept to release
is broken up into three distinct stages: **Strategy Discovery**, **Concept Approval**, and
**Implementation & Release Approval**
### Strategy Discovery
* Develop long term priorities, strategy and roadmap for the SDK
* Release committee not yet defined as there is already a roadmap that can be used for the time being
### Concept Approval
* Architecture Decision Records (ADRs) may be proposed by any contributors or maintainers of the Cosmos SDK,
and should follow the guidelines outlined in the
[ADR Creation Process](https://github.com/cosmos/cosmos-sdk/blob/master/docs/architecture/PROCESS.md)
* After proposal, a time bound period for Request for Comment (RFC) on ADRs commences
* ADRs are intended to be iterative, and may be merged into `master` while still in a `Proposed` status
**Time Bound Period**
* Once a PR for an ADR is opened, reviewers are expected to perform a first
review within 1 week of the pull request being opened
* Time bound period for individual ADR Pull Requests to be merged should not exceed 2 weeks
* Total time bound period for an ADR to reach a decision (`ABANDONED | ACCEPTED | REJECTED`) should not exceed 4 weeks
If an individual Pull Request for an ADR needs more time than 2 weeks to reach resolution, it should be merged
in current state (`Draft` or `Proposed`), with its contents updated to summarize
the current state of its discussion.
If an ADR is taking longer than 4 weeks to reach a final conclusion, the **Concept Approval Committee**
should convene to rectify the situation by either:
- unanimously setting a new time bound period for this ADR
- making changes to the Concept Approval Process (as outlined here)
- making changes to the members of the Concept Approval Committee
**Approval Committee & Decision Making**
In the absence of general consensus, decision making requires ⅔ vote from the three members
of the **Concept Approval Committee**.
**Committee Members**
* Core Members: **Aaron** (Regen), **Bez** (Fission), **Alessio** (AiB)
* Secondary pool of candidates to replace / substitute:
* **Chris Goes** (IG), **Sunny** (Sikka)
**Committee Criteria**
Members must:
* Participate in all or almost all ADR discussions, both on Github as well as in bi-weekly Architecture Review
meetings
* Be active contributors to the SDK, continuously making substantial contributions
to the project's codebase, review process, documentation and ADRs
* Have stake in the Cosmos SDK project, represented by:
* Being a client / user of the Cosmos SDK
* "[giving back](https://www.debian.org/social_contract)" to the software
* Delegate representation in case of vacation or absence
Code owners need to maintain participation in the process, ideally as **Concept Approval Committee**
members, but at the very least as active participants in ADR discussions
Removal criteria:
* Missing 3 meetings results in the ICF evaluating whether the member should be removed / replaced
* Violation of Code of Conduct
### Implementation & Release Approval
The following process should be adhered to both for implementation PRs corresponding to ADRs and
for PRs made as part of a release process:
* Code reviewers should ensure the PR does exactly what the ADR said it should
* Code reviewers should have more senior engineering capability
* ⅔ approval is required from the **primary repo maintainers** in `CODEOWNERS`
* Secondary pool of candidates to replace / substitute are listed as **secondary repo maintainers** in `CODEOWNERS`
*Note: For any major or minor release series denoted as a "Stable Release" (e.g. v0.39 "Launchpad"), a separate release
committee is often established. Stable Releases and their corresponding release committees are documented
separately in [STABLE_RELEASES.md](./STABLE_RELEASES.md)*

Makefile
View File

@ -10,7 +10,8 @@ BUILDDIR ?= $(CURDIR)/build
SIMAPP = ./simapp
MOCKS_DIR = $(CURDIR)/tests/mocks
HTTPS_GIT := https://github.com/cosmos/cosmos-sdk.git
DOCKER_BUF := docker run -v $(shell pwd):/workspace --workdir /workspace bufbuild/buf
DOCKER := $(shell which docker)
DOCKER_BUF := $(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace bufbuild/buf
export GO111MODULE = on
@ -112,26 +113,26 @@ $(BUILDDIR)/:
mkdir -p $(BUILDDIR)/
build-simd-all: go.sum
docker rm latest-build || true
docker run --volume=$(CURDIR):/sources:ro \
$(DOCKER) rm latest-build || true
$(DOCKER) run --volume=$(CURDIR):/sources:ro \
--env TARGET_PLATFORMS='linux/amd64 darwin/amd64 linux/arm64 windows/amd64' \
--env APP=simd \
--env VERSION=$(VERSION) \
--env COMMIT=$(COMMIT) \
--env LEDGER_ENABLED=$(LEDGER_ENABLED) \
--name latest-build cosmossdk/rbuilder:latest
docker cp -a latest-build:/home/builder/artifacts/ $(CURDIR)/
$(DOCKER) cp -a latest-build:/home/builder/artifacts/ $(CURDIR)/
build-simd-linux: go.sum $(BUILDDIR)/
docker rm latest-build || true
docker run --volume=$(CURDIR):/sources:ro \
$(DOCKER) rm latest-build || true
$(DOCKER) run --volume=$(CURDIR):/sources:ro \
--env TARGET_PLATFORMS='linux/amd64' \
--env APP=simd \
--env VERSION=$(VERSION) \
--env COMMIT=$(COMMIT) \
--env LEDGER_ENABLED=false \
--name latest-build cosmossdk/rbuilder:latest
docker cp -a latest-build:/home/builder/artifacts/ $(CURDIR)/
$(DOCKER) cp -a latest-build:/home/builder/artifacts/ $(CURDIR)/
cp artifacts/simd-*-linux-amd64 $(BUILDDIR)/simd
cosmovisor:
@ -175,6 +176,16 @@ go.sum: go.mod
### Documentation ###
###############################################################################
update-swagger-docs: statik
$(BINDIR)/statik -src=client/docs/swagger-ui -dest=client/docs -f -m
@if [ -n "$$(git status --porcelain)" ]; then \
echo "\033[91mSwagger docs are out of sync!!!\033[0m";\
exit 1;\
else \
echo "\033[92mSwagger docs are in sync\033[0m";\
fi
.PHONY: update-swagger-docs
godocs:
@echo "--> Wait a few seconds and visit http://localhost:6060/pkg/github.com/cosmos/cosmos-sdk/types"
godoc -http=:6060
@ -190,14 +201,7 @@ build-docs:
cp -r .vuepress/dist/* ~/output/$${path_prefix}/ ; \
cp ~/output/$${path_prefix}/index.html ~/output ; \
done < versions ;
sync-docs:
cd ~/output && \
echo "role_arn = ${DEPLOYMENT_ROLE_ARN}" >> /root/.aws/config ; \
echo "CI job = ${CIRCLE_BUILD_URL}" >> version.html ; \
aws s3 sync . s3://${WEBSITE_BUCKET} --profile terraform --delete ; \
aws cloudfront create-invalidation --distribution-id ${CF_DISTRIBUTION_ID} --profile terraform --path "/*" ;
.PHONY: sync-docs
.PHONY: build-docs
###############################################################################
### Tests & Simulation ###
@ -305,6 +309,11 @@ test-cover:
@export VERSION=$(VERSION); bash -x contrib/test_cover.sh
.PHONY: test-cover
test-rosetta:
docker build -t rosetta-ci:latest -f contrib/rosetta/node/Dockerfile .
docker-compose -f contrib/rosetta/docker-compose.yaml up --abort-on-container-exit --exit-code-from test_rosetta --build
.PHONY: test-rosetta
benchmark:
@go test -mod=readonly -bench=. $(PACKAGES_NOSIMULATION)
.PHONY: benchmark
@ -333,12 +342,12 @@ format:
DEVDOC_SAVE = docker commit `docker ps -a -n 1 -q` devdoc:local
devdoc-init:
docker run -it -v "$(CURDIR):/go/src/github.com/cosmos/cosmos-sdk" -w "/go/src/github.com/cosmos/cosmos-sdk" tendermint/devdoc echo
$(DOCKER) run -it -v "$(CURDIR):/go/src/github.com/cosmos/cosmos-sdk" -w "/go/src/github.com/cosmos/cosmos-sdk" tendermint/devdoc echo
# TODO make this safer
$(call DEVDOC_SAVE)
devdoc:
docker run -it -v "$(CURDIR):/go/src/github.com/cosmos/cosmos-sdk" -w "/go/src/github.com/cosmos/cosmos-sdk" devdoc:local bash
$(DOCKER) run -it -v "$(CURDIR):/go/src/github.com/cosmos/cosmos-sdk" -w "/go/src/github.com/cosmos/cosmos-sdk" devdoc:local bash
devdoc-save:
# TODO make this safer
@ -356,49 +365,42 @@ devdoc-update:
### Protobuf ###
###############################################################################
proto-all: proto-tools proto-gen proto-lint proto-check-breaking proto-swagger-gen proto-format
proto-all: proto-format proto-lint proto-gen
proto-gen:
@./scripts/protocgen.sh
@echo "Generating Protobuf files"
$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace tendermintdev/sdk-proto-gen sh ./scripts/protocgen.sh
proto-format:
@echo "Formatting Protobuf files"
docker run -v $(shell pwd):/workspace \
$(DOCKER) run --rm -v $(CURDIR):/workspace \
--workdir /workspace tendermintdev/docker-build-proto \
find ./ -not -path "./third_party/*" -name *.proto -exec clang-format -i {} \;
.PHONY: proto-format
# This generates the SDK's custom wrapper for google.protobuf.Any. It should only be run manually when needed
proto-gen-any:
@./scripts/protocgen-any.sh
$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace tendermintdev/sdk-proto-gen sh ./scripts/protocgen-any.sh
proto-swagger-gen:
@./scripts/protoc-swagger-gen.sh
proto-lint:
@buf check lint --error-format=json
@$(DOCKER_BUF) check lint --error-format=json
proto-check-breaking:
@buf check breaking --against-input '.git#branch=master'
proto-lint-docker:
@$(DOCKER_BUF) check lint --error-format=json
.PHONY: proto-lint
proto-check-breaking-docker:
@$(DOCKER_BUF) check breaking --against-input $(HTTPS_GIT)#branch=master
.PHONY: proto-check-breaking-ci
TM_URL = https://raw.githubusercontent.com/tendermint/tendermint/v0.34.0-rc5/proto/tendermint
GOGO_PROTO_URL = https://raw.githubusercontent.com/regen-network/protobuf/cosmos
COSMOS_PROTO_URL = https://raw.githubusercontent.com/regen-network/cosmos-proto/master
CONFIO_URL = https://raw.githubusercontent.com/confio/ics23/v0.6.3
TM_URL = https://raw.githubusercontent.com/tendermint/tendermint/v0.34.0-rc6/proto/tendermint
GOGO_PROTO_URL = https://raw.githubusercontent.com/regen-network/protobuf/cosmos
COSMOS_PROTO_URL = https://raw.githubusercontent.com/regen-network/cosmos-proto/master
CONFIO_URL = https://raw.githubusercontent.com/confio/ics23/v0.6.3
TM_CRYPTO_TYPES = third_party/proto/tendermint/crypto
TM_ABCI_TYPES = third_party/proto/tendermint/abci
TM_TYPES = third_party/proto/tendermint/types
TM_VERSION = third_party/proto/tendermint/version
TM_LIBS = third_party/proto/tendermint/libs/bits
TM_TYPES = third_party/proto/tendermint/types
TM_VERSION = third_party/proto/tendermint/version
TM_LIBS = third_party/proto/tendermint/libs/bits
TM_P2P = third_party/proto/tendermint/p2p
GOGO_PROTO_TYPES = third_party/proto/gogoproto
COSMOS_PROTO_TYPES = third_party/proto/cosmos_proto
@ -426,6 +428,7 @@ proto-update-deps:
@curl -sSL $(TM_URL)/types/evidence.proto > $(TM_TYPES)/evidence.proto
@curl -sSL $(TM_URL)/types/params.proto > $(TM_TYPES)/params.proto
@curl -sSL $(TM_URL)/types/validator.proto > $(TM_TYPES)/validator.proto
@curl -sSL $(TM_URL)/types/block.proto > $(TM_TYPES)/block.proto
@mkdir -p $(TM_CRYPTO_TYPES)
@curl -sSL $(TM_URL)/crypto/proof.proto > $(TM_CRYPTO_TYPES)/proof.proto
@ -434,13 +437,16 @@ proto-update-deps:
@mkdir -p $(TM_LIBS)
@curl -sSL $(TM_URL)/libs/bits/types.proto > $(TM_LIBS)/types.proto
@mkdir -p $(TM_P2P)
@curl -sSL $(TM_URL)/p2p/types.proto > $(TM_P2P)/types.proto
@mkdir -p $(CONFIO_TYPES)
@curl -sSL $(CONFIO_URL)/proofs.proto > $(CONFIO_TYPES)/proofs.proto
## insert go package option into proofs.proto file
## Issue link: https://github.com/confio/ics23/issues/32
@sed -i '4ioption go_package = "github.com/confio/ics23/go";' $(CONFIO_TYPES)/proofs.proto
.PHONY: proto-all proto-gen proto-lint proto-check-breaking proto-update-deps
.PHONY: proto-all proto-gen proto-gen-any proto-swagger-gen proto-format proto-lint proto-check-breaking proto-update-deps
###############################################################################
### Localnet ###
@ -448,8 +454,8 @@ proto-update-deps:
# Run a 4-node testnet locally
localnet-start: build-linux localnet-stop
$(if $(shell docker inspect -f '{{ .Id }}' cosmossdk/simd-env 2>/dev/null),$(info found image cosmossdk/simd-env),$(MAKE) -C contrib/images simd-env)
if ! [ -f build/node0/simd/config/genesis.json ]; then docker run --rm \
$(if $(shell $(DOCKER) inspect -f '{{ .Id }}' cosmossdk/simd-env 2>/dev/null),$(info found image cosmossdk/simd-env),$(MAKE) -C contrib/images simd-env)
if ! [ -f build/node0/simd/config/genesis.json ]; then $(DOCKER) run --rm \
--user $(shell id -u):$(shell id -g) \
-v $(BUILDDIR):/simd:Z \
-v /etc/group:/etc/group:ro \
@ -462,3 +468,15 @@ localnet-stop:
docker-compose down
.PHONY: localnet-start localnet-stop
###############################################################################
### rosetta ###
###############################################################################
# builds rosetta test data dir
rosetta-data:
-docker container rm data_dir_build
docker build -t rosetta-ci:latest -f contrib/rosetta/node/Dockerfile .
docker run --name data_dir_build -t rosetta-ci:latest sh /rosetta/data.sh
docker cp data_dir_build:/tmp/data.tar.gz "$(CURDIR)/contrib/rosetta/node/data.tar.gz"
docker container rm data_dir_build
.PHONY: rosetta-data

View File

@ -286,12 +286,12 @@ func (app *BaseApp) Commit() (res abci.ResponseCommit) {
header := app.deliverState.ctx.BlockHeader()
retainHeight := app.GetBlockRetentionHeight(header.Height)
// Write the DeliverTx state which is cache-wrapped and commit the MultiStore.
// Write the DeliverTx state into branched storage and commit the MultiStore.
// The write to the DeliverTx state writes all state transitions to the root
// MultiStore (app.cms) so when Commit() is called it persists those values.
app.deliverState.ms.Write()
commitID := app.cms.Commit()
app.logger.Debug("Commit synced", "commit", fmt.Sprintf("%X", commitID))
app.logger.Info("commit synced", "commit", fmt.Sprintf("%X", commitID))
// Reset the Check state to the latest committed.
//
@ -354,30 +354,52 @@ func (app *BaseApp) halt() {
// snapshot takes a snapshot of the current state and prunes any old snapshots.
func (app *BaseApp) snapshot(height int64) {
app.logger.Info("Creating state snapshot", "height", height)
snapshot, err := app.snapshotManager.Create(uint64(height))
if err != nil {
app.logger.Error("Failed to create state snapshot", "height", height, "err", err)
if app.snapshotManager == nil {
app.logger.Info("snapshot manager not configured")
return
}
app.logger.Info("Completed state snapshot", "height", height, "format", snapshot.Format)
app.logger.Info("creating state snapshot", "height", height)
snapshot, err := app.snapshotManager.Create(uint64(height))
if err != nil {
app.logger.Error("failed to create state snapshot", "height", height, "err", err)
return
}
app.logger.Info("completed state snapshot", "height", height, "format", snapshot.Format)
if app.snapshotKeepRecent > 0 {
app.logger.Debug("Pruning state snapshots")
app.logger.Debug("pruning state snapshots")
pruned, err := app.snapshotManager.Prune(app.snapshotKeepRecent)
if err != nil {
app.logger.Error("Failed to prune state snapshots", "err", err)
return
}
app.logger.Debug("Pruned state snapshots", "pruned", pruned)
app.logger.Debug("pruned state snapshots", "pruned", pruned)
}
}
// Query implements the ABCI interface. It delegates to CommitMultiStore if it
// implements Queryable.
func (app *BaseApp) Query(req abci.RequestQuery) abci.ResponseQuery {
func (app *BaseApp) Query(req abci.RequestQuery) (res abci.ResponseQuery) {
defer telemetry.MeasureSince(time.Now(), "abci", "query")
// Add panic recovery for all queries.
// ref: https://github.com/cosmos/cosmos-sdk/pull/8039
defer func() {
if r := recover(); r != nil {
res = sdkerrors.QueryResult(sdkerrors.Wrapf(sdkerrors.ErrPanic, "%v", r))
}
}()
// when a client did not provide a query height, manually inject the latest
if req.Height == 0 {
req.Height = app.LastBlockHeight()
}
// handle gRPC routes first rather than calling splitPath because '/' characters
// are used as part of gRPC paths
if grpcHandler := app.grpcQueryRouter.Route(req.Path); grpcHandler != nil {
@ -416,13 +438,14 @@ func (app *BaseApp) ListSnapshots(req abci.RequestListSnapshots) abci.ResponseLi
snapshots, err := app.snapshotManager.List()
if err != nil {
app.logger.Error("Failed to list snapshots", "err", err)
app.logger.Error("failed to list snapshots", "err", err)
return resp
}
for _, snapshot := range snapshots {
abciSnapshot, err := snapshot.ToABCI()
if err != nil {
app.logger.Error("Failed to list snapshots", "err", err)
app.logger.Error("failed to list snapshots", "err", err)
return resp
}
resp.Snapshots = append(resp.Snapshots, &abciSnapshot)
@ -438,8 +461,13 @@ func (app *BaseApp) LoadSnapshotChunk(req abci.RequestLoadSnapshotChunk) abci.Re
}
chunk, err := app.snapshotManager.LoadChunk(req.Height, req.Format, req.Chunk)
if err != nil {
app.logger.Error("Failed to load snapshot chunk", "height", req.Height, "format", req.Format,
"chunk", req.Chunk, "err")
app.logger.Error(
"failed to load snapshot chunk",
"height", req.Height,
"format", req.Format,
"chunk", req.Chunk,
"err", err,
)
return abci.ResponseLoadSnapshotChunk{}
}
return abci.ResponseLoadSnapshotChunk{Chunk: chunk}
@ -447,16 +475,22 @@ func (app *BaseApp) LoadSnapshotChunk(req abci.RequestLoadSnapshotChunk) abci.Re
// OfferSnapshot implements the ABCI interface. It delegates to app.snapshotManager if set.
func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOfferSnapshot {
if app.snapshotManager == nil {
app.logger.Error("snapshot manager not configured")
return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_ABORT}
}
if req.Snapshot == nil {
app.logger.Error("Received nil snapshot")
app.logger.Error("received nil snapshot")
return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT}
}
snapshot, err := snapshottypes.SnapshotFromABCI(req.Snapshot)
if err != nil {
app.logger.Error("Failed to decode snapshot metadata", "err", err)
app.logger.Error("failed to decode snapshot metadata", "err", err)
return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT}
}
err = app.snapshotManager.Restore(snapshot)
switch {
case err == nil:
@ -466,13 +500,22 @@ func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOf
return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT_FORMAT}
case errors.Is(err, snapshottypes.ErrInvalidMetadata):
app.logger.Error("Rejecting invalid snapshot", "height", req.Snapshot.Height,
"format", req.Snapshot.Format, "err", err)
app.logger.Error(
"rejecting invalid snapshot",
"height", req.Snapshot.Height,
"format", req.Snapshot.Format,
"err", err,
)
return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT}
default:
app.logger.Error("Failed to restore snapshot", "height", req.Snapshot.Height,
"format", req.Snapshot.Format, "err", err)
app.logger.Error(
"failed to restore snapshot",
"height", req.Snapshot.Height,
"format", req.Snapshot.Format,
"err", err,
)
// We currently don't support resetting the IAVL stores and retrying a different snapshot,
// so we ask Tendermint to abort all snapshot restoration.
return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_ABORT}
@ -481,14 +524,23 @@ func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOf
// ApplySnapshotChunk implements the ABCI interface. It delegates to app.snapshotManager if set.
func (app *BaseApp) ApplySnapshotChunk(req abci.RequestApplySnapshotChunk) abci.ResponseApplySnapshotChunk {
if app.snapshotManager == nil {
app.logger.Error("snapshot manager not configured")
return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ABORT}
}
_, err := app.snapshotManager.RestoreChunk(req.Chunk)
switch {
case err == nil:
return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ACCEPT}
case errors.Is(err, snapshottypes.ErrChunkHashMismatch):
app.logger.Error("Chunk checksum mismatch, rejecting sender and requesting refetch",
"chunk", req.Index, "sender", req.Sender, "err", err)
app.logger.Error(
"chunk checksum mismatch; rejecting sender and requesting refetch",
"chunk", req.Index,
"sender", req.Sender,
"err", err,
)
return abci.ResponseApplySnapshotChunk{
Result: abci.ResponseApplySnapshotChunk_RETRY,
RefetchChunks: []uint32{req.Index},
@ -496,7 +548,7 @@ func (app *BaseApp) ApplySnapshotChunk(req abci.RequestApplySnapshotChunk) abci.
}
default:
app.logger.Error("Failed to restore snapshot", "err", err)
app.logger.Error("failed to restore snapshot", "err", err)
return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ABORT}
}
}
@ -537,9 +589,24 @@ func gRPCErrorToSDKError(err error) error {
}
}
func checkNegativeHeight(height int64) error {
if height < 0 {
// Reject invalid heights.
return sdkerrors.Wrap(
sdkerrors.ErrInvalidRequest,
"cannot query with height < 0; please provide a valid height",
)
}
return nil
}
// createQueryContext creates a new sdk.Context for a query, taking as args
// the block height and whether the query needs a proof or not.
func (app *BaseApp) createQueryContext(height int64, prove bool) (sdk.Context, error) {
if err := checkNegativeHeight(height); err != nil {
return sdk.Context{}, err
}
// when a client did not provide a query height, manually inject the latest
if height == 0 {
height = app.LastBlockHeight()
@ -562,7 +629,7 @@ func (app *BaseApp) createQueryContext(height int64, prove bool) (sdk.Context, e
)
}
// cache wrap the commit-multistore for safety
// branch the commit-multistore for safety
ctx := sdk.NewContext(
cacheMS, app.checkState.ctx.BlockHeader(), true, app.logger,
).WithMinGasPrices(app.minGasPrices)
@ -713,11 +780,6 @@ func handleQueryStore(app *BaseApp, path []string, req abci.RequestQuery) abci.R
req.Path = "/" + strings.Join(path[1:], "/")
// when a client did not provide a query height, manually inject the latest
if req.Height == 0 {
req.Height = app.LastBlockHeight()
}
if req.Height <= 1 && req.Prove {
return sdkerrors.QueryResult(
sdkerrors.Wrap(

View File

@ -1,6 +1,7 @@
package baseapp
import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
@ -116,3 +117,25 @@ func TestGetBlockRentionHeight(t *testing.T) {
})
}
}
// Test and ensure that negative heights always cause errors.
// See issue https://github.com/cosmos/cosmos-sdk/issues/7662.
func TestBaseAppCreateQueryContextRejectsNegativeHeights(t *testing.T) {
t.Parallel()
logger := defaultLogger()
db := dbm.NewMemDB()
name := t.Name()
app := NewBaseApp(name, logger, db, nil)
proves := []bool{
false, true,
}
for _, prove := range proves {
t.Run(fmt.Sprintf("prove=%t", prove), func(t *testing.T) {
sctx, err := app.createQueryContext(-10, prove)
require.Error(t, err)
require.Equal(t, sctx, sdk.Context{})
})
}
}

View File

@ -357,8 +357,8 @@ func (app *BaseApp) Seal() { app.sealed = true }
// IsSealed returns true if the BaseApp is sealed and false otherwise.
func (app *BaseApp) IsSealed() bool { return app.sealed }
// setCheckState sets the BaseApp's checkState with a cache-wrapped multi-store
// (i.e. a CacheMultiStore) and a new Context with the cache-wrapped multi-store,
// setCheckState sets the BaseApp's checkState with a branched multi-store
// (i.e. a CacheMultiStore) and a new Context with the same multi-store branch,
// provided header, and minimum gas prices set. It is set on InitChain and reset
// on Commit.
func (app *BaseApp) setCheckState(header tmproto.Header) {
@ -369,8 +369,8 @@ func (app *BaseApp) setCheckState(header tmproto.Header) {
}
}
// setDeliverState sets the BaseApp's deliverState with a cache-wrapped multi-store
// (i.e. a CacheMultiStore) and a new Context with the cache-wrapped multi-store,
// setDeliverState sets the BaseApp's deliverState with a branched multi-store
// (i.e. a CacheMultiStore) and a new Context with the same multi-store branch,
// and provided header. It is set on InitChain and BeginBlock and set to nil on
// Commit.
func (app *BaseApp) setDeliverState(header tmproto.Header) {
@ -532,7 +532,7 @@ func (app *BaseApp) getContextForTx(mode runTxMode, txBytes []byte) sdk.Context
}
// cacheTxContext returns a new context based off of the provided context with
// a cache wrapped multi-store.
// a branched multi-store.
func (app *BaseApp) cacheTxContext(ctx sdk.Context, txBytes []byte) (sdk.Context, sdk.CacheMultiStore) {
ms := ctx.MultiStore()
// TODO: https://github.com/cosmos/cosmos-sdk/issues/2824
@ -620,7 +620,7 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte) (gInfo sdk.GasInfo, re
msCache sdk.CacheMultiStore
)
// Cache wrap context before AnteHandler call in case it aborts.
// Branch context before AnteHandler call in case it aborts.
// This is required for both CheckTx and DeliverTx.
// Ref: https://github.com/cosmos/cosmos-sdk/issues/2772
//
@ -632,9 +632,8 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte) (gInfo sdk.GasInfo, re
newCtx, err := app.anteHandler(anteCtx, tx, mode == runTxModeSimulate)
if !newCtx.IsZero() {
// At this point, newCtx.MultiStore() is cache-wrapped, or something else
// replaced by the AnteHandler. We want the original multistore, not one
// which was cache-wrapped for the AnteHandler.
// At this point, newCtx.MultiStore() is a store branch, or something else
// replaced by the AnteHandler. We want the original multistore.
//
// Also, in the case of the tx aborting, we need to track gas consumed via
// the instantiated gas meter in the AnteHandler, so we update the context
@ -654,9 +653,9 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte) (gInfo sdk.GasInfo, re
msCache.Write()
}
// Create a new Context based off of the existing Context with a cache-wrapped
// MultiStore in case message processing fails. At this point, the MultiStore
// is doubly cached-wrapped.
// Create a new Context based off of the existing Context with a MultiStore branch
// in case message processing fails. At this point, the MultiStore
// is a branch of a branch.
runMsgCtx, msCache := app.cacheTxContext(ctx, txBytes)
// Attempt to execute all messages and only update state if all messages pass

View File

@ -10,7 +10,6 @@ import (
"google.golang.org/grpc/encoding/proto"
"github.com/cosmos/cosmos-sdk/client/grpc/reflection"
"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -54,13 +53,31 @@ func (qrt *GRPCQueryRouter) Route(path string) GRPCQueryHandler {
}
// RegisterService implements the gRPC Server.RegisterService method. sd is a gRPC
// service description, handler is an object which implements that gRPC service
// service description, handler is an object which implements that gRPC service.
//
// This function PANICS:
// - if a protobuf service is registered twice.
func (qrt *GRPCQueryRouter) RegisterService(sd *grpc.ServiceDesc, handler interface{}) {
// adds a top-level query handler based on the gRPC service name
for _, method := range sd.Methods {
fqName := fmt.Sprintf("/%s/%s", sd.ServiceName, method.MethodName)
methodHandler := method.Handler
// Check that each service is only registered once. If a service is
// registered more than once, then we should error. Since we can't
// return an error (`Server.RegisterService` interface restriction) we
// panic (at startup).
_, found := qrt.routes[fqName]
if found {
panic(
fmt.Errorf(
"gRPC query service %s has already been registered. Please make sure to only register each service once. "+
"This usually means that there are conflicting modules registering the same gRPC query service",
fqName,
),
)
}
qrt.routes[fqName] = func(ctx sdk.Context, req abci.RequestQuery) (abci.ResponseQuery, error) {
// call the method handler from the service description with the handler object,
// a wrapped sdk.Context with proto-unmarshaled data from the ABCI request data
@ -110,14 +127,3 @@ func (qrt *GRPCQueryRouter) SetInterfaceRegistry(interfaceRegistry codectypes.In
reflection.NewReflectionServiceServer(interfaceRegistry),
)
}
// RegisterSimulateService registers the simulate service on the gRPC router.
func (qrt *GRPCQueryRouter) RegisterSimulateService(
simulateFn simulate.BaseAppSimulateFn,
interfaceRegistry codectypes.InterfaceRegistry,
) {
simulate.RegisterSimulateServiceServer(
qrt,
simulate.NewSimulateServer(simulateFn, interfaceRegistry),
)
}
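For context on the duplicate-registration guard added above, a sketch (hypothetical module wiring; `types.RegisterQueryServer` stands for the module's generated registration helper and `am.keeper` for its query server implementation) of the one-time registration modules are expected to do:

	// RegisterServices wires the module's gRPC query service exactly once;
	// registering the same service a second time now panics at startup.
	func (am AppModule) RegisterServices(cfg module.Configurator) {
		types.RegisterQueryServer(cfg.QueryServer(), am.keeper)
	}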

View File

@ -18,7 +18,7 @@ import (
// service client.
type QueryServiceTestHelper struct {
*GRPCQueryRouter
ctx sdk.Context
Ctx sdk.Context
}
var (
@ -31,7 +31,7 @@ var (
func NewQueryServerTestHelper(ctx sdk.Context, interfaceRegistry types.InterfaceRegistry) *QueryServiceTestHelper {
qrt := NewGRPCQueryRouter()
qrt.SetInterfaceRegistry(interfaceRegistry)
return &QueryServiceTestHelper{GRPCQueryRouter: qrt, ctx: ctx}
return &QueryServiceTestHelper{GRPCQueryRouter: qrt, Ctx: ctx}
}
// Invoke implements the grpc ClientConn.Invoke method
@ -45,7 +45,7 @@ func (q *QueryServiceTestHelper) Invoke(_ gocontext.Context, method string, args
return err
}
res, err := querier(q.ctx, abci.RequestQuery{Data: reqBz})
res, err := querier(q.Ctx, abci.RequestQuery{Data: reqBz})
if err != nil {
return err
}

View File

@ -1,24 +1,29 @@
package baseapp
package baseapp_test
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/require"
"github.com/tendermint/tendermint/libs/log"
dbm "github.com/tendermint/tm-db"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/simapp"
"github.com/cosmos/cosmos-sdk/testutil/testdata"
sdk "github.com/cosmos/cosmos-sdk/types"
)
func TestGRPCRouter(t *testing.T) {
qr := NewGRPCQueryRouter()
func TestGRPCGatewayRouter(t *testing.T) {
qr := baseapp.NewGRPCQueryRouter()
interfaceRegistry := testdata.NewTestInterfaceRegistry()
qr.SetInterfaceRegistry(interfaceRegistry)
testdata.RegisterQueryServer(qr, testdata.QueryImpl{})
helper := &QueryServiceTestHelper{
helper := &baseapp.QueryServiceTestHelper{
GRPCQueryRouter: qr,
ctx: sdk.Context{}.WithContext(context.Background()),
Ctx: sdk.Context{}.WithContext(context.Background()),
}
client := testdata.NewQueryClient(helper)
@ -44,3 +49,28 @@ func TestGRPCRouter(t *testing.T) {
require.NotNil(t, res3)
require.Equal(t, spot, res3.HasAnimal.Animal.GetCachedValue())
}
func TestRegisterQueryServiceTwice(t *testing.T) {
// Setup baseapp.
db := dbm.NewMemDB()
encCfg := simapp.MakeTestEncodingConfig()
app := baseapp.NewBaseApp("test", log.NewTMLogger(log.NewSyncWriter(os.Stdout)), db, encCfg.TxConfig.TxDecoder())
app.SetInterfaceRegistry(encCfg.InterfaceRegistry)
testdata.RegisterInterfaces(encCfg.InterfaceRegistry)
// First time registering service shouldn't panic.
require.NotPanics(t, func() {
testdata.RegisterQueryServer(
app.GRPCQueryRouter(),
testdata.QueryImpl{},
)
})
// Second time should panic.
require.Panics(t, func() {
testdata.RegisterQueryServer(
app.GRPCQueryRouter(),
testdata.QueryImpl{},
)
})
}

View File

@ -5,12 +5,15 @@ import (
"strconv"
gogogrpc "github.com/gogo/protobuf/grpc"
grpcmiddleware "github.com/grpc-ecosystem/go-grpc-middleware"
grpcrecovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"
)
@ -33,6 +36,11 @@ func (app *BaseApp) RegisterGRPCServer(server gogogrpc.Server) {
if heightHeaders := md.Get(grpctypes.GRPCBlockHeightHeader); len(heightHeaders) > 0 {
height, err = strconv.ParseInt(heightHeaders[0], 10, 64)
if err != nil {
return nil, sdkerrors.Wrapf(
sdkerrors.ErrInvalidRequest,
"Baseapp.RegisterGRPCServer: invalid height header %q: %v", grpctypes.GRPCBlockHeightHeader, err)
}
if err := checkNegativeHeight(height); err != nil {
return nil, err
}
}
@ -68,7 +76,10 @@ func (app *BaseApp) RegisterGRPCServer(server gogogrpc.Server) {
newMethods[i] = grpc.MethodDesc{
MethodName: method.MethodName,
Handler: func(srv interface{}, ctx context.Context, dec func(interface{}) error, _ grpc.UnaryServerInterceptor) (interface{}, error) {
return methodHandler(srv, ctx, dec, interceptor)
return methodHandler(srv, ctx, dec, grpcmiddleware.ChainUnaryServer(
grpcrecovery.UnaryServerInterceptor(),
interceptor,
))
},
}
}
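An assumed-usage sketch of the height header handled above (imports of `context`, `strconv`, `google.golang.org/grpc/metadata` and `grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"` are implied; `queryAtHeight` is a hypothetical helper): a gRPC client pins a query to a historical, non-negative height by setting the x-cosmos-block-height metadata that RegisterGRPCServer parses.

	// queryAtHeight returns a context whose outgoing gRPC metadata asks the SDK
	// server to run the query against state at the given (non-negative) height.
	func queryAtHeight(ctx context.Context, height int64) context.Context {
		return metadata.AppendToOutgoingContext(
			ctx, grpctypes.GRPCBlockHeightHeader, strconv.FormatInt(height, 10),
		)
	}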

View File

@ -40,8 +40,10 @@ func (msr *MsgServiceRouter) Handler(methodName string) MsgServiceHandler {
// RegisterService implements the gRPC Server.RegisterService method. sd is a gRPC
// service description, handler is an object which implements that gRPC service.
//
// This function PANICs if it is called before the service `Msg`s have been
// registered using RegisterInterfaces.
// This function PANICs:
// - if it is called before the service `Msg`s have been registered using
// RegisterInterfaces,
// - or if a service is being registered twice.
func (msr *MsgServiceRouter) RegisterService(sd *grpc.ServiceDesc, handler interface{}) {
// Adds a top-level query handler based on the gRPC service name.
for _, method := range sd.Methods {
@ -66,6 +68,21 @@ func (msr *MsgServiceRouter) RegisterService(sd *grpc.ServiceDesc, handler inter
)
}
// Check that each service is only registered once. If a service is
// registered more than once, then we should error. Since we can't
// return an error (`Server.RegisterService` interface restriction) we
// panic (at startup).
_, found := msr.routes[fqMethod]
if found {
panic(
fmt.Errorf(
"msg service %s has already been registered. Please make sure to only register each service once. "+
"This usually means that there are conflicting modules registering the same msg service",
fqMethod,
),
)
}
msr.routes[fqMethod] = func(ctx sdk.Context, req sdk.MsgRequest) (*sdk.Result, error) {
ctx = ctx.WithEventManager(sdk.NewEventManager())
interceptor := func(goCtx context.Context, _ interface{}, _ *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {

View File

@ -18,7 +18,7 @@ import (
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
)
func TestRegisterService(t *testing.T) {
func TestRegisterMsgService(t *testing.T) {
db := dbm.NewMemDB()
// Create an encoding config that doesn't register testdata Msg services.
@ -42,6 +42,31 @@ func TestRegisterService(t *testing.T) {
})
}
func TestRegisterMsgServiceTwice(t *testing.T) {
// Setup baseapp.
db := dbm.NewMemDB()
encCfg := simapp.MakeTestEncodingConfig()
app := baseapp.NewBaseApp("test", log.NewTMLogger(log.NewSyncWriter(os.Stdout)), db, encCfg.TxConfig.TxDecoder())
app.SetInterfaceRegistry(encCfg.InterfaceRegistry)
testdata.RegisterInterfaces(encCfg.InterfaceRegistry)
// First time registering service shouldn't panic.
require.NotPanics(t, func() {
testdata.RegisterMsgServer(
app.MsgServiceRouter(),
testdata.MsgServerImpl{},
)
})
// Second time should panic.
require.Panics(t, func() {
testdata.RegisterMsgServer(
app.MsgServiceRouter(),
testdata.MsgServerImpl{},
)
})
}
func TestMsgService(t *testing.T) {
priv, _, _ := testdata.KeyTestPubAddr()
encCfg := simapp.MakeTestEncodingConfig()

View File

@ -204,6 +204,10 @@ func (app *BaseApp) SetSnapshotStore(snapshotStore *snapshots.Store) {
if app.sealed {
panic("SetSnapshotStore() on sealed BaseApp")
}
if snapshotStore == nil {
app.snapshotManager = nil
return
}
app.snapshotManager = snapshots.NewManager(snapshotStore, app.cms)
}
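A brief sketch of the effect of the nil guard above (the `logger`, `db` and `txDecoder` values are assumed to exist): passing nil now disables state-sync snapshots cleanly instead of wrapping a nil store in a manager.

	app := baseapp.NewBaseApp("demo", logger, db, txDecoder)
	app.SetSnapshotStore(nil) // no snapshot manager; snapshot ABCI calls report "snapshot manager not configured"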

View File

@ -64,7 +64,7 @@ func ValidateEvidenceParams(i interface{}) error {
}
if v.MaxBytes < 0 {
return fmt.Errorf("maximum evidence bytes must be positive: %v", v.MaxBytes)
return fmt.Errorf("maximum evidence bytes must be non-negative: %v", v.MaxBytes)
}
return nil

View File

@ -37,7 +37,10 @@ func TestValidateEvidenceParams(t *testing.T) {
{&tmproto.EvidenceParams{}, true},
{tmproto.EvidenceParams{}, true},
{tmproto.EvidenceParams{MaxAgeNumBlocks: -1, MaxAgeDuration: 18004000, MaxBytes: 5000000}, true},
{tmproto.EvidenceParams{MaxAgeNumBlocks: 360000, MaxAgeDuration: -1, MaxBytes: 5000000}, true},
{tmproto.EvidenceParams{MaxAgeNumBlocks: 360000, MaxAgeDuration: 18004000, MaxBytes: -1}, true},
{tmproto.EvidenceParams{MaxAgeNumBlocks: 360000, MaxAgeDuration: 18004000, MaxBytes: 5000000}, false},
{tmproto.EvidenceParams{MaxAgeNumBlocks: 360000, MaxAgeDuration: 18004000, MaxBytes: 0}, false},
}
for _, tc := range testCases {

View File

@ -1,3 +1,5 @@
version: v1beta1
build:
roots:
- proto

View File

@ -1,15 +1,14 @@
package client
import (
"github.com/tendermint/tendermint/crypto"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
)
// Account defines a read-only version of the auth module's AccountI.
type Account interface {
GetAddress() sdk.AccAddress
GetPubKey() crypto.PubKey // can return nil.
GetPubKey() cryptotypes.PubKey // can return nil.
GetAccountNumber() uint64
GetSequence() uint64
}

View File

@ -7,10 +7,13 @@ import (
"github.com/tendermint/tendermint/crypto/tmhash"
"github.com/tendermint/tendermint/mempool"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"github.com/cosmos/cosmos-sdk/client/flags"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
"github.com/cosmos/cosmos-sdk/types/tx"
)
// BroadcastTx broadcasts a transaction either synchronously or asynchronously
@ -92,23 +95,14 @@ func (ctx Context) BroadcastTxCommit(txBytes []byte) (*sdk.TxResponse, error) {
}
res, err := node.BroadcastTxCommit(context.Background(), txBytes)
if err != nil {
if errRes := CheckTendermintError(err, txBytes); errRes != nil {
return errRes, nil
}
return sdk.NewResponseFormatBroadcastTxCommit(res), err
}
if !res.CheckTx.IsOK() {
if err == nil {
return sdk.NewResponseFormatBroadcastTxCommit(res), nil
}
if !res.DeliverTx.IsOK() {
return sdk.NewResponseFormatBroadcastTxCommit(res), nil
if errRes := CheckTendermintError(err, txBytes); errRes != nil {
return errRes, nil
}
return sdk.NewResponseFormatBroadcastTxCommit(res), nil
return sdk.NewResponseFormatBroadcastTxCommit(res), err
}
// BroadcastTxSync broadcasts transaction bytes to a Tendermint node
@ -142,3 +136,36 @@ func (ctx Context) BroadcastTxAsync(txBytes []byte) (*sdk.TxResponse, error) {
return sdk.NewResponseFormatBroadcastTx(res), err
}
// TxServiceBroadcast is a helper function to broadcast a Tx with the correct gRPC types
// from the tx service. Calls `clientCtx.BroadcastTx` under the hood.
func TxServiceBroadcast(grpcCtx context.Context, clientCtx Context, req *tx.BroadcastTxRequest) (*tx.BroadcastTxResponse, error) {
if req == nil || req.TxBytes == nil {
return nil, status.Error(codes.InvalidArgument, "invalid empty tx")
}
clientCtx = clientCtx.WithBroadcastMode(normalizeBroadcastMode(req.Mode))
resp, err := clientCtx.BroadcastTx(req.TxBytes)
if err != nil {
return nil, err
}
return &tx.BroadcastTxResponse{
TxResponse: resp,
}, nil
}
// normalizeBroadcastMode converts a broadcast mode into a normalized string
// to be passed into the clientCtx.
func normalizeBroadcastMode(mode tx.BroadcastMode) string {
switch mode {
case tx.BroadcastMode_BROADCAST_MODE_ASYNC:
return "async"
case tx.BroadcastMode_BROADCAST_MODE_BLOCK:
return "block"
case tx.BroadcastMode_BROADCAST_MODE_SYNC:
return "sync"
default:
return "unspecified"
}
}
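An assumed-usage sketch of the gRPC broadcast path added above (a populated `clientCtx` and signed `txBytes` are presumed; error handling is abbreviated): the enum mode is normalized to the string the client context understands and the result comes back in tx service types.

	req := &tx.BroadcastTxRequest{
		TxBytes: txBytes,
		Mode:    tx.BroadcastMode_BROADCAST_MODE_BLOCK, // normalized to "block" by normalizeBroadcastMode
	}
	res, err := client.TxServiceBroadcast(context.Background(), clientCtx, req)
	if err != nil {
		return err
	}
	fmt.Println(res.TxResponse.TxHash)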

View File

@ -11,6 +11,7 @@ import (
rpchttp "github.com/tendermint/tendermint/rpc/client/http"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -145,7 +146,7 @@ func ReadPersistentCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Cont
return clientCtx, nil
}
// ReadQueryCommandFlags returns an updated Context with fields set based on flags
// readQueryCommandFlags returns an updated Context with fields set based on flags
// defined in AddQueryFlagsToCmd. An error is returned if any flag query fails.
//
// Note, the provided clientCtx may have field pre-populated. The following order
@ -155,7 +156,7 @@ func ReadPersistentCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Cont
// - client.Context field not pre-populated & flag set: uses set flag value
// - client.Context field pre-populated & flag not set: uses pre-populated value
// - client.Context field pre-populated & flag set: uses set flag value
func ReadQueryCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context, error) {
func readQueryCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context, error) {
if clientCtx.Height == 0 || flagSet.Changed(flags.FlagHeight) {
height, _ := flagSet.GetInt64(flags.FlagHeight)
clientCtx = clientCtx.WithHeight(height)
@ -169,7 +170,7 @@ func ReadQueryCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context,
return ReadPersistentCommandFlags(clientCtx, flagSet)
}
// ReadTxCommandFlags returns an updated Context with fields set based on flags
// readTxCommandFlags returns an updated Context with fields set based on flags
// defined in AddTxFlagsToCmd. An error is returned if any flag query fails.
//
// Note, the provided clientCtx may have field pre-populated. The following order
@ -179,7 +180,7 @@ func ReadQueryCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context,
// - client.Context field not pre-populated & flag set: uses set flag value
// - client.Context field pre-populated & flag not set: uses pre-populated value
// - client.Context field pre-populated & flag set: uses set flag value
func ReadTxCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context, error) {
func readTxCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context, error) {
clientCtx, err := ReadPersistentCommandFlags(clientCtx, flagSet)
if err != nil {
return clientCtx, err
@ -215,19 +216,69 @@ func ReadTxCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context, err
clientCtx = clientCtx.WithSkipConfirmation(skipConfirm)
}
if clientCtx.SignModeStr == "" || flagSet.Changed(flags.FlagSignMode) {
signModeStr, _ := flagSet.GetString(flags.FlagSignMode)
clientCtx = clientCtx.WithSignModeStr(signModeStr)
}
if clientCtx.FeeGranter == nil || flagSet.Changed(flags.FlagFeeAccount) {
granter, _ := flagSet.GetString(flags.FlagFeeAccount)
if granter != "" {
granterAcc, err := sdk.AccAddressFromBech32(granter)
if err != nil {
return clientCtx, err
}
clientCtx = clientCtx.WithFeeGranterAddress(granterAcc)
}
}
if clientCtx.From == "" || flagSet.Changed(flags.FlagFrom) {
from, _ := flagSet.GetString(flags.FlagFrom)
fromAddr, fromName, err := GetFromFields(clientCtx.Keyring, from, clientCtx.GenerateOnly)
fromAddr, fromName, keyType, err := GetFromFields(clientCtx.Keyring, from, clientCtx.GenerateOnly)
if err != nil {
return clientCtx, err
}
clientCtx = clientCtx.WithFrom(from).WithFromAddress(fromAddr).WithFromName(fromName)
// If the `from` signer account is a ledger key, we need to use
// SIGN_MODE_AMINO_JSON, because ledger doesn't support proto yet.
// ref: https://github.com/cosmos/cosmos-sdk/issues/8109
if keyType == keyring.TypeLedger && clientCtx.SignModeStr != flags.SignModeLegacyAminoJSON {
fmt.Println("Default sign-mode 'direct' not supported by Ledger, using sign-mode 'amino-json'.")
clientCtx = clientCtx.WithSignModeStr(flags.SignModeLegacyAminoJSON)
}
}
return clientCtx, nil
}
// GetClientQueryContext returns a Context from a command with fields set based on flags
// defined in AddQueryFlagsToCmd. An error is returned if any flag query fails.
//
// - client.Context field not pre-populated & flag not set: uses default flag value
// - client.Context field not pre-populated & flag set: uses set flag value
// - client.Context field pre-populated & flag not set: uses pre-populated value
// - client.Context field pre-populated & flag set: uses set flag value
func GetClientQueryContext(cmd *cobra.Command) (Context, error) {
ctx := GetClientContextFromCmd(cmd)
return readQueryCommandFlags(ctx, cmd.Flags())
}
// GetClientTxContext returns a Context from a command with fields set based on flags
// defined in AddTxFlagsToCmd. An error is returned if any flag query fails.
//
// - client.Context field not pre-populated & flag not set: uses default flag value
// - client.Context field not pre-populated & flag set: uses set flag value
// - client.Context field pre-populated & flag not set: uses pre-populated value
// - client.Context field pre-populated & flag set: uses set flag value
func GetClientTxContext(cmd *cobra.Command) (Context, error) {
ctx := GetClientContextFromCmd(cmd)
return readTxCommandFlags(ctx, cmd.Flags())
}
// GetClientContextFromCmd returns a Context from a command or an empty Context
// if it has not been set.
func GetClientContextFromCmd(cmd *cobra.Command) Context {

View File

@ -64,13 +64,8 @@ func TestSetCmdClientContextHandler(t *testing.T) {
return client.SetCmdClientContextHandler(initClientCtx, cmd)
},
RunE: func(cmd *cobra.Command, _ []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
_, err := client.ReadTxCommandFlags(clientCtx, cmd.Flags())
if err != nil {
return err
}
return nil
_, err := client.GetClientTxContext(cmd)
return err
},
}
@ -102,8 +97,7 @@ func TestSetCmdClientContextHandler(t *testing.T) {
tc := tc
t.Run(tc.name, func(t *testing.T) {
ctx := context.Background()
ctx = context.WithValue(ctx, client.ClientContextKey, &client.Context{})
ctx := context.WithValue(context.Background(), client.ClientContextKey, &client.Context{})
cmd := newCmd()
_ = testutil.ApplyMockIODiscardOutErr(cmd)

View File

@ -35,6 +35,7 @@ type Context struct {
From string
BroadcastMode string
FromName string
SignModeStr string
UseLedger bool
Simulate bool
GenerateOnly bool
@ -43,6 +44,7 @@ type Context struct {
TxConfig TxConfig
AccountRetriever AccountRetriever
NodeURI string
FeeGranter sdk.AccAddress
// TODO: Deprecated (remove).
LegacyAmino *codec.LegacyAmino
@ -165,6 +167,13 @@ func (ctx Context) WithFromAddress(addr sdk.AccAddress) Context {
return ctx
}
// WithFeeGranterAddress returns a copy of the context with an updated fee granter account
// address.
func (ctx Context) WithFeeGranterAddress(addr sdk.AccAddress) Context {
ctx.FeeGranter = addr
return ctx
}
// WithBroadcastMode returns a copy of the context with an updated broadcast
// mode.
func (ctx Context) WithBroadcastMode(mode string) Context {
@ -172,6 +181,13 @@ func (ctx Context) WithBroadcastMode(mode string) Context {
return ctx
}
// WithSignModeStr returns a copy of the context with an updated SignMode
// value.
func (ctx Context) WithSignModeStr(signModeStr string) Context {
ctx.SignModeStr = signModeStr
return ctx
}
// WithSkipConfirmation returns a copy of the context with an updated SkipConfirm
// value.
func (ctx Context) WithSkipConfirmation(skip bool) Context {
@ -197,21 +213,27 @@ func (ctx Context) WithInterfaceRegistry(interfaceRegistry codectypes.InterfaceR
return ctx
}
// PrintString prints the raw string to ctx.Output or os.Stdout
// PrintString prints the raw string to ctx.Output if it's defined, otherwise to os.Stdout
func (ctx Context) PrintString(str string) error {
return ctx.PrintBytes([]byte(str))
}
// PrintBytes prints the raw bytes to ctx.Output if it's defined, otherwise to os.Stdout.
// NOTE: for printing a complex state object, you should use ctx.PrintProto
func (ctx Context) PrintBytes(o []byte) error {
writer := ctx.Output
if writer == nil {
writer = os.Stdout
}
_, err := writer.Write([]byte(str))
_, err := writer.Write(o)
return err
}
// PrintOutput outputs toPrint to the ctx.Output based on ctx.OutputFormat which is
// PrintProto outputs toPrint to the ctx.Output based on ctx.OutputFormat which is
// either text or json. If text, toPrint will be YAML encoded. Otherwise, toPrint
// will be JSON encoded using ctx.JSONMarshaler. An error is returned upon failure.
func (ctx Context) PrintOutput(toPrint proto.Message) error {
func (ctx Context) PrintProto(toPrint proto.Message) error {
// always serialize JSON initially because proto json can't be directly YAML encoded
out, err := ctx.JSONMarshaler.MarshalJSON(toPrint)
if err != nil {
@ -220,9 +242,10 @@ func (ctx Context) PrintOutput(toPrint proto.Message) error {
return ctx.printOutput(out)
}
// PrintOutputLegacy is a variant of PrintOutput that doesn't require a proto type
// and uses amino JSON encoding. It will be removed in the near future!
func (ctx Context) PrintOutputLegacy(toPrint interface{}) error {
// PrintObjectLegacy is a variant of PrintProto that doesn't require a proto.Message type
// and uses amino JSON encoding.
// Deprecated: It will be removed in the near future!
func (ctx Context) PrintObjectLegacy(toPrint interface{}) error {
out, err := ctx.LegacyAmino.MarshalJSON(toPrint)
if err != nil {
return err
@ -267,37 +290,37 @@ func (ctx Context) printOutput(out []byte) error {
return nil
}
// GetFromFields returns a from account address and Keybase name given either
// GetFromFields returns a from account address, account name and keyring type, given either
// an address or key name. If genOnly is true, only a valid Bech32 cosmos
// address is returned.
func GetFromFields(kr keyring.Keyring, from string, genOnly bool) (sdk.AccAddress, string, error) {
func GetFromFields(kr keyring.Keyring, from string, genOnly bool) (sdk.AccAddress, string, keyring.KeyType, error) {
if from == "" {
return nil, "", nil
return nil, "", 0, nil
}
if genOnly {
addr, err := sdk.AccAddressFromBech32(from)
if err != nil {
return nil, "", errors.Wrap(err, "must provide a valid Bech32 address in generate-only mode")
return nil, "", 0, errors.Wrap(err, "must provide a valid Bech32 address in generate-only mode")
}
return addr, "", nil
return addr, "", 0, nil
}
var info keyring.Info
if addr, err := sdk.AccAddressFromBech32(from); err == nil {
info, err = kr.KeyByAddress(addr)
if err != nil {
return nil, "", err
return nil, "", 0, err
}
} else {
info, err = kr.Key(from)
if err != nil {
return nil, "", err
return nil, "", 0, err
}
}
return info.GetAddress(), info.GetName(), nil
return info.GetAddress(), info.GetName(), info.GetType(), nil
}
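A sketch of adapting an external caller to the widened GetFromFields signature above (the keyring `kr` and the `from` string are assumed to exist): the extra keyring.KeyType return lets callers special-case hardware keys, as readTxCommandFlags now does for Ledger.

	fromAddr, fromName, keyType, err := client.GetFromFields(kr, from, false)
	if err != nil {
		return err
	}
	if keyType == keyring.TypeLedger {
		// e.g. fall back to SIGN_MODE_LEGACY_AMINO_JSON, since Ledger cannot sign SIGN_MODE_DIRECT yet
	}
	_, _ = fromAddr, fromName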
func newKeyringFromFlags(ctx Context, backend string) (keyring.Keyring, error) {

View File

@ -23,7 +23,7 @@ func TestMain(m *testing.M) {
os.Exit(m.Run())
}
func TestContext_PrintOutput(t *testing.T) {
func TestContext_PrintObject(t *testing.T) {
ctx := client.Context{}
animal := &testdata.Dog{
@ -47,7 +47,7 @@ func TestContext_PrintOutput(t *testing.T) {
buf := &bytes.Buffer{}
ctx = ctx.WithOutput(buf)
ctx.OutputFormat = "json"
err = ctx.PrintOutput(hasAnimal)
err = ctx.PrintProto(hasAnimal)
require.NoError(t, err)
require.Equal(t,
`{"animal":{"@type":"/testdata.Dog","size":"big","name":"Spot"},"x":"10"}
@ -57,7 +57,7 @@ func TestContext_PrintOutput(t *testing.T) {
buf = &bytes.Buffer{}
ctx = ctx.WithOutput(buf)
ctx.OutputFormat = "text"
err = ctx.PrintOutput(hasAnimal)
err = ctx.PrintProto(hasAnimal)
require.NoError(t, err)
require.Equal(t,
`animal:
@ -77,7 +77,7 @@ x: "10"
buf = &bytes.Buffer{}
ctx = ctx.WithOutput(buf)
ctx.OutputFormat = "json"
err = ctx.PrintOutputLegacy(hasAnimal)
err = ctx.PrintObjectLegacy(hasAnimal)
require.NoError(t, err)
require.Equal(t,
`{"type":"testdata/HasAnimal","value":{"animal":{"type":"testdata/Dog","value":{"size":"big","name":"Spot"}},"x":"10"}}
@ -87,7 +87,7 @@ x: "10"
buf = &bytes.Buffer{}
ctx = ctx.WithOutput(buf)
ctx.OutputFormat = "text"
err = ctx.PrintOutputLegacy(hasAnimal)
err = ctx.PrintObjectLegacy(hasAnimal)
require.NoError(t, err)
require.Equal(t,
`type: testdata/HasAnimal

View File

@ -8,10 +8,10 @@ import (
"strings"
"github.com/spf13/cobra"
"github.com/tendermint/tendermint/crypto"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/errors"
"github.com/cosmos/cosmos-sdk/version"
@ -34,7 +34,7 @@ func Cmd() *cobra.Command {
// getPubKeyFromString returns an ed25519 cryptotypes.PubKey by attempting
// to decode the pubkey string from hex, base64, and finally bech32. If all
// encodings fail, an error is returned.
func getPubKeyFromString(pkstr string) (crypto.PubKey, error) {
func getPubKeyFromString(pkstr string) (cryptotypes.PubKey, error) {
bz, err := hex.DecodeString(pkstr)
if err == nil {
if len(bz) == ed25519.PubKeySize {

View File

@ -28,6 +28,14 @@
}
}
},
{
"url": "./tmp-swagger-gen/cosmos/base/tendermint/v1beta1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "BaseParams"
}
}
},
{
"url": "./tmp-swagger-gen/cosmos/distribution/v1beta1/query.swagger.json",
"operationIds": {
@ -85,6 +93,12 @@
}
}
},
{
"url": "./tmp-swagger-gen/cosmos/tx/v1beta1/service.swagger.json",
"dereference": {
"circular": "ignore"
}
},
{
"url": "./tmp-swagger-gen/cosmos/upgrade/v1beta1/query.swagger.json",
"operationIds": {
@ -94,7 +108,7 @@
}
},
{
"url": "./tmp-swagger-gen/ibc/channel/query.swagger.json",
"url": "./tmp-swagger-gen/ibc/core/channel/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCChannelParams"
@ -102,7 +116,7 @@
}
},
{
"url": "./tmp-swagger-gen/ibc/client/query.swagger.json",
"url": "./tmp-swagger-gen/ibc/core/client/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCClientParams"
@ -110,7 +124,7 @@
}
},
{
"url": "./tmp-swagger-gen/ibc/connection/query.swagger.json",
"url": "./tmp-swagger-gen/ibc/core/connection/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCConnectionParams"
@ -118,7 +132,7 @@
}
},
{
"url": "./tmp-swagger-gen/ibc/transfer/query.swagger.json",
"url": "./tmp-swagger-gen/ibc/applications/transfer/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCTransferParams"

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

View File

@ -1,22 +0,0 @@
package client
import (
"fmt"
sdk "github.com/cosmos/cosmos-sdk/types"
)
// ErrInvalidAccount returns a standardized error reflecting that a given
// account address does not exist.
func ErrInvalidAccount(addr sdk.AccAddress) error {
return fmt.Errorf(`no account with address %s was found in the state.
Are you sure there has been a transaction involving it?`, addr)
}
// ErrVerifyCommit returns a common error reflecting that the blockchain commit at a given
// height can't be verified. The reason is that the base checkpoint of the certifier is
// newer than the given height
func ErrVerifyCommit(height int64) error {
return fmt.Errorf(`the height of base truststore in the light client is higher than height %d.
Can't verify blockchain proof at this height. Please set --trust-node to true and try again`, height)
}

View File

@ -30,6 +30,11 @@ const (
// BroadcastAsync defines a tx broadcasting mode where the client returns
// immediately.
BroadcastAsync = "async"
// SignModeDirect is the value of the --sign-mode flag for SIGN_MODE_DIRECT
SignModeDirect = "direct"
// SignModeLegacyAminoJSON is the value of the --sign-mode flag for SIGN_MODE_LEGACY_AMINO_JSON
SignModeLegacyAminoJSON = "amino-json"
)
// List of CLI flags
@ -65,6 +70,11 @@ const (
FlagCountTotal = "count-total"
FlagTimeoutHeight = "timeout-height"
FlagKeyAlgorithm = "algo"
FlagFeeAccount = "fee-account"
// Tendermint logging flags
FlagLogLevel = "log_level"
FlagLogFormat = "log_format"
)
// LineBreak can be included in a command list to provide a blank line
@ -103,6 +113,7 @@ func AddTxFlagsToCmd(cmd *cobra.Command) {
cmd.Flags().String(FlagKeyringBackend, DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test)")
cmd.Flags().String(FlagSignMode, "", "Choose sign mode (direct|amino-json), this is an advanced feature")
cmd.Flags().Uint64(FlagTimeoutHeight, 0, "Set a block timeout height to prevent the tx from being committed past a certain height")
cmd.Flags().String(FlagFeeAccount, "", "Fee account pays fees for the transaction instead of deducting from the signer")
// --gas can accept integers and "auto"
cmd.Flags().String(FlagGas, "", fmt.Sprintf("gas limit to set per-transaction; set to %q to calculate sufficient gas automatically (default %d)", GasFlagAuto, DefaultGasLimit))

View File

@ -589,10 +589,7 @@ func (m *ListAllInterfacesRequest) Unmarshal(dAtA []byte) error {
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) < 0 {
if (skippy < 0) || (iNdEx+skippy) < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) > l {
@ -674,10 +671,7 @@ func (m *ListAllInterfacesResponse) Unmarshal(dAtA []byte) error {
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) < 0 {
if (skippy < 0) || (iNdEx+skippy) < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) > l {
@ -759,10 +753,7 @@ func (m *ListImplementationsRequest) Unmarshal(dAtA []byte) error {
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) < 0 {
if (skippy < 0) || (iNdEx+skippy) < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) > l {
@ -844,10 +835,7 @@ func (m *ListImplementationsResponse) Unmarshal(dAtA []byte) error {
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) < 0 {
if (skippy < 0) || (iNdEx+skippy) < 0 {
return ErrInvalidLengthReflection
}
if (iNdEx + skippy) > l {

View File

@ -21,14 +21,9 @@ type IntegrationTestSuite struct {
func (s *IntegrationTestSuite) SetupSuite() {
app := simapp.Setup(false)
srv := reflection.NewReflectionServiceServer(app.InterfaceRegistry())
sdkCtx := app.BaseApp.NewContext(false, tmproto.Header{})
queryHelper := baseapp.NewQueryServerTestHelper(sdkCtx, app.InterfaceRegistry())
reflection.RegisterReflectionServiceServer(queryHelper, srv)
queryClient := reflection.NewReflectionServiceClient(queryHelper)
s.queryClient = queryClient
}

View File

@ -1,55 +0,0 @@
package simulate
import (
"context"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types"
)
// BaseAppSimulateFn is the signature of the Baseapp#Simulate function.
type BaseAppSimulateFn func(txBytes []byte) (sdk.GasInfo, *sdk.Result, error)
type simulateServer struct {
simulate BaseAppSimulateFn
interfaceRegistry codectypes.InterfaceRegistry
}
// NewSimulateServer creates a new SimulateServer.
func NewSimulateServer(simulate BaseAppSimulateFn, interfaceRegistry codectypes.InterfaceRegistry) SimulateServiceServer {
return simulateServer{
simulate: simulate,
interfaceRegistry: interfaceRegistry,
}
}
var _ SimulateServiceServer = simulateServer{}
// Simulate implements the SimulateService.Simulate RPC method.
func (s simulateServer) Simulate(ctx context.Context, req *SimulateRequest) (*SimulateResponse, error) {
if req.Tx == nil {
return nil, status.Error(codes.InvalidArgument, "invalid empty tx")
}
err := req.Tx.UnpackInterfaces(s.interfaceRegistry)
if err != nil {
return nil, err
}
txBytes, err := req.Tx.Marshal()
if err != nil {
return nil, err
}
gasInfo, result, err := s.simulate(txBytes)
if err != nil {
return nil, err
}
return &SimulateResponse{
GasInfo: &gasInfo,
Result: result,
}, nil
}

View File

@ -1,679 +0,0 @@
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: cosmos/base/simulate/v1beta1/simulate.proto
package simulate
import (
context "context"
fmt "fmt"
types "github.com/cosmos/cosmos-sdk/types"
tx "github.com/cosmos/cosmos-sdk/types/tx"
grpc1 "github.com/gogo/protobuf/grpc"
proto "github.com/gogo/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
io "io"
math "math"
math_bits "math/bits"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// SimulateRequest is the request type for the SimulateServiceService.Simulate
// RPC method.
type SimulateRequest struct {
// tx is the transaction to simulate.
Tx *tx.Tx `protobuf:"bytes,1,opt,name=tx,proto3" json:"tx,omitempty"`
}
func (m *SimulateRequest) Reset() { *m = SimulateRequest{} }
func (m *SimulateRequest) String() string { return proto.CompactTextString(m) }
func (*SimulateRequest) ProtoMessage() {}
func (*SimulateRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_503c836d80bb2d47, []int{0}
}
func (m *SimulateRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *SimulateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_SimulateRequest.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *SimulateRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_SimulateRequest.Merge(m, src)
}
func (m *SimulateRequest) XXX_Size() int {
return m.Size()
}
func (m *SimulateRequest) XXX_DiscardUnknown() {
xxx_messageInfo_SimulateRequest.DiscardUnknown(m)
}
var xxx_messageInfo_SimulateRequest proto.InternalMessageInfo
func (m *SimulateRequest) GetTx() *tx.Tx {
if m != nil {
return m.Tx
}
return nil
}
// SimulateResponse is the response type for the
// SimulateServiceService.SimulateRPC method.
type SimulateResponse struct {
// gas_info is the information about gas used in the simulation.
GasInfo *types.GasInfo `protobuf:"bytes,1,opt,name=gas_info,json=gasInfo,proto3" json:"gas_info,omitempty"`
// result is the result of the simulation.
Result *types.Result `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"`
}
func (m *SimulateResponse) Reset() { *m = SimulateResponse{} }
func (m *SimulateResponse) String() string { return proto.CompactTextString(m) }
func (*SimulateResponse) ProtoMessage() {}
func (*SimulateResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_503c836d80bb2d47, []int{1}
}
func (m *SimulateResponse) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *SimulateResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_SimulateResponse.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *SimulateResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_SimulateResponse.Merge(m, src)
}
func (m *SimulateResponse) XXX_Size() int {
return m.Size()
}
func (m *SimulateResponse) XXX_DiscardUnknown() {
xxx_messageInfo_SimulateResponse.DiscardUnknown(m)
}
var xxx_messageInfo_SimulateResponse proto.InternalMessageInfo
func (m *SimulateResponse) GetGasInfo() *types.GasInfo {
if m != nil {
return m.GasInfo
}
return nil
}
func (m *SimulateResponse) GetResult() *types.Result {
if m != nil {
return m.Result
}
return nil
}
func init() {
proto.RegisterType((*SimulateRequest)(nil), "cosmos.base.simulate.v1beta1.SimulateRequest")
proto.RegisterType((*SimulateResponse)(nil), "cosmos.base.simulate.v1beta1.SimulateResponse")
}
func init() {
proto.RegisterFile("cosmos/base/simulate/v1beta1/simulate.proto", fileDescriptor_503c836d80bb2d47)
}
var fileDescriptor_503c836d80bb2d47 = []byte{
// 351 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xbf, 0x4b, 0xf3, 0x40,
0x18, 0xc7, 0x7b, 0x19, 0xfa, 0x96, 0x7b, 0x87, 0xf7, 0x25, 0x20, 0x94, 0x50, 0x42, 0x8d, 0x28,
0x05, 0xe9, 0x1d, 0xad, 0x4b, 0x07, 0x27, 0x17, 0x11, 0xb7, 0xd4, 0xc9, 0x45, 0x2e, 0xf1, 0x1a,
0x0f, 0xd3, 0xbb, 0x98, 0x7b, 0x52, 0x32, 0x3b, 0x3a, 0x09, 0x4e, 0xfe, 0x11, 0xfe, 0x1f, 0x8e,
0x05, 0x17, 0x47, 0x69, 0xfd, 0x43, 0xa4, 0xc9, 0x25, 0x2d, 0x82, 0xd2, 0x29, 0xb9, 0xe7, 0xf9,
0x7c, 0xbf, 0xcf, 0x2f, 0x7c, 0x18, 0x2a, 0x3d, 0x55, 0x9a, 0x06, 0x4c, 0x73, 0xaa, 0xc5, 0x34,
0x8b, 0x19, 0x70, 0x3a, 0x1b, 0x04, 0x1c, 0xd8, 0xa0, 0x0e, 0x90, 0x24, 0x55, 0xa0, 0xec, 0x4e,
0x09, 0x93, 0x15, 0x4c, 0xea, 0x9c, 0x81, 0x9d, 0x4e, 0xa4, 0x54, 0x14, 0x73, 0xca, 0x12, 0x41,
0x99, 0x94, 0x0a, 0x18, 0x08, 0x25, 0x75, 0xa9, 0x75, 0xf6, 0x36, 0x0b, 0xb1, 0x20, 0x14, 0x75,
0x91, 0xd5, 0xc3, 0x40, 0x8e, 0x81, 0x20, 0xaf, 0xb3, 0x90, 0x97, 0x39, 0x6f, 0x84, 0xff, 0x8d,
0x4d, 0x49, 0x9f, 0xdf, 0x65, 0x5c, 0x83, 0xbd, 0x8f, 0x2d, 0xc8, 0xdb, 0xa8, 0x8b, 0x7a, 0x7f,
0x87, 0x3b, 0xc4, 0x34, 0x07, 0x79, 0xd5, 0x11, 0xb9, 0xc8, 0x7d, 0x0b, 0x72, 0xef, 0x01, 0xe1,
0xff, 0x6b, 0xa9, 0x4e, 0x94, 0xd4, 0xdc, 0x3e, 0xc6, 0xad, 0x88, 0xe9, 0x2b, 0x21, 0x27, 0xca,
0x38, 0xec, 0x92, 0xcd, 0xf1, 0x8a, 0xae, 0x2a, 0xa3, 0x53, 0xa6, 0xcf, 0xe4, 0x44, 0xf9, 0x7f,
0xa2, 0xf2, 0xc7, 0x1e, 0xe1, 0x66, 0xca, 0x75, 0x16, 0x43, 0xdb, 0x2a, 0xb4, 0xdd, 0x9f, 0xb5,
0x7e, 0xc1, 0xf9, 0x86, 0x1f, 0xbe, 0xa0, 0xf5, 0x1c, 0x63, 0x9e, 0xce, 0x44, 0xc8, 0xed, 0x67,
0x84, 0x5b, 0x55, 0xcc, 0xee, 0x93, 0xdf, 0xb6, 0x4c, 0xbe, 0xed, 0xc0, 0x21, 0xdb, 0xe2, 0xe5,
0xdc, 0x1e, 0xb9, 0x7f, 0xfb, 0x7c, 0xb2, 0x7a, 0xde, 0x01, 0xdd, 0xea, 0xf2, 0x27, 0xe7, 0xaf,
0x0b, 0x17, 0xcd, 0x17, 0x2e, 0xfa, 0x58, 0xb8, 0xe8, 0x71, 0xe9, 0x36, 0xe6, 0x4b, 0xb7, 0xf1,
0xbe, 0x74, 0x1b, 0x97, 0x83, 0x48, 0xc0, 0x4d, 0x16, 0x90, 0x50, 0x4d, 0x2b, 0xaf, 0xf2, 0xd3,
0xd7, 0xd7, 0xb7, 0x34, 0x8c, 0x05, 0x97, 0x40, 0xa3, 0x34, 0x09, 0x6b, 0xb3, 0xa0, 0x59, 0x9c,
0xf2, 0xe8, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x7d, 0x16, 0xed, 0x90, 0x76, 0x02, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// SimulateServiceClient is the client API for SimulateService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type SimulateServiceClient interface {
// Simulate simulates executing a transaction for estimating gas usage.
Simulate(ctx context.Context, in *SimulateRequest, opts ...grpc.CallOption) (*SimulateResponse, error)
}
type simulateServiceClient struct {
cc grpc1.ClientConn
}
func NewSimulateServiceClient(cc grpc1.ClientConn) SimulateServiceClient {
return &simulateServiceClient{cc}
}
func (c *simulateServiceClient) Simulate(ctx context.Context, in *SimulateRequest, opts ...grpc.CallOption) (*SimulateResponse, error) {
out := new(SimulateResponse)
err := c.cc.Invoke(ctx, "/cosmos.base.simulate.v1beta1.SimulateService/Simulate", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
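A hypothetical dial-and-call sketch for the generated client above (editor's illustration; the endpoint address, dial options and the signedTx variable are assumptions, and the package path again refers to the pre-change tree since this file is removed by the commit):
package main
import (
	"context"
	"fmt"
	"google.golang.org/grpc"
	"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
	txtypes "github.com/cosmos/cosmos-sdk/types/tx"
)
// simulateOverGRPC sends a signed tx to a node's gRPC endpoint and prints the gas used.
func simulateOverGRPC(signedTx *txtypes.Tx) error {
	// "localhost:9090" is a conventional local gRPC address; adjust as needed.
	conn, err := grpc.Dial("localhost:9090", grpc.WithInsecure())
	if err != nil {
		return err
	}
	defer conn.Close()
	client := simulate.NewSimulateServiceClient(conn)
	resp, err := client.Simulate(context.Background(), &simulate.SimulateRequest{Tx: signedTx})
	if err != nil {
		return err
	}
	fmt.Println("gas used:", resp.GasInfo.GasUsed)
	return nil
}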
// SimulateServiceServer is the server API for SimulateService service.
type SimulateServiceServer interface {
// Simulate simulates executing a transaction for estimating gas usage.
Simulate(context.Context, *SimulateRequest) (*SimulateResponse, error)
}
// UnimplementedSimulateServiceServer can be embedded to have forward compatible implementations.
type UnimplementedSimulateServiceServer struct {
}
func (*UnimplementedSimulateServiceServer) Simulate(ctx context.Context, req *SimulateRequest) (*SimulateResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Simulate not implemented")
}
func RegisterSimulateServiceServer(s grpc1.Server, srv SimulateServiceServer) {
s.RegisterService(&_SimulateService_serviceDesc, srv)
}
func _SimulateService_Simulate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(SimulateRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(SimulateServiceServer).Simulate(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/cosmos.base.simulate.v1beta1.SimulateService/Simulate",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(SimulateServiceServer).Simulate(ctx, req.(*SimulateRequest))
}
return interceptor(ctx, in, info, handler)
}
var _SimulateService_serviceDesc = grpc.ServiceDesc{
ServiceName: "cosmos.base.simulate.v1beta1.SimulateService",
HandlerType: (*SimulateServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Simulate",
Handler: _SimulateService_Simulate_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "cosmos/base/simulate/v1beta1/simulate.proto",
}
func (m *SimulateRequest) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *SimulateRequest) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *SimulateRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if m.Tx != nil {
{
size, err := m.Tx.MarshalToSizedBuffer(dAtA[:i])
if err != nil {
return 0, err
}
i -= size
i = encodeVarintSimulate(dAtA, i, uint64(size))
}
i--
dAtA[i] = 0xa
}
return len(dAtA) - i, nil
}
func (m *SimulateResponse) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *SimulateResponse) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *SimulateResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if m.Result != nil {
{
size, err := m.Result.MarshalToSizedBuffer(dAtA[:i])
if err != nil {
return 0, err
}
i -= size
i = encodeVarintSimulate(dAtA, i, uint64(size))
}
i--
dAtA[i] = 0x12
}
if m.GasInfo != nil {
{
size, err := m.GasInfo.MarshalToSizedBuffer(dAtA[:i])
if err != nil {
return 0, err
}
i -= size
i = encodeVarintSimulate(dAtA, i, uint64(size))
}
i--
dAtA[i] = 0xa
}
return len(dAtA) - i, nil
}
func encodeVarintSimulate(dAtA []byte, offset int, v uint64) int {
offset -= sovSimulate(v)
base := offset
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return base
}
func (m *SimulateRequest) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
if m.Tx != nil {
l = m.Tx.Size()
n += 1 + l + sovSimulate(uint64(l))
}
return n
}
func (m *SimulateResponse) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
if m.GasInfo != nil {
l = m.GasInfo.Size()
n += 1 + l + sovSimulate(uint64(l))
}
if m.Result != nil {
l = m.Result.Size()
n += 1 + l + sovSimulate(uint64(l))
}
return n
}
func sovSimulate(x uint64) (n int) {
return (math_bits.Len64(x|1) + 6) / 7
}
func sozSimulate(x uint64) (n int) {
return sovSimulate(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
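A worked example of the varint size helpers above (editor's illustration, not part of the generated file):
// sovSimulate(300) == 2: 300 needs 9 bits, varints carry 7 payload bits per byte,
// and (bits.Len64(300|1)+6)/7 = (9+6)/7 = 2.
// sozSimulate zig-zag encodes signed values first, so an int64 of -1 maps to 1
// and sozSimulate(uint64(int64(-1))) == sovSimulate(1) == 1.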
func (m *SimulateRequest) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowSimulate
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: SimulateRequest: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: SimulateRequest: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Tx", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowSimulate
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthSimulate
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthSimulate
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Tx == nil {
m.Tx = &tx.Tx{}
}
if err := m.Tx.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipSimulate(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthSimulate
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthSimulate
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *SimulateResponse) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowSimulate
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: SimulateResponse: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: SimulateResponse: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field GasInfo", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowSimulate
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthSimulate
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthSimulate
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.GasInfo == nil {
m.GasInfo = &types.GasInfo{}
}
if err := m.GasInfo.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Result", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowSimulate
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthSimulate
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthSimulate
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Result == nil {
m.Result = &types.Result{}
}
if err := m.Result.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipSimulate(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthSimulate
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthSimulate
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipSimulate(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
depth := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowSimulate
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowSimulate
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
case 1:
iNdEx += 8
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowSimulate
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if length < 0 {
return 0, ErrInvalidLengthSimulate
}
iNdEx += length
case 3:
depth++
case 4:
if depth == 0 {
return 0, ErrUnexpectedEndOfGroupSimulate
}
depth--
case 5:
iNdEx += 4
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
if iNdEx < 0 {
return 0, ErrInvalidLengthSimulate
}
if depth == 0 {
return iNdEx, nil
}
}
return 0, io.ErrUnexpectedEOF
}
var (
ErrInvalidLengthSimulate = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowSimulate = fmt.Errorf("proto: integer overflow")
ErrUnexpectedEndOfGroupSimulate = fmt.Errorf("proto: unexpected end of group")
)

View File

@ -1,166 +0,0 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: cosmos/base/simulate/v1beta1/simulate.proto
/*
Package simulate is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package simulate
import (
"context"
"io"
"net/http"
"github.com/golang/protobuf/descriptor"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/grpc-ecosystem/grpc-gateway/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/status"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = descriptor.ForMessage
var (
filter_SimulateService_Simulate_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_SimulateService_Simulate_0(ctx context.Context, marshaler runtime.Marshaler, client SimulateServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq SimulateRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SimulateService_Simulate_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.Simulate(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_SimulateService_Simulate_0(ctx context.Context, marshaler runtime.Marshaler, server SimulateServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq SimulateRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SimulateService_Simulate_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.Simulate(ctx, &protoReq)
return msg, metadata, err
}
// RegisterSimulateServiceHandlerServer registers the http handlers for service SimulateService to "mux".
// UnaryRPC :call SimulateServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features (such as grpc.SendHeader, etc) to stop working. Consider using RegisterSimulateServiceHandlerFromEndpoint instead.
func RegisterSimulateServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server SimulateServiceServer) error {
mux.Handle("POST", pattern_SimulateService_Simulate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_SimulateService_Simulate_0(rctx, inboundMarshaler, server, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_SimulateService_Simulate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
// RegisterSimulateServiceHandlerFromEndpoint is same as RegisterSimulateServiceHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterSimulateServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterSimulateServiceHandler(ctx, mux, conn)
}
// RegisterSimulateServiceHandler registers the http handlers for service SimulateService to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterSimulateServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterSimulateServiceHandlerClient(ctx, mux, NewSimulateServiceClient(conn))
}
// RegisterSimulateServiceHandlerClient registers the http handlers for service SimulateService
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "SimulateServiceClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "SimulateServiceClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "SimulateServiceClient" to call the correct interceptors.
func RegisterSimulateServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client SimulateServiceClient) error {
mux.Handle("POST", pattern_SimulateService_Simulate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_SimulateService_Simulate_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_SimulateService_Simulate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_SimulateService_Simulate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 2}, []string{"cosmos", "base", "simulate", "v1beta1"}, "", runtime.AssumeColonVerbOpt(true)))
)
var (
forward_SimulateService_Simulate_0 = runtime.ForwardResponseMessage
)
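A minimal sketch of mounting these generated handlers on an HTTP server (editor's illustration; the endpoint address, ports and dial options are assumptions, and the package path refers to the pre-change tree since this file is removed by the commit):
package main
import (
	"context"
	"net/http"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc"
	"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
)
func main() {
	mux := runtime.NewServeMux()
	err := simulate.RegisterSimulateServiceHandlerFromEndpoint(
		context.Background(), mux, "localhost:9090",
		[]grpc.DialOption{grpc.WithInsecure()},
	)
	if err != nil {
		panic(err)
	}
	// Serves POST /cosmos/base/simulate/v1beta1/simulate, proxying to the gRPC endpoint.
	_ = http.ListenAndServe(":8080", mux)
}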

View File

@ -1,119 +0,0 @@
package simulate_test
import (
"context"
"testing"
"github.com/stretchr/testify/suite"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
"github.com/cosmos/cosmos-sdk/client/tx"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/simapp"
"github.com/cosmos/cosmos-sdk/testutil/testdata"
sdk "github.com/cosmos/cosmos-sdk/types"
txtypes "github.com/cosmos/cosmos-sdk/types/tx"
"github.com/cosmos/cosmos-sdk/types/tx/signing"
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
)
type IntegrationTestSuite struct {
suite.Suite
app *simapp.SimApp
clientCtx client.Context
queryClient simulate.SimulateServiceClient
sdkCtx sdk.Context
}
func (s *IntegrationTestSuite) SetupSuite() {
app := simapp.Setup(true)
sdkCtx := app.BaseApp.NewContext(true, tmproto.Header{})
app.AccountKeeper.SetParams(sdkCtx, authtypes.DefaultParams())
app.BankKeeper.SetParams(sdkCtx, banktypes.DefaultParams())
// Set up TxConfig.
encodingConfig := simapp.MakeTestEncodingConfig()
clientCtx := client.Context{}.WithTxConfig(encodingConfig.TxConfig)
// Create new simulation server.
srv := simulate.NewSimulateServer(app.BaseApp.Simulate, encodingConfig.InterfaceRegistry)
queryHelper := baseapp.NewQueryServerTestHelper(sdkCtx, app.InterfaceRegistry())
simulate.RegisterSimulateServiceServer(queryHelper, srv)
queryClient := simulate.NewSimulateServiceClient(queryHelper)
s.app = app
s.clientCtx = clientCtx
s.queryClient = queryClient
s.sdkCtx = sdkCtx
}
func (s IntegrationTestSuite) TestSimulateService() {
// Create an account with some funds.
priv1, _, addr1 := testdata.KeyTestPubAddr()
acc1 := s.app.AccountKeeper.NewAccountWithAddress(s.sdkCtx, addr1)
err := acc1.SetAccountNumber(0)
s.Require().NoError(err)
s.app.AccountKeeper.SetAccount(s.sdkCtx, acc1)
s.app.BankKeeper.SetBalances(s.sdkCtx, addr1, sdk.Coins{
sdk.NewInt64Coin("atom", 10000000),
})
// Create a test x/bank MsgSend.
coins := sdk.NewCoins(sdk.NewInt64Coin("atom", 10))
_, _, addr2 := testdata.KeyTestPubAddr()
msg := banktypes.NewMsgSend(addr1, addr2, coins)
feeAmount := testdata.NewTestFeeAmount()
gasLimit := testdata.NewTestGasLimit()
memo := "foo"
accSeq, accNum := uint64(0), uint64(0)
// Create a txBuilder.
txBuilder := s.clientCtx.TxConfig.NewTxBuilder()
txBuilder.SetMsgs(msg)
txBuilder.SetMemo(memo)
txBuilder.SetFeeAmount(feeAmount)
txBuilder.SetGasLimit(gasLimit)
// 1st round: set empty signature
sigV2 := signing.SignatureV2{
PubKey: priv1.PubKey(),
Data: &signing.SingleSignatureData{
SignMode: s.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
Signature: nil,
},
}
txBuilder.SetSignatures(sigV2)
// 2nd round: actually sign
sigV2, err = tx.SignWithPrivKey(
s.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
authsigning.SignerData{ChainID: s.sdkCtx.ChainID(), AccountNumber: accNum, Sequence: accSeq},
txBuilder, priv1, s.clientCtx.TxConfig, accSeq,
)
txBuilder.SetSignatures(sigV2)
any, ok := txBuilder.(codectypes.IntoAny)
s.Require().True(ok)
cached := any.AsAny().GetCachedValue()
txTx, ok := cached.(*txtypes.Tx)
s.Require().True(ok)
res, err := s.queryClient.Simulate(
context.Background(),
&simulate.SimulateRequest{Tx: txTx},
)
s.Require().NoError(err)
// Check the result and gas used are correct.
s.Require().Equal(len(res.GetResult().GetEvents()), 4) // 1 transfer, 3 messages.
s.Require().True(res.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes changes between runs; just check it's non-zero.
}
func TestSimulateTestSuite(t *testing.T) {
suite.Run(t, new(IntegrationTestSuite))
}

View File

@ -0,0 +1,19 @@
package tmservice
import (
"context"
ctypes "github.com/tendermint/tendermint/rpc/core/types"
"github.com/cosmos/cosmos-sdk/client"
)
func getBlock(ctx context.Context, clientCtx client.Context, height *int64) (*ctypes.ResultBlock, error) {
// get the node
node, err := clientCtx.GetNode()
if err != nil {
return nil, err
}
return node.Block(ctx, height)
}
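getNodeStatus, used by the tmservice query server below, lives in a sibling file that is not part of this hunk; a sketch of its likely shape, inferred by analogy with getBlock above (editor's assumption, not taken from the diff):
// getNodeStatus returns the node's status via the client context's RPC client (assumed shape).
func getNodeStatus(ctx context.Context, clientCtx client.Context) (*ctypes.ResultStatus, error) {
	node, err := clientCtx.GetNode()
	if err != nil {
		return nil, err
	}
	return node.Status(ctx)
}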

File diff suppressed because it is too large

View File

@ -0,0 +1,566 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: cosmos/base/tendermint/v1beta1/query.proto
/*
Package tmservice is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package tmservice
import (
"context"
"io"
"net/http"
"github.com/golang/protobuf/descriptor"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/grpc-ecosystem/grpc-gateway/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/status"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = descriptor.ForMessage
func request_Service_GetNodeInfo_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetNodeInfoRequest
var metadata runtime.ServerMetadata
msg, err := client.GetNodeInfo(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Service_GetNodeInfo_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetNodeInfoRequest
var metadata runtime.ServerMetadata
msg, err := server.GetNodeInfo(ctx, &protoReq)
return msg, metadata, err
}
func request_Service_GetSyncing_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetSyncingRequest
var metadata runtime.ServerMetadata
msg, err := client.GetSyncing(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Service_GetSyncing_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetSyncingRequest
var metadata runtime.ServerMetadata
msg, err := server.GetSyncing(ctx, &protoReq)
return msg, metadata, err
}
func request_Service_GetLatestBlock_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetLatestBlockRequest
var metadata runtime.ServerMetadata
msg, err := client.GetLatestBlock(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Service_GetLatestBlock_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetLatestBlockRequest
var metadata runtime.ServerMetadata
msg, err := server.GetLatestBlock(ctx, &protoReq)
return msg, metadata, err
}
func request_Service_GetBlockByHeight_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetBlockByHeightRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["height"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "height")
}
protoReq.Height, err = runtime.Int64(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "height", err)
}
msg, err := client.GetBlockByHeight(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Service_GetBlockByHeight_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetBlockByHeightRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["height"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "height")
}
protoReq.Height, err = runtime.Int64(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "height", err)
}
msg, err := server.GetBlockByHeight(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Service_GetLatestValidatorSet_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Service_GetLatestValidatorSet_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetLatestValidatorSetRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_GetLatestValidatorSet_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.GetLatestValidatorSet(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Service_GetLatestValidatorSet_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetLatestValidatorSetRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_GetLatestValidatorSet_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.GetLatestValidatorSet(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Service_GetValidatorSetByHeight_0 = &utilities.DoubleArray{Encoding: map[string]int{"height": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}}
)
func request_Service_GetValidatorSetByHeight_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetValidatorSetByHeightRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["height"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "height")
}
protoReq.Height, err = runtime.Int64(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "height", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_GetValidatorSetByHeight_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.GetValidatorSetByHeight(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Service_GetValidatorSetByHeight_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetValidatorSetByHeightRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["height"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "height")
}
protoReq.Height, err = runtime.Int64(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "height", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_GetValidatorSetByHeight_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.GetValidatorSetByHeight(ctx, &protoReq)
return msg, metadata, err
}
// RegisterServiceHandlerServer registers the http handlers for service Service to "mux".
// UnaryRPC :call ServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features (such as grpc.SendHeader, etc) to stop working. Consider using RegisterServiceHandlerFromEndpoint instead.
func RegisterServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ServiceServer) error {
mux.Handle("GET", pattern_Service_GetNodeInfo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Service_GetNodeInfo_0(rctx, inboundMarshaler, server, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetNodeInfo_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetSyncing_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Service_GetSyncing_0(rctx, inboundMarshaler, server, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetSyncing_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetLatestBlock_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Service_GetLatestBlock_0(rctx, inboundMarshaler, server, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetLatestBlock_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetBlockByHeight_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Service_GetBlockByHeight_0(rctx, inboundMarshaler, server, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetBlockByHeight_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetLatestValidatorSet_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Service_GetLatestValidatorSet_0(rctx, inboundMarshaler, server, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetLatestValidatorSet_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetValidatorSetByHeight_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Service_GetValidatorSetByHeight_0(rctx, inboundMarshaler, server, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetValidatorSetByHeight_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
// RegisterServiceHandlerFromEndpoint is same as RegisterServiceHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterServiceHandler(ctx, mux, conn)
}
// RegisterServiceHandler registers the http handlers for service Service to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterServiceHandlerClient(ctx, mux, NewServiceClient(conn))
}
// RegisterServiceHandlerClient registers the http handlers for service Service
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ServiceClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ServiceClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "ServiceClient" to call the correct interceptors.
func RegisterServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ServiceClient) error {
mux.Handle("GET", pattern_Service_GetNodeInfo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Service_GetNodeInfo_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetNodeInfo_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetSyncing_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Service_GetSyncing_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetSyncing_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetLatestBlock_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Service_GetLatestBlock_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetLatestBlock_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetBlockByHeight_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Service_GetBlockByHeight_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetBlockByHeight_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetLatestValidatorSet_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Service_GetLatestValidatorSet_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetLatestValidatorSet_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Service_GetValidatorSetByHeight_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Service_GetValidatorSetByHeight_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Service_GetValidatorSetByHeight_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_Service_GetNodeInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "tendermint", "v1beta1", "node_info"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetSyncing_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "tendermint", "v1beta1", "syncing"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetLatestBlock_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "blocks", "latest"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetBlockByHeight_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "blocks", "height"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetLatestValidatorSet_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "validatorsets", "latest"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetValidatorSetByHeight_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "validatorsets", "height"}, "", runtime.AssumeColonVerbOpt(true)))
)
var (
forward_Service_GetNodeInfo_0 = runtime.ForwardResponseMessage
forward_Service_GetSyncing_0 = runtime.ForwardResponseMessage
forward_Service_GetLatestBlock_0 = runtime.ForwardResponseMessage
forward_Service_GetBlockByHeight_0 = runtime.ForwardResponseMessage
forward_Service_GetLatestValidatorSet_0 = runtime.ForwardResponseMessage
forward_Service_GetValidatorSetByHeight_0 = runtime.ForwardResponseMessage
)

View File

@ -0,0 +1,227 @@
package tmservice
import (
"context"
gogogrpc "github.com/gogo/protobuf/grpc"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/rpc"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
qtypes "github.com/cosmos/cosmos-sdk/types/query"
"github.com/cosmos/cosmos-sdk/version"
)
// queryServer is the struct on which all the tendermint query handlers are implemented.
type queryServer struct {
clientCtx client.Context
interfaceRegistry codectypes.InterfaceRegistry
}
var _ ServiceServer = queryServer{}
var _ codectypes.UnpackInterfacesMessage = &GetLatestValidatorSetResponse{}
// NewQueryServer creates a new tendermint query server.
func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry) ServiceServer {
return queryServer{
clientCtx: clientCtx,
interfaceRegistry: interfaceRegistry,
}
}
// GetSyncing implements ServiceServer.GetSyncing
func (s queryServer) GetSyncing(ctx context.Context, _ *GetSyncingRequest) (*GetSyncingResponse, error) {
status, err := getNodeStatus(ctx, s.clientCtx)
if err != nil {
return nil, err
}
return &GetSyncingResponse{
Syncing: status.SyncInfo.CatchingUp,
}, nil
}
// GetLatestBlock implements ServiceServer.GetLatestBlock
func (s queryServer) GetLatestBlock(ctx context.Context, _ *GetLatestBlockRequest) (*GetLatestBlockResponse, error) {
status, err := getBlock(ctx, s.clientCtx, nil)
if err != nil {
return nil, err
}
protoBlockID := status.BlockID.ToProto()
protoBlock, err := status.Block.ToProto()
if err != nil {
return nil, err
}
return &GetLatestBlockResponse{
BlockId: &protoBlockID,
Block: protoBlock,
}, nil
}
// GetBlockByHeight implements ServiceServer.GetBlockByHeight
func (s queryServer) GetBlockByHeight(ctx context.Context, req *GetBlockByHeightRequest) (*GetBlockByHeightResponse, error) {
chainHeight, err := rpc.GetChainHeight(s.clientCtx)
if err != nil {
return nil, err
}
if req.Height > chainHeight {
return nil, status.Error(codes.InvalidArgument, "requested block height is bigger than the chain length")
}
res, err := getBlock(ctx, s.clientCtx, &req.Height)
if err != nil {
return nil, err
}
protoBlockID := res.BlockID.ToProto()
protoBlock, err := res.Block.ToProto()
if err != nil {
return nil, err
}
return &GetBlockByHeightResponse{
BlockId: &protoBlockID,
Block: protoBlock,
}, nil
}
// GetLatestValidatorSet implements ServiceServer.GetLatestValidatorSet
func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *GetLatestValidatorSetRequest) (*GetLatestValidatorSetResponse, error) {
page, limit, err := qtypes.ParsePagination(req.Pagination)
if err != nil {
return nil, err
}
validatorsRes, err := rpc.GetValidators(ctx, s.clientCtx, nil, &page, &limit)
if err != nil {
return nil, err
}
outputValidatorsRes := &GetLatestValidatorSetResponse{
BlockHeight: validatorsRes.BlockHeight,
Validators: make([]*Validator, len(validatorsRes.Validators)),
}
for i, validator := range validatorsRes.Validators {
anyPub, err := codectypes.NewAnyWithValue(validator.PubKey)
if err != nil {
return nil, err
}
outputValidatorsRes.Validators[i] = &Validator{
Address: validator.Address.String(),
ProposerPriority: validator.ProposerPriority,
PubKey: anyPub,
VotingPower: validator.VotingPower,
}
}
return outputValidatorsRes, nil
}
func (m *GetLatestValidatorSetResponse) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error {
var pubKey cryptotypes.PubKey
for _, val := range m.Validators {
err := unpacker.UnpackAny(val.PubKey, &pubKey)
if err != nil {
return err
}
}
return nil
}
// GetValidatorSetByHeight implements ServiceServer.GetValidatorSetByHeight
func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *GetValidatorSetByHeightRequest) (*GetValidatorSetByHeightResponse, error) {
page, limit, err := qtypes.ParsePagination(req.Pagination)
if err != nil {
return nil, err
}
chainHeight, err := rpc.GetChainHeight(s.clientCtx)
if err != nil {
return nil, status.Error(codes.Internal, "failed to parse chain height")
}
if req.Height > chainHeight {
return nil, status.Error(codes.InvalidArgument, "requested block height is bigger than the chain length")
}
validatorsRes, err := rpc.GetValidators(ctx, s.clientCtx, &req.Height, &page, &limit)
if err != nil {
return nil, err
}
outputValidatorsRes := &GetValidatorSetByHeightResponse{
BlockHeight: validatorsRes.BlockHeight,
Validators: make([]*Validator, len(validatorsRes.Validators)),
}
for i, validator := range validatorsRes.Validators {
anyPub, err := codectypes.NewAnyWithValue(validator.PubKey)
if err != nil {
return nil, err
}
outputValidatorsRes.Validators[i] = &Validator{
Address: validator.Address.String(),
ProposerPriority: validator.ProposerPriority,
PubKey: anyPub,
VotingPower: validator.VotingPower,
}
}
return outputValidatorsRes, nil
}
// GetNodeInfo implements ServiceServer.GetNodeInfo
func (s queryServer) GetNodeInfo(ctx context.Context, req *GetNodeInfoRequest) (*GetNodeInfoResponse, error) {
status, err := getNodeStatus(ctx, s.clientCtx)
if err != nil {
return nil, err
}
protoNodeInfo := status.NodeInfo.ToProto()
nodeInfo := version.NewInfo()
deps := make([]*Module, len(nodeInfo.BuildDeps))
for i, dep := range nodeInfo.BuildDeps {
deps[i] = &Module{
Path: dep.Path,
Sum: dep.Sum,
Version: dep.Version,
}
}
resp := GetNodeInfoResponse{
DefaultNodeInfo: protoNodeInfo,
ApplicationVersion: &VersionInfo{
AppName: nodeInfo.AppName,
Name: nodeInfo.Name,
GitCommit: nodeInfo.GitCommit,
GoVersion: nodeInfo.GoVersion,
Version: nodeInfo.Version,
BuildTags: nodeInfo.BuildTags,
BuildDeps: deps,
},
}
return &resp, nil
}
// RegisterTendermintService registers the tendermint queries on the gRPC router.
func RegisterTendermintService(
qrt gogogrpc.Server,
clientCtx client.Context,
interfaceRegistry codectypes.InterfaceRegistry,
) {
RegisterServiceServer(
qrt,
NewQueryServer(clientCtx, interfaceRegistry),
)
}
// RegisterGRPCGatewayRoutes mounts the tendermint service's GRPC-gateway routes on the
// given Mux.
func RegisterGRPCGatewayRoutes(clientConn gogogrpc.ClientConn, mux *runtime.ServeMux) {
RegisterServiceHandlerClient(context.Background(), mux, NewServiceClient(clientConn))
}
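A sketch of how an application typically wires these two registration helpers together (editor's illustration; the function and parameter names are illustrative, not from the diff):
package app_example
import (
	gogogrpc "github.com/gogo/protobuf/grpc"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"github.com/cosmos/cosmos-sdk/client"
	"github.com/cosmos/cosmos-sdk/client/grpc/tmservice"
	codectypes "github.com/cosmos/cosmos-sdk/codec/types"
)
// registerTendermintQueries exposes the tendermint queries over gRPC and mounts
// the matching REST routes (e.g. /cosmos/base/tendermint/v1beta1/blocks/latest).
func registerTendermintQueries(
	qrt gogogrpc.Server, // the app's gRPC query router
	clientCtx client.Context,
	interfaceRegistry codectypes.InterfaceRegistry,
	gwMux *runtime.ServeMux, // grpc-gateway mux served by the API server
	clientConn gogogrpc.ClientConn, // connection the gateway proxies through
) {
	tmservice.RegisterTendermintService(qrt, clientCtx, interfaceRegistry)
	tmservice.RegisterGRPCGatewayRoutes(clientConn, gwMux)
}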

View File

@ -0,0 +1,164 @@
package tmservice_test
import (
"context"
"fmt"
"testing"
"github.com/stretchr/testify/suite"
"github.com/cosmos/cosmos-sdk/client/grpc/tmservice"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/testutil/network"
qtypes "github.com/cosmos/cosmos-sdk/types/query"
"github.com/cosmos/cosmos-sdk/types/rest"
"github.com/cosmos/cosmos-sdk/version"
)
type IntegrationTestSuite struct {
suite.Suite
cfg network.Config
network *network.Network
queryClient tmservice.ServiceClient
}
func (s *IntegrationTestSuite) SetupSuite() {
s.T().Log("setting up integration test suite")
cfg := network.DefaultConfig()
cfg.NumValidators = 1
s.cfg = cfg
s.network = network.New(s.T(), cfg)
s.Require().NotNil(s.network)
_, err := s.network.WaitForHeight(1)
s.Require().NoError(err)
s.queryClient = tmservice.NewServiceClient(s.network.Validators[0].ClientCtx)
}
func (s *IntegrationTestSuite) TearDownSuite() {
s.T().Log("tearing down integration test suite")
s.network.Cleanup()
}
func (s IntegrationTestSuite) TestQueryNodeInfo() {
val := s.network.Validators[0]
res, err := s.queryClient.GetNodeInfo(context.Background(), &tmservice.GetNodeInfoRequest{})
s.Require().NoError(err)
s.Require().Equal(res.ApplicationVersion.AppName, version.NewInfo().AppName)
restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/node_info", val.APIAddress))
s.Require().NoError(err)
var getInfoRes tmservice.GetNodeInfoResponse
s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getInfoRes))
s.Require().Equal(getInfoRes.ApplicationVersion.AppName, version.NewInfo().AppName)
}
func (s IntegrationTestSuite) TestQuerySyncing() {
val := s.network.Validators[0]
_, err := s.queryClient.GetSyncing(context.Background(), &tmservice.GetSyncingRequest{})
s.Require().NoError(err)
restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/syncing", val.APIAddress))
s.Require().NoError(err)
var syncingRes tmservice.GetSyncingResponse
s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &syncingRes))
}
func (s IntegrationTestSuite) TestQueryLatestBlock() {
val := s.network.Validators[0]
_, err := s.queryClient.GetLatestBlock(context.Background(), &tmservice.GetLatestBlockRequest{})
s.Require().NoError(err)
restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/blocks/latest", val.APIAddress))
s.Require().NoError(err)
var blockInfoRes tmservice.GetLatestBlockResponse
s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &blockInfoRes))
}
func (s IntegrationTestSuite) TestQueryBlockByHeight() {
val := s.network.Validators[0]
_, err := s.queryClient.GetBlockByHeight(context.Background(), &tmservice.GetBlockByHeightRequest{Height: 1})
s.Require().NoError(err)
restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/blocks/%d", val.APIAddress, 1))
s.Require().NoError(err)
var blockInfoRes tmservice.GetBlockByHeightResponse
s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &blockInfoRes))
}
func (s IntegrationTestSuite) TestQueryLatestValidatorSet() {
val := s.network.Validators[0]
// nil pagination
res, err := s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{
Pagination: nil,
})
s.Require().NoError(err)
s.Require().Equal(1, len(res.Validators))
content, ok := res.Validators[0].PubKey.GetCachedValue().(cryptotypes.PubKey)
s.Require().Equal(true, ok)
s.Require().Equal(content, val.PubKey)
//with pagination
_, err = s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{
Offset: 0,
Limit: 10,
}})
s.Require().NoError(err)
// rest request without pagination
_, err = rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/latest", val.APIAddress))
s.Require().NoError(err)
// rest request with pagination
restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/latest?pagination.offset=%d&pagination.limit=%d", val.APIAddress, 0, 1))
s.Require().NoError(err)
var validatorSetRes tmservice.GetLatestValidatorSetResponse
s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes))
s.Require().Equal(1, len(validatorSetRes.Validators))
anyPub, err := codectypes.NewAnyWithValue(val.PubKey)
s.Require().NoError(err)
s.Require().Equal(validatorSetRes.Validators[0].PubKey, anyPub)
}
func (s IntegrationTestSuite) TestQueryValidatorSetByHeight() {
val := s.network.Validators[0]
// nil pagination
_, err := s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{
Height: 1,
Pagination: nil,
})
s.Require().NoError(err)
_, err = s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{
Height: 1,
Pagination: &qtypes.PageRequest{
Offset: 0,
Limit: 10,
}})
s.Require().NoError(err)
// no pagination rest
_, err = rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d", val.APIAddress, 1))
s.Require().NoError(err)
// rest query with pagination
restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d?pagination.offset=%d&pagination.limit=%d", val.APIAddress, 1, 0, 1))
var validatorSetRes tmservice.GetValidatorSetByHeightResponse
s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes))
}
func TestIntegrationTestSuite(t *testing.T) {
suite.Run(t, new(IntegrationTestSuite))
}

View File

@ -0,0 +1,17 @@
package tmservice
import (
"context"
ctypes "github.com/tendermint/tendermint/rpc/core/types"
"github.com/cosmos/cosmos-sdk/client"
)
func getNodeStatus(ctx context.Context, clientCtx client.Context) (*ctypes.ResultStatus, error) {
node, err := clientCtx.GetNode()
if err != nil {
return &ctypes.ResultStatus{}, err
}
return node.Status(ctx)
}

View File

@ -3,6 +3,7 @@ package client
import (
gocontext "context"
"fmt"
"reflect"
"strconv"
gogogrpc "github.com/gogo/protobuf/grpc"
@ -12,9 +13,10 @@ import (
"google.golang.org/grpc/encoding/proto"
"google.golang.org/grpc/metadata"
grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"
"github.com/cosmos/cosmos-sdk/codec/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"
"github.com/cosmos/cosmos-sdk/types/tx"
)
var _ gogogrpc.ClientConn = Context{}
@ -22,7 +24,37 @@ var _ gogogrpc.ClientConn = Context{}
var protoCodec = encoding.GetCodec(proto.Name)
// Invoke implements the grpc ClientConn.Invoke method
func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply interface{}, opts ...grpc.CallOption) error {
func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply interface{}, opts ...grpc.CallOption) (err error) {
// Two things can happen here:
// 1. either we're broadcasting a Tx, in which case we call Tendermint's broadcast endpoint directly,
// 2. or we are querying for state, in which case we call ABCI's Query.
// In both cases, we don't allow empty request args (they would cause an unexpected panic).
if reflect.ValueOf(args).IsNil() {
return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "request cannot be nil")
}
// Case 1. Broadcasting a Tx.
if isBroadcast(method) {
req, ok := args.(*tx.BroadcastTxRequest)
if !ok {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxRequest)(nil), args)
}
res, ok := reply.(*tx.BroadcastTxResponse)
if !ok {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxResponse)(nil), args)
}
broadcastRes, err := TxServiceBroadcast(grpcCtx, ctx, req)
if err != nil {
return err
}
*res = *broadcastRes
return err
}
// Case 2. Querying state.
reqBz, err := protoCodec.Marshal(args)
if err != nil {
return err
@ -35,6 +67,11 @@ func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply
if err != nil {
return err
}
if height < 0 {
return sdkerrors.Wrapf(
sdkerrors.ErrInvalidRequest,
"client.Context.Invoke: height (%d) from %q must be >= 0", height, grpctypes.GRPCBlockHeightHeader)
}
ctx = ctx.WithHeight(height)
}
@ -80,3 +117,7 @@ func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply
func (Context) NewStream(gocontext.Context, *grpc.StreamDesc, string, ...grpc.CallOption) (grpc.ClientStream, error) {
return nil, fmt.Errorf("streaming rpc not supported")
}
func isBroadcast(method string) bool {
return method == "/cosmos.tx.v1beta1.Service/BroadcastTx"
}
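Since Context now satisfies gogogrpc.ClientConn, generated module query clients can be pointed straight at it and every call goes through Invoke above. A minimal sketch, assuming a configured clientCtx, an sdk.AccAddress addr, and the bank module's generated query client:
// Illustrative sketch of querying state through client.Context as a gRPC connection.
bankClient := banktypes.NewQueryClient(clientCtx)
res, err := bankClient.Balance(context.Background(), &banktypes.QueryBalanceRequest{
	Address: addr.String(), // assumed address
	Denom:   "stake",
})
if err != nil {
	return err
}
fmt.Println(res.Balance)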

View File

@ -9,7 +9,6 @@ import (
bip39 "github.com/cosmos/go-bip39"
"github.com/spf13/cobra"
"github.com/tendermint/tendermint/crypto"
"github.com/tendermint/tendermint/libs/cli"
"github.com/cosmos/cosmos-sdk/client"
@ -18,6 +17,7 @@ import (
"github.com/cosmos/cosmos-sdk/crypto/hd"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -85,12 +85,12 @@ the flag --nosort is set.
func runAddCmd(cmd *cobra.Command, args []string) error {
buf := bufio.NewReader(cmd.InOrStdin())
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
var (
kr keyring.Keyring
err error
)
var kr keyring.Keyring
dryRun, _ := cmd.Flags().GetBool(flags.FlagDryRun)
if dryRun {
@ -152,7 +152,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
multisigKeys, _ := cmd.Flags().GetStringSlice(flagMultisig)
if len(multisigKeys) != 0 {
var pks []crypto.PubKey
var pks []cryptotypes.PubKey
multisigThreshold, _ := cmd.Flags().GetInt(flagMultiSigThreshold)
if err := validateMultisigThreshold(multisigThreshold, len(multisigKeys)); err != nil {
@ -247,7 +247,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
}
if len(mnemonic) == 0 {
// read entropy seed straight from crypto.Rand and convert to mnemonic
// read entropy seed straight from tmcrypto.Rand and convert to mnemonic
entropySeed, err := bip39.NewEntropy(mnemonicEntropySize)
if err != nil {
return err

View File

@ -29,7 +29,10 @@ private keys stored in a ledger device cannot be deleted with the CLI.
Args: cobra.MinimumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
buf := bufio.NewReader(cmd.InOrStdin())
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
for _, name := range args {
info, err := clientCtx.Keyring.Key(name)

View File

@ -2,23 +2,48 @@ package keys
import (
"bufio"
"fmt"
"github.com/spf13/cobra"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/input"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
)
const (
flagUnarmoredHex = "unarmored-hex"
flagUnsafe = "unsafe"
)
// ExportKeyCommand exports private keys from the key store.
func ExportKeyCommand() *cobra.Command {
return &cobra.Command{
cmd := &cobra.Command{
Use: "export <name>",
Short: "Export private keys",
Long: `Export a private key from the local keybase in ASCII-armored encrypted format.`,
Args: cobra.ExactArgs(1),
Long: `Export a private key from the local keyring in ASCII-armored encrypted format.
When both the --unarmored-hex and --unsafe flags are selected, cryptographic
private key material is exported in an INSECURE fashion that is designed to
allow users to import their keys in hot wallets. This feature is for advanced
users only who are confident in how to handle private keys and are
FULLY AWARE OF THE RISKS. If you are unsure, you may want to do some research
and export your keys in ASCII-armored encrypted format.`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
buf := bufio.NewReader(cmd.InOrStdin())
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
unarmored, _ := cmd.Flags().GetBool(flagUnarmoredHex)
unsafe, _ := cmd.Flags().GetBool(flagUnsafe)
if unarmored && unsafe {
return exportUnsafeUnarmored(cmd, args[0], buf, clientCtx.Keyring)
} else if unarmored || unsafe {
return fmt.Errorf("the flags %s and %s must be used together", flagUnsafe, flagUnarmoredHex)
}
encryptPassword, err := input.GetPassword("Enter passphrase to encrypt the exported key:", buf)
if err != nil {
@ -31,7 +56,31 @@ func ExportKeyCommand() *cobra.Command {
}
cmd.Println(armored)
return nil
},
}
cmd.Flags().Bool(flagUnarmoredHex, false, "Export unarmored hex privkey. Requires --unsafe.")
cmd.Flags().Bool(flagUnsafe, false, "Enable unsafe operations. This flag must be switched on along with all unsafe operation-specific options.")
return cmd
}
func exportUnsafeUnarmored(cmd *cobra.Command, uid string, buf *bufio.Reader, kr keyring.Keyring) error {
// confirm the unarmored export with the user before proceeding
if yes, err := input.GetConfirmation("WARNING: The private key will be exported as an unarmored hexadecimal string. USE AT YOUR OWN RISK. Continue?", buf, cmd.ErrOrStderr()); err != nil {
return err
} else if !yes {
return nil
}
hexPrivKey, err := keyring.NewUnsafe(kr).UnsafeExportPrivKeyHex(uid)
if err != nil {
return err
}
cmd.Println(hexPrivKey)
return nil
}

View File

@ -36,15 +36,34 @@ func Test_runExportCmd(t *testing.T) {
require.NoError(t, err)
// Now enter password
mockIn.Reset("123456789\n123456789\n")
cmd.SetArgs([]string{
args := []string{
"keyname1",
fmt.Sprintf("--%s=%s", flags.FlagHome, kbHome),
fmt.Sprintf("--%s=%s", flags.FlagKeyringBackend, keyring.BackendTest),
})
}
mockIn.Reset("123456789\n123456789\n")
cmd.SetArgs(args)
clientCtx := client.Context{}.WithKeyring(kb)
ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx)
require.NoError(t, cmd.ExecuteContext(ctx))
argsUnsafeOnly := append(args, "--unsafe")
cmd.SetArgs(argsUnsafeOnly)
require.Error(t, cmd.ExecuteContext(ctx))
argsUnarmoredHexOnly := append(args, "--unarmored-hex")
cmd.SetArgs(argsUnarmoredHexOnly)
require.Error(t, cmd.ExecuteContext(ctx))
argsUnsafeUnarmoredHex := append(args, "--unsafe", "--unarmored-hex")
cmd.SetArgs(argsUnsafeUnarmoredHex)
require.Error(t, cmd.ExecuteContext(ctx))
mockIn, mockOut := testutil.ApplyMockIO(cmd)
mockIn.Reset("y\n")
require.NoError(t, cmd.ExecuteContext(ctx))
require.Equal(t, "2485e33678db4175dc0ecef2d6e1fc493d4a0d7f7ce83324b6ed70afe77f3485\n", mockOut.String())
}

View File

@ -19,7 +19,10 @@ func ImportKeyCommand() *cobra.Command {
Args: cobra.ExactArgs(2),
RunE: func(cmd *cobra.Command, args []string) error {
buf := bufio.NewReader(cmd.InOrStdin())
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
bz, err := ioutil.ReadFile(args[1])
if err != nil {

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMzowNjozOC45MTAzNTggLTA0MDAgRURUIG09KzAuMDUwMTczMjM4IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiT25IN1lscERZSHZUVFFQcCJ9.27X3naS_OL75csQLEIFoPvvCyYb9R4D573z1Z1obm3TRGn4HyPFN_w.GXNcqKAUkxqM537Q.cT169l1KGKeOra6NXHbx3kEOEDw77Lom-42mwKV0bRQ_5WZU3kG5o6Ix14r7LFL1ajjc8rdXkuiUgKQyVXEXVpo-6WkEfk2-D_CQaaUgq0-UErT-9Pj7djI3FZkPPG-yxlVSiQXB1xMk38I_AxYwAakctpwHlEK_YC0-UycFmk25Qjezar_ni69KDRPyuqCYh3dyhimG6LgdpWF4pQHjtZPy5qIqcaE7TR0OeKvf9MtsaKEzpAQOeAvh.WbbZ_Fs8qk9rsN6FuWa2zg

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMzA4ODggLTA0MDAgRURUIG09KzAuMTYxMTg1Nzk2IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiU3ZNM1hfZU42VHhpSVg3aiJ9.BjL9xqItRueA3u4ujcWPTp4TJjO6w4NeR9G7p32ndo63ADDGJ7j1JQ.8Fd_XM52yxKhF31U.7Cm3SBAmp0u4QffFwBgeueuU3rWT1npSKI5CUROX5COgKbDpqj5CaT54k6UGeZiUxv8itQXglUpAsg7XsF-1LjbbUAfVxXe9H9n1GcfxrLov0L8_Ia-5JadXMXkbvv9jKyjhVg6kSziQXoHcHaeauF1X0_ij3a-UVH87cLqsdAI_OXtptyU8GonVyt_Q0n8mljonjZhj2c_bmXmHARYXZOmCj52dmzSpmkyQ9vqdhlRPco93-JWR5P1V.5J7fb71-1WKJ91g02D0JGg

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MzoyMy4yNjI0NzEgLTA0MDAgRURUIG09KzAuMDc2MzQ4MjA5IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiTE02SnY0RUdZNHBPLTQxWSJ9.wsqaCPHz_PlOH4_B3QlKT_4N9nTEjMzqn_Rqjq2ZM3vzf1dTO1_gjA.WVEaKSvNNWJ3ZaTT.eWrtCGCCplDULPw1QEyijVO_totUT5-6yx-TK4KP_BdKmhdEG8Bm319dXU33BchHthFa2VxDyB4NH_hsUenErJSKIJgJGoVc_AMwqrVZr0Wg0qJaay7jRGh1IRNXc0cuEsNpEek1C31tNaXjD2IuJzkicwdDT3BARFLFFdRhY97LG83YTvX0gVKyJFfjx8TAgUHZgpYyJMI4_vVajnneI-v1SYCY_VMbFTaCqWKFZdYOhu3x-hXfFBww.rxnMJbBz5OU4itr8nuyZgA

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yNDQwMTEgLTA0MDAgRURUIG09KzAuMTc0MzMxMjAxIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiYmlQLXR5Mnlvc3U2aHBvLSJ9.3drCmgYTeqS3PohaYKQc7i1fyjtOMuEPu_pDqMpT0UStPNDxG_LUDg.VS6Au9HoIruV0RiE._2BmFif-VbT_x4OD1NfsOCVFdL2MZfsG645SkptEKZAncOwHkKmWnBlKiV_LwnNzRBh-9eGGsCGfyou3zjUQRMDDHJOuW2EaVNmufmBWcAb9UoNO8O5kzPHwIvNqqJo5TQyjOviKCoP2PVcJXAwzttqDOw71B-9OuPwt_Ed4G6u8evwGIe08CzV6CKVImzj-AQg-1UI-uL06yFIEJ6CzB1DMdPR0qDQddP8pSYR_RTHnEUsii7HeKK1O.jqlYm4IZhXqe1k5kBQtguA

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMDQ1NjYgLTA0MDAgRURUIG09KzAuMTM0ODY0MzE4IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoidjVpMzRMY3NNbXduakEyMiJ9.XTokiwtSrKOIGREG7P7uaSfcV3hEr2ANHVUwaKbvLbuQVlTQO8fALw.bldSMLqfirE4GM9S.kNlvEojt1cavNW-nCaxX-Qk3tNm09xtXbuKppWbmMBUCf-_p-U_TWsnHuKbLon47RH1lxomrc1RpcfXwWhDEsGLwibtsjRdxz_2DGh124jeKOr4-Bl2raoPWdHKimm_cf5Ve17ChFfVy1AOaXwIr97ZdGWSU0FP8hOvv5_z5iUsuMK9T0DLxjz0162-_xSQMWWl4-hLknHz-QdO3oR_FpYo2K2eucNaFKmcN5Rn4s2n8FYLU9dIcopUF.WpNuRheBDoTiv3rK95yNjA

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMjAyOSAtMDQwMCBFRFQgbT0rMC4xNTA1ODc4MjYiLCJlbmMiOiJBMjU2R0NNIiwicDJjIjo4MTkyLCJwMnMiOiJJYXRUWEFMVjBfeXV2ZnNMIn0.kCXD6XXDHeBiXR-GqF10fbMWBvy3qe38r16b92Xu3oLpA5c0a6ByMg.ONW9ggBJFhdfIA8M.IWm_ioQqOCLSK3FbSwjAlEVtzRR4AAW7ceIXpKzv_voaCGDNgcr7xSyRR5N-YK-sVYInwUDrme8rb5T14mjcsNgoGdKKB2QXuApY-GcPwpe2Tf7TyiCxFp91VotHnrbjCh1NvWnjDC-SZNm8HDVolkYtiBPkIkk0uFGh35WWprkVpgEYFyNIFQ0PP3XD4D9A58X0UXdGEu5Q8VcJnt1p86XUyI1le_LufJUrWAz3o_89n3xKj-b6sYzQ.KZSIrdNzE97BxrTSNkMkTw

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMzowNjozOC45MDMwMjcgLTA0MDAgRURUIG09KzAuMDQyODQyNTY4IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiSjBxN2Zza0pGRUJJR25mYSJ9.W91-I2lpaBfacsUO2Xn2_tCadqztjGX7MjAkA6GKL4uMkqjEHDXyhw.c8uKD9z5w-jpSmq1.XGnt9JaOg0VT1cjg4RAlwC6Bsq9KowSF6wM6Ak1Y16Kq4sV3NnwA4CqJKnluIjAG6D4sfBKEs2FCHy5zux4uaOQ3Y5EJjRxWoTdBP7HahmO2-jsSFX_sPIzr86KIlKIqaYFJAOUqvaObOsQkX3EL_2-vDonSRMz32abg8thFS6mNi7NtM4xGXQ5Knrix-6OgzBmvWbn4Y0v82vNNWh8d4ubKf_RSEBV7CIWfuFg2CxfRq5EbUUmtMINF74eG52F8y8zjTDcn6n3qKLcecdr6s0n1tc7iq-f3s1EHnzPefwROPLFxiq0Zyt7N7vZCSowOElYZtgQWEg0dy6CIyZ274gNPlfLXMHA-kUsZj4Q_3w.sUPc7D8bBR4I3S-njXa4Ww

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MzoyMy4yNTg0OTUgLTA0MDAgRURUIG09KzAuMDcyMzcyNTM1IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiTW1XOFlVU0Nia2JUNW9iSSJ9.JyBrlPAvgtgYWwu0rcfTn6k9qvv6DywUotcWxPUiJncCBue2WPC5cQ.CmWeu5wMFFinUfiE.FA9k3Q_W8mBgSuJRkYV8h_U5YR2mDmW595L4DnFzuSFJ19Us0O1SQF9-xPJQAyjh4jli46o5mfFfsmU0ce1h4HwklW7AdrRJXVXZ0reZLjrdiojCbLvzyM9tsWInRXi6izUcwLggv2lNCXP5UIRpjMpUPiEC4GsHiwNH8qN04_feICxHuSWJ4mKLWEDtgKxHTrBqvaHT304UF6gRD-_W9_hWdEIj66-5HE4jlxcJAe22WdoF2Z1c3ujhm4piSfHaNnWYsZHLI5Jy1WhkFC2eULOe31c6eAeik5DyUUdWKvAoSiEk4H0Z9EcSbNzlW2rrU30WIIb-icK1qLID21WYurbxM8zvXl-CvhSM2VRN1g.tu_usvTlCOy3okBKmC6zHg

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4xOTkwMDUgLTA0MDAgRURUIG09KzAuMTI5MzA0MDMxIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiYzVYM09VdTBtQmJ4TFd4ayJ9.Pa4p1u-9N9x4E9-5rjUlReLsfH3TvTfAw-Dr1iV8z5ccAfnqLY1UWQ.Q41_cYh4c-C2zi3v.aFna4CwpZeGQBI2_ecOzlJSKypCV0NLCD8PCOnpYvY-k-HqoUFSeouFbuKeN9VaIo12JSZmjzGhfCAupZDBcSJisLVHOvaBAjl5XCOa8k49jb-aSopMI4HXQWatBJcnM65p9Hl1JrYOcnoKPxNKzJ4PiPQnHKv_VgAvWU_CBt6nnSjkwwVJjPMobgvNzeQTEFq-4pyziJNDbDWKUrQSrc-VaO-31JBlhpu6dPOJPFsnOcMyPc76po5cAQQdog-g79d59_Y4vj8s7qd-YPCHnWmoCbgf9w8vbpmJ4Y9evXZQz8A0-c0rFX7F96aZBYtQOeC1ZpRi0BMsbs_WHrpdN678HXej6YpfNDijQmiNYzQ.ayDBvX4W6GiGxAjN3ch8DA

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMTYwNTkgLTA0MDAgRURUIG09KzAuMTQ2MzU3MTI5IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoidmNnOXNfXzFvY0N1ZC1YTCJ9.hQ21Z1cXTw925V8Xxm_Gywb05S92rKEX54tnELv16xZvkgV8XiEj9A.4Wnaa4LemIi3bdIT.kEJyzWUvmLof5_bYbDePbrYyfjyqTLuUIOdnom3igOuwfzFDHtPuEb3rSLKjxj7LgJOmZVqZGP_ihW1sJQPXbK7ZuWA4zH_Wf-n5T8CFDmNIUhlUIb6sfd_ze-s45CE58hjKRkp7b5k61xBnMujZ5KC5Vk_JHUOUyZB5SqhTuEUJDjSSCFnMDJ1UYKEp23U__XFwcZonent4IMfM0fWvmA6NC2h0qLAMcKw9hbJ_yyNHt2I3lI5twthsAOsXKxUkjhx7c9Tc7BnttFxq-puD_QyjReExP77DzuueDJ-5KBd8PMgeiQMHoYM8e2NAAJU7MXe7voB-D8Ki2QcEgH7GfHNcr6vP1by3hvV5M32OXg.ifBDbtRjrXBOdH_jEORHgw

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMjY1OTEgLTA0MDAgRURUIG09KzAuMTU2ODg5MTU0IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiNUtTSWJtdGhCVHVSUnRjYyJ9.qD2C7cQK5P0EHy2Yr-uQZzMHep6U6n57z2LwWTmgxQp6m7ujLw-C-g.rmsltoJfFO4e56RZ.IDpVmduqe2WgyepT_-paXzcosHQzK6sfKY9JH16lT4QRVJ_lAozQOyZrW3X5MbgefrmtXGsoEIEFYhTDYBtXxrW7IqLaBhSCiA5MVwR403H3C2NkcygdGDdR-uDQGW3_bp7xnOhVL_3ofu0-7MQMMhZyz_wEmVW-aG7F6lN68TPaO5KTIqfnI8vOJyyZsSgB0M0gA3f-P4aar64YDTUdjgXPOSBkyRZr07JIOauGhTFXwmHWsDVBvGo3aIIx9ybAg_Blgo8ZAPqOJ6EYmA3J5RE2_LkfJjgI8dEpIFaviBHeWrG54AAN0klQ7trq9MOCpUGPc7PqySwiwTmxb2g4kFH9fR_yQ-g5g6mjj3JYVA.GRnNxd28SYmRt1I9twptPw

View File

@ -1 +0,0 @@
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMzg1MTQgLTA0MDAgRURUIG09KzAuMTY4ODExNzMwIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiUmZDZC0xeGh6OVJ3RzJfViJ9.hMyIYfHwLYAwJs6THPC30rWfhd1SRUl5po4ifTvln5cV_VHyHLW3MQ.ku5jtKB-G5acpq4v.s0oNPaUaRQbFk-X8AL8QitkI_SdBWB2BpBmRRbo2ZMAkq4x81hSC5p7RlSrM3OGTNFZ4yOrRgzdMv43YpCl7ZpJIypF4l7Hyvl_13jTjqzB7o81dEhl_10SI_Fw607VKCnwqq02_VoqD489EpMVuQ05Fg2pUT3M_mJMacGztORYVJrIWwzbyUiHfM4GlnaoUQaKfwbkHS2W2-1wOPTSWTLEBVJlRG1EAZR_upcPJolcAStjl8PY5EfkxXD56c8Xu6SI8LjMrJAXXg7lTqOGNOkt0v8M8UZWd95Gy2zH_KJm3ItYR_YjPoMIHh-_Cb2-0uoXNRyykW4EpGptp08n7QubSYltzXwaw_NgLP9KUmg.67EgfbLDNyvEYCR12Bjoew

View File

@ -24,7 +24,10 @@ along with their associated name and address.`,
}
func runListCmd(cmd *cobra.Command, _ []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
infos, err := clientCtx.Keyring.List()
if err != nil {

View File

@ -62,7 +62,7 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
var (
tmpDir string
migrator keyring.InfoImporter
migrator keyring.Importer
)
if dryRun, _ := cmd.Flags().GetBool(flags.FlagDryRun); dryRun {
@ -73,10 +73,10 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
defer os.RemoveAll(tmpDir)
migrator, err = keyring.NewInfoImporter(keyringServiceName, "test", tmpDir, buf)
migrator, err = keyring.New(keyringServiceName, keyring.BackendTest, tmpDir, buf)
} else {
backend, _ := cmd.Flags().GetString(flags.FlagKeyringBackend)
migrator, err = keyring.NewInfoImporter(keyringServiceName, backend, rootDir, buf)
migrator, err = keyring.New(keyringServiceName, backend, rootDir, buf)
}
if err != nil {
@ -86,12 +86,12 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
))
}
for _, key := range oldKeys {
legKeyInfo, err := legacyKb.Export(key.GetName())
if err != nil {
return err
}
if len(oldKeys) == 0 {
cmd.Print("Migration Aborted: no keys to migrate")
return nil
}
for _, key := range oldKeys {
keyName := key.GetName()
keyType := key.GetType()
@ -107,7 +107,12 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
}
if keyType != keyring.TypeLocal {
if err := migrator.Import(keyName, legKeyInfo); err != nil {
pubkeyArmor, err := legacyKb.ExportPubKey(keyName)
if err != nil {
return err
}
if err := migrator.ImportPubKey(keyName, pubkeyArmor); err != nil {
return err
}
@ -127,10 +132,11 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
return err
}
if err := migrator.Import(keyName, armoredPriv); err != nil {
if err := migrator.ImportPrivKey(keyName, armoredPriv, migratePassphrase); err != nil {
return err
}
}
cmd.Print("Migration Complete")
return err
}
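The command now drives the regular keyring.Importer interface rather than the removed InfoImporter. A minimal sketch of that interface in isolation, assuming homeDir, armoredPrivKey, armoredPubKey and passphrase are obtained elsewhere:
// Illustrative sketch of the keyring.Importer methods used above.
kr, err := keyring.New("cosmos", keyring.BackendTest, homeDir, os.Stdin)
if err != nil {
	return err
}
if err := kr.ImportPrivKey("mykey", armoredPrivKey, passphrase); err != nil {
	return err
}
if err := kr.ImportPubKey("watch-only", armoredPubKey); err != nil {
	return err
}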

View File

@ -1,7 +1,6 @@
package keys
import (
"context"
"encoding/hex"
"errors"
"fmt"
@ -86,7 +85,7 @@ hexadecimal into bech32 cosmos prefixed format and vice versa.
}
func parseKey(cmd *cobra.Command, args []string) error {
config, _ := sdk.GetSealedConfig(context.Background())
config, _ := sdk.GetSealedConfig(cmd.Context())
return doParseKey(cmd, config, args)
}

View File

@ -5,13 +5,13 @@ import (
"fmt"
"github.com/spf13/cobra"
tmcrypto "github.com/tendermint/tendermint/crypto"
"github.com/tendermint/tendermint/libs/cli"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
"github.com/cosmos/cosmos-sdk/crypto/ledger"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -53,7 +53,10 @@ consisting of all the keys provided by name and multisig threshold.`,
func runShowCmd(cmd *cobra.Command, args []string) (err error) {
var info keyring.Info
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
if len(args) == 1 {
info, err = fetchKey(clientCtx.Keyring, args[0])
@ -61,7 +64,7 @@ func runShowCmd(cmd *cobra.Command, args []string) (err error) {
return fmt.Errorf("%s is not a valid name or address: %v", args[0], err)
}
} else {
pks := make([]tmcrypto.PubKey, len(args))
pks := make([]cryptotypes.PubKey, len(args))
for i, keyref := range args {
info, err := fetchKey(clientCtx.Keyring, keyref)
if err != nil {

View File

@ -7,14 +7,13 @@ import (
"github.com/stretchr/testify/require"
"github.com/tendermint/tendermint/crypto"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/crypto/hd"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/testutil"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -23,7 +22,7 @@ func Test_multiSigKey_Properties(t *testing.T) {
tmpKey1 := secp256k1.GenPrivKeyFromSecret([]byte("mySecret"))
pk := multisig.NewLegacyAminoPubKey(
1,
[]crypto.PubKey{tmpKey1.PubKey()},
[]cryptotypes.PubKey{tmpKey1.PubKey()},
)
tmp := keyring.NewMultiInfo("myMultisig", pk)

Binary file not shown.

View File

@ -1 +1 @@
MANIFEST-000004
MANIFEST-000005

View File

@ -1 +1 @@
MANIFEST-000000
MANIFEST-000003

View File

@ -1,18 +1,30 @@
=============== Mar 30, 2020 (CEST) ===============
02:07:34.137606 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
02:07:34.144547 db@open opening
02:07:34.144770 version@stat F·[] S·0B[] Sc·[]
02:07:34.145843 db@janitor F·2 G·0
02:07:34.145875 db@open done T·1.315251ms
02:07:34.335635 db@close closing
02:07:34.335736 db@close done T·98.95µs
=============== Mar 30, 2020 (CEST) ===============
02:08:33.239115 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
02:08:33.239264 version@stat F·[] S·0B[] Sc·[]
02:08:33.239281 db@open opening
02:08:33.239310 journal@recovery F·1
02:08:33.239398 journal@recovery recovering @1
02:08:33.322008 memdb@flush created L0@2 N·4 S·391B "cos..ess,v4":"run..nfo,v3"
02:08:33.323091 version@stat F·[1] S·391B[391B] Sc·[0.25]
02:08:33.421979 db@janitor F·3 G·0
02:08:33.422153 db@open done T·182.707962ms
=============== Feb 2, 2021 (IST) ===============
00:03:25.348369 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
00:03:25.350695 db@open opening
00:03:25.350888 version@stat F·[] S·0B[] Sc·[]
00:03:25.351864 db@janitor F·2 G·0
00:03:25.351881 db@open done T·1.169825ms
00:03:25.351895 db@close closing
00:03:25.351929 db@close done T·33.042µs
=============== Feb 2, 2021 (IST) ===============
00:03:34.450638 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
00:03:34.450722 version@stat F·[] S·0B[] Sc·[]
00:03:34.450737 db@open opening
00:03:34.450765 journal@recovery F·1
00:03:34.450851 journal@recovery recovering @1
00:03:34.451173 version@stat F·[] S·0B[] Sc·[]
00:03:34.454278 db@janitor F·2 G·0
00:03:34.454298 db@open done T·3.548046ms
00:03:34.454307 db@close closing
00:03:34.454327 db@close done T·19.017µs
=============== Feb 2, 2021 (IST) ===============
00:03:42.025705 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
00:03:42.025892 version@stat F·[] S·0B[] Sc·[]
00:03:42.025907 db@open opening
00:03:42.025943 journal@recovery F·1
00:03:42.026790 journal@recovery recovering @2
00:03:42.026946 version@stat F·[] S·0B[] Sc·[]
00:03:42.031645 db@janitor F·2 G·0
00:03:42.031661 db@open done T·5.750008ms
00:03:42.283102 db@close closing
00:03:42.283162 db@close done T·58.775µs

Binary file not shown.

Binary file not shown.

View File

@ -57,6 +57,11 @@ func (ctx Context) GetFromAddress() sdk.AccAddress {
return ctx.FromAddress
}
// GetFeeGranterAddress returns the fee granter address from the context
func (ctx Context) GetFeeGranterAddress() sdk.AccAddress {
return ctx.FeeGranter
}
// GetFromName returns the key name for the current context.
func (ctx Context) GetFromName() string {
return ctx.FromName

32
client/rest/rest.go Normal file
View File

@ -0,0 +1,32 @@
package rest
import (
"net/http"
"github.com/gorilla/mux"
)
// DeprecationURL is the URL for migrating deprecated REST endpoints to newer ones.
// TODO Switch to `/` (not `/master`) once v0.40 docs are deployed.
// https://github.com/cosmos/cosmos-sdk/issues/8019
const DeprecationURL = "https://docs.cosmos.network/master/migrations/rest.html"
// addHTTPDeprecationHeaders is a mux middleware function for adding HTTP
// Deprecation headers to a http handler
func addHTTPDeprecationHeaders(h http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Deprecation", "true")
w.Header().Set("Link", "<"+DeprecationURL+">; rel=\"deprecation\"")
w.Header().Set("Warning", "199 - \"this endpoint is deprecated and may not work as before, see deprecation link for more info\"")
h.ServeHTTP(w, r)
})
}
// WithHTTPDeprecationHeaders returns a new *mux.Router, identical to its input
// but with the addition of HTTP Deprecation headers. This is used to mark legacy
// amino REST endpoints as deprecated in the REST API.
func WithHTTPDeprecationHeaders(r *mux.Router) *mux.Router {
subRouter := r.NewRoute().Subrouter()
subRouter.Use(addHTTPDeprecationHeaders)
return subRouter
}
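A short usage sketch, assuming a gorilla/mux router and a placeholder handler named legacyBlocksHandler:
// Illustrative sketch: mount a legacy amino REST handler behind the deprecation headers.
router := mux.NewRouter()
deprecated := rest.WithHTTPDeprecationHeaders(router)
deprecated.HandleFunc("/blocks/latest", legacyBlocksHandler).Methods("GET")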

View File

@ -22,8 +22,10 @@ func BlockCommand() *cobra.Command {
Short: "Get verified data for a the block at given height",
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
var height *int64
// optional height

61
client/rpc/rpc_test.go Normal file
View File

@ -0,0 +1,61 @@
package rpc_test
import (
"fmt"
"testing"
"github.com/stretchr/testify/suite"
ctypes "github.com/tendermint/tendermint/rpc/core/types"
"github.com/cosmos/cosmos-sdk/client/rpc"
"github.com/cosmos/cosmos-sdk/codec/legacy"
clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli"
"github.com/cosmos/cosmos-sdk/testutil/network"
"github.com/cosmos/cosmos-sdk/types/rest"
)
type IntegrationTestSuite struct {
suite.Suite
network *network.Network
}
func (s *IntegrationTestSuite) SetupSuite() {
s.T().Log("setting up integration test suite")
s.network = network.New(s.T(), network.DefaultConfig())
s.Require().NotNil(s.network)
s.Require().NoError(s.network.WaitForNextBlock())
}
func (s *IntegrationTestSuite) TearDownSuite() {
s.T().Log("tearing down integration test suite")
s.network.Cleanup()
}
func (s *IntegrationTestSuite) TestStatusCommand() {
val0 := s.network.Validators[0]
cmd := rpc.StatusCommand()
out, err := clitestutil.ExecTestCLICmd(val0.ClientCtx, cmd, []string{})
s.Require().NoError(err)
// Make sure the output has the validator moniker.
s.Require().Contains(out.String(), fmt.Sprintf("\"moniker\":\"%s\"", val0.Moniker))
}
func (s *IntegrationTestSuite) TestLatestBlocks() {
val0 := s.network.Validators[0]
res, err := rest.GetRequest(fmt.Sprintf("%s/blocks/latest", val0.APIAddress))
s.Require().NoError(err)
var result ctypes.ResultBlock
err = legacy.Cdc.UnmarshalJSON(res, &result)
s.Require().NoError(err)
}
func TestIntegrationTestSuite(t *testing.T) {
suite.Run(t, new(IntegrationTestSuite))
}

View File

@ -2,41 +2,75 @@ package rpc
import (
"context"
"fmt"
"net/http"
"github.com/spf13/cobra"
"github.com/tendermint/tendermint/libs/bytes"
"github.com/tendermint/tendermint/p2p"
ctypes "github.com/tendermint/tendermint/rpc/core/types"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/codec/legacy"
cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/types/rest"
"github.com/cosmos/cosmos-sdk/version"
"github.com/tendermint/tendermint/p2p"
)
// ValidatorInfo is info about the node's validator, same as Tendermint,
// except that we use our own PubKey.
type validatorInfo struct {
Address bytes.HexBytes
PubKey cryptotypes.PubKey
VotingPower int64
}
// ResultStatus is node's info, same as Tendermint, except that we use our own
// PubKey.
type resultStatus struct {
NodeInfo p2p.DefaultNodeInfo
SyncInfo ctypes.SyncInfo
ValidatorInfo validatorInfo
}
// StatusCommand returns the command to return the status of the network.
func StatusCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "status",
Short: "Query remote node for status",
RunE: func(cmd *cobra.Command, _ []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
status, err := getNodeStatus(clientCtx)
if err != nil {
return err
}
output, err := legacy.Cdc.MarshalJSON(status)
// `status` has TM pubkeys, we need to convert them to our pubkeys.
pk, err := cryptocodec.FromTmPubKeyInterface(status.ValidatorInfo.PubKey)
if err != nil {
return err
}
statusWithPk := resultStatus{
NodeInfo: status.NodeInfo,
SyncInfo: status.SyncInfo,
ValidatorInfo: validatorInfo{
Address: status.ValidatorInfo.Address,
PubKey: pk,
VotingPower: status.ValidatorInfo.VotingPower,
},
}
output, err := clientCtx.LegacyAmino.MarshalJSON(statusWithPk)
if err != nil {
return err
}
fmt.Println(string(output))
cmd.Println(string(output))
return nil
},
}

View File

@ -14,6 +14,8 @@ import (
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/rest"
)
@ -27,8 +29,10 @@ func ValidatorCommand() *cobra.Command {
Short: "Get the full tendermint validator set at given height",
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
var height *int64
// optional height
@ -46,12 +50,12 @@ func ValidatorCommand() *cobra.Command {
page, _ := cmd.Flags().GetInt(flags.FlagPage)
limit, _ := cmd.Flags().GetInt(flags.FlagLimit)
result, err := GetValidators(clientCtx, height, &page, &limit)
result, err := GetValidators(cmd.Context(), clientCtx, height, &page, &limit)
if err != nil {
return err
}
return clientCtx.PrintOutputLegacy(result)
return clientCtx.PrintObjectLegacy(result)
},
}
@ -65,10 +69,10 @@ func ValidatorCommand() *cobra.Command {
// Validator output in bech32 format
type ValidatorOutput struct {
Address sdk.ConsAddress `json:"address"`
PubKey string `json:"pub_key"`
ProposerPriority int64 `json:"proposer_priority"`
VotingPower int64 `json:"voting_power"`
Address sdk.ConsAddress `json:"address"`
PubKey cryptotypes.PubKey `json:"pub_key"`
ProposerPriority int64 `json:"proposer_priority"`
VotingPower int64 `json:"voting_power"`
}
// Validators at a certain height output in bech32 format
@ -98,29 +102,29 @@ func (rvo ResultValidatorsOutput) String() string {
return b.String()
}
func bech32ValidatorOutput(validator *tmtypes.Validator) (ValidatorOutput, error) {
bechValPubkey, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeConsPub, validator.PubKey)
func validatorOutput(validator *tmtypes.Validator) (ValidatorOutput, error) {
pk, err := cryptocodec.FromTmPubKeyInterface(validator.PubKey)
if err != nil {
return ValidatorOutput{}, err
}
return ValidatorOutput{
Address: sdk.ConsAddress(validator.Address),
PubKey: bechValPubkey,
PubKey: pk,
ProposerPriority: validator.ProposerPriority,
VotingPower: validator.VotingPower,
}, nil
}
// GetValidators from client
func GetValidators(clientCtx client.Context, height *int64, page, limit *int) (ResultValidatorsOutput, error) {
func GetValidators(ctx context.Context, clientCtx client.Context, height *int64, page, limit *int) (ResultValidatorsOutput, error) {
// get the node
node, err := clientCtx.GetNode()
if err != nil {
return ResultValidatorsOutput{}, err
}
validatorsRes, err := node.Validators(context.Background(), height, page, limit)
validatorsRes, err := node.Validators(ctx, height, page, limit)
if err != nil {
return ResultValidatorsOutput{}, err
}
@ -131,7 +135,7 @@ func GetValidators(clientCtx client.Context, height *int64, page, limit *int) (R
}
for i := 0; i < len(validatorsRes.Validators); i++ {
outputValidatorsRes.Validators[i], err = bech32ValidatorOutput(validatorsRes.Validators[i])
outputValidatorsRes.Validators[i], err = validatorOutput(validatorsRes.Validators[i])
if err != nil {
return ResultValidatorsOutput{}, err
}
@ -168,7 +172,7 @@ func ValidatorSetRequestHandlerFn(clientCtx client.Context) http.HandlerFunc {
return
}
output, err := GetValidators(clientCtx, &height, &page, &limit)
output, err := GetValidators(r.Context(), clientCtx, &height, &page, &limit)
if rest.CheckInternalServerError(w, err) {
return
}
@ -185,7 +189,7 @@ func LatestValidatorSetRequestHandlerFn(clientCtx client.Context) http.HandlerFu
return
}
output, err := GetValidators(clientCtx, nil, &page, &limit)
output, err := GetValidators(r.Context(), clientCtx, nil, &page, &limit)
if rest.CheckInternalServerError(w, err) {
return
}

View File

@ -3,8 +3,7 @@ package client
import (
"fmt"
"github.com/tendermint/tendermint/crypto"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
)
@ -26,7 +25,7 @@ func (t TestAccount) GetAddress() sdk.AccAddress {
}
// GetPubKey implements client Account.GetPubKey
func (t TestAccount) GetPubKey() crypto.PubKey {
func (t TestAccount) GetPubKey() cryptotypes.PubKey {
return nil
}

View File

@ -29,19 +29,15 @@ type Factory struct {
simulateAndExecute bool
}
const (
signModeDirect = "direct"
signModeAminoJSON = "amino-json"
)
// NewFactoryCLI creates a new Factory.
func NewFactoryCLI(clientCtx client.Context, flagSet *pflag.FlagSet) Factory {
signModeStr, _ := flagSet.GetString(flags.FlagSignMode)
signModeStr := clientCtx.SignModeStr
signMode := signing.SignMode_SIGN_MODE_UNSPECIFIED
switch signModeStr {
case signModeDirect:
case flags.SignModeDirect:
signMode = signing.SignMode_SIGN_MODE_DIRECT
case signModeAminoJSON:
case flags.SignModeLegacyAminoJSON:
signMode = signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON
}
@ -120,7 +116,7 @@ func (f Factory) WithGas(gas uint64) Factory {
// WithFees returns a copy of the Factory with an updated fee.
func (f Factory) WithFees(fees string) Factory {
parsedFees, err := sdk.ParseCoins(fees)
parsedFees, err := sdk.ParseCoinsNormalized(fees)
if err != nil {
panic(err)
}

View File

@ -61,6 +61,7 @@ func CopyTx(tx signing.Tx, builder client.TxBuilder, ignoreSignatureError bool)
builder.SetMemo(tx.GetMemo())
builder.SetFeeAmount(tx.GetFee())
builder.SetGasLimit(tx.GetGas())
builder.SetTimeoutHeight(tx.GetTimeoutHeight())
return nil
}

View File

@ -21,8 +21,9 @@ import (
)
const (
memo = "waboom"
gas = uint64(10000)
memo = "waboom"
gas = uint64(10000)
timeoutHeight = 5
)
var (
@ -47,6 +48,7 @@ func buildTestTx(t *testing.T, builder client.TxBuilder) {
require.NoError(t, err)
err = builder.SetSignatures(sig)
require.NoError(t, err)
builder.SetTimeoutHeight(timeoutHeight)
}
type TestSuite struct {
@ -105,6 +107,7 @@ func (s *TestSuite) TestConvertTxToStdTx() {
s.Require().Equal(gas, stdTx.Fee.Gas)
s.Require().Equal(fee, stdTx.Fee.Amount)
s.Require().Equal(msg, stdTx.Msgs[0])
s.Require().Equal(timeoutHeight, stdTx.TimeoutHeight)
s.Require().Equal(sig.PubKey, stdTx.Signatures[0].PubKey)
s.Require().Equal(sig.Data.(*signing2.SingleSignatureData).Signature, stdTx.Signatures[0].Signature)
@ -123,6 +126,7 @@ func (s *TestSuite) TestConvertTxToStdTx() {
s.Require().Equal(gas, stdTx.Fee.Gas)
s.Require().Equal(fee, stdTx.Fee.Amount)
s.Require().Equal(msg, stdTx.Msgs[0])
s.Require().Equal(timeoutHeight, stdTx.TimeoutHeight)
s.Require().Empty(stdTx.Signatures)
// std tx

View File

@ -8,20 +8,19 @@ import (
"os"
"github.com/spf13/pflag"
"github.com/tendermint/tendermint/crypto"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
sim "github.com/cosmos/cosmos-sdk/client/grpc/simulate"
"github.com/cosmos/cosmos-sdk/client/input"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
"github.com/cosmos/cosmos-sdk/types/rest"
"github.com/cosmos/cosmos-sdk/types/tx"
"github.com/cosmos/cosmos-sdk/types/tx/signing"
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
authtx "github.com/cosmos/cosmos-sdk/x/auth/tx"
)
// GenerateOrBroadcastTxCLI will either generate and print an unsigned transaction
@ -118,7 +117,8 @@ func BroadcastTx(clientCtx client.Context, txf Factory, msgs ...sdk.Msg) error {
}
}
err = Sign(txf, clientCtx.GetFromName(), tx)
tx.SetFeeGranter(clientCtx.GetFeeGranterAddress())
err = Sign(txf, clientCtx.GetFromName(), tx, true)
if err != nil {
return err
}
@ -134,7 +134,7 @@ func BroadcastTx(clientCtx client.Context, txf Factory, msgs ...sdk.Msg) error {
return err
}
return clientCtx.PrintOutput(res)
return clientCtx.PrintProto(res)
}
// WriteGeneratedTxResponse writes a generated unsigned transaction to the
@ -264,22 +264,15 @@ func BuildSimTx(txf Factory, msgs ...sdk.Msg) ([]byte, error) {
},
Sequence: txf.Sequence(),
}
if err := txb.SetSignatures(sig); err != nil {
return nil, err
}
any, ok := txb.(codectypes.IntoAny)
if !ok {
return nil, fmt.Errorf("cannot simulate tx that cannot be wrapped into any")
}
cached := any.AsAny().GetCachedValue()
protoTx, ok := cached.(*tx.Tx)
protoProvider, ok := txb.(authtx.ProtoTxProvider)
if !ok {
return nil, fmt.Errorf("cannot simulate amino tx")
}
simReq := sim.SimulateRequest{Tx: protoTx}
simReq := tx.SimulateRequest{Tx: protoProvider.GetProtoTx()}
return simReq.Marshal()
}
@ -288,21 +281,23 @@ func BuildSimTx(txf Factory, msgs ...sdk.Msg) ([]byte, error) {
// simulation response obtained by the query and the adjusted gas amount.
func CalculateGas(
queryFunc func(string, []byte) ([]byte, int64, error), txf Factory, msgs ...sdk.Msg,
) (sim.SimulateResponse, uint64, error) {
) (tx.SimulateResponse, uint64, error) {
txBytes, err := BuildSimTx(txf, msgs...)
if err != nil {
return sim.SimulateResponse{}, 0, err
return tx.SimulateResponse{}, 0, err
}
bz, _, err := queryFunc("/cosmos.base.simulate.v1beta1.SimulateService/Simulate", txBytes)
// TODO This should use the generated tx service Client.
// https://github.com/cosmos/cosmos-sdk/issues/7726
bz, _, err := queryFunc("/cosmos.tx.v1beta1.Service/Simulate", txBytes)
if err != nil {
return sim.SimulateResponse{}, 0, err
return tx.SimulateResponse{}, 0, err
}
var simRes sim.SimulateResponse
var simRes tx.SimulateResponse
if err := simRes.Unmarshal(bz); err != nil {
return sim.SimulateResponse{}, 0, err
return tx.SimulateResponse{}, 0, err
}
return simRes, uint64(txf.GasAdjustment() * float64(simRes.GasInfo.GasUsed)), nil
@ -342,7 +337,7 @@ func PrepareFactory(clientCtx client.Context, txf Factory) (Factory, error) {
// corresponding SignatureV2 if the signing is successful.
func SignWithPrivKey(
signMode signing.SignMode, signerData authsigning.SignerData,
txBuilder client.TxBuilder, priv crypto.PrivKey, txConfig client.TxConfig,
txBuilder client.TxBuilder, priv cryptotypes.PrivKey, txConfig client.TxConfig,
accSeq uint64,
) (signing.SignatureV2, error) {
var sigV2 signing.SignatureV2
@ -374,10 +369,21 @@ func SignWithPrivKey(
return sigV2, nil
}
// Sign signs a given tx with the provided name and passphrase. The bytes signed
// over are canonical. The resulting signature will be set on the transaction.
func checkMultipleSigners(mode signing.SignMode, tx authsigning.Tx) error {
if mode == signing.SignMode_SIGN_MODE_DIRECT &&
len(tx.GetSigners()) > 1 {
return sdkerrors.Wrap(sdkerrors.ErrNotSupported, "Signing in DIRECT mode is only supported for transactions with one signer only")
}
return nil
}
// Sign signs a given tx with a named key. The bytes signed over are canonical.
// The resulting signature will be added to the transaction builder overwriting the previous
// ones if overwrite=true (otherwise, the signature will be appended).
// Signing a transaction with multiple signers in the DIRECT mode is not supported and will
// return an error.
// An error is returned upon failure.
func Sign(txf Factory, name string, txBuilder client.TxBuilder) error {
func Sign(txf Factory, name string, txBuilder client.TxBuilder, overwriteSig bool) error {
if txf.keybase == nil {
return errors.New("keybase must be set prior to signing a transaction")
}
@ -387,12 +393,14 @@ func Sign(txf Factory, name string, txBuilder client.TxBuilder) error {
// use the SignModeHandler's default mode if unspecified
signMode = txf.txConfig.SignModeHandler().DefaultMode()
}
if err := checkMultipleSigners(signMode, txBuilder.GetTx()); err != nil {
return err
}
key, err := txf.keybase.Key(name)
if err != nil {
return err
}
pubKey := key.GetPubKey()
signerData := authsigning.SignerData{
ChainID: txf.chainID,
@ -417,18 +425,25 @@ func Sign(txf Factory, name string, txBuilder client.TxBuilder) error {
Data: &sigData,
Sequence: txf.Sequence(),
}
var prevSignatures []signing.SignatureV2
if !overwriteSig {
prevSignatures, err = txBuilder.GetTx().GetSignaturesV2()
if err != nil {
return err
}
}
if err := txBuilder.SetSignatures(sig); err != nil {
return err
}
// Generate the bytes to be signed.
signBytes, err := txf.txConfig.SignModeHandler().GetSignBytes(signMode, signerData, txBuilder.GetTx())
bytesToSign, err := txf.txConfig.SignModeHandler().GetSignBytes(signMode, signerData, txBuilder.GetTx())
if err != nil {
return err
}
// Sign those bytes
sigBytes, _, err := txf.keybase.Sign(name, signBytes)
sigBytes, _, err := txf.keybase.Sign(name, bytesToSign)
if err != nil {
return err
}
@ -444,8 +459,11 @@ func Sign(txf Factory, name string, txBuilder client.TxBuilder) error {
Sequence: txf.Sequence(),
}
// And here the tx is populated with the signature
return txBuilder.SetSignatures(sig)
if overwriteSig {
return txBuilder.SetSignatures(sig)
}
prevSignatures = append(prevSignatures, sig)
return txBuilder.SetSignatures(prevSignatures...)
}
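A compact sketch of the append-versus-overwrite behaviour, assuming a prepared Factory txf, a txBuilder carrying a two-signer message, and keys named "alice" and "bob" in the keyring; LEGACY_AMINO_JSON mode is used because checkMultipleSigners rejects multi-signer DIRECT signing:
// Illustrative sketch only; per-signer sequence/account numbers are elided.
txfAmino := txf.WithSignMode(signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON)
if err := Sign(txfAmino, "alice", txBuilder, true); err != nil { // overwrite any existing sigs
	return err
}
if err := Sign(txfAmino, "bob", txBuilder, false); err != nil { // append next to alice's sig
	return err
}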
// GasEstimateResponse defines a response definition for tx gas estimation.

View File

@ -7,12 +7,14 @@ import (
"github.com/stretchr/testify/require"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
"github.com/cosmos/cosmos-sdk/client/tx"
"github.com/cosmos/cosmos-sdk/crypto/hd"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/simapp"
sdk "github.com/cosmos/cosmos-sdk/types"
txtypes "github.com/cosmos/cosmos-sdk/types/tx"
signingtypes "github.com/cosmos/cosmos-sdk/types/tx/signing"
"github.com/cosmos/cosmos-sdk/x/auth/signing"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
)
@ -28,7 +30,7 @@ func TestCalculateGas(t *testing.T) {
if wantErr {
return nil, 0, errors.New("query failed")
}
simRes := &simulate.SimulateResponse{
simRes := &txtypes.SimulateResponse{
GasInfo: &sdk.GasInfo{GasUsed: gasUsed, GasWanted: gasUsed},
Result: &sdk.Result{Data: []byte("tx data"), Log: "log"},
}
@ -121,49 +123,110 @@ func TestBuildUnsignedTx(t *testing.T) {
}
func TestSign(t *testing.T) {
requireT := require.New(t)
path := hd.CreateHDPath(118, 0, 0).String()
kr, err := keyring.New(t.Name(), "test", t.TempDir(), nil)
require.NoError(t, err)
requireT.NoError(err)
var from = "test_sign"
var from1 = "test_key1"
var from2 = "test_key2"
_, seed, err := kr.NewMnemonic(from, keyring.English, path, hd.Secp256k1)
require.NoError(t, err)
require.NoError(t, kr.Delete(from))
// create a new key using a mnemonic generator and test if we can reuse seed to recreate that account
_, seed, err := kr.NewMnemonic(from1, keyring.English, path, hd.Secp256k1)
requireT.NoError(err)
requireT.NoError(kr.Delete(from1))
info1, _, err := kr.NewMnemonic(from1, keyring.English, path, hd.Secp256k1)
requireT.NoError(err)
info, err := kr.NewAccount(from, seed, "", path, hd.Secp256k1)
require.NoError(t, err)
info2, err := kr.NewAccount(from2, seed, "", path, hd.Secp256k1)
requireT.NoError(err)
txf := tx.Factory{}.
pubKey1 := info1.GetPubKey()
pubKey2 := info2.GetPubKey()
requireT.NotEqual(pubKey1.Bytes(), pubKey2.Bytes())
t.Log("Pub keys:", pubKey1, pubKey2)
txfNoKeybase := tx.Factory{}.
WithTxConfig(NewTestTxConfig()).
WithAccountNumber(50).
WithSequence(23).
WithFees("50stake").
WithMemo("memo").
WithChainID("test-chain")
msg := banktypes.NewMsgSend(info.GetAddress(), sdk.AccAddress("to"), nil)
txn, err := tx.BuildUnsignedTx(txf, msg)
require.NoError(t, err)
t.Log("should failed if txf without keyring")
err = tx.Sign(txf, from, txn)
require.Error(t, err)
txf = tx.Factory{}.
txfDirect := txfNoKeybase.
WithKeybase(kr).
WithTxConfig(NewTestTxConfig()).
WithAccountNumber(50).
WithSequence(23).
WithFees("50stake").
WithMemo("memo").
WithChainID("test-chain")
WithSignMode(signingtypes.SignMode_SIGN_MODE_DIRECT)
txfAmino := txfDirect.
WithSignMode(signingtypes.SignMode_SIGN_MODE_LEGACY_AMINO_JSON)
msg1 := banktypes.NewMsgSend(info1.GetAddress(), sdk.AccAddress("to"), nil)
msg2 := banktypes.NewMsgSend(info2.GetAddress(), sdk.AccAddress("to"), nil)
txb, err := tx.BuildUnsignedTx(txfNoKeybase, msg1, msg2)
requireT.NoError(err)
txb2, err := tx.BuildUnsignedTx(txfNoKeybase, msg1, msg2)
requireT.NoError(err)
txbSimple, err := tx.BuildUnsignedTx(txfNoKeybase, msg2)
requireT.NoError(err)
t.Log("should succeed if txf with keyring")
err = tx.Sign(txf, from, txn)
require.NoError(t, err)
testCases := []struct {
name string
txf tx.Factory
txb client.TxBuilder
from string
overwrite bool
expectedPKs []cryptotypes.PubKey
matchingSigs []int // if not nil, check matching signature against old ones.
}{
{"should fail if txf without keyring",
txfNoKeybase, txb, from1, true, nil, nil},
{"should fail for non existing key",
txfAmino, txb, "unknown", true, nil, nil},
{"amino: should succeed with keyring",
txfAmino, txbSimple, from1, true, []cryptotypes.PubKey{pubKey1}, nil},
{"direct: should succeed with keyring",
txfDirect, txbSimple, from1, true, []cryptotypes.PubKey{pubKey1}, nil},
t.Log("should fail for non existing key")
err = tx.Sign(txf, "non_existing_key", txn)
require.Error(t, err)
/**** test double sign Amino mode ****/
{"amino: should sign multi-signers tx",
txfAmino, txb, from1, true, []cryptotypes.PubKey{pubKey1}, nil},
{"amino: should append a second signature and not overwrite",
txfAmino, txb, from2, false, []cryptotypes.PubKey{pubKey1, pubKey2}, []int{0, 0}},
{"amino: should overwrite a signature",
txfAmino, txb, from2, true, []cryptotypes.PubKey{pubKey2}, []int{1, 0}},
/**** test double sign Direct mode
signing transaction with more than 2 signers should fail in DIRECT mode ****/
{"direct: should fail to append a signature with different mode",
txfDirect, txb, from1, false, []cryptotypes.PubKey{}, nil},
{"direct: should fail to sign multi-signers tx",
txfDirect, txb2, from1, false, []cryptotypes.PubKey{}, nil},
{"direct: should fail to overwrite multi-signers tx",
txfDirect, txb2, from1, true, []cryptotypes.PubKey{}, nil},
}
var prevSigs []signingtypes.SignatureV2
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
err = tx.Sign(tc.txf, tc.from, tc.txb, tc.overwrite)
if len(tc.expectedPKs) == 0 {
requireT.Error(err)
} else {
requireT.NoError(err)
sigs := testSigners(requireT, tc.txb.GetTx(), tc.expectedPKs...)
if tc.matchingSigs != nil {
requireT.Equal(prevSigs[tc.matchingSigs[0]], sigs[tc.matchingSigs[1]])
}
prevSigs = sigs
}
})
}
}
func testSigners(require *require.Assertions, tr signing.Tx, pks ...cryptotypes.PubKey) []signingtypes.SignatureV2 {
sigs, err := tr.GetSignaturesV2()
require.NoError(err)
require.Len(sigs, len(pks))
for i := range pks {
require.True(sigs[i].PubKey.Equals(pks[i]), "Signature is signed with a wrong pubkey. Got: %s, expected: %s", sigs[i].PubKey, pks[i])
}
return sigs
}

View File

@ -42,5 +42,6 @@ type (
SetFeeAmount(amount sdk.Coins)
SetGasLimit(limit uint64)
SetTimeoutHeight(height uint64)
SetFeeGranter(feeGranter sdk.AccAddress)
}
)

View File

@ -1,6 +1,8 @@
package codec
import "github.com/gogo/protobuf/proto"
import (
"github.com/gogo/protobuf/proto"
)
// AminoCodec defines a codec that utilizes Codec for both binary and JSON
// encoding.
@ -78,3 +80,45 @@ func (ac *AminoCodec) UnmarshalJSON(bz []byte, ptr proto.Message) error {
func (ac *AminoCodec) MustUnmarshalJSON(bz []byte, ptr proto.Message) {
ac.LegacyAmino.MustUnmarshalJSON(bz, ptr)
}
// MarshalInterface is a convenience function for amino marshaling interfaces.
// The `i` must be an interface.
// NOTE: to marshal a concrete type, you should use MarshalBinaryBare instead
func (ac *AminoCodec) MarshalInterface(i proto.Message) ([]byte, error) {
if err := assertNotNil(i); err != nil {
return nil, err
}
return ac.LegacyAmino.MarshalBinaryBare(i)
}
// UnmarshalInterface is a convenience function for amino unmarshaling interfaces.
// `ptr` must be a pointer to an interface.
// NOTE: to unmarshal a concrete type, you should use UnmarshalBinaryBare instead
//
// Example:
// var x MyInterface
// err := cdc.UnmarshalInterface(bz, &x)
func (ac *AminoCodec) UnmarshalInterface(bz []byte, ptr interface{}) error {
return ac.LegacyAmino.UnmarshalBinaryBare(bz, ptr)
}
// MarshalInterfaceJSON is a convenience function for amino marshaling interfaces.
// The `i` must be an interface.
// NOTE: to marshal a concrete type, you should use MarshalJSON instead
func (ac *AminoCodec) MarshalInterfaceJSON(i proto.Message) ([]byte, error) {
if err := assertNotNil(i); err != nil {
return nil, err
}
return ac.LegacyAmino.MarshalJSON(i)
}
// UnmarshalInterfaceJSON is a convenience function for amino unmarshaling interfaces.
// `ptr` must be a pointer to an interface.
// NOTE: to unmarshal a concrete type, you should use UnmarshalJSON instead
//
// Example:
// var x MyInterface
// err := cdc.UnmarshalInterfaceJSON(bz, &x)
func (ac *AminoCodec) UnmarshalInterfaceJSON(bz []byte, ptr interface{}) error {
return ac.LegacyAmino.UnmarshalJSON(bz, ptr)
}
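A minimal round-trip sketch for these helpers, assuming a *LegacyAmino cdc with the testdata Animal interface and its concrete types registered (as the test file further below does):
// Illustrative sketch of MarshalInterface/UnmarshalInterface round-tripping.
aminoCdc := codec.NewAminoCodec(cdc)
bz, err := aminoCdc.MarshalInterface(&testdata.Dog{Name: "rufus"})
if err != nil {
	return err
}
var animal testdata.Animal
if err := aminoCdc.UnmarshalInterface(bz, &animal); err != nil {
	return err
}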

View File

@ -5,131 +5,38 @@ import (
"errors"
"testing"
"github.com/cosmos/cosmos-sdk/codec/types"
"github.com/stretchr/testify/require"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/simapp"
"github.com/cosmos/cosmos-sdk/testutil/testdata"
"github.com/cosmos/cosmos-sdk/x/auth/client/rest"
"github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx"
)
func createTestCodec() *codec.LegacyAmino {
cdc := codec.NewLegacyAmino()
cdc.RegisterInterface((*testdata.Animal)(nil), nil)
cdc.RegisterConcrete(testdata.Dog{}, "testdata/Dog", nil)
cdc.RegisterConcrete(testdata.Cat{}, "testdata/Cat", nil)
// NOTE: since we unmarshal interface using pointers, we need to register a pointer
// types here.
cdc.RegisterConcrete(&testdata.Dog{}, "testdata/Dog", nil)
cdc.RegisterConcrete(&testdata.Cat{}, "testdata/Cat", nil)
return cdc
}
func TestAminoMarsharlInterface(t *testing.T) {
cdc := codec.NewAminoCodec(createTestCodec())
m := interfaceMarshaler{cdc.MarshalInterface, cdc.UnmarshalInterface}
testInterfaceMarshaling(require.New(t), m, true)
m = interfaceMarshaler{cdc.MarshalInterfaceJSON, cdc.UnmarshalInterfaceJSON}
testInterfaceMarshaling(require.New(t), m, false)
}
func TestAminoCodec(t *testing.T) {
any, err := types.NewAnyWithValue(&testdata.Dog{Name: "rufus"})
require.NoError(t, err)
testCases := []struct {
name string
codec *codec.AminoCodec
input codec.ProtoMarshaler
recv codec.ProtoMarshaler
marshalErr bool
unmarshalErr bool
}{
{
"valid encoding and decoding",
codec.NewAminoCodec(createTestCodec()),
&testdata.Dog{Name: "rufus"},
&testdata.Dog{},
false,
false,
},
{
"invalid decode type",
codec.NewAminoCodec(createTestCodec()),
&testdata.Dog{Name: "rufus"},
&testdata.Cat{},
false,
true,
},
{
"any marshaling",
codec.NewAminoCodec(createTestCodec()),
&testdata.HasAnimal{Animal: any},
&testdata.HasAnimal{Animal: any},
false,
false,
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
bz, err := tc.codec.MarshalBinaryBare(tc.input)
if tc.marshalErr {
require.Error(t, err)
require.Panics(t, func() { tc.codec.MustMarshalBinaryBare(tc.input) })
} else {
var bz2 []byte
require.NoError(t, err)
require.NotPanics(t, func() { bz2 = tc.codec.MustMarshalBinaryBare(tc.input) })
require.Equal(t, bz, bz2)
err := tc.codec.UnmarshalBinaryBare(bz, tc.recv)
if tc.unmarshalErr {
require.Error(t, err)
require.Panics(t, func() { tc.codec.MustUnmarshalBinaryBare(bz, tc.recv) })
} else {
require.NoError(t, err)
require.NotPanics(t, func() { tc.codec.MustUnmarshalBinaryBare(bz, tc.recv) })
require.Equal(t, tc.input, tc.recv)
}
}
bz, err = tc.codec.MarshalBinaryLengthPrefixed(tc.input)
if tc.marshalErr {
require.Error(t, err)
require.Panics(t, func() { tc.codec.MustMarshalBinaryLengthPrefixed(tc.input) })
} else {
var bz2 []byte
require.NoError(t, err)
require.NotPanics(t, func() { bz2 = tc.codec.MustMarshalBinaryLengthPrefixed(tc.input) })
require.Equal(t, bz, bz2)
err := tc.codec.UnmarshalBinaryLengthPrefixed(bz, tc.recv)
if tc.unmarshalErr {
require.Error(t, err)
require.Panics(t, func() { tc.codec.MustUnmarshalBinaryLengthPrefixed(bz, tc.recv) })
} else {
require.NoError(t, err)
require.NotPanics(t, func() { tc.codec.MustUnmarshalBinaryLengthPrefixed(bz, tc.recv) })
require.Equal(t, tc.input, tc.recv)
}
}
bz, err = tc.codec.MarshalJSON(tc.input)
if tc.marshalErr {
require.Error(t, err)
require.Panics(t, func() { tc.codec.MustMarshalJSON(tc.input) })
} else {
var bz2 []byte
require.NoError(t, err)
require.NotPanics(t, func() { bz2 = tc.codec.MustMarshalJSON(tc.input) })
require.Equal(t, bz, bz2)
err := tc.codec.UnmarshalJSON(bz, tc.recv)
if tc.unmarshalErr {
require.Error(t, err)
require.Panics(t, func() { tc.codec.MustUnmarshalJSON(bz, tc.recv) })
} else {
require.NoError(t, err)
require.NotPanics(t, func() { tc.codec.MustUnmarshalJSON(bz, tc.recv) })
require.Equal(t, tc.input, tc.recv)
}
}
})
}
testMarshaling(t, codec.NewAminoCodec(createTestCodec()))
}
func TestAminoCodecMarshalJSONIndent(t *testing.T) {
@@ -210,3 +117,25 @@ func TestAminoCodecUnpackAnyFails(t *testing.T) {
require.Error(t, err)
require.Equal(t, err, errors.New("AminoCodec can't handle unpack protobuf Any's"))
}
func TestAminoCodecFullDecodeAndEncode(t *testing.T) {
// This tx comes from https://github.com/cosmos/cosmos-sdk/issues/8117.
txSigned := `{"type":"cosmos-sdk/StdTx","value":{"msg":[{"type":"cosmos-sdk/MsgCreateValidator","value":{"description":{"moniker":"fulltest","identity":"satoshi","website":"example.com","details":"example inc"},"commission":{"rate":"0.500000000000000000","max_rate":"1.000000000000000000","max_change_rate":"0.200000000000000000"},"min_self_delegation":"1000000","delegator_address":"cosmos14pt0q5cwf38zt08uu0n6yrstf3rndzr5057jys","validator_address":"cosmosvaloper14pt0q5cwf38zt08uu0n6yrstf3rndzr52q28gr","pubkey":{"type":"tendermint/PubKeyEd25519","value":"CYrOiM3HtS7uv1B1OAkknZnFYSRpQYSYII8AtMMtev0="},"value":{"denom":"umuon","amount":"700000000"}}}],"fee":{"amount":[{"denom":"umuon","amount":"6000"}],"gas":"160000"},"signatures":[{"pub_key":{"type":"tendermint/PubKeySecp256k1","value":"AwAOXeWgNf1FjMaayrSnrOOKz+Fivr6DiI/i0x0sZCHw"},"signature":"RcnfS/u2yl7uIShTrSUlDWvsXo2p2dYu6WJC8VDVHMBLEQZWc8bsINSCjOnlsIVkUNNe1q/WCA9n3Gy1+0zhYA=="}],"memo":"","timeout_height":"0"}}`
var legacyCdc = simapp.MakeTestEncodingConfig().Amino
var tx legacytx.StdTx
err := legacyCdc.UnmarshalJSON([]byte(txSigned), &tx)
require.NoError(t, err)
// Marshaling/unmarshaling the tx should work.
marshaledTx, err := legacyCdc.MarshalJSON(tx)
require.NoError(t, err)
require.Equal(t, string(marshaledTx), txSigned)
// Marshaling/unmarshaling the tx wrapped in a struct should work.
txRequest := &rest.BroadcastReq{
Mode: "block",
Tx: tx,
}
_, err = legacyCdc.MarshalJSON(txRequest)
require.NoError(t, err)
}

View File

@@ -1,44 +0,0 @@
package codec
import (
"fmt"
"github.com/gogo/protobuf/proto"
"github.com/cosmos/cosmos-sdk/codec/types"
)
// MarshalAny is a convenience function for packing the provided value in an
// Any and then proto marshaling it to bytes
func MarshalAny(m BinaryMarshaler, x interface{}) ([]byte, error) {
msg, ok := x.(proto.Message)
if !ok {
return nil, fmt.Errorf("can't proto marshal %T", x)
}
any := &types.Any{}
err := any.Pack(msg)
if err != nil {
return nil, err
}
return m.MarshalBinaryBare(any)
}
// UnmarshalAny is a convenience function for proto unmarshaling an Any from
// bz and then unpacking it to the interface pointer passed in as iface using
// the provided AnyUnpacker or returning an error
//
// Ex:
// var x MyInterface
// err := UnmarshalAny(unpacker, &x, bz)
func UnmarshalAny(m BinaryMarshaler, iface interface{}, bz []byte) error {
any := &types.Any{}
err := m.UnmarshalBinaryBare(bz, any)
if err != nil {
return err
}
return m.UnpackAny(any, iface)
}
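// NOTE: the removed MarshalAny/UnmarshalAny helpers are superseded by the codec
// methods; as the test changes below show, call sites migrate roughly like this
// (illustrative sketch):
//
//  // before
//  bz, err := codec.MarshalAny(cdc, kitty)
//  err = codec.UnmarshalAny(cdc, &animal, bz)
//
//  // after
//  bz, err := cdc.MarshalInterface(kitty)
//  err = cdc.UnmarshalInterface(bz, &animal)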

View File

@@ -1,7 +1,6 @@
package codec_test
import (
"errors"
"testing"
"github.com/stretchr/testify/require"
@@ -28,38 +27,29 @@ func TestMarshalAny(t *testing.T) {
cdc := codec.NewProtoCodec(registry)
kitty := &testdata.Cat{Moniker: "Kitty"}
bz, err := codec.MarshalAny(cdc, kitty)
bz, err := cdc.MarshalInterface(kitty)
require.NoError(t, err)
var animal testdata.Animal
// empty registry should fail
err = codec.UnmarshalAny(cdc, &animal, bz)
err = cdc.UnmarshalInterface(bz, &animal)
require.Error(t, err)
// wrong type registration should fail
registry.RegisterImplementations((*testdata.Animal)(nil), &testdata.Dog{})
err = codec.UnmarshalAny(cdc, &animal, bz)
err = cdc.UnmarshalInterface(bz, &animal)
require.Error(t, err)
// should pass
registry = NewTestInterfaceRegistry()
cdc = codec.NewProtoCodec(registry)
err = codec.UnmarshalAny(cdc, &animal, bz)
err = cdc.UnmarshalInterface(bz, &animal)
require.NoError(t, err)
require.Equal(t, kitty, animal)
// nil should fail
registry = NewTestInterfaceRegistry()
err = codec.UnmarshalAny(cdc, nil, bz)
err = cdc.UnmarshalInterface(bz, nil)
require.Error(t, err)
}
func TestMarshalAnyNonProtoErrors(t *testing.T) {
registry := types.NewInterfaceRegistry()
cdc := codec.NewProtoCodec(registry)
_, err := codec.MarshalAny(cdc, 29)
require.Error(t, err)
require.Equal(t, err, errors.New("can't proto marshal int"))
}

View File

@@ -32,12 +32,17 @@ type (
UnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler) error
MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler)
MarshalInterface(i proto.Message) ([]byte, error)
UnmarshalInterface(bz []byte, ptr interface{}) error
types.AnyUnpacker
}
JSONMarshaler interface {
MarshalJSON(o proto.Message) ([]byte, error)
MustMarshalJSON(o proto.Message) []byte
MarshalInterfaceJSON(i proto.Message) ([]byte, error)
UnmarshalInterfaceJSON(bz []byte, ptr interface{}) error
UnmarshalJSON(bz []byte, ptr proto.Message) error
MustUnmarshalJSON(bz []byte, ptr proto.Message)
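// A minimal proto-side sketch of the interface methods added above (a sketch only;
// the registry name and testdata types are illustrative, see codec_common_test.go below):
//
//  registry := types.NewInterfaceRegistry()
//  registry.RegisterInterface("testdata.Animal", (*testdata.Animal)(nil), &testdata.Dog{})
//  cdc := codec.NewProtoCodec(registry)
//
//  bz, err := cdc.MarshalInterface(&testdata.Dog{Name: "rufus"}) // wrapped in an Any
//  if err != nil {
//      panic(err)
//  }
//  var animal testdata.Animal
//  if err := cdc.UnmarshalInterface(bz, &animal); err != nil { // unpacked via the registry
//      panic(err)
//  }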

135
codec/codec_common_test.go Normal file
View File

@@ -0,0 +1,135 @@
package codec_test
import (
"testing"
"github.com/gogo/protobuf/proto"
"github.com/stretchr/testify/require"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/testutil/testdata"
)
type interfaceMarshaler struct {
marshal func(i proto.Message) ([]byte, error)
unmarshal func(bz []byte, ptr interface{}) error
}
func testInterfaceMarshaling(require *require.Assertions, cdc interfaceMarshaler, isAminoBin bool) {
_, err := cdc.marshal(nil)
require.Error(err, "can't marshal a nil value")
dog := &testdata.Dog{Name: "rufus"}
var dogI testdata.Animal = dog
bz, err := cdc.marshal(dogI)
require.NoError(err)
var animal testdata.Animal
if isAminoBin {
require.PanicsWithValue("Unmarshal expects a pointer", func() {
cdc.unmarshal(bz, animal)
})
} else {
err = cdc.unmarshal(bz, animal)
require.Error(err)
require.Contains(err.Error(), "expects a pointer")
}
require.NoError(cdc.unmarshal(bz, &animal))
require.Equal(dog, animal)
// Amino binary doesn't wrap the value in an Any, so the same bytes can also be unmarshaled directly into the concrete type
if isAminoBin {
var dog2 testdata.Dog
require.NoError(cdc.unmarshal(bz, &dog2))
require.Equal(*dog, dog2)
}
var cat testdata.Cat
require.Error(cdc.unmarshal(bz, &cat))
}
type mustMarshaler struct {
marshal func(i codec.ProtoMarshaler) ([]byte, error)
mustMarshal func(i codec.ProtoMarshaler) []byte
unmarshal func(bz []byte, ptr codec.ProtoMarshaler) error
mustUnmarshal func(bz []byte, ptr codec.ProtoMarshaler)
}
type testCase struct {
name string
input codec.ProtoMarshaler
recv codec.ProtoMarshaler
marshalErr bool
unmarshalErr bool
}
func testMarshalingTestCase(require *require.Assertions, tc testCase, m mustMarshaler) {
bz, err := m.marshal(tc.input)
if tc.marshalErr {
require.Error(err)
require.Panics(func() { m.mustMarshal(tc.input) })
} else {
var bz2 []byte
require.NoError(err)
require.NotPanics(func() { bz2 = m.mustMarshal(tc.input) })
require.Equal(bz, bz2)
err := m.unmarshal(bz, tc.recv)
if tc.unmarshalErr {
require.Error(err)
require.Panics(func() { m.mustUnmarshal(bz, tc.recv) })
} else {
require.NoError(err)
require.NotPanics(func() { m.mustUnmarshal(bz, tc.recv) })
require.Equal(tc.input, tc.recv)
}
}
}
func testMarshaling(t *testing.T, cdc codec.Marshaler) {
any, err := types.NewAnyWithValue(&testdata.Dog{Name: "rufus"})
require.NoError(t, err)
testCases := []testCase{
{
"valid encoding and decoding",
&testdata.Dog{Name: "rufus"},
&testdata.Dog{},
false,
false,
}, {
"invalid decode type",
&testdata.Dog{Name: "rufus"},
&testdata.Cat{},
false,
true,
}}
if _, ok := cdc.(*codec.AminoCodec); ok {
testCases = append(testCases, testCase{
"any marshaling",
&testdata.HasAnimal{Animal: any},
&testdata.HasAnimal{Animal: any},
false,
false,
})
}
for _, tc := range testCases {
tc := tc
m1 := mustMarshaler{cdc.MarshalBinaryBare, cdc.MustMarshalBinaryBare, cdc.UnmarshalBinaryBare, cdc.MustUnmarshalBinaryBare}
m2 := mustMarshaler{cdc.MarshalBinaryLengthPrefixed, cdc.MustMarshalBinaryLengthPrefixed, cdc.UnmarshalBinaryLengthPrefixed, cdc.MustUnmarshalBinaryLengthPrefixed}
m3 := mustMarshaler{
func(i codec.ProtoMarshaler) ([]byte, error) { return cdc.MarshalJSON(i) },
func(i codec.ProtoMarshaler) []byte { return cdc.MustMarshalJSON(i) },
func(bz []byte, ptr codec.ProtoMarshaler) error { return cdc.UnmarshalJSON(bz, ptr) },
func(bz []byte, ptr codec.ProtoMarshaler) { cdc.MustUnmarshalJSON(bz, ptr) }}
t.Run(tc.name+"_BinaryBare",
func(t *testing.T) { testMarshalingTestCase(require.New(t), tc, m1) })
t.Run(tc.name+"_BinaryLengthPrefixed",
func(t *testing.T) { testMarshalingTestCase(require.New(t), tc, m2) })
t.Run(tc.name+"_JSON",
func(t *testing.T) { testMarshalingTestCase(require.New(t), tc, m3) })
}
}
