Merge branch 'master' into aaronc/6513-textual-json-proto

This commit is contained in:
Federico Kunze 2021-05-02 10:00:26 -04:00 committed by GitHub
commit 9f22fb4146
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1317 changed files with 55284 additions and 132465 deletions

20
.deepsource.toml Normal file
View File

@ -0,0 +1,20 @@
version = 1
test_patterns = [
"tests/**",
"**_test.go"
]
exclude_patterns = [
"third_party/proto/**",
"testutil/**",
"proto/cosmos/**",
"contrib/**"
]
[[analyzers]]
name = "go"
enabled = true
[analyzers.meta]
import_paths = ["github.com/cosmos/cosmos-sdk"]

7
.github/CODEOWNERS vendored
View File

@ -3,11 +3,6 @@
# NOTE: Order is important; the last matching pattern takes the
# most precedence.
# Secondary repo maintainers, substitutes of the primary
# maintainers when they become MIA
* @cwgoes @sunnya97
# Primary repo maintainers
* @aaronc @alexanderbez @alessio
* @aaronc @alexanderbez

View File

@ -0,0 +1,40 @@
---
name: Module Readiness Checklist
about: Pre-flight checklist that modules must pass in order to be included in a release of the Cosmos SDK
labels: 'module-readiness-checklist'
---
## x/{MODULE_NAME} Module Readiness Checklist
This checklist is to be used for tracking the final internal audit of new Cosmos SDK modules prior to inclusion in a published release.
### Release Candidate Checklist
The following checklist should be gone through once the module has been fully implemented. This audit should be performed directly on `master`, or preferably on an `alpha` or `beta` release tag that includes the module.
The module **should not** be included in any Release Candidate tag until it has passed this checklist.
- [ ] API audit (at least 1 person) (@assignee)
- [ ] Are Msg and Query methods and types well-named and organized?
- [ ] Is everything well documented (inline godoc as well as [`/spec/` folder](https://github.com/cosmos/cosmos-sdk/blob/master/docs/spec/SPEC-SPEC.md) in module directory)
- [ ] State machine audit (at least 2 people) (@assignee1, @assignee2)
- [ ] Read through MsgServer code and verify correctness upon visual inspection
- [ ] Ensure all state machine code which could be confusing is properly commented
- [ ] Make sure state machine logic matches Msg method documentation
- [ ] Ensure that all state machine edge cases are covered with tests and that test coverage is sufficient (at least 90% coverage on module code)
- [ ] Assess potential threats for each method including spam attacks and ensure that threats have been addressed sufficiently. This should be done by writing up threat assessment for each method
- [ ] Assess potential risks of any new third party dependencies and decide whether a dependency audit is needed
- [ ] Completeness audit, fully implemented with tests (at least 1 person) (@assignee)
- [ ] Genesis import and export of all state
- [ ] Query services
- [ ] CLI methods
- [ ] All necessary migration scripts are present (if this is an upgrade of existing module)
### Published Release Checklist
After the above checks have been audited and the module is included in a tagged Release Candidate, the following additional checklist should be undertaken for live testing, and potentially a 3rd party audit (if deemed necessary):
- [ ] Testnet / devnet testing (2-3 people) (@assignee1, @assignee2, @assignee3)
- [ ] All Msg methods have been tested especially in light of any potential threats identified
- [ ] Genesis import and export has been tested
- [ ] Nice to have (and needed in some cases if threats could be high): Official 3rd party audit

57
.github/labeler.yml vendored Normal file
View File

@ -0,0 +1,57 @@
"Scope: x/auth":
- x/auth/**/*
"Scope: x/authz":
- x/authz/**/*
"Scope: x/bank":
- x/bank/**/*
"Scope: x/capability":
- x/capability/**/*
"Scope: x/crisis":
- x/crisis/**/*
"Scope: x/distribution":
- x/distribution/**/*
"Scope: x/evidence":
- x/evidence/**/*
"Scope: x/feegrant":
- x/feegrant/**/*
"Scope: x/genutil":
- x/genutil/**/*
"Scope: x/gov":
- x/gov/**/*
"Scope: x/mint":
- x/mint/**/*
"Scope: x/params":
- x/params/**/*
"Scope: Simulations":
- x/simulation/**/*
- x/*/simulation/**/*
"Scope: x/slashing":
- x/slashing/**/*
"Scope: x/staking":
- x/staking/**/*
"Scope: x/upgrade":
- x/upgrade/**/*
"Scope: Cosmovisor":
- cosmovisor/**/*
"Scope: Rosetta":
- contrib/rosetta/**/*
"Scope: Keys":
- client/keys/**/*
"Type: Build":
- Makefile
- Dockerfile
- docker-compose.yml
- scripts/*
"Type: CI":
- .github/**/*.yml
- buf.yaml
- .mergify.yml
- .golangci.yml
"Scope: CLI":
- client/**/*
- x/*/client/**/*
"Type: Docs":
- docs/**/*
- x/*/spec/**/*
"Type: ADR":
- docs/architecture/**/*

58
.github/workflows/atlas.yml vendored Normal file
View File

@ -0,0 +1,58 @@
name: Atlas
# Atlas checks if a module's atlas manifest has been touched; if so, it publishes the updated version
on:
push:
branches:
- master
paths:
- "x/**/atlas/*"
pull_request:
paths:
- "x/**/atlas/*"
jobs:
auth:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: technote-space/get-diff-action@v4
id: git_diff
with:
PATTERNS: |
x/auth/atlas/**
- uses: marbar3778/atlas_action@main
with:
token: ${{ secrets.ATLAS_TOKEN }}
path: ./x/auth/atlas/atlas.toml
dry-run: ${{ github.event_name != 'pull_request' }}
if: env.GIT_DIFF
bank:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: technote-space/get-diff-action@v4
id: git_diff
with:
PATTERNS: |
x/bank/atlas/**
- uses: marbar3778/atlas_action@main
with:
token: ${{ secrets.ATLAS_TOKEN }}
path: ./x/bank/atlas/atlas.toml
dry-run: ${{ github.event_name != 'pull_request' }}
if: env.GIT_DIFF
evidence:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: technote-space/get-diff-action@v4
id: git_diff
with:
PATTERNS: |
x/evidence/atlas/**
- uses: marbar3778/atlas_action@main
with:
token: ${{ secrets.ATLAS_TOKEN }}
path: ./x/evidence/atlas/manifest.toml
dry-run: ${{ github.event_name != 'pull_request' }}
if: env.GIT_DIFF

25
.github/workflows/check-docs.yml vendored Normal file
View File

@ -0,0 +1,25 @@
name: Check docs build
# This workflow runs when a PR is labeled with `docs`
# This will check if the docs build successfully by running `npm run build`
on:
pull_request:
types: [ labeled ]
jobs:
check-docs-build:
if: ${{ github.event.label.name == 'docs' }}
name: Check docs build
runs-on: ubuntu-latest
steps:
- name: Checkout 🛎️
uses: actions/checkout@v2.3.1
with:
persist-credentials: false
fetch-depth: 0
- name: Install dependencies and build docs 🧱
run: |
cd docs
npm install
npm run build

View File

@ -1,31 +0,0 @@
name: Documentation
# This job builds and deploys documentation to GitHub Pages.
# It runs on every push to master.
on:
push:
branches:
- master
jobs:
build-and-deploy:
runs-on: ubuntu-latest
container:
image: tendermintdev/docker-website-deployment
steps:
- name: Checkout 🛎️
uses: actions/checkout@v2.3.1
with:
persist-credentials: false
fetch-depth: 0
- name: Install and Build 🔧
run: |
apk add rsync
make build-docs LEDGER_ENABLED=false
- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BRANCH: gh-pages
FOLDER: ~/output

16
.github/workflows/janitor.yml vendored Normal file
View File

@ -0,0 +1,16 @@
name: Janitor
# Janitor cleans up previous runs of various workflows
# Cancels Sims and Tests
on:
pull_request:
jobs:
cancel:
name: "Cancel Previous Runs"
runs-on: ubuntu-latest
timeout-minutes: 3
steps:
- uses: styfle/cancel-workflow-action@0.9.0
with:
workflow_id: 872925,1013614,1134055
access_token: ${{ github.token }}

11
.github/workflows/labeler.yml vendored Normal file
View File

@ -0,0 +1,11 @@
name: "Pull Request Labeler"
on:
- pull_request_target
jobs:
labeler:
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@main
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"

View File

@ -23,7 +23,7 @@ jobs:
- uses: golangci/golangci-lint-action@master
with:
# Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
version: v1.28
version: v1.39
args: --timeout 10m
github-token: ${{ secrets.github_token }}
if: env.GIT_DIFF

View File

@ -27,6 +27,12 @@ jobs:
fi
TAGS="${DOCKER_IMAGE}:${VERSION}"
echo ::set-output name=tags::${TAGS}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
@ -42,5 +48,6 @@ jobs:
with:
context: ./contrib/devtools
file: ./contrib/devtools/dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.prep.outputs.tags }}

View File

@ -3,19 +3,28 @@ name: Protobuf
# This workflow is only run when a .proto file has been changed
on:
pull_request:
paths:
- "**.proto"
jobs:
lint:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@master
- uses: technote-space/get-diff-action@v4
with:
PATTERNS: |
**/**.proto
- name: lint
run: make proto-lint
if: env.GIT_DIFF
breakage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: technote-space/get-diff-action@v4
with:
PATTERNS: |
**/**.proto
- name: check-breakage
run: make proto-check-breaking
if: env.GIT_DIFF

View File

@ -7,14 +7,6 @@ on:
- "rc**"
jobs:
cleanup-runs:
runs-on: ubuntu-latest
steps:
- uses: rokroskar/workflow-run-cleanup-action@master
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/master'"
build:
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip-sims')"
@ -30,7 +22,7 @@ jobs:
- name: install runsim
run: |
export GO111MODULE="on" && go get github.com/cosmos/tools/cmd/runsim@v1.0.0
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -40,7 +32,7 @@ jobs:
needs: [build, install-runsim]
steps:
- uses: actions/checkout@v2
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary

View File

@ -8,14 +8,6 @@ on:
- master
jobs:
cleanup-runs:
runs-on: ubuntu-latest
if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/master'"
steps:
- uses: rokroskar/workflow-run-cleanup-action@master
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
build:
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip-sims')"
@ -23,7 +15,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- run: make build
@ -34,12 +26,12 @@ jobs:
steps:
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- name: Install runsim
run: export GO111MODULE="on" && go get github.com/cosmos/tools/cmd/runsim@v1.0.0
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -51,7 +43,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- uses: technote-space/get-diff-action@v4
@ -60,7 +52,7 @@ jobs:
**/**.go
go.mod
go.sum
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -77,7 +69,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- uses: technote-space/get-diff-action@v4
@ -88,7 +80,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -105,7 +97,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- uses: technote-space/get-diff-action@v4
@ -116,7 +108,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -133,7 +125,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- uses: technote-space/get-diff-action@v4
@ -144,7 +136,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary

View File

@ -15,7 +15,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Unshallow
run: git fetch --prune --unshallow
- name: Create release

View File

@ -7,26 +7,18 @@ on:
branches:
- master
jobs:
cleanup-runs:
runs-on: ubuntu-latest
steps:
- uses: rokroskar/workflow-run-cleanup-action@master
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/master'"
install-tparse:
runs-on: ubuntu-latest
steps:
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- name: install tparse
run: |
export GO111MODULE="on" && go get github.com/mfridman/tparse@v0.8.3
- uses: actions/cache@v2.1.3
- uses: actions/cache@v2.1.5
with:
path: ~/go/bin
key: ${{ runner.os }}-go-tparse-binary
@ -40,7 +32,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- uses: technote-space/get-diff-action@v4
id: git_diff
with:
@ -57,7 +49,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- name: Display go version
run: go version
- uses: technote-space/get-diff-action@v4
@ -110,7 +102,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- uses: technote-space/get-diff-action@v4
with:
PATTERNS: |
@ -172,7 +164,7 @@ jobs:
sed -i.bak "/$(echo $filename | sed 's/\//\\\//g')/d" coverage.txt
done
if: env.GIT_DIFF
- uses: codecov/codecov-action@v1.2.1
- uses: codecov/codecov-action@v1.4.1
with:
file: ./coverage.txt
if: env.GIT_DIFF
@ -188,7 +180,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- uses: technote-space/get-diff-action@v4
with:
PATTERNS: |
@ -201,7 +193,7 @@ jobs:
if: env.GIT_DIFF
- name: test & coverage report creation
run: |
cat pkgs.txt.part.${{ matrix.part }} | xargs go test -mod=readonly -json -timeout 30m -race -tags='cgo ledger test_ledger_mock' > ${{ matrix.part }}-race-output.txt
xargs --arg-file=pkgs.txt.part.${{ matrix.part }} go test -mod=readonly -timeout 30m -race -tags='cgo ledger test_ledger_mock'
if: env.GIT_DIFF
- uses: actions/upload-artifact@v2
with:
@ -225,44 +217,6 @@ jobs:
make test-rosetta
# if: env.GIT_DIFF
race-detector-report:
runs-on: ubuntu-latest
needs: [test-race, install-tparse]
timeout-minutes: 5
steps:
- uses: actions/checkout@v2
- uses: technote-space/get-diff-action@v4
id: git_diff
with:
PATTERNS: |
**/**.go
go.mod
go.sum
- uses: actions/download-artifact@v2
with:
name: "${{ github.sha }}-00-race-output"
if: env.GIT_DIFF
- uses: actions/download-artifact@v2
with:
name: "${{ github.sha }}-01-race-output"
if: env.GIT_DIFF
- uses: actions/download-artifact@v2
with:
name: "${{ github.sha }}-02-race-output"
if: env.GIT_DIFF
- uses: actions/download-artifact@v2
with:
name: "${{ github.sha }}-03-race-output"
if: env.GIT_DIFF
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-tparse-binary
if: env.GIT_DIFF
- name: Generate test report (go test -race)
run: cat ./*-race-output.txt | ~/go/bin/tparse
if: env.GIT_DIFF
liveness-test:
runs-on: ubuntu-latest
timeout-minutes: 10
@ -270,7 +224,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
go-version: 1.16
- uses: technote-space/get-diff-action@v4
id: git_diff
with:

View File

@ -20,12 +20,10 @@ linters:
- gosimple
- govet
- ineffassign
- interfacer
- maligned
- misspell
- nakedret
- prealloc
- scopelint
- exportloopref
- staticcheck
- structcheck
- stylecheck
@ -56,6 +54,11 @@ issues:
- text: "ST1016:"
linters:
- stylecheck
- path: "legacy"
text: "SA1019:"
linters:
- staticcheck
max-issues-per-linter: 10000
max-same-issues: 10000

View File

@ -8,3 +8,19 @@ pull_request_rules:
merge:
method: squash
strict: true
- name: backport patches to v0.42.x branch
conditions:
- base=master
- label=backport/0.42.x (Stargate)
actions:
backport:
branches:
- release/v0.42.x
- name: backport patches to v0.39.x branch
conditions:
- base=master
- label=backport/0.39.x (Launchpad)
actions:
backport:
branches:
- launchpad/backports

View File

@ -35,30 +35,225 @@ Ref: https://keepachangelog.com/en/1.0.0/
# Changelog
## [Unreleased]
* [\#9205](https://github.com/cosmos/cosmos-sdk/pull/9205) Improve readability in `abci` handleQueryP2P
### Features
* [\#8965](https://github.com/cosmos/cosmos-sdk/pull/8965) cosmos reflection now provides more information on the application such as: deliverable msgs, sdk.Config info etc (still in alpha stage).
* [\#8559](https://github.com/cosmos/cosmos-sdk/pull/8559) Added Protobuf compatible secp256r1 ECDSA signatures.
* [\#8786](https://github.com/cosmos/cosmos-sdk/pull/8786) Enabled secp256r1 in x/auth.
* (rosetta) [\#8729](https://github.com/cosmos/cosmos-sdk/pull/8729) Data API fully supports balance tracking. Construction API can now construct any message supported by the application.
* [\#8754](https://github.com/cosmos/cosmos-sdk/pull/8875) Added support for reverse iteration to pagination.
* [#9088](https://github.com/cosmos/cosmos-sdk/pull/9088) Added implementation to ADR-28 Derived Addresses.
* [\#9133](https://github.com/cosmos/cosmos-sdk/pull/9133) Added hooks for governance actions.
* (x/staking) [\#9214](https://github.com/cosmos/cosmos-sdk/pull/9214) Added `new_shares` attribute inside `EventTypeDelegate` event.
### Client Breaking Changes
* [\#8363](https://github.com/cosmos/cosmos-sdk/issues/8363) Addresses no longer have a fixed 20-byte length. From the SDK modules' point of view, any 1-255 bytes-long byte array is a valid address.
* [\#8363](https://github.com/cosmos/cosmos-sdk/pull/8363) Addresses no longer have a fixed 20-byte length. From the SDK modules' point of view, any 1-255 bytes-long byte array is a valid address.
* [\#8346](https://github.com/cosmos/cosmos-sdk/pull/8346) All CLI `tx` commands generate ServiceMsgs by default. Graceful Amino support has been added to ServiceMsgs to support signing legacy Msgs.
* (crypto/ed25519) [\#8690] Adopt ZIP-215 ed25519 verification rules.
* [\#8849](https://github.com/cosmos/cosmos-sdk/pull/8849) Upgrade module no longer supports time based upgrades.
* [\#8880](https://github.com/cosmos/cosmos-sdk/pull/8880) The CLI `simd migrate v0.40 ...` command has been renamed to `simd migrate v0.42`.
* [\#7477](https://github.com/cosmos/cosmos-sdk/pull/7477) Changed Bech32 Public Key serialization in the client facing functionality (CLI, MsgServer, QueryServer):
* updated the keyring display structure (it uses protobuf JSON serialization) - the output is more verbose.
* Renamed `MarshalAny` and `UnmarshalAny` to `MarshalInterface` and `UnmarshalInterface` respectively. These functions must take an interface as parameter (not a concrete type nor `Any` object). Underneath they use `Any` wrapping for correct protobuf serialization.
* CLI: removed `--text` flag from `show-node-id` command; the text format for public keys is not used any more - instead we use ProtoJSON.
* (types) [\#9079](https://github.com/cosmos/cosmos-sdk/issues/9079) Add `AddAmount`/`SubAmount` methods to `sdk.Coin`.
* [\#8628](https://github.com/cosmos/cosmos-sdk/issues/8628) Commands no longer print outputs using `stderr` by default
* [\#9139](https://github.com/cosmos/cosmos-sdk/pull/9139) Querying events:
* via `ServiceMsg` TypeURLs (e.g. `message.action='/cosmos.bank.v1beta1.Msg/Send'`) does not work anymore,
* via legacy `msg.Type()` (e.g. `message.action='send'`) is being deprecated, new `Msg`s won't emit these events.
* Please use concrete `Msg` TypeURLs instead (e.g. `message.action='/cosmos.bank.v1beta1.MsgSend'`).
### API Breaking Changes
* (keyring) [\#8662](https://github.com/cosmos/cosmos-sdk/pull/8662) `NewMnemonic` now receives an additional `passphrase` argument to secure the key generated by the bip39 mnemonic.
* (x/bank) [\#8473](https://github.com/cosmos/cosmos-sdk/pull/8473) Bank keeper does not expose unsafe balance changing methods such as `SetBalance`, `SetSupply` etc.
* (x/staking) [\#8473](https://github.com/cosmos/cosmos-sdk/pull/8473) On genesis init, if non bonded pool and bonded pool balance, coming from the bank module, does not match what is saved in the staking state, the initialization will panic.
* (x/gov) [\#8473](https://github.com/cosmos/cosmos-sdk/pull/8473) On genesis init, if the gov module account balance, coming from bank module state, does not match the one in gov module state, the initialization will panic.
* (x/distribution) [\#8473](https://github.com/cosmos/cosmos-sdk/pull/8473) On genesis init, if the distribution module account balance, coming from bank module state, does not match the one in distribution module state, the initialization will panic.
* (client/keys) [\#8500](https://github.com/cosmos/cosmos-sdk/pull/8500) `InfoImporter` interface is removed from legacy keybase.
* [\#8629](https://github.com/cosmos/cosmos-sdk/pull/8629) Deprecated `SetFullFundraiserPath` from `Config` in favor of `SetPurpose` and `SetCoinType`.
* (x/upgrade) [\#8673](https://github.com/cosmos/cosmos-sdk/pull/8673) Remove IBC logic from x/upgrade. Deprecates IBC fields in an Upgrade Plan. IBC upgrade logic moved to 02-client and an IBC UpgradeProposal is added.
* (x/bank) [\#8517](https://github.com/cosmos/cosmos-sdk/pull/8517) `SupplyI` interface and `Supply` are removed and uses `sdk.Coins` for supply tracking
* (x/upgrade) [\#8743](https://github.com/cosmos/cosmos-sdk/pull/8743) `UpgradeHandler` includes a new argument `VersionMap` which helps facilitate in-place migrations.
* (x/auth) [\#8129](https://github.com/cosmos/cosmos-sdk/pull/8828) Updated `SigVerifiableTx.GetPubKeys` method signature to return error.
* (x/upgrade) [\7487](https://github.com/cosmos/cosmos-sdk/pull/8897) Upgrade `Keeper` takes new argument `ProtocolVersionSetter` which implements setting a protocol version on baseapp.
* (baseapp) [\7487](https://github.com/cosmos/cosmos-sdk/pull/8897) BaseApp's fields appVersion and version were swapped to match Tendermint's fields.
* [\#8682](https://github.com/cosmos/cosmos-sdk/pull/8682) `ante.NewAnteHandler` updated to receive all positional params as `ante.HandlerOptions` struct. If required fields aren't set, throws error accordingly.
* (x/staking/types) [\#7447](https://github.com/cosmos/cosmos-sdk/issues/7447) Remove bech32 PubKey support:
* `ValidatorI` interface update: `GetConsPubKey` renamed to `TmConsPubKey` (this is to clarify the return type: consensus public key must be a tendermint key); `TmConsPubKey`, `GetConsAddr` methods return error.
* `Validator` updated according to the `ValidatorI` changes described above.
* `ToTmValidator` function: added `error` to return values.
* `Validator.ConsensusPubkey` type changed from `string` to `codectypes.Any`.
* `MsgCreateValidator.Pubkey` type changed from `string` to `codectypes.Any`.
* (client) [\#8926](https://github.com/cosmos/cosmos-sdk/pull/8926) `client/tx.PrepareFactory` has been converted to a private function, as it's only used internally.
* (auth/tx) [\#8926](https://github.com/cosmos/cosmos-sdk/pull/8926) The `ProtoTxProvider` interface used as a workaround for transaction simulation has been removed.
* (x/bank) [\#8798](https://github.com/cosmos/cosmos-sdk/pull/8798) `GetTotalSupply` is removed in favour of `GetPaginatedTotalSupply`
* (x/bank/types) [\#9061](https://github.com/cosmos/cosmos-sdk/pull/9061) `AddressFromBalancesStore` now returns an error for invalid key instead of panic.
* (codec) [\#9061](https://github.com/cosmos/cosmos-sdk/pull/9226) Rename codec interfaces and methods, to follow a general Go interfaces:
* `codec.Marshaler``codec.Codec` (this defines objects which serialize other objects)
* `codec.BinaryMarshaler``codec.BinaryCodec`
* `codec.JSONMarshaler``codec.JSONCodec`
* Removed `BinaryBare` suffix from `BinaryCodec` methods (`MarshalBinaryBare`, `UnmarshalBinaryBare`, ...)
* Removed `Binary` infix from `BinaryCodec` methods (`MarshalBinaryLengthPrefixed`, `UnmarshalBinaryLengthPrefixed`, ...)
* [\#9139](https://github.com/cosmos/cosmos-sdk/pull/9139) `ServiceMsg` TypeURLs (e.g. `/cosmos.bank.v1beta1.Msg/Send`) have been removed, as they don't comply with the Protobuf `Any` spec. Please use `Msg` type TypeURLs (e.g. `/cosmos.bank.v1beta1.MsgSend`). This has multiple consequences:
* The `sdk.ServiceMsg` struct has been removed.
* `sdk.Msg` now only contains `ValidateBasic` and `GetSigners` methods. The remaining methods `GetSignBytes`, `Route` and `Type` are moved to `legacytx.LegacyMsg`.
* The `RegisterCustomTypeURL` function and the `cosmos.base.v1beta1.ServiceMsg` interface have been removed from the interface registry.
### State Machine Breaking
* (x/{bank,distrib,gov,slashing,staking}) [\#8363](https://github.com/cosmos/cosmos-sdk/issues/8363) Store keys have been modified to allow for variable-length addresses.
* (x/ibc) [\#8266](https://github.com/cosmos/cosmos-sdk/issues/8266) Add amino JSON for IBC messages in order to support Ledger text signing.
* (x/evidence) [\#8502](https://github.com/cosmos/cosmos-sdk/pull/8502) `HandleEquivocationEvidence` persists the evidence to state.
* (x/gov) [\#7733](https://github.com/cosmos/cosmos-sdk/pull/7733) ADR 037 Implementation: Governance Split Votes
* (x/bank) [\#8656](https://github.com/cosmos/cosmos-sdk/pull/8656) balance and supply are now correctly tracked via `coin_spent`, `coin_received`, `coinbase` and `burn` events.
* (x/bank) [\#8517](https://github.com/cosmos/cosmos-sdk/pull/8517) Supply is now stored and tracked as `sdk.Coins`
* (store) [\#8790](https://github.com/cosmos/cosmos-sdk/pull/8790) Reduce gas costs by 10x for transient store operations.
* (x/staking) [\#8505](https://github.com/cosmos/cosmos-sdk/pull/8505) Convert staking power reduction into an on-chain parameter rather than a hardcoded in-code variable.
* (x/bank) [\#9051](https://github.com/cosmos/cosmos-sdk/pull/9051) Supply value is stored as `sdk.Int` rather than `string`.
### Improvements
* (x/bank) [\#8614](https://github.com/cosmos/cosmos-sdk/issues/8614) Add `Name` and `Symbol` fields to denom metadata
* (x/auth) [\#8522](https://github.com/cosmos/cosmos-sdk/pull/8522) Allow to query all stored accounts
* (crypto/types) [\#8600](https://github.com/cosmos/cosmos-sdk/pull/8600) `CompactBitArray`: optimize the `NumTrueBitsBefore` method and add an `Equal` method.
* (x/upgrade) [\#8743](https://github.com/cosmos/cosmos-sdk/pull/8743) Add tracking module versions as per ADR-041
* (types) [\#8962](https://github.com/cosmos/cosmos-sdk/issues/8962) Add `Abs()` method to `sdk.Int`.
* (x/bank) [\#8950](https://github.com/cosmos/cosmos-sdk/pull/8950) Improve efficiency on supply updates.
* (store) [\#8012](https://github.com/cosmos/cosmos-sdk/pull/8012) Implementation of ADR-038 WriteListener and listen.KVStore
* (makefile) [\#7933](https://github.com/cosmos/cosmos-sdk/issues/7933) Use Docker to generate swagger files.
### Bug Fixes
* (gRPC) [\#8945](https://github.com/cosmos/cosmos-sdk/pull/8945) gRPC reflection now works correctly.
* (keyring) [\#8635](https://github.com/cosmos/cosmos-sdk/issues/8635) Remove hardcoded default passphrase value on `NewMnemonic`
* (x/bank) [\#8434](https://github.com/cosmos/cosmos-sdk/pull/8434) Fix legacy REST API `GET /bank/total` and `GET /bank/total/{denom}` in swagger
* (x/slashing) [\#8427](https://github.com/cosmos/cosmos-sdk/pull/8427) Fix query signing infos command
* (server) [\#8399](https://github.com/cosmos/cosmos-sdk/pull/8399) fix gRPC-web flag default value
* (x/bank) [\#9229](https://github.com/cosmos/cosmos-sdk/pull/9229) Now zero coin balances cannot be added to balances & supply stores. If any denom becomes zero corresponding key gets deleted from store.
### Deprecated
* (grpc) [\#8926](https://github.com/cosmos/cosmos-sdk/pull/8926) The `tx` field in `SimulateRequest` has been deprecated, prefer to pass `tx_bytes` instead.
## [v0.42.4](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.42.4) - 2021-04-08
### Client Breaking Changes
* [\#9026](https://github.com/cosmos/cosmos-sdk/pull/9026) By default, the `tx sign` and `tx sign-batch` CLI commands use SIGN_MODE_DIRECT to sign transactions for local pubkeys. For multisigs and ledger keys, the default LEGACY_AMINO_JSON is used.
### Bug Fixes
* (gRPC) [\#9015](https://github.com/cosmos/cosmos-sdk/pull/9015) Fix invalid status code when accessing gRPC endpoints.
* [\#9026](https://github.com/cosmos/cosmos-sdk/pull/9026) Fixed the bug that caused the `gentx` command to fail for Ledger keys.
### Improvements
* [\#9081](https://github.com/cosmos/cosmos-sdk/pull/9081) Upgrade Tendermint to v0.34.9 that includes a security issue fix for Tendermint light clients.
## [v0.42.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.42.3) - 2021-03-24
This release fixes a security vulnerability identified in x/bank.
## [v0.42.2](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.42.2) - 2021-03-19
### Improvements
* (grpc) [\#8815](https://github.com/cosmos/cosmos-sdk/pull/8815) Add orderBy parameter to `TxsByEvents` endpoint.
* (cli) [\#8826](https://github.com/cosmos/cosmos-sdk/pull/8826) Add trust to macOS Keychain for caller app by default.
* (store) [\#8811](https://github.com/cosmos/cosmos-sdk/pull/8811) store/cachekv: use typed types/kv.List instead of container/list.List
### Bug Fixes
* (crypto) [\#8841](https://github.com/cosmos/cosmos-sdk/pull/8841) Fix legacy multisig amino marshaling, allowing migrations to work between v0.39 and v0.40+.
* (cli) [\#8873](https://github.com/cosmos/cosmos-sdk/pull/8873) add --output-document to multisign-batch.
## [v0.42.1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.42.1) - 2021-03-10
This release fixes security vulnerability identified in the simapp.
## [v0.42.0](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.42.0) - 2021-03-08
**IMPORTANT**: This release contains an important security fix for all non Cosmos Hub chains running Stargate version of the Cosmos SDK (>0.40). Non-hub chains should not be using any version of the SDK in the v0.40.x or v0.41.x release series. See [#8461](https://github.com/cosmos/cosmos-sdk/pull/8461) for more details.
### Improvements
* (x/ibc) [\#8624](https://github.com/cosmos/cosmos-sdk/pull/8624) Emit full header in IBC UpdateClient message.
* (x/crisis) [\#8621](https://github.com/cosmos/cosmos-sdk/issues/8621) crisis invariants names now print to loggers.
### Bug fixes
* (x/evidence) [\#8461](https://github.com/cosmos/cosmos-sdk/pull/8461) Fix bech32 prefix in evidence validator address conversion
* (x/gov) [\#8806](https://github.com/cosmos/cosmos-sdk/issues/8806) Fix q gov proposals command's mishandling of the --status parameter's values.
## [v0.41.4](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.41.3) - 2021-03-02
**IMPORTANT**: Due to a bug in the v0.41.x series with how evidence handles validator consensus addresses #8461, SDK based chains that are not using the default bech32 prefix (cosmos, aka all chains except for the Cosmos Hub) should not use this release or any release in the v0.41.x series. Please see #8668 for tracking & timeline for the v0.42.0 release, which will include a fix for this issue.
### Features
* [\#7787](https://github.com/cosmos/cosmos-sdk/pull/7787) Add multisign-batch command.
### Bug fixes
* [\#8730](https://github.com/cosmos/cosmos-sdk/pull/8730) Allow REST endpoint to query txs with multisig addresses.
* [\#8680](https://github.com/cosmos/cosmos-sdk/issues/8680) Fix missing timestamp in GetTxsEvent response [\#8732](https://github.com/cosmos/cosmos-sdk/pull/8732).
* [\#8681](https://github.com/cosmos/cosmos-sdk/issues/8681) Fix missing error message when calling GetTxsEvent [\#8732](https://github.com/cosmos/cosmos-sdk/pull/8732)
* (server) [\#8641](https://github.com/cosmos/cosmos-sdk/pull/8641) Fix Tendermint and application configuration reading from file
* (client/keys) [\#8639](https://github.com/cosmos/cosmos-sdk/pull/8639) Fix keys migrate for multisig, offline, and ledger keys. The migrate command now takes a positional old_home_dir argument.
### Improvements
* (store/cachekv), (x/bank/types) [\#8719](https://github.com/cosmos/cosmos-sdk/pull/8719) algorithmically fix pathologically slow code
* [\#8701](https://github.com/cosmos/cosmos-sdk/pull/8701) Upgrade tendermint v0.34.8.
* [\#8714](https://github.com/cosmos/cosmos-sdk/pull/8714) Allow accounts to have a balance of 0 at genesis.
## [v0.41.3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.41.3) - 2021-02-18
### Bug Fixes
* [\#8617](https://github.com/cosmos/cosmos-sdk/pull/8617) Fix build failures caused by a small API breakage introduced in tendermint v0.34.7.
## [v0.41.2](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.41.2) - 2021-02-18
### Improvements
* Bump tendermint dependency to v0.34.7.
## [v0.41.1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.41.1) - 2021-02-17
### Bug Fixes
* (grpc) [\#8549](https://github.com/cosmos/cosmos-sdk/pull/8549) Make gRPC requests go through ABCI and disallow concurrency.
* (x/staking) [\#8546](https://github.com/cosmos/cosmos-sdk/pull/8546) Fix caching bug where concurrent calls to GetValidator could cause a node to crash
* (server) [\#8481](https://github.com/cosmos/cosmos-sdk/pull/8481) Don't create files when running `{appd} tendermint show-*` subcommands.
* (client/keys) [\#8436](https://github.com/cosmos/cosmos-sdk/pull/8436) Fix keybase->keyring keys migration.
* (crypto/hd) [\#8607](https://github.com/cosmos/cosmos-sdk/pull/8607) Make DerivePrivateKeyForPath error and not panic on trailing slashes.
### Improvements
* (x/ibc) [\#8458](https://github.com/cosmos/cosmos-sdk/pull/8458) Add `packet_connection` attribute to ibc events to enable relayer filtering
* (x/bank) [\#8479](https://github.com/cosmos/cosmos-sdk/pull/8479) Additional client denom metadata validation for `base` and `display` denoms.
* (x/ibc) [\#8404](https://github.com/cosmos/cosmos-sdk/pull/8404) Reorder IBC `ChanOpenAck` and `ChanOpenConfirm` handler execution to perform core handler first, followed by application callbacks.
* [\#8396](https://github.com/cosmos/cosmos-sdk/pull/8396) Add support for ARM platform
* (x/bank) [\#8479](https://github.com/cosmos/cosmos-sdk/pull/8479) Additional client denom metadata validation for `base` and `display` denoms.
* (codec/types) [\#8605](https://github.com/cosmos/cosmos-sdk/pull/8605) Avoid unnecessary allocations for NewAnyWithCustomTypeURL on error.
## [v0.41.0](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.41.0) - 2021-01-26
### State Machine Breaking
* (x/ibc) [\#8266](https://github.com/cosmos/cosmos-sdk/issues/8266) Add amino JSON support for IBC MsgTransfer in order to support Ledger text signing transfer transactions.
* (x/ibc) [\#8404](https://github.com/cosmos/cosmos-sdk/pull/8404) Reorder IBC `ChanOpenAck` and `ChanOpenConfirm` handler execution to perform core handler first, followed by application callbacks.
### Bug Fixes
* (x/evidence) [#8461](https://github.com/cosmos/cosmos-sdk/pull/8461) Fix bech32 prefix in evidence validator address conversion
* (x/slashing) [\#8427](https://github.com/cosmos/cosmos-sdk/pull/8427) Fix query signing infos command
* (simapp) [\#8418](https://github.com/cosmos/cosmos-sdk/pull/8418) Add balance coin to supply when adding a new genesis account
* (x/bank) [\#8417](https://github.com/cosmos/cosmos-sdk/pull/8417) Validate balances and coin denom metadata on genesis
* (server) [\#8399](https://github.com/cosmos/cosmos-sdk/pull/8399) fix gRPC-web flag default value
## [v0.40.1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.1) - 2021-01-19

View File

@ -20,7 +20,7 @@
Thank you for considering making contributions to Cosmos-SDK and related
repositories!
Contributing to this repo can mean many things such as participated in
Contributing to this repo can mean many things such as participating in
discussion or proposing code changes. To ensure a smooth workflow for all
contributors, the general procedure for contributing has been established:
@ -51,9 +51,6 @@ not required to an open issue to submit a PR, but be aware that for more complex
problems/features, if a PR is opened before an adequate design discussion has
taken place in a github issue, that PR runs a high likelihood of being rejected.
Take a peek at our [coding repo](https://github.com/tendermint/coding) for
overall information on repository workflow and standards. Note, we use `make tools` for installing the linting tools.
Other notes:
- Looking for a good place to start contributing? How about checking out some
@ -69,7 +66,7 @@ Other notes:
When proposing an architecture decision for the SDK, please create an [ADR](./docs/architecture/README.md)
so further discussions can be made. We are following this process so all involved parties are in
agreement before any party begins coding the proposed implementation. If you would like to see some examples
of how these are written refer to [Tendermint ADRs](https://github.com/tendermint/tendermint/tree/master/docs/architecture)
of how these are written refer to the current [ADRs](https://github.com/cosmos/cosmos-sdk/tree/master/docs/architecture)
## Pull Requests
@ -169,12 +166,7 @@ For example, in vscode your `.vscode/settings.json` should look like:
## Testing
All repos should be hooked up to [CircleCI](https://circleci.com/).
If they have `.go` files in the root directory, they will be automatically
tested by circle using `go test -v -race ./...`. If not, they will need a
`circle.yml`. Ideally, every repo has a `Makefile` that defines `make test` and
includes its continuous integration status using a badge in the `README.md`.
Tests can be ran by running `make test` at the top level of the SDK repository.
We expect tests to use `require` or `assert` rather than `t.Skip` or `t.Fail`,
unless there is a reason to do otherwise.
@ -212,8 +204,7 @@ The SDK utilizes [semantic versioning](https://semver.org/).
Ensure that you base and target your PR on the `master` branch.
All feature additions should be targeted against `master`. Bug fixes for an outstanding release candidate
should be targeted against the release candidate branch. Release candidate branches themselves should be the
only pull requests targeted directly against master.
should be targeted against the release candidate branch.
### Development Procedure
@ -369,14 +360,12 @@ should convene to rectify the situation by either:
**Approval Committee & Decision Making**
In absense of general consensus, decision making requires ⅔ vote from the three members
In absence of general consensus, decision making requires 1/2 vote from the two members
of the **Concept Approval Committee**.
**Committee Members**
* Core Members: **Aaron** (Regen), **Bez** (Fission), **Alessio** (AiB)
* Secondary pool of candidates to replace / substitute:
* **Chris Goes** (IG), **Sunny** (Sikka)
* Core Members: **Aaron** (Regen), **Bez** (IG)
**Committee Criteria**
@ -406,8 +395,7 @@ well as for PRs made as part of a release process:
* Code reviewers should ensure the PR does exactly what the ADR said it should
* Code reviewers should have more senior engineering capability
* ⅔ approval is required from the **primary repo maintainers** in `CODEOWNERS`
* Secondary pool of candidates to replace / substitute are listed as **secondary repo maintainers** in `CODEOWNERS`
* 1/2 approval is required from the **primary repo maintainers** in `CODEOWNERS`
*Note: For any major or minor release series denoted as a "Stable Release" (e.g. v0.39 "Launchpad"), a separate release
committee is often established. Stable Releases, and their corresponding release committees are documented

View File

@ -3,6 +3,7 @@
PACKAGES_NOSIMULATION=$(shell go list ./... | grep -v '/simulation')
PACKAGES_SIMTEST=$(shell go list ./... | grep '/simulation')
VERSION := $(shell echo $(shell git describe --always) | sed 's/^v//')
TMVERSION := $(shell go list -m github.com/tendermint/tendermint | sed 's:.* ::')
COMMIT := $(shell git log -1 --format='%H')
LEDGER_ENABLED ?= true
BINDIR ?= $(GOPATH)/bin
@ -44,8 +45,6 @@ endif
ifeq (cleveldb,$(findstring cleveldb,$(COSMOS_BUILD_OPTIONS)))
build_tags += gcc
endif
build_tags += $(BUILD_TAGS)
build_tags := $(strip $(build_tags))
whitespace :=
whitespace += $(whitespace)
@ -58,7 +57,8 @@ ldflags = -X github.com/cosmos/cosmos-sdk/version.Name=sim \
-X github.com/cosmos/cosmos-sdk/version.AppName=simd \
-X github.com/cosmos/cosmos-sdk/version.Version=$(VERSION) \
-X github.com/cosmos/cosmos-sdk/version.Commit=$(COMMIT) \
-X "github.com/cosmos/cosmos-sdk/version.BuildTags=$(build_tags_comma_sep)"
-X "github.com/cosmos/cosmos-sdk/version.BuildTags=$(build_tags_comma_sep)" \
-X github.com/tendermint/tendermint/version.TMCoreSemVer=$(TMVERSION)
# DB backend selection
ifeq (cleveldb,$(findstring cleveldb,$(COSMOS_BUILD_OPTIONS)))
@ -66,6 +66,7 @@ ifeq (cleveldb,$(findstring cleveldb,$(COSMOS_BUILD_OPTIONS)))
endif
ifeq (badgerdb,$(findstring badgerdb,$(COSMOS_BUILD_OPTIONS)))
ldflags += -X github.com/cosmos/cosmos-sdk/types.DBBackend=badgerdb
BUILD_TAGS += badgerdb
endif
# handle rocksdb
ifeq (rocksdb,$(findstring rocksdb,$(COSMOS_BUILD_OPTIONS)))
@ -85,6 +86,9 @@ endif
ldflags += $(LDFLAGS)
ldflags := $(strip $(ldflags))
build_tags += $(BUILD_TAGS)
build_tags := $(strip $(build_tags))
BUILD_FLAGS := -tags "$(build_tags)" -ldflags '$(ldflags)'
# check for nostrip option
ifeq (,$(findstring nostrip,$(COSMOS_BUILD_OPTIONS)))
@ -365,30 +369,40 @@ devdoc-update:
### Protobuf ###
###############################################################################
containerProtoVer=v0.2
containerProtoImage=tendermintdev/sdk-proto-gen:$(containerProtoVer)
containerProtoGen=cosmos-sdk-proto-gen-$(containerProtoVer)
containerProtoGenSwagger=cosmos-sdk-proto-gen-swagger-$(containerProtoVer)
containerProtoFmt=cosmos-sdk-proto-fmt-$(containerProtoVer)
proto-all: proto-format proto-lint proto-gen
proto-gen:
@echo "Generating Protobuf files"
$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace tendermintdev/sdk-proto-gen sh ./scripts/protocgen.sh
proto-format:
@echo "Formatting Protobuf files"
$(DOCKER) run --rm -v $(CURDIR):/workspace \
--workdir /workspace tendermintdev/docker-build-proto \
find ./ -not -path "./third_party/*" -name *.proto -exec clang-format -i {} \;
@if docker ps -a --format '{{.Names}}' | grep -Eq "^${containerProtoGen}$$"; then docker start -a $(containerProtoGen); else docker run --name $(containerProtoGen) -v $(CURDIR):/workspace --workdir /workspace $(containerProtoImage) \
sh ./scripts/protocgen.sh; fi
# This generates the SDK's custom wrapper for google.protobuf.Any. It should only be run manually when needed
proto-gen-any:
$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace tendermintdev/sdk-proto-gen sh ./scripts/protocgen-any.sh
@echo "Generating Protobuf Any"
$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace $(containerProtoImage) sh ./scripts/protocgen-any.sh
proto-swagger-gen:
@./scripts/protoc-swagger-gen.sh
@echo "Generating Protobuf Swagger"
@if docker ps -a --format '{{.Names}}' | grep -Eq "^${containerProtoGenSwagger}$$"; then docker start -a $(containerProtoGenSwagger); else docker run --name $(containerProtoGenSwagger) -v $(CURDIR):/workspace --workdir /workspace $(containerProtoImage) \
sh ./scripts/protoc-swagger-gen.sh; fi
proto-format:
@echo "Formatting Protobuf files"
@if docker ps -a --format '{{.Names}}' | grep -Eq "^${containerProtoFmt}$$"; then docker start -a $(containerProtoFmt); else docker run --name $(containerProtoFmt) -v $(CURDIR):/workspace --workdir /workspace $(containerProtoImage) \
find ./ -not -path "./third_party/*" -name *.proto -exec clang-format -i {}; fi
proto-lint:
@$(DOCKER_BUF) check lint --error-format=json
@$(DOCKER_BUF) lint --error-format=json
proto-check-breaking:
@$(DOCKER_BUF) check breaking --against-input $(HTTPS_GIT)#branch=master
@$(DOCKER_BUF) breaking --against $(HTTPS_GIT)#branch=master
TM_URL = https://raw.githubusercontent.com/tendermint/tendermint/v0.34.0-rc6/proto/tendermint
GOGO_PROTO_URL = https://raw.githubusercontent.com/regen-network/protobuf/cosmos

View File

@ -63,6 +63,10 @@ For more, please go to the [Cosmos SDK Docs](./docs/).
The Cosmos Hub application, `gaia`, has moved to its [own repository](https://github.com/cosmos/gaia). Go there to join the Cosmos Hub mainnet and more.
## Interblockchain Communication (IBC)
The IBC module for the SDK has moved to its [own repository](https://github.com/cosmos/ibc-go). Go there to build and integrate with the IBC module.
## Starport
If you are starting a new app or a new module you can use [Starport](https://github.com/tendermint/starport) to help you get started and speed up development. If you have any questions or find a bug, feel free to open an issue in the repo.

View File

@ -52,3 +52,81 @@ the code.
- HD key derivation, local and Ledger, and all key-management functionality
- Side-channel attack vectors with our implementations
- e.g. key exfiltration based on time or memory-access patterns when decrypting privkey
## Disclosure Process
The Cosmos SDK team uses the following disclosure process:
1. After a security report is received, the Cosmos SDK team works to verify the issue and confirm its severity level using Common Vulnerability Scoring System (CVSS).
1. The Cosmos SDK team collaborates with the Tendermint and Gaia teams to determine the vulnerability's potential impact on the Cosmos Hub and partners.
1. Patches are prepared in private repositories for eligible releases of Cosmos SDK. See [Stable Releases](https://github.com/cosmos/cosmos-sdk/blob/master/STABLE_RELEASES.md) for a list of eligible releases.
1. If it is determined that a CVE-ID is required, we request a CVE through a CVE Numbering Authority.
1. We notify the community that a security release is coming to give users time to prepare their systems for the update. Notifications can include forum posts, tweets, and emails to partners and validators.
1. 24 hours after the notification, fixes are applied publicly and new releases are issued.
1. The Gaia team updates their Tendermint Core and Cosmos SDK dependencies to use these releases and then issues new Gaia releases.
1. After releases are available for Tendermint Core, Cosmos SDK, and Gaia, we notify the community again through the same channels. We also publish a Security Advisory on Github and publish the CVE, as long as the Security Advisory and the CVE do not include information on how to exploit these vulnerabilities beyond the information that is available in the patch.
1. After the community is notified, Tendermint pays out any relevant bug bounties to submitters.
1. One week after the releases go out, we publish a post with details and our response to the vulnerability.
This process can take some time. Every effort is made to handle the bug in as timely a manner as possible. However, it's important that we follow this security process to ensure that disclosures are handled consistently and to keep Cosmos SDK and its downstream dependent projects--including but not limited to Gaia and the Cosmos Hub--as secure as possible.
### Disclosure Communications
Communications to partners usually include the following details:
1. Affected version or versions
1. New release version
1. Impact on user funds
1. For timed releases, a date and time that the new release will be made available
1. Impact on the partners if upgrades are not completed in a timely manner
1. Potential required actions if an adverse condition arises during the security release process
An example notice looks like:
```
Dear Cosmos SDK partners,
A critical security vulnerability has been identified in Cosmos SDK vX.X.X.
User funds are NOT at risk; however, the vulnerability can result in a chain halt.
This notice is to inform you that on [[**March 1 at 1pm EST/6pm UTC**]], we will be releasing Cosmos SDK vX.X.Y to fix the security issue.
We ask all validators to upgrade their nodes ASAP.
If the chain halts, validators with sufficient voting power must upgrade and come online for the chain to resume.
```
### Example Timeline
The following timeline is an example of triage and response. Each task identifies the required roles and team members; however, multiple people can play each role and each person may play multiple roles.
#### 24+ Hours Before Release Time
1. Request CVE number (ADMIN)
1. Gather emails and other contact info for validators (COMMS LEAD)
1. Test fixes on a testnet (COSMOS SDK ENG)
1. Write “Security Advisory” for forum (COSMOS SDK LEAD)
#### 24 Hours Before Release Time
1. Post “Security Advisory” pre-notification on forum (COSMOS SDK LEAD)
1. Post Tweet linking to forum post (COMMS LEAD)
1. Announce security advisory/link to post in various other social channels (Telegram, Discord) (COMMS LEAD)
1. Send emails to partners or other users (PARTNERSHIPS LEAD)
#### Release Time
1. Cut Cosmos SDK releases for eligible versions (COSMOS SDK ENG)
1. Cut Gaia release for eligible versions (GAIA ENG)
1. Post “Security releases” on forum (COSMOS SDK LEAD)
1. Post new Tweet linking to forum post (COMMS LEAD)
1. Remind everyone using social channels (Telegram, Discord) that the release is out (COMMS LEAD)
1. Send emails to validators and other users (COMMS LEAD)
1. Publish Security Advisory and CVE if the CVE has no sensitive information (ADMIN)
#### After Release Time
1. Write forum post with exploit details (COSMOS SDK LEAD)
1. Approve payout on HackerOne for submitter (ADMIN)
#### 7 Days After Release Time
1. Publish CVE if it has not yet been published (ADMIN)
1. Publish forum post with exploit details (COSMOS SDK ENG, COSMOS SDK LEAD)

View File

@ -109,6 +109,8 @@ func (app *BaseApp) Info(req abci.RequestInfo) abci.ResponseInfo {
return abci.ResponseInfo{
Data: app.name,
Version: app.version,
AppVersion: app.appVersion,
LastBlockHeight: lastCommitID.Version,
LastBlockAppHash: lastCommitID.Hash,
}
@ -755,7 +757,7 @@ func handleQueryApp(app *BaseApp, path []string, req abci.RequestQuery) abci.Res
return abci.ResponseQuery{
Codespace: sdkerrors.RootCodespace,
Height: req.Height,
Value: []byte(app.appVersion),
Value: []byte(app.version),
}
default:
@ -797,28 +799,32 @@ func handleQueryStore(app *BaseApp, path []string, req abci.RequestQuery) abci.R
func handleQueryP2P(app *BaseApp, path []string) abci.ResponseQuery {
// "/p2p" prefix for p2p queries
if len(path) >= 4 {
cmd, typ, arg := path[1], path[2], path[3]
switch cmd {
case "filter":
switch typ {
case "addr":
return app.FilterPeerByAddrPort(arg)
case "id":
return app.FilterPeerByID(arg)
}
default:
return sdkerrors.QueryResult(sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, "expected second parameter to be 'filter'"))
}
if len(path) < 4 {
return sdkerrors.QueryResult(
sdkerrors.Wrap(
sdkerrors.ErrUnknownRequest, "path should be p2p filter <addr|id> <parameter>",
),
)
}
return sdkerrors.QueryResult(
sdkerrors.Wrap(
sdkerrors.ErrUnknownRequest, "expected path is p2p filter <addr|id> <parameter>",
),
)
var resp abci.ResponseQuery
cmd, typ, arg := path[1], path[2], path[3]
switch cmd {
case "filter":
switch typ {
case "addr":
resp = app.FilterPeerByAddrPort(arg)
case "id":
resp = app.FilterPeerByID(arg)
}
default:
resp = sdkerrors.QueryResult(sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, "expected second parameter to be 'filter'"))
}
return resp
}
func handleQueryCustom(app *BaseApp, path []string, req abci.RequestQuery) abci.ResponseQuery {

View File

@ -19,6 +19,7 @@ import (
"github.com/cosmos/cosmos-sdk/store/rootmulti"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
"github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx"
)
const (
@ -117,7 +118,11 @@ type BaseApp struct { // nolint: maligned
minRetainBlocks uint64
// application's version string
appVersion string
version string
// application's protocol version that increments on every upgrade
// if BaseApp is passed to the upgrade keeper's NewKeeper method.
appVersion uint64
// recovery handler for app.runTx method
runTxRecoveryMiddleware recoveryMiddleware
@ -170,11 +175,16 @@ func (app *BaseApp) Name() string {
return app.name
}
// AppVersion returns the application's version string.
func (app *BaseApp) AppVersion() string {
// AppVersion returns the application's protocol version.
func (app *BaseApp) AppVersion() uint64 {
return app.appVersion
}
// Version returns the application's version string.
func (app *BaseApp) Version() string {
return app.version
}
// Logger returns the logger of the BaseApp.
func (app *BaseApp) Logger() log.Logger {
return app.logger
@ -694,37 +704,39 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (*s
}
var (
msgEvents sdk.Events
msgResult *sdk.Result
msgFqName string
err error
msgResult *sdk.Result
eventMsgName string // name to use as value in event `message.action`
err error
)
if svcMsg, ok := msg.(sdk.ServiceMsg); ok {
msgFqName = svcMsg.MethodName
handler := app.msgServiceRouter.Handler(msgFqName)
if handler == nil {
return nil, sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "unrecognized message service method: %s; message index: %d", msgFqName, i)
}
msgResult, err = handler(ctx, svcMsg.Request)
} else {
if handler := app.msgServiceRouter.Handler(msg); handler != nil {
// ADR 031 request type routing
msgResult, err = handler(ctx, msg)
eventMsgName = sdk.MsgTypeURL(msg)
} else if legacyMsg, ok := msg.(legacytx.LegacyMsg); ok {
// legacy sdk.Msg routing
msgRoute := msg.Route()
msgFqName = msg.Type()
// Assuming that the app developer has migrated all their Msgs to
// proto messages and has registered all `Msg services`, then this
// path should never be called, because all those Msgs should be
// registered within the `msgServiceRouter` already.
msgRoute := legacyMsg.Route()
eventMsgName = legacyMsg.Type()
handler := app.router.Route(ctx, msgRoute)
if handler == nil {
return nil, sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "unrecognized message route: %s; message index: %d", msgRoute, i)
}
msgResult, err = handler(ctx, msg)
} else {
return nil, sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "can't route message %+v", msg)
}
if err != nil {
return nil, sdkerrors.Wrapf(err, "failed to execute message; message index: %d", i)
}
msgEvents = sdk.Events{
sdk.NewEvent(sdk.EventTypeMessage, sdk.NewAttribute(sdk.AttributeKeyAction, msgFqName)),
msgEvents := sdk.Events{
sdk.NewEvent(sdk.EventTypeMessage, sdk.NewAttribute(sdk.AttributeKeyAction, eventMsgName)),
}
msgEvents = msgEvents.AppendEvents(msgResult.GetEvents())
@ -734,7 +746,7 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (*s
// separate each result.
events = events.AppendEvents(msgEvents)
txMsgData.Data = append(txMsgData.Data, &sdk.MsgData{MsgType: msg.Type(), Data: msgResult.Data})
txMsgData.Data = append(txMsgData.Data, &sdk.MsgData{MsgType: sdk.MsgTypeURL(msg), Data: msgResult.Data})
msgLogs = append(msgLogs, sdk.NewABCIMessageLog(uint32(i), msgResult.Log, msgEvents))
}

View File

@ -164,7 +164,7 @@ func setupBaseAppWithSnapshots(t *testing.T, blocks uint, blockTxs int, options
tx.Msgs = append(tx.Msgs, msgKeyValue{Key: key, Value: value})
keyCounter++
}
txBytes, err := codec.MarshalBinaryBare(tx)
txBytes, err := codec.Marshal(tx)
require.NoError(t, err)
resp := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
require.True(t, resp.IsOK(), "%v", resp.String())
@ -340,21 +340,21 @@ func TestSetLoader(t *testing.T) {
}
}
func TestAppVersionSetterGetter(t *testing.T) {
func TestVersionSetterGetter(t *testing.T) {
logger := defaultLogger()
pruningOpt := SetPruning(store.PruneDefault)
db := dbm.NewMemDB()
name := t.Name()
app := NewBaseApp(name, logger, db, nil, pruningOpt)
require.Equal(t, "", app.AppVersion())
require.Equal(t, "", app.Version())
res := app.Query(abci.RequestQuery{Path: "app/version"})
require.True(t, res.IsOK())
require.Equal(t, "", string(res.Value))
versionString := "1.0.0"
app.SetAppVersion(versionString)
require.Equal(t, versionString, app.AppVersion())
app.SetVersion(versionString)
require.Equal(t, versionString, app.Version())
res = app.Query(abci.RequestQuery{Path: "app/version"})
require.True(t, res.IsOK())
require.Equal(t, versionString, string(res.Value))
@ -476,7 +476,7 @@ func TestTxDecoder(t *testing.T) {
app := newBaseApp(t.Name())
tx := newTxCounter(1, 0)
txBytes := codec.MustMarshalBinaryBare(tx)
txBytes := codec.MustMarshal(tx)
dTx, err := app.txDecoder(txBytes)
require.NoError(t, err)
@ -498,7 +498,7 @@ func TestInfo(t *testing.T) {
assert.Equal(t, t.Name(), res.GetData())
assert.Equal(t, int64(0), res.LastBlockHeight)
require.Equal(t, []uint8(nil), res.LastBlockAppHash)
require.Equal(t, app.AppVersion(), res.AppVersion)
// ----- test a proper response -------
// TODO
}
@ -510,7 +510,7 @@ func TestBaseAppOptionSeal(t *testing.T) {
app.SetName("")
})
require.Panics(t, func() {
app.SetAppVersion("")
app.SetVersion("")
})
require.Panics(t, func() {
app.SetDB(nil)
@ -804,7 +804,7 @@ func testTxDecoder(cdc *codec.LegacyAmino) sdk.TxDecoder {
return nil, sdkerrors.Wrap(sdkerrors.ErrTxDecode, "tx bytes are empty")
}
err := cdc.UnmarshalBinaryBare(txBytes, &tx)
err := cdc.Unmarshal(txBytes, &tx)
if err != nil {
return nil, sdkerrors.ErrTxDecode
}
@ -935,7 +935,7 @@ func TestCheckTx(t *testing.T) {
for i := int64(0); i < nTxs; i++ {
tx := newTxCounter(i, 0) // no messages
txBytes, err := codec.MarshalBinaryBare(tx)
txBytes, err := codec.Marshal(tx)
require.NoError(t, err)
r := app.CheckTx(abci.RequestCheckTx{Tx: txBytes})
require.Empty(t, r.GetEvents())
@ -991,7 +991,7 @@ func TestDeliverTx(t *testing.T) {
counter := int64(blockN*txPerHeight + i)
tx := newTxCounter(counter, counter)
txBytes, err := codec.MarshalBinaryBare(tx)
txBytes, err := codec.Marshal(tx)
require.NoError(t, err)
res := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
@ -1041,7 +1041,7 @@ func TestMultiMsgDeliverTx(t *testing.T) {
header := tmproto.Header{Height: 1}
app.BeginBlock(abci.RequestBeginBlock{Header: header})
tx := newTxCounter(0, 0, 1, 2)
txBytes, err := codec.MarshalBinaryBare(tx)
txBytes, err := codec.Marshal(tx)
require.NoError(t, err)
res := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
require.True(t, res.IsOK(), fmt.Sprintf("%v", res))
@ -1061,7 +1061,7 @@ func TestMultiMsgDeliverTx(t *testing.T) {
tx = newTxCounter(1, 3)
tx.Msgs = append(tx.Msgs, msgCounter2{0})
tx.Msgs = append(tx.Msgs, msgCounter2{1})
txBytes, err = codec.MarshalBinaryBare(tx)
txBytes, err = codec.Marshal(tx)
require.NoError(t, err)
res = app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
require.True(t, res.IsOK(), fmt.Sprintf("%v", res))
@ -1123,7 +1123,7 @@ func TestSimulateTx(t *testing.T) {
app.BeginBlock(abci.RequestBeginBlock{Header: header})
tx := newTxCounter(count, count)
txBytes, err := cdc.MarshalBinaryBare(tx)
txBytes, err := cdc.Marshal(tx)
require.Nil(t, err)
// simulate a message, check gas reported
@ -1252,7 +1252,7 @@ func TestRunInvalidTransaction(t *testing.T) {
registerTestCodec(newCdc)
newCdc.RegisterConcrete(&msgNoDecode{}, "cosmos-sdk/baseapp/msgNoDecode", nil)
txBytes, err := newCdc.MarshalBinaryBare(tx)
txBytes, err := newCdc.Marshal(tx)
require.NoError(t, err)
res := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
@ -1520,7 +1520,7 @@ func TestBaseAppAnteHandler(t *testing.T) {
// the next txs ante handler execution (anteHandlerTxTest).
tx := newTxCounter(0, 0)
tx.setFailOnAnte(true)
txBytes, err := cdc.MarshalBinaryBare(tx)
txBytes, err := cdc.Marshal(tx)
require.NoError(t, err)
res := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
require.Empty(t, res.Events)
@ -1535,7 +1535,7 @@ func TestBaseAppAnteHandler(t *testing.T) {
tx = newTxCounter(0, 0)
tx.setFailOnHandler(true)
txBytes, err = cdc.MarshalBinaryBare(tx)
txBytes, err = cdc.Marshal(tx)
require.NoError(t, err)
res = app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
@ -1551,7 +1551,7 @@ func TestBaseAppAnteHandler(t *testing.T) {
// implicitly checked by previous tx executions
tx = newTxCounter(1, 0)
txBytes, err = cdc.MarshalBinaryBare(tx)
txBytes, err = cdc.Marshal(tx)
require.NoError(t, err)
res = app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
@ -1624,7 +1624,7 @@ func TestGasConsumptionBadTx(t *testing.T) {
tx := newTxCounter(5, 0)
tx.setFailOnAnte(true)
txBytes, err := cdc.MarshalBinaryBare(tx)
txBytes, err := cdc.Marshal(tx)
require.NoError(t, err)
res := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
@ -1632,7 +1632,7 @@ func TestGasConsumptionBadTx(t *testing.T) {
// require next tx to fail due to black gas limit
tx = newTxCounter(5, 0)
txBytes, err = cdc.MarshalBinaryBare(tx)
txBytes, err = cdc.Marshal(tx)
require.NoError(t, err)
res = app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
@ -1992,7 +1992,7 @@ func TestWithRouter(t *testing.T) {
counter := int64(blockN*txPerHeight + i)
tx := newTxCounter(counter, counter)
txBytes, err := codec.MarshalBinaryBare(tx)
txBytes, err := codec.Marshal(tx)
require.NoError(t, err)
res := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})

View File

@ -2,6 +2,9 @@ package baseapp
import (
"fmt"
"reflect"
"github.com/cosmos/cosmos-sdk/client/grpc/reflection"
gogogrpc "github.com/gogo/protobuf/grpc"
abci "github.com/tendermint/tendermint/abci/types"
@ -9,16 +12,21 @@ import (
"google.golang.org/grpc/encoding"
"google.golang.org/grpc/encoding/proto"
"github.com/cosmos/cosmos-sdk/client/grpc/reflection"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)
var protoCodec = encoding.GetCodec(proto.Name)
// GRPCQueryRouter routes ABCI Query requests to GRPC handlers
type GRPCQueryRouter struct {
routes map[string]GRPCQueryHandler
routes map[string]GRPCQueryHandler
// returnTypes is a map of FQ method name => its return type. It is used
// for cache purposes: the first time a method handler is run, we save its
// return type in this map. Then, on subsequent method handler calls, we
// decode the ABCI response bytes using the cached return type.
returnTypes map[string]reflect.Type
interfaceRegistry codectypes.InterfaceRegistry
serviceData []serviceData
}
@ -34,7 +42,8 @@ var _ gogogrpc.Server = &GRPCQueryRouter{}
// NewGRPCQueryRouter creates a new GRPCQueryRouter
func NewGRPCQueryRouter() *GRPCQueryRouter {
return &GRPCQueryRouter{
routes: map[string]GRPCQueryHandler{},
returnTypes: map[string]reflect.Type{},
routes: map[string]GRPCQueryHandler{},
}
}
@ -89,8 +98,17 @@ func (qrt *GRPCQueryRouter) RegisterService(sd *grpc.ServiceDesc, handler interf
if qrt.interfaceRegistry != nil {
return codectypes.UnpackInterfaces(i, qrt.interfaceRegistry)
}
return nil
}, nil)
// If it's the first time we call this handler, then we save
// the return type of the handler in the `returnTypes` map.
// The return type will be used for decoding subsequent requests.
if _, found := qrt.returnTypes[fqName]; !found {
qrt.returnTypes[fqName] = reflect.TypeOf(res)
}
if err != nil {
return abci.ResponseQuery{}, err
}
@ -119,7 +137,6 @@ func (qrt *GRPCQueryRouter) RegisterService(sd *grpc.ServiceDesc, handler interf
// also register the interface reflection gRPC service.
func (qrt *GRPCQueryRouter) SetInterfaceRegistry(interfaceRegistry codectypes.InterfaceRegistry) {
qrt.interfaceRegistry = interfaceRegistry
// Once we have an interface registry, we can register the interface
// registry reflection gRPC service.
reflection.RegisterReflectionServiceServer(
@ -127,3 +144,16 @@ func (qrt *GRPCQueryRouter) SetInterfaceRegistry(interfaceRegistry codectypes.In
reflection.NewReflectionServiceServer(interfaceRegistry),
)
}
// returnTypeOf returns the cached return type of a gRPC method handler. With
// the way the `returnTypes` cache map is set up, the return type of a method
// handler is guaranteed to be found if it's retrieved **after** the method
// handler ran at least once. If not, then a logic error is returned.
func (qrt *GRPCQueryRouter) returnTypeOf(method string) (reflect.Type, error) {
	if returnType, found := qrt.returnTypes[method]; found {
		return returnType, nil
	}
	return nil, sdkerrors.Wrapf(sdkerrors.ErrLogic, "cannot find %s return type", method)
}

View File

@ -2,67 +2,78 @@ package baseapp
import (
"context"
"strconv"
"reflect"
gogogrpc "github.com/gogo/protobuf/grpc"
grpcmiddleware "github.com/grpc-ecosystem/go-grpc-middleware"
grpcrecovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/client"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"
"github.com/cosmos/cosmos-sdk/types/tx"
)
// GRPCQueryRouter returns the GRPCQueryRouter of a BaseApp.
func (app *BaseApp) GRPCQueryRouter() *GRPCQueryRouter {
	return app.grpcQueryRouter
}
// RegisterGRPCServer registers gRPC services directly with the gRPC server.
func (app *BaseApp) RegisterGRPCServer(server gogogrpc.Server) {
// Define an interceptor for all gRPC queries: this interceptor will create
// a new sdk.Context, and pass it into the query handler.
interceptor := func(grpcCtx context.Context, req interface{}, _ *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) {
// If there's some metadata in the context, retrieve it.
md, ok := metadata.FromIncomingContext(grpcCtx)
if !ok {
return nil, status.Error(codes.Internal, "unable to retrieve metadata")
func (app *BaseApp) RegisterGRPCServer(clientCtx client.Context, server gogogrpc.Server) {
// Define an interceptor for all gRPC queries: this interceptor will route
// the query through the `clientCtx`, which itself queries Tendermint.
interceptor := func(grpcCtx context.Context, req interface{}, info *grpc.UnaryServerInfo, _ grpc.UnaryHandler) (interface{}, error) {
// Two things can happen here:
// 1. either we're broadcasting a Tx, in which case we call Tendermint's broadcast endpoint directly,
// 2. or we are querying for state, in which case we call ABCI's Query.
// Case 1. Broadcasting a Tx.
if reqProto, ok := req.(*tx.BroadcastTxRequest); ok {
if !ok {
return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxRequest)(nil), req)
}
return client.TxServiceBroadcast(grpcCtx, clientCtx, reqProto)
}
// Get height header from the request context, if present.
var height int64
if heightHeaders := md.Get(grpctypes.GRPCBlockHeightHeader); len(heightHeaders) > 0 {
height, err = strconv.ParseInt(heightHeaders[0], 10, 64)
if err != nil {
return nil, sdkerrors.Wrapf(
sdkerrors.ErrInvalidRequest,
"Baseapp.RegisterGRPCServer: invalid height header %q: %v", grpctypes.GRPCBlockHeightHeader, err)
}
if err := checkNegativeHeight(height); err != nil {
return nil, err
}
}
// Create the sdk.Context. Passing false as 2nd arg, as we can't
// actually support proofs with gRPC right now.
sdkCtx, err := app.createQueryContext(height, false)
// Case 2. Querying state.
inMd, _ := metadata.FromIncomingContext(grpcCtx)
abciRes, outMd, err := client.RunGRPCQuery(clientCtx, grpcCtx, info.FullMethod, req, inMd)
if err != nil {
return nil, err
}
// Attach the sdk.Context into the gRPC's context.Context.
grpcCtx = context.WithValue(grpcCtx, sdk.SdkContextKey, sdkCtx)
// Add relevant gRPC headers
if height == 0 {
height = sdkCtx.BlockHeight() // If height was not set in the request, set it to the latest
// We need to know the return type of the grpc method for
// unmarshalling abciRes.Value.
//
// When we call each method handler for the first time, we save its
// return type in the `returnTypes` map (see the method handler in
// `grpcrouter.go`). By this time, the method handler has already run
// at least once (in the RunGRPCQuery call), so we're sure the
// returnType maps is populated for this method. We're retrieving it
// for decoding.
returnType, err := app.GRPCQueryRouter().returnTypeOf(info.FullMethod)
if err != nil {
return nil, err
}
md = metadata.Pairs(grpctypes.GRPCBlockHeightHeader, strconv.FormatInt(height, 10))
grpc.SetHeader(grpcCtx, md)
return handler(grpcCtx, req)
// returnType is a pointer to a struct. Here, we're creating res which
// is a new pointer to the underlying struct.
res := reflect.New(returnType.Elem()).Interface()
err = protoCodec.Unmarshal(abciRes.Value, res)
if err != nil {
return nil, err
}
// Send the metadata header back. The metadata currently includes:
// - block height.
err = grpc.SendHeader(grpcCtx, outMd)
if err != nil {
return nil, err
}
return res, nil
}
// Loop through all services and methods, add the interceptor, and register

View File

@ -29,12 +29,17 @@ func NewMsgServiceRouter() *MsgServiceRouter {
}
// MsgServiceHandler defines a function type which handles Msg service message.
type MsgServiceHandler = func(ctx sdk.Context, req sdk.MsgRequest) (*sdk.Result, error)
type MsgServiceHandler = func(ctx sdk.Context, req sdk.Msg) (*sdk.Result, error)
// Handler returns the MsgServiceHandler for a given query route path or nil
// Handler returns the MsgServiceHandler for a given msg or nil if not found.
func (msr *MsgServiceRouter) Handler(msg sdk.Msg) MsgServiceHandler {
return msr.routes[sdk.MsgTypeURL(msg)]
}
// HandlerbyTypeURL returns the MsgServiceHandler for a given query route path or nil
// if not found.
func (msr *MsgServiceRouter) Handler(methodName string) MsgServiceHandler {
return msr.routes[methodName]
func (msr *MsgServiceRouter) HandlerbyTypeURL(typeURL string) MsgServiceHandler {
return msr.routes[typeURL]
}
// RegisterService implements the gRPC Server.RegisterService method. sd is a gRPC
@ -50,20 +55,38 @@ func (msr *MsgServiceRouter) RegisterService(sd *grpc.ServiceDesc, handler inter
fqMethod := fmt.Sprintf("/%s/%s", sd.ServiceName, method.MethodName)
methodHandler := method.Handler
var requestTypeName string
// NOTE: This is how we pull the concrete request type for each handler for registering in the InterfaceRegistry.
// This approach is maybe a bit hacky, but less hacky than reflecting on the handler object itself.
// We use a no-op interceptor to avoid actually calling into the handler itself.
_, _ = methodHandler(nil, context.Background(), func(i interface{}) error {
msg, ok := i.(sdk.Msg)
if !ok {
// We panic here because there is no other alternative and the app cannot be initialized correctly
// this should only happen if there is a problem with code generation in which case the app won't
// work correctly anyway.
panic(fmt.Errorf("can't register request type %T for service method %s", i, fqMethod))
}
requestTypeName = sdk.MsgTypeURL(msg)
return nil
}, noopInterceptor)
// Check that the service Msg fully-qualified method name has already
// been registered (via RegisterInterfaces). If the user registers a
// service without registering according service Msg type, there might be
// some unexpected behavior down the road. Since we can't return an error
// (`Server.RegisterService` interface restriction) we panic (at startup).
serviceMsg, err := msr.interfaceRegistry.Resolve(fqMethod)
if err != nil || serviceMsg == nil {
reqType, err := msr.interfaceRegistry.Resolve(requestTypeName)
if err != nil || reqType == nil {
panic(
fmt.Errorf(
"type_url %s has not been registered yet. "+
"Before calling RegisterService, you must register all interfaces by calling the `RegisterInterfaces` "+
"method on module.BasicManager. Each module should call `msgservice.RegisterMsgServiceDesc` inside its "+
"`RegisterInterfaces` method with the `_Msg_serviceDesc` generated by proto-gen",
fqMethod,
requestTypeName,
),
)
}
@ -72,7 +95,7 @@ func (msr *MsgServiceRouter) RegisterService(sd *grpc.ServiceDesc, handler inter
// registered more than once, then we should error. Since we can't
// return an error (`Server.RegisterService` interface restriction) we
// panic (at startup).
_, found := msr.routes[fqMethod]
_, found := msr.routes[requestTypeName]
if found {
panic(
fmt.Errorf(
@ -83,7 +106,7 @@ func (msr *MsgServiceRouter) RegisterService(sd *grpc.ServiceDesc, handler inter
)
}
msr.routes[fqMethod] = func(ctx sdk.Context, req sdk.MsgRequest) (*sdk.Result, error) {
msr.routes[requestTypeName] = func(ctx sdk.Context, req sdk.Msg) (*sdk.Result, error) {
ctx = ctx.WithEventManager(sdk.NewEventManager())
interceptor := func(goCtx context.Context, _ interface{}, _ *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
goCtx = context.WithValue(goCtx, sdk.SdkContextKey, ctx)
@ -112,3 +135,6 @@ func (msr *MsgServiceRouter) SetInterfaceRegistry(interfaceRegistry codectypes.I
}
// noopDecoder ignores the supplied request value and always succeeds.
func noopDecoder(_ interface{}) error {
	return nil
}
// noopInterceptor is a gRPC unary interceptor that does nothing and returns
// (nil, nil). It is passed to generated method handlers when we only need
// their decode callback to run (see RegisterService), avoiding an actual call
// into the handler implementation.
func noopInterceptor(_ context.Context, _ interface{}, _ *grpc.UnaryServerInfo, _ grpc.UnaryHandler) (interface{}, error) {
	return nil, nil
}

View File

@ -80,11 +80,11 @@ func TestMsgService(t *testing.T) {
)
_ = app.BeginBlock(abci.RequestBeginBlock{Header: tmproto.Header{Height: 1}})
msg := testdata.NewServiceMsgCreateDog(&testdata.MsgCreateDog{Dog: &testdata.Dog{Name: "Spot"}})
msg := testdata.MsgCreateDog{Dog: &testdata.Dog{Name: "Spot"}}
txBuilder := encCfg.TxConfig.NewTxBuilder()
txBuilder.SetFeeAmount(testdata.NewTestFeeAmount())
txBuilder.SetGasLimit(testdata.NewTestGasLimit())
err := txBuilder.SetMsgs(msg)
err := txBuilder.SetMsgs(&msg)
require.NoError(t, err)
// First round: we gather all the signer infos. We use the "set empty

View File

@ -95,12 +95,16 @@ func (app *BaseApp) SetParamStore(ps ParamStore) {
app.paramStore = ps
}
// SetAppVersion sets the application's version string.
func (app *BaseApp) SetAppVersion(v string) {
// SetVersion sets the application's version string.
func (app *BaseApp) SetVersion(v string) {
if app.sealed {
panic("SetAppVersion() on sealed BaseApp")
panic("SetVersion() on sealed BaseApp")
}
app.version = v
}
// SetProtocolVersion sets the application's protocol version
// NOTE(review): unlike SetVersion, this setter does not panic when the
// BaseApp is sealed — confirm this asymmetry is intentional.
func (app *BaseApp) SetProtocolVersion(v uint64) {
	app.appVersion = v
}

View File

@ -26,6 +26,8 @@ lint:
breaking:
use:
- FILE
except:
- FIELD_NO_DELETE
ignore:
- tendermint
- gogoproto

View File

@ -8,7 +8,6 @@ import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/tendermint/tendermint/libs/cli"
rpchttp "github.com/tendermint/tendermint/rpc/client/http"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
@ -94,9 +93,9 @@ func ReadPersistentCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Cont
clientCtx = clientCtx.WithOutputFormat(output)
}
if clientCtx.HomeDir == "" || flagSet.Changed(flags.FlagHome) {
homeDir, _ := flagSet.GetString(flags.FlagHome)
clientCtx = clientCtx.WithHomeDir(homeDir)
if !clientCtx.Simulate || flagSet.Changed(flags.FlagDryRun) {
dryRun, _ := flagSet.GetBool(flags.FlagDryRun)
clientCtx = clientCtx.WithSimulation(dryRun)
}
if clientCtx.KeyringDir == "" || flagSet.Changed(flags.FlagKeyringDir) {
@ -120,7 +119,7 @@ func ReadPersistentCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Cont
keyringBackend, _ := flagSet.GetString(flags.FlagKeyringBackend)
if keyringBackend != "" {
kr, err := newKeyringFromFlags(clientCtx, keyringBackend)
kr, err := NewKeyringFromBackend(clientCtx, keyringBackend)
if err != nil {
return clientCtx, err
}
@ -134,7 +133,7 @@ func ReadPersistentCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Cont
if rpcURI != "" {
clientCtx = clientCtx.WithNodeURI(rpcURI)
client, err := rpchttp.New(rpcURI, "/websocket")
client, err := NewClientFromNode(rpcURI)
if err != nil {
return clientCtx, err
}
@ -191,11 +190,6 @@ func readTxCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context, err
clientCtx = clientCtx.WithGenerateOnly(genOnly)
}
if !clientCtx.Simulate || flagSet.Changed(flags.FlagDryRun) {
dryRun, _ := flagSet.GetBool(flags.FlagDryRun)
clientCtx = clientCtx.WithSimulation(dryRun)
}
if !clientCtx.Offline || flagSet.Changed(flags.FlagOffline) {
offline, _ := flagSet.GetBool(flags.FlagOffline)
clientCtx = clientCtx.WithOffline(offline)
@ -255,6 +249,21 @@ func readTxCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Context, err
return clientCtx, nil
}
// ReadHomeFlag checks whether the --home flag was explicitly changed on the
// command and, if so, updates the HomeDir field of the client Context.
//
// NOTE(review): a known issue was observed where `simd init --home ./test`
// does not produce a client.toml under test/config — confirm whether this
// helper is involved before removing this note.
func ReadHomeFlag(clientCtx Context, cmd *cobra.Command) Context {
	if !cmd.Flags().Changed(flags.FlagHome) {
		return clientCtx
	}
	rootDir, _ := cmd.Flags().GetString(flags.FlagHome)
	return clientCtx.WithHomeDir(rootDir)
}
// GetClientQueryContext returns a Context from a command with fields set based on flags
// defined in AddQueryFlagsToCmd. An error is returned if any flag query fails.
//

View File

@ -97,8 +97,7 @@ func TestSetCmdClientContextHandler(t *testing.T) {
tc := tc
t.Run(tc.name, func(t *testing.T) {
ctx := context.Background()
ctx = context.WithValue(ctx, client.ClientContextKey, &client.Context{})
ctx := context.WithValue(context.Background(), client.ClientContextKey, &client.Context{})
cmd := newCmd()
_ = testutil.ApplyMockIODiscardOutErr(cmd)

96
client/config/cmd.go Normal file
View File

@ -0,0 +1,96 @@
package config
import (
"encoding/json"
"fmt"
"path/filepath"
tmcli "github.com/tendermint/tendermint/libs/cli"
"github.com/spf13/cobra"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
)
// Cmd returns a CLI command to interactively create an application CLI
// config file.
func Cmd() *cobra.Command {
	return &cobra.Command{
		Use:   "config <key> [value]",
		Short: "Create or query an application CLI configuration file",
		RunE:  runConfigCmd,
		Args:  cobra.RangeArgs(0, 2),
	}
}
// runConfigCmd implements the config command: with no arguments it prints the
// whole client config as JSON, with one argument it prints that key's value,
// and with two arguments it sets the key to the given value and persists the
// updated config to client.toml.
func runConfigCmd(cmd *cobra.Command, args []string) error {
	clientCtx := client.GetClientContextFromCmd(cmd)
	configPath := filepath.Join(clientCtx.HomeDir, "config")

	conf, err := getClientConfig(configPath, clientCtx.Viper)
	if err != nil {
		return fmt.Errorf("couldn't get client config: %w", err)
	}

	switch len(args) {
	case 0:
		// print all client config fields to stdout
		s, _ := json.MarshalIndent(conf, "", "\t")
		cmd.Println(string(s))

	case 1:
		// it's a get
		key := args[0]
		switch key {
		case flags.FlagChainID:
			cmd.Println(conf.ChainID)
		case flags.FlagKeyringBackend:
			cmd.Println(conf.KeyringBackend)
		case tmcli.OutputFlag:
			cmd.Println(conf.Output)
		case flags.FlagNode:
			cmd.Println(conf.Node)
		case flags.FlagBroadcastMode:
			cmd.Println(conf.BroadcastMode)
		default:
			err := errUnknownConfigKey(key)
			return fmt.Errorf("couldn't get the value for the key: %v, error: %w", key, err)
		}

	case 2:
		// it's a set
		key, value := args[0], args[1]
		switch key {
		case flags.FlagChainID:
			conf.SetChainID(value)
		case flags.FlagKeyringBackend:
			conf.SetKeyringBackend(value)
		case tmcli.OutputFlag:
			conf.SetOutput(value)
		case flags.FlagNode:
			conf.SetNode(value)
		case flags.FlagBroadcastMode:
			conf.SetBroadcastMode(value)
		default:
			return errUnknownConfigKey(key)
		}

		confFile := filepath.Join(configPath, "client.toml")
		if err := writeConfigToFile(confFile, conf); err != nil {
			return fmt.Errorf("could not write client config to the file: %w", err)
		}

	default:
		// Unreachable: cobra.RangeArgs(0, 2) guarantees 0-2 arguments.
		panic("could not execute config command")
	}

	return nil
}
// errUnknownConfigKey builds the error reported when a requested key is not
// one of the supported client configuration fields.
func errUnknownConfigKey(key string) error {
	return fmt.Errorf("unknown configuration key: %q", key)
}

96
client/config/config.go Normal file
View File

@ -0,0 +1,96 @@
package config
import (
"fmt"
"os"
"path/filepath"
"github.com/cosmos/cosmos-sdk/client"
)
// Default constants: fallback client configuration values used when no
// client.toml file exists yet (see defaultClientConfig).
const (
	chainID        = ""                      // no default chain ID
	keyringBackend = "os"                    // OS-native keyring backend
	output         = "text"                  // CLI output format
	node           = "tcp://localhost:26657" // Tendermint RPC endpoint
	broadcastMode  = "sync"                  // transaction broadcasting mode
)
// ClientConfig is the client-side configuration persisted in client.toml:
// chain ID, keyring backend, CLI output format, node address and transaction
// broadcast mode.
type ClientConfig struct {
	ChainID        string `mapstructure:"chain-id" json:"chain-id"`
	KeyringBackend string `mapstructure:"keyring-backend" json:"keyring-backend"`
	Output         string `mapstructure:"output" json:"output"`
	Node           string `mapstructure:"node" json:"node"`
	BroadcastMode  string `mapstructure:"broadcast-mode" json:"broadcast-mode"`
}
// defaultClientConfig returns the reference to ClientConfig with default values.
func defaultClientConfig() *ClientConfig {
	return &ClientConfig{
		ChainID:        chainID,
		KeyringBackend: keyringBackend,
		Output:         output,
		Node:           node,
		BroadcastMode:  broadcastMode,
	}
}
// SetChainID sets the chain ID on the client config.
func (c *ClientConfig) SetChainID(chainID string) {
	c.ChainID = chainID
}
// SetKeyringBackend sets the keyring backend on the client config.
func (c *ClientConfig) SetKeyringBackend(keyringBackend string) {
	c.KeyringBackend = keyringBackend
}
// SetOutput sets the CLI output format on the client config.
func (c *ClientConfig) SetOutput(output string) {
	c.Output = output
}
// SetNode sets the node address on the client config.
func (c *ClientConfig) SetNode(node string) {
	c.Node = node
}
// SetBroadcastMode sets the transaction broadcast mode on the client config.
func (c *ClientConfig) SetBroadcastMode(broadcastMode string) {
	c.BroadcastMode = broadcastMode
}
// ReadFromClientConfig reads values from the client.toml file and updates
// the client Context accordingly. If the file does not exist yet, it is first
// created and populated with default values.
func ReadFromClientConfig(ctx client.Context) (client.Context, error) {
	configPath := filepath.Join(ctx.HomeDir, "config")
	configFilePath := filepath.Join(configPath, "client.toml")
	conf := defaultClientConfig()

	// if the client.toml file does not exist we create it and write default
	// ClientConfig values into it.
	if _, err := os.Stat(configFilePath); os.IsNotExist(err) {
		if err := ensureConfigPath(configPath); err != nil {
			return ctx, fmt.Errorf("couldn't make client config: %w", err)
		}

		if err := writeConfigToFile(configFilePath, conf); err != nil {
			return ctx, fmt.Errorf("could not write client config to the file: %w", err)
		}
	}

	conf, err := getClientConfig(configPath, ctx.Viper)
	if err != nil {
		return ctx, fmt.Errorf("couldn't get client config: %w", err)
	}

	// The output format and chain ID are applied before building the keyring,
	// since NewKeyringFromBackend reads from the updated context.
	ctx = ctx.WithOutputFormat(conf.Output).
		WithChainID(conf.ChainID)

	kr, err := client.NewKeyringFromBackend(ctx, conf.KeyringBackend)
	if err != nil {
		return ctx, fmt.Errorf("couldn't get key ring: %w", err)
	}
	ctx = ctx.WithKeyring(kr)

	// https://github.com/cosmos/cosmos-sdk/issues/8986
	// rpcClient is deliberately not named `client`, to avoid shadowing the
	// imported `client` package.
	rpcClient, err := client.NewClientFromNode(conf.Node)
	if err != nil {
		return ctx, fmt.Errorf("couldn't get client from nodeURI: %w", err)
	}

	ctx = ctx.WithNodeURI(conf.Node).
		WithClient(rpcClient).
		WithBroadcastMode(conf.BroadcastMode)

	return ctx, nil
}

View File

@ -0,0 +1,107 @@
package config_test
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"testing"
"github.com/stretchr/testify/require"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/config"
"github.com/cosmos/cosmos-sdk/client/flags"
clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli"
"github.com/cosmos/cosmos-sdk/x/staking/client/cli"
)
const (
nodeEnv = "NODE"
testNode1 = "http://localhost:1"
testNode2 = "http://localhost:2"
)
// initClientContext initiates a client Context for tests, backed by a
// temporary home directory; when envVar is non-empty it is exported as the
// NODE environment variable. The returned func cleans up the home directory.
func initClientContext(t *testing.T, envVar string) (client.Context, func()) {
	home := t.TempDir()
	clientCtx := client.Context{}.
		WithHomeDir(home).
		WithViper("")

	// The original ignored these errors; a failed bind/set would silently
	// invalidate the whole test setup.
	require.NoError(t, clientCtx.Viper.BindEnv(nodeEnv))
	if envVar != "" {
		require.NoError(t, os.Setenv(nodeEnv, envVar))
	}

	clientCtx, err := config.ReadFromClientConfig(clientCtx)
	require.NoError(t, err)

	return clientCtx, func() { _ = os.RemoveAll(home) }
}
// TestConfigCmd sets a config value via the config command and verifies that
// a subsequent get returns the env-var-provided node address.
func TestConfigCmd(t *testing.T) {
	clientCtx, cleanup := initClientContext(t, testNode1)
	defer func() {
		os.Unsetenv(nodeEnv)
		cleanup()
	}()

	// NODE=http://localhost:1 ./build/simd config node http://localhost:2
	cmd := config.Cmd()
	args := []string{"node", testNode2}
	_, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, args)
	require.NoError(t, err)

	// ./build/simd config node  -> http://localhost:1 (env var wins)
	b := bytes.NewBufferString("")
	cmd.SetOut(b)
	cmd.SetArgs([]string{"node"})
	// The original discarded Execute's error; check it so a command failure
	// doesn't surface as a confusing output mismatch below.
	require.NoError(t, cmd.Execute())

	out, err := ioutil.ReadAll(b)
	require.NoError(t, err)
	// require.Equal takes (expected, actual); the original had them reversed.
	require.Equal(t, testNode1+"\n", string(out))
}
// TestConfigCmdEnvFlag verifies the node-address resolution priority:
// an explicit --node flag beats the NODE env variable, which in turn beats
// the default from client.toml.
func TestConfigCmdEnvFlag(t *testing.T) {
	const (
		defaultNode = "http://localhost:26657"
	)

	// Table of env var / flag combinations and the node address each run is
	// expected to dial.
	tt := []struct {
		name    string
		envVar  string
		args    []string
		expNode string
	}{
		{"env var is set with no flag", testNode1, []string{"validators"}, testNode1},
		{"env var is set with a flag", testNode1, []string{"validators", fmt.Sprintf("--%s=%s", flags.FlagNode, testNode2)}, testNode2},
		{"env var is not set with no flag", "", []string{"validators"}, defaultNode},
		{"env var is not set with a flag", "", []string{"validators", fmt.Sprintf("--%s=%s", flags.FlagNode, testNode2)}, testNode2},
	}

	for _, tc := range tt {
		tc := tc // capture range variable for the closure below
		t.Run(tc.name, func(t *testing.T) {
			clientCtx, cleanup := initClientContext(t, tc.envVar)
			// Unset the env var (when set) before removing the temp home so
			// later subtests start from a clean environment.
			defer func() {
				if tc.envVar != "" {
					os.Unsetenv(nodeEnv)
				}
				cleanup()
			}()
			/*
				env var is set with a flag

				NODE=http://localhost:1 ./build/simd q staking validators --node http://localhost:2
				Error: post failed: Post "http://localhost:2": dial tcp 127.0.0.1:2: connect: connection refused

				We dial http://localhost:2 cause a flag has the higher priority than env variable.
			*/
			// No node is actually listening, so the query must fail; the
			// dialed address embedded in the error proves which source won.
			cmd := cli.GetQueryCmd()
			_, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, tc.args)
			require.Error(t, err)
			require.Contains(t, err.Error(), tc.expNode, "Output does not contain expected Node")
		})
	}
}

70
client/config/toml.go Normal file
View File

@ -0,0 +1,70 @@
package config
import (
"bytes"
"io/ioutil"
"os"
"text/template"
"github.com/spf13/viper"
)
// defaultConfigTemplate is the text/template rendered into client.toml by
// writeConfigToFile; the placeholders are filled from a ClientConfig value.
const defaultConfigTemplate = `# This is a TOML config file.
# For more information, see https://github.com/toml-lang/toml

###############################################################################
###                           Client Configuration                          ###
###############################################################################

# The network chain ID
chain-id = "{{ .ChainID }}"
# The keyring's backend, where the keys are stored (os|file|kwallet|pass|test|memory)
keyring-backend = "{{ .KeyringBackend }}"
# CLI output format (text|json)
output = "{{ .Output }}"
# <host>:<port> to Tendermint RPC interface for this chain
node = "{{ .Node }}"
# Transaction broadcasting mode (sync|async|block)
broadcast-mode = "{{ .BroadcastMode }}"
`
// writeConfigToFile parses defaultConfigTemplate, renders config using the
// template and writes the result to configFilePath (mode 0600).
func writeConfigToFile(configFilePath string, config *ClientConfig) error {
	configTemplate, err := template.New("clientConfigFileTemplate").Parse(defaultConfigTemplate)
	if err != nil {
		return err
	}

	var buffer bytes.Buffer
	if err := configTemplate.Execute(&buffer, config); err != nil {
		return err
	}

	return ioutil.WriteFile(configFilePath, buffer.Bytes(), 0600)
}
// ensureConfigPath creates a directory configPath if it does not exist
func ensureConfigPath(configPath string) error {
return os.MkdirAll(configPath, os.ModePerm)
}
// getClientConfig reads values from the client.toml file under configPath and
// unmarshals them into a ClientConfig using the given viper instance.
func getClientConfig(configPath string, v *viper.Viper) (*ClientConfig, error) {
	v.AddConfigPath(configPath)
	v.SetConfigName("client")
	v.SetConfigType("toml")

	if err := v.ReadInConfig(); err != nil {
		return nil, err
	}

	conf := new(ClientConfig)
	err := v.Unmarshal(conf)
	if err != nil {
		return nil, err
	}
	return conf, nil
}

View File

@ -5,6 +5,8 @@ import (
"io"
"os"
"github.com/spf13/viper"
"gopkg.in/yaml.v2"
"github.com/gogo/protobuf/proto"
@ -23,10 +25,11 @@ type Context struct {
FromAddress sdk.AccAddress
Client rpcclient.Client
ChainID string
JSONMarshaler codec.JSONMarshaler
JSONMarshaler codec.JSONCodec
InterfaceRegistry codectypes.InterfaceRegistry
Input io.Reader
Keyring keyring.Keyring
KeyringOptions []keyring.Option
Output io.Writer
OutputFormat string
Height int64
@ -45,6 +48,7 @@ type Context struct {
AccountRetriever AccountRetriever
NodeURI string
FeeGranter sdk.AccAddress
Viper *viper.Viper
// TODO: Deprecated (remove).
LegacyAmino *codec.LegacyAmino
@ -56,6 +60,12 @@ func (ctx Context) WithKeyring(k keyring.Keyring) Context {
return ctx
}
// WithKeyringOptions returns a copy of the context with updated keyring
// construction options (consumed by NewKeyringFromBackend).
func (ctx Context) WithKeyringOptions(opts ...keyring.Option) Context {
	ctx.KeyringOptions = opts
	return ctx
}
// WithInput returns a copy of the context with an updated input.
func (ctx Context) WithInput(r io.Reader) Context {
ctx.Input = r
@ -63,7 +73,7 @@ func (ctx Context) WithInput(r io.Reader) Context {
}
// WithJSONMarshaler returns a copy of the Context with an updated JSONMarshaler.
func (ctx Context) WithJSONMarshaler(m codec.JSONMarshaler) Context {
func (ctx Context) WithJSONMarshaler(m codec.JSONCodec) Context {
ctx.JSONMarshaler = m
return ctx
}
@ -126,7 +136,9 @@ func (ctx Context) WithChainID(chainID string) Context {
// WithHomeDir returns a copy of the Context with HomeDir set. An empty dir
// argument is ignored and leaves the existing HomeDir untouched.
func (ctx Context) WithHomeDir(dir string) Context {
	// The merge fused the old unconditional assignment with the new guarded
	// one; only the guarded form is kept.
	if dir != "" {
		ctx.HomeDir = dir
	}
	return ctx
}
@ -213,6 +225,16 @@ func (ctx Context) WithInterfaceRegistry(interfaceRegistry codectypes.InterfaceR
return ctx
}
// WithViper returns the context with the Viper field set to a fresh instance.
// This Viper instance is used to read client-side config from the config file
// and reads environment variables with the given prefix.
func (ctx Context) WithViper(prefix string) Context {
	v := viper.New()
	v.SetEnvPrefix(prefix)
	v.AutomaticEnv()

	ctx.Viper = v
	return ctx
}
// PrintString prints the raw string to ctx.Output if it's defined, otherwise to os.Stdout
func (ctx Context) PrintString(str string) error {
return ctx.PrintBytes([]byte(str))
@ -323,10 +345,11 @@ func GetFromFields(kr keyring.Keyring, from string, genOnly bool) (sdk.AccAddres
return info.GetAddress(), info.GetName(), info.GetType(), nil
}
func newKeyringFromFlags(ctx Context, backend string) (keyring.Keyring, error) {
if ctx.GenerateOnly {
return keyring.New(sdk.KeyringServiceName(), keyring.BackendMemory, ctx.KeyringDir, ctx.Input)
// NewKeyringFromBackend gets a Keyring object from a backend
func NewKeyringFromBackend(ctx Context, backend string) (keyring.Keyring, error) {
if ctx.GenerateOnly || ctx.Simulate {
return keyring.New(sdk.KeyringServiceName(), keyring.BackendMemory, ctx.KeyringDir, ctx.Input, ctx.KeyringOptions...)
}
return keyring.New(sdk.KeyringServiceName(), backend, ctx.KeyringDir, ctx.Input)
return keyring.New(sdk.KeyringServiceName(), backend, ctx.KeyringDir, ctx.Input, ctx.KeyringOptions...)
}

View File

@ -51,7 +51,7 @@ func TestContext_PrintObject(t *testing.T) {
require.NoError(t, err)
require.Equal(t,
`{"animal":{"@type":"/testdata.Dog","size":"big","name":"Spot"},"x":"10"}
`, string(buf.Bytes()))
`, buf.String())
// yaml
buf = &bytes.Buffer{}
@ -65,7 +65,7 @@ func TestContext_PrintObject(t *testing.T) {
name: Spot
size: big
x: "10"
`, string(buf.Bytes()))
`, buf.String())
//
// amino
@ -81,7 +81,7 @@ x: "10"
require.NoError(t, err)
require.Equal(t,
`{"type":"testdata/HasAnimal","value":{"animal":{"type":"testdata/Dog","value":{"size":"big","name":"Spot"}},"x":"10"}}
`, string(buf.Bytes()))
`, buf.String())
// yaml
buf = &bytes.Buffer{}
@ -98,7 +98,7 @@ value:
name: Spot
size: big
x: "10"
`, string(buf.Bytes()))
`, buf.String())
}
func TestCLIQueryConn(t *testing.T) {

View File

@ -1,7 +1,6 @@
package debug
import (
"encoding/base64"
"encoding/hex"
"fmt"
"strconv"
@ -10,13 +9,12 @@ import (
"github.com/spf13/cobra"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/errors"
"github.com/cosmos/cosmos-sdk/version"
)
// Cmd creates a main CLI command
func Cmd() *cobra.Command {
cmd := &cobra.Command{
Use: "debug",
@ -31,90 +29,31 @@ func Cmd() *cobra.Command {
return cmd
}
// getPubKeyFromString returns a Tendermint PubKey (PubKeyEd25519) by attempting
// to decode the pubkey string from hex, base64, and finally bech32. If all
// encodings fail, an error is returned.
func getPubKeyFromString(pkstr string) (cryptotypes.PubKey, error) {
bz, err := hex.DecodeString(pkstr)
if err == nil {
if len(bz) == ed25519.PubKeySize {
return &ed25519.PubKey{Key: bz}, nil
}
}
bz, err = base64.StdEncoding.DecodeString(pkstr)
if err == nil {
if len(bz) == ed25519.PubKeySize {
return &ed25519.PubKey{Key: bz}, nil
}
}
pk, err := sdk.GetPubKeyFromBech32(sdk.Bech32PubKeyTypeAccPub, pkstr)
if err == nil {
return pk, nil
}
pk, err = sdk.GetPubKeyFromBech32(sdk.Bech32PubKeyTypeValPub, pkstr)
if err == nil {
return pk, nil
}
pk, err = sdk.GetPubKeyFromBech32(sdk.Bech32PubKeyTypeConsPub, pkstr)
if err == nil {
return pk, nil
}
return nil, fmt.Errorf("pubkey '%s' invalid; expected hex, base64, or bech32 of correct size", pkstr)
// getPubKeyFromString decodes SDK PubKey using JSON marshaler.
func getPubKeyFromString(ctx client.Context, pkstr string) (cryptotypes.PubKey, error) {
var pk cryptotypes.PubKey
err := ctx.JSONMarshaler.UnmarshalInterfaceJSON([]byte(pkstr), &pk)
return pk, err
}
func PubkeyCmd() *cobra.Command {
return &cobra.Command{
Use: "pubkey [pubkey]",
Short: "Decode a ED25519 pubkey from hex, base64, or bech32",
Long: fmt.Sprintf(`Decode a pubkey from hex, base64, or bech32.
Short: "Decode a pubkey from proto JSON",
Long: fmt.Sprintf(`Decode a pubkey from proto JSON and display it's address.
Example:
$ %s debug pubkey TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlz
$ %s debug pubkey cosmos1e0jnq2sun3dzjh8p2xq95kk0expwmd7shwjpfg
`, version.AppName, version.AppName),
$ %s debug pubkey '{"@type":"/cosmos.crypto.secp256k1.PubKey","key":"AurroA7jvfPd1AadmmOvWM2rJSwipXfRf8yD6pLbA2DJ"}'
`, version.AppName),
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
pk, err := getPubKeyFromString(args[0])
pk, err := getPubKeyFromString(clientCtx, args[0])
if err != nil {
return err
}
edPK, ok := pk.(*ed25519.PubKey)
if !ok {
return errors.Wrapf(errors.ErrInvalidType, "invalid pubkey type; expected ED25519")
}
pubKeyJSONBytes, err := clientCtx.LegacyAmino.MarshalJSON(edPK)
if err != nil {
return err
}
accPub, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeAccPub, edPK)
if err != nil {
return err
}
valPub, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeValPub, edPK)
if err != nil {
return err
}
consenusPub, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeConsPub, edPK)
if err != nil {
return err
}
cmd.Println("Address:", edPK.Address())
cmd.Printf("Hex: %X\n", edPK.Key)
cmd.Println("JSON (base64):", string(pubKeyJSONBytes))
cmd.Println("Bech32 Acc:", accPub)
cmd.Println("Bech32 Validator Operator:", valPub)
cmd.Println("Bech32 Validator Consensus:", consenusPub)
cmd.Println("Address:", pk.Address())
cmd.Println("PubKey Hex:", hex.EncodeToString(pk.Bytes()))
return nil
},
}
@ -152,13 +91,10 @@ $ %s debug addr cosmos1e0jnq2sun3dzjh8p2xq95kk0expwmd7shwjpfg
}
}
accAddr := sdk.AccAddress(addr)
valAddr := sdk.ValAddress(addr)
cmd.Println("Address:", addr)
cmd.Printf("Address (hex): %X\n", addr)
cmd.Printf("Bech32 Acc: %s\n", accAddr)
cmd.Printf("Bech32 Val: %s\n", valAddr)
cmd.Printf("Bech32 Acc: %s\n", sdk.AccAddress(addr))
cmd.Printf("Bech32 Val: %s\n", sdk.ValAddress(addr))
return nil
},
}

View File

@ -106,38 +106,6 @@
"Params": "UpgradeParams"
}
}
},
{
"url": "./tmp-swagger-gen/ibc/core/channel/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCChannelParams"
}
}
},
{
"url": "./tmp-swagger-gen/ibc/core/client/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCClientParams"
}
}
},
{
"url": "./tmp-swagger-gen/ibc/core/connection/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCConnectionParams"
}
}
},
{
"url": "./tmp-swagger-gen/ibc/applications/transfer/v1/query.swagger.json",
"operationIds": {
"rename": {
"Params": "IBCTransferParams"
}
}
}
]
}

View File

@ -1,3 +1,3 @@
package statik
// This file exists only to avoid an import error for the otherwise-empty
// github.com/cosmos/cosmos-sdk/client/docs/statik package.

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@ -446,6 +446,45 @@ paths:
description: Invalid request
500:
description: Server internal error
/bank/total:
get:
deprecated: true
summary: Total supply of coins in the chain
tags:
- Bank
produces:
- application/json
responses:
200:
description: OK
schema:
$ref: "#/definitions/Supply"
500:
description: Internal Server Error
/bank/total/{denomination}:
parameters:
- in: path
name: denomination
description: Coin denomination
required: true
type: string
x-example: uatom
get:
deprecated: true
summary: Total supply of a single coin denomination
tags:
- Bank
produces:
- application/json
responses:
200:
description: OK
schema:
type: string
400:
description: Invalid coin denomination
500:
description: Internal Server Error
/auth/accounts/{address}:
get:
deprecated: true
@ -527,7 +566,7 @@ paths:
$ref: "#/definitions/Address"
validator_address:
$ref: "#/definitions/ValidatorAddress"
delegation:
amount:
$ref: "#/definitions/Coin"
tags:
- Staking
@ -617,9 +656,8 @@ paths:
$ref: "#/definitions/Address"
validator_address:
$ref: "#/definitions/ValidatorAddress"
shares:
type: string
example: "100"
amount:
$ref: "#/definitions/Coin"
tags:
- Staking
consumes:
@ -1940,45 +1978,6 @@ paths:
type: string
500:
description: Internal Server Error
/supply/total:
get:
deprecated: true
summary: Total supply of coins in the chain
tags:
- Supply
produces:
- application/json
responses:
200:
description: OK
schema:
$ref: "#/definitions/Supply"
500:
description: Internal Server Error
/supply/total/{denomination}:
parameters:
- in: path
name: denomination
description: Coin denomination
required: true
type: string
x-example: uatom
get:
deprecated: true
summary: Total supply of a single coin denomination
tags:
- Supply
produces:
- application/json
responses:
200:
description: OK
schema:
type: string
400:
description: Invalid coin denomination
500:
description: Internal Server Error
definitions:
CheckTxResult:
type: object

View File

@ -71,6 +71,7 @@ const (
FlagTimeoutHeight = "timeout-height"
FlagKeyAlgorithm = "algo"
FlagFeeAccount = "fee-account"
FlagReverse = "reverse"
// Tendermint logging flags
FlagLogLevel = "log_level"
@ -88,9 +89,6 @@ func AddQueryFlagsToCmd(cmd *cobra.Command) {
cmd.Flags().StringP(tmcli.OutputFlag, "o", "text", "Output format (text|json)")
cmd.MarkFlagRequired(FlagChainID)
cmd.SetErr(cmd.ErrOrStderr())
cmd.SetOut(cmd.OutOrStdout())
}
// AddTxFlagsToCmd adds common flags to a module tx command.
@ -110,7 +108,7 @@ func AddTxFlagsToCmd(cmd *cobra.Command) {
cmd.Flags().Bool(FlagGenerateOnly, false, "Build an unsigned transaction and write it to STDOUT (when enabled, the local Keybase is not accessible)")
cmd.Flags().Bool(FlagOffline, false, "Offline mode (does not allow any online functionality")
cmd.Flags().BoolP(FlagSkipConfirmation, "y", false, "Skip tx broadcasting prompt confirmation")
cmd.Flags().String(FlagKeyringBackend, DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test)")
cmd.Flags().String(FlagKeyringBackend, DefaultKeyringBackend, "Select keyring's backend (os|file|kwallet|pass|test|memory)")
cmd.Flags().String(FlagSignMode, "", "Choose sign mode (direct|amino-json), this is an advanced feature")
cmd.Flags().Uint64(FlagTimeoutHeight, 0, "Set a block timeout height to prevent the tx from being committed past a certain height")
cmd.Flags().String(FlagFeeAccount, "", "Fee account pays fees for the transaction instead of deducting from the signer")
@ -119,9 +117,6 @@ func AddTxFlagsToCmd(cmd *cobra.Command) {
cmd.Flags().String(FlagGas, "", fmt.Sprintf("gas limit to set per-transaction; set to %q to calculate sufficient gas automatically (default %d)", GasFlagAuto, DefaultGasLimit))
cmd.MarkFlagRequired(FlagChainID)
cmd.SetErr(cmd.ErrOrStderr())
cmd.SetOut(cmd.OutOrStdout())
}
// AddPaginationFlagsToCmd adds common pagination flags to cmd
@ -131,6 +126,7 @@ func AddPaginationFlagsToCmd(cmd *cobra.Command, query string) {
cmd.Flags().Uint64(FlagOffset, 0, fmt.Sprintf("pagination offset of %s to query for", query))
cmd.Flags().Uint64(FlagLimit, 100, fmt.Sprintf("pagination limit of %s to query for", query))
cmd.Flags().Bool(FlagCountTotal, false, fmt.Sprintf("count total number of records in %s to query for", query))
cmd.Flags().Bool(FlagReverse, false, "results are sorted in descending order")
}
// GasSetting encapsulates the possible values passed through the --gas flag.

View File

@ -234,9 +234,9 @@ func RegisterReflectionServiceHandlerClient(ctx context.Context, mux *runtime.Se
}
var (
pattern_ReflectionService_ListAllInterfaces_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "reflection", "v1beta1", "interfaces"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_ReflectionService_ListAllInterfaces_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "reflection", "v1beta1", "interfaces"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_ReflectionService_ListImplementations_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5, 2, 6}, []string{"cosmos", "base", "reflection", "v1beta1", "interfaces", "interface_name", "implementations"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_ReflectionService_ListImplementations_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5, 2, 6}, []string{"cosmos", "base", "reflection", "v1beta1", "interfaces", "interface_name", "implementations"}, "", runtime.AssumeColonVerbOpt(false)))
)
var (

View File

@ -1,55 +0,0 @@
package reflection_test
import (
"context"
"testing"
"github.com/stretchr/testify/suite"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/client/grpc/reflection"
"github.com/cosmos/cosmos-sdk/simapp"
)
type IntegrationTestSuite struct {
suite.Suite
queryClient reflection.ReflectionServiceClient
}
func (s *IntegrationTestSuite) SetupSuite() {
app := simapp.Setup(false)
sdkCtx := app.BaseApp.NewContext(false, tmproto.Header{})
queryHelper := baseapp.NewQueryServerTestHelper(sdkCtx, app.InterfaceRegistry())
queryClient := reflection.NewReflectionServiceClient(queryHelper)
s.queryClient = queryClient
}
func (s IntegrationTestSuite) TestSimulateService() {
// We will test the following interface for testing.
var iface = "cosmos.evidence.v1beta1.Evidence"
// Test that "cosmos.evidence.v1beta1.Evidence" is included in the
// interfaces.
resIface, err := s.queryClient.ListAllInterfaces(
context.Background(),
&reflection.ListAllInterfacesRequest{},
)
s.Require().NoError(err)
s.Require().Contains(resIface.GetInterfaceNames(), iface)
// Test that "cosmos.evidence.v1beta1.Evidence" has at least the
// Equivocation implementations.
resImpl, err := s.queryClient.ListImplementations(
context.Background(),
&reflection.ListImplementationsRequest{InterfaceName: iface},
)
s.Require().NoError(err)
s.Require().Contains(resImpl.GetImplementationMessageNames(), "/cosmos.evidence.v1beta1.Equivocation")
}
func TestSimulateTestSuite(t *testing.T) {
suite.Run(t, new(IntegrationTestSuite))
}

View File

@ -8,12 +8,12 @@ import (
"github.com/cosmos/cosmos-sdk/client"
)
func getBlock(clientCtx client.Context, height *int64) (*ctypes.ResultBlock, error) {
func getBlock(ctx context.Context, clientCtx client.Context, height *int64) (*ctypes.ResultBlock, error) {
// get the node
node, err := clientCtx.GetNode()
if err != nil {
return nil, err
}
return node.Block(context.Background(), height)
return node.Block(ctx, height)
}

View File

@ -538,17 +538,17 @@ func RegisterServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, cl
}
var (
pattern_Service_GetNodeInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "tendermint", "v1beta1", "node_info"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetNodeInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "tendermint", "v1beta1", "node_info"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Service_GetSyncing_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "tendermint", "v1beta1", "syncing"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetSyncing_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"cosmos", "base", "tendermint", "v1beta1", "syncing"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Service_GetLatestBlock_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "blocks", "latest"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetLatestBlock_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "blocks", "latest"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Service_GetBlockByHeight_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "blocks", "height"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetBlockByHeight_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "blocks", "height"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Service_GetLatestValidatorSet_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "validatorsets", "latest"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetLatestValidatorSet_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "validatorsets", "latest"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Service_GetValidatorSetByHeight_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "validatorsets", "height"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Service_GetValidatorSetByHeight_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"cosmos", "base", "tendermint", "v1beta1", "validatorsets", "height"}, "", runtime.AssumeColonVerbOpt(false)))
)
var (

View File

@ -34,8 +34,8 @@ func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.Inter
}
// GetSyncing implements ServiceServer.GetSyncing
func (s queryServer) GetSyncing(_ context.Context, _ *GetSyncingRequest) (*GetSyncingResponse, error) {
status, err := getNodeStatus(s.clientCtx)
func (s queryServer) GetSyncing(ctx context.Context, _ *GetSyncingRequest) (*GetSyncingResponse, error) {
status, err := getNodeStatus(ctx, s.clientCtx)
if err != nil {
return nil, err
}
@ -45,8 +45,8 @@ func (s queryServer) GetSyncing(_ context.Context, _ *GetSyncingRequest) (*GetSy
}
// GetLatestBlock implements ServiceServer.GetLatestBlock
func (s queryServer) GetLatestBlock(context.Context, *GetLatestBlockRequest) (*GetLatestBlockResponse, error) {
status, err := getBlock(s.clientCtx, nil)
func (s queryServer) GetLatestBlock(ctx context.Context, _ *GetLatestBlockRequest) (*GetLatestBlockResponse, error) {
status, err := getBlock(ctx, s.clientCtx, nil)
if err != nil {
return nil, err
}
@ -64,7 +64,7 @@ func (s queryServer) GetLatestBlock(context.Context, *GetLatestBlockRequest) (*G
}
// GetBlockByHeight implements ServiceServer.GetBlockByHeight
func (s queryServer) GetBlockByHeight(_ context.Context, req *GetBlockByHeightRequest) (*GetBlockByHeightResponse, error) {
func (s queryServer) GetBlockByHeight(ctx context.Context, req *GetBlockByHeightRequest) (*GetBlockByHeightResponse, error) {
chainHeight, err := rpc.GetChainHeight(s.clientCtx)
if err != nil {
return nil, err
@ -74,7 +74,7 @@ func (s queryServer) GetBlockByHeight(_ context.Context, req *GetBlockByHeightRe
return nil, status.Error(codes.InvalidArgument, "requested block height is bigger then the chain length")
}
res, err := getBlock(s.clientCtx, &req.Height)
res, err := getBlock(ctx, s.clientCtx, &req.Height)
if err != nil {
return nil, err
}
@ -95,30 +95,7 @@ func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *GetLatestVa
if err != nil {
return nil, err
}
validatorsRes, err := rpc.GetValidators(s.clientCtx, nil, &page, &limit)
if err != nil {
return nil, err
}
outputValidatorsRes := &GetLatestValidatorSetResponse{
BlockHeight: validatorsRes.BlockHeight,
Validators: make([]*Validator, len(validatorsRes.Validators)),
}
for i, validator := range validatorsRes.Validators {
anyPub, err := codectypes.NewAnyWithValue(validator.PubKey)
if err != nil {
return nil, err
}
outputValidatorsRes.Validators[i] = &Validator{
Address: validator.Address.String(),
ProposerPriority: validator.ProposerPriority,
PubKey: anyPub,
VotingPower: validator.VotingPower,
}
}
return outputValidatorsRes, nil
return validatorsOutput(ctx, s.clientCtx, nil, page, limit)
}
func (m *GetLatestValidatorSetResponse) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error {
@ -146,36 +123,47 @@ func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *GetValida
if req.Height > chainHeight {
return nil, status.Error(codes.InvalidArgument, "requested block height is bigger then the chain length")
}
validatorsRes, err := rpc.GetValidators(s.clientCtx, &req.Height, &page, &limit)
r, err := validatorsOutput(ctx, s.clientCtx, &req.Height, page, limit)
if err != nil {
return nil, err
}
return &GetValidatorSetByHeightResponse{
BlockHeight: r.BlockHeight,
Validators: r.Validators,
Pagination: r.Pagination,
}, nil
}
outputValidatorsRes := &GetValidatorSetByHeightResponse{
BlockHeight: validatorsRes.BlockHeight,
Validators: make([]*Validator, len(validatorsRes.Validators)),
func validatorsOutput(ctx context.Context, cctx client.Context, height *int64, page, limit int) (*GetLatestValidatorSetResponse, error) {
vs, err := rpc.GetValidators(ctx, cctx, height, &page, &limit)
if err != nil {
return nil, err
}
for i, validator := range validatorsRes.Validators {
anyPub, err := codectypes.NewAnyWithValue(validator.PubKey)
resp := GetLatestValidatorSetResponse{
BlockHeight: vs.BlockHeight,
Validators: make([]*Validator, len(vs.Validators)),
Pagination: &qtypes.PageResponse{
Total: vs.Total,
},
}
for i, v := range vs.Validators {
anyPub, err := codectypes.NewAnyWithValue(v.PubKey)
if err != nil {
return nil, err
}
outputValidatorsRes.Validators[i] = &Validator{
Address: validator.Address.String(),
ProposerPriority: validator.ProposerPriority,
resp.Validators[i] = &Validator{
Address: v.Address.String(),
ProposerPriority: v.ProposerPriority,
PubKey: anyPub,
VotingPower: validator.VotingPower,
VotingPower: v.VotingPower,
}
}
return outputValidatorsRes, nil
return &resp, nil
}
// GetNodeInfo implements ServiceServer.GetNodeInfo
func (s queryServer) GetNodeInfo(ctx context.Context, req *GetNodeInfoRequest) (*GetNodeInfoResponse, error) {
status, err := getNodeStatus(s.clientCtx)
status, err := getNodeStatus(ctx, s.clientCtx)
if err != nil {
return nil, err
}

View File

@ -131,32 +131,126 @@ func (s IntegrationTestSuite) TestQueryLatestValidatorSet() {
s.Require().Equal(validatorSetRes.Validators[0].PubKey, anyPub)
}
func (s IntegrationTestSuite) TestQueryValidatorSetByHeight() {
val := s.network.Validators[0]
func (s IntegrationTestSuite) TestLatestValidatorSet_GRPC() {
vals := s.network.Validators
testCases := []struct {
name string
req *tmservice.GetLatestValidatorSetRequest
expErr bool
expErrMsg string
}{
{"nil request", nil, true, "cannot be nil"},
{"no pagination", &tmservice.GetLatestValidatorSetRequest{}, false, ""},
{"with pagination", &tmservice.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{Offset: 0, Limit: uint64(len(vals))}}, false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
grpcRes, err := s.queryClient.GetLatestValidatorSet(context.Background(), tc.req)
if tc.expErr {
s.Require().Error(err)
s.Require().Contains(err.Error(), tc.expErrMsg)
} else {
s.Require().NoError(err)
s.Require().Len(grpcRes.Validators, len(vals))
s.Require().Equal(grpcRes.Pagination.Total, uint64(len(vals)))
content, ok := grpcRes.Validators[0].PubKey.GetCachedValue().(cryptotypes.PubKey)
s.Require().Equal(true, ok)
s.Require().Equal(content, vals[0].PubKey)
}
})
}
}
// nil pagination
_, err := s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{
Height: 1,
Pagination: nil,
})
s.Require().NoError(err)
func (s IntegrationTestSuite) TestLatestValidatorSet_GRPCGateway() {
vals := s.network.Validators
testCases := []struct {
name string
url string
expErr bool
expErrMsg string
}{
{"no pagination", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/latest", vals[0].APIAddress), false, ""},
{"pagination invalid fields", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/latest?pagination.offset=-1&pagination.limit=-2", vals[0].APIAddress), true, "strconv.ParseUint"},
{"with pagination", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/latest?pagination.offset=0&pagination.limit=2", vals[0].APIAddress), false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := rest.GetRequest(tc.url)
s.Require().NoError(err)
if tc.expErr {
s.Require().Contains(string(res), tc.expErrMsg)
} else {
var result tmservice.GetLatestValidatorSetResponse
err = vals[0].ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result)
s.Require().NoError(err)
s.Require().Equal(uint64(len(vals)), result.Pagination.Total)
anyPub, err := codectypes.NewAnyWithValue(vals[0].PubKey)
s.Require().NoError(err)
s.Require().Equal(result.Validators[0].PubKey, anyPub)
}
})
}
}
_, err = s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{
Height: 1,
Pagination: &qtypes.PageRequest{
Offset: 0,
Limit: 10,
}})
s.Require().NoError(err)
func (s IntegrationTestSuite) TestValidatorSetByHeight_GRPC() {
vals := s.network.Validators
testCases := []struct {
name string
req *tmservice.GetValidatorSetByHeightRequest
expErr bool
expErrMsg string
}{
{"nil request", nil, true, "request cannot be nil"},
{"empty request", &tmservice.GetValidatorSetByHeightRequest{}, true, "height must be greater than 0"},
{"no pagination", &tmservice.GetValidatorSetByHeightRequest{Height: 1}, false, ""},
{"with pagination", &tmservice.GetValidatorSetByHeightRequest{Height: 1, Pagination: &qtypes.PageRequest{Offset: 0, Limit: 1}}, false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
grpcRes, err := s.queryClient.GetValidatorSetByHeight(context.Background(), tc.req)
if tc.expErr {
s.Require().Error(err)
s.Require().Contains(err.Error(), tc.expErrMsg)
} else {
s.Require().NoError(err)
s.Require().Len(grpcRes.Validators, len(vals))
s.Require().Equal(grpcRes.Pagination.Total, uint64(len(vals)))
}
})
}
}
// no pagination rest
_, err = rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d", val.APIAddress, 1))
s.Require().NoError(err)
// rest query with pagination
restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d?pagination.offset=%d&pagination.limit=%d", val.APIAddress, 1, 0, 1))
var validatorSetRes tmservice.GetValidatorSetByHeightResponse
s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes))
func (s IntegrationTestSuite) TestValidatorSetByHeight_GRPCGateway() {
vals := s.network.Validators
testCases := []struct {
name string
url string
expErr bool
expErrMsg string
}{
{"invalid height", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d", vals[0].APIAddress, -1), true, "height must be greater than 0"},
{"no pagination", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d", vals[0].APIAddress, 1), false, ""},
{"pagination invalid fields", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d?pagination.offset=-1&pagination.limit=-2", vals[0].APIAddress, 1), true, "strconv.ParseUint"},
{"with pagination", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d?pagination.offset=0&pagination.limit=2", vals[0].APIAddress, 1), false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := rest.GetRequest(tc.url)
s.Require().NoError(err)
if tc.expErr {
s.Require().Contains(string(res), tc.expErrMsg)
} else {
var result tmservice.GetValidatorSetByHeightResponse
err = vals[0].ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result)
s.Require().NoError(err)
s.Require().Equal(uint64(len(vals)), result.Pagination.Total)
}
})
}
}
func TestIntegrationTestSuite(t *testing.T) {

View File

@ -8,10 +8,10 @@ import (
"github.com/cosmos/cosmos-sdk/client"
)
func getNodeStatus(clientCtx client.Context) (*ctypes.ResultStatus, error) {
func getNodeStatus(ctx context.Context, clientCtx client.Context) (*ctypes.ResultStatus, error) {
node, err := clientCtx.GetNode()
if err != nil {
return &ctypes.ResultStatus{}, err
}
return node.Status(context.Background())
return node.Status(ctx)
}

View File

@ -24,86 +24,54 @@ var _ gogogrpc.ClientConn = Context{}
var protoCodec = encoding.GetCodec(proto.Name)
// Invoke implements the grpc ClientConn.Invoke method
func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply interface{}, opts ...grpc.CallOption) (err error) {
func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, req, reply interface{}, opts ...grpc.CallOption) (err error) {
// Two things can happen here:
// 1. either we're broadcasting a Tx, in which call we call Tendermint's broadcast endpoint directly,
// 2. or we are querying for state, in which case we call ABCI's Query.
// In both cases, we don't allow empty request args (it will panic unexpectedly).
if reflect.ValueOf(args).IsNil() {
// In both cases, we don't allow empty request req (it will panic unexpectedly).
if reflect.ValueOf(req).IsNil() {
return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "request cannot be nil")
}
// Case 1. Broadcasting a Tx.
if isBroadcast(method) {
req, ok := args.(*tx.BroadcastTxRequest)
if reqProto, ok := req.(*tx.BroadcastTxRequest); ok {
if !ok {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxRequest)(nil), args)
return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxRequest)(nil), req)
}
res, ok := reply.(*tx.BroadcastTxResponse)
resProto, ok := reply.(*tx.BroadcastTxResponse)
if !ok {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxResponse)(nil), args)
return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxResponse)(nil), req)
}
broadcastRes, err := TxServiceBroadcast(grpcCtx, ctx, req)
broadcastRes, err := TxServiceBroadcast(grpcCtx, ctx, reqProto)
if err != nil {
return err
}
*res = *broadcastRes
*resProto = *broadcastRes
return err
}
// Case 2. Querying state.
reqBz, err := protoCodec.Marshal(args)
inMd, _ := metadata.FromOutgoingContext(grpcCtx)
abciRes, outMd, err := RunGRPCQuery(ctx, grpcCtx, method, req, inMd)
if err != nil {
return err
}
// parse height header
md, _ := metadata.FromOutgoingContext(grpcCtx)
if heights := md.Get(grpctypes.GRPCBlockHeightHeader); len(heights) > 0 {
height, err := strconv.ParseInt(heights[0], 10, 64)
if err != nil {
return err
}
if height < 0 {
return sdkerrors.Wrapf(
sdkerrors.ErrInvalidRequest,
"client.Context.Invoke: height (%d) from %q must be >= 0", height, grpctypes.GRPCBlockHeightHeader)
}
ctx = ctx.WithHeight(height)
}
req := abci.RequestQuery{
Path: method,
Data: reqBz,
}
res, err := ctx.QueryABCI(req)
err = protoCodec.Unmarshal(abciRes.Value, reply)
if err != nil {
return err
}
err = protoCodec.Unmarshal(res.Value, reply)
if err != nil {
return err
}
// Create header metadata. For now the headers contain:
// - block height
// We then parse all the call options, if the call option is a
// HeaderCallOption, then we manually set the value of that header to the
// metadata.
md = metadata.Pairs(grpctypes.GRPCBlockHeightHeader, strconv.FormatInt(res.Height, 10))
for _, callOpt := range opts {
header, ok := callOpt.(grpc.HeaderCallOption)
if !ok {
continue
}
*header.HeaderAddr = md
*header.HeaderAddr = outMd
}
if ctx.InterfaceRegistry != nil {
@ -118,6 +86,47 @@ func (Context) NewStream(gocontext.Context, *grpc.StreamDesc, string, ...grpc.Ca
return nil, fmt.Errorf("streaming rpc not supported")
}
func isBroadcast(method string) bool {
return method == "/cosmos.tx.v1beta1.Service/BroadcastTx"
// RunGRPCQuery runs a gRPC query from the clientCtx, given all necessary
// arguments for the gRPC method, and returns the ABCI response. It is used
// to factorize code between client (Invoke) and server (RegisterGRPCServer)
// gRPC handlers.
func RunGRPCQuery(ctx Context, grpcCtx gocontext.Context, method string, req interface{}, md metadata.MD) (abci.ResponseQuery, metadata.MD, error) {
reqBz, err := protoCodec.Marshal(req)
if err != nil {
return abci.ResponseQuery{}, nil, err
}
// parse height header
if heights := md.Get(grpctypes.GRPCBlockHeightHeader); len(heights) > 0 {
height, err := strconv.ParseInt(heights[0], 10, 64)
if err != nil {
return abci.ResponseQuery{}, nil, err
}
if height < 0 {
return abci.ResponseQuery{}, nil, sdkerrors.Wrapf(
sdkerrors.ErrInvalidRequest,
"client.Context.Invoke: height (%d) from %q must be >= 0", height, grpctypes.GRPCBlockHeightHeader)
}
ctx = ctx.WithHeight(height)
}
abciReq := abci.RequestQuery{
Path: method,
Data: reqBz,
}
abciRes, err := ctx.QueryABCI(abciReq)
if err != nil {
return abci.ResponseQuery{}, nil, err
}
// Create header metadata. For now the headers contain:
// - block height
// We then parse all the call options, if the call option is a
// HeaderCallOption, then we manually set the value of that header to the
// metadata.
md = metadata.Pairs(grpctypes.GRPCBlockHeightHeader, strconv.FormatInt(abciRes.Height, 10))
return abciRes, md, nil
}

View File

@ -40,7 +40,7 @@ const (
func AddKeyCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "add <name>",
Short: "Add an encrypted private key (either newly generated or recovered), encrypt it, and save to disk",
Short: "Add an encrypted private key (either newly generated or recovered), encrypt it, and save to <name> file",
Long: `Derive a new private key and encrypt to disk.
Optionally specify a BIP39 mnemonic, a BIP39 passphrase to further secure the mnemonic,
and a bip32 HD path to derive a specific account. The key will be stored under the given name
@ -53,58 +53,43 @@ local keystore.
Use the --pubkey flag to add arbitrary public keys to the keystore for constructing
multisig transactions.
You can add a multisig key by passing the list of key names you want the public
key to be composed of to the --multisig flag and the minimum number of signatures
required through --multisig-threshold. The keys are sorted by address, unless
the flag --nosort is set.
You can create and store a multisig key by passing the list of key names stored in a keyring
and the minimum number of signatures required through --multisig-threshold. The keys are
sorted by address, unless the flag --nosort is set.
Example:
keys add mymultisig --multisig "keyname1,keyname2,keyname3" --multisig-threshold 2
`,
Args: cobra.ExactArgs(1),
RunE: runAddCmd,
RunE: runAddCmdPrepare,
}
cmd.Flags().StringSlice(flagMultisig, nil, "Construct and store a multisig public key (implies --pubkey)")
cmd.Flags().Int(flagMultiSigThreshold, 1, "K out of N required signatures. For use in conjunction with --multisig")
cmd.Flags().Bool(flagNoSort, false, "Keys passed to --multisig are taken in the order they're supplied")
cmd.Flags().String(FlagPublicKey, "", "Parse a public key in bech32 format and save it to disk")
cmd.Flags().BoolP(flagInteractive, "i", false, "Interactively prompt user for BIP39 passphrase and mnemonic")
cmd.Flags().Bool(flags.FlagUseLedger, false, "Store a local reference to a private key on a Ledger device")
cmd.Flags().Bool(flagRecover, false, "Provide seed phrase to recover existing key instead of creating")
cmd.Flags().Bool(flagNoBackup, false, "Don't print out seed phrase (if others are watching the terminal)")
cmd.Flags().Bool(flags.FlagDryRun, false, "Perform action, but don't add key to local keystore")
cmd.Flags().String(flagHDPath, "", "Manual HD Path derivation (overrides BIP44 config)")
cmd.Flags().Uint32(flagCoinType, sdk.GetConfig().GetCoinType(), "coin type number for HD derivation")
cmd.Flags().Uint32(flagAccount, 0, "Account number for HD derivation")
cmd.Flags().Uint32(flagIndex, 0, "Address index number for HD derivation")
cmd.Flags().String(flags.FlagKeyAlgorithm, string(hd.Secp256k1Type), "Key signing algorithm to generate keys for")
cmd.SetOut(cmd.OutOrStdout())
cmd.SetErr(cmd.ErrOrStderr())
f := cmd.Flags()
f.StringSlice(flagMultisig, nil, "List of key names stored in keyring to construct a public legacy multisig key")
f.Int(flagMultiSigThreshold, 1, "K out of N required signatures. For use in conjunction with --multisig")
f.Bool(flagNoSort, false, "Keys passed to --multisig are taken in the order they're supplied")
f.String(FlagPublicKey, "", "Parse a public key in JSON format and saves key info to <name> file.")
f.BoolP(flagInteractive, "i", false, "Interactively prompt user for BIP39 passphrase and mnemonic")
f.Bool(flags.FlagUseLedger, false, "Store a local reference to a private key on a Ledger device")
f.Bool(flagRecover, false, "Provide seed phrase to recover existing key instead of creating")
f.Bool(flagNoBackup, false, "Don't print out seed phrase (if others are watching the terminal)")
f.Bool(flags.FlagDryRun, false, "Perform action, but don't add key to local keystore")
f.String(flagHDPath, "", "Manual HD Path derivation (overrides BIP44 config)")
f.Uint32(flagCoinType, sdk.GetConfig().GetCoinType(), "coin type number for HD derivation")
f.Uint32(flagAccount, 0, "Account number for HD derivation")
f.Uint32(flagIndex, 0, "Address index number for HD derivation")
f.String(flags.FlagKeyAlgorithm, string(hd.Secp256k1Type), "Key signing algorithm to generate keys for")
return cmd
}
func runAddCmd(cmd *cobra.Command, args []string) error {
func runAddCmdPrepare(cmd *cobra.Command, args []string) error {
buf := bufio.NewReader(cmd.InOrStdin())
clientCtx, err := client.GetClientQueryContext(cmd)
if err != nil {
return err
}
var kr keyring.Keyring
dryRun, _ := cmd.Flags().GetBool(flags.FlagDryRun)
if dryRun {
kr, err = keyring.New(sdk.KeyringServiceName(), keyring.BackendMemory, clientCtx.KeyringDir, buf)
} else {
backend, _ := cmd.Flags().GetString(flags.FlagKeyringBackend)
kr, err = keyring.New(sdk.KeyringServiceName(), backend, clientCtx.KeyringDir, buf)
}
if err != nil {
return err
}
return RunAddCmd(cmd, args, kr, buf)
return RunAddCmd(clientCtx, cmd, args, buf)
}
/*
@ -116,13 +101,15 @@ input
output
- armor encrypted private key (saved to file)
*/
func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *bufio.Reader) error {
func RunAddCmd(ctx client.Context, cmd *cobra.Command, args []string, inBuf *bufio.Reader) error {
// func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *bufio.Reader) error {
var err error
name := args[0]
interactive, _ := cmd.Flags().GetBool(flagInteractive)
noBackup, _ := cmd.Flags().GetBool(flagNoBackup)
showMnemonic := !noBackup
kb := ctx.Keyring
keyringAlgos, _ := kb.SupportedAlgorithms()
algoStr, _ := cmd.Flags().GetString(flags.FlagKeyAlgorithm)
@ -153,7 +140,6 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
multisigKeys, _ := cmd.Flags().GetStringSlice(flagMultisig)
if len(multisigKeys) != 0 {
var pks []cryptotypes.PubKey
multisigThreshold, _ := cmd.Flags().GetInt(flagMultiSigThreshold)
if err := validateMultisigThreshold(multisigThreshold, len(multisigKeys)); err != nil {
return err
@ -186,16 +172,13 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
pubKey, _ := cmd.Flags().GetString(FlagPublicKey)
if pubKey != "" {
pk, err := sdk.GetPubKeyFromBech32(sdk.Bech32PubKeyTypeAccPub, pubKey)
var pk cryptotypes.PubKey
err = ctx.JSONMarshaler.UnmarshalInterfaceJSON([]byte(pubKey), &pk)
if err != nil {
return err
}
if _, err := kb.SavePubKey(name, pk, algo.Name()); err != nil {
return err
}
return nil
_, err := kb.SavePubKey(name, pk, algo.Name())
return err
}
coinType, _ := cmd.Flags().GetUint32(flagCoinType)
@ -298,11 +281,10 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
func printCreate(cmd *cobra.Command, info keyring.Info, showMnemonic bool, mnemonic string) error {
output, _ := cmd.Flags().GetString(cli.OutputFlag)
switch output {
case OutputFormatText:
cmd.PrintErrln()
printKeyInfo(cmd.OutOrStdout(), info, keyring.Bech32KeyOutput, output)
printKeyInfo(cmd.OutOrStdout(), info, keyring.MkAccKeyOutput, output)
// print mnemonic unless requested not to.
if showMnemonic {
@ -312,7 +294,7 @@ func printCreate(cmd *cobra.Command, info keyring.Info, showMnemonic bool, mnemo
fmt.Fprintln(cmd.ErrOrStderr(), mnemonic)
}
case OutputFormatJSON:
out, err := keyring.Bech32KeyOutput(info)
out, err := keyring.MkAccKeyOutput(info)
if err != nil {
return err
}

View File

@ -8,7 +8,6 @@ import (
"testing"
"github.com/stretchr/testify/require"
"github.com/tendermint/tendermint/libs/cli"
"github.com/cosmos/cosmos-sdk/client"
@ -29,8 +28,8 @@ func Test_runAddCmdLedgerWithCustomCoinType(t *testing.T) {
bech32PrefixConsAddr := "terravalcons"
bech32PrefixConsPub := "terravalconspub"
config.SetPurpose(44)
config.SetCoinType(330)
config.SetFullFundraiserPath("44'/330'/0'/0/0")
config.SetBech32PrefixForAccount(bech32PrefixAccAddr, bech32PrefixAccPub)
config.SetBech32PrefixForValidator(bech32PrefixValAddr, bech32PrefixValPub)
config.SetBech32PrefixForConsensusNode(bech32PrefixConsAddr, bech32PrefixConsPub)
@ -74,11 +73,11 @@ func Test_runAddCmdLedgerWithCustomCoinType(t *testing.T) {
require.Equal(t, "keyname1", key1.GetName())
require.Equal(t, keyring.TypeLedger, key1.GetType())
require.Equal(t,
"terrapub1addwnpepqvpg7r26nl2pvqqern00m6s9uaax3hauu2rzg8qpjzq9hy6xve7sw0d84m6",
sdk.MustBech32ifyPubKey(sdk.Bech32PubKeyTypeAccPub, key1.GetPubKey()))
"PubKeySecp256k1{03028F0D5A9FD41600191CDEFDEA05E77A68DFBCE286241C0190805B9346667D07}",
key1.GetPubKey().String())
config.SetPurpose(44)
config.SetCoinType(118)
config.SetFullFundraiserPath("44'/118'/0'/0/0")
config.SetBech32PrefixForAccount(sdk.Bech32PrefixAccAddr, sdk.Bech32PrefixAccPub)
config.SetBech32PrefixForValidator(sdk.Bech32PrefixValAddr, sdk.Bech32PrefixValPub)
config.SetBech32PrefixForConsensusNode(sdk.Bech32PrefixConsAddr, sdk.Bech32PrefixConsPub)
@ -122,6 +121,6 @@ func Test_runAddCmdLedger(t *testing.T) {
require.Equal(t, "keyname1", key1.GetName())
require.Equal(t, keyring.TypeLedger, key1.GetType())
require.Equal(t,
"cosmospub1addwnpepqd87l8xhcnrrtzxnkql7k55ph8fr9jarf4hn6udwukfprlalu8lgw0urza0",
sdk.MustBech32ifyPubKey(sdk.Bech32PubKeyTypeAccPub, key1.GetPubKey()))
"PubKeySecp256k1{034FEF9CD7C4C63588D3B03FEB5281B9D232CBA34D6F3D71AEE59211FFBFE1FE87}",
key1.GetPubKey().String())
}

View File

@ -5,6 +5,8 @@ import (
cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
)
// TODO: remove this file https://github.com/cosmos/cosmos-sdk/issues/8047
// KeysCdc defines codec to be used with key operations
var KeysCdc *codec.LegacyAmino

View File

@ -20,10 +20,10 @@ func getTestCases() testCases {
return testCases{
// nolint:govet
[]keyring.KeyOutput{
{"A", "B", "C", "D", "E", 0, nil},
{"A", "B", "C", "D", "", 0, nil},
{"", "B", "C", "D", "", 0, nil},
{"", "", "", "", "", 0, nil},
{"A", "B", "C", "D", "E"},
{"A", "B", "C", "D", ""},
{"", "B", "C", "D", ""},
{"", "", "", "", ""},
},
make([]keyring.KeyOutput, 4),
[][]byte{

View File

@ -31,25 +31,27 @@ func Test_runDeleteCmd(t *testing.T) {
fakeKeyName1 := "runDeleteCmd_Key1"
fakeKeyName2 := "runDeleteCmd_Key2"
path := sdk.GetConfig().GetFullFundraiserPath()
path := sdk.GetConfig().GetFullBIP44Path()
cmd.SetArgs([]string{"blah", fmt.Sprintf("--%s=%s", flags.FlagHome, kbHome)})
kb, err := keyring.New(sdk.KeyringServiceName(), keyring.BackendTest, kbHome, mockIn)
require.NoError(t, err)
_, err = kb.NewAccount(fakeKeyName1, testutil.TestMnemonic, "", path, hd.Secp256k1)
require.NoError(t, err)
_, _, err = kb.NewMnemonic(fakeKeyName2, keyring.English, sdk.FullFundraiserPath, hd.Secp256k1)
_, _, err = kb.NewMnemonic(fakeKeyName2, keyring.English, sdk.FullFundraiserPath, keyring.DefaultBIP39Passphrase, hd.Secp256k1)
require.NoError(t, err)
cmd.SetArgs([]string{"blah", fmt.Sprintf("--%s=%s", flags.FlagHome, kbHome)})
clientCtx := client.Context{}.
WithKeyringDir(kbHome).
WithKeyring(kb)
clientCtx := client.Context{}.WithKeyring(kb)
ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx)
err = cmd.ExecuteContext(ctx)
require.Error(t, err)
require.Equal(t, "The specified item could not be found in the keyring", err.Error())
require.EqualError(t, err, "blah.info: key not found")
// User confirmation missing
cmd.SetArgs([]string{

View File

@ -31,7 +31,7 @@ func Test_runExportCmd(t *testing.T) {
kb.Delete("keyname1") // nolint:errcheck
})
path := sdk.GetConfig().GetFullFundraiserPath()
path := sdk.GetConfig().GetFullBIP44Path()
_, err = kb.NewAccount("keyname1", testutil.TestMnemonic, "", path, hd.Secp256k1)
require.NoError(t, err)
@ -45,7 +45,9 @@ func Test_runExportCmd(t *testing.T) {
mockIn.Reset("123456789\n123456789\n")
cmd.SetArgs(args)
clientCtx := client.Context{}.WithKeyring(kb)
clientCtx := client.Context{}.
WithKeyringDir(kbHome).
WithKeyring(kb)
ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx)
require.NoError(t, cmd.ExecuteContext(ctx))

View File

@ -25,7 +25,9 @@ func Test_runImportCmd(t *testing.T) {
kbHome := t.TempDir()
kb, err := keyring.New(sdk.KeyringServiceName(), keyring.BackendTest, kbHome, mockIn)
clientCtx := client.Context{}.WithKeyring(kb)
clientCtx := client.Context{}.
WithKeyringDir(kbHome).
WithKeyring(kb)
ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx)
require.NoError(t, err)

View File

@ -2,7 +2,6 @@ package keys
import (
"github.com/spf13/cobra"
"github.com/tendermint/tendermint/libs/cli"
"github.com/cosmos/cosmos-sdk/client"
)
@ -34,11 +33,8 @@ func runListCmd(cmd *cobra.Command, _ []string) error {
return err
}
cmd.SetOut(cmd.OutOrStdout())
if ok, _ := cmd.Flags().GetBool(flagListNames); !ok {
output, _ := cmd.Flags().GetString(cli.OutputFlag)
printInfos(cmd.OutOrStdout(), infos, output)
printInfos(cmd.OutOrStdout(), infos, clientCtx.OutputFormat)
return nil
}

View File

@ -30,7 +30,7 @@ func Test_runListCmd(t *testing.T) {
clientCtx := client.Context{}.WithKeyring(kb)
ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx)
path := "" //sdk.GetConfig().GetFullFundraiserPath()
path := "" //sdk.GetConfig().GetFullBIP44Path()
_, err = kb.NewAccount("something", testutil.TestMnemonic, "", path, hd.Secp256k1)
require.NoError(t, err)

View File

@ -22,16 +22,18 @@ const migratePassphrase = "NOOP_PASSPHRASE"
// MigrateCommand migrates key information from legacy keybase to OS secret store.
func MigrateCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "migrate",
Use: "migrate <old_home_dir>",
Short: "Migrate keys from the legacy (db-based) Keybase",
Long: `Migrate key information from the legacy (db-based) Keybase to the new keyring-based Keybase.
Long: `Migrate key information from the legacy (db-based) Keybase to the new keyring-based Keyring.
The legacy Keybase used to persist keys in a LevelDB database stored in a 'keys' sub-directory of
the old client application's home directory, e.g. $HOME/.gaiacli/keys/.
For each key material entry, the command will prompt if the key should be skipped or not. If the key
is not to be skipped, the passphrase must be entered. The key will only be migrated if the passphrase
is correct. Otherwise, the command will exit and migration must be repeated.
It is recommended to run in 'dry-run' mode first to verify all key migration material.
`,
Args: cobra.ExactArgs(0),
Args: cobra.ExactArgs(1),
RunE: runMigrateCmd,
}
@ -44,12 +46,12 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
// instantiate legacy keybase
var legacyKb keyring.LegacyKeybase
legacyKb, err := NewLegacyKeyBaseFromDir(rootDir)
legacyKb, err := NewLegacyKeyBaseFromDir(args[0])
if err != nil {
return err
}
defer legacyKb.Close()
defer func() { _ = legacyKb.Close() }()
// fetch list of keys from legacy keybase
oldKeys, err := legacyKb.List()
@ -62,7 +64,7 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
var (
tmpDir string
migrator keyring.InfoImporter
migrator keyring.Importer
)
if dryRun, _ := cmd.Flags().GetBool(flags.FlagDryRun); dryRun {
@ -71,12 +73,12 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
return errors.Wrap(err, "failed to create temporary directory for dryrun migration")
}
defer os.RemoveAll(tmpDir)
defer func() { _ = os.RemoveAll(tmpDir) }()
migrator, err = keyring.NewInfoImporter(keyringServiceName, "test", tmpDir, buf)
migrator, err = keyring.New(keyringServiceName, keyring.BackendTest, tmpDir, buf)
} else {
backend, _ := cmd.Flags().GetString(flags.FlagKeyringBackend)
migrator, err = keyring.NewInfoImporter(keyringServiceName, backend, rootDir, buf)
migrator, err = keyring.New(keyringServiceName, backend, rootDir, buf)
}
if err != nil {
@ -86,16 +88,16 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
))
}
for _, key := range oldKeys {
legKeyInfo, err := legacyKb.Export(key.GetName())
if err != nil {
return err
}
if len(oldKeys) == 0 {
cmd.PrintErrln("Migration Aborted: no keys to migrate")
return nil
}
keyName := key.GetName()
keyType := key.GetType()
for _, oldInfo := range oldKeys {
keyName := oldInfo.GetName()
keyType := oldInfo.GetType()
cmd.PrintErrf("Migrating key: '%s (%s)' ...\n", key.GetName(), keyType)
cmd.PrintErrf("Migrating key: '%s (%s)' ...\n", keyName, keyType)
// allow user to skip migrating specific keys
ok, err := input.GetConfirmation("Skip key migration?", buf, cmd.ErrOrStderr())
@ -106,8 +108,15 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
continue
}
// TypeLocal needs an additional step to ask password.
// The other keyring types are handled by ImportInfo.
if keyType != keyring.TypeLocal {
if err := migrator.Import(keyName, legKeyInfo); err != nil {
infoImporter, ok := migrator.(keyring.LegacyInfoImporter)
if !ok {
return fmt.Errorf("the Keyring implementation does not support import operations of Info types")
}
if err = infoImporter.ImportInfo(oldInfo); err != nil {
return err
}
@ -127,10 +136,12 @@ func runMigrateCmd(cmd *cobra.Command, args []string) error {
return err
}
if err := migrator.Import(keyName, armoredPriv); err != nil {
if err := migrator.ImportPrivKey(keyName, armoredPriv, migratePassphrase); err != nil {
return err
}
}
cmd.PrintErrln("Migration complete.")
return err
}

View File

@ -5,44 +5,38 @@ import (
"fmt"
"testing"
"github.com/cosmos/cosmos-sdk/client"
"github.com/stretchr/testify/require"
"github.com/otiai10/copy"
"github.com/stretchr/testify/assert"
"github.com/tendermint/tendermint/libs/cli"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
"github.com/cosmos/cosmos-sdk/testutil"
)
func Test_runMigrateCmd(t *testing.T) {
cmd := AddKeyCommand()
_ = testutil.ApplyMockIODiscardOutErr(cmd)
cmd.Flags().AddFlagSet(Commands("home").PersistentFlags())
kbHome := t.TempDir()
clientCtx := client.Context{}.WithKeyringDir(kbHome)
ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx)
copy.Copy("testdata", kbHome)
cmd.SetArgs([]string{
"keyname1",
fmt.Sprintf("--%s=%s", cli.OutputFlag, OutputFormatText),
fmt.Sprintf("--%s=%s", flags.FlagKeyringBackend, keyring.BackendTest),
})
assert.NoError(t, cmd.ExecuteContext(ctx))
require.NoError(t, copy.Copy("testdata", kbHome))
cmd = MigrateCommand()
cmd := MigrateCommand()
cmd.Flags().AddFlagSet(Commands("home").PersistentFlags())
mockIn := testutil.ApplyMockIODiscardOutErr(cmd)
//mockIn := testutil.ApplyMockIODiscardOutErr(cmd)
mockIn, mockOut := testutil.ApplyMockIO(cmd)
cmd.SetArgs([]string{
fmt.Sprintf("--%s=%s", flags.FlagHome, kbHome),
kbHome,
//fmt.Sprintf("--%s=%s", flags.FlagHome, kbHome),
fmt.Sprintf("--%s=true", flags.FlagDryRun),
fmt.Sprintf("--%s=%s", flags.FlagKeyringBackend, keyring.BackendTest),
})
mockIn.Reset("test1234\ntest1234\n")
mockIn.Reset("\n12345678\n\n\n\n\n")
t.Log(mockOut.String())
assert.NoError(t, cmd.ExecuteContext(ctx))
}

View File

@ -1,7 +1,6 @@
package keys
import (
"context"
"encoding/hex"
"errors"
"fmt"
@ -86,7 +85,7 @@ hexadecimal into bech32 cosmos prefixed format and vice versa.
}
func parseKey(cmd *cobra.Command, args []string) error {
config, _ := sdk.GetSealedConfig(context.Background())
config, _ := sdk.GetSealedConfig(cmd.Context())
return doParseKey(cmd, config, args)
}

View File

@ -13,6 +13,7 @@ import (
"github.com/cosmos/cosmos-sdk/crypto/ledger"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerr "github.com/cosmos/cosmos-sdk/types/errors"
)
const (
@ -41,12 +42,12 @@ consisting of all the keys provided by name and multisig threshold.`,
Args: cobra.MinimumNArgs(1),
RunE: runShowCmd,
}
cmd.Flags().String(FlagBechPrefix, sdk.PrefixAccount, "The Bech32 prefix encoding for a key (acc|val|cons)")
cmd.Flags().BoolP(FlagAddress, "a", false, "Output the address only (overrides --output)")
cmd.Flags().BoolP(FlagPublicKey, "p", false, "Output the public key only (overrides --output)")
cmd.Flags().BoolP(FlagDevice, "d", false, "Output the address in a ledger device")
cmd.Flags().Int(flagMultiSigThreshold, 1, "K out of N required signatures")
f := cmd.Flags()
f.String(FlagBechPrefix, sdk.PrefixAccount, "The Bech32 prefix encoding for a key (acc|val|cons)")
f.BoolP(FlagAddress, "a", false, "Output the address only (overrides --output)")
f.BoolP(FlagPublicKey, "p", false, "Output the public key only (overrides --output)")
f.BoolP(FlagDevice, "d", false, "Output the address in a ledger device")
f.Int(flagMultiSigThreshold, 1, "K out of N required signatures")
return cmd
}
@ -81,7 +82,10 @@ func runShowCmd(cmd *cobra.Command, args []string) (err error) {
}
multikey := multisig.NewLegacyAminoPubKey(multisigThreshold, pks)
info = keyring.NewMultiInfo(defaultMultiSigKeyName, multikey)
info, err = keyring.NewMultiInfo(defaultMultiSigKeyName, multikey)
if err != nil {
return err
}
}
isShowAddr, _ := cmd.Flags().GetBool(FlagAddress)
@ -111,10 +115,16 @@ func runShowCmd(cmd *cobra.Command, args []string) (err error) {
output, _ := cmd.Flags().GetString(cli.OutputFlag)
switch {
case isShowAddr:
printKeyAddress(cmd.OutOrStdout(), info, bechKeyOut)
case isShowPubKey:
printPubKey(cmd.OutOrStdout(), info, bechKeyOut)
case isShowAddr, isShowPubKey:
ko, err := bechKeyOut(info)
if err != nil {
return err
}
out := ko.Address
if isShowPubKey {
out = ko.PubKey
}
fmt.Fprintln(cmd.OutOrStdout(), out)
default:
printKeyInfo(cmd.OutOrStdout(), info, bechKeyOut, output)
}
@ -144,19 +154,20 @@ func runShowCmd(cmd *cobra.Command, args []string) (err error) {
}
func fetchKey(kb keyring.Keyring, keyref string) (keyring.Info, error) {
// firstly check if the keyref is a key name of a key registered in a keyring.
info, err := kb.Key(keyref)
if err != nil {
accAddr, err := sdk.AccAddressFromBech32(keyref)
if err != nil {
return info, err
}
info, err = kb.KeyByAddress(accAddr)
if err != nil {
return info, errors.New("key not found")
}
// if the key is not there or if we have a problem with a keyring itself then we move to a
// fallback: searching for key by address.
if err == nil || !sdkerr.IsOf(err, sdkerr.ErrIO, sdkerr.ErrKeyNotFound) {
return info, err
}
return info, nil
accAddr, err := sdk.AccAddressFromBech32(keyref)
if err != nil {
return info, err
}
info, err = kb.KeyByAddress(accAddr)
return info, sdkerr.Wrap(err, "Invalid key")
}
func validateMultisigThreshold(k, nKeys int) error {
@ -173,11 +184,11 @@ func validateMultisigThreshold(k, nKeys int) error {
func getBechKeyOut(bechPrefix string) (bechKeyOutFn, error) {
switch bechPrefix {
case sdk.PrefixAccount:
return keyring.Bech32KeyOutput, nil
return keyring.MkAccKeyOutput, nil
case sdk.PrefixValidator:
return keyring.Bech32ValKeyOutput, nil
return keyring.MkValKeyOutput, nil
case sdk.PrefixConsensus:
return keyring.Bech32ConsKeyOutput, nil
return keyring.MkConsKeyOutput, nil
}
return nil, fmt.Errorf("invalid Bech32 prefix encoding provided: %s", bechPrefix)

View File

@ -24,8 +24,8 @@ func Test_multiSigKey_Properties(t *testing.T) {
1,
[]cryptotypes.PubKey{tmpKey1.PubKey()},
)
tmp := keyring.NewMultiInfo("myMultisig", pk)
tmp, err := keyring.NewMultiInfo("myMultisig", pk)
require.NoError(t, err)
require.Equal(t, "myMultisig", tmp.GetName())
require.Equal(t, keyring.TypeMulti, tmp.GetType())
require.Equal(t, "D3923267FA8A3DD367BB768FA8BDC8FF7F89DA3F", tmp.GetPubKey().Address().String())
@ -48,7 +48,9 @@ func Test_runShowCmd(t *testing.T) {
kb, err := keyring.New(sdk.KeyringServiceName(), keyring.BackendTest, kbHome, mockIn)
require.NoError(t, err)
clientCtx := client.Context{}.WithKeyring(kb)
clientCtx := client.Context{}.
WithKeyringDir(kbHome).
WithKeyring(kb)
ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx)
cmd.SetArgs([]string{"invalid"})
@ -194,28 +196,20 @@ func Test_getBechKeyOut(t *testing.T) {
}{
{"empty", args{""}, nil, true},
{"wrong", args{"???"}, nil, true},
{"acc", args{sdk.PrefixAccount}, keyring.Bech32KeyOutput, false},
{"val", args{sdk.PrefixValidator}, keyring.Bech32ValKeyOutput, false},
{"cons", args{sdk.PrefixConsensus}, keyring.Bech32ConsKeyOutput, false},
{"acc", args{sdk.PrefixAccount}, keyring.MkAccKeyOutput, false},
{"val", args{sdk.PrefixValidator}, keyring.MkValKeyOutput, false},
{"cons", args{sdk.PrefixConsensus}, keyring.MkConsKeyOutput, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
got, err := getBechKeyOut(tt.args.bechPrefix)
if (err != nil) != tt.wantErr {
t.Errorf("getBechKeyOut() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !tt.wantErr {
if tt.wantErr {
require.Error(t, err)
} else {
require.NoError(t, err)
require.NotNil(t, got)
}
// TODO: Still not possible to compare functions
// Maybe in next release: https://github.com/stretchr/testify/issues/182
//if &got != &tt.want {
// t.Errorf("getBechKeyOut() = %v, want %v", got, tt.want)
//}
})
}
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1 +1 @@
MANIFEST-000004
MANIFEST-000167

View File

@ -1 +1 @@
MANIFEST-000000
MANIFEST-000165

View File

@ -1,18 +1,876 @@
=============== Mar 30, 2020 (CEST) ===============
02:07:34.137606 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
02:07:34.144547 db@open opening
02:07:34.144770 version@stat F·[] S·0B[] Sc·[]
02:07:34.145843 db@janitor F·2 G·0
02:07:34.145875 db@open done T·1.315251ms
02:07:34.335635 db@close closing
02:07:34.335736 db@close done T·98.95µs
=============== Mar 30, 2020 (CEST) ===============
02:08:33.239115 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
02:08:33.239264 version@stat F·[] S·0B[] Sc·[]
02:08:33.239281 db@open opening
02:08:33.239310 journal@recovery F·1
02:08:33.239398 journal@recovery recovering @1
02:08:33.322008 memdb@flush created L0@2 N·4 S·391B "cos..ess,v4":"run..nfo,v3"
02:08:33.323091 version@stat F·[1] S·391B[391B] Sc·[0.25]
02:08:33.421979 db@janitor F·3 G·0
02:08:33.422153 db@open done T·182.707962ms
=============== Sep 12, 2020 (BST) ===============
14:56:38.444867 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:56:38.447630 db@open opening
14:56:38.447826 version@stat F·[] S·0B[] Sc·[]
14:56:38.449162 db@janitor F·2 G·0
14:56:38.449180 db@open done T·1.537964ms
14:56:38.449193 db@close closing
14:56:38.449264 db@close done T·69.313µs
=============== Sep 12, 2020 (BST) ===============
14:56:49.081871 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:56:49.081975 version@stat F·[] S·0B[] Sc·[]
14:56:49.081994 db@open opening
14:56:49.082040 journal@recovery F·1
14:56:49.082399 journal@recovery recovering @1
14:56:49.083134 version@stat F·[] S·0B[] Sc·[]
14:56:49.088411 db@janitor F·2 G·0
14:56:49.088430 db@open done T·6.428462ms
14:56:49.088440 db@close closing
14:56:49.088491 db@close done T·48.589µs
=============== Sep 12, 2020 (BST) ===============
14:56:55.214003 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:56:55.214144 version@stat F·[] S·0B[] Sc·[]
14:56:55.214165 db@open opening
14:56:55.214215 journal@recovery F·1
14:56:55.214329 journal@recovery recovering @2
14:56:55.214750 version@stat F·[] S·0B[] Sc·[]
14:56:55.221347 db@janitor F·2 G·0
14:56:55.221365 db@open done T·7.194565ms
14:56:55.608587 db@close closing
14:56:55.608644 db@close done T·54.685µs
=============== Sep 12, 2020 (BST) ===============
14:57:07.211101 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:57:07.211224 version@stat F·[] S·0B[] Sc·[]
14:57:07.211243 db@open opening
14:57:07.211287 journal@recovery F·1
14:57:07.211388 journal@recovery recovering @4
14:57:07.213734 memdb@flush created L0@6 N·2 S·470B "cos..ess,v2":"val..nfo,v1"
14:57:07.214142 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:07.218723 db@janitor F·3 G·0
14:57:07.218743 db@open done T·7.488657ms
14:57:07.218804 db@close closing
14:57:07.218842 db@close done T·36.603µs
=============== Sep 12, 2020 (BST) ===============
14:57:16.418006 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:57:16.418133 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.418153 db@open opening
14:57:16.418199 journal@recovery F·1
14:57:16.418508 journal@recovery recovering @7
14:57:16.418891 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.425395 db@janitor F·3 G·0
14:57:16.425423 db@open done T·7.257565ms
14:57:16.425482 db@close closing
14:57:16.425522 db@close done T·38.172µs
=============== Sep 12, 2020 (BST) ===============
14:57:16.425854 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:57:16.425965 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.425983 db@open opening
14:57:16.426027 journal@recovery F·1
14:57:16.426133 journal@recovery recovering @9
14:57:16.426324 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.431088 db@janitor F·3 G·0
14:57:16.431103 db@open done T·5.115335ms
14:57:16.431142 db@close closing
14:57:16.431179 db@close done T·35.705µs
=============== Sep 12, 2020 (BST) ===============
14:57:16.431287 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:57:16.431376 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.431394 db@open opening
14:57:16.431437 journal@recovery F·1
14:57:16.431721 journal@recovery recovering @11
14:57:16.432205 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.437468 db@janitor F·3 G·0
14:57:16.437486 db@open done T·6.087128ms
14:57:16.437529 db@close closing
14:57:16.437571 db@close done T·40.188µs
=============== Sep 12, 2020 (BST) ===============
14:57:16.437907 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:57:16.438006 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.438024 db@open opening
14:57:16.438067 journal@recovery F·1
14:57:16.438573 journal@recovery recovering @13
14:57:16.439155 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.443451 db@janitor F·3 G·0
14:57:16.443466 db@open done T·5.437579ms
14:57:16.443511 db@close closing
14:57:16.443634 db@close done T·118.642µs
=============== Sep 12, 2020 (BST) ===============
14:57:16.443733 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:57:16.443847 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.443864 db@open opening
14:57:16.443915 journal@recovery F·1
14:57:16.444629 journal@recovery recovering @15
14:57:16.445570 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:16.450978 db@janitor F·3 G·0
14:57:16.451001 db@open done T·7.132193ms
14:57:16.451050 db@close closing
14:57:16.451089 db@close done T·37.371µs
=============== Sep 12, 2020 (BST) ===============
14:57:19.439656 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:57:19.439775 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:19.439793 db@open opening
14:57:19.439845 journal@recovery F·1
14:57:19.440199 journal@recovery recovering @17
14:57:19.440624 version@stat F·[1] S·470B[470B] Sc·[0.25]
14:57:19.445819 db@janitor F·3 G·0
14:57:19.445837 db@open done T·6.03822ms
14:57:19.828985 db@close closing
14:57:19.829058 db@close done T·71.028µs
=============== Sep 12, 2020 (BST) ===============
15:07:04.002859 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:04.002990 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:04.003010 db@open opening
15:07:04.003081 journal@recovery F·1
15:07:04.003191 journal@recovery recovering @19
15:07:04.003591 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:04.008917 db@janitor F·3 G·0
15:07:04.008942 db@open done T·5.916433ms
15:07:04.009005 db@close closing
15:07:04.009050 db@close done T·42.762µs
=============== Sep 12, 2020 (BST) ===============
15:07:15.240666 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:15.240802 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.240825 db@open opening
15:07:15.240871 journal@recovery F·1
15:07:15.241288 journal@recovery recovering @21
15:07:15.241702 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.249270 db@janitor F·3 G·0
15:07:15.249299 db@open done T·8.459432ms
15:07:15.249363 db@close closing
15:07:15.249404 db@close done T·39.294µs
=============== Sep 12, 2020 (BST) ===============
15:07:15.249761 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:15.249850 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.249868 db@open opening
15:07:15.249911 journal@recovery F·1
15:07:15.250026 journal@recovery recovering @23
15:07:15.250195 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.254923 db@janitor F·3 G·0
15:07:15.254943 db@open done T·5.069716ms
15:07:15.254987 db@close closing
15:07:15.255026 db@close done T·37.365µs
=============== Sep 12, 2020 (BST) ===============
15:07:15.255136 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:15.255218 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.255235 db@open opening
15:07:15.255277 journal@recovery F·1
15:07:15.255617 journal@recovery recovering @25
15:07:15.256091 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.262240 db@janitor F·3 G·0
15:07:15.262260 db@open done T·7.018813ms
15:07:15.262310 db@close closing
15:07:15.262353 db@close done T·41.276µs
=============== Sep 12, 2020 (BST) ===============
15:07:15.262707 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:15.262808 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.262829 db@open opening
15:07:15.262874 journal@recovery F·1
15:07:15.263408 journal@recovery recovering @27
15:07:15.263994 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.268793 db@janitor F·3 G·0
15:07:15.268810 db@open done T·5.975152ms
15:07:15.268861 db@close closing
15:07:15.268900 db@close done T·37.419µs
=============== Sep 12, 2020 (BST) ===============
15:07:15.268989 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:15.269096 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.269117 db@open opening
15:07:15.269165 journal@recovery F·1
15:07:15.269858 journal@recovery recovering @29
15:07:15.270587 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:15.275935 db@janitor F·3 G·0
15:07:15.275951 db@open done T·6.828156ms
15:07:15.275999 db@close closing
15:07:15.276033 db@close done T·32.757µs
=============== Sep 12, 2020 (BST) ===============
15:07:21.660414 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:21.660547 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.660568 db@open opening
15:07:21.660655 journal@recovery F·1
15:07:21.660960 journal@recovery recovering @31
15:07:21.661682 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.667796 db@janitor F·3 G·0
15:07:21.667813 db@open done T·7.237366ms
15:07:21.667869 db@close closing
15:07:21.667914 db@close done T·43.496µs
=============== Sep 12, 2020 (BST) ===============
15:07:21.668253 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:21.668354 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.668372 db@open opening
15:07:21.668418 journal@recovery F·1
15:07:21.668529 journal@recovery recovering @33
15:07:21.668930 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.674796 db@janitor F·3 G·0
15:07:21.674817 db@open done T·6.440491ms
15:07:21.674861 db@close closing
15:07:21.674898 db@close done T·35.584µs
=============== Sep 12, 2020 (BST) ===============
15:07:21.675013 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:21.675115 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.675131 db@open opening
15:07:21.675179 journal@recovery F·1
15:07:21.675707 journal@recovery recovering @35
15:07:21.676833 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.681212 db@janitor F·3 G·0
15:07:21.681226 db@open done T·6.089677ms
15:07:21.681270 db@close closing
15:07:21.681299 db@close done T·27.867µs
=============== Sep 12, 2020 (BST) ===============
15:07:21.681691 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:21.681799 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.681817 db@open opening
15:07:21.681882 journal@recovery F·1
15:07:21.683119 journal@recovery recovering @37
15:07:21.684000 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.689926 db@janitor F·3 G·0
15:07:21.689940 db@open done T·8.117662ms
15:07:21.689984 db@close closing
15:07:21.690027 db@close done T·42.379µs
=============== Sep 12, 2020 (BST) ===============
15:07:21.690104 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:21.690189 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.690205 db@open opening
15:07:21.690247 journal@recovery F·1
15:07:21.690536 journal@recovery recovering @39
15:07:21.690899 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:21.695207 db@janitor F·3 G·0
15:07:21.695223 db@open done T·5.013121ms
15:07:21.695265 db@close closing
15:07:21.695320 db@close done T·53.965µs
=============== Sep 12, 2020 (BST) ===============
15:07:24.335083 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:07:24.335214 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:24.335233 db@open opening
15:07:24.335282 journal@recovery F·1
15:07:24.336367 journal@recovery recovering @41
15:07:24.336786 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:07:24.342965 db@janitor F·3 G·0
15:07:24.342984 db@open done T·7.745647ms
15:07:24.725175 db@close closing
15:07:24.725234 db@close done T·57.895µs
=============== Nov 2, 2020 (GMT) ===============
00:08:43.299526 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
00:08:43.299860 version@stat F·[1] S·470B[470B] Sc·[0.25]
00:08:43.299875 db@open opening
00:08:43.299900 journal@recovery F·1
00:08:43.300467 journal@recovery recovering @43
00:08:43.301378 version@stat F·[1] S·470B[470B] Sc·[0.25]
00:08:43.307882 db@janitor F·3 G·0
00:08:43.307911 db@open done T·8.03178ms
00:08:43.308144 db@close closing
00:08:43.308231 db@close done T·85.824µs
=============== Nov 2, 2020 (GMT) ===============
00:09:14.493119 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
00:09:14.493237 version@stat F·[1] S·470B[470B] Sc·[0.25]
00:09:14.493272 db@open opening
00:09:14.493296 journal@recovery F·1
00:09:14.493370 journal@recovery recovering @45
00:09:14.493648 version@stat F·[1] S·470B[470B] Sc·[0.25]
00:09:14.499436 db@janitor F·3 G·0
00:09:14.499452 db@open done T·6.170984ms
00:09:14.499537 db@close closing
00:09:14.499592 db@close done T·52.707µs
=============== Jan 22, 2021 (GMT) ===============
12:47:15.935887 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:47:15.937333 version@stat F·[1] S·470B[470B] Sc·[0.25]
12:47:15.937343 db@open opening
12:47:15.937370 journal@recovery F·1
12:47:15.937642 journal@recovery recovering @47
12:47:15.937942 version@stat F·[1] S·470B[470B] Sc·[0.25]
12:47:15.944262 db@janitor F·3 G·0
12:47:15.944270 db@open done T·6.922789ms
12:47:15.944460 db@close closing
12:47:15.944492 db@close done T·30.723µs
=============== Jan 22, 2021 (GMT) ===============
15:23:04.060521 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:23:04.060694 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:23:04.060708 db@open opening
15:23:04.060734 journal@recovery F·1
15:23:04.061045 journal@recovery recovering @49
15:23:04.061463 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:23:04.067352 db@janitor F·3 G·0
15:23:04.067386 db@open done T·6.675171ms
15:23:11.819265 db@close closing
15:23:11.819317 db@close done T·51.057µs
=============== Jan 22, 2021 (GMT) ===============
15:23:14.037455 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:23:14.037524 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:23:14.037535 db@open opening
15:23:14.037560 journal@recovery F·1
15:23:14.037629 journal@recovery recovering @51
15:23:14.037951 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:23:14.045002 db@janitor F·3 G·0
15:23:14.045020 db@open done T·7.475686ms
15:23:22.065063 db@close closing
15:23:22.065111 db@close done T·47.074µs
=============== Jan 22, 2021 (GMT) ===============
15:23:43.145956 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:23:43.146094 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:23:43.146107 db@open opening
15:23:43.146132 journal@recovery F·1
15:23:43.146447 journal@recovery recovering @53
15:23:43.146912 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:23:43.153059 db@janitor F·3 G·0
15:23:43.153108 db@open done T·6.977141ms
15:23:43.153245 db@close closing
15:23:43.153290 db@close done T·43.663µs
=============== Jan 22, 2021 (GMT) ===============
15:25:14.027169 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:25:14.027240 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:25:14.027250 db@open opening
15:25:14.027274 journal@recovery F·1
15:25:14.027627 journal@recovery recovering @55
15:25:14.028059 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:25:14.033292 db@janitor F·3 G·0
15:25:14.033304 db@open done T·6.047911ms
15:25:19.981971 db@close closing
15:25:19.982011 db@close done T·39.165µs
=============== Jan 22, 2021 (GMT) ===============
15:25:51.137523 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:25:51.138542 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:25:51.138553 db@open opening
15:25:51.138579 journal@recovery F·1
15:25:51.138632 journal@recovery recovering @57
15:25:51.138981 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:25:51.144970 db@janitor F·3 G·0
15:25:51.144983 db@open done T·6.422769ms
15:25:51.145031 db@close closing
15:25:51.145071 db@close done T·39.108µs
=============== Jan 22, 2021 (GMT) ===============
15:25:56.504732 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:25:56.504809 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:25:56.504824 db@open opening
15:25:56.504872 journal@recovery F·1
15:25:56.505474 journal@recovery recovering @59
15:25:56.505571 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:25:56.512054 db@janitor F·3 G·0
15:25:56.512061 db@open done T·7.232269ms
15:25:56.710823 db@close closing
15:25:56.710860 db@close done T·36.326µs
=============== Jan 22, 2021 (GMT) ===============
15:26:02.847640 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
15:26:02.847733 version@stat F·[1] S·470B[470B] Sc·[0.25]
15:26:02.847745 db@open opening
15:26:02.847771 journal@recovery F·1
15:26:02.848002 journal@recovery recovering @61
15:26:02.850382 memdb@flush created L0@63 N·2 S·472B "cos..ess,v5":"tes..nfo,v4"
15:26:02.850491 version@stat F·[2] S·942B[942B] Sc·[0.50]
15:26:02.854544 db@janitor F·4 G·0
15:26:02.854552 db@open done T·6.802972ms
15:26:09.729296 db@close closing
15:26:09.729392 db@close done T·95.18µs
=============== Feb 6, 2021 (GMT) ===============
12:21:53.904083 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:21:53.904380 version@stat F·[2] S·942B[942B] Sc·[0.50]
12:21:53.904391 db@open opening
12:21:53.904417 journal@recovery F·1
12:21:53.905225 journal@recovery recovering @64
12:21:53.905589 version@stat F·[2] S·942B[942B] Sc·[0.50]
12:21:53.910965 db@janitor F·4 G·0
12:21:53.910976 db@open done T·6.578518ms
12:21:53.911304 db@close closing
12:21:53.911387 db@close done T·82.205µs
=============== Feb 6, 2021 (GMT) ===============
12:22:02.353974 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:22:02.354077 version@stat F·[2] S·942B[942B] Sc·[0.50]
12:22:02.354089 db@open opening
12:22:02.354116 journal@recovery F·1
12:22:02.354419 journal@recovery recovering @66
12:22:02.354608 version@stat F·[2] S·942B[942B] Sc·[0.50]
12:22:02.359491 db@janitor F·4 G·0
12:22:02.359504 db@open done T·5.408186ms
12:22:02.359514 db@close closing
12:22:02.359542 db@close done T·27.662µs
=============== Feb 6, 2021 (GMT) ===============
12:22:07.888198 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:22:07.888300 version@stat F·[2] S·942B[942B] Sc·[0.50]
12:22:07.888310 db@open opening
12:22:07.888338 journal@recovery F·1
12:22:07.888397 journal@recovery recovering @68
12:22:07.888494 version@stat F·[2] S·942B[942B] Sc·[0.50]
12:22:07.895048 db@janitor F·4 G·0
12:22:07.895060 db@open done T·6.746979ms
12:22:08.093013 db@close closing
12:22:08.093057 db@close done T·43.222µs
=============== Feb 18, 2021 (GMT) ===============
07:32:13.660053 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
07:32:13.661098 version@stat F·[2] S·942B[942B] Sc·[0.50]
07:32:13.661111 db@open opening
07:32:13.661140 journal@recovery F·1
07:32:13.661439 journal@recovery recovering @70
07:32:13.663498 memdb@flush created L0@72 N·2 S·465B "cia..nfo,v7":"cos..ess,v8"
07:32:13.663598 version@stat F·[3] S·1KiB[1KiB] Sc·[0.75]
07:32:13.668369 db@janitor F·5 G·0
07:32:13.668400 db@open done T·7.285777ms
07:32:13.668491 db@close closing
07:32:13.668557 db@close done T·65.011µs
=============== Feb 18, 2021 (GMT) ===============
07:32:20.349460 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
07:32:20.349568 version@stat F·[3] S·1KiB[1KiB] Sc·[0.75]
07:32:20.349618 db@open opening
07:32:20.349691 journal@recovery F·1
07:32:20.349769 journal@recovery recovering @73
07:32:20.349867 version@stat F·[3] S·1KiB[1KiB] Sc·[0.75]
07:32:20.355997 db@janitor F·5 G·0
07:32:20.356005 db@open done T·6.383828ms
07:32:20.553221 db@close closing
07:32:20.553251 db@close done T·28.713µs
=============== Feb 18, 2021 (GMT) ===============
07:32:30.022753 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
07:32:30.022830 version@stat F·[3] S·1KiB[1KiB] Sc·[0.75]
07:32:30.022842 db@open opening
07:32:30.022870 journal@recovery F·1
07:32:30.023106 journal@recovery recovering @75
07:32:30.025727 memdb@flush created L0@77 N·2 S·462B "cos..ess,v11":"foo.info,v10"
07:32:30.025896 version@stat F·[4] S·1KiB[1KiB] Sc·[1.00]
07:32:30.031203 db@janitor F·6 G·0
07:32:30.031214 db@open done T·8.368455ms
07:32:30.031222 db@close closing
07:32:30.031249 db@close done T·26.625µs
=============== Feb 18, 2021 (GMT) ===============
07:32:36.137856 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
07:32:36.137945 version@stat F·[4] S·1KiB[1KiB] Sc·[1.00]
07:32:36.137955 db@open opening
07:32:36.137988 journal@recovery F·1
07:32:36.138053 journal@recovery recovering @78
07:32:36.138160 version@stat F·[4] S·1KiB[1KiB] Sc·[1.00]
07:32:36.144271 db@janitor F·6 G·0
07:32:36.144281 db@open done T·6.322633ms
07:32:36.144342 table@compaction L0·4 -> L1·0 S·1KiB Q·12
07:32:36.145937 table@build created L1@82 N·8 S·1KiB "cia..nfo,v7":"val..nfo,v1"
07:32:36.145957 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00]
07:32:36.147223 table@compaction committed F-3 S-606B Ke·0 D·0 T·2.864358ms
07:32:36.147251 table@remove removed @77
07:32:36.147265 table@remove removed @72
07:32:36.147280 table@remove removed @63
07:32:36.147394 table@remove removed @6
07:32:36.341754 db@close closing
07:32:36.341789 db@close done T·34.217µs
=============== Feb 23, 2021 (GMT) ===============
11:59:56.652297 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
11:59:56.653267 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00]
11:59:56.653279 db@open opening
11:59:56.653333 journal@recovery F·1
11:59:56.653684 journal@recovery recovering @80
11:59:56.655439 memdb@flush created L0@83 N·2 S·491B "bar.info,v13":"cos..ess,v14"
11:59:56.655563 version@stat F·[1 1] S·1KiB[491B 1KiB] Sc·[0.25 0.00]
11:59:56.659803 db@janitor F·4 G·0
11:59:56.659812 db@open done T·6.529102ms
11:59:56.659952 db@close closing
11:59:56.660013 db@close done T·59.126µs
=============== Feb 23, 2021 (GMT) ===============
12:01:34.578182 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:01:34.578308 version@stat F·[1 1] S·1KiB[491B 1KiB] Sc·[0.25 0.00]
12:01:34.578348 db@open opening
12:01:34.578422 journal@recovery F·1
12:01:34.578796 journal@recovery recovering @84
12:01:34.579157 version@stat F·[1 1] S·1KiB[491B 1KiB] Sc·[0.25 0.00]
12:01:34.583888 db@janitor F·4 G·0
12:01:34.583925 db@open done T·5.547338ms
12:01:34.583962 db@close closing
12:01:34.584011 db@close done T·46.636µs
=============== Feb 23, 2021 (GMT) ===============
12:01:34.584060 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:01:34.584136 version@stat F·[1 1] S·1KiB[491B 1KiB] Sc·[0.25 0.00]
12:01:34.584166 db@open opening
12:01:34.584195 journal@recovery F·1
12:01:34.584799 journal@recovery recovering @86
12:01:34.584896 version@stat F·[1 1] S·1KiB[491B 1KiB] Sc·[0.25 0.00]
12:01:34.590435 db@janitor F·4 G·0
12:01:34.590445 db@open done T·6.275747ms
12:01:44.922399 db@close closing
12:01:44.922453 db@close done T·53.361µs
=============== Feb 23, 2021 (GMT) ===============
12:01:53.346191 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:01:53.346299 version@stat F·[1 1] S·1KiB[491B 1KiB] Sc·[0.25 0.00]
12:01:53.346310 db@open opening
12:01:53.346427 journal@recovery F·1
12:01:53.346591 journal@recovery recovering @88
12:01:53.350436 memdb@flush created L0@90 N·2 S·259B "cos..ess,v17":"led..nfo,v16"
12:01:53.350863 version@stat F·[2 1] S·1KiB[750B 1KiB] Sc·[0.50 0.00]
12:01:53.356998 db@janitor F·5 G·0
12:01:53.357009 db@open done T·10.694071ms
12:01:53.357177 db@close closing
12:01:53.357258 db@close done T·79.894µs
=============== Feb 23, 2021 (GMT) ===============
12:01:57.771688 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:01:57.771807 version@stat F·[2 1] S·1KiB[750B 1KiB] Sc·[0.50 0.00]
12:01:57.771818 db@open opening
12:01:57.771844 journal@recovery F·1
12:01:57.771911 journal@recovery recovering @91
12:01:57.772211 version@stat F·[2 1] S·1KiB[750B 1KiB] Sc·[0.50 0.00]
12:01:57.777712 db@janitor F·5 G·0
12:01:57.777726 db@open done T·5.899191ms
12:01:57.777794 db@close closing
12:01:57.777821 db@close done T·26.301µs
=============== Feb 23, 2021 (GMT) ===============
12:02:01.179234 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:01.179444 version@stat F·[2 1] S·1KiB[750B 1KiB] Sc·[0.50 0.00]
12:02:01.179471 db@open opening
12:02:01.179568 journal@recovery F·1
12:02:01.180395 journal@recovery recovering @93
12:02:01.180499 version@stat F·[2 1] S·1KiB[750B 1KiB] Sc·[0.50 0.00]
12:02:01.186898 db@janitor F·5 G·0
12:02:01.186908 db@open done T·7.433758ms
12:02:01.376649 db@close closing
12:02:01.376744 db@close done T·94.311µs
=============== Feb 23, 2021 (GMT) ===============
12:02:08.325782 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:08.325880 version@stat F·[2 1] S·1KiB[750B 1KiB] Sc·[0.50 0.00]
12:02:08.325892 db@open opening
12:02:08.325919 journal@recovery F·1
12:02:08.326096 journal@recovery recovering @95
12:02:08.328874 memdb@flush created L0@97 N·2 S·189B "cos..ess,d19":"tes..nfo,d20"
12:02:08.329781 version@stat F·[3 1] S·2KiB[939B 1KiB] Sc·[0.75 0.00]
12:02:08.335685 db@janitor F·6 G·0
12:02:08.335726 db@open done T·9.800531ms
12:02:08.335812 db@close closing
12:02:08.335913 db@close done T·98.185µs
=============== Feb 23, 2021 (GMT) ===============
12:02:10.989199 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:10.989372 version@stat F·[3 1] S·2KiB[939B 1KiB] Sc·[0.75 0.00]
12:02:10.989381 db@open opening
12:02:10.989413 journal@recovery F·1
12:02:10.989493 journal@recovery recovering @98
12:02:10.989823 version@stat F·[3 1] S·2KiB[939B 1KiB] Sc·[0.75 0.00]
12:02:10.997764 db@janitor F·6 G·0
12:02:10.997775 db@open done T·8.391051ms
12:02:11.186825 db@close closing
12:02:11.186873 db@close done T·46.355µs
=============== Feb 23, 2021 (GMT) ===============
12:02:13.779564 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:13.779705 version@stat F·[3 1] S·2KiB[939B 1KiB] Sc·[0.75 0.00]
12:02:13.779716 db@open opening
12:02:13.779766 journal@recovery F·1
12:02:13.780050 journal@recovery recovering @100
12:02:13.782794 memdb@flush created L0@102 N·2 S·186B "cia..nfo,d23":"cos..ess,d22"
12:02:13.782888 version@stat F·[4 1] S·2KiB[1KiB 1KiB] Sc·[1.00 0.00]
12:02:13.787114 db@janitor F·7 G·0
12:02:13.787129 db@open done T·7.382544ms
12:02:13.787201 table@compaction L0·4 -> L1·1 S·2KiB Q·24
12:02:13.787271 db@close closing
12:02:13.789006 table@build created L1@105 N·8 S·1KiB "bar.info,v13":"val..nfo,v1"
12:02:13.789011 table@build exiting
12:02:13.789013 table@build revert @105
12:02:13.789055 db@close done T·1.783005ms
=============== Feb 23, 2021 (GMT) ===============
12:02:19.245131 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:19.245285 version@stat F·[4 1] S·2KiB[1KiB 1KiB] Sc·[1.00 0.00]
12:02:19.245315 db@open opening
12:02:19.245368 journal@recovery F·1
12:02:19.245465 journal@recovery recovering @103
12:02:19.245858 version@stat F·[4 1] S·2KiB[1KiB 1KiB] Sc·[1.00 0.00]
12:02:19.251449 db@janitor F·7 G·0
12:02:19.251465 db@open done T·6.140479ms
12:02:19.251485 table@compaction L0·4 -> L1·1 S·2KiB Q·24
12:02:19.251521 db@close closing
12:02:19.251592 db@close done T·70.226µs
=============== Feb 23, 2021 (GMT) ===============
12:02:21.580113 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:21.580210 version@stat F·[4 1] S·2KiB[1KiB 1KiB] Sc·[1.00 0.00]
12:02:21.580222 db@open opening
12:02:21.580272 journal@recovery F·1
12:02:21.580647 journal@recovery recovering @105
12:02:21.580747 version@stat F·[4 1] S·2KiB[1KiB 1KiB] Sc·[1.00 0.00]
12:02:21.587123 db@janitor F·7 G·0
12:02:21.587130 db@open done T·6.905846ms
12:02:21.587221 table@compaction L0·4 -> L1·1 S·2KiB Q·24
12:02:21.589889 table@build created L1@109 N·8 S·1KiB "bar.info,v13":"val..nfo,v1"
12:02:21.589929 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00]
12:02:21.591275 table@compaction committed F-4 S-1KiB Ke·0 D·8 T·4.039289ms
12:02:21.591357 table@remove removed @102
12:02:21.591414 table@remove removed @97
12:02:21.591428 table@remove removed @90
12:02:21.591440 table@remove removed @83
12:02:21.591472 table@remove removed @82
12:02:21.777758 db@close closing
12:02:21.777800 db@close done T·40.787µs
=============== Feb 23, 2021 (GMT) ===============
12:02:22.900722 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:22.900859 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00]
12:02:22.900892 db@open opening
12:02:22.900963 journal@recovery F·1
12:02:22.901083 journal@recovery recovering @107
12:02:22.904868 memdb@flush created L0@110 N·2 S·193B "cos..ess,d25":"val..nfo,d26"
12:02:22.905267 version@stat F·[1 1] S·1KiB[193B 1KiB] Sc·[0.25 0.00]
12:02:22.909786 db@janitor F·4 G·0
12:02:22.909799 db@open done T·8.899965ms
12:02:22.909931 db@close closing
12:02:22.910008 db@close done T·74.647µs
=============== Feb 23, 2021 (GMT) ===============
12:02:53.139966 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:53.140102 version@stat F·[1 1] S·1KiB[193B 1KiB] Sc·[0.25 0.00]
12:02:53.140135 db@open opening
12:02:53.140206 journal@recovery F·1
12:02:53.140586 journal@recovery recovering @111
12:02:53.141053 version@stat F·[1 1] S·1KiB[193B 1KiB] Sc·[0.25 0.00]
12:02:53.147675 db@janitor F·4 G·0
12:02:53.147687 db@open done T·7.546001ms
12:02:53.147750 db@close closing
12:02:53.147818 db@close done T·67.754µs
=============== Feb 23, 2021 (GMT) ===============
12:02:53.147913 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:02:53.147982 version@stat F·[1 1] S·1KiB[193B 1KiB] Sc·[0.25 0.00]
12:02:53.147993 db@open opening
12:02:53.148043 journal@recovery F·1
12:02:53.148101 journal@recovery recovering @113
12:02:53.148192 version@stat F·[1 1] S·1KiB[193B 1KiB] Sc·[0.25 0.00]
12:02:53.152906 db@janitor F·4 G·0
12:02:53.152912 db@open done T·4.91707ms
12:02:53.156922 db@close closing
12:02:53.156949 db@close done T·25.968µs
=============== Feb 23, 2021 (GMT) ===============
12:03:24.147022 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:03:24.147113 version@stat F·[1 1] S·1KiB[193B 1KiB] Sc·[0.25 0.00]
12:03:24.147123 db@open opening
12:03:24.147195 journal@recovery F·1
12:03:24.147542 journal@recovery recovering @115
12:03:24.150459 memdb@flush created L0@117 N·2 S·244B "cos..ess,v29":"pub..nfo,v28"
12:03:24.150556 version@stat F·[2 1] S·1KiB[437B 1KiB] Sc·[0.50 0.00]
12:03:24.156079 db@janitor F·5 G·0
12:03:24.156116 db@open done T·8.964543ms
12:03:24.156215 db@close closing
12:03:24.156330 db@close done T·113.154µs
=============== Feb 23, 2021 (GMT) ===============
12:03:33.230269 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:03:33.230428 version@stat F·[2 1] S·1KiB[437B 1KiB] Sc·[0.50 0.00]
12:03:33.230456 db@open opening
12:03:33.230505 journal@recovery F·1
12:03:33.230859 journal@recovery recovering @118
12:03:33.231123 version@stat F·[2 1] S·1KiB[437B 1KiB] Sc·[0.50 0.00]
12:03:33.237886 db@janitor F·5 G·0
12:03:33.237932 db@open done T·7.464889ms
12:03:33.238009 db@close closing
12:03:33.238077 db@close done T·67.991µs
=============== Feb 23, 2021 (GMT) ===============
12:03:33.238135 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:03:33.238190 version@stat F·[2 1] S·1KiB[437B 1KiB] Sc·[0.50 0.00]
12:03:33.238200 db@open opening
12:03:33.238226 journal@recovery F·1
12:03:33.238295 journal@recovery recovering @120
12:03:33.238459 version@stat F·[2 1] S·1KiB[437B 1KiB] Sc·[0.50 0.00]
12:03:33.242714 db@janitor F·5 G·0
12:03:33.242723 db@open done T·4.520893ms
12:03:33.246526 db@close closing
12:03:33.246576 db@close done T·49.286µs
=============== Feb 23, 2021 (GMT) ===============
12:03:36.732039 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:03:36.732132 version@stat F·[2 1] S·1KiB[437B 1KiB] Sc·[0.50 0.00]
12:03:36.732143 db@open opening
12:03:36.732193 journal@recovery F·1
12:03:36.732321 journal@recovery recovering @122
12:03:36.734960 memdb@flush created L0@124 N·2 S·244B "cos..ess,v32":"pub..nfo,v31"
12:03:36.735282 version@stat F·[3 1] S·1KiB[681B 1KiB] Sc·[0.75 0.00]
12:03:36.740852 db@janitor F·6 G·0
12:03:36.740890 db@open done T·8.717358ms
12:03:36.741044 db@close closing
12:03:36.741134 db@close done T·87.869µs
=============== Feb 23, 2021 (GMT) ===============
12:03:56.009876 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:03:56.009989 version@stat F·[3 1] S·1KiB[681B 1KiB] Sc·[0.75 0.00]
12:03:56.010002 db@open opening
12:03:56.010034 journal@recovery F·1
12:03:56.010178 journal@recovery recovering @125
12:03:56.011128 version@stat F·[3 1] S·1KiB[681B 1KiB] Sc·[0.75 0.00]
12:03:56.018052 db@janitor F·6 G·0
12:03:56.018064 db@open done T·8.05417ms
12:03:56.018173 db@close closing
12:03:56.018224 db@close done T·49.879µs
=============== Feb 23, 2021 (GMT) ===============
12:03:58.983153 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:03:58.983257 version@stat F·[3 1] S·1KiB[681B 1KiB] Sc·[0.75 0.00]
12:03:58.983268 db@open opening
12:03:58.983297 journal@recovery F·1
12:03:58.983885 journal@recovery recovering @127
12:03:58.983986 version@stat F·[3 1] S·1KiB[681B 1KiB] Sc·[0.75 0.00]
12:03:58.991844 db@janitor F·6 G·0
12:03:58.991851 db@open done T·8.580014ms
12:03:59.181560 db@close closing
12:03:59.181637 db@close done T·76.045µs
=============== Feb 23, 2021 (GMT) ===============
12:04:10.259722 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:04:10.259852 version@stat F·[3 1] S·1KiB[681B 1KiB] Sc·[0.75 0.00]
12:04:10.259869 db@open opening
12:04:10.259919 journal@recovery F·1
12:04:10.260104 journal@recovery recovering @129
12:04:10.264224 memdb@flush created L0@131 N·2 S·187B "cos..ess,d34":"foo.info,d35"
12:04:10.264492 version@stat F·[4 1] S·1KiB[868B 1KiB] Sc·[1.00 0.00]
12:04:10.268582 db@janitor F·7 G·0
12:04:10.268595 db@open done T·8.720601ms
12:04:10.268655 table@compaction L0·4 -> L1·1 S·1KiB Q·36
12:04:10.268669 db@close closing
12:04:10.268830 db@close done T·159.948µs
=============== Feb 23, 2021 (GMT) ===============
12:04:10.268891 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:04:10.269025 version@stat F·[4 1] S·1KiB[868B 1KiB] Sc·[1.00 0.00]
12:04:10.269034 db@open opening
12:04:10.269089 journal@recovery F·1
12:04:10.269152 journal@recovery recovering @132
12:04:10.269259 version@stat F·[4 1] S·1KiB[868B 1KiB] Sc·[1.00 0.00]
12:04:10.274436 db@janitor F·7 G·0
12:04:10.274466 db@open done T·5.404186ms
12:04:10.274543 table@compaction L0·4 -> L1·1 S·1KiB Q·36
12:04:10.277245 table@build created L1@136 N·8 S·825B "bar.info,v13":"pub..nfo,v31"
12:04:10.277287 version@stat F·[0 1] S·825B[0B 825B] Sc·[0.00 0.00]
12:04:10.278388 db@close closing
12:04:10.280880 table@commit exiting
12:04:10.280907 db@close done T·2.542424ms
=============== Feb 23, 2021 (GMT) ===============
12:04:12.868499 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:04:12.868628 version@stat F·[0 1] S·825B[0B 825B] Sc·[0.00 0.00]
12:04:12.868640 db@open opening
12:04:12.868670 journal@recovery F·1
12:04:12.868785 journal@recovery recovering @134
12:04:12.870434 memdb@flush created L0@137 N·2 S·244B "cos..ess,v38":"pub..nfo,v37"
12:04:12.871017 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:04:12.876243 db@janitor F·9 G·5
12:04:12.876251 db@janitor removing table-124
12:04:12.876290 db@janitor removing table-110
12:04:12.876302 db@janitor removing table-109
12:04:12.876330 db@janitor removing table-117
12:04:12.876340 db@janitor removing table-131
12:04:12.876381 db@open done T·7.712682ms
12:04:12.876440 db@close closing
12:04:12.876498 db@close done T·55.873µs
=============== Feb 23, 2021 (GMT) ===============
12:09:38.966259 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:09:38.966450 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:09:38.966463 db@open opening
12:09:38.966490 journal@recovery F·1
12:09:38.966746 journal@recovery recovering @138
12:09:38.967252 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:09:38.974464 db@janitor F·4 G·0
12:09:38.974477 db@open done T·8.005768ms
12:09:56.196454 db@close closing
12:09:56.196575 db@close done T·142.606µs
=============== Feb 23, 2021 (GMT) ===============
12:10:09.568902 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:10:09.568981 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:10:09.568993 db@open opening
12:10:09.569022 journal@recovery F·1
12:10:09.569291 journal@recovery recovering @140
12:10:09.569781 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:10:09.575840 db@janitor F·4 G·0
12:10:09.575848 db@open done T·6.851269ms
12:10:23.290522 db@close closing
12:10:23.290590 db@close done T·66.518µs
=============== Feb 23, 2021 (GMT) ===============
12:11:01.674005 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:11:01.674086 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:11:01.674098 db@open opening
12:11:01.674128 journal@recovery F·1
12:11:01.674359 journal@recovery recovering @142
12:11:01.674814 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:11:01.680965 db@janitor F·4 G·0
12:11:01.680980 db@open done T·6.874747ms
12:11:06.655715 db@close closing
12:11:06.655759 db@close done T·43.852µs
=============== Feb 23, 2021 (GMT) ===============
12:19:52.269690 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:19:52.269780 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:19:52.269792 db@open opening
12:19:52.269826 journal@recovery F·1
12:19:52.270051 journal@recovery recovering @144
12:19:52.270585 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:19:52.276899 db@janitor F·4 G·0
12:19:52.276939 db@open done T·7.116495ms
12:19:59.249868 db@close closing
12:19:59.249968 db@close done T·99.117µs
=============== Feb 23, 2021 (GMT) ===============
12:20:30.569407 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:20:30.569504 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:20:30.569516 db@open opening
12:20:30.569545 journal@recovery F·1
12:20:30.569730 journal@recovery recovering @146
12:20:30.570245 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:20:30.577100 db@janitor F·4 G·0
12:20:30.577111 db@open done T·7.591098ms
=============== Feb 23, 2021 (GMT) ===============
12:20:35.223490 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:20:35.223588 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:20:35.223601 db@open opening
12:20:35.223630 journal@recovery F·1
12:20:35.223986 journal@recovery recovering @148
12:20:35.224401 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:20:35.229848 db@janitor F·4 G·0
12:20:35.229856 db@open done T·6.250812ms
12:20:41.049391 db@close closing
12:20:41.049441 db@close done T·49.18µs
=============== Feb 23, 2021 (GMT) ===============
12:21:45.804793 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
12:21:45.804915 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:21:45.804928 db@open opening
12:21:45.804961 journal@recovery F·1
12:21:45.805201 journal@recovery recovering @150
12:21:45.805681 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
12:21:45.810888 db@janitor F·4 G·0
12:21:45.810920 db@open done T·5.985873ms
12:21:49.489917 db@close closing
12:21:49.490008 db@close done T·89.528µs
=============== Feb 26, 2021 (GMT) ===============
11:30:44.083018 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
11:30:44.084062 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:30:44.084075 db@open opening
11:30:44.084102 journal@recovery F·1
11:30:44.084383 journal@recovery recovering @152
11:30:44.084768 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:30:44.090432 db@janitor F·4 G·0
11:30:44.090476 db@open done T·6.381184ms
11:30:44.090566 db@close closing
11:30:44.090613 db@close done T·44.34µs
=============== Feb 26, 2021 (GMT) ===============
11:32:36.352559 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
11:32:36.352641 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:32:36.352653 db@open opening
11:32:36.352681 journal@recovery F·1
11:32:36.352756 journal@recovery recovering @154
11:32:36.353034 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:32:36.360804 db@janitor F·4 G·0
11:32:36.360816 db@open done T·8.15837ms
11:32:36.360904 db@close closing
11:32:36.360960 db@close done T·54.048µs
=============== Feb 26, 2021 (GMT) ===============
11:32:48.449675 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
11:32:48.449787 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:32:48.449820 db@open opening
11:32:48.449847 journal@recovery F·1
11:32:48.449955 journal@recovery recovering @156
11:32:48.450282 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:32:48.456194 db@janitor F·4 G·0
11:32:48.456235 db@open done T·6.384513ms
11:32:48.456367 db@close closing
11:32:48.456478 db@close done T·109.034µs
=============== Feb 26, 2021 (GMT) ===============
11:34:15.269223 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
11:34:15.269382 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:34:15.269414 db@open opening
11:34:15.269464 journal@recovery F·1
11:34:15.269563 journal@recovery recovering @158
11:34:15.269872 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:34:15.275610 db@janitor F·4 G·0
11:34:15.275622 db@open done T·6.200818ms
11:34:15.275707 db@close closing
11:34:15.275752 db@close done T·44.471µs
=============== Feb 26, 2021 (GMT) ===============
11:34:32.038701 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
11:34:32.038798 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:34:32.038810 db@open opening
11:34:32.038837 journal@recovery F·1
11:34:32.039081 journal@recovery recovering @160
11:34:32.039560 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
11:34:32.045125 db@janitor F·4 G·0
11:34:32.045132 db@open done T·6.318174ms
11:34:52.928799 db@close closing
11:34:52.928908 db@close done T·94.101µs
=============== Feb 26, 2021 (GMT) ===============
19:42:33.585125 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
19:42:33.585220 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
19:42:33.585232 db@open opening
19:42:33.585283 journal@recovery F·1
19:42:33.585544 journal@recovery recovering @162
19:42:33.585964 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
19:42:33.592890 db@janitor F·4 G·0
19:42:33.592928 db@open done T·7.666705ms
19:42:33.592996 db@close closing
19:42:33.593063 db@close done T·63.906µs
=============== Feb 27, 2021 (GMT) ===============
17:05:01.817733 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
17:05:01.817819 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
17:05:01.817830 db@open opening
17:05:01.817855 journal@recovery F·1
17:05:01.818108 journal@recovery recovering @164
17:05:01.818567 version@stat F·[1 1] S·1KiB[244B 825B] Sc·[0.25 0.00]
17:05:01.824986 db@janitor F·4 G·0
17:05:01.825024 db@open done T·7.162696ms
17:05:01.825107 db@close closing
17:05:01.825221 db@close done T·111.618µs

Binary file not shown.

Binary file not shown.

View File

@ -52,7 +52,7 @@ func printKeyInfo(w io.Writer, keyInfo cryptokeyring.Info, bechKeyOut bechKeyOut
}
func printInfos(w io.Writer, infos []cryptokeyring.Info, output string) {
kos, err := cryptokeyring.Bech32KeysOutput(infos)
kos, err := cryptokeyring.MkAccKeysOutput(infos)
if err != nil {
panic(err)
}
@ -78,21 +78,3 @@ func printTextInfos(w io.Writer, kos []cryptokeyring.KeyOutput) {
}
fmt.Fprintln(w, string(out))
}
func printKeyAddress(w io.Writer, info cryptokeyring.Info, bechKeyOut bechKeyOutFn) {
ko, err := bechKeyOut(info)
if err != nil {
panic(err)
}
fmt.Fprintln(w, ko.Address)
}
func printPubKey(w io.Writer, info cryptokeyring.Info, bechKeyOut bechKeyOutFn) {
ko, err := bechKeyOut(info)
if err != nil {
panic(err)
}
fmt.Fprintln(w, ko.PubKey)
}

View File

@ -6,6 +6,8 @@ import (
"strings"
"github.com/pkg/errors"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
abci "github.com/tendermint/tendermint/abci/types"
tmbytes "github.com/tendermint/tendermint/libs/bytes"
@ -13,6 +15,7 @@ import (
"github.com/cosmos/cosmos-sdk/store/rootmulti"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)
// GetNode returns an RPC client. If the context's client is not defined, an
@ -84,7 +87,7 @@ func (ctx Context) queryABCI(req abci.RequestQuery) (abci.ResponseQuery, error)
}
if !result.Response.IsOK() {
return abci.ResponseQuery{}, errors.New(result.Response.Log)
return abci.ResponseQuery{}, sdkErrorToGRPCError(result.Response)
}
// data from trusted node or subspace query doesn't need verification
@ -95,6 +98,19 @@ func (ctx Context) queryABCI(req abci.RequestQuery) (abci.ResponseQuery, error)
return result.Response, nil
}
func sdkErrorToGRPCError(resp abci.ResponseQuery) error {
switch resp.Code {
case sdkerrors.ErrInvalidRequest.ABCICode():
return status.Error(codes.InvalidArgument, resp.Log)
case sdkerrors.ErrUnauthorized.ABCICode():
return status.Error(codes.Unauthenticated, resp.Log)
case sdkerrors.ErrKeyNotFound.ABCICode():
return status.Error(codes.NotFound, resp.Log)
default:
return status.Error(codes.Unknown, resp.Log)
}
}
// query performs a query to a Tendermint node with the provided store name
// and path. It returns the result and height of the query upon success
// or an error if the query fails.

View File

@ -25,6 +25,7 @@ func addHTTPDeprecationHeaders(h http.Handler) http.Handler {
// WithHTTPDeprecationHeaders returns a new *mux.Router, identical to its input
// but with the addition of HTTP Deprecation headers. This is used to mark legacy
// amino REST endpoints as deprecated in the REST API.
// nolint: gocritic
func WithHTTPDeprecationHeaders(r *mux.Router) *mux.Router {
subRouter := r.NewRoute().Subrouter()
subRouter.Use(addHTTPDeprecationHeaders)

View File

@ -15,7 +15,7 @@ import (
"github.com/cosmos/cosmos-sdk/types/rest"
)
//BlockCommand returns the verified block data for a given heights
// BlockCommand returns the verified block data for a given heights
func BlockCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "block [height]",

View File

@ -9,7 +9,6 @@ import (
"github.com/gorilla/mux"
"github.com/spf13/cobra"
tmtypes "github.com/tendermint/tendermint/types"
"github.com/cosmos/cosmos-sdk/client"
@ -22,7 +21,7 @@ import (
// TODO these next two functions feel kinda hacky based on their placement
//ValidatorCommand returns the validator set for a given height
// ValidatorCommand returns the validator set for a given height
func ValidatorCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "tendermint-validator-set [height]",
@ -50,7 +49,7 @@ func ValidatorCommand() *cobra.Command {
page, _ := cmd.Flags().GetInt(flags.FlagPage)
limit, _ := cmd.Flags().GetInt(flags.FlagLimit)
result, err := GetValidators(clientCtx, height, &page, &limit)
result, err := GetValidators(cmd.Context(), clientCtx, height, &page, &limit)
if err != nil {
return err
}
@ -67,7 +66,7 @@ func ValidatorCommand() *cobra.Command {
return cmd
}
// Validator output in bech32 format
// Validator output
type ValidatorOutput struct {
Address sdk.ConsAddress `json:"address"`
PubKey cryptotypes.PubKey `json:"pub_key"`
@ -79,12 +78,14 @@ type ValidatorOutput struct {
type ResultValidatorsOutput struct {
BlockHeight int64 `json:"block_height"`
Validators []ValidatorOutput `json:"validators"`
Total uint64 `json:"total"`
}
func (rvo ResultValidatorsOutput) String() string {
var b strings.Builder
b.WriteString(fmt.Sprintf("block height: %d\n", rvo.BlockHeight))
b.WriteString(fmt.Sprintf("total count: %d\n", rvo.Total))
for _, val := range rvo.Validators {
b.WriteString(
@ -117,31 +118,35 @@ func validatorOutput(validator *tmtypes.Validator) (ValidatorOutput, error) {
}
// GetValidators from client
func GetValidators(clientCtx client.Context, height *int64, page, limit *int) (ResultValidatorsOutput, error) {
func GetValidators(ctx context.Context, clientCtx client.Context, height *int64, page, limit *int) (ResultValidatorsOutput, error) {
// get the node
node, err := clientCtx.GetNode()
if err != nil {
return ResultValidatorsOutput{}, err
}
validatorsRes, err := node.Validators(context.Background(), height, page, limit)
validatorsRes, err := node.Validators(ctx, height, page, limit)
if err != nil {
return ResultValidatorsOutput{}, err
}
outputValidatorsRes := ResultValidatorsOutput{
total := validatorsRes.Total
if validatorsRes.Total < 0 {
total = 0
}
out := ResultValidatorsOutput{
BlockHeight: validatorsRes.BlockHeight,
Validators: make([]ValidatorOutput, len(validatorsRes.Validators)),
Total: uint64(total),
}
for i := 0; i < len(validatorsRes.Validators); i++ {
outputValidatorsRes.Validators[i], err = validatorOutput(validatorsRes.Validators[i])
out.Validators[i], err = validatorOutput(validatorsRes.Validators[i])
if err != nil {
return ResultValidatorsOutput{}, err
return out, err
}
}
return outputValidatorsRes, nil
return out, nil
}
// REST
@ -172,7 +177,7 @@ func ValidatorSetRequestHandlerFn(clientCtx client.Context) http.HandlerFunc {
return
}
output, err := GetValidators(clientCtx, &height, &page, &limit)
output, err := GetValidators(r.Context(), clientCtx, &height, &page, &limit)
if rest.CheckInternalServerError(w, err) {
return
}
@ -189,7 +194,7 @@ func LatestValidatorSetRequestHandlerFn(clientCtx client.Context) http.HandlerFu
return
}
output, err := GetValidators(clientCtx, nil, &page, &limit)
output, err := GetValidators(r.Context(), clientCtx, nil, &page, &limit)
if rest.CheckInternalServerError(w, err) {
return
}

View File

@ -23,14 +23,13 @@ import (
const (
memo = "waboom"
gas = uint64(10000)
timeoutHeight = 5
timeoutHeight = uint64(5)
)
var (
fee = types.NewCoins(types.NewInt64Coin("bam", 100))
_, pub1, addr1 = testdata.KeyTestPubAddr()
_, _, addr2 = testdata.KeyTestPubAddr()
msg = banktypes.NewMsgSend(addr1, addr2, types.NewCoins(types.NewInt64Coin("wack", 10000)))
sig = signing2.SignatureV2{
PubKey: pub1,
Data: &signing2.SingleSignatureData{
@ -38,13 +37,15 @@ var (
Signature: []byte("dummy"),
},
}
msg0 = banktypes.NewMsgSend(addr1, addr2, types.NewCoins(types.NewInt64Coin("wack", 1)))
msg1 = banktypes.NewMsgSend(addr1, addr2, types.NewCoins(types.NewInt64Coin("wack", 2)))
)
func buildTestTx(t *testing.T, builder client.TxBuilder) {
builder.SetMemo(memo)
builder.SetGasLimit(gas)
builder.SetFeeAmount(fee)
err := builder.SetMsgs(msg)
err := builder.SetMsgs(msg0, msg1)
require.NoError(t, err)
err = builder.SetSignatures(sig)
require.NoError(t, err)
@ -75,11 +76,15 @@ func (s *TestSuite) TestCopyTx() {
protoBuilder2 := s.protoCfg.NewTxBuilder()
err = tx2.CopyTx(aminoBuilder.GetTx(), protoBuilder2, false)
s.Require().NoError(err)
bz, err := s.protoCfg.TxEncoder()(protoBuilder.GetTx())
// Check sigs, signers and msgs.
sigsV2_1, err := protoBuilder.GetTx().GetSignaturesV2()
s.Require().NoError(err)
bz2, err := s.protoCfg.TxEncoder()(protoBuilder2.GetTx())
sigsV2_2, err := protoBuilder2.GetTx().GetSignaturesV2()
s.Require().NoError(err)
s.Require().Equal(bz, bz2)
s.Require().Equal(sigsV2_1, sigsV2_2)
s.Require().Equal(protoBuilder.GetTx().GetSigners(), protoBuilder2.GetTx().GetSigners())
s.Require().Equal(protoBuilder.GetTx().GetMsgs()[0], protoBuilder2.GetTx().GetMsgs()[0])
s.Require().Equal(protoBuilder.GetTx().GetMsgs()[1], protoBuilder2.GetTx().GetMsgs()[1])
// amino -> proto -> amino
aminoBuilder = s.aminoCfg.NewTxBuilder()
@ -90,11 +95,15 @@ func (s *TestSuite) TestCopyTx() {
aminoBuilder2 := s.aminoCfg.NewTxBuilder()
err = tx2.CopyTx(protoBuilder.GetTx(), aminoBuilder2, false)
s.Require().NoError(err)
bz, err = s.aminoCfg.TxEncoder()(aminoBuilder.GetTx())
// Check sigs, signers, and msgs
sigsV2_1, err = aminoBuilder.GetTx().GetSignaturesV2()
s.Require().NoError(err)
bz2, err = s.aminoCfg.TxEncoder()(aminoBuilder2.GetTx())
sigsV2_2, err = aminoBuilder2.GetTx().GetSignaturesV2()
s.Require().NoError(err)
s.Require().Equal(bz, bz2)
s.Require().Equal(sigsV2_1, sigsV2_2)
s.Require().Equal(aminoBuilder.GetTx().GetSigners(), aminoBuilder2.GetTx().GetSigners())
s.Require().Equal(aminoBuilder.GetTx().GetMsgs()[0], aminoBuilder2.GetTx().GetMsgs()[0])
s.Require().Equal(aminoBuilder.GetTx().GetMsgs()[1], aminoBuilder2.GetTx().GetMsgs()[1]) // We lose the "ServiceMsg" information
}
func (s *TestSuite) TestConvertTxToStdTx() {
@ -106,7 +115,8 @@ func (s *TestSuite) TestConvertTxToStdTx() {
s.Require().Equal(memo, stdTx.Memo)
s.Require().Equal(gas, stdTx.Fee.Gas)
s.Require().Equal(fee, stdTx.Fee.Amount)
s.Require().Equal(msg, stdTx.Msgs[0])
s.Require().Equal(msg0, stdTx.Msgs[0])
s.Require().Equal(msg1, stdTx.Msgs[1])
s.Require().Equal(timeoutHeight, stdTx.TimeoutHeight)
s.Require().Equal(sig.PubKey, stdTx.Signatures[0].PubKey)
s.Require().Equal(sig.Data.(*signing2.SingleSignatureData).Signature, stdTx.Signatures[0].Signature)
@ -125,7 +135,8 @@ func (s *TestSuite) TestConvertTxToStdTx() {
s.Require().Equal(memo, stdTx.Memo)
s.Require().Equal(gas, stdTx.Fee.Gas)
s.Require().Equal(fee, stdTx.Fee.Amount)
s.Require().Equal(msg, stdTx.Msgs[0])
s.Require().Equal(msg0, stdTx.Msgs[0])
s.Require().Equal(msg1, stdTx.Msgs[1])
s.Require().Equal(timeoutHeight, stdTx.TimeoutHeight)
s.Require().Empty(stdTx.Signatures)
@ -158,3 +169,7 @@ func (s *TestSuite) TestConvertAndEncodeStdTx() {
s.Require().NoError(err)
s.Require().Equal(stdTx, decodedTx)
}
func TestTestSuite(t *testing.T) {
suite.Run(t, new(TestSuite))
}

View File

@ -2,11 +2,13 @@ package tx
import (
"bufio"
"context"
"errors"
"fmt"
"net/http"
"os"
gogogrpc "github.com/gogo/protobuf/grpc"
"github.com/spf13/pflag"
"github.com/cosmos/cosmos-sdk/client"
@ -20,7 +22,6 @@ import (
"github.com/cosmos/cosmos-sdk/types/tx"
"github.com/cosmos/cosmos-sdk/types/tx/signing"
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
authtx "github.com/cosmos/cosmos-sdk/x/auth/tx"
)
// GenerateOrBroadcastTxCLI will either generate and print and unsigned transaction
@ -50,7 +51,7 @@ func GenerateTx(clientCtx client.Context, txf Factory, msgs ...sdk.Msg) error {
return errors.New("cannot estimate gas in offline mode")
}
_, adjusted, err := CalculateGas(clientCtx.QueryWithData, txf, msgs...)
_, adjusted, err := CalculateGas(clientCtx, txf, msgs...)
if err != nil {
return err
}
@ -76,13 +77,13 @@ func GenerateTx(clientCtx client.Context, txf Factory, msgs ...sdk.Msg) error {
// given set of messages. It will also simulate gas requirements if necessary.
// It will return an error upon failure.
func BroadcastTx(clientCtx client.Context, txf Factory, msgs ...sdk.Msg) error {
txf, err := PrepareFactory(clientCtx, txf)
txf, err := prepareFactory(clientCtx, txf)
if err != nil {
return err
}
if txf.SimulateAndExecute() || clientCtx.Simulate {
_, adjusted, err := CalculateGas(clientCtx.QueryWithData, txf, msgs...)
_, adjusted, err := CalculateGas(clientCtx, txf, msgs...)
if err != nil {
return err
}
@ -142,8 +143,9 @@ func BroadcastTx(clientCtx client.Context, txf Factory, msgs ...sdk.Msg) error {
// BaseReq. Upon any error, the error will be written to the http.ResponseWriter.
// Note that this function returns the legacy StdTx Amino JSON format for compatibility
// with legacy clients.
// Deprecated: We are removing Amino soon.
func WriteGeneratedTxResponse(
ctx client.Context, w http.ResponseWriter, br rest.BaseReq, msgs ...sdk.Msg,
clientCtx client.Context, w http.ResponseWriter, br rest.BaseReq, msgs ...sdk.Msg,
) {
gasAdj, ok := rest.ParseFloat64OrReturnBadRequest(w, br.GasAdjustment, flags.DefaultGasAdjustment)
if !ok {
@ -163,7 +165,7 @@ func WriteGeneratedTxResponse(
WithMemo(br.Memo).
WithChainID(br.ChainID).
WithSimulateAndExecute(br.Simulate).
WithTxConfig(ctx.TxConfig).
WithTxConfig(clientCtx.TxConfig).
WithTimeoutHeight(br.TimeoutHeight)
if br.Simulate || gasSetting.Simulate {
@ -172,7 +174,7 @@ func WriteGeneratedTxResponse(
return
}
_, adjusted, err := CalculateGas(ctx.QueryWithData, txf, msgs...)
_, adjusted, err := CalculateGas(clientCtx, txf, msgs...)
if rest.CheckInternalServerError(w, err) {
return
}
@ -180,7 +182,7 @@ func WriteGeneratedTxResponse(
txf = txf.WithGas(adjusted)
if br.Simulate {
rest.WriteSimulationResponse(w, ctx.LegacyAmino, txf.Gas())
rest.WriteSimulationResponse(w, clientCtx.LegacyAmino, txf.Gas())
return
}
}
@ -190,12 +192,12 @@ func WriteGeneratedTxResponse(
return
}
stdTx, err := ConvertTxToStdTx(ctx.LegacyAmino, tx.GetTx())
stdTx, err := ConvertTxToStdTx(clientCtx.LegacyAmino, tx.GetTx())
if rest.CheckInternalServerError(w, err) {
return
}
output, err := ctx.LegacyAmino.MarshalJSON(stdTx)
output, err := clientCtx.LegacyAmino.MarshalJSON(stdTx)
if rest.CheckInternalServerError(w, err) {
return
}
@ -268,46 +270,35 @@ func BuildSimTx(txf Factory, msgs ...sdk.Msg) ([]byte, error) {
return nil, err
}
protoProvider, ok := txb.(authtx.ProtoTxProvider)
if !ok {
return nil, fmt.Errorf("cannot simulate amino tx")
}
simReq := tx.SimulateRequest{Tx: protoProvider.GetProtoTx()}
return simReq.Marshal()
return txf.txConfig.TxEncoder()(txb.GetTx())
}
// CalculateGas simulates the execution of a transaction and returns the
// simulation response obtained by the query and the adjusted gas amount.
func CalculateGas(
queryFunc func(string, []byte) ([]byte, int64, error), txf Factory, msgs ...sdk.Msg,
) (tx.SimulateResponse, uint64, error) {
clientCtx gogogrpc.ClientConn, txf Factory, msgs ...sdk.Msg,
) (*tx.SimulateResponse, uint64, error) {
txBytes, err := BuildSimTx(txf, msgs...)
if err != nil {
return tx.SimulateResponse{}, 0, err
return nil, 0, err
}
// TODO This should use the generated tx service Client.
// https://github.com/cosmos/cosmos-sdk/issues/7726
bz, _, err := queryFunc("/cosmos.tx.v1beta1.Service/Simulate", txBytes)
txSvcClient := tx.NewServiceClient(clientCtx)
simRes, err := txSvcClient.Simulate(context.Background(), &tx.SimulateRequest{
TxBytes: txBytes,
})
if err != nil {
return tx.SimulateResponse{}, 0, err
}
var simRes tx.SimulateResponse
if err := simRes.Unmarshal(bz); err != nil {
return tx.SimulateResponse{}, 0, err
return nil, 0, err
}
return simRes, uint64(txf.GasAdjustment() * float64(simRes.GasInfo.GasUsed)), nil
}
// PrepareFactory ensures the account defined by ctx.GetFromAddress() exists and
// prepareFactory ensures the account defined by ctx.GetFromAddress() exists and
// if the account number and/or the account sequence number are zero (not set),
// they will be queried for and set on the provided Factory. A new Factory with
// the updated fields will be returned.
func PrepareFactory(clientCtx client.Context, txf Factory) (Factory, error) {
func prepareFactory(clientCtx client.Context, txf Factory) (Factory, error) {
from := clientCtx.GetFromAddress()
if err := txf.accountRetriever.EnsureExists(clientCtx, from); err != nil {

View File

@ -1,10 +1,12 @@
package tx_test
import (
"errors"
gocontext "context"
"fmt"
"testing"
"github.com/stretchr/testify/require"
"google.golang.org/grpc"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/tx"
@ -24,30 +26,34 @@ func NewTestTxConfig() client.TxConfig {
return cfg.TxConfig
}
func TestCalculateGas(t *testing.T) {
makeQueryFunc := func(gasUsed uint64, wantErr bool) func(string, []byte) ([]byte, int64, error) {
return func(string, []byte) ([]byte, int64, error) {
if wantErr {
return nil, 0, errors.New("query failed")
}
simRes := &txtypes.SimulateResponse{
GasInfo: &sdk.GasInfo{GasUsed: gasUsed, GasWanted: gasUsed},
Result: &sdk.Result{Data: []byte("tx data"), Log: "log"},
}
// mockContext is a mock client.Context to return abitrary simulation response, used to
// unit test CalculateGas.
type mockContext struct {
gasUsed uint64
wantErr bool
}
bz, err := simRes.Marshal()
if err != nil {
return nil, 0, err
}
return bz, 0, nil
}
func (m mockContext) Invoke(grpcCtx gocontext.Context, method string, req, reply interface{}, opts ...grpc.CallOption) (err error) {
if m.wantErr {
return fmt.Errorf("mock err")
}
*(reply.(*txtypes.SimulateResponse)) = txtypes.SimulateResponse{
GasInfo: &sdk.GasInfo{GasUsed: m.gasUsed, GasWanted: m.gasUsed},
Result: &sdk.Result{Data: []byte("tx data"), Log: "log"},
}
return nil
}
func (mockContext) NewStream(gocontext.Context, *grpc.StreamDesc, string, ...grpc.CallOption) (grpc.ClientStream, error) {
panic("not implemented")
}
func TestCalculateGas(t *testing.T) {
type args struct {
queryFuncGasUsed uint64
queryFuncWantErr bool
adjustment float64
mockGasUsed uint64
mockWantErr bool
adjustment float64
}
testCases := []struct {
@ -70,8 +76,11 @@ func TestCalculateGas(t *testing.T) {
WithTxConfig(txCfg).WithSignMode(txCfg.SignModeHandler().DefaultMode())
t.Run(stc.name, func(t *testing.T) {
queryFunc := makeQueryFunc(stc.args.queryFuncGasUsed, stc.args.queryFuncWantErr)
simRes, gotAdjusted, err := tx.CalculateGas(queryFunc, txf.WithGasAdjustment(stc.args.adjustment))
mockClientCtx := mockContext{
gasUsed: tc.args.mockGasUsed,
wantErr: tc.args.mockWantErr,
}
simRes, gotAdjusted, err := tx.CalculateGas(mockClientCtx, txf.WithGasAdjustment(stc.args.adjustment))
if stc.expPass {
require.NoError(t, err)
require.Equal(t, simRes.GasInfo.GasUsed, stc.wantEstimate)
@ -79,7 +88,7 @@ func TestCalculateGas(t *testing.T) {
require.NotNil(t, simRes.Result)
} else {
require.Error(t, err)
require.Nil(t, simRes.Result)
require.Nil(t, simRes)
}
})
}
@ -132,10 +141,10 @@ func TestSign(t *testing.T) {
var from2 = "test_key2"
// create a new key using a mnemonic generator and test if we can reuse seed to recreate that account
_, seed, err := kr.NewMnemonic(from1, keyring.English, path, hd.Secp256k1)
_, seed, err := kr.NewMnemonic(from1, keyring.English, path, keyring.DefaultBIP39Passphrase, hd.Secp256k1)
requireT.NoError(err)
requireT.NoError(kr.Delete(from1))
info1, _, err := kr.NewMnemonic(from1, keyring.English, path, hd.Secp256k1)
info1, _, err := kr.NewMnemonic(from1, keyring.English, path, keyring.DefaultBIP39Passphrase, hd.Secp256k1)
requireT.NoError(err)
info2, err := kr.NewAccount(from2, seed, "", path, hd.Secp256k1)

View File

@ -2,6 +2,7 @@ package client
import (
"github.com/spf13/pflag"
rpchttp "github.com/tendermint/tendermint/rpc/client/http"
"github.com/cosmos/cosmos-sdk/client/flags"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
@ -51,6 +52,7 @@ func ReadPageRequest(flagSet *pflag.FlagSet) (*query.PageRequest, error) {
limit, _ := flagSet.GetUint64(flags.FlagLimit)
countTotal, _ := flagSet.GetBool(flags.FlagCountTotal)
page, _ := flagSet.GetUint64(flags.FlagPage)
reverse, _ := flagSet.GetBool(flags.FlagReverse)
if page > 1 && offset > 0 {
return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "page and offset cannot be used together")
@ -65,5 +67,14 @@ func ReadPageRequest(flagSet *pflag.FlagSet) (*query.PageRequest, error) {
Offset: offset,
Limit: limit,
CountTotal: countTotal,
Reverse: reverse,
}, nil
}
// NewClientFromNode sets up Client implementation that communicates with a Tendermint node over
// JSON RPC and WebSockets
// TODO: We might not need to manually append `/websocket`:
// https://github.com/cosmos/cosmos-sdk/issues/8986
func NewClientFromNode(nodeURI string) (*rpchttp.HTTP, error) {
return rpchttp.New(nodeURI, "/websocket")
}

View File

@ -13,8 +13,8 @@ import (
"github.com/cosmos/cosmos-sdk/codec/types"
)
// deprecated: LegacyAmino defines a wrapper for an Amino codec that properly handles protobuf
// types with Any's
// LegacyAmino defines a wrapper for an Amino codec that properly
// handles protobuf types with Any's. Deprecated.
type LegacyAmino struct {
Amino *amino.Codec
}
@ -77,7 +77,7 @@ func (cdc *LegacyAmino) jsonUnmarshalAnys(o interface{}) error {
return types.UnpackInterfaces(o, types.AminoJSONUnpacker{Cdc: cdc.Amino})
}
func (cdc *LegacyAmino) MarshalBinaryBare(o interface{}) ([]byte, error) {
func (cdc *LegacyAmino) Marshal(o interface{}) ([]byte, error) {
err := cdc.marshalAnys(o)
if err != nil {
return nil, err
@ -85,15 +85,15 @@ func (cdc *LegacyAmino) MarshalBinaryBare(o interface{}) ([]byte, error) {
return cdc.Amino.MarshalBinaryBare(o)
}
func (cdc *LegacyAmino) MustMarshalBinaryBare(o interface{}) []byte {
bz, err := cdc.MarshalBinaryBare(o)
func (cdc *LegacyAmino) MustMarshal(o interface{}) []byte {
bz, err := cdc.Marshal(o)
if err != nil {
panic(err)
}
return bz
}
func (cdc *LegacyAmino) MarshalBinaryLengthPrefixed(o interface{}) ([]byte, error) {
func (cdc *LegacyAmino) MarshalLengthPrefixed(o interface{}) ([]byte, error) {
err := cdc.marshalAnys(o)
if err != nil {
return nil, err
@ -101,15 +101,15 @@ func (cdc *LegacyAmino) MarshalBinaryLengthPrefixed(o interface{}) ([]byte, erro
return cdc.Amino.MarshalBinaryLengthPrefixed(o)
}
func (cdc *LegacyAmino) MustMarshalBinaryLengthPrefixed(o interface{}) []byte {
bz, err := cdc.MarshalBinaryLengthPrefixed(o)
func (cdc *LegacyAmino) MustMarshalLengthPrefixed(o interface{}) []byte {
bz, err := cdc.MarshalLengthPrefixed(o)
if err != nil {
panic(err)
}
return bz
}
func (cdc *LegacyAmino) UnmarshalBinaryBare(bz []byte, ptr interface{}) error {
func (cdc *LegacyAmino) Unmarshal(bz []byte, ptr interface{}) error {
err := cdc.Amino.UnmarshalBinaryBare(bz, ptr)
if err != nil {
return err
@ -117,14 +117,14 @@ func (cdc *LegacyAmino) UnmarshalBinaryBare(bz []byte, ptr interface{}) error {
return cdc.unmarshalAnys(ptr)
}
func (cdc *LegacyAmino) MustUnmarshalBinaryBare(bz []byte, ptr interface{}) {
err := cdc.UnmarshalBinaryBare(bz, ptr)
func (cdc *LegacyAmino) MustUnmarshal(bz []byte, ptr interface{}) {
err := cdc.Unmarshal(bz, ptr)
if err != nil {
panic(err)
}
}
func (cdc *LegacyAmino) UnmarshalBinaryLengthPrefixed(bz []byte, ptr interface{}) error {
func (cdc *LegacyAmino) UnmarshalLengthPrefixed(bz []byte, ptr interface{}) error {
err := cdc.Amino.UnmarshalBinaryLengthPrefixed(bz, ptr)
if err != nil {
return err
@ -132,14 +132,14 @@ func (cdc *LegacyAmino) UnmarshalBinaryLengthPrefixed(bz []byte, ptr interface{}
return cdc.unmarshalAnys(ptr)
}
func (cdc *LegacyAmino) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr interface{}) {
err := cdc.UnmarshalBinaryLengthPrefixed(bz, ptr)
func (cdc *LegacyAmino) MustUnmarshalLengthPrefixed(bz []byte, ptr interface{}) {
err := cdc.UnmarshalLengthPrefixed(bz, ptr)
if err != nil {
panic(err)
}
}
// MarshalJSON implements codec.Marshaler interface
// MarshalJSON implements codec.Codec interface
func (cdc *LegacyAmino) MarshalJSON(o interface{}) ([]byte, error) {
err := cdc.jsonMarshalAnys(o)
if err != nil {
@ -156,7 +156,7 @@ func (cdc *LegacyAmino) MustMarshalJSON(o interface{}) []byte {
return bz
}
// UnmarshalJSON implements codec.Marshaler interface
// UnmarshalJSON implements codec.Codec interface
func (cdc *LegacyAmino) UnmarshalJSON(bz []byte, ptr interface{}) error {
err := cdc.Amino.UnmarshalJSON(bz, ptr)
if err != nil {

View File

@ -10,51 +10,51 @@ type AminoCodec struct {
*LegacyAmino
}
var _ Marshaler = &AminoCodec{}
var _ Codec = &AminoCodec{}
// NewAminoCodec returns a reference to a new AminoCodec
func NewAminoCodec(codec *LegacyAmino) *AminoCodec {
return &AminoCodec{LegacyAmino: codec}
}
// MarshalBinaryBare implements BinaryMarshaler.MarshalBinaryBare method.
func (ac *AminoCodec) MarshalBinaryBare(o ProtoMarshaler) ([]byte, error) {
return ac.LegacyAmino.MarshalBinaryBare(o)
// Marshal implements BinaryMarshaler.Marshal method.
func (ac *AminoCodec) Marshal(o ProtoMarshaler) ([]byte, error) {
return ac.LegacyAmino.Marshal(o)
}
// MustMarshalBinaryBare implements BinaryMarshaler.MustMarshalBinaryBare method.
func (ac *AminoCodec) MustMarshalBinaryBare(o ProtoMarshaler) []byte {
return ac.LegacyAmino.MustMarshalBinaryBare(o)
// MustMarshal implements BinaryMarshaler.MustMarshal method.
func (ac *AminoCodec) MustMarshal(o ProtoMarshaler) []byte {
return ac.LegacyAmino.MustMarshal(o)
}
// MarshalBinaryLengthPrefixed implements BinaryMarshaler.MarshalBinaryLengthPrefixed method.
func (ac *AminoCodec) MarshalBinaryLengthPrefixed(o ProtoMarshaler) ([]byte, error) {
return ac.LegacyAmino.MarshalBinaryLengthPrefixed(o)
// MarshalLengthPrefixed implements BinaryMarshaler.MarshalLengthPrefixed method.
func (ac *AminoCodec) MarshalLengthPrefixed(o ProtoMarshaler) ([]byte, error) {
return ac.LegacyAmino.MarshalLengthPrefixed(o)
}
// MustMarshalBinaryLengthPrefixed implements BinaryMarshaler.MustMarshalBinaryLengthPrefixed method.
func (ac *AminoCodec) MustMarshalBinaryLengthPrefixed(o ProtoMarshaler) []byte {
return ac.LegacyAmino.MustMarshalBinaryLengthPrefixed(o)
// MustMarshalLengthPrefixed implements BinaryMarshaler.MustMarshalLengthPrefixed method.
func (ac *AminoCodec) MustMarshalLengthPrefixed(o ProtoMarshaler) []byte {
return ac.LegacyAmino.MustMarshalLengthPrefixed(o)
}
// UnmarshalBinaryBare implements BinaryMarshaler.UnmarshalBinaryBare method.
func (ac *AminoCodec) UnmarshalBinaryBare(bz []byte, ptr ProtoMarshaler) error {
return ac.LegacyAmino.UnmarshalBinaryBare(bz, ptr)
// Unmarshal implements BinaryMarshaler.Unmarshal method.
func (ac *AminoCodec) Unmarshal(bz []byte, ptr ProtoMarshaler) error {
return ac.LegacyAmino.Unmarshal(bz, ptr)
}
// MustUnmarshalBinaryBare implements BinaryMarshaler.MustUnmarshalBinaryBare method.
func (ac *AminoCodec) MustUnmarshalBinaryBare(bz []byte, ptr ProtoMarshaler) {
ac.LegacyAmino.MustUnmarshalBinaryBare(bz, ptr)
// MustUnmarshal implements BinaryMarshaler.MustUnmarshal method.
func (ac *AminoCodec) MustUnmarshal(bz []byte, ptr ProtoMarshaler) {
ac.LegacyAmino.MustUnmarshal(bz, ptr)
}
// UnmarshalBinaryLengthPrefixed implements BinaryMarshaler.UnmarshalBinaryLengthPrefixed method.
func (ac *AminoCodec) UnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler) error {
return ac.LegacyAmino.UnmarshalBinaryLengthPrefixed(bz, ptr)
// UnmarshalLengthPrefixed implements BinaryMarshaler.UnmarshalLengthPrefixed method.
func (ac *AminoCodec) UnmarshalLengthPrefixed(bz []byte, ptr ProtoMarshaler) error {
return ac.LegacyAmino.UnmarshalLengthPrefixed(bz, ptr)
}
// MustUnmarshalBinaryLengthPrefixed implements BinaryMarshaler.MustUnmarshalBinaryLengthPrefixed method.
func (ac *AminoCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler) {
ac.LegacyAmino.MustUnmarshalBinaryLengthPrefixed(bz, ptr)
// MustUnmarshalLengthPrefixed implements BinaryMarshaler.MustUnmarshalLengthPrefixed method.
func (ac *AminoCodec) MustUnmarshalLengthPrefixed(bz []byte, ptr ProtoMarshaler) {
ac.LegacyAmino.MustUnmarshalLengthPrefixed(bz, ptr)
}
// MarshalJSON implements JSONMarshaler.MarshalJSON method,
@ -83,23 +83,23 @@ func (ac *AminoCodec) MustUnmarshalJSON(bz []byte, ptr proto.Message) {
// MarshalInterface is a convenience function for amino marshaling interfaces.
// The `i` must be an interface.
// NOTE: to marshal a concrete type, you should use MarshalBinaryBare instead
// NOTE: to marshal a concrete type, you should use Marshal instead
func (ac *AminoCodec) MarshalInterface(i proto.Message) ([]byte, error) {
if err := assertNotNil(i); err != nil {
return nil, err
}
return ac.LegacyAmino.MarshalBinaryBare(i)
return ac.LegacyAmino.Marshal(i)
}
// UnmarshalInterface is a convenience function for amino unmarshaling interfaces.
// `ptr` must be a pointer to an interface.
// NOTE: to unmarshal a concrete type, you should use UnmarshalBinaryBare instead
// NOTE: to unmarshal a concrete type, you should use Unmarshal instead
//
// Example:
// var x MyInterface
// err := cdc.UnmarshalInterface(bz, &x)
func (ac *AminoCodec) UnmarshalInterface(bz []byte, ptr interface{}) error {
return ac.LegacyAmino.UnmarshalBinaryBare(bz, ptr)
return ac.LegacyAmino.Unmarshal(bz, ptr)
}
// MarshalInterfaceJSON is a convenience function for amino marshaling interfaces.

View File

@ -7,6 +7,10 @@ import (
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/codec/types"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/simapp"
"github.com/cosmos/cosmos-sdk/testutil/testdata"
)
@ -53,3 +57,80 @@ func TestMarshalAny(t *testing.T) {
err = cdc.UnmarshalInterface(bz, nil)
require.Error(t, err)
}
func TestMarshalProtoPubKey(t *testing.T) {
require := require.New(t)
ccfg := simapp.MakeTestEncodingConfig()
privKey := ed25519.GenPrivKey()
pk := privKey.PubKey()
// **** test JSON serialization ****
pkAny, err := codectypes.NewAnyWithValue(pk)
require.NoError(err)
bz, err := ccfg.Marshaler.MarshalJSON(pkAny)
require.NoError(err)
var pkAny2 codectypes.Any
err = ccfg.Marshaler.UnmarshalJSON(bz, &pkAny2)
require.NoError(err)
// Before getting a cached value we need to unpack it.
// Normally this happens in types which implement UnpackInterfaces
var pkI cryptotypes.PubKey
err = ccfg.InterfaceRegistry.UnpackAny(&pkAny2, &pkI)
require.NoError(err)
var pk2 = pkAny2.GetCachedValue().(cryptotypes.PubKey)
require.True(pk2.Equals(pk))
// **** test binary serialization ****
bz, err = ccfg.Marshaler.Marshal(pkAny)
require.NoError(err)
var pkAny3 codectypes.Any
err = ccfg.Marshaler.Unmarshal(bz, &pkAny3)
require.NoError(err)
err = ccfg.InterfaceRegistry.UnpackAny(&pkAny3, &pkI)
require.NoError(err)
var pk3 = pkAny3.GetCachedValue().(cryptotypes.PubKey)
require.True(pk3.Equals(pk))
}
// TestMarshalProtoInterfacePubKey tests PubKey marshaling using (Un)marshalInterface
// helper functions
func TestMarshalProtoInterfacePubKey(t *testing.T) {
require := require.New(t)
ccfg := simapp.MakeTestEncodingConfig()
privKey := ed25519.GenPrivKey()
pk := privKey.PubKey()
// **** test JSON serialization ****
bz, err := ccfg.Marshaler.MarshalInterfaceJSON(pk)
require.NoError(err)
var pk3 cryptotypes.PubKey
err = ccfg.Marshaler.UnmarshalInterfaceJSON(bz, &pk3)
require.NoError(err)
require.True(pk3.Equals(pk))
// ** Check unmarshal using JSONMarshaler **
// Unpacking won't work straightforward s Any type
// Any can't implement UnpackInterfacesMessage interface. So Any is not
// automatically unpacked and we won't get a value.
var pkAny codectypes.Any
err = ccfg.Marshaler.UnmarshalJSON(bz, &pkAny)
require.NoError(err)
ifc := pkAny.GetCachedValue()
require.Nil(ifc)
// **** test binary serialization ****
bz, err = ccfg.Marshaler.MarshalInterface(pk)
require.NoError(err)
var pk2 cryptotypes.PubKey
err = ccfg.Marshaler.UnmarshalInterface(bz, &pk2)
require.NoError(err)
require.True(pk2.Equals(pk))
}

View File

@ -7,49 +7,76 @@ import (
)
type (
// Marshaler defines the interface module codecs must implement in order to support
// backwards compatibility with Amino while allowing custom Protobuf-based
// serialization. Note, Amino can still be used without any dependency on
// Protobuf. There are two typical implementations that fulfill this contract:
// Codec defines a functionality for serializing other objects.
// Users can defin a custom Protobuf-based serialization.
// Note, Amino can still be used without any dependency on Protobuf.
// SDK provides to Codec implementations:
//
// 1. AminoCodec: Provides full Amino serialization compatibility.
// 2. ProtoCodec: Provides full Protobuf serialization compatibility.
Marshaler interface {
BinaryMarshaler
JSONMarshaler
Codec interface {
BinaryCodec
JSONCodec
}
BinaryMarshaler interface {
MarshalBinaryBare(o ProtoMarshaler) ([]byte, error)
MustMarshalBinaryBare(o ProtoMarshaler) []byte
BinaryCodec interface {
// Marshal returns binary encoding of v.
Marshal(o ProtoMarshaler) ([]byte, error)
// MustMarshal calls Marshal and panics if error is returned.
MustMarshal(o ProtoMarshaler) []byte
MarshalBinaryLengthPrefixed(o ProtoMarshaler) ([]byte, error)
MustMarshalBinaryLengthPrefixed(o ProtoMarshaler) []byte
// MarshalLengthPrefixed returns binary encoding of v with bytes length prefix.
MarshalLengthPrefixed(o ProtoMarshaler) ([]byte, error)
// MustMarshalLengthPrefixed calls MarshalLengthPrefixed and panics if
// error is returned.
MustMarshalLengthPrefixed(o ProtoMarshaler) []byte
UnmarshalBinaryBare(bz []byte, ptr ProtoMarshaler) error
MustUnmarshalBinaryBare(bz []byte, ptr ProtoMarshaler)
// Unmarshal parses the data encoded with Marshal method and stores the result
// in the value pointed to by v.
Unmarshal(bz []byte, ptr ProtoMarshaler) error
// MustUnmarshal calls Unmarshal and panics if error is returned.
MustUnmarshal(bz []byte, ptr ProtoMarshaler)
UnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler) error
MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler)
// UnmarshalLengthPrefixed parses the data encoded with MarshalLengthPrefixed method and stores
// the result in the value pointed to by v.
UnmarshalLengthPrefixed(bz []byte, ptr ProtoMarshaler) error
// MustUnmarshalLengthPrefixed calls UnmarshalLengthPrefixed and panics if error
// is returned.
MustUnmarshalLengthPrefixed(bz []byte, ptr ProtoMarshaler)
// MarshalInterface is a helper method which will wrap `i` into `Any` for correct
// binary interface (de)serialization.
MarshalInterface(i proto.Message) ([]byte, error)
// UnmarshalInterface is a helper method which will parse binary encoded data
// into `Any` and unpack any into the `ptr`. It fails if the target interface type
// is not registered in codec, or is not compatible with the serialized data
UnmarshalInterface(bz []byte, ptr interface{}) error
types.AnyUnpacker
}
JSONMarshaler interface {
JSONCodec interface {
// MarshalJSON returns JSON encoding of v.
MarshalJSON(o proto.Message) ([]byte, error)
// MustMarshalJSON calls MarshalJSON and panics if error is returned.
MustMarshalJSON(o proto.Message) []byte
// MarshalInterfaceJSON is a helper method which will wrap `i` into `Any` for correct
// JSON interface (de)serialization.
MarshalInterfaceJSON(i proto.Message) ([]byte, error)
// UnmarshalInterfaceJSON is a helper method which will parse JSON encoded data
// into `Any` and unpack any into the `ptr`. It fails if the target interface type
// is not registered in codec, or is not compatible with the serialized data
UnmarshalInterfaceJSON(bz []byte, ptr interface{}) error
// UnmarshalJSON parses the data encoded with MarshalJSON method and stores the result
// in the value pointed to by v.
UnmarshalJSON(bz []byte, ptr proto.Message) error
// MustUnmarshalJSON calls Unmarshal and panics if error is returned.
MustUnmarshalJSON(bz []byte, ptr proto.Message)
}
// ProtoMarshaler defines an interface a type must implement as protocol buffer
// defined message.
// ProtoMarshaler defines an interface a type must implement to serialize itself
// as a protocol buffer defined message.
ProtoMarshaler interface {
proto.Message // for JSON serialization
@ -60,8 +87,8 @@ type (
Unmarshal(data []byte) error
}
// AminoMarshaler defines an interface where Amino marshalling can be
// overridden by custom marshalling.
// AminoMarshaler defines an interface a type must implement to serialize itself
// for Amino codec.
AminoMarshaler interface {
MarshalAmino() ([]byte, error)
UnmarshalAmino([]byte) error

View File

@ -87,7 +87,7 @@ func testMarshalingTestCase(require *require.Assertions, tc testCase, m mustMars
}
}
func testMarshaling(t *testing.T, cdc codec.Marshaler) {
func testMarshaling(t *testing.T, cdc codec.Codec) {
any, err := types.NewAnyWithValue(&testdata.Dog{Name: "rufus"})
require.NoError(t, err)
@ -117,8 +117,8 @@ func testMarshaling(t *testing.T, cdc codec.Marshaler) {
for _, tc := range testCases {
tc := tc
m1 := mustMarshaler{cdc.MarshalBinaryBare, cdc.MustMarshalBinaryBare, cdc.UnmarshalBinaryBare, cdc.MustUnmarshalBinaryBare}
m2 := mustMarshaler{cdc.MarshalBinaryLengthPrefixed, cdc.MustMarshalBinaryLengthPrefixed, cdc.UnmarshalBinaryLengthPrefixed, cdc.MustUnmarshalBinaryLengthPrefixed}
m1 := mustMarshaler{cdc.Marshal, cdc.MustMarshal, cdc.Unmarshal, cdc.MustUnmarshal}
m2 := mustMarshaler{cdc.MarshalLengthPrefixed, cdc.MustMarshalLengthPrefixed, cdc.UnmarshalLengthPrefixed, cdc.MustUnmarshalLengthPrefixed}
m3 := mustMarshaler{
func(i codec.ProtoMarshaler) ([]byte, error) { return cdc.MarshalJSON(i) },
func(i codec.ProtoMarshaler) []byte { return cdc.MustMarshalJSON(i) },

View File

@ -3,25 +3,29 @@ package codec
import (
"bytes"
"github.com/cosmos/cosmos-sdk/codec/types"
"github.com/gogo/protobuf/jsonpb"
"github.com/gogo/protobuf/proto"
"github.com/cosmos/cosmos-sdk/codec/types"
)
var defaultJM = &jsonpb.Marshaler{OrigName: true, EmitDefaults: true, AnyResolver: nil}
// ProtoMarshalJSON provides an auxiliary function to return Proto3 JSON encoded
// bytes of a message.
func ProtoMarshalJSON(msg proto.Message, resolver jsonpb.AnyResolver) ([]byte, error) {
// We use the OrigName because camel casing fields just doesn't make sense.
// EmitDefaults is also often the more expected behavior for CLI users
jm := &jsonpb.Marshaler{OrigName: true, EmitDefaults: true, AnyResolver: resolver}
jm := defaultJM
if resolver != nil {
jm = &jsonpb.Marshaler{OrigName: true, EmitDefaults: true, AnyResolver: resolver}
}
err := types.UnpackInterfaces(msg, types.ProtoJSONPacker{JSONPBMarshaler: jm})
if err != nil {
return nil, err
}
buf := new(bytes.Buffer)
if err := jm.Marshal(buf, msg); err != nil {
return nil, err
}

View File

@ -20,12 +20,12 @@ func init() {
// PrivKeyFromBytes unmarshals private key bytes and returns a PrivKey
func PrivKeyFromBytes(privKeyBytes []byte) (privKey cryptotypes.PrivKey, err error) {
err = Cdc.UnmarshalBinaryBare(privKeyBytes, &privKey)
err = Cdc.Unmarshal(privKeyBytes, &privKey)
return
}
// PubKeyFromBytes unmarshals public key bytes and returns a PubKey
func PubKeyFromBytes(pubKeyBytes []byte) (pubKey cryptotypes.PubKey, err error) {
err = Cdc.UnmarshalBinaryBare(pubKeyBytes, &pubKey)
err = Cdc.Unmarshal(pubKeyBytes, &pubKey)
return
}

View File

@ -15,7 +15,7 @@ import (
// ProtoCodecMarshaler defines an interface for codecs that utilize Protobuf for both
// binary and JSON encoding.
type ProtoCodecMarshaler interface {
Marshaler
Codec
InterfaceRegistry() types.InterfaceRegistry
}
@ -25,7 +25,7 @@ type ProtoCodec struct {
interfaceRegistry types.InterfaceRegistry
}
var _ Marshaler = &ProtoCodec{}
var _ Codec = &ProtoCodec{}
var _ ProtoCodecMarshaler = &ProtoCodec{}
// NewProtoCodec returns a reference to a new ProtoCodec
@ -33,14 +33,18 @@ func NewProtoCodec(interfaceRegistry types.InterfaceRegistry) *ProtoCodec {
return &ProtoCodec{interfaceRegistry: interfaceRegistry}
}
// MarshalBinaryBare implements BinaryMarshaler.MarshalBinaryBare method.
func (pc *ProtoCodec) MarshalBinaryBare(o ProtoMarshaler) ([]byte, error) {
// Marshal implements BinaryMarshaler.Marshal method.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.MarshalInterface
func (pc *ProtoCodec) Marshal(o ProtoMarshaler) ([]byte, error) {
return o.Marshal()
}
// MustMarshalBinaryBare implements BinaryMarshaler.MustMarshalBinaryBare method.
func (pc *ProtoCodec) MustMarshalBinaryBare(o ProtoMarshaler) []byte {
bz, err := pc.MarshalBinaryBare(o)
// MustMarshal implements BinaryMarshaler.MustMarshal method.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.MarshalInterface
func (pc *ProtoCodec) MustMarshal(o ProtoMarshaler) []byte {
bz, err := pc.Marshal(o)
if err != nil {
panic(err)
}
@ -48,9 +52,9 @@ func (pc *ProtoCodec) MustMarshalBinaryBare(o ProtoMarshaler) []byte {
return bz
}
// MarshalBinaryLengthPrefixed implements BinaryMarshaler.MarshalBinaryLengthPrefixed method.
func (pc *ProtoCodec) MarshalBinaryLengthPrefixed(o ProtoMarshaler) ([]byte, error) {
bz, err := pc.MarshalBinaryBare(o)
// MarshalLengthPrefixed implements BinaryMarshaler.MarshalLengthPrefixed method.
func (pc *ProtoCodec) MarshalLengthPrefixed(o ProtoMarshaler) ([]byte, error) {
bz, err := pc.Marshal(o)
if err != nil {
return nil, err
}
@ -60,9 +64,9 @@ func (pc *ProtoCodec) MarshalBinaryLengthPrefixed(o ProtoMarshaler) ([]byte, err
return append(sizeBuf[:n], bz...), nil
}
// MustMarshalBinaryLengthPrefixed implements BinaryMarshaler.MustMarshalBinaryLengthPrefixed method.
func (pc *ProtoCodec) MustMarshalBinaryLengthPrefixed(o ProtoMarshaler) []byte {
bz, err := pc.MarshalBinaryLengthPrefixed(o)
// MustMarshalLengthPrefixed implements BinaryMarshaler.MustMarshalLengthPrefixed method.
func (pc *ProtoCodec) MustMarshalLengthPrefixed(o ProtoMarshaler) []byte {
bz, err := pc.MarshalLengthPrefixed(o)
if err != nil {
panic(err)
}
@ -70,8 +74,10 @@ func (pc *ProtoCodec) MustMarshalBinaryLengthPrefixed(o ProtoMarshaler) []byte {
return bz
}
// UnmarshalBinaryBare implements BinaryMarshaler.UnmarshalBinaryBare method.
func (pc *ProtoCodec) UnmarshalBinaryBare(bz []byte, ptr ProtoMarshaler) error {
// Unmarshal implements BinaryMarshaler.Unmarshal method.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.UnmarshalInterface
func (pc *ProtoCodec) Unmarshal(bz []byte, ptr ProtoMarshaler) error {
err := ptr.Unmarshal(bz)
if err != nil {
return err
@ -83,15 +89,17 @@ func (pc *ProtoCodec) UnmarshalBinaryBare(bz []byte, ptr ProtoMarshaler) error {
return nil
}
// MustUnmarshalBinaryBare implements BinaryMarshaler.MustUnmarshalBinaryBare method.
func (pc *ProtoCodec) MustUnmarshalBinaryBare(bz []byte, ptr ProtoMarshaler) {
if err := pc.UnmarshalBinaryBare(bz, ptr); err != nil {
// MustUnmarshal implements BinaryMarshaler.MustUnmarshal method.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.UnmarshalInterface
func (pc *ProtoCodec) MustUnmarshal(bz []byte, ptr ProtoMarshaler) {
if err := pc.Unmarshal(bz, ptr); err != nil {
panic(err)
}
}
// UnmarshalBinaryLengthPrefixed implements BinaryMarshaler.UnmarshalBinaryLengthPrefixed method.
func (pc *ProtoCodec) UnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler) error {
// UnmarshalLengthPrefixed implements BinaryMarshaler.UnmarshalLengthPrefixed method.
func (pc *ProtoCodec) UnmarshalLengthPrefixed(bz []byte, ptr ProtoMarshaler) error {
size, n := binary.Uvarint(bz)
if n < 0 {
return fmt.Errorf("invalid number of bytes read from length-prefixed encoding: %d", n)
@ -104,18 +112,20 @@ func (pc *ProtoCodec) UnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshale
}
bz = bz[n:]
return pc.UnmarshalBinaryBare(bz, ptr)
return pc.Unmarshal(bz, ptr)
}
// MustUnmarshalBinaryLengthPrefixed implements BinaryMarshaler.MustUnmarshalBinaryLengthPrefixed method.
func (pc *ProtoCodec) MustUnmarshalBinaryLengthPrefixed(bz []byte, ptr ProtoMarshaler) {
if err := pc.UnmarshalBinaryLengthPrefixed(bz, ptr); err != nil {
// MustUnmarshalLengthPrefixed implements BinaryMarshaler.MustUnmarshalLengthPrefixed method.
func (pc *ProtoCodec) MustUnmarshalLengthPrefixed(bz []byte, ptr ProtoMarshaler) {
if err := pc.UnmarshalLengthPrefixed(bz, ptr); err != nil {
panic(err)
}
}
// MarshalJSON implements JSONMarshaler.MarshalJSON method,
// it marshals to JSON using proto codec.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.MarshalInterfaceJSON
func (pc *ProtoCodec) MarshalJSON(o proto.Message) ([]byte, error) {
m, ok := o.(ProtoMarshaler)
if !ok {
@ -127,6 +137,8 @@ func (pc *ProtoCodec) MarshalJSON(o proto.Message) ([]byte, error) {
// MustMarshalJSON implements JSONMarshaler.MustMarshalJSON method,
// it executes MarshalJSON except it panics upon failure.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.MarshalInterfaceJSON
func (pc *ProtoCodec) MustMarshalJSON(o proto.Message) []byte {
bz, err := pc.MarshalJSON(o)
if err != nil {
@ -138,6 +150,8 @@ func (pc *ProtoCodec) MustMarshalJSON(o proto.Message) []byte {
// UnmarshalJSON implements JSONMarshaler.UnmarshalJSON method,
// it unmarshals from JSON using proto codec.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.UnmarshalInterfaceJSON
func (pc *ProtoCodec) UnmarshalJSON(bz []byte, ptr proto.Message) error {
m, ok := ptr.(ProtoMarshaler)
if !ok {
@ -155,6 +169,8 @@ func (pc *ProtoCodec) UnmarshalJSON(bz []byte, ptr proto.Message) error {
// MustUnmarshalJSON implements JSONMarshaler.MustUnmarshalJSON method,
// it executes UnmarshalJSON except it panics upon failure.
// NOTE: this function must be used with a concrete type which
// implements proto.Message. For interface please use the codec.UnmarshalInterfaceJSON
func (pc *ProtoCodec) MustUnmarshalJSON(bz []byte, ptr proto.Message) {
if err := pc.UnmarshalJSON(bz, ptr); err != nil {
panic(err)
@ -163,7 +179,7 @@ func (pc *ProtoCodec) MustUnmarshalJSON(bz []byte, ptr proto.Message) {
// MarshalInterface is a convenience function for proto marshalling interfaces. It packs
// the provided value, which must be an interface, in an Any and then marshals it to bytes.
// NOTE: to marshal a concrete type, you should use MarshalBinaryBare instead
// NOTE: to marshal a concrete type, you should use Marshal instead
func (pc *ProtoCodec) MarshalInterface(i proto.Message) ([]byte, error) {
if err := assertNotNil(i); err != nil {
return nil, err
@ -173,20 +189,20 @@ func (pc *ProtoCodec) MarshalInterface(i proto.Message) ([]byte, error) {
return nil, err
}
return pc.MarshalBinaryBare(any)
return pc.Marshal(any)
}
// UnmarshalInterface is a convenience function for proto unmarshaling interfaces. It
// unmarshals an Any from bz bytes and then unpacks it to the `ptr`, which must
// be a pointer to a non empty interface with registered implementations.
// NOTE: to unmarshal a concrete type, you should use UnmarshalBinaryBare instead
// NOTE: to unmarshal a concrete type, you should use Unmarshal instead
//
// Example:
// var x MyInterface
// err := cdc.UnmarshalInterface(bz, &x)
func (pc *ProtoCodec) UnmarshalInterface(bz []byte, ptr interface{}) error {
any := &types.Any{}
err := pc.UnmarshalBinaryBare(bz, any)
err := pc.Unmarshal(bz, any)
if err != nil {
return err
}
@ -229,6 +245,7 @@ func (pc *ProtoCodec) UnpackAny(any *types.Any, iface interface{}) error {
return pc.interfaceRegistry.UnpackAny(any, iface)
}
// InterfaceRegistry returns the interface registry backing this codec. The
// registry is what resolves Any type URLs into registered concrete types
// during interface (un)marshalling.
func (pc *ProtoCodec) InterfaceRegistry() types.InterfaceRegistry {
	return pc.interfaceRegistry
}

View File

@ -46,11 +46,11 @@ func (lpm *lyingProtoMarshaler) Size() int {
return lpm.falseSize
}
func TestProtoCodecUnmarshalBinaryLengthPrefixedChecks(t *testing.T) {
func TestProtoCodecUnmarshalLengthPrefixedChecks(t *testing.T) {
cdc := codec.NewProtoCodec(createTestInterfaceRegistry())
truth := &testdata.Cat{Lives: 9, Moniker: "glowing"}
realSize := len(cdc.MustMarshalBinaryBare(truth))
realSize := len(cdc.MustMarshal(truth))
falseSizes := []int{
100,
@ -66,10 +66,10 @@ func TestProtoCodecUnmarshalBinaryLengthPrefixedChecks(t *testing.T) {
falseSize: falseSize,
}
var serialized []byte
require.NotPanics(t, func() { serialized = cdc.MustMarshalBinaryLengthPrefixed(lpm) })
require.NotPanics(t, func() { serialized = cdc.MustMarshalLengthPrefixed(lpm) })
recv := new(testdata.Cat)
gotErr := cdc.UnmarshalBinaryLengthPrefixed(serialized, recv)
gotErr := cdc.UnmarshalLengthPrefixed(serialized, recv)
var wantErr error
if falseSize > realSize {
wantErr = fmt.Errorf("not enough bytes to read; want: %d, got: %d", falseSize, realSize)
@ -83,10 +83,10 @@ func TestProtoCodecUnmarshalBinaryLengthPrefixedChecks(t *testing.T) {
t.Run("Crafted bad uvarint size", func(t *testing.T) {
crafted := []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f}
recv := new(testdata.Cat)
gotErr := cdc.UnmarshalBinaryLengthPrefixed(crafted, recv)
gotErr := cdc.UnmarshalLengthPrefixed(crafted, recv)
require.Equal(t, gotErr, errors.New("invalid number of bytes read from length-prefixed encoding: -10"))
require.Panics(t, func() { cdc.MustUnmarshalBinaryLengthPrefixed(crafted, recv) })
require.Panics(t, func() { cdc.MustUnmarshalLengthPrefixed(crafted, recv) })
})
}
@ -98,7 +98,7 @@ func mustAny(msg proto.Message) *types.Any {
return any
}
func BenchmarkProtoCodecMarshalBinaryLengthPrefixed(b *testing.B) {
func BenchmarkProtoCodecMarshalLengthPrefixed(b *testing.B) {
var pCdc = codec.NewProtoCodec(types.NewInterfaceRegistry())
var msg = &testdata.HasAnimal{
X: 1000,
@ -124,7 +124,7 @@ func BenchmarkProtoCodecMarshalBinaryLengthPrefixed(b *testing.B) {
b.ReportAllocs()
for i := 0; i < b.N; i++ {
blob, err := pCdc.MarshalBinaryLengthPrefixed(msg)
blob, err := pCdc.MarshalLengthPrefixed(msg)
if err != nil {
b.Fatal(err)
}

View File

@ -1,6 +1,8 @@
package types
import (
fmt "fmt"
"github.com/gogo/protobuf/proto"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
@ -62,20 +64,17 @@ func NewAnyWithValue(v proto.Message) (*Any, error) {
if v == nil {
return nil, sdkerrors.Wrap(sdkerrors.ErrPackAny, "Expecting non nil value to create a new Any")
}
return NewAnyWithCustomTypeURL(v, "/"+proto.MessageName(v))
}
// NewAnyWithCustomTypeURL same as NewAnyWithValue, but sets a custom type url, instead
// using the one from proto.Message.
// NOTE: This functions should be only used for types with additional logic bundled
// into the protobuf Any serialization. For simple marshaling you should use NewAnyWithValue.
func NewAnyWithCustomTypeURL(v proto.Message, typeURL string) (*Any, error) {
bz, err := proto.Marshal(v)
if err != nil {
return nil, err
}
return &Any{
TypeUrl: typeURL,
TypeUrl: "/" + proto.MessageName(v),
Value: bz,
cachedValue: v,
}, err
}, nil
}
// UnsafePackAny packs the value x in the Any and instead of returning the error
@ -113,3 +112,26 @@ func (any *Any) pack(x proto.Message) error {
func (any *Any) GetCachedValue() interface{} {
return any.cachedValue
}
// GoString returns Go source code that reconstructs the receiver's current
// state, including any unrecognized proto fields.
func (any *Any) GoString() string {
	if any != nil {
		var extra string
		if any.XXX_unrecognized != nil {
			extra = fmt.Sprintf(",\n XXX_unrecognized: %#v,\n", any.XXX_unrecognized)
		}
		return fmt.Sprintf("&Any{TypeUrl: %#v,\n Value: %#v%s\n}",
			any.TypeUrl, any.Value, extra)
	}
	return "nil"
}
// String implements the fmt.Stringer interface, rendering all fields on a
// single line.
func (any *Any) String() string {
	if any != nil {
		return fmt.Sprintf("&Any{TypeUrl:%v,Value:%v,XXX_unrecognized:%v}",
			any.TypeUrl, any.Value, any.XXX_unrecognized)
	}
	return "nil"
}

View File

@ -11,8 +11,6 @@ import (
io "io"
math "math"
math_bits "math/bits"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
@ -82,22 +80,23 @@ func init() {
func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_b53526c13ae22eb4) }
var fileDescriptor_b53526c13ae22eb4 = []byte{
// 235 bytes of a gzipped FileDescriptorProto
// 248 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4,
0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x29, 0x91, 0xf4, 0xfc, 0xf4, 0x7c, 0x30,
0x4f, 0x1f, 0xc4, 0x82, 0x48, 0x28, 0xd9, 0x70, 0x31, 0x3b, 0xe6, 0x55, 0x0a, 0x49, 0x72, 0x71,
0x4f, 0x1f, 0xc4, 0x82, 0x48, 0x28, 0x79, 0x70, 0x31, 0x3b, 0xe6, 0x55, 0x0a, 0x49, 0x72, 0x71,
0x94, 0x54, 0x16, 0xa4, 0xc6, 0x97, 0x16, 0xe5, 0x48, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, 0xb1,
0x83, 0xf8, 0xa1, 0x45, 0x39, 0x42, 0x22, 0x5c, 0xac, 0x65, 0x89, 0x39, 0xa5, 0xa9, 0x12, 0x4c,
0x0a, 0x8c, 0x1a, 0x3c, 0x41, 0x10, 0x8e, 0x15, 0xcb, 0x87, 0x85, 0xf2, 0x0c, 0x4e, 0xcd, 0x8c,
0x37, 0x1e, 0xca, 0x31, 0x7c, 0x78, 0x28, 0xc7, 0xf8, 0xe3, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39,
0xc6, 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23,
0x39, 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x80, 0xc4, 0x1f, 0xcb, 0x31, 0x1e, 0x78, 0x2c, 0xc7,
0x70, 0xe2, 0xb1, 0x1c, 0x23, 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0xfb, 0x9c, 0x38, 0x1c,
0xf3, 0x2a, 0x03, 0x40, 0x9c, 0x00, 0xc6, 0x28, 0x56, 0x90, 0xe5, 0xc5, 0x8b, 0x98, 0x98, 0xdd,
0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x94, 0x06, 0x40, 0x95, 0xea, 0x85, 0xa7, 0xe6, 0xe4,
0x78, 0xe7, 0xe5, 0x97, 0xe7, 0x85, 0x80, 0x94, 0x25, 0xb1, 0x81, 0xcd, 0x30, 0x06, 0x04, 0x00,
0x00, 0xff, 0xff, 0xe6, 0xfb, 0xa0, 0x21, 0x0e, 0x01, 0x00, 0x00,
0x0a, 0x8c, 0x1a, 0x3c, 0x41, 0x10, 0x8e, 0x95, 0xc0, 0x8c, 0x05, 0xf2, 0x0c, 0x1b, 0x16, 0xc8,
0x33, 0x7c, 0x58, 0x28, 0xcf, 0xd0, 0x70, 0x47, 0x81, 0xc1, 0xa9, 0x99, 0xf1, 0xc6, 0x43, 0x39,
0x86, 0x0f, 0x0f, 0xe5, 0x18, 0x7f, 0x3c, 0x94, 0x63, 0x6c, 0x78, 0x24, 0xc7, 0xb8, 0xe2, 0x91,
0x1c, 0xe3, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0xf8, 0xe2,
0x91, 0x1c, 0xc3, 0x07, 0x90, 0xf8, 0x63, 0x39, 0xc6, 0x03, 0x8f, 0xe5, 0x18, 0x4e, 0x3c, 0x96,
0x63, 0xe4, 0x12, 0x4e, 0xce, 0xcf, 0xd5, 0x43, 0x73, 0xab, 0x13, 0x87, 0x63, 0x5e, 0x65, 0x00,
0x88, 0x13, 0xc0, 0x18, 0xc5, 0x0a, 0x72, 0x48, 0xf1, 0x22, 0x26, 0x66, 0xf7, 0x00, 0xa7, 0x55,
0x4c, 0x72, 0xee, 0x10, 0xa5, 0x01, 0x50, 0xa5, 0x7a, 0xe1, 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9,
0xe5, 0x79, 0x21, 0x20, 0x65, 0x49, 0x6c, 0x60, 0x33, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff,
0x4d, 0x91, 0x00, 0xa0, 0x1a, 0x01, 0x00, 0x00,
}
func (this *Any) Compare(that interface{}) int {
@ -169,28 +168,6 @@ func (this *Any) Equal(that interface{}) bool {
}
return true
}
func (this *Any) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 6)
s = append(s, "&types.Any{")
s = append(s, "TypeUrl: "+fmt.Sprintf("%#v", this.TypeUrl)+",\n")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
if this.XXX_unrecognized != nil {
s = append(s, "XXX_unrecognized:"+fmt.Sprintf("%#v", this.XXX_unrecognized)+",\n")
}
s = append(s, "}")
return strings.Join(s, "")
}
func valueToGoStringAny(v interface{}, typ string) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)
}
func (m *Any) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
@ -355,26 +332,6 @@ func sovAny(x uint64) (n int) {
func sozAny(x uint64) (n int) {
return sovAny(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *Any) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Any{`,
`TypeUrl:` + fmt.Sprintf("%v", this.TypeUrl) + `,`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`XXX_unrecognized:` + fmt.Sprintf("%v", this.XXX_unrecognized) + `,`,
`}`,
}, "")
return s
}
func valueToStringAny(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
func (m *Any) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
@ -476,10 +433,7 @@ func (m *Any) Unmarshal(dAtA []byte) error {
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthAny
}
if (iNdEx + skippy) < 0 {
if (skippy < 0) || (iNdEx+skippy) < 0 {
return ErrInvalidLengthAny
}
if (iNdEx + skippy) > l {

View File

@ -51,3 +51,15 @@ func TestAnyPackUnpack(t *testing.T) {
require.NoError(t, err)
require.Equal(t, spot, animal)
}
// TestString pins the exact output of Any's String and GoString renderings
// for a packed testdata Dog.
func TestString(t *testing.T) {
	require := require.New(t)
	spot := &Dog{Name: "Spot"}
	any, err := NewAnyWithValue(spot)
	require.NoError(err)
	// String prints Value with %v (decimal byte list) on one line.
	require.Equal("&Any{TypeUrl:/tests/dog,Value:[10 4 83 112 111 116],XXX_unrecognized:[]}", any.String())
	// GoString emits Go syntax; XXX_unrecognized is nil here, so it is omitted.
	require.Equal(`&Any{TypeUrl: "/tests/dog",
 Value: []byte{0xa, 0x4, 0x53, 0x70, 0x6f, 0x74}
}`, any.GoString())
}

66
codec/types/any_test.go Normal file
View File

@ -0,0 +1,66 @@
package types_test
import (
"fmt"
"runtime"
"testing"
"github.com/gogo/protobuf/proto"
"github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/testutil/testdata"
)
// errOnMarshal is a proto.Message whose serialization always fails; it is
// used below to exercise the error path of types.NewAnyWithValue.
type errOnMarshal struct {
	testdata.Dog
}

var _ proto.Message = (*errOnMarshal)(nil)

// errAlways is the sentinel error returned by every XXX_Marshal call.
var errAlways = fmt.Errorf("always erroring")

// XXX_Marshal implements the gogoproto marshaler hook and unconditionally
// fails with errAlways.
func (eom *errOnMarshal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return nil, errAlways
}

// eom is the shared failing message used by the test and benchmark below.
var eom = &errOnMarshal{}
// Ensure that returning an error doesn't suddenly allocate and waste bytes.
// See https://github.com/cosmos/cosmos-sdk/issues/8537
func TestNewAnyWithCustomTypeURLWithErrorNoAllocation(t *testing.T) {
	var ms1, ms2 runtime.MemStats
	runtime.ReadMemStats(&ms1)
	any, err := types.NewAnyWithValue(eom)
	runtime.ReadMemStats(&ms2)
	// HeapAlloc is a uint64 and can shrink if the GC runs between the two
	// reads; computing ms2.HeapAlloc-ms1.HeapAlloc unconditionally would
	// underflow to a huge positive number and report a bogus allocation.
	// Only compute the delta when the heap actually grew.
	if ms2.HeapAlloc > ms1.HeapAlloc {
		t.Errorf("Unexpected allocation of %d bytes", ms2.HeapAlloc-ms1.HeapAlloc)
	}
	if err == nil {
		t.Fatal("err wasn't returned")
	}
	if any != nil {
		t.Fatalf("Unexpectedly got a non-nil Any value: %v", any)
	}
}
var sink interface{}
// BenchmarkNewAnyWithCustomTypeURLWithErrorReturned measures the cost of
// NewAnyWithValue's error path when the packed value fails to marshal.
func BenchmarkNewAnyWithCustomTypeURLWithErrorReturned(b *testing.B) {
	b.ResetTimer()
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		got, err := types.NewAnyWithValue(eom)
		if err == nil {
			b.Fatal("err wasn't returned")
		}
		if got != nil {
			b.Fatalf("Unexpectedly got a non-nil Any value: %v", got)
		}
		// Publish the result so the compiler cannot elide the call.
		sink = got
	}
	if sink == nil {
		b.Fatal("benchmark didn't run")
	}
	// Drop the global reference so the last value can be collected.
	sink = (interface{})(nil)
}

View File

@ -37,7 +37,7 @@ func anyCompatError(errType string, x interface{}) error {
func (any Any) MarshalAmino() ([]byte, error) {
ac := any.compat
if ac == nil {
return nil, anyCompatError("amino binary unmarshal", any)
return nil, anyCompatError("amino binary marshal", any)
}
return ac.aminoBz, ac.err
}

Some files were not shown because too many files have changed in this diff Show More