Merge branch 'master' into sunny/change-pubkey-adr
Commit fd6f07bda2
@ -15,12 +15,15 @@ coverage:
threshold: 1% # allow this much decrease on project
app:
target: 70%
flags: app
flags:
- app
modules:
target: 70%
flags: modules
flags:
- modules
client:
flags: client
flags:
- client
changes: false

comment:

@ -12,10 +12,10 @@ jobs:
steps:
- uses: actions/checkout@master
- name: lint
run: make proto-lint-docker
run: make proto-lint
breakage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: check-breakage
run: make proto-check-breaking-docker
run: make proto-check-breaking

@ -30,7 +30,7 @@ jobs:
- name: install runsim
run: |
export GO111MODULE="on" && go get github.com/cosmos/tools/cmd/runsim@v1.0.0
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -40,7 +40,7 @@ jobs:
needs: [build, install-runsim]
steps:
- uses: actions/checkout@v2
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary

@ -39,7 +39,7 @@ jobs:
run: go version
- name: Install runsim
run: export GO111MODULE="on" && go get github.com/cosmos/tools/cmd/runsim@v1.0.0
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -60,7 +60,7 @@ jobs:
**/**.go
go.mod
go.sum
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -88,7 +88,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -116,7 +116,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary
@ -144,7 +144,7 @@ jobs:
go.sum
SET_ENV_NAME_INSERTIONS: 1
SET_ENV_NAME_LINES: 1
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-runsim-binary

@ -26,7 +26,7 @@ jobs:
- name: install tparse
run: |
export GO111MODULE="on" && go get github.com/mfridman/tparse@v0.8.3
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-tparse-binary
@ -144,6 +144,7 @@ jobs:
run: |
excludelist="$(find ./ -type f -name '*.go' | xargs grep -l 'DONTCOVER')"
excludelist+=" $(find ./ -type f -name '*.pb.go')"
excludelist+=" $(find ./ -type f -name '*.pb.gw.go')"
excludelist+=" $(find ./ -type f -path './tests/mocks/*.go')"
for filename in ${excludelist}; do
filename=$(echo $filename | sed 's/^./github.com\/cosmos\/cosmos-sdk/g')
@ -151,7 +152,7 @@ jobs:
sed -i.bak "/$(echo $filename | sed 's/\//\\\//g')/d" coverage.txt
done
if: env.GIT_DIFF
- uses: codecov/codecov-action@v1.0.13
- uses: codecov/codecov-action@v1.0.15
with:
file: ./coverage.txt
if: env.GIT_DIFF
@ -180,7 +181,7 @@ jobs:
if: env.GIT_DIFF
- name: test & coverage report creation
run: |
cat pkgs.txt.part.${{ matrix.part }} | xargs go test -mod=readonly -timeout 30m -race -tags='cgo ledger test_ledger_mock' > ${{ matrix.part }}-race-output.txt
cat pkgs.txt.part.${{ matrix.part }} | xargs go test -mod=readonly -json -timeout 30m -race -tags='cgo ledger test_ledger_mock' > ${{ matrix.part }}-race-output.txt
if: env.GIT_DIFF
- uses: actions/upload-artifact@v2
with:
@ -216,7 +217,7 @@ jobs:
with:
name: "${{ github.sha }}-03-race-output"
if: env.GIT_DIFF
- uses: actions/cache@v2.1.2
- uses: actions/cache@v2.1.3
with:
path: ~/go/bin
key: ${{ runner.os }}-go-tparse-binary
@ -230,6 +231,9 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2.1.3
with:
go-version: 1.15
- uses: technote-space/get-diff-action@v4
id: git_diff
with:

@ -20,7 +20,6 @@ docs/node_modules
docs/modules
dist
tools-stamp
proto-tools-stamp
buf-stamp
artifacts

CHANGELOG.md
@ -36,30 +36,110 @@ Ref: https://keepachangelog.com/en/1.0.0/

## [Unreleased]

### Improvements
* (SDK) [\#7925](https://github.com/cosmos/cosmos-sdk/pull/7925) Updated dependencies to use gRPC v1.33.2
  * Updated gRPC dependency to v1.33.2
  * Updated iavl dependency to v0.15-rc2
* (version) [\#7848](https://github.com/cosmos/cosmos-sdk/pull/7848) [\#7941](https://github.com/cosmos/cosmos-sdk/pull/7941) `version --long` output now shows the list of build dependencies and replaced build dependencies.

### State Machine Breaking Changes
* (x/upgrade) [\#7979](https://github.com/cosmos/cosmos-sdk/pull/7979) keeper pubkey storage serialization migration from bech32 to protobuf.

### Bug Fixes

* (crypto) [\#7966](https://github.com/cosmos/cosmos-sdk/issues/7966) `Bip44Params` `String()` function now correctly returns the absolute HD path by adding the `m/` prefix.

## [v0.40.0-rc3](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc3) - 2020-11-06

### Client Breaking

* (x/staking) [\#7499](https://github.com/cosmos/cosmos-sdk/pull/7499) `BondStatus` is now a protobuf `enum` instead of an `int32`, and JSON serialized using its protobuf name, so expect names like `BOND_STATUS_UNBONDING` as opposed to `Unbonding`.
* (x/evidence) [\#7538](https://github.com/cosmos/cosmos-sdk/pull/7538) The ABCI's `Result.Data` field of `MsgSubmitEvidence` does not contain the raw evidence's hash, but the encoded `MsgSubmitEvidenceResponse` struct.
* (x/staking) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The `TmConsPubKey` method on ValidatorI has been removed and replaced instead by `ConsPubKey` (which returns a SDK `cryptotypes.PubKey`) and `TmConsPublicKey` (which returns a Tendermint proto PublicKey).

### Improvements
* (tendermint) [\#7828](https://github.com/cosmos/cosmos-sdk/pull/7828) Update tendermint dependency to v0.34.0-rc6

## [v0.40.0-rc2](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc2) - 2020-11-02

### Client Breaking

* (x/upgrade) [#7697](https://github.com/cosmos/cosmos-sdk/pull/7697) Rename flag name "--time" to "--upgrade-time", "--info" to "--upgrade-info", to keep it consistent with help message.
* (x/auth) [#7788](https://github.com/cosmos/cosmos-sdk/pull/7788) Remove `tx auth` subcommands, all auth subcommands exist as `tx <subcommand>`

### API Breaking

* (AppModule) [\#7518](https://github.com/cosmos/cosmos-sdk/pull/7518) [\#7584](https://github.com/cosmos/cosmos-sdk/pull/7584) Rename `AppModule.RegisterQueryServices` to `AppModule.RegisterServices`, as this method now registers multiple services (the gRPC query service and the protobuf Msg service). A `Configurator` struct is used to hold the different services.
* (x/staking/types) [\#7447](https://github.com/cosmos/cosmos-sdk/issues/7447) Remove bech32 PubKey support:
  * `ValidatorI` interface update. `GetConsPubKey` renamed to `TmConsPubKey` (consensus public key must be a tendermint key). `TmConsPubKey`, `GetConsAddr` methods return error.
  * `Validator` update. Methods changed in `ValidatorI` (as described above) and `ToTmValidator` return error.
  * `Validator.ConsensusPubkey` type changed from `string` to `codectypes.Any`.
  * `MsgCreateValidator.Pubkey` type changed from `string` to `codectypes.Any`.
  * Deprecating and renaming `MakeEncodingConfig` to `MakeTestEncodingConfig` (both in `simapp` and `simapp/params` packages).
* (tx) [\#7688](https://github.com/cosmos/cosmos-sdk/pull/7688) The gRPC simulate service method has been moved from `cosmos.base.v1beta1.simulate` to `cosmos.tx.v1beta1`, as a method in the Tx service.
* [#7764](https://github.com/cosmos/cosmos-sdk/pull/7764) Added module initialization options:
  * `server/types.AppExporter` requires extra argument: `AppOptions`.
  * `server.AddCommands` requires extra argument: `addStartFlags types.ModuleInitFlags`
  * `x/crisis.NewAppModule` has a new attribute: `skipGenesisInvariants`. [PR](https://github.com/cosmos/cosmos-sdk/pull/7764)
* [#7918](https://github.com/cosmos/cosmos-sdk/pull/7918) Add x/capability safety checks:
  * All outward facing APIs will now check that capability is not nil and name is not empty before performing any state-machine changes
  * `SetIndex` has been renamed to `InitializeIndex`

### Features

* (tx) [\#7688](https://github.com/cosmos/cosmos-sdk/pull/7688) Add a new Tx gRPC service with methods `Simulate` and `GetTx` (by hash).
* __Modules__
  * `x/crisis` has a new function: `AddModuleInitFlags`, which will register optional crisis module flags for the start command.

### Bug Fixes

* (client) [\#7699](https://github.com/cosmos/cosmos-sdk/pull/7699) Fix panic in context when setting invalid nodeURI. `WithNodeURI` does not set the `Client` in the context.
* (x/gov) [#7641](https://github.com/cosmos/cosmos-sdk/pull/7641) Fix tally calculation precision error.

### Improvements

* (rest) [#7649](https://github.com/cosmos/cosmos-sdk/pull/7649) Return an unsigned tx in legacy GET /tx endpoint when signature conversion fails
* (cli) [#7764](https://github.com/cosmos/cosmos-sdk/pull/7764) Update x/banking and x/crisis InitChain to improve node startup time

## [v0.40.0-rc1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc1) - 2020-10-19

### Client Breaking Changes

* __Modules__
  * (x/staking) [\#7499](https://github.com/cosmos/cosmos-sdk/pull/7499) `BondStatus` is now a protobuf `enum` instead
    of an `int32`, and JSON serialized using its protobuf name, so expect names like `BOND_STATUS_UNBONDING` as opposed
    to `Unbonding`.
  * (x/staking) [\#7556](https://github.com/cosmos/cosmos-sdk/pull/7556) The ABCI's `Result.Data` field for
    `MsgBeginRedelegate` and `MsgUndelegate` responses does not contain custom binary marshaled `completionTime`, but the
    protobuf encoded `MsgBeginRedelegateResponse` and `MsgUndelegateResponse` structs respectively
  * (x/evidence) [\#7538](https://github.com/cosmos/cosmos-sdk/pull/7538) The ABCI's `Result.Data` field for
    `MsgSubmitEvidence` responses does not contain the raw evidence's hash, but the protobuf encoded
    `MsgSubmitEvidenceResponse` struct.
  * (x/gov) [\#7533](https://github.com/cosmos/cosmos-sdk/pull/7533) The ABCI's `Result.Data` field for
    `MsgSubmitProposal` responses does not contain a raw binary encoding of the `proposalID`, but the protobuf encoded
    `MsgSubmitSubmitProposalResponse` struct.

### API Breaking

* (AppModule) [\#7518](https://github.com/cosmos/cosmos-sdk/pull/7518) [\#7584](https://github.com/cosmos/cosmos-sdk/pull/7584) Rename `AppModule.RegisterQueryServices` to `AppModule.RegisterServices`, as this method now registers multiple services (the gRPC query service and the protobuf Msg service). A `Configurator` struct is used to hold the different services.

### Features

* (modules) [\#7540](https://github.com/cosmos/cosmos-sdk/issues/7540) Protobuf service definitions can now be used for
  packing `Msg`s in transactions as defined in [ADR 031](./docs/architecture/adr-031-msg-service.md). All modules now
  define a `Msg` protobuf service.
* (codec) [\#7519](https://github.com/cosmos/cosmos-sdk/pull/7519) `InterfaceRegistry` now inherits `jsonpb.AnyResolver`, and has a `RegisterCustomTypeURL` method to support ADR 031 packing of `Any`s. `AnyResolver` is now a required parameter to `RejectUnknownFields`.
* (baseapp) [\#7519](https://github.com/cosmos/cosmos-sdk/pull/7519) Add `ServiceMsgRouter` to BaseApp to handle routing of protobuf service `Msg`s. The two new types defined in ADR 031, `sdk.ServiceMsg` and `sdk.MsgRequest` are introduced with this router.
* (cli) [\#7221](https://github.com/cosmos/cosmos-sdk/pull/7221) Add the option of emitting amino encoded json from the CLI

### Bug Fixes

* (kvstore) [\#7415](https://github.com/cosmos/cosmos-sdk/pull/7415) Allow new stores to be registered during on-chain upgrades.
* (rest) [\#7730](https://github.com/cosmos/cosmos-sdk/pull/7730) Fix fetch txs by height on legacy REST endpoint

### Improvements

* (tendermint) [\#7527](https://github.com/cosmos/cosmos-sdk/pull/7527) Update sdk to tendermint 0.34-rc5
* (iavl) [\#7549](https://github.com/cosmos/cosmos-sdk/pull/7549) Update sdk to IAVL 0.15.0-rc4

## [v0.40.0-rc0](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc0) - 2020-10-13

@ -649,6 +729,7 @@ generalized genesis accounts through the `GenesisAccount` interface.
* (sdk) [\#4758](https://github.com/cosmos/cosmos-sdk/issues/4758) update `x/genaccounts` to match module spec
* (simulation) [\#4824](https://github.com/cosmos/cosmos-sdk/issues/4824) `PrintAllInvariants` flag will print all failed invariants
* (simulation) [\#4490](https://github.com/cosmos/cosmos-sdk/issues/4490) add `InitialBlockHeight` flag to resume a simulation from a given block

  * Support exporting the simulation stats to a given JSON file
* (simulation) [\#4847](https://github.com/cosmos/cosmos-sdk/issues/4847), [\#4838](https://github.com/cosmos/cosmos-sdk/pull/4838) and [\#4869](https://github.com/cosmos/cosmos-sdk/pull/4869) `SimApp` and simulation refactors:
  * Implement `SimulationManager` for executing modules' simulation functionalities in a modularized way
@ -962,6 +1043,7 @@ that error is that the account doesn't exist.
* (simulation) PrintAllInvariants flag will print all failed invariants
* (simulation) Add `InitialBlockHeight` flag to resume a simulation from a given block
* (simulation) [\#4670](https://github.com/cosmos/cosmos-sdk/issues/4670) Update simulation statistics to JSON format

  - Support exporting the simulation stats to a given JSON file
* [\#4775](https://github.com/cosmos/cosmos-sdk/issues/4775) Refactor CI config
* Upgrade IAVL to v0.12.4
@ -1567,8 +1649,9 @@ BREAKING CHANGES
FEATURES

* Gaia REST API
  * [\#2358](https://github.com/cosmos/cosmos-sdk/issues/2358) Add distribution module REST interface

  * [\#2358](https://github.com/cosmos/cosmos-sdk/issues/2358) Add distribution module REST interface

* Gaia CLI (`gaiacli`)
  * [\#3429](https://github.com/cosmos/cosmos-sdk/issues/3429) Support querying
    for all delegator distribution rewards.

@ -142,11 +142,15 @@ build, in which case we can fall back on `go mod tidy -v`.

We use [Protocol Buffers](https://developers.google.com/protocol-buffers) along with [gogoproto](https://github.com/gogo/protobuf) to generate code for use in Cosmos-SDK.

For deterministic behavior around Protobuf tooling, everything is containerized using Docker. Make sure to have Docker installed on your machine, or head to [Docker's website](https://docs.docker.com/get-docker/) to install it.

For formatting code in `.proto` files, you can run the `make proto-format` command.

For linting and checking breaking changes, we use [buf](https://buf.build/). There are two options for linting and to check if your changes will cause a break. The first is that you can install [buf](https://buf.build/docs/installation) locally, the commands for running buf after installing are `make proto-lint` and the breaking changes check will be `make proto-check-breaking`. If you do not want to install buf and have docker installed already then you can use these commands `make proto-lint-docker` and `make proto-check-breaking-docker`.
For linting and checking breaking changes, we use [buf](https://buf.build/). You can use the commands `make proto-lint` and `make proto-check-breaking` to respectively lint your proto files and check for breaking changes.

To generate the protobuf stubs you must have `protoc` and `protoc-gen-gocosmos` installed. To install these tools run `make proto-tools`. After this step you will be able to run `make proto-gen` to generate the protobuf stubs.
To generate the protobuf stubs, you can run `make proto-gen`.

We also added the `make proto-all` command to run all the above commands sequentially.

In order for imports to properly compile in your IDE, you may need to manually set your protobuf path in your IDE's workspace settings/config.

@ -268,8 +272,8 @@ and PRs are merged into `master`, if a contributor wishes the PR to be released
- **[Impact]** Explanation of how the bug affects users or developers.
- **[Test Case]** section with detailed instructions on how to reproduce the bug.
- **[Regression Potential]** section with a discussion how regressions are most likely to manifest, or might
manifest even if it's unlikely, as a result of the change. **It is assumed that any SRU candidate PR is
well-tested before it is merged in and has an overall low risk of regression**.
  manifest even if it's unlikely, as a result of the change. **It is assumed that any SRU candidate PR is
  well-tested before it is merged in and has an overall low risk of regression**.

It is the PR's author's responsibility to fix merge conflicts, update changelog entries, and
ensure CI passes. If a PR originates from an external contributor, it may be a core team member's

@ -280,7 +284,7 @@ Finally, when a point release is ready to be made:

1. Create `release/v0.38.N` branch
2. Ensure changelog entries are verified
   2. Be sure changelog entries are added to `RELEASE_CHANGELOG.md`
   1. Be sure changelog entries are added to `RELEASE_CHANGELOG.md`
3. Add release version date to the changelog
4. Push release branch along with the annotated tag: **git tag -a**
5. Create a PR into `master` containing ONLY `CHANGELOG.md` updates

Makefile
@ -10,7 +10,7 @@ BUILDDIR ?= $(CURDIR)/build
SIMAPP = ./simapp
MOCKS_DIR = $(CURDIR)/tests/mocks
HTTPS_GIT := https://github.com/cosmos/cosmos-sdk.git
DOCKER_BUF := docker run -v $(shell pwd):/workspace --workdir /workspace bufbuild/buf
DOCKER_BUF := docker run -v $(CURDIR):/workspace --workdir /workspace bufbuild/buf

export GO111MODULE = on

@ -356,40 +356,32 @@ devdoc-update:
### Protobuf ###
###############################################################################

proto-all: proto-tools proto-gen proto-lint proto-check-breaking proto-swagger-gen proto-format
proto-all: proto-format proto-lint proto-check-breaking proto-gen

proto-gen:
	@./scripts/protocgen.sh
	@echo "Generating Protobuf files"
	docker run -v $(CURDIR):/workspace --workdir /workspace tendermintdev/sdk-proto-gen sh ./scripts/protocgen.sh

proto-format:
	@echo "Formatting Protobuf files"
	docker run -v $(shell pwd):/workspace \
	docker run -v $(CURDIR):/workspace \
	--workdir /workspace tendermintdev/docker-build-proto \
	find ./ -not -path "./third_party/*" -name *.proto -exec clang-format -i {} \;
.PHONY: proto-format

# This generates the SDK's custom wrapper for google.protobuf.Any. It should only be run manually when needed
proto-gen-any:
	@./scripts/protocgen-any.sh
	docker run -v $(CURDIR):/workspace --workdir /workspace tendermintdev/sdk-proto-gen sh ./scripts/protocgen-any.sh

proto-swagger-gen:
	@./scripts/protoc-swagger-gen.sh

proto-lint:
	@buf check lint --error-format=json
	@$(DOCKER_BUF) check lint --error-format=json

proto-check-breaking:
	@buf check breaking --against-input '.git#branch=master'

proto-lint-docker:
	@$(DOCKER_BUF) check lint --error-format=json
.PHONY: proto-lint

proto-check-breaking-docker:
	@$(DOCKER_BUF) check breaking --against-input $(HTTPS_GIT)#branch=master
.PHONY: proto-check-breaking-ci

TM_URL = https://raw.githubusercontent.com/tendermint/tendermint/v0.34.0-rc5/proto/tendermint
TM_URL = https://raw.githubusercontent.com/tendermint/tendermint/v0.34.0-rc6/proto/tendermint
GOGO_PROTO_URL = https://raw.githubusercontent.com/regen-network/protobuf/cosmos
COSMOS_PROTO_URL = https://raw.githubusercontent.com/regen-network/cosmos-proto/master
CONFIO_URL = https://raw.githubusercontent.com/confio/ics23/v0.6.3

@ -440,7 +432,7 @@ proto-update-deps:
	## Issue link: https://github.com/confio/ics23/issues/32
	@sed -i '4ioption go_package = "github.com/confio/ics23/go";' $(CONFIO_TYPES)/proofs.proto

.PHONY: proto-all proto-gen proto-lint proto-check-breaking proto-update-deps
.PHONY: proto-all proto-gen proto-gen-any proto-swagger-gen proto-format proto-lint proto-check-breaking proto-update-deps

###############################################################################
### Localnet ###

@ -354,6 +354,10 @@ func (app *BaseApp) halt() {

// snapshot takes a snapshot of the current state and prunes any old snapshottypes.
func (app *BaseApp) snapshot(height int64) {
	if app.snapshotManager == nil {
		app.logger.Info("snapshot manager not configured")
		return
	}
	app.logger.Info("Creating state snapshot", "height", height)
	snapshot, err := app.snapshotManager.Create(uint64(height))
	if err != nil {
@ -447,6 +451,11 @@ func (app *BaseApp) LoadSnapshotChunk(req abci.RequestLoadSnapshotChunk) abci.Re

// OfferSnapshot implements the ABCI interface. It delegates to app.snapshotManager if set.
func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOfferSnapshot {
	if app.snapshotManager == nil {
		app.logger.Error("snapshot manager not configured")
		return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_ABORT}
	}

	if req.Snapshot == nil {
		app.logger.Error("Received nil snapshot")
		return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT}
@ -481,6 +490,11 @@ func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOf

// ApplySnapshotChunk implements the ABCI interface. It delegates to app.snapshotManager if set.
func (app *BaseApp) ApplySnapshotChunk(req abci.RequestApplySnapshotChunk) abci.ResponseApplySnapshotChunk {
	if app.snapshotManager == nil {
		app.logger.Error("snapshot manager not configured")
		return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ABORT}
	}

	_, err := app.snapshotManager.RestoreChunk(req.Chunk)
	switch {
	case err == nil:
@ -537,9 +551,24 @@ func gRPCErrorToSDKError(err error) error {
	}
}

func checkNegativeHeight(height int64) error {
	if height < 0 {
		// Reject invalid heights.
		return sdkerrors.Wrap(
			sdkerrors.ErrInvalidRequest,
			"cannot query with height < 0; please provide a valid height",
		)
	}
	return nil
}

// createQueryContext creates a new sdk.Context for a query, taking as args
// the block height and whether the query needs a proof or not.
func (app *BaseApp) createQueryContext(height int64, prove bool) (sdk.Context, error) {
	if err := checkNegativeHeight(height); err != nil {
		return sdk.Context{}, err
	}

	// when a client did not provide a query height, manually inject the latest
	if height == 0 {
		height = app.LastBlockHeight()

@ -1,6 +1,7 @@
|
|||
package baseapp
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
@ -116,3 +117,25 @@ func TestGetBlockRentionHeight(t *testing.T) {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Test and ensure that negative heights always cause errors.
|
||||
// See issue https://github.com/cosmos/cosmos-sdk/issues/7662.
|
||||
func TestBaseAppCreateQueryContextRejectsNegativeHeights(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
logger := defaultLogger()
|
||||
db := dbm.NewMemDB()
|
||||
name := t.Name()
|
||||
app := NewBaseApp(name, logger, db, nil)
|
||||
|
||||
proves := []bool{
|
||||
false, true,
|
||||
}
|
||||
for _, prove := range proves {
|
||||
t.Run(fmt.Sprintf("prove=%t", prove), func(t *testing.T) {
|
||||
sctx, err := app.createQueryContext(-10, true)
|
||||
require.Error(t, err)
|
||||
require.Equal(t, sctx, sdk.Context{})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,7 +10,6 @@ import (
|
|||
"google.golang.org/grpc/encoding/proto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client/grpc/reflection"
|
||||
"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
|
||||
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
@ -54,13 +53,31 @@ func (qrt *GRPCQueryRouter) Route(path string) GRPCQueryHandler {
|
|||
}
|
||||
|
||||
// RegisterService implements the gRPC Server.RegisterService method. sd is a gRPC
|
||||
// service description, handler is an object which implements that gRPC service
|
||||
// service description, handler is an object which implements that gRPC service/
|
||||
//
|
||||
// This functions PANICS:
|
||||
// - if a protobuf service is registered twice.
|
||||
func (qrt *GRPCQueryRouter) RegisterService(sd *grpc.ServiceDesc, handler interface{}) {
|
||||
// adds a top-level query handler based on the gRPC service name
|
||||
for _, method := range sd.Methods {
|
||||
fqName := fmt.Sprintf("/%s/%s", sd.ServiceName, method.MethodName)
|
||||
methodHandler := method.Handler
|
||||
|
||||
// Check that each service is only registered once. If a service is
|
||||
// registered more than once, then we should error. Since we can't
|
||||
// return an error (`Server.RegisterService` interface restriction) we
|
||||
// panic (at startup).
|
||||
_, found := qrt.routes[fqName]
|
||||
if found {
|
||||
panic(
|
||||
fmt.Errorf(
|
||||
"gRPC query service %s has already been registered. Please make sure to only register each service once. "+
|
||||
"This usually means that there are conflicting modules registering the same gRPC query service",
|
||||
fqName,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
qrt.routes[fqName] = func(ctx sdk.Context, req abci.RequestQuery) (abci.ResponseQuery, error) {
|
||||
// call the method handler from the service description with the handler object,
|
||||
// a wrapped sdk.Context with proto-unmarshaled data from the ABCI request data
|
||||
|
@ -110,14 +127,3 @@ func (qrt *GRPCQueryRouter) SetInterfaceRegistry(interfaceRegistry codectypes.In
|
|||
reflection.NewReflectionServiceServer(interfaceRegistry),
|
||||
)
|
||||
}
|
||||
|
||||
// RegisterSimulateService registers the simulate service on the gRPC router.
|
||||
func (qrt *GRPCQueryRouter) RegisterSimulateService(
|
||||
simulateFn simulate.BaseAppSimulateFn,
|
||||
interfaceRegistry codectypes.InterfaceRegistry,
|
||||
) {
|
||||
simulate.RegisterSimulateServiceServer(
|
||||
qrt,
|
||||
simulate.NewSimulateServer(simulateFn, interfaceRegistry),
|
||||
)
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@ import (
|
|||
// service client.
|
||||
type QueryServiceTestHelper struct {
|
||||
*GRPCQueryRouter
|
||||
ctx sdk.Context
|
||||
Ctx sdk.Context
|
||||
}
|
||||
|
||||
var (
|
||||
|
@ -31,7 +31,7 @@ var (
|
|||
func NewQueryServerTestHelper(ctx sdk.Context, interfaceRegistry types.InterfaceRegistry) *QueryServiceTestHelper {
|
||||
qrt := NewGRPCQueryRouter()
|
||||
qrt.SetInterfaceRegistry(interfaceRegistry)
|
||||
return &QueryServiceTestHelper{GRPCQueryRouter: qrt, ctx: ctx}
|
||||
return &QueryServiceTestHelper{GRPCQueryRouter: qrt, Ctx: ctx}
|
||||
}
|
||||
|
||||
// Invoke implements the grpc ClientConn.Invoke method
|
||||
|
@ -45,7 +45,7 @@ func (q *QueryServiceTestHelper) Invoke(_ gocontext.Context, method string, args
|
|||
return err
|
||||
}
|
||||
|
||||
res, err := querier(q.ctx, abci.RequestQuery{Data: reqBz})
|
||||
res, err := querier(q.Ctx, abci.RequestQuery{Data: reqBz})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -1,24 +1,29 @@
|
|||
package baseapp
|
||||
package baseapp_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/tendermint/tendermint/libs/log"
|
||||
dbm "github.com/tendermint/tm-db"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/baseapp"
|
||||
"github.com/cosmos/cosmos-sdk/codec/types"
|
||||
"github.com/cosmos/cosmos-sdk/simapp"
|
||||
"github.com/cosmos/cosmos-sdk/testutil/testdata"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
func TestGRPCRouter(t *testing.T) {
|
||||
qr := NewGRPCQueryRouter()
|
||||
qr := baseapp.NewGRPCQueryRouter()
|
||||
interfaceRegistry := testdata.NewTestInterfaceRegistry()
|
||||
qr.SetInterfaceRegistry(interfaceRegistry)
|
||||
testdata.RegisterQueryServer(qr, testdata.QueryImpl{})
|
||||
helper := &QueryServiceTestHelper{
|
||||
helper := &baseapp.QueryServiceTestHelper{
|
||||
GRPCQueryRouter: qr,
|
||||
ctx: sdk.Context{}.WithContext(context.Background()),
|
||||
Ctx: sdk.Context{}.WithContext(context.Background()),
|
||||
}
|
||||
client := testdata.NewQueryClient(helper)
|
||||
|
||||
|
@ -44,3 +49,28 @@ func TestGRPCRouter(t *testing.T) {
|
|||
require.NotNil(t, res3)
|
||||
require.Equal(t, spot, res3.HasAnimal.Animal.GetCachedValue())
|
||||
}
|
||||
|
||||
func TestRegisterQueryServiceTwice(t *testing.T) {
|
||||
// Setup baseapp.
|
||||
db := dbm.NewMemDB()
|
||||
encCfg := simapp.MakeTestEncodingConfig()
|
||||
app := baseapp.NewBaseApp("test", log.NewTMLogger(log.NewSyncWriter(os.Stdout)), db, encCfg.TxConfig.TxDecoder())
|
||||
app.SetInterfaceRegistry(encCfg.InterfaceRegistry)
|
||||
testdata.RegisterInterfaces(encCfg.InterfaceRegistry)
|
||||
|
||||
// First time registering service shouldn't panic.
|
||||
require.NotPanics(t, func() {
|
||||
testdata.RegisterQueryServer(
|
||||
app.GRPCQueryRouter(),
|
||||
testdata.QueryImpl{},
|
||||
)
|
||||
})
|
||||
|
||||
// Second time should panic.
|
||||
require.Panics(t, func() {
|
||||
testdata.RegisterQueryServer(
|
||||
app.GRPCQueryRouter(),
|
||||
testdata.QueryImpl{},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@ import (
	"google.golang.org/grpc/status"

	sdk "github.com/cosmos/cosmos-sdk/types"
	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
	grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"
)

@ -33,6 +34,11 @@ func (app *BaseApp) RegisterGRPCServer(server gogogrpc.Server) {
		if heightHeaders := md.Get(grpctypes.GRPCBlockHeightHeader); len(heightHeaders) > 0 {
			height, err = strconv.ParseInt(heightHeaders[0], 10, 64)
			if err != nil {
				return nil, sdkerrors.Wrapf(
					sdkerrors.ErrInvalidRequest,
					"Baseapp.RegisterGRPCServer: invalid height header %q: %v", grpctypes.GRPCBlockHeightHeader, err)
			}
			if err := checkNegativeHeight(height); err != nil {
				return nil, err
			}
		}

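A note on the hunk above: gRPC queries read the block height from the `x-cosmos-block-height` metadata header (the `grpctypes.GRPCBlockHeightHeader` constant), and malformed or negative values are now rejected before a query context is built. The sketch below shows one way a client could set that header; it is illustrative only — the endpoint, the bank query client, and the height value are assumptions, while the header constant and the gRPC `metadata` API are standard.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/metadata"

	grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"
	banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
)

func main() {
	// Assumed local node exposing the gRPC query service on the default port.
	conn, err := grpc.Dial("localhost:9090", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Ask the node to run the query against state at height 100 by setting the
	// x-cosmos-block-height header; a negative or non-numeric value is rejected
	// by BaseApp.RegisterGRPCServer with ErrInvalidRequest.
	ctx := metadata.AppendToOutgoingContext(context.Background(), grpctypes.GRPCBlockHeightHeader, "100")

	res, err := banktypes.NewQueryClient(conn).Params(ctx, &banktypes.QueryParamsRequest{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res)
}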
@ -39,27 +39,49 @@ func (msr *MsgServiceRouter) Handler(methodName string) MsgServiceHandler {
|
|||
|
||||
// RegisterService implements the gRPC Server.RegisterService method. sd is a gRPC
|
||||
// service description, handler is an object which implements that gRPC service.
|
||||
//
|
||||
// This function PANICs:
|
||||
// - if it is called before the service `Msg`s have been registered using
|
||||
// RegisterInterfaces,
|
||||
// - or if a service is being registered twice.
|
||||
func (msr *MsgServiceRouter) RegisterService(sd *grpc.ServiceDesc, handler interface{}) {
|
||||
// Adds a top-level query handler based on the gRPC service name.
|
||||
for _, method := range sd.Methods {
|
||||
fqMethod := fmt.Sprintf("/%s/%s", sd.ServiceName, method.MethodName)
|
||||
methodHandler := method.Handler
|
||||
|
||||
// NOTE: This is how we pull the concrete request type for each handler for registering in the InterfaceRegistry.
|
||||
// This approach is maybe a bit hacky, but less hacky than reflecting on the handler object itself.
|
||||
// We use a no-op interceptor to avoid actually calling into the handler itself.
|
||||
_, _ = methodHandler(nil, context.Background(), func(i interface{}) error {
|
||||
msg, ok := i.(proto.Message)
|
||||
if !ok {
|
||||
// We panic here because there is no other alternative and the app cannot be initialized correctly
|
||||
// this should only happen if there is a problem with code generation in which case the app won't
|
||||
// work correctly anyway.
|
||||
panic(fmt.Errorf("can't register request type %T for service method %s", i, fqMethod))
|
||||
}
|
||||
// Check that the service Msg fully-qualified method name has already
|
||||
// been registered (via RegisterInterfaces). If the user registers a
|
||||
// service without registering according service Msg type, there might be
|
||||
// some unexpected behavior down the road. Since we can't return an error
|
||||
// (`Server.RegisterService` interface restriction) we panic (at startup).
|
||||
serviceMsg, err := msr.interfaceRegistry.Resolve(fqMethod)
|
||||
if err != nil || serviceMsg == nil {
|
||||
panic(
|
||||
fmt.Errorf(
|
||||
"type_url %s has not been registered yet. "+
|
||||
"Before calling RegisterService, you must register all interfaces by calling the `RegisterInterfaces` "+
|
||||
"method on module.BasicManager. Each module should call `msgservice.RegisterMsgServiceDesc` inside its "+
|
||||
"`RegisterInterfaces` method with the `_Msg_serviceDesc` generated by proto-gen",
|
||||
fqMethod,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
msr.interfaceRegistry.RegisterCustomTypeURL((*sdk.MsgRequest)(nil), fqMethod, msg)
|
||||
return nil
|
||||
}, noopInterceptor)
|
||||
// Check that each service is only registered once. If a service is
|
||||
// registered more than once, then we should error. Since we can't
|
||||
// return an error (`Server.RegisterService` interface restriction) we
|
||||
// panic (at startup).
|
||||
_, found := msr.routes[fqMethod]
|
||||
if found {
|
||||
panic(
|
||||
fmt.Errorf(
|
||||
"msg service %s has already been registered. Please make sure to only register each service once. "+
|
||||
"This usually means that there are conflicting modules registering the same msg service",
|
||||
fqMethod,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
msr.routes[fqMethod] = func(ctx sdk.Context, req sdk.MsgRequest) (*sdk.Result, error) {
|
||||
ctx = ctx.WithEventManager(sdk.NewEventManager())
|
||||
|
@ -89,9 +111,4 @@ func (msr *MsgServiceRouter) SetInterfaceRegistry(interfaceRegistry codectypes.I
|
|||
msr.interfaceRegistry = interfaceRegistry
|
||||
}
|
||||
|
||||
// gRPC NOOP interceptor
|
||||
func noopInterceptor(_ context.Context, _ interface{}, _ *grpc.UnaryServerInfo, _ grpc.UnaryHandler) (interface{}, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func noopDecoder(_ interface{}) error { return nil }
|
||||
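For orientation, this router is what a module's `Msg` service ends up registered with when `AppModule.RegisterServices` is called with the `Configurator` mentioned in the CHANGELOG above. The sketch below is illustrative only: `mymodule`, its keeper, and its generated `types` package are hypothetical, and it assumes the v0.40 `module.Configurator` with `MsgServer()` and `QueryServer()` accessors; the service's type URLs must already have been registered via `RegisterInterfaces`, otherwise `RegisterService` panics at startup as described in the code above.

package mymodule // hypothetical module, for illustration only

import (
	"github.com/cosmos/cosmos-sdk/types/module"

	"example.com/mymodule/keeper" // hypothetical packages
	"example.com/mymodule/types"
)

// AppModule is a sketch of a module wiring its protobuf services.
type AppModule struct {
	keeper keeper.Keeper
}

// RegisterServices hands the module's Msg and Query service implementations to
// the app's MsgServiceRouter and GRPCQueryRouter through the Configurator.
func (am AppModule) RegisterServices(cfg module.Configurator) {
	types.RegisterMsgServer(cfg.MsgServer(), keeper.NewMsgServerImpl(am.keeper))
	types.RegisterQueryServer(cfg.QueryServer(), am.keeper)
}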
|
|
|
@ -4,15 +4,13 @@ import (
|
|||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/baseapp"
|
||||
|
||||
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
abci "github.com/tendermint/tendermint/abci/types"
|
||||
"github.com/tendermint/tendermint/libs/log"
|
||||
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
|
||||
dbm "github.com/tendermint/tm-db"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/baseapp"
|
||||
"github.com/cosmos/cosmos-sdk/client/tx"
|
||||
"github.com/cosmos/cosmos-sdk/simapp"
|
||||
"github.com/cosmos/cosmos-sdk/testutil/testdata"
|
||||
|
@ -20,9 +18,59 @@ import (
|
|||
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
|
||||
)
|
||||
|
||||
func TestRegisterMsgService(t *testing.T) {
|
||||
db := dbm.NewMemDB()
|
||||
|
||||
// Create an encoding config that doesn't register testdata Msg services.
|
||||
encCfg := simapp.MakeTestEncodingConfig()
|
||||
app := baseapp.NewBaseApp("test", log.NewTMLogger(log.NewSyncWriter(os.Stdout)), db, encCfg.TxConfig.TxDecoder())
|
||||
app.SetInterfaceRegistry(encCfg.InterfaceRegistry)
|
||||
require.Panics(t, func() {
|
||||
testdata.RegisterMsgServer(
|
||||
app.MsgServiceRouter(),
|
||||
testdata.MsgServerImpl{},
|
||||
)
|
||||
})
|
||||
|
||||
// Register testdata Msg services, and rerun `RegisterService`.
|
||||
testdata.RegisterInterfaces(encCfg.InterfaceRegistry)
|
||||
require.NotPanics(t, func() {
|
||||
testdata.RegisterMsgServer(
|
||||
app.MsgServiceRouter(),
|
||||
testdata.MsgServerImpl{},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRegisterMsgServiceTwice(t *testing.T) {
|
||||
// Setup baseapp.
|
||||
db := dbm.NewMemDB()
|
||||
encCfg := simapp.MakeTestEncodingConfig()
|
||||
app := baseapp.NewBaseApp("test", log.NewTMLogger(log.NewSyncWriter(os.Stdout)), db, encCfg.TxConfig.TxDecoder())
|
||||
app.SetInterfaceRegistry(encCfg.InterfaceRegistry)
|
||||
testdata.RegisterInterfaces(encCfg.InterfaceRegistry)
|
||||
|
||||
// First time registering service shouldn't panic.
|
||||
require.NotPanics(t, func() {
|
||||
testdata.RegisterMsgServer(
|
||||
app.MsgServiceRouter(),
|
||||
testdata.MsgServerImpl{},
|
||||
)
|
||||
})
|
||||
|
||||
// Second time should panic.
|
||||
require.Panics(t, func() {
|
||||
testdata.RegisterMsgServer(
|
||||
app.MsgServiceRouter(),
|
||||
testdata.MsgServerImpl{},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
func TestMsgService(t *testing.T) {
|
||||
priv, _, _ := testdata.KeyTestPubAddr()
|
||||
encCfg := simapp.MakeTestEncodingConfig()
|
||||
testdata.RegisterInterfaces(encCfg.InterfaceRegistry)
|
||||
db := dbm.NewMemDB()
|
||||
app := baseapp.NewBaseApp("test", log.NewTMLogger(log.NewSyncWriter(os.Stdout)), db, encCfg.TxConfig.TxDecoder())
|
||||
app.SetInterfaceRegistry(encCfg.InterfaceRegistry)
|
||||
|
|
|
@ -204,6 +204,10 @@ func (app *BaseApp) SetSnapshotStore(snapshotStore *snapshots.Store) {
|
|||
if app.sealed {
|
||||
panic("SetSnapshotStore() on sealed BaseApp")
|
||||
}
|
||||
if snapshotStore == nil {
|
||||
app.snapshotManager = nil
|
||||
return
|
||||
}
|
||||
app.snapshotManager = snapshots.NewManager(snapshotStore, app.cms)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,15 +1,14 @@
|
|||
package client
|
||||
|
||||
import (
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
// Account defines a read-only version of the auth module's AccountI.
|
||||
type Account interface {
|
||||
GetAddress() sdk.AccAddress
|
||||
GetPubKey() crypto.PubKey // can return nil.
|
||||
GetPubKey() cryptotypes.PubKey // can return nil.
|
||||
GetAccountNumber() uint64
|
||||
GetSequence() uint64
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
	"github.com/spf13/cobra"
	"github.com/spf13/pflag"
	"github.com/tendermint/tendermint/libs/cli"
	rpchttp "github.com/tendermint/tendermint/rpc/client/http"

	"github.com/cosmos/cosmos-sdk/client/flags"
	sdk "github.com/cosmos/cosmos-sdk/types"

@ -131,6 +132,13 @@ func ReadPersistentCommandFlags(clientCtx Context, flagSet *pflag.FlagSet) (Cont
		rpcURI, _ := flagSet.GetString(flags.FlagNode)
		if rpcURI != "" {
			clientCtx = clientCtx.WithNodeURI(rpcURI)

			client, err := rpchttp.New(rpcURI, "/websocket")
			if err != nil {
				return clientCtx, err
			}

			clientCtx = clientCtx.WithClient(client)
		}
	}

@ -10,7 +10,6 @@ import (
	"github.com/gogo/protobuf/proto"
	"github.com/pkg/errors"
	rpcclient "github.com/tendermint/tendermint/rpc/client"
	rpchttp "github.com/tendermint/tendermint/rpc/client/http"

	"github.com/cosmos/cosmos-sdk/codec"
	codectypes "github.com/cosmos/cosmos-sdk/codec/types"

@ -95,12 +94,6 @@ func (ctx Context) WithOutputFormat(format string) Context {
// WithNodeURI returns a copy of the context with an updated node URI.
func (ctx Context) WithNodeURI(nodeURI string) Context {
	ctx.NodeURI = nodeURI
	client, err := rpchttp.New(nodeURI, "/websocket")
	if err != nil {
		panic(err)
	}

	ctx.Client = client
	return ctx
}

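Taken together, the two client hunks above change the calling pattern: `WithNodeURI` now only records the URI (and no longer panics while building an RPC client), so callers that relied on it to set the `Client` must construct one explicitly, as `ReadPersistentCommandFlags` does. A rough sketch of that pattern, assuming a local Tendermint RPC endpoint:

package main

import (
	"log"

	rpchttp "github.com/tendermint/tendermint/rpc/client/http"

	"github.com/cosmos/cosmos-sdk/client"
)

// newClientContext mirrors what ReadPersistentCommandFlags now does: record the
// node URI, then build the websocket RPC client and attach it separately.
func newClientContext(nodeURI string) (client.Context, error) {
	clientCtx := client.Context{}.WithNodeURI(nodeURI)

	rpcClient, err := rpchttp.New(nodeURI, "/websocket")
	if err != nil {
		return client.Context{}, err
	}

	return clientCtx.WithClient(rpcClient), nil
}

func main() {
	// Assumed local node RPC address.
	ctx, err := newClientContext("tcp://localhost:26657")
	if err != nil {
		log.Fatal(err)
	}
	_ = ctx
}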
@ -8,10 +8,10 @@ import (
|
|||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
"github.com/cosmos/cosmos-sdk/types/errors"
|
||||
"github.com/cosmos/cosmos-sdk/version"
|
||||
|
@ -34,7 +34,7 @@ func Cmd() *cobra.Command {
|
|||
// getPubKeyFromString returns a Tendermint PubKey (PubKeyEd25519) by attempting
|
||||
// to decode the pubkey string from hex, base64, and finally bech32. If all
|
||||
// encodings fail, an error is returned.
|
||||
func getPubKeyFromString(pkstr string) (crypto.PubKey, error) {
|
||||
func getPubKeyFromString(pkstr string) (cryptotypes.PubKey, error) {
|
||||
bz, err := hex.DecodeString(pkstr)
|
||||
if err == nil {
|
||||
if len(bz) == ed25519.PubKeySize {
|
||||
|
|
|
@ -94,7 +94,7 @@
|
|||
}
|
||||
},
|
||||
{
|
||||
"url": "./tmp-swagger-gen/ibc/channel/query.swagger.json",
|
||||
"url": "./tmp-swagger-gen/ibc/core/channel/v1/query.swagger.json",
|
||||
"operationIds": {
|
||||
"rename": {
|
||||
"Params": "IBCChannelParams"
|
||||
|
@ -102,7 +102,7 @@
|
|||
}
|
||||
},
|
||||
{
|
||||
"url": "./tmp-swagger-gen/ibc/client/query.swagger.json",
|
||||
"url": "./tmp-swagger-gen/ibc/core/client/v1/query.swagger.json",
|
||||
"operationIds": {
|
||||
"rename": {
|
||||
"Params": "IBCClientParams"
|
||||
|
@ -110,7 +110,7 @@
|
|||
}
|
||||
},
|
||||
{
|
||||
"url": "./tmp-swagger-gen/ibc/connection/query.swagger.json",
|
||||
"url": "./tmp-swagger-gen/ibc/core/connection/v1/query.swagger.json",
|
||||
"operationIds": {
|
||||
"rename": {
|
||||
"Params": "IBCConnectionParams"
|
||||
|
@ -118,7 +118,7 @@
|
|||
}
|
||||
},
|
||||
{
|
||||
"url": "./tmp-swagger-gen/ibc/transfer/query.swagger.json",
|
||||
"url": "./tmp-swagger-gen/ibc/applications/transfer/v1/query.swagger.json",
|
||||
"operationIds": {
|
||||
"rename": {
|
||||
"Params": "IBCTransferParams"
|
||||
|
|
|
@ -41773,4 +41773,4 @@
|
|||
}, o.resolve = i, e.exports = o, o.id = 1058
|
||||
}])
|
||||
});
|
||||
//# sourceMappingURL=swagger-ui-bundle.js.map
|
||||
//# sourceMappingURL=swagger-ui-bundle.js.map
|
||||
|
|
File diff suppressed because it is too large
|
@ -2595,4 +2595,4 @@ definitions:
|
|||
total:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/definitions/Coin"
|
||||
$ref: "#/definitions/Coin"
|
||||
|
|
|
@ -21,14 +21,9 @@ type IntegrationTestSuite struct {
|
|||
func (s *IntegrationTestSuite) SetupSuite() {
|
||||
app := simapp.Setup(false)
|
||||
|
||||
srv := reflection.NewReflectionServiceServer(app.InterfaceRegistry())
|
||||
|
||||
sdkCtx := app.BaseApp.NewContext(false, tmproto.Header{})
|
||||
queryHelper := baseapp.NewQueryServerTestHelper(sdkCtx, app.InterfaceRegistry())
|
||||
|
||||
reflection.RegisterReflectionServiceServer(queryHelper, srv)
|
||||
queryClient := reflection.NewReflectionServiceClient(queryHelper)
|
||||
|
||||
s.queryClient = queryClient
|
||||
}
|
||||
|
||||
|
|
|
@ -1,55 +0,0 @@
|
|||
package simulate
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"google.golang.org/grpc/codes"
|
||||
"google.golang.org/grpc/status"
|
||||
|
||||
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
// BaseAppSimulateFn is the signature of the Baseapp#Simulate function.
|
||||
type BaseAppSimulateFn func(txBytes []byte) (sdk.GasInfo, *sdk.Result, error)
|
||||
|
||||
type simulateServer struct {
|
||||
simulate BaseAppSimulateFn
|
||||
interfaceRegistry codectypes.InterfaceRegistry
|
||||
}
|
||||
|
||||
// NewSimulateServer creates a new SimulateServer.
|
||||
func NewSimulateServer(simulate BaseAppSimulateFn, interfaceRegistry codectypes.InterfaceRegistry) SimulateServiceServer {
|
||||
return simulateServer{
|
||||
simulate: simulate,
|
||||
interfaceRegistry: interfaceRegistry,
|
||||
}
|
||||
}
|
||||
|
||||
var _ SimulateServiceServer = simulateServer{}
|
||||
|
||||
// Simulate implements the SimulateService.Simulate RPC method.
|
||||
func (s simulateServer) Simulate(ctx context.Context, req *SimulateRequest) (*SimulateResponse, error) {
|
||||
if req.Tx == nil {
|
||||
return nil, status.Error(codes.InvalidArgument, "invalid empty tx")
|
||||
}
|
||||
|
||||
err := req.Tx.UnpackInterfaces(s.interfaceRegistry)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
txBytes, err := req.Tx.Marshal()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gasInfo, result, err := s.simulate(txBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &SimulateResponse{
|
||||
GasInfo: &gasInfo,
|
||||
Result: result,
|
||||
}, nil
|
||||
}
|
|
@ -1,679 +0,0 @@
|
|||
// Code generated by protoc-gen-gogo. DO NOT EDIT.
|
||||
// source: cosmos/base/simulate/v1beta1/simulate.proto
|
||||
|
||||
package simulate
|
||||
|
||||
import (
|
||||
context "context"
|
||||
fmt "fmt"
|
||||
types "github.com/cosmos/cosmos-sdk/types"
|
||||
tx "github.com/cosmos/cosmos-sdk/types/tx"
|
||||
grpc1 "github.com/gogo/protobuf/grpc"
|
||||
proto "github.com/gogo/protobuf/proto"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
grpc "google.golang.org/grpc"
|
||||
codes "google.golang.org/grpc/codes"
|
||||
status "google.golang.org/grpc/status"
|
||||
io "io"
|
||||
math "math"
|
||||
math_bits "math/bits"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
|
||||
|
||||
// SimulateRequest is the request type for the SimulateServiceService.Simulate
|
||||
// RPC method.
|
||||
type SimulateRequest struct {
|
||||
// tx is the transaction to simulate.
|
||||
Tx *tx.Tx `protobuf:"bytes,1,opt,name=tx,proto3" json:"tx,omitempty"`
|
||||
}
|
||||
|
||||
func (m *SimulateRequest) Reset() { *m = SimulateRequest{} }
|
||||
func (m *SimulateRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*SimulateRequest) ProtoMessage() {}
|
||||
func (*SimulateRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_503c836d80bb2d47, []int{0}
|
||||
}
|
||||
func (m *SimulateRequest) XXX_Unmarshal(b []byte) error {
|
||||
return m.Unmarshal(b)
|
||||
}
|
||||
func (m *SimulateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
if deterministic {
|
||||
return xxx_messageInfo_SimulateRequest.Marshal(b, m, deterministic)
|
||||
} else {
|
||||
b = b[:cap(b)]
|
||||
n, err := m.MarshalToSizedBuffer(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b[:n], nil
|
||||
}
|
||||
}
|
||||
func (m *SimulateRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_SimulateRequest.Merge(m, src)
|
||||
}
|
||||
func (m *SimulateRequest) XXX_Size() int {
|
||||
return m.Size()
|
||||
}
|
||||
func (m *SimulateRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_SimulateRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_SimulateRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *SimulateRequest) GetTx() *tx.Tx {
|
||||
if m != nil {
|
||||
return m.Tx
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SimulateResponse is the response type for the
|
||||
// SimulateServiceService.SimulateRPC method.
|
||||
type SimulateResponse struct {
|
||||
// gas_info is the information about gas used in the simulation.
|
||||
GasInfo *types.GasInfo `protobuf:"bytes,1,opt,name=gas_info,json=gasInfo,proto3" json:"gas_info,omitempty"`
|
||||
// result is the result of the simulation.
|
||||
Result *types.Result `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"`
|
||||
}
|
||||
|
||||
func (m *SimulateResponse) Reset() { *m = SimulateResponse{} }
|
||||
func (m *SimulateResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*SimulateResponse) ProtoMessage() {}
|
||||
func (*SimulateResponse) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_503c836d80bb2d47, []int{1}
|
||||
}
|
||||
func (m *SimulateResponse) XXX_Unmarshal(b []byte) error {
|
||||
return m.Unmarshal(b)
|
||||
}
|
||||
func (m *SimulateResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
if deterministic {
|
||||
return xxx_messageInfo_SimulateResponse.Marshal(b, m, deterministic)
|
||||
} else {
|
||||
b = b[:cap(b)]
|
||||
n, err := m.MarshalToSizedBuffer(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b[:n], nil
|
||||
}
|
||||
}
|
||||
func (m *SimulateResponse) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_SimulateResponse.Merge(m, src)
|
||||
}
|
||||
func (m *SimulateResponse) XXX_Size() int {
|
||||
return m.Size()
|
||||
}
|
||||
func (m *SimulateResponse) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_SimulateResponse.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_SimulateResponse proto.InternalMessageInfo
|
||||
|
||||
func (m *SimulateResponse) GetGasInfo() *types.GasInfo {
|
||||
if m != nil {
|
||||
return m.GasInfo
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *SimulateResponse) GetResult() *types.Result {
|
||||
if m != nil {
|
||||
return m.Result
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*SimulateRequest)(nil), "cosmos.base.simulate.v1beta1.SimulateRequest")
|
||||
proto.RegisterType((*SimulateResponse)(nil), "cosmos.base.simulate.v1beta1.SimulateResponse")
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterFile("cosmos/base/simulate/v1beta1/simulate.proto", fileDescriptor_503c836d80bb2d47)
|
||||
}
|
||||
|
||||
var fileDescriptor_503c836d80bb2d47 = []byte{
|
||||
// 351 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xbf, 0x4b, 0xf3, 0x40,
|
||||
0x18, 0xc7, 0x7b, 0x19, 0xfa, 0x96, 0x7b, 0x87, 0xf7, 0x25, 0x20, 0x94, 0x50, 0x42, 0x8d, 0x28,
|
||||
0x05, 0xe9, 0x1d, 0xad, 0x4b, 0x07, 0x27, 0x17, 0x11, 0xb7, 0xd4, 0xc9, 0x45, 0x2e, 0xf1, 0x1a,
|
||||
0x0f, 0xd3, 0xbb, 0x98, 0x7b, 0x52, 0x32, 0x3b, 0x3a, 0x09, 0x4e, 0xfe, 0x11, 0xfe, 0x1f, 0x8e,
|
||||
0x05, 0x17, 0x47, 0x69, 0xfd, 0x43, 0xa4, 0xc9, 0x25, 0x2d, 0x82, 0xd2, 0x29, 0xb9, 0xe7, 0xf9,
|
||||
0x7c, 0xbf, 0xcf, 0x2f, 0x7c, 0x18, 0x2a, 0x3d, 0x55, 0x9a, 0x06, 0x4c, 0x73, 0xaa, 0xc5, 0x34,
|
||||
0x8b, 0x19, 0x70, 0x3a, 0x1b, 0x04, 0x1c, 0xd8, 0xa0, 0x0e, 0x90, 0x24, 0x55, 0xa0, 0xec, 0x4e,
|
||||
0x09, 0x93, 0x15, 0x4c, 0xea, 0x9c, 0x81, 0x9d, 0x4e, 0xa4, 0x54, 0x14, 0x73, 0xca, 0x12, 0x41,
|
||||
0x99, 0x94, 0x0a, 0x18, 0x08, 0x25, 0x75, 0xa9, 0x75, 0xf6, 0x36, 0x0b, 0xb1, 0x20, 0x14, 0x75,
|
||||
0x91, 0xd5, 0xc3, 0x40, 0x8e, 0x81, 0x20, 0xaf, 0xb3, 0x90, 0x97, 0x39, 0x6f, 0x84, 0xff, 0x8d,
|
||||
0x4d, 0x49, 0x9f, 0xdf, 0x65, 0x5c, 0x83, 0xbd, 0x8f, 0x2d, 0xc8, 0xdb, 0xa8, 0x8b, 0x7a, 0x7f,
|
||||
0x87, 0x3b, 0xc4, 0x34, 0x07, 0x79, 0xd5, 0x11, 0xb9, 0xc8, 0x7d, 0x0b, 0x72, 0xef, 0x01, 0xe1,
|
||||
0xff, 0x6b, 0xa9, 0x4e, 0x94, 0xd4, 0xdc, 0x3e, 0xc6, 0xad, 0x88, 0xe9, 0x2b, 0x21, 0x27, 0xca,
|
||||
0x38, 0xec, 0x92, 0xcd, 0xf1, 0x8a, 0xae, 0x2a, 0xa3, 0x53, 0xa6, 0xcf, 0xe4, 0x44, 0xf9, 0x7f,
|
||||
0xa2, 0xf2, 0xc7, 0x1e, 0xe1, 0x66, 0xca, 0x75, 0x16, 0x43, 0xdb, 0x2a, 0xb4, 0xdd, 0x9f, 0xb5,
|
||||
0x7e, 0xc1, 0xf9, 0x86, 0x1f, 0xbe, 0xa0, 0xf5, 0x1c, 0x63, 0x9e, 0xce, 0x44, 0xc8, 0xed, 0x67,
|
||||
0x84, 0x5b, 0x55, 0xcc, 0xee, 0x93, 0xdf, 0xb6, 0x4c, 0xbe, 0xed, 0xc0, 0x21, 0xdb, 0xe2, 0xe5,
|
||||
0xdc, 0x1e, 0xb9, 0x7f, 0xfb, 0x7c, 0xb2, 0x7a, 0xde, 0x01, 0xdd, 0xea, 0xf2, 0x27, 0xe7, 0xaf,
|
||||
0x0b, 0x17, 0xcd, 0x17, 0x2e, 0xfa, 0x58, 0xb8, 0xe8, 0x71, 0xe9, 0x36, 0xe6, 0x4b, 0xb7, 0xf1,
|
||||
0xbe, 0x74, 0x1b, 0x97, 0x83, 0x48, 0xc0, 0x4d, 0x16, 0x90, 0x50, 0x4d, 0x2b, 0xaf, 0xf2, 0xd3,
|
||||
0xd7, 0xd7, 0xb7, 0x34, 0x8c, 0x05, 0x97, 0x40, 0xa3, 0x34, 0x09, 0x6b, 0xb3, 0xa0, 0x59, 0x9c,
|
||||
0xf2, 0xe8, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x7d, 0x16, 0xed, 0x90, 0x76, 0x02, 0x00, 0x00,
|
||||
}

// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// SimulateServiceClient is the client API for SimulateService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type SimulateServiceClient interface {
	// Simulate simulates executing a transaction for estimating gas usage.
	Simulate(ctx context.Context, in *SimulateRequest, opts ...grpc.CallOption) (*SimulateResponse, error)
}

type simulateServiceClient struct {
	cc grpc1.ClientConn
}

func NewSimulateServiceClient(cc grpc1.ClientConn) SimulateServiceClient {
	return &simulateServiceClient{cc}
}

func (c *simulateServiceClient) Simulate(ctx context.Context, in *SimulateRequest, opts ...grpc.CallOption) (*SimulateResponse, error) {
	out := new(SimulateResponse)
	err := c.cc.Invoke(ctx, "/cosmos.base.simulate.v1beta1.SimulateService/Simulate", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// SimulateServiceServer is the server API for SimulateService service.
type SimulateServiceServer interface {
	// Simulate simulates executing a transaction for estimating gas usage.
	Simulate(context.Context, *SimulateRequest) (*SimulateResponse, error)
}

// UnimplementedSimulateServiceServer can be embedded to have forward compatible implementations.
type UnimplementedSimulateServiceServer struct {
}

func (*UnimplementedSimulateServiceServer) Simulate(ctx context.Context, req *SimulateRequest) (*SimulateResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Simulate not implemented")
}

func RegisterSimulateServiceServer(s grpc1.Server, srv SimulateServiceServer) {
	s.RegisterService(&_SimulateService_serviceDesc, srv)
}

func _SimulateService_Simulate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(SimulateRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SimulateServiceServer).Simulate(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/cosmos.base.simulate.v1beta1.SimulateService/Simulate",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SimulateServiceServer).Simulate(ctx, req.(*SimulateRequest))
	}
	return interceptor(ctx, in, info, handler)
}

var _SimulateService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "cosmos.base.simulate.v1beta1.SimulateService",
	HandlerType: (*SimulateServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "Simulate",
			Handler: _SimulateService_Simulate_Handler,
		},
	},
	Streams: []grpc.StreamDesc{},
	Metadata: "cosmos/base/simulate/v1beta1/simulate.proto",
}
|
||||
|
||||
func (m *SimulateRequest) Marshal() (dAtA []byte, err error) {
|
||||
size := m.Size()
|
||||
dAtA = make([]byte, size)
|
||||
n, err := m.MarshalToSizedBuffer(dAtA[:size])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dAtA[:n], nil
|
||||
}
|
||||
|
||||
func (m *SimulateRequest) MarshalTo(dAtA []byte) (int, error) {
|
||||
size := m.Size()
|
||||
return m.MarshalToSizedBuffer(dAtA[:size])
|
||||
}
|
||||
|
||||
func (m *SimulateRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i := len(dAtA)
|
||||
_ = i
|
||||
var l int
|
||||
_ = l
|
||||
if m.Tx != nil {
|
||||
{
|
||||
size, err := m.Tx.MarshalToSizedBuffer(dAtA[:i])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
i -= size
|
||||
i = encodeVarintSimulate(dAtA, i, uint64(size))
|
||||
}
|
||||
i--
|
||||
dAtA[i] = 0xa
|
||||
}
|
||||
return len(dAtA) - i, nil
|
||||
}
|
||||
|
||||
func (m *SimulateResponse) Marshal() (dAtA []byte, err error) {
|
||||
size := m.Size()
|
||||
dAtA = make([]byte, size)
|
||||
n, err := m.MarshalToSizedBuffer(dAtA[:size])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dAtA[:n], nil
|
||||
}
|
||||
|
||||
func (m *SimulateResponse) MarshalTo(dAtA []byte) (int, error) {
|
||||
size := m.Size()
|
||||
return m.MarshalToSizedBuffer(dAtA[:size])
|
||||
}
|
||||
|
||||
func (m *SimulateResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i := len(dAtA)
|
||||
_ = i
|
||||
var l int
|
||||
_ = l
|
||||
if m.Result != nil {
|
||||
{
|
||||
size, err := m.Result.MarshalToSizedBuffer(dAtA[:i])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
i -= size
|
||||
i = encodeVarintSimulate(dAtA, i, uint64(size))
|
||||
}
|
||||
i--
|
||||
dAtA[i] = 0x12
|
||||
}
|
||||
if m.GasInfo != nil {
|
||||
{
|
||||
size, err := m.GasInfo.MarshalToSizedBuffer(dAtA[:i])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
i -= size
|
||||
i = encodeVarintSimulate(dAtA, i, uint64(size))
|
||||
}
|
||||
i--
|
||||
dAtA[i] = 0xa
|
||||
}
|
||||
return len(dAtA) - i, nil
|
||||
}
|
||||
|
||||
func encodeVarintSimulate(dAtA []byte, offset int, v uint64) int {
|
||||
offset -= sovSimulate(v)
|
||||
base := offset
|
||||
for v >= 1<<7 {
|
||||
dAtA[offset] = uint8(v&0x7f | 0x80)
|
||||
v >>= 7
|
||||
offset++
|
||||
}
|
||||
dAtA[offset] = uint8(v)
|
||||
return base
|
||||
}
|
||||
func (m *SimulateRequest) Size() (n int) {
|
||||
if m == nil {
|
||||
return 0
|
||||
}
|
||||
var l int
|
||||
_ = l
|
||||
if m.Tx != nil {
|
||||
l = m.Tx.Size()
|
||||
n += 1 + l + sovSimulate(uint64(l))
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (m *SimulateResponse) Size() (n int) {
|
||||
if m == nil {
|
||||
return 0
|
||||
}
|
||||
var l int
|
||||
_ = l
|
||||
if m.GasInfo != nil {
|
||||
l = m.GasInfo.Size()
|
||||
n += 1 + l + sovSimulate(uint64(l))
|
||||
}
|
||||
if m.Result != nil {
|
||||
l = m.Result.Size()
|
||||
n += 1 + l + sovSimulate(uint64(l))
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func sovSimulate(x uint64) (n int) {
|
||||
return (math_bits.Len64(x|1) + 6) / 7
|
||||
}
|
||||
func sozSimulate(x uint64) (n int) {
|
||||
return sovSimulate(uint64((x << 1) ^ uint64((int64(x) >> 63))))
|
||||
}
|
||||
func (m *SimulateRequest) Unmarshal(dAtA []byte) error {
|
||||
l := len(dAtA)
|
||||
iNdEx := 0
|
||||
for iNdEx < l {
|
||||
preIndex := iNdEx
|
||||
var wire uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
fieldNum := int32(wire >> 3)
|
||||
wireType := int(wire & 0x7)
|
||||
if wireType == 4 {
|
||||
return fmt.Errorf("proto: SimulateRequest: wiretype end group for non-group")
|
||||
}
|
||||
if fieldNum <= 0 {
|
||||
return fmt.Errorf("proto: SimulateRequest: illegal tag %d (wire type %d)", fieldNum, wire)
|
||||
}
|
||||
switch fieldNum {
|
||||
case 1:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field Tx", wireType)
|
||||
}
|
||||
var msglen int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
msglen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if msglen < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
postIndex := iNdEx + msglen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
if m.Tx == nil {
|
||||
m.Tx = &tx.Tx{}
|
||||
}
|
||||
if err := m.Tx.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
iNdEx = postIndex
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipSimulate(dAtA[iNdEx:])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if skippy < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
if (iNdEx + skippy) < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
if (iNdEx + skippy) > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
iNdEx += skippy
|
||||
}
|
||||
}
|
||||
|
||||
if iNdEx > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
return nil
|
||||
}
|
||||
func (m *SimulateResponse) Unmarshal(dAtA []byte) error {
|
||||
l := len(dAtA)
|
||||
iNdEx := 0
|
||||
for iNdEx < l {
|
||||
preIndex := iNdEx
|
||||
var wire uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
fieldNum := int32(wire >> 3)
|
||||
wireType := int(wire & 0x7)
|
||||
if wireType == 4 {
|
||||
return fmt.Errorf("proto: SimulateResponse: wiretype end group for non-group")
|
||||
}
|
||||
if fieldNum <= 0 {
|
||||
return fmt.Errorf("proto: SimulateResponse: illegal tag %d (wire type %d)", fieldNum, wire)
|
||||
}
|
||||
switch fieldNum {
|
||||
case 1:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field GasInfo", wireType)
|
||||
}
|
||||
var msglen int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
msglen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if msglen < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
postIndex := iNdEx + msglen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
if m.GasInfo == nil {
|
||||
m.GasInfo = &types.GasInfo{}
|
||||
}
|
||||
if err := m.GasInfo.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
iNdEx = postIndex
|
||||
case 2:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field Result", wireType)
|
||||
}
|
||||
var msglen int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
msglen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if msglen < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
postIndex := iNdEx + msglen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
if m.Result == nil {
|
||||
m.Result = &types.Result{}
|
||||
}
|
||||
if err := m.Result.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
iNdEx = postIndex
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipSimulate(dAtA[iNdEx:])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if skippy < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
if (iNdEx + skippy) < 0 {
|
||||
return ErrInvalidLengthSimulate
|
||||
}
|
||||
if (iNdEx + skippy) > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
iNdEx += skippy
|
||||
}
|
||||
}
|
||||
|
||||
if iNdEx > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
return nil
|
||||
}
|
||||
func skipSimulate(dAtA []byte) (n int, err error) {
|
||||
l := len(dAtA)
|
||||
iNdEx := 0
|
||||
depth := 0
|
||||
for iNdEx < l {
|
||||
var wire uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return 0, ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= (uint64(b) & 0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
wireType := int(wire & 0x7)
|
||||
switch wireType {
|
||||
case 0:
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return 0, ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
iNdEx++
|
||||
if dAtA[iNdEx-1] < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
case 1:
|
||||
iNdEx += 8
|
||||
case 2:
|
||||
var length int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return 0, ErrIntOverflowSimulate
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
length |= (int(b) & 0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if length < 0 {
|
||||
return 0, ErrInvalidLengthSimulate
|
||||
}
|
||||
iNdEx += length
|
||||
case 3:
|
||||
depth++
|
||||
case 4:
|
||||
if depth == 0 {
|
||||
return 0, ErrUnexpectedEndOfGroupSimulate
|
||||
}
|
||||
depth--
|
||||
case 5:
|
||||
iNdEx += 4
|
||||
default:
|
||||
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
|
||||
}
|
||||
if iNdEx < 0 {
|
||||
return 0, ErrInvalidLengthSimulate
|
||||
}
|
||||
if depth == 0 {
|
||||
return iNdEx, nil
|
||||
}
|
||||
}
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
var (
|
||||
ErrInvalidLengthSimulate = fmt.Errorf("proto: negative length found during unmarshaling")
|
||||
ErrIntOverflowSimulate = fmt.Errorf("proto: integer overflow")
|
||||
ErrUnexpectedEndOfGroupSimulate = fmt.Errorf("proto: unexpected end of group")
|
||||
)
|
|
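The generated code above defines both sides of the SimulateService RPC: a client that forwards Simulate calls over a grpc1.ClientConn, and a server interface plus its registration helper. As a minimal, hypothetical sketch (not part of this commit, written as if it lived in the same simulate package), a server type would normally embed UnimplementedSimulateServiceServer for forward compatibility and override only Simulate; the fixedGasServer name and the gas figure are made up for illustration:

package simulate

import (
	"context"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// fixedGasServer is a stand-in implementation that always reports the same
// gas usage; a real server would run the transaction through the app's
// Simulate function instead.
type fixedGasServer struct {
	UnimplementedSimulateServiceServer
}

func (fixedGasServer) Simulate(_ context.Context, _ *SimulateRequest) (*SimulateResponse, error) {
	return &SimulateResponse{GasInfo: &sdk.GasInfo{GasUsed: 100000}}, nil
}

// The value can then be passed to RegisterSimulateServiceServer on any
// grpc1.Server implementation.
var _ SimulateServiceServer = fixedGasServer{}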
@ -1,166 +0,0 @@
|
|||
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
|
||||
// source: cosmos/base/simulate/v1beta1/simulate.proto
|
||||
|
||||
/*
|
||||
Package simulate is a reverse proxy.
|
||||
|
||||
It translates gRPC into RESTful JSON APIs.
|
||||
*/
|
||||
package simulate
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/golang/protobuf/descriptor"
|
||||
"github.com/golang/protobuf/proto"
|
||||
"github.com/grpc-ecosystem/grpc-gateway/runtime"
|
||||
"github.com/grpc-ecosystem/grpc-gateway/utilities"
|
||||
"google.golang.org/grpc"
|
||||
"google.golang.org/grpc/codes"
|
||||
"google.golang.org/grpc/grpclog"
|
||||
"google.golang.org/grpc/status"
|
||||
)
|
||||
|
||||
// Suppress "imported and not used" errors
|
||||
var _ codes.Code
|
||||
var _ io.Reader
|
||||
var _ status.Status
|
||||
var _ = runtime.String
|
||||
var _ = utilities.NewDoubleArray
|
||||
var _ = descriptor.ForMessage
|
||||
|
||||
var (
|
||||
filter_SimulateService_Simulate_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
|
||||
)
|
||||
|
||||
func request_SimulateService_Simulate_0(ctx context.Context, marshaler runtime.Marshaler, client SimulateServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq SimulateRequest
|
||||
var metadata runtime.ServerMetadata
|
||||
|
||||
if err := req.ParseForm(); err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
|
||||
}
|
||||
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SimulateService_Simulate_0); err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
|
||||
}
|
||||
|
||||
msg, err := client.Simulate(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
|
||||
return msg, metadata, err
|
||||
|
||||
}
|
||||
|
||||
func local_request_SimulateService_Simulate_0(ctx context.Context, marshaler runtime.Marshaler, server SimulateServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq SimulateRequest
|
||||
var metadata runtime.ServerMetadata
|
||||
|
||||
if err := req.ParseForm(); err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
|
||||
}
|
||||
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SimulateService_Simulate_0); err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
|
||||
}
|
||||
|
||||
msg, err := server.Simulate(ctx, &protoReq)
|
||||
return msg, metadata, err
|
||||
|
||||
}
|
||||
|
||||
// RegisterSimulateServiceHandlerServer registers the http handlers for service SimulateService to "mux".
|
||||
// UnaryRPC :call SimulateServiceServer directly.
|
||||
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
|
||||
// Note that using this registration option will cause many gRPC library features (such as grpc.SendHeader, etc) to stop working. Consider using RegisterSimulateServiceHandlerFromEndpoint instead.
|
||||
func RegisterSimulateServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server SimulateServiceServer) error {
|
||||
|
||||
mux.Handle("POST", pattern_SimulateService_Simulate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := local_request_SimulateService_Simulate_0(rctx, inboundMarshaler, server, req, pathParams)
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_SimulateService_Simulate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// RegisterSimulateServiceHandlerFromEndpoint is same as RegisterSimulateServiceHandler but
|
||||
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
|
||||
func RegisterSimulateServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
|
||||
conn, err := grpc.Dial(endpoint, opts...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err != nil {
|
||||
if cerr := conn.Close(); cerr != nil {
|
||||
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
|
||||
}
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
if cerr := conn.Close(); cerr != nil {
|
||||
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
|
||||
}
|
||||
}()
|
||||
}()
|
||||
|
||||
return RegisterSimulateServiceHandler(ctx, mux, conn)
|
||||
}
|
||||
|
||||
// RegisterSimulateServiceHandler registers the http handlers for service SimulateService to "mux".
|
||||
// The handlers forward requests to the grpc endpoint over "conn".
|
||||
func RegisterSimulateServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
|
||||
return RegisterSimulateServiceHandlerClient(ctx, mux, NewSimulateServiceClient(conn))
|
||||
}
|
||||
|
||||
// RegisterSimulateServiceHandlerClient registers the http handlers for service SimulateService
|
||||
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "SimulateServiceClient".
|
||||
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "SimulateServiceClient"
|
||||
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
|
||||
// "SimulateServiceClient" to call the correct interceptors.
|
||||
func RegisterSimulateServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client SimulateServiceClient) error {
|
||||
|
||||
mux.Handle("POST", pattern_SimulateService_Simulate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateContext(ctx, mux, req)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := request_SimulateService_Simulate_0(rctx, inboundMarshaler, client, req, pathParams)
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_SimulateService_Simulate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
pattern_SimulateService_Simulate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 2}, []string{"cosmos", "base", "simulate", "v1beta1"}, "", runtime.AssumeColonVerbOpt(true)))
|
||||
)
|
||||
|
||||
var (
|
||||
forward_SimulateService_Simulate_0 = runtime.ForwardResponseMessage
|
||||
)
|
|
@ -1,119 +0,0 @@
|
|||
package simulate_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/suite"
|
||||
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/baseapp"
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
|
||||
"github.com/cosmos/cosmos-sdk/client/tx"
|
||||
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
|
||||
"github.com/cosmos/cosmos-sdk/simapp"
|
||||
"github.com/cosmos/cosmos-sdk/testutil/testdata"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
txtypes "github.com/cosmos/cosmos-sdk/types/tx"
|
||||
"github.com/cosmos/cosmos-sdk/types/tx/signing"
|
||||
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
|
||||
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
|
||||
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
|
||||
)
|
||||
|
||||
type IntegrationTestSuite struct {
|
||||
suite.Suite
|
||||
|
||||
app *simapp.SimApp
|
||||
clientCtx client.Context
|
||||
queryClient simulate.SimulateServiceClient
|
||||
sdkCtx sdk.Context
|
||||
}
|
||||
|
||||
func (s *IntegrationTestSuite) SetupSuite() {
|
||||
app := simapp.Setup(true)
|
||||
sdkCtx := app.BaseApp.NewContext(true, tmproto.Header{})
|
||||
|
||||
app.AccountKeeper.SetParams(sdkCtx, authtypes.DefaultParams())
|
||||
app.BankKeeper.SetParams(sdkCtx, banktypes.DefaultParams())
|
||||
|
||||
// Set up TxConfig.
|
||||
encodingConfig := simapp.MakeTestEncodingConfig()
|
||||
clientCtx := client.Context{}.WithTxConfig(encodingConfig.TxConfig)
|
||||
|
||||
// Create new simulation server.
|
||||
srv := simulate.NewSimulateServer(app.BaseApp.Simulate, encodingConfig.InterfaceRegistry)
|
||||
|
||||
queryHelper := baseapp.NewQueryServerTestHelper(sdkCtx, app.InterfaceRegistry())
|
||||
simulate.RegisterSimulateServiceServer(queryHelper, srv)
|
||||
queryClient := simulate.NewSimulateServiceClient(queryHelper)
|
||||
|
||||
s.app = app
|
||||
s.clientCtx = clientCtx
|
||||
s.queryClient = queryClient
|
||||
s.sdkCtx = sdkCtx
|
||||
}
|
||||
|
||||
func (s IntegrationTestSuite) TestSimulateService() {
|
||||
// Create an account with some funds.
|
||||
priv1, _, addr1 := testdata.KeyTestPubAddr()
|
||||
acc1 := s.app.AccountKeeper.NewAccountWithAddress(s.sdkCtx, addr1)
|
||||
err := acc1.SetAccountNumber(0)
|
||||
s.Require().NoError(err)
|
||||
s.app.AccountKeeper.SetAccount(s.sdkCtx, acc1)
|
||||
s.app.BankKeeper.SetBalances(s.sdkCtx, addr1, sdk.Coins{
|
||||
sdk.NewInt64Coin("atom", 10000000),
|
||||
})
|
||||
|
||||
// Create a test x/bank MsgSend.
|
||||
coins := sdk.NewCoins(sdk.NewInt64Coin("atom", 10))
|
||||
_, _, addr2 := testdata.KeyTestPubAddr()
|
||||
msg := banktypes.NewMsgSend(addr1, addr2, coins)
|
||||
feeAmount := testdata.NewTestFeeAmount()
|
||||
gasLimit := testdata.NewTestGasLimit()
|
||||
memo := "foo"
|
||||
accSeq, accNum := uint64(0), uint64(0)
|
||||
|
||||
// Create a txBuilder.
|
||||
txBuilder := s.clientCtx.TxConfig.NewTxBuilder()
|
||||
txBuilder.SetMsgs(msg)
|
||||
txBuilder.SetMemo(memo)
|
||||
txBuilder.SetFeeAmount(feeAmount)
|
||||
txBuilder.SetGasLimit(gasLimit)
|
||||
// 1st round: set empty signature
|
||||
sigV2 := signing.SignatureV2{
|
||||
PubKey: priv1.PubKey(),
|
||||
Data: &signing.SingleSignatureData{
|
||||
SignMode: s.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
|
||||
Signature: nil,
|
||||
},
|
||||
}
|
||||
txBuilder.SetSignatures(sigV2)
|
||||
// 2nd round: actually sign
|
||||
sigV2, err = tx.SignWithPrivKey(
|
||||
s.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
|
||||
authsigning.SignerData{ChainID: s.sdkCtx.ChainID(), AccountNumber: accNum, Sequence: accSeq},
|
||||
txBuilder, priv1, s.clientCtx.TxConfig, accSeq,
|
||||
)
|
||||
txBuilder.SetSignatures(sigV2)
|
||||
|
||||
any, ok := txBuilder.(codectypes.IntoAny)
|
||||
s.Require().True(ok)
|
||||
cached := any.AsAny().GetCachedValue()
|
||||
txTx, ok := cached.(*txtypes.Tx)
|
||||
s.Require().True(ok)
|
||||
res, err := s.queryClient.Simulate(
|
||||
context.Background(),
|
||||
&simulate.SimulateRequest{Tx: txTx},
|
||||
)
|
||||
s.Require().NoError(err)
|
||||
|
||||
// Check the result and gas used are correct.
|
||||
s.Require().Equal(len(res.GetResult().GetEvents()), 4) // 1 transfer, 3 messages.
|
||||
s.Require().True(res.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes changes; just check it's not empty.
|
||||
}
|
||||
|
||||
func TestSimulateTestSuite(t *testing.T) {
|
||||
suite.Run(t, new(IntegrationTestSuite))
|
||||
}
|
|
@ -15,6 +15,7 @@ import (
	grpctypes "github.com/cosmos/cosmos-sdk/types/grpc"

	"github.com/cosmos/cosmos-sdk/codec/types"
	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)

var _ gogogrpc.ClientConn = Context{}

@ -35,6 +36,11 @@ func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply
		if err != nil {
			return err
		}
		if height < 0 {
			return sdkerrors.Wrapf(
				sdkerrors.ErrInvalidRequest,
				"client.Context.Invoke: height (%d) from %q must be >= 0", height, grpctypes.GRPCBlockHeightHeader)
		}

		ctx = ctx.WithHeight(height)
	}
|
||||
|
|
|
@ -9,7 +9,6 @@ import (
|
|||
|
||||
bip39 "github.com/cosmos/go-bip39"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
"github.com/tendermint/tendermint/libs/cli"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
|
@ -18,6 +17,7 @@ import (
|
|||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keyring"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
|
@ -152,7 +152,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
|
|||
|
||||
multisigKeys, _ := cmd.Flags().GetStringSlice(flagMultisig)
|
||||
if len(multisigKeys) != 0 {
|
||||
var pks []crypto.PubKey
|
||||
var pks []cryptotypes.PubKey
|
||||
|
||||
multisigThreshold, _ := cmd.Flags().GetInt(flagMultiSigThreshold)
|
||||
if err := validateMultisigThreshold(multisigThreshold, len(multisigKeys)); err != nil {
|
||||
|
@ -247,7 +247,7 @@ func RunAddCmd(cmd *cobra.Command, args []string, kb keyring.Keyring, inBuf *buf
|
|||
}
|
||||
|
||||
if len(mnemonic) == 0 {
|
||||
// read entropy seed straight from crypto.Rand and convert to mnemonic
|
||||
// read entropy seed straight from tmcrypto.Rand and convert to mnemonic
|
||||
entropySeed, err := bip39.NewEntropy(mnemonicEntropySize)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
|
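The RunAddCmd hunk above switches the collected multisig keys from Tendermint's crypto.PubKey to the SDK's cryptotypes.PubKey. Below is a small, hypothetical sketch (not part of this commit) of the construction that slice feeds into, using two freshly generated secp256k1 keys as stand-ins for keys resolved from the keyring:

package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
	"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
	cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
)

func main() {
	// Stand-ins for the pubkeys RunAddCmd resolves from the --multisig key names.
	pks := []cryptotypes.PubKey{
		secp256k1.GenPrivKey().PubKey(),
		secp256k1.GenPrivKey().PubKey(),
	}

	// 2-of-2 multisig pubkey; the threshold is what validateMultisigThreshold checks.
	pk := multisig.NewLegacyAminoPubKey(2, pks)
	fmt.Println(pk.Threshold, len(pk.GetPubKeys()))
}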
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMzowNjozOC45MTAzNTggLTA0MDAgRURUIG09KzAuMDUwMTczMjM4IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiT25IN1lscERZSHZUVFFQcCJ9.27X3naS_OL75csQLEIFoPvvCyYb9R4D573z1Z1obm3TRGn4HyPFN_w.GXNcqKAUkxqM537Q.cT169l1KGKeOra6NXHbx3kEOEDw77Lom-42mwKV0bRQ_5WZU3kG5o6Ix14r7LFL1ajjc8rdXkuiUgKQyVXEXVpo-6WkEfk2-D_CQaaUgq0-UErT-9Pj7djI3FZkPPG-yxlVSiQXB1xMk38I_AxYwAakctpwHlEK_YC0-UycFmk25Qjezar_ni69KDRPyuqCYh3dyhimG6LgdpWF4pQHjtZPy5qIqcaE7TR0OeKvf9MtsaKEzpAQOeAvh.WbbZ_Fs8qk9rsN6FuWa2zg
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMzA4ODggLTA0MDAgRURUIG09KzAuMTYxMTg1Nzk2IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiU3ZNM1hfZU42VHhpSVg3aiJ9.BjL9xqItRueA3u4ujcWPTp4TJjO6w4NeR9G7p32ndo63ADDGJ7j1JQ.8Fd_XM52yxKhF31U.7Cm3SBAmp0u4QffFwBgeueuU3rWT1npSKI5CUROX5COgKbDpqj5CaT54k6UGeZiUxv8itQXglUpAsg7XsF-1LjbbUAfVxXe9H9n1GcfxrLov0L8_Ia-5JadXMXkbvv9jKyjhVg6kSziQXoHcHaeauF1X0_ij3a-UVH87cLqsdAI_OXtptyU8GonVyt_Q0n8mljonjZhj2c_bmXmHARYXZOmCj52dmzSpmkyQ9vqdhlRPco93-JWR5P1V.5J7fb71-1WKJ91g02D0JGg
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MzoyMy4yNjI0NzEgLTA0MDAgRURUIG09KzAuMDc2MzQ4MjA5IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiTE02SnY0RUdZNHBPLTQxWSJ9.wsqaCPHz_PlOH4_B3QlKT_4N9nTEjMzqn_Rqjq2ZM3vzf1dTO1_gjA.WVEaKSvNNWJ3ZaTT.eWrtCGCCplDULPw1QEyijVO_totUT5-6yx-TK4KP_BdKmhdEG8Bm319dXU33BchHthFa2VxDyB4NH_hsUenErJSKIJgJGoVc_AMwqrVZr0Wg0qJaay7jRGh1IRNXc0cuEsNpEek1C31tNaXjD2IuJzkicwdDT3BARFLFFdRhY97LG83YTvX0gVKyJFfjx8TAgUHZgpYyJMI4_vVajnneI-v1SYCY_VMbFTaCqWKFZdYOhu3x-hXfFBww.rxnMJbBz5OU4itr8nuyZgA
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yNDQwMTEgLTA0MDAgRURUIG09KzAuMTc0MzMxMjAxIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiYmlQLXR5Mnlvc3U2aHBvLSJ9.3drCmgYTeqS3PohaYKQc7i1fyjtOMuEPu_pDqMpT0UStPNDxG_LUDg.VS6Au9HoIruV0RiE._2BmFif-VbT_x4OD1NfsOCVFdL2MZfsG645SkptEKZAncOwHkKmWnBlKiV_LwnNzRBh-9eGGsCGfyou3zjUQRMDDHJOuW2EaVNmufmBWcAb9UoNO8O5kzPHwIvNqqJo5TQyjOviKCoP2PVcJXAwzttqDOw71B-9OuPwt_Ed4G6u8evwGIe08CzV6CKVImzj-AQg-1UI-uL06yFIEJ6CzB1DMdPR0qDQddP8pSYR_RTHnEUsii7HeKK1O.jqlYm4IZhXqe1k5kBQtguA
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMDQ1NjYgLTA0MDAgRURUIG09KzAuMTM0ODY0MzE4IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoidjVpMzRMY3NNbXduakEyMiJ9.XTokiwtSrKOIGREG7P7uaSfcV3hEr2ANHVUwaKbvLbuQVlTQO8fALw.bldSMLqfirE4GM9S.kNlvEojt1cavNW-nCaxX-Qk3tNm09xtXbuKppWbmMBUCf-_p-U_TWsnHuKbLon47RH1lxomrc1RpcfXwWhDEsGLwibtsjRdxz_2DGh124jeKOr4-Bl2raoPWdHKimm_cf5Ve17ChFfVy1AOaXwIr97ZdGWSU0FP8hOvv5_z5iUsuMK9T0DLxjz0162-_xSQMWWl4-hLknHz-QdO3oR_FpYo2K2eucNaFKmcN5Rn4s2n8FYLU9dIcopUF.WpNuRheBDoTiv3rK95yNjA
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMjAyOSAtMDQwMCBFRFQgbT0rMC4xNTA1ODc4MjYiLCJlbmMiOiJBMjU2R0NNIiwicDJjIjo4MTkyLCJwMnMiOiJJYXRUWEFMVjBfeXV2ZnNMIn0.kCXD6XXDHeBiXR-GqF10fbMWBvy3qe38r16b92Xu3oLpA5c0a6ByMg.ONW9ggBJFhdfIA8M.IWm_ioQqOCLSK3FbSwjAlEVtzRR4AAW7ceIXpKzv_voaCGDNgcr7xSyRR5N-YK-sVYInwUDrme8rb5T14mjcsNgoGdKKB2QXuApY-GcPwpe2Tf7TyiCxFp91VotHnrbjCh1NvWnjDC-SZNm8HDVolkYtiBPkIkk0uFGh35WWprkVpgEYFyNIFQ0PP3XD4D9A58X0UXdGEu5Q8VcJnt1p86XUyI1le_LufJUrWAz3o_89n3xKj-b6sYzQ.KZSIrdNzE97BxrTSNkMkTw
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMzowNjozOC45MDMwMjcgLTA0MDAgRURUIG09KzAuMDQyODQyNTY4IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiSjBxN2Zza0pGRUJJR25mYSJ9.W91-I2lpaBfacsUO2Xn2_tCadqztjGX7MjAkA6GKL4uMkqjEHDXyhw.c8uKD9z5w-jpSmq1.XGnt9JaOg0VT1cjg4RAlwC6Bsq9KowSF6wM6Ak1Y16Kq4sV3NnwA4CqJKnluIjAG6D4sfBKEs2FCHy5zux4uaOQ3Y5EJjRxWoTdBP7HahmO2-jsSFX_sPIzr86KIlKIqaYFJAOUqvaObOsQkX3EL_2-vDonSRMz32abg8thFS6mNi7NtM4xGXQ5Knrix-6OgzBmvWbn4Y0v82vNNWh8d4ubKf_RSEBV7CIWfuFg2CxfRq5EbUUmtMINF74eG52F8y8zjTDcn6n3qKLcecdr6s0n1tc7iq-f3s1EHnzPefwROPLFxiq0Zyt7N7vZCSowOElYZtgQWEg0dy6CIyZ274gNPlfLXMHA-kUsZj4Q_3w.sUPc7D8bBR4I3S-njXa4Ww
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MzoyMy4yNTg0OTUgLTA0MDAgRURUIG09KzAuMDcyMzcyNTM1IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiTW1XOFlVU0Nia2JUNW9iSSJ9.JyBrlPAvgtgYWwu0rcfTn6k9qvv6DywUotcWxPUiJncCBue2WPC5cQ.CmWeu5wMFFinUfiE.FA9k3Q_W8mBgSuJRkYV8h_U5YR2mDmW595L4DnFzuSFJ19Us0O1SQF9-xPJQAyjh4jli46o5mfFfsmU0ce1h4HwklW7AdrRJXVXZ0reZLjrdiojCbLvzyM9tsWInRXi6izUcwLggv2lNCXP5UIRpjMpUPiEC4GsHiwNH8qN04_feICxHuSWJ4mKLWEDtgKxHTrBqvaHT304UF6gRD-_W9_hWdEIj66-5HE4jlxcJAe22WdoF2Z1c3ujhm4piSfHaNnWYsZHLI5Jy1WhkFC2eULOe31c6eAeik5DyUUdWKvAoSiEk4H0Z9EcSbNzlW2rrU30WIIb-icK1qLID21WYurbxM8zvXl-CvhSM2VRN1g.tu_usvTlCOy3okBKmC6zHg
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4xOTkwMDUgLTA0MDAgRURUIG09KzAuMTI5MzA0MDMxIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiYzVYM09VdTBtQmJ4TFd4ayJ9.Pa4p1u-9N9x4E9-5rjUlReLsfH3TvTfAw-Dr1iV8z5ccAfnqLY1UWQ.Q41_cYh4c-C2zi3v.aFna4CwpZeGQBI2_ecOzlJSKypCV0NLCD8PCOnpYvY-k-HqoUFSeouFbuKeN9VaIo12JSZmjzGhfCAupZDBcSJisLVHOvaBAjl5XCOa8k49jb-aSopMI4HXQWatBJcnM65p9Hl1JrYOcnoKPxNKzJ4PiPQnHKv_VgAvWU_CBt6nnSjkwwVJjPMobgvNzeQTEFq-4pyziJNDbDWKUrQSrc-VaO-31JBlhpu6dPOJPFsnOcMyPc76po5cAQQdog-g79d59_Y4vj8s7qd-YPCHnWmoCbgf9w8vbpmJ4Y9evXZQz8A0-c0rFX7F96aZBYtQOeC1ZpRi0BMsbs_WHrpdN678HXej6YpfNDijQmiNYzQ.ayDBvX4W6GiGxAjN3ch8DA
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMTYwNTkgLTA0MDAgRURUIG09KzAuMTQ2MzU3MTI5IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoidmNnOXNfXzFvY0N1ZC1YTCJ9.hQ21Z1cXTw925V8Xxm_Gywb05S92rKEX54tnELv16xZvkgV8XiEj9A.4Wnaa4LemIi3bdIT.kEJyzWUvmLof5_bYbDePbrYyfjyqTLuUIOdnom3igOuwfzFDHtPuEb3rSLKjxj7LgJOmZVqZGP_ihW1sJQPXbK7ZuWA4zH_Wf-n5T8CFDmNIUhlUIb6sfd_ze-s45CE58hjKRkp7b5k61xBnMujZ5KC5Vk_JHUOUyZB5SqhTuEUJDjSSCFnMDJ1UYKEp23U__XFwcZonent4IMfM0fWvmA6NC2h0qLAMcKw9hbJ_yyNHt2I3lI5twthsAOsXKxUkjhx7c9Tc7BnttFxq-puD_QyjReExP77DzuueDJ-5KBd8PMgeiQMHoYM8e2NAAJU7MXe7voB-D8Ki2QcEgH7GfHNcr6vP1by3hvV5M32OXg.ifBDbtRjrXBOdH_jEORHgw
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMjY1OTEgLTA0MDAgRURUIG09KzAuMTU2ODg5MTU0IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiNUtTSWJtdGhCVHVSUnRjYyJ9.qD2C7cQK5P0EHy2Yr-uQZzMHep6U6n57z2LwWTmgxQp6m7ujLw-C-g.rmsltoJfFO4e56RZ.IDpVmduqe2WgyepT_-paXzcosHQzK6sfKY9JH16lT4QRVJ_lAozQOyZrW3X5MbgefrmtXGsoEIEFYhTDYBtXxrW7IqLaBhSCiA5MVwR403H3C2NkcygdGDdR-uDQGW3_bp7xnOhVL_3ofu0-7MQMMhZyz_wEmVW-aG7F6lN68TPaO5KTIqfnI8vOJyyZsSgB0M0gA3f-P4aar64YDTUdjgXPOSBkyRZr07JIOauGhTFXwmHWsDVBvGo3aIIx9ybAg_Blgo8ZAPqOJ6EYmA3J5RE2_LkfJjgI8dEpIFaviBHeWrG54AAN0klQ7trq9MOCpUGPc7PqySwiwTmxb2g4kFH9fR_yQ-g5g6mjj3JYVA.GRnNxd28SYmRt1I9twptPw
|
|
@ -1 +0,0 @@
|
|||
eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMC0xMC0wOCAxMjo1MDoxOC4yMzg1MTQgLTA0MDAgRURUIG09KzAuMTY4ODExNzMwIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiUmZDZC0xeGh6OVJ3RzJfViJ9.hMyIYfHwLYAwJs6THPC30rWfhd1SRUl5po4ifTvln5cV_VHyHLW3MQ.ku5jtKB-G5acpq4v.s0oNPaUaRQbFk-X8AL8QitkI_SdBWB2BpBmRRbo2ZMAkq4x81hSC5p7RlSrM3OGTNFZ4yOrRgzdMv43YpCl7ZpJIypF4l7Hyvl_13jTjqzB7o81dEhl_10SI_Fw607VKCnwqq02_VoqD489EpMVuQ05Fg2pUT3M_mJMacGztORYVJrIWwzbyUiHfM4GlnaoUQaKfwbkHS2W2-1wOPTSWTLEBVJlRG1EAZR_upcPJolcAStjl8PY5EfkxXD56c8Xu6SI8LjMrJAXXg7lTqOGNOkt0v8M8UZWd95Gy2zH_KJm3ItYR_YjPoMIHh-_Cb2-0uoXNRyykW4EpGptp08n7QubSYltzXwaw_NgLP9KUmg.67EgfbLDNyvEYCR12Bjoew
|
|
@ -5,13 +5,13 @@ import (
|
|||
"fmt"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
"github.com/tendermint/tendermint/libs/cli"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keyring"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/ledger"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
|
@ -61,7 +61,7 @@ func runShowCmd(cmd *cobra.Command, args []string) (err error) {
|
|||
return fmt.Errorf("%s is not a valid name or address: %v", args[0], err)
|
||||
}
|
||||
} else {
|
||||
pks := make([]tmcrypto.PubKey, len(args))
|
||||
pks := make([]cryptotypes.PubKey, len(args))
|
||||
for i, keyref := range args {
|
||||
info, err := fetchKey(clientCtx.Keyring, keyref)
|
||||
if err != nil {
|
||||
|
|
|
@ -7,14 +7,13 @@ import (
|
|||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/client/flags"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keyring"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
"github.com/cosmos/cosmos-sdk/testutil"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
@ -23,7 +22,7 @@ func Test_multiSigKey_Properties(t *testing.T) {
|
|||
tmpKey1 := secp256k1.GenPrivKeyFromSecret([]byte("mySecret"))
|
||||
pk := multisig.NewLegacyAminoPubKey(
|
||||
1,
|
||||
[]crypto.PubKey{tmpKey1.PubKey()},
|
||||
[]cryptotypes.PubKey{tmpKey1.PubKey()},
|
||||
)
|
||||
tmp := keyring.NewMultiInfo("myMultisig", pk)
|
||||
|
||||
|
|
|
@ -0,0 +1,27 @@
package rest

import (
	"net/http"

	"github.com/gorilla/mux"
)

// addHTTPDeprecationHeaders is a mux middleware function for adding HTTP
// Deprecation headers to a http handler
func addHTTPDeprecationHeaders(h http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Deprecation", "true")
		w.Header().Set("Link", "<https://docs.cosmos.network/v0.40/interfaces/rest.html>; rel=\"deprecation\"")
		w.Header().Set("Warning", "199 - \"this endpoint is deprecated and may not work as before, see deprecation link for more info\"")
		h.ServeHTTP(w, r)
	})
}

// WithHTTPDeprecationHeaders returns a new *mux.Router, identical to its input
// but with the addition of HTTP Deprecation headers. This is used to mark legacy
// amino REST endpoints as deprecated in the REST API.
func WithHTTPDeprecationHeaders(r *mux.Router) *mux.Router {
	subRouter := r.NewRoute().Subrouter()
	subRouter.Use(addHTTPDeprecationHeaders)
	return subRouter
}
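The new middleware above only sets three response headers before delegating to the wrapped handler. A short, hypothetical example (not part of this commit), written as if it lived alongside the new file in the same rest package; the route path and handler are made up:

package rest

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/gorilla/mux"
)

// ExampleWithHTTPDeprecationHeaders shows the deprecation headers being applied
// to a route mounted on the wrapped sub-router.
func ExampleWithHTTPDeprecationHeaders() {
	router := mux.NewRouter()

	// Legacy amino endpoints would be registered on the deprecated sub-router.
	deprecated := WithHTTPDeprecationHeaders(router)
	deprecated.HandleFunc("/legacy/endpoint", func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusOK)
	})

	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, httptest.NewRequest("GET", "/legacy/endpoint", nil))
	fmt.Println(rec.Header().Get("Deprecation"))
	// Output: true
}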
@ -0,0 +1,47 @@
|
|||
package rpc_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client/rpc"
|
||||
clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli"
|
||||
"github.com/cosmos/cosmos-sdk/testutil/network"
|
||||
)
|
||||
|
||||
type IntegrationTestSuite struct {
|
||||
suite.Suite
|
||||
|
||||
network *network.Network
|
||||
}
|
||||
|
||||
func (s *IntegrationTestSuite) SetupSuite() {
|
||||
s.T().Log("setting up integration test suite")
|
||||
|
||||
s.network = network.New(s.T(), network.DefaultConfig())
|
||||
s.Require().NotNil(s.network)
|
||||
|
||||
s.Require().NoError(s.network.WaitForNextBlock())
|
||||
}
|
||||
|
||||
func (s *IntegrationTestSuite) TearDownSuite() {
|
||||
s.T().Log("tearing down integration test suite")
|
||||
s.network.Cleanup()
|
||||
}
|
||||
|
||||
func (s *IntegrationTestSuite) TestStatusCommand() {
|
||||
val0 := s.network.Validators[0]
|
||||
cmd := rpc.StatusCommand()
|
||||
|
||||
out, err := clitestutil.ExecTestCLICmd(val0.ClientCtx, cmd, []string{})
|
||||
s.Require().NoError(err)
|
||||
|
||||
// Make sure the output has the validator moniker.
|
||||
s.Require().Contains(out.String(), fmt.Sprintf("\"moniker\":\"%s\"", val0.Moniker))
|
||||
}
|
||||
|
||||
func TestIntegrationTestSuite(t *testing.T) {
|
||||
suite.Run(t, new(IntegrationTestSuite))
|
||||
}
|
|
@ -2,22 +2,38 @@ package rpc
|
|||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/tendermint/tendermint/libs/bytes"
|
||||
"github.com/tendermint/tendermint/p2p"
|
||||
ctypes "github.com/tendermint/tendermint/rpc/core/types"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/client/flags"
|
||||
"github.com/cosmos/cosmos-sdk/codec/legacy"
|
||||
cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
"github.com/cosmos/cosmos-sdk/types/rest"
|
||||
"github.com/cosmos/cosmos-sdk/version"
|
||||
|
||||
"github.com/tendermint/tendermint/p2p"
|
||||
)
|
||||
|
||||
// ValidatorInfo is info about the node's validator, same as Tendermint,
|
||||
// except that we use our own PubKey.
|
||||
type validatorInfo struct {
|
||||
Address bytes.HexBytes
|
||||
PubKey cryptotypes.PubKey
|
||||
VotingPower int64
|
||||
}
|
||||
|
||||
// ResultStatus is node's info, same as Tendermint, except that we use our own
|
||||
// PubKey.
|
||||
type resultStatus struct {
|
||||
NodeInfo p2p.DefaultNodeInfo
|
||||
SyncInfo ctypes.SyncInfo
|
||||
ValidatorInfo validatorInfo
|
||||
}
|
||||
|
||||
// StatusCommand returns the command to return the status of the network.
|
||||
func StatusCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
|
@ -31,12 +47,27 @@ func StatusCommand() *cobra.Command {
|
|||
return err
|
||||
}
|
||||
|
||||
output, err := legacy.Cdc.MarshalJSON(status)
|
||||
// `status` has TM pubkeys, we need to convert them to our pubkeys.
|
||||
pk, err := cryptocodec.FromTmPubKeyInterface(status.ValidatorInfo.PubKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
statusWithPk := resultStatus{
|
||||
NodeInfo: status.NodeInfo,
|
||||
SyncInfo: status.SyncInfo,
|
||||
ValidatorInfo: validatorInfo{
|
||||
Address: status.ValidatorInfo.Address,
|
||||
PubKey: pk,
|
||||
VotingPower: status.ValidatorInfo.VotingPower,
|
||||
},
|
||||
}
|
||||
|
||||
output, err := clientCtx.LegacyAmino.MarshalJSON(statusWithPk)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Println(string(output))
|
||||
cmd.Println(string(output))
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
|
|
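The status command above now converts the Tendermint pubkey reported by the node into an SDK cryptotypes.PubKey, via cryptocodec.FromTmPubKeyInterface, before marshaling the result. A tiny, hypothetical sketch (not part of this commit) of that conversion in isolation, using a freshly generated Tendermint ed25519 key in place of status.ValidatorInfo.PubKey:

package main

import (
	"fmt"

	tmed25519 "github.com/tendermint/tendermint/crypto/ed25519"

	cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
)

func main() {
	// Stand-in for the pubkey a node returns in its status response.
	tmPk := tmed25519.GenPrivKey().PubKey()

	pk, err := cryptocodec.FromTmPubKeyInterface(tmPk)
	if err != nil {
		panic(err)
	}
	// Tendermint type in, SDK cryptotypes.PubKey out.
	fmt.Printf("%T -> %T\n", tmPk, pk)
}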
@ -14,6 +14,7 @@ import (
|
|||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/client/flags"
|
||||
cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
"github.com/cosmos/cosmos-sdk/types/rest"
|
||||
)
|
||||
|
@ -99,7 +100,11 @@ func (rvo ResultValidatorsOutput) String() string {
|
|||
}
|
||||
|
||||
func bech32ValidatorOutput(validator *tmtypes.Validator) (ValidatorOutput, error) {
|
||||
bechValPubkey, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeConsPub, validator.PubKey)
|
||||
pk, err := cryptocodec.FromTmPubKeyInterface(validator.PubKey)
|
||||
if err != nil {
|
||||
return ValidatorOutput{}, err
|
||||
}
|
||||
bechValPubkey, err := sdk.Bech32ifyPubKey(sdk.Bech32PubKeyTypeConsPub, pk)
|
||||
if err != nil {
|
||||
return ValidatorOutput{}, err
|
||||
}
|
||||
|
|
|
@ -3,8 +3,7 @@ package client
|
|||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
|
@ -26,7 +25,7 @@ func (t TestAccount) GetAddress() sdk.AccAddress {
|
|||
}
|
||||
|
||||
// GetPubKey implements client Account.GetPubKey
|
||||
func (t TestAccount) GetPubKey() crypto.PubKey {
|
||||
func (t TestAccount) GetPubKey() cryptotypes.PubKey {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
|
@ -120,7 +120,7 @@ func (f Factory) WithGas(gas uint64) Factory {
|
|||
|
||||
// WithFees returns a copy of the Factory with an updated fee.
|
||||
func (f Factory) WithFees(fees string) Factory {
|
||||
parsedFees, err := sdk.ParseCoins(fees)
|
||||
parsedFees, err := sdk.ParseCoinsNormalized(fees)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
|
|
@ -8,14 +8,13 @@ import (
|
|||
"os"
|
||||
|
||||
"github.com/spf13/pflag"
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/client/flags"
|
||||
sim "github.com/cosmos/cosmos-sdk/client/grpc/simulate"
|
||||
"github.com/cosmos/cosmos-sdk/client/input"
|
||||
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
|
||||
"github.com/cosmos/cosmos-sdk/types/rest"
|
||||
|
@ -279,7 +278,7 @@ func BuildSimTx(txf Factory, msgs ...sdk.Msg) ([]byte, error) {
|
|||
return nil, fmt.Errorf("cannot simulate amino tx")
|
||||
}
|
||||
|
||||
simReq := sim.SimulateRequest{Tx: protoTx}
|
||||
simReq := tx.SimulateRequest{Tx: protoTx}
|
||||
|
||||
return simReq.Marshal()
|
||||
}
|
||||
|
@ -288,21 +287,23 @@ func BuildSimTx(txf Factory, msgs ...sdk.Msg) ([]byte, error) {
|
|||
// simulation response obtained by the query and the adjusted gas amount.
|
||||
func CalculateGas(
|
||||
queryFunc func(string, []byte) ([]byte, int64, error), txf Factory, msgs ...sdk.Msg,
|
||||
) (sim.SimulateResponse, uint64, error) {
|
||||
) (tx.SimulateResponse, uint64, error) {
|
||||
txBytes, err := BuildSimTx(txf, msgs...)
|
||||
if err != nil {
|
||||
return sim.SimulateResponse{}, 0, err
|
||||
return tx.SimulateResponse{}, 0, err
|
||||
}
|
||||
|
||||
bz, _, err := queryFunc("/cosmos.base.simulate.v1beta1.SimulateService/Simulate", txBytes)
|
||||
// TODO This should use the generated tx service Client.
|
||||
// https://github.com/cosmos/cosmos-sdk/issues/7726
|
||||
bz, _, err := queryFunc("/cosmos.tx.v1beta1.Service/Simulate", txBytes)
|
||||
if err != nil {
|
||||
return sim.SimulateResponse{}, 0, err
|
||||
return tx.SimulateResponse{}, 0, err
|
||||
}
|
||||
|
||||
var simRes sim.SimulateResponse
|
||||
var simRes tx.SimulateResponse
|
||||
|
||||
if err := simRes.Unmarshal(bz); err != nil {
|
||||
return sim.SimulateResponse{}, 0, err
|
||||
return tx.SimulateResponse{}, 0, err
|
||||
}
|
||||
|
||||
return simRes, uint64(txf.GasAdjustment() * float64(simRes.GasInfo.GasUsed)), nil
|
||||
|
@ -342,7 +343,7 @@ func PrepareFactory(clientCtx client.Context, txf Factory) (Factory, error) {
|
|||
// corresponding SignatureV2 if the signing is successful.
|
||||
func SignWithPrivKey(
|
||||
signMode signing.SignMode, signerData authsigning.SignerData,
|
||||
txBuilder client.TxBuilder, priv crypto.PrivKey, txConfig client.TxConfig,
|
||||
txBuilder client.TxBuilder, priv cryptotypes.PrivKey, txConfig client.TxConfig,
|
||||
accSeq uint64,
|
||||
) (signing.SignatureV2, error) {
|
||||
var sigV2 signing.SignatureV2
|
||||
|
|
|
@ -7,12 +7,12 @@ import (
|
|||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/client"
|
||||
"github.com/cosmos/cosmos-sdk/client/grpc/simulate"
|
||||
"github.com/cosmos/cosmos-sdk/client/tx"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keyring"
|
||||
"github.com/cosmos/cosmos-sdk/simapp"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
txtypes "github.com/cosmos/cosmos-sdk/types/tx"
|
||||
"github.com/cosmos/cosmos-sdk/x/auth/signing"
|
||||
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
|
||||
)
|
||||
|
@ -28,7 +28,7 @@ func TestCalculateGas(t *testing.T) {
|
|||
if wantErr {
|
||||
return nil, 0, errors.New("query failed")
|
||||
}
|
||||
simRes := &simulate.SimulateResponse{
|
||||
simRes := &txtypes.SimulateResponse{
|
||||
GasInfo: &sdk.GasInfo{GasUsed: gasUsed, GasWanted: gasUsed},
|
||||
Result: &sdk.Result{Data: []byte("tx data"), Log: "log"},
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
// Code generated by protoc-gen-gogo. DO NOT EDIT.
|
||||
// source: third_party/proto/google/protobuf/any.proto
|
||||
// source: google/protobuf/any.proto
|
||||
|
||||
package types
|
||||
|
||||
|
@ -29,7 +29,7 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
|
|||
func (m *Any) Reset() { *m = Any{} }
|
||||
func (*Any) ProtoMessage() {}
|
||||
func (*Any) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_cb68f365a8e2bcdc, []int{0}
|
||||
return fileDescriptor_b53526c13ae22eb4, []int{0}
|
||||
}
|
||||
func (*Any) XXX_WellKnownType() string { return "Any" }
|
||||
func (m *Any) XXX_Unmarshal(b []byte) error {
|
||||
|
@ -79,28 +79,25 @@ func (*Any) XXX_MessageName() string {
|
|||
func init() {
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterFile("third_party/proto/google/protobuf/any.proto", fileDescriptor_cb68f365a8e2bcdc)
|
||||
}
|
||||
func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_b53526c13ae22eb4) }
|
||||
|
||||
var fileDescriptor_cb68f365a8e2bcdc = []byte{
|
||||
// 246 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x2e, 0xc9, 0xc8, 0x2c,
|
||||
0x4a, 0x89, 0x2f, 0x48, 0x2c, 0x2a, 0xa9, 0xd4, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0x4f, 0xcf,
|
||||
0xcf, 0x4f, 0xcf, 0x49, 0x85, 0x70, 0x92, 0x4a, 0xd3, 0xf4, 0x13, 0xf3, 0x2a, 0xf5, 0xc0, 0x1c,
|
||||
0x21, 0x7e, 0x88, 0x94, 0x1e, 0x4c, 0x4a, 0x4a, 0x0d, 0x9b, 0xee, 0xf4, 0x7c, 0x04, 0x0b, 0xa2,
|
||||
0x54, 0xc9, 0x86, 0x8b, 0xd9, 0x31, 0xaf, 0x52, 0x48, 0x92, 0x8b, 0xa3, 0xa4, 0xb2, 0x20, 0x35,
|
||||
0xbe, 0xb4, 0x28, 0x47, 0x82, 0x51, 0x81, 0x51, 0x83, 0x33, 0x88, 0x1d, 0xc4, 0x0f, 0x2d, 0xca,
|
||||
0x11, 0x12, 0xe1, 0x62, 0x2d, 0x4b, 0xcc, 0x29, 0x4d, 0x95, 0x60, 0x52, 0x60, 0xd4, 0xe0, 0x09,
|
||||
0x82, 0x70, 0xac, 0x58, 0x3e, 0x2c, 0x94, 0x67, 0x70, 0x6a, 0x66, 0xbc, 0xf1, 0x50, 0x8e, 0xe1,
|
||||
0xc3, 0x43, 0x39, 0xc6, 0x1f, 0x0f, 0xe5, 0x18, 0x1b, 0x1e, 0xc9, 0x31, 0xae, 0x78, 0x24, 0xc7,
|
||||
0x78, 0xe2, 0x91, 0x1c, 0xe3, 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0xbe, 0x78, 0x24,
|
||||
0xc7, 0xf0, 0x01, 0x24, 0xfe, 0x58, 0x8e, 0xf1, 0xc0, 0x63, 0x39, 0x86, 0x13, 0x8f, 0xe5, 0x18,
|
||||
0xb9, 0x84, 0x93, 0xf3, 0x73, 0xf5, 0xd0, 0x5c, 0xec, 0xc4, 0xe1, 0x98, 0x57, 0x19, 0x00, 0xe2,
|
||||
0x04, 0x30, 0x46, 0xb1, 0x82, 0x2c, 0x2f, 0x5e, 0xc4, 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49,
|
||||
0xce, 0x1d, 0xa2, 0x34, 0x00, 0xaa, 0x54, 0x2f, 0x3c, 0x35, 0x27, 0xc7, 0x3b, 0x2f, 0xbf, 0x3c,
|
||||
0x2f, 0x04, 0xa4, 0x2c, 0x89, 0x0d, 0x6c, 0x86, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x16, 0xc3,
|
||||
0x46, 0x5f, 0x32, 0x01, 0x00, 0x00,
|
||||
var fileDescriptor_b53526c13ae22eb4 = []byte{
|
||||
// 235 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f,
|
||||
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4,
|
||||
0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x29, 0x91, 0xf4, 0xfc, 0xf4, 0x7c, 0x30,
|
||||
0x4f, 0x1f, 0xc4, 0x82, 0x48, 0x28, 0xd9, 0x70, 0x31, 0x3b, 0xe6, 0x55, 0x0a, 0x49, 0x72, 0x71,
|
||||
0x94, 0x54, 0x16, 0xa4, 0xc6, 0x97, 0x16, 0xe5, 0x48, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, 0xb1,
|
||||
0x83, 0xf8, 0xa1, 0x45, 0x39, 0x42, 0x22, 0x5c, 0xac, 0x65, 0x89, 0x39, 0xa5, 0xa9, 0x12, 0x4c,
|
||||
0x0a, 0x8c, 0x1a, 0x3c, 0x41, 0x10, 0x8e, 0x15, 0xcb, 0x87, 0x85, 0xf2, 0x0c, 0x4e, 0xcd, 0x8c,
|
||||
0x37, 0x1e, 0xca, 0x31, 0x7c, 0x78, 0x28, 0xc7, 0xf8, 0xe3, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39,
|
||||
0xc6, 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23,
|
||||
0x39, 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x80, 0xc4, 0x1f, 0xcb, 0x31, 0x1e, 0x78, 0x2c, 0xc7,
|
||||
0x70, 0xe2, 0xb1, 0x1c, 0x23, 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0xfb, 0x9c, 0x38, 0x1c,
|
||||
0xf3, 0x2a, 0x03, 0x40, 0x9c, 0x00, 0xc6, 0x28, 0x56, 0x90, 0xe5, 0xc5, 0x8b, 0x98, 0x98, 0xdd,
|
||||
0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x94, 0x06, 0x40, 0x95, 0xea, 0x85, 0xa7, 0xe6, 0xe4,
|
||||
0x78, 0xe7, 0xe5, 0x97, 0xe7, 0x85, 0x80, 0x94, 0x25, 0xb1, 0x81, 0xcd, 0x30, 0x06, 0x04, 0x00,
|
||||
0x00, 0xff, 0xff, 0xe6, 0xfb, 0xa0, 0x21, 0x0e, 0x01, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (this *Any) Compare(that interface{}) int {
|
||||
|
|
|
@ -115,6 +115,11 @@ func (registry *interfaceRegistry) RegisterInterface(protoName string, iface int
	registry.RegisterImplementations(iface, impls...)
}

// RegisterImplementations registers a concrete proto Message which implements
// the given interface.
//
// This function PANICs if different concrete types are registered under the
// same typeURL.
func (registry *interfaceRegistry) RegisterImplementations(iface interface{}, impls ...proto.Message) {
	for _, impl := range impls {
		typeURL := "/" + proto.MessageName(impl)

@ -122,10 +127,20 @@ func (registry *interfaceRegistry) RegisterImplementations(iface interface{}, im
	}
}

// RegisterCustomTypeURL registers a concrete type which implements the given
// interface under `typeURL`.
//
// This function PANICs if different concrete types are registered under the
// same typeURL.
func (registry *interfaceRegistry) RegisterCustomTypeURL(iface interface{}, typeURL string, impl proto.Message) {
	registry.registerImpl(iface, typeURL, impl)
}

// registerImpl registers a concrete type which implements the given
// interface under `typeURL`.
//
// This function PANICs if different concrete types are registered under the
// same typeURL.
func (registry *interfaceRegistry) registerImpl(iface interface{}, typeURL string, impl proto.Message) {
	ityp := reflect.TypeOf(iface).Elem()
	imap, found := registry.interfaceImpls[ityp]

@ -138,6 +153,24 @@ func (registry *interfaceRegistry) registerImpl(iface interface{}, typeURL strin
		panic(fmt.Errorf("type %T doesn't actually implement interface %+v", impl, ityp))
	}

	// Check if we already registered something under the given typeURL. It's
	// okay to register the same concrete type again, but if we are registering
	// a new concrete type under the same typeURL, then we throw an error (here,
	// we panic).
	foundImplType, found := imap[typeURL]
	if found && foundImplType != implType {
		panic(
			fmt.Errorf(
				"concrete type %s has already been registered under typeURL %s, cannot register %s under same typeURL. "+
					"This usually means that there are conflicting modules registering different concrete types "+
					"for a same interface implementation",
				foundImplType,
				typeURL,
				implType,
			),
		)
	}

	imap[typeURL] = implType
	registry.typeURLMap[typeURL] = implType
|
||||
|
||||
|
|
|
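The doc comments above spell out the registry's contract: re-registering the same concrete type is allowed, while a different concrete type under an existing typeURL panics, which the test file that follows exercises. A short, hypothetical sketch (not part of this commit) of the usual happy path, the RegisterInterfaces hook a module exposes; bank's MsgSend is only used as a familiar stand-in here:

package example

import (
	codectypes "github.com/cosmos/cosmos-sdk/codec/types"
	sdk "github.com/cosmos/cosmos-sdk/types"
	banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
)

// RegisterInterfaces registers the module's concrete Msg types against the
// sdk.Msg interface so Any fields can be resolved back to them.
func RegisterInterfaces(registry codectypes.InterfaceRegistry) {
	// Registering the same concrete type twice is fine; registering a different
	// concrete type under the same typeURL panics, per the doc comments above.
	registry.RegisterImplementations((*sdk.Msg)(nil), &banktypes.MsgSend{})
}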
@ -48,22 +48,61 @@ type TestI interface {
|
|||
DoSomething()
|
||||
}
|
||||
|
||||
// A struct that has the same typeURL as testdata.Dog, but is actually another
|
||||
// concrete type.
|
||||
type FakeDog struct{}
|
||||
|
||||
var (
|
||||
_ proto.Message = &FakeDog{}
|
||||
_ testdata.Animal = &FakeDog{}
|
||||
)
|
||||
|
||||
// dummy implementation of proto.Message and testdata.Animal
|
||||
func (dog FakeDog) Reset() {}
|
||||
func (dog FakeDog) String() string { return "fakedog" }
|
||||
func (dog FakeDog) ProtoMessage() {}
|
||||
func (dog FakeDog) XXX_MessageName() string { return proto.MessageName(&testdata.Dog{}) }
|
||||
func (dog FakeDog) Greet() string { return "fakedog" }
|
||||
|
||||
func TestRegister(t *testing.T) {
|
||||
registry := types.NewInterfaceRegistry()
|
||||
registry.RegisterInterface("Animal", (*testdata.Animal)(nil))
|
||||
registry.RegisterInterface("TestI", (*TestI)(nil))
|
||||
|
||||
// Happy path.
|
||||
require.NotPanics(t, func() {
|
||||
registry.RegisterImplementations((*testdata.Animal)(nil), &testdata.Dog{})
|
||||
})
|
||||
|
||||
// testdata.Dog doesn't implement TestI
|
||||
require.Panics(t, func() {
|
||||
registry.RegisterImplementations((*TestI)(nil), &testdata.Dog{})
|
||||
})
|
||||
|
||||
// nil proto message
|
||||
require.Panics(t, func() {
|
||||
registry.RegisterImplementations((*TestI)(nil), nil)
|
||||
})
|
||||
|
||||
// Not an interface.
|
||||
require.Panics(t, func() {
|
||||
registry.RegisterInterface("not_an_interface", (*testdata.Dog)(nil))
|
||||
})
|
||||
|
||||
// Duplicate registration with same concrete type.
|
||||
require.NotPanics(t, func() {
|
||||
registry.RegisterImplementations((*testdata.Animal)(nil), &testdata.Dog{})
|
||||
})
|
||||
|
||||
// Duplicate registration with different concrete type on same typeURL.
|
||||
require.PanicsWithError(
|
||||
t,
|
||||
"concrete type *testdata.Dog has already been registered under typeURL /testdata.Dog, cannot register *types_test.FakeDog under same typeURL. "+
|
||||
"This usually means that there are conflicting modules registering different concrete types for a same interface implementation",
|
||||
func() {
|
||||
registry.RegisterImplementations((*testdata.Animal)(nil), &FakeDog{})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
func TestUnpackInterfaces(t *testing.T) {
|
||||
|
|
|
@ -0,0 +1,25 @@
|
|||
package unknownproto_test
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/simapp"
|
||||
)
|
||||
|
||||
// Issue #7739: Catch parse errors resulting from unexpected EOF in
|
||||
// protowire.ConsumeFieldValue. Discovered from fuzzing.
|
||||
func TestBadBytesPassedIntoDecoder(t *testing.T) {
|
||||
data, _ := hex.DecodeString("0A9F010A9C200A2D2F6962632E636F72652E636F6E6E656374696F6E2E76312E4D7367436F6E6E656374696F584F75656E496E6974126B0A0D6962637A65726F636C69656E74120B6962637A65726F636F6E6E1A1C0A0C6962636F6E65636C69656E74120A6962636F6E65636F6E6E00002205312E302E302A283235454635364341373935313335453430393336384536444238313130463232413442453035433212080A0612040A0208011A40143342993E25DA936CDDC7BE3D8F603CA6E9661518D536D0C482E18A0154AA096E438A6B9BCADFCFC2F0D689DCCAF55B96399D67A8361B70F5DA13091E2F929")
|
||||
cfg := simapp.MakeTestEncodingConfig()
|
||||
decoder := cfg.TxConfig.TxDecoder()
|
||||
tx, err := decoder(data)
|
||||
|
||||
// TODO: When issue https://github.com/cosmos/cosmos-sdk/issues/7846
|
||||
// is addressed, we'll remove this .Contains check.
|
||||
require.Contains(t, err.Error(), io.ErrUnexpectedEOF.Error())
|
||||
require.Nil(t, tx)
|
||||
}
|
|
@ -92,6 +92,11 @@ func RejectUnknownFields(bz []byte, msg proto.Message, allowUnknownNonCriticals
|
|||
// Skip over the bytes that store fieldNumber and wireType bytes.
|
||||
bz = bz[m:]
|
||||
n := protowire.ConsumeFieldValue(tagNum, wireType, bz)
|
||||
if n < 0 {
|
||||
err = fmt.Errorf("could not consume field value for tagNum: %d, wireType: %q; %w",
|
||||
tagNum, wireTypeToString(wireType), protowire.ParseError(n))
|
||||
return hasUnknownNonCriticals, err
|
||||
}
|
||||
fieldBytes := bz[:n]
|
||||
bz = bz[n:]
|
||||
|
||||
|
|
|
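For reference, a standalone sketch (not part of this change, using only the protowire API) of how a malformed field surfaces as the negative count that the wrapped error above reports:

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	// A tag for field 1 with varint wire type, but the value bytes are missing,
	// so ConsumeFieldValue reports truncation via a negative count.
	bz := []byte{0x08}
	tagNum, wireType, m := protowire.ConsumeTag(bz)
	n := protowire.ConsumeFieldValue(tagNum, wireType, bz[m:])
	if n < 0 {
		fmt.Println(protowire.ParseError(n)) // e.g. "unexpected EOF"
	}
}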
@ -57,14 +57,6 @@ tools-stamp: statik runsim
|
|||
# in a row.
|
||||
touch $@
|
||||
|
||||
proto-tools: proto-tools-stamp
|
||||
proto-tools-stamp:
|
||||
bash $(mkfile_dir)/proto-tools-installer.sh
|
||||
# Create dummy file to satisfy dependency and avoid
|
||||
# rebuilding when this Makefile target is hit twice
|
||||
# in a row.
|
||||
touch $@
|
||||
|
||||
# Install the runsim binary with a temporary workaround of entering an outside
|
||||
# directory as the "go get" command ignores the -mod option and will polute the
|
||||
# go.{mod, sum} files.
|
||||
|
@ -87,6 +79,6 @@ $(RUNSIM):
|
|||
|
||||
tools-clean:
|
||||
rm -f $(STATIK) $(GOLANGCI_LINT) $(RUNSIM)
|
||||
rm -f proto-tools-stamp tools-stamp
|
||||
rm -f tools-stamp
|
||||
|
||||
.PHONY: tools-clean statik runsim
|
||||
|
|
|
@ -1,128 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -ue
|
||||
|
||||
DESTDIR=${DESTDIR:-}
|
||||
PREFIX=${PREFIX:-/usr/local}
|
||||
UNAME_S="$(uname -s 2>/dev/null)"
|
||||
UNAME_M="$(uname -m 2>/dev/null)"
|
||||
BUF_VERSION=0.11.0
|
||||
PROTOC_VERSION=3.13.0
|
||||
PROTOC_GRPC_GATEWAY_VERSION=1.14.7
|
||||
|
||||
f_abort() {
|
||||
local l_rc=$1
|
||||
shift
|
||||
|
||||
echo $@ >&2
|
||||
exit ${l_rc}
|
||||
}
|
||||
|
||||
case "${UNAME_S}" in
|
||||
Linux)
|
||||
PROTOC_ZIP="protoc-${PROTOC_VERSION}-linux-x86_64.zip"
|
||||
PROTOC_GRPC_GATEWAY_BIN="protoc-gen-grpc-gateway-v${PROTOC_GRPC_GATEWAY_VERSION}-linux-x86_64"
|
||||
;;
|
||||
Darwin)
|
||||
PROTOC_ZIP="protoc-${PROTOC_VERSION}-osx-x86_64.zip"
|
||||
PROTOC_GRPC_GATEWAY_BIN="protoc-gen-grpc-gateway-v${PROTOC_GRPC_GATEWAY_VERSION}-darwin-x86_64"
|
||||
;;
|
||||
*)
|
||||
f_abort 1 "Unknown kernel name. Exiting."
|
||||
esac
|
||||
|
||||
TEMPDIR="$(mktemp -d)"
|
||||
|
||||
trap "rm -rvf ${TEMPDIR}" EXIT
|
||||
|
||||
f_print_installing_with_padding() {
|
||||
printf "Installing %30s ..." "$1" >&2
|
||||
}
|
||||
|
||||
f_print_done() {
|
||||
echo -e "\tDONE" >&2
|
||||
}
|
||||
|
||||
f_ensure_tools() {
|
||||
! which curl &>/dev/null && f_abort 2 "couldn't find curl, aborting" || true
|
||||
}
|
||||
|
||||
f_ensure_dirs() {
|
||||
mkdir -p "${DESTDIR}/${PREFIX}/bin"
|
||||
mkdir -p "${DESTDIR}/${PREFIX}/include"
|
||||
}
|
||||
|
||||
f_needs_install() {
|
||||
if [ -x $1 ]; then
|
||||
echo -e "\talready installed. Skipping." >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
f_install_protoc() {
|
||||
f_print_installing_with_padding proto_c
|
||||
f_needs_install "${DESTDIR}/${PREFIX}/bin/protoc" || return 0
|
||||
|
||||
pushd "${TEMPDIR}" >/dev/null
|
||||
curl -o "${PROTOC_ZIP}" -sSL "https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/${PROTOC_ZIP}"
|
||||
unzip -q -o ${PROTOC_ZIP} -d ${DESTDIR}/${PREFIX} bin/protoc; \
|
||||
unzip -q -o ${PROTOC_ZIP} -d ${DESTDIR}/${PREFIX} 'include/*'; \
|
||||
rm -f ${PROTOC_ZIP}
|
||||
popd >/dev/null
|
||||
f_print_done
|
||||
}
|
||||
|
||||
f_install_buf() {
|
||||
f_print_installing_with_padding buf
|
||||
f_needs_install "${DESTDIR}/${PREFIX}/bin/buf" || return 0
|
||||
|
||||
curl -sSL "https://github.com/bufbuild/buf/releases/download/v${BUF_VERSION}/buf-${UNAME_S}-${UNAME_M}" -o "${DESTDIR}/${PREFIX}/bin/buf"
|
||||
chmod +x "${DESTDIR}/${PREFIX}/bin/buf"
|
||||
f_print_done
|
||||
}
|
||||
|
||||
f_install_protoc_gen_gocosmos() {
|
||||
f_print_installing_with_padding protoc-gen-gocosmos
|
||||
|
||||
if ! grep "github.com/gogo/protobuf => github.com/regen-network/protobuf" go.mod &>/dev/null ; then
|
||||
echo -e "\tPlease run this command from somewhere inside the cosmos-sdk folder."
|
||||
return 1
|
||||
fi
|
||||
|
||||
go get github.com/regen-network/cosmos-proto/protoc-gen-gocosmos 2>/dev/null
|
||||
f_print_done
|
||||
}
|
||||
|
||||
f_install_protoc_gen_grpc_gateway() {
|
||||
f_print_installing_with_padding protoc-gen-grpc-gateway
|
||||
f_needs_install "${DESTDIR}/${PREFIX}/bin/protoc-gen-grpc-gateway" || return 0
|
||||
|
||||
curl -o "${DESTDIR}/${PREFIX}/bin/protoc-gen-grpc-gateway" -sSL "https://github.com/grpc-ecosystem/grpc-gateway/releases/download/v${PROTOC_GRPC_GATEWAY_VERSION}/${PROTOC_GRPC_GATEWAY_BIN}"
|
||||
f_print_done
|
||||
}
|
||||
|
||||
f_install_protoc_gen_swagger() {
|
||||
f_print_installing_with_padding protoc-gen-swagger
|
||||
f_needs_install "${DESTDIR}/${PREFIX}/bin/protoc-gen-swagger" || return 0
|
||||
|
||||
if ! which npm &>/dev/null ; then
|
||||
echo -e "\tNPM is not installed. Skipping."
|
||||
return 0
|
||||
fi
|
||||
|
||||
pushd "${TEMPDIR}" >/dev/null
|
||||
go get github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger
|
||||
npm install -g swagger-combine
|
||||
popd >/dev/null
|
||||
f_print_done
|
||||
}
|
||||
|
||||
f_ensure_tools
|
||||
f_ensure_dirs
|
||||
f_install_protoc
|
||||
f_install_buf
|
||||
f_install_protoc_gen_gocosmos
|
||||
f_install_protoc_gen_grpc_gateway
|
||||
f_install_protoc_gen_swagger
|
|
@ -5,10 +5,18 @@ import (
|
|||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
)
|
||||
|
||||
func TestConfigPaths(t *testing.T) {
|
||||
type argsTestSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
|
||||
func TestArgsTestSuite(t *testing.T) {
|
||||
suite.Run(t, new(argsTestSuite))
|
||||
}
|
||||
|
||||
func (s *argsTestSuite) TestConfigPaths() {
|
||||
cases := map[string]struct {
|
||||
cfg Config
|
||||
upgradeName string
|
||||
|
@ -32,23 +40,21 @@ func TestConfigPaths(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
assert.Equal(t, tc.cfg.Root(), filepath.FromSlash(tc.expectRoot))
|
||||
assert.Equal(t, tc.cfg.GenesisBin(), filepath.FromSlash(tc.expectGenesis))
|
||||
assert.Equal(t, tc.cfg.UpgradeBin(tc.upgradeName), filepath.FromSlash(tc.expectUpgrade))
|
||||
})
|
||||
for _, tc := range cases {
|
||||
s.Require().Equal(tc.cfg.Root(), filepath.FromSlash(tc.expectRoot))
|
||||
s.Require().Equal(tc.cfg.GenesisBin(), filepath.FromSlash(tc.expectGenesis))
|
||||
s.Require().Equal(tc.cfg.UpgradeBin(tc.upgradeName), filepath.FromSlash(tc.expectUpgrade))
|
||||
}
|
||||
}
|
||||
|
||||
// Test validate
|
||||
func TestValidate(t *testing.T) {
|
||||
func (s *argsTestSuite) TestValidate() {
|
||||
relPath := filepath.Join("testdata", "validate")
|
||||
absPath, err := filepath.Abs(relPath)
|
||||
assert.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
testdata, err := filepath.Abs("testdata")
|
||||
assert.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
cases := map[string]struct {
|
||||
cfg Config
|
||||
|
@ -84,28 +90,25 @@ func TestValidate(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
err := tc.cfg.validate()
|
||||
if tc.valid {
|
||||
assert.NoError(t, err)
|
||||
} else {
|
||||
assert.Error(t, err)
|
||||
}
|
||||
})
|
||||
for _, tc := range cases {
|
||||
err := tc.cfg.validate()
|
||||
if tc.valid {
|
||||
s.Require().NoError(err)
|
||||
} else {
|
||||
s.Require().Error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnsureBin(t *testing.T) {
|
||||
func (s *argsTestSuite) TestEnsureBin() {
|
||||
relPath := filepath.Join("testdata", "validate")
|
||||
absPath, err := filepath.Abs(relPath)
|
||||
assert.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
cfg := Config{Home: absPath, Name: "dummyd"}
|
||||
assert.NoError(t, cfg.validate())
|
||||
s.Require().NoError(cfg.validate())
|
||||
|
||||
err = EnsureBinary(cfg.GenesisBin())
|
||||
assert.NoError(t, err)
|
||||
s.Require().NoError(EnsureBinary(cfg.GenesisBin()))
|
||||
|
||||
cases := map[string]struct {
|
||||
upgrade string
|
||||
|
@ -117,14 +120,12 @@ func TestEnsureBin(t *testing.T) {
|
|||
"no directory": {"foobarbaz", false},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
err := EnsureBinary(cfg.UpgradeBin(tc.upgrade))
|
||||
if tc.hasBin {
|
||||
assert.NoError(t, err)
|
||||
} else {
|
||||
assert.Error(t, err)
|
||||
}
|
||||
})
|
||||
for _, tc := range cases {
|
||||
err := EnsureBinary(cfg.UpgradeBin(tc.upgrade))
|
||||
if tc.hasBin {
|
||||
s.Require().NoError(err)
|
||||
} else {
|
||||
s.Require().Error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,9 +4,13 @@ import (
|
|||
"bufio"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"strings"
|
||||
"sync"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
// LaunchProcess runs a subprocess and returns when the subprocess exits,
|
||||
|
@ -39,6 +43,15 @@ func LaunchProcess(cfg *Config, args []string, stdout, stderr io.Writer) (bool,
|
|||
return false, fmt.Errorf("launching process %s %s: %w", bin, strings.Join(args, " "), err)
|
||||
}
|
||||
|
||||
sigs := make(chan os.Signal, 1)
|
||||
signal.Notify(sigs, syscall.SIGQUIT, syscall.SIGTERM)
|
||||
go func() {
|
||||
sig := <-sigs
|
||||
if err := cmd.Process.Signal(sig); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}()
|
||||
|
||||
// three ways to exit - command ends, find regexp in scanOut, find regexp in scanErr
|
||||
upgradeInfo, err := WaitForUpgradeOrExit(cmd, scanOut, scanErr)
|
||||
if err != nil {
|
||||
|
|
|
@ -1,110 +1,114 @@
|
|||
// +build linux
|
||||
|
||||
package cosmovisor
|
||||
package cosmovisor_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/cosmovisor"
|
||||
)
|
||||
|
||||
// TestLaunchProcess will try running the script a few times and watch upgrades work properly
|
||||
// and args are passed through
|
||||
func TestLaunchProcess(t *testing.T) {
|
||||
home, err := copyTestData("validate")
|
||||
cfg := &Config{Home: home, Name: "dummyd"}
|
||||
require.NoError(t, err)
|
||||
defer os.RemoveAll(home)
|
||||
type processTestSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
|
||||
// should run the genesis binary and produce expected output
|
||||
var stdout, stderr bytes.Buffer
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, cfg.GenesisBin(), currentBin)
|
||||
|
||||
args := []string{"foo", "bar", "1234"}
|
||||
doUpgrade, err := LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, doUpgrade)
|
||||
assert.Equal(t, "", stderr.String())
|
||||
assert.Equal(t, "Genesis foo bar 1234\nUPGRADE \"chain2\" NEEDED at height: 49: {}\n", stdout.String())
|
||||
|
||||
// ensure this is upgraded now and produces new output
|
||||
|
||||
currentBin, err = cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cfg.UpgradeBin("chain2"), currentBin)
|
||||
args = []string{"second", "run", "--verbose"}
|
||||
stdout.Reset()
|
||||
stderr.Reset()
|
||||
doUpgrade, err = LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
require.NoError(t, err)
|
||||
assert.False(t, doUpgrade)
|
||||
assert.Equal(t, "", stderr.String())
|
||||
assert.Equal(t, "Chain 2 is live!\nArgs: second run --verbose\nFinished successfully\n", stdout.String())
|
||||
|
||||
// ended without other upgrade
|
||||
require.Equal(t, cfg.UpgradeBin("chain2"), currentBin)
|
||||
func TestProcessTestSuite(t *testing.T) {
|
||||
suite.Run(t, new(processTestSuite))
|
||||
}
|
||||
|
||||
// TestLaunchProcess will try running the script a few times and watch upgrades work properly
|
||||
// and args are passed through
|
||||
func TestLaunchProcessWithDownloads(t *testing.T) {
|
||||
func (s *processTestSuite) TestLaunchProcess() {
|
||||
home := copyTestData(s.T(), "validate")
|
||||
cfg := &cosmovisor.Config{Home: home, Name: "dummyd"}
|
||||
|
||||
// should run the genesis binary and produce expected output
|
||||
var stdout, stderr bytes.Buffer
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
s.Require().NoError(err)
|
||||
|
||||
s.Require().Equal(cfg.GenesisBin(), currentBin)
|
||||
|
||||
args := []string{"foo", "bar", "1234"}
|
||||
doUpgrade, err := cosmovisor.LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
s.Require().NoError(err)
|
||||
s.Require().True(doUpgrade)
|
||||
s.Require().Equal("", stderr.String())
|
||||
s.Require().Equal("Genesis foo bar 1234\nUPGRADE \"chain2\" NEEDED at height: 49: {}\n", stdout.String())
|
||||
|
||||
// ensure this is upgraded now and produces new output
|
||||
|
||||
currentBin, err = cfg.CurrentBin()
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(cfg.UpgradeBin("chain2"), currentBin)
|
||||
args = []string{"second", "run", "--verbose"}
|
||||
stdout.Reset()
|
||||
stderr.Reset()
|
||||
doUpgrade, err = cosmovisor.LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
s.Require().NoError(err)
|
||||
s.Require().False(doUpgrade)
|
||||
s.Require().Equal("", stderr.String())
|
||||
s.Require().Equal("Chain 2 is live!\nArgs: second run --verbose\nFinished successfully\n", stdout.String())
|
||||
|
||||
// ended without other upgrade
|
||||
s.Require().Equal(cfg.UpgradeBin("chain2"), currentBin)
|
||||
}
|
||||
|
||||
// TestLaunchProcessWithDownloads will try running the script a few times and watch upgrades work properly
|
||||
// and args are passed through
|
||||
func (s *processTestSuite) TestLaunchProcessWithDownloads() {
|
||||
// this is a fun path
|
||||
// genesis -> "chain2" = zip_binary
|
||||
// zip_binary -> "chain3" = ref_zipped -> zip_directory
|
||||
// zip_directory no upgrade
|
||||
home, err := copyTestData("download")
|
||||
cfg := &Config{Home: home, Name: "autod", AllowDownloadBinaries: true}
|
||||
require.NoError(t, err)
|
||||
defer os.RemoveAll(home)
|
||||
home := copyTestData(s.T(), "download")
|
||||
cfg := &cosmovisor.Config{Home: home, Name: "autod", AllowDownloadBinaries: true}
|
||||
|
||||
// should run the genesis binary and produce expected output
|
||||
var stdout, stderr bytes.Buffer
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
require.Equal(t, cfg.GenesisBin(), currentBin)
|
||||
s.Require().Equal(cfg.GenesisBin(), currentBin)
|
||||
args := []string{"some", "args"}
|
||||
doUpgrade, err := LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, doUpgrade)
|
||||
assert.Equal(t, "", stderr.String())
|
||||
assert.Equal(t, "Preparing auto-download some args\n"+`ERROR: UPGRADE "chain2" NEEDED at height: 49: {"binaries":{"linux/amd64":"https://github.com/cosmos/cosmos-sdk/raw/51249cb93130810033408934454841c98423ed4b/cosmovisor/testdata/repo/zip_binary/autod.zip?checksum=sha256:dc48829b4126ae95bc0db316c66d4e9da5f3db95e212665b6080638cca77e998"}} module=main`+"\n", stdout.String())
|
||||
doUpgrade, err := cosmovisor.LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
s.Require().NoError(err)
|
||||
s.Require().True(doUpgrade)
|
||||
s.Require().Equal("", stderr.String())
|
||||
s.Require().Equal("Preparing auto-download some args\n"+`ERROR: UPGRADE "chain2" NEEDED at height: 49: {"binaries":{"linux/amd64":"https://github.com/cosmos/cosmos-sdk/raw/51249cb93130810033408934454841c98423ed4b/cosmovisor/testdata/repo/zip_binary/autod.zip?checksum=sha256:dc48829b4126ae95bc0db316c66d4e9da5f3db95e212665b6080638cca77e998"}} module=main`+"\n", stdout.String())
|
||||
|
||||
// ensure this is upgraded now and produces new output
|
||||
currentBin, err = cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cfg.UpgradeBin("chain2"), currentBin)
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(cfg.UpgradeBin("chain2"), currentBin)
|
||||
args = []string{"run", "--fast"}
|
||||
stdout.Reset()
|
||||
stderr.Reset()
|
||||
doUpgrade, err = LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, doUpgrade)
|
||||
assert.Equal(t, "", stderr.String())
|
||||
assert.Equal(t, "Chain 2 from zipped binary link to referral\nArgs: run --fast\n"+`ERROR: UPGRADE "chain3" NEEDED at height: 936: https://github.com/cosmos/cosmos-sdk/raw/0eae1a50612b8bf803336d35055896fbddaa1ddd/cosmovisor/testdata/repo/ref_zipped?checksum=sha256:0a428575de718ed3cf0771c9687eefaf6f19359977eca4d94a0abd0e11ef8e64 module=main`+"\n", stdout.String())
|
||||
doUpgrade, err = cosmovisor.LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
s.Require().NoError(err)
|
||||
s.Require().True(doUpgrade)
|
||||
s.Require().Equal("", stderr.String())
|
||||
s.Require().Equal("Chain 2 from zipped binary link to referral\nArgs: run --fast\n"+`ERROR: UPGRADE "chain3" NEEDED at height: 936: https://github.com/cosmos/cosmos-sdk/raw/0eae1a50612b8bf803336d35055896fbddaa1ddd/cosmovisor/testdata/repo/ref_zipped?checksum=sha256:0a428575de718ed3cf0771c9687eefaf6f19359977eca4d94a0abd0e11ef8e64 module=main`+"\n", stdout.String())
|
||||
|
||||
// ended with one more upgrade
|
||||
currentBin, err = cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cfg.UpgradeBin("chain3"), currentBin)
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(cfg.UpgradeBin("chain3"), currentBin)
|
||||
// make sure this is the proper binary now....
|
||||
args = []string{"end", "--halt"}
|
||||
stdout.Reset()
|
||||
stderr.Reset()
|
||||
doUpgrade, err = LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
require.NoError(t, err)
|
||||
assert.False(t, doUpgrade)
|
||||
assert.Equal(t, "", stderr.String())
|
||||
assert.Equal(t, "Chain 2 from zipped directory\nArgs: end --halt\n", stdout.String())
|
||||
doUpgrade, err = cosmovisor.LaunchProcess(cfg, args, &stdout, &stderr)
|
||||
s.Require().NoError(err)
|
||||
s.Require().False(doUpgrade)
|
||||
s.Require().Equal("", stderr.String())
|
||||
s.Require().Equal("Chain 2 from zipped directory\nArgs: end --halt\n", stdout.String())
|
||||
|
||||
// and this doesn't upgrade
|
||||
currentBin, err = cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cfg.UpgradeBin("chain3"), currentBin)
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(cfg.UpgradeBin("chain3"), currentBin)
|
||||
}
|
||||
|
|
|
@ -1,17 +1,19 @@
|
|||
package cosmovisor
|
||||
package cosmovisor_test
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/cosmos/cosmos-sdk/cosmovisor"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestWaitForInfo(t *testing.T) {
|
||||
cases := map[string]struct {
|
||||
write []string
|
||||
expectUpgrade *UpgradeInfo
|
||||
expectUpgrade *cosmovisor.UpgradeInfo
|
||||
expectErr bool
|
||||
}{
|
||||
"no match": {
|
||||
|
@ -19,14 +21,14 @@ func TestWaitForInfo(t *testing.T) {
|
|||
},
|
||||
"match name with no info": {
|
||||
write: []string{"first line\n", `UPGRADE "myname" NEEDED at height: 123: `, "\nnext line\n"},
|
||||
expectUpgrade: &UpgradeInfo{
|
||||
expectUpgrade: &cosmovisor.UpgradeInfo{
|
||||
Name: "myname",
|
||||
Info: "",
|
||||
},
|
||||
},
|
||||
"match name with info": {
|
||||
write: []string{"first line\n", `UPGRADE "take2" NEEDED at height: 123: DownloadData here!`, "\nnext line\n"},
|
||||
expectUpgrade: &UpgradeInfo{
|
||||
expectUpgrade: &cosmovisor.UpgradeInfo{
|
||||
Name: "take2",
|
||||
Info: "DownloadData",
|
||||
},
|
||||
|
@ -42,20 +44,20 @@ func TestWaitForInfo(t *testing.T) {
|
|||
go func() {
|
||||
for _, line := range tc.write {
|
||||
n, err := w.Write([]byte(line))
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, len(line), n)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, len(line), n)
|
||||
}
|
||||
w.Close()
|
||||
}()
|
||||
|
||||
// now scan the info
|
||||
info, err := WaitForUpdate(scan)
|
||||
info, err := cosmovisor.WaitForUpdate(scan)
|
||||
if tc.expectErr {
|
||||
assert.Error(t, err)
|
||||
require.Error(t, err)
|
||||
return
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tc.expectUpgrade, info)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, tc.expectUpgrade, info)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -119,12 +119,12 @@ func GetDownloadURL(info *UpgradeInfo) (string, error) {
|
|||
var config UpgradeConfig
|
||||
|
||||
if err := json.Unmarshal([]byte(doc), &config); err == nil {
|
||||
url, ok := config.Binaries[osArch()]
|
||||
url, ok := config.Binaries[OSArch()]
|
||||
if !ok {
|
||||
url, ok = config.Binaries["any"]
|
||||
}
|
||||
if !ok {
|
||||
return "", fmt.Errorf("cannot find binary for os/arch: neither %s, nor any", osArch())
|
||||
return "", fmt.Errorf("cannot find binary for os/arch: neither %s, nor any", OSArch())
|
||||
}
|
||||
|
||||
return url, nil
|
||||
|
@ -133,7 +133,7 @@ func GetDownloadURL(info *UpgradeInfo) (string, error) {
|
|||
return "", errors.New("upgrade info doesn't contain binary map")
|
||||
}
|
||||
|
||||
func osArch() string {
|
||||
func OSArch() string {
|
||||
return fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH)
|
||||
}
|
||||
|
||||
|
|
|
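A minimal sketch (not from this commit) of feeding GetDownloadURL an upgrade-info payload; the URLs here are placeholders, and the binaries map format follows the test fixtures above, with "any" acting as the fallback when no exact os/arch entry exists:

package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/cosmovisor"
)

func main() {
	// Hypothetical upgrade plan info; keys are os/arch strings as returned by OSArch().
	info := &cosmovisor.UpgradeInfo{
		Name: "chain2",
		Info: `{"binaries":{"linux/amd64":"https://example.com/autod.zip?checksum=sha256:deadbeef","any":"https://example.com/autod-any.zip?checksum=sha256:deadbeef"}}`,
	}

	url, err := cosmovisor.GetDownloadURL(info)
	if err != nil {
		panic(err)
	}
	fmt.Println(url) // entry for cosmovisor.OSArch() if present, otherwise "any"
}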
@ -1,139 +1,138 @@
|
|||
// +build linux
|
||||
|
||||
package cosmovisor
|
||||
package cosmovisor_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
copy2 "github.com/otiai10/copy"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/otiai10/copy"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/cosmovisor"
|
||||
)
|
||||
|
||||
func TestCurrentBin(t *testing.T) {
|
||||
home, err := copyTestData("validate")
|
||||
require.NoError(t, err)
|
||||
defer os.RemoveAll(home)
|
||||
type upgradeTestSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
|
||||
cfg := Config{Home: home, Name: "dummyd"}
|
||||
func TestUpgradeTestSuite(t *testing.T) {
|
||||
suite.Run(t, new(upgradeTestSuite))
|
||||
}
|
||||
|
||||
func (s *upgradeTestSuite) TestCurrentBin() {
|
||||
home := copyTestData(s.T(), "validate")
|
||||
cfg := cosmovisor.Config{Home: home, Name: "dummyd"}
|
||||
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
assert.Equal(t, cfg.GenesisBin(), currentBin)
|
||||
s.Require().Equal(cfg.GenesisBin(), currentBin)
|
||||
|
||||
// ensure we cannot set this to an invalid value
|
||||
for _, name := range []string{"missing", "nobin", "noexec"} {
|
||||
err = cfg.SetCurrentUpgrade(name)
|
||||
require.Error(t, err, name)
|
||||
s.Require().Error(cfg.SetCurrentUpgrade(name), name)
|
||||
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
assert.Equal(t, cfg.GenesisBin(), currentBin, name)
|
||||
s.Require().Equal(cfg.GenesisBin(), currentBin, name)
|
||||
}
|
||||
|
||||
// try a few times to make sure this can be reproduced
|
||||
for _, upgrade := range []string{"chain2", "chain3", "chain2"} {
|
||||
// now set it to a valid upgrade and make sure CurrentBin is now set properly
|
||||
err = cfg.SetCurrentUpgrade(upgrade)
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
// we should see current point to the new upgrade dir
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
assert.Equal(t, cfg.UpgradeBin(upgrade), currentBin)
|
||||
s.Require().Equal(cfg.UpgradeBin(upgrade), currentBin)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCurrentAlwaysSymlinkToDirectory(t *testing.T) {
|
||||
home, err := copyTestData("validate")
|
||||
require.NoError(t, err)
|
||||
defer os.RemoveAll(home)
|
||||
|
||||
cfg := Config{Home: home, Name: "dummyd"}
|
||||
func (s *upgradeTestSuite) TestCurrentAlwaysSymlinkToDirectory() {
|
||||
home := copyTestData(s.T(), "validate")
|
||||
cfg := cosmovisor.Config{Home: home, Name: "dummyd"}
|
||||
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, cfg.GenesisBin(), currentBin)
|
||||
assertCurrentLink(t, cfg, "genesis")
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(cfg.GenesisBin(), currentBin)
|
||||
s.assertCurrentLink(cfg, "genesis")
|
||||
|
||||
err = cfg.SetCurrentUpgrade("chain2")
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
currentBin, err = cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, cfg.UpgradeBin("chain2"), currentBin)
|
||||
assertCurrentLink(t, cfg, filepath.Join("upgrades", "chain2"))
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(cfg.UpgradeBin("chain2"), currentBin)
|
||||
s.assertCurrentLink(cfg, filepath.Join("upgrades", "chain2"))
|
||||
}
|
||||
|
||||
func assertCurrentLink(t *testing.T, cfg Config, target string) {
|
||||
link := filepath.Join(cfg.Root(), currentLink)
|
||||
func (s *upgradeTestSuite) assertCurrentLink(cfg cosmovisor.Config, target string) {
|
||||
link := filepath.Join(cfg.Root(), "current")
|
||||
// ensure this is a symlink
|
||||
info, err := os.Lstat(link)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, os.ModeSymlink, info.Mode()&os.ModeSymlink)
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(os.ModeSymlink, info.Mode()&os.ModeSymlink)
|
||||
|
||||
dest, err := os.Readlink(link)
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
expected := filepath.Join(cfg.Root(), target)
|
||||
require.Equal(t, expected, dest)
|
||||
s.Require().Equal(expected, dest)
|
||||
}
|
||||
|
||||
// TODO: test with download (and test all download functions)
|
||||
func TestDoUpgradeNoDownloadUrl(t *testing.T) {
|
||||
home, err := copyTestData("validate")
|
||||
require.NoError(t, err)
|
||||
defer os.RemoveAll(home)
|
||||
|
||||
cfg := &Config{Home: home, Name: "dummyd", AllowDownloadBinaries: true}
|
||||
func (s *upgradeTestSuite) TestDoUpgradeNoDownloadUrl() {
|
||||
home := copyTestData(s.T(), "validate")
|
||||
cfg := &cosmovisor.Config{Home: home, Name: "dummyd", AllowDownloadBinaries: true}
|
||||
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
assert.Equal(t, cfg.GenesisBin(), currentBin)
|
||||
s.Require().Equal(cfg.GenesisBin(), currentBin)
|
||||
|
||||
// do upgrade ignores bad files
|
||||
for _, name := range []string{"missing", "nobin", "noexec"} {
|
||||
info := &UpgradeInfo{Name: name}
|
||||
err = DoUpgrade(cfg, info)
|
||||
require.Error(t, err, name)
|
||||
info := &cosmovisor.UpgradeInfo{Name: name}
|
||||
err = cosmovisor.DoUpgrade(cfg, info)
|
||||
s.Require().Error(err, name)
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, cfg.GenesisBin(), currentBin, name)
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(cfg.GenesisBin(), currentBin, name)
|
||||
}
|
||||
|
||||
// make sure it updates a few times
|
||||
for _, upgrade := range []string{"chain2", "chain3"} {
|
||||
// now set it to a valid upgrade and make sure CurrentBin is now set properly
|
||||
info := &UpgradeInfo{Name: upgrade}
|
||||
err = DoUpgrade(cfg, info)
|
||||
require.NoError(t, err)
|
||||
info := &cosmovisor.UpgradeInfo{Name: upgrade}
|
||||
err = cosmovisor.DoUpgrade(cfg, info)
|
||||
s.Require().NoError(err)
|
||||
// we should see current point to the new upgrade dir
|
||||
upgradeBin := cfg.UpgradeBin(upgrade)
|
||||
currentBin, err := cfg.CurrentBin()
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
assert.Equal(t, upgradeBin, currentBin)
|
||||
s.Require().Equal(upgradeBin, currentBin)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOsArch(t *testing.T) {
|
||||
func (s *upgradeTestSuite) TestOsArch() {
|
||||
// all download tests will fail if we are not on linux...
|
||||
assert.Equal(t, "linux/amd64", osArch())
|
||||
s.Require().Equal("linux/amd64", cosmovisor.OSArch())
|
||||
}
|
||||
|
||||
func TestGetDownloadURL(t *testing.T) {
|
||||
func (s *upgradeTestSuite) TestGetDownloadURL() {
|
||||
// all download tests will fail if we are not on linux...
|
||||
ref, err := filepath.Abs(filepath.FromSlash("./testdata/repo/ref_zipped"))
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
badref, err := filepath.Abs(filepath.FromSlash("./testdata/repo/zip_binary/autod.zip"))
|
||||
require.NoError(t, err)
|
||||
s.Require().NoError(err)
|
||||
|
||||
cases := map[string]struct {
|
||||
info string
|
||||
|
@ -173,20 +172,18 @@ func TestGetDownloadURL(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
url, err := GetDownloadURL(&UpgradeInfo{Info: tc.info})
|
||||
if tc.isErr {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tc.url, url)
|
||||
}
|
||||
})
|
||||
for _, tc := range cases {
|
||||
url, err := cosmovisor.GetDownloadURL(&cosmovisor.UpgradeInfo{Info: tc.info})
|
||||
if tc.isErr {
|
||||
s.Require().Error(err)
|
||||
} else {
|
||||
s.Require().NoError(err)
|
||||
s.Require().Equal(tc.url, url)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDownloadBinary(t *testing.T) {
|
||||
func (s *upgradeTestSuite) TestDownloadBinary() {
|
||||
cases := map[string]struct {
|
||||
url string
|
||||
canDownload bool
|
||||
|
@ -228,65 +225,55 @@ func TestDownloadBinary(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
// make temp dir
|
||||
home, err := copyTestData("download")
|
||||
require.NoError(t, err)
|
||||
defer os.RemoveAll(home)
|
||||
for _, tc := range cases {
|
||||
var err error
|
||||
// make temp dir
|
||||
home := copyTestData(s.T(), "download")
|
||||
|
||||
cfg := &Config{
|
||||
Home: home,
|
||||
Name: "autod",
|
||||
AllowDownloadBinaries: true,
|
||||
}
|
||||
cfg := &cosmovisor.Config{
|
||||
Home: home,
|
||||
Name: "autod",
|
||||
AllowDownloadBinaries: true,
|
||||
}
|
||||
|
||||
// if we have a relative path, make it absolute, but don't change eg. https://... urls
|
||||
url := tc.url
|
||||
if strings.HasPrefix(url, "./") {
|
||||
url, err = filepath.Abs(url)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
// if we have a relative path, make it absolute, but don't change eg. https://... urls
|
||||
url := tc.url
|
||||
if strings.HasPrefix(url, "./") {
|
||||
url, err = filepath.Abs(url)
|
||||
s.Require().NoError(err)
|
||||
}
|
||||
|
||||
upgrade := "amazonas"
|
||||
info := &UpgradeInfo{
|
||||
Name: upgrade,
|
||||
Info: fmt.Sprintf(`{"binaries":{"%s": "%s"}}`, osArch(), url),
|
||||
}
|
||||
upgrade := "amazonas"
|
||||
info := &cosmovisor.UpgradeInfo{
|
||||
Name: upgrade,
|
||||
Info: fmt.Sprintf(`{"binaries":{"%s": "%s"}}`, cosmovisor.OSArch(), url),
|
||||
}
|
||||
|
||||
err = DownloadBinary(cfg, info)
|
||||
if !tc.canDownload {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
err = cosmovisor.DownloadBinary(cfg, info)
|
||||
if !tc.canDownload {
|
||||
s.Require().Error(err)
|
||||
return
|
||||
}
|
||||
s.Require().NoError(err)
|
||||
|
||||
err = EnsureBinary(cfg.UpgradeBin(upgrade))
|
||||
if tc.validBinary {
|
||||
require.NoError(t, err)
|
||||
} else {
|
||||
require.Error(t, err)
|
||||
}
|
||||
})
|
||||
err = cosmovisor.EnsureBinary(cfg.UpgradeBin(upgrade))
|
||||
if tc.validBinary {
|
||||
s.Require().NoError(err)
|
||||
} else {
|
||||
s.Require().Error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// copyTestData makes a temp dir and then
|
||||
// "cp -r"s a subdirectory under testdata into it.
|
||||
// It returns the directory, which can now be used as Config.Home and modified safely.
|
||||
func copyTestData(subdir string) (string, error) {
|
||||
tmpdir, err := ioutil.TempDir("", "upgrade-manager-test")
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("couldn't create temporary directory: %w", err)
|
||||
}
|
||||
func copyTestData(t *testing.T, subdir string) string {
|
||||
t.Helper()
|
||||
|
||||
src := filepath.Join("testdata", subdir)
|
||||
tmpdir := t.TempDir()
|
||||
|
||||
err = copy2.Copy(src, tmpdir)
|
||||
if err != nil {
|
||||
os.RemoveAll(tmpdir)
|
||||
return "", fmt.Errorf("couldn't copy files: %w", err)
|
||||
}
|
||||
require.NoError(t, copy.Copy(filepath.Join("testdata", subdir), tmpdir))
|
||||
|
||||
return tmpdir, nil
|
||||
return tmpdir
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@ import (
|
|||
|
||||
"github.com/cosmos/cosmos-sdk/codec/legacy"
|
||||
cryptoAmino "github.com/cosmos/cosmos-sdk/crypto/codec"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
|
||||
)
|
||||
|
||||
|
@ -124,7 +125,7 @@ func unarmorBytes(armorStr, blockType string) (bz []byte, header map[string]stri
|
|||
// encrypt/decrypt with armor
|
||||
|
||||
// Encrypt and armor the private key.
|
||||
func EncryptArmorPrivKey(privKey crypto.PrivKey, passphrase string, algo string) string {
|
||||
func EncryptArmorPrivKey(privKey cryptotypes.PrivKey, passphrase string, algo string) string {
|
||||
saltBytes, encBytes := encryptPrivKey(privKey, passphrase)
|
||||
header := map[string]string{
|
||||
"kdf": "bcrypt",
|
||||
|
@ -143,7 +144,7 @@ func EncryptArmorPrivKey(privKey crypto.PrivKey, passphrase string, algo string)
|
|||
// encrypt the given privKey with the passphrase using a randomly
|
||||
// generated salt and the xsalsa20 cipher. returns the salt and the
|
||||
// encrypted priv key.
|
||||
func encryptPrivKey(privKey crypto.PrivKey, passphrase string) (saltBytes []byte, encBytes []byte) {
|
||||
func encryptPrivKey(privKey cryptotypes.PrivKey, passphrase string) (saltBytes []byte, encBytes []byte) {
|
||||
saltBytes = crypto.CRandBytes(16)
|
||||
key, err := bcrypt.GenerateFromPassword(saltBytes, []byte(passphrase), BcryptSecurityParameter)
|
||||
|
||||
|
@ -158,7 +159,7 @@ func encryptPrivKey(privKey crypto.PrivKey, passphrase string) (saltBytes []byte
|
|||
}
|
||||
|
||||
// UnarmorDecryptPrivKey returns the decrypted privkey, a string of the algo type, and an error
|
||||
func UnarmorDecryptPrivKey(armorStr string, passphrase string) (privKey crypto.PrivKey, algo string, err error) {
|
||||
func UnarmorDecryptPrivKey(armorStr string, passphrase string) (privKey cryptotypes.PrivKey, algo string, err error) {
|
||||
blockType, header, encBytes, err := armor.DecodeArmor(armorStr)
|
||||
if err != nil {
|
||||
return privKey, "", err
|
||||
|
@ -190,7 +191,7 @@ func UnarmorDecryptPrivKey(armorStr string, passphrase string) (privKey crypto.P
|
|||
return privKey, header[headerType], err
|
||||
}
|
||||
|
||||
func decryptPrivKey(saltBytes []byte, encBytes []byte, passphrase string) (privKey crypto.PrivKey, err error) {
|
||||
func decryptPrivKey(saltBytes []byte, encBytes []byte, passphrase string) (privKey cryptotypes.PrivKey, err error) {
|
||||
key, err := bcrypt.GenerateFromPassword(saltBytes, []byte(passphrase), BcryptSecurityParameter)
|
||||
if err != nil {
|
||||
return privKey, sdkerrors.Wrap(err, "error generating bcrypt key from passphrase")
|
||||
|
|
|
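For context, a hypothetical round trip through the re-typed armor helpers above (a sketch, not part of the diff; the crypto import path/alias and the passphrase are assumptions):

package main

import (
	"bytes"
	"fmt"

	"github.com/cosmos/cosmos-sdk/crypto"
	"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
)

func main() {
	// privKey satisfies cryptotypes.PrivKey, matching the new signatures above.
	privKey := secp256k1.GenPrivKey()

	armored := crypto.EncryptArmorPrivKey(privKey, "correct horse battery staple", "secp256k1")

	decrypted, algo, err := crypto.UnarmorDecryptPrivKey(armored, "correct horse battery staple")
	if err != nil {
		panic(err)
	}
	fmt.Println(algo)                                             // "secp256k1"
	fmt.Println(bytes.Equal(decrypted.Bytes(), privKey.Bytes())) // true
}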
@ -19,6 +19,7 @@ import (
|
|||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keyring"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
"github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
|
@ -46,7 +47,7 @@ func TestArmorUnarmorPrivKey(t *testing.T) {
|
|||
require.Contains(t, err.Error(), "unrecognized armor type")
|
||||
|
||||
// armor key manually
|
||||
encryptPrivKeyFn := func(privKey tmcrypto.PrivKey, passphrase string) (saltBytes []byte, encBytes []byte) {
|
||||
encryptPrivKeyFn := func(privKey cryptotypes.PrivKey, passphrase string) (saltBytes []byte, encBytes []byte) {
|
||||
saltBytes = tmcrypto.CRandBytes(16)
|
||||
key, err := bcrypt.GenerateFromPassword(saltBytes, []byte(passphrase), crypto.BcryptSecurityParameter)
|
||||
require.NoError(t, err)
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
package codec
|
||||
|
||||
import (
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
tmed25519 "github.com/tendermint/tendermint/crypto/ed25519"
|
||||
"github.com/tendermint/tendermint/crypto/sr25519"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec"
|
||||
|
@ -22,16 +20,9 @@ func init() {
|
|||
// RegisterCrypto registers all crypto dependency types with the provided Amino
|
||||
// codec.
|
||||
func RegisterCrypto(cdc *codec.LegacyAmino) {
|
||||
// TODO We now register both Tendermint's PubKey and our own PubKey. In the
|
||||
// long-term, we should move away from Tendermint's PubKey, and delete this
|
||||
// first line.
|
||||
cdc.RegisterInterface((*crypto.PubKey)(nil), nil)
|
||||
cdc.RegisterInterface((*cryptotypes.PubKey)(nil), nil)
|
||||
cdc.RegisterConcrete(sr25519.PubKey{},
|
||||
sr25519.PubKeyName, nil)
|
||||
// TODO Same as above, for ED25519
|
||||
cdc.RegisterConcrete(tmed25519.PubKey{},
|
||||
tmed25519.PubKeyName, nil)
|
||||
cdc.RegisterConcrete(&ed25519.PubKey{},
|
||||
ed25519.PubKeyName, nil)
|
||||
cdc.RegisterConcrete(&secp256k1.PubKey{},
|
||||
|
@ -39,12 +30,9 @@ func RegisterCrypto(cdc *codec.LegacyAmino) {
|
|||
cdc.RegisterConcrete(&kmultisig.LegacyAminoPubKey{},
|
||||
kmultisig.PubKeyAminoRoute, nil)
|
||||
|
||||
cdc.RegisterInterface((*crypto.PrivKey)(nil), nil)
|
||||
cdc.RegisterInterface((*cryptotypes.PrivKey)(nil), nil)
|
||||
cdc.RegisterConcrete(sr25519.PrivKey{},
|
||||
sr25519.PrivKeyName, nil)
|
||||
// TODO Same as above
|
||||
cdc.RegisterConcrete(tmed25519.PrivKey{},
|
||||
tmed25519.PrivKeyName, nil)
|
||||
cdc.RegisterConcrete(&ed25519.PrivKey{},
|
||||
ed25519.PrivKeyName, nil)
|
||||
cdc.RegisterConcrete(&secp256k1.PrivKey{},
|
||||
|
@ -52,13 +40,13 @@ func RegisterCrypto(cdc *codec.LegacyAmino) {
|
|||
}
|
||||
|
||||
// PrivKeyFromBytes unmarshals private key bytes and returns a PrivKey
|
||||
func PrivKeyFromBytes(privKeyBytes []byte) (privKey crypto.PrivKey, err error) {
|
||||
func PrivKeyFromBytes(privKeyBytes []byte) (privKey cryptotypes.PrivKey, err error) {
|
||||
err = amino.UnmarshalBinaryBare(privKeyBytes, &privKey)
|
||||
return
|
||||
}
|
||||
|
||||
// PubKeyFromBytes unmarshals public key bytes and returns a PubKey
|
||||
func PubKeyFromBytes(pubKeyBytes []byte) (pubKey crypto.PubKey, err error) {
|
||||
func PubKeyFromBytes(pubKeyBytes []byte) (pubKey cryptotypes.PubKey, err error) {
|
||||
err = amino.UnmarshalBinaryBare(pubKeyBytes, &pubKey)
|
||||
return
|
||||
}
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
package codec
|
||||
|
||||
import (
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
|
||||
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
|
@ -12,14 +10,6 @@ import (
|
|||
|
||||
// RegisterInterfaces registers the sdk.Tx interface.
|
||||
func RegisterInterfaces(registry codectypes.InterfaceRegistry) {
|
||||
// TODO We now register both Tendermint's PubKey and our own PubKey. In the
|
||||
// long-term, we should move away from Tendermint's PubKey, and delete
|
||||
// these lines.
|
||||
registry.RegisterInterface("tendermint.crypto.PubKey", (*tmcrypto.PubKey)(nil))
|
||||
registry.RegisterImplementations((*tmcrypto.PubKey)(nil), &ed25519.PubKey{})
|
||||
registry.RegisterImplementations((*tmcrypto.PubKey)(nil), &secp256k1.PubKey{})
|
||||
registry.RegisterImplementations((*tmcrypto.PubKey)(nil), &multisig.LegacyAminoPubKey{})
|
||||
|
||||
registry.RegisterInterface("cosmos.crypto.PubKey", (*cryptotypes.PubKey)(nil))
|
||||
registry.RegisterImplementations((*cryptotypes.PubKey)(nil), &ed25519.PubKey{})
|
||||
registry.RegisterImplementations((*cryptotypes.PubKey)(nil), &secp256k1.PubKey{})
|
||||
|
|
|
@ -0,0 +1,68 @@
|
|||
package codec
|
||||
|
||||
import (
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
"github.com/tendermint/tendermint/crypto/encoding"
|
||||
tmprotocrypto "github.com/tendermint/tendermint/proto/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
|
||||
)
|
||||
|
||||
// FromTmProtoPublicKey converts a TM's tmprotocrypto.PublicKey into our own PubKey.
|
||||
func FromTmProtoPublicKey(protoPk tmprotocrypto.PublicKey) (cryptotypes.PubKey, error) {
|
||||
switch protoPk := protoPk.Sum.(type) {
|
||||
case *tmprotocrypto.PublicKey_Ed25519:
|
||||
return &ed25519.PubKey{
|
||||
Key: protoPk.Ed25519,
|
||||
}, nil
|
||||
case *tmprotocrypto.PublicKey_Secp256K1:
|
||||
return &secp256k1.PubKey{
|
||||
Key: protoPk.Secp256K1,
|
||||
}, nil
|
||||
default:
|
||||
return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "cannot convert %v from Tendermint public key", protoPk)
|
||||
}
|
||||
}
|
||||
|
||||
// ToTmProtoPublicKey converts our own PubKey to TM's tmprotocrypto.PublicKey.
|
||||
func ToTmProtoPublicKey(pk cryptotypes.PubKey) (tmprotocrypto.PublicKey, error) {
|
||||
switch pk := pk.(type) {
|
||||
case *ed25519.PubKey:
|
||||
return tmprotocrypto.PublicKey{
|
||||
Sum: &tmprotocrypto.PublicKey_Ed25519{
|
||||
Ed25519: pk.Key,
|
||||
},
|
||||
}, nil
|
||||
case *secp256k1.PubKey:
|
||||
return tmprotocrypto.PublicKey{
|
||||
Sum: &tmprotocrypto.PublicKey_Secp256K1{
|
||||
Secp256K1: pk.Key,
|
||||
},
|
||||
}, nil
|
||||
default:
|
||||
return tmprotocrypto.PublicKey{}, sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "cannot convert %v to Tendermint public key", pk)
|
||||
}
|
||||
}
|
||||
|
||||
// FromTmPubKeyInterface converts TM's tmcrypto.PubKey to our own PubKey.
|
||||
func FromTmPubKeyInterface(tmPk tmcrypto.PubKey) (cryptotypes.PubKey, error) {
|
||||
tmProtoPk, err := encoding.PubKeyToProto(tmPk)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return FromTmProtoPublicKey(tmProtoPk)
|
||||
}
|
||||
|
||||
// ToTmPubKeyInterface converts our own PubKey to TM's tmcrypto.PubKey.
|
||||
func ToTmPubKeyInterface(pk cryptotypes.PubKey) (tmcrypto.PubKey, error) {
|
||||
tmProtoPk, err := ToTmProtoPublicKey(pk)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return encoding.PubKeyFromProto(tmProtoPk)
|
||||
}
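A brief illustrative round trip (not part of the diff) through the two proto conversions above; the cryptocodec import alias is an assumption:

package main

import (
	"fmt"

	cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
	"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
)

func main() {
	pk := ed25519.GenPrivKey().PubKey() // a cryptotypes.PubKey

	// SDK PubKey -> Tendermint's tmprotocrypto.PublicKey ...
	tmProtoPk, err := cryptocodec.ToTmProtoPublicKey(pk)
	if err != nil {
		panic(err)
	}

	// ... and back again.
	back, err := cryptocodec.FromTmProtoPublicKey(tmProtoPk)
	if err != nil {
		panic(err)
	}
	fmt.Println(back.Equals(pk)) // true
}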
|
|
@ -2,9 +2,9 @@ package hd
|
|||
|
||||
import (
|
||||
bip39 "github.com/cosmos/go-bip39"
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
||||
// PubKeyType defines an algorithm to derive key-pairs which can be used for cryptographic signing.
|
||||
|
@ -28,11 +28,11 @@ var (
|
|||
)
|
||||
|
||||
type DeriveFn func(mnemonic string, bip39Passphrase, hdPath string) ([]byte, error)
|
||||
type GenerateFn func(bz []byte) crypto.PrivKey
|
||||
type GenerateFn func(bz []byte) types.PrivKey
|
||||
|
||||
type WalletGenerator interface {
|
||||
Derive(mnemonic string, bip39Passphrase, hdPath string) ([]byte, error)
|
||||
Generate(bz []byte) crypto.PrivKey
|
||||
Generate(bz []byte) types.PrivKey
|
||||
}
|
||||
|
||||
type secp256k1Algo struct {
|
||||
|
@ -62,7 +62,7 @@ func (s secp256k1Algo) Derive() DeriveFn {
|
|||
|
||||
// Generate generates a secp256k1 private key from the given bytes.
|
||||
func (s secp256k1Algo) Generate() GenerateFn {
|
||||
return func(bz []byte) crypto.PrivKey {
|
||||
return func(bz []byte) types.PrivKey {
|
||||
var bzArr = make([]byte, secp256k1.PrivKeySize)
|
||||
copy(bzArr, bz)
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ type addrData struct {
|
|||
}
|
||||
|
||||
func TestFullFundraiserPath(t *testing.T) {
|
||||
require.Equal(t, "44'/118'/0'/0/0", hd.NewFundraiserParams(0, 118, 0).String())
|
||||
require.Equal(t, "m/44'/118'/0'/0/0", hd.NewFundraiserParams(0, 118, 0).String())
|
||||
}
|
||||
|
||||
func initFundraiserTestVectors(t *testing.T) []addrData {
|
||||
|
@ -63,8 +63,9 @@ func TestFundraiserCompatibility(t *testing.T) {
|
|||
t.Logf("ROUND: %d MNEMONIC: %s", i, d.Mnemonic)
|
||||
|
||||
master, ch := hd.ComputeMastersFromSeed(seed)
|
||||
priv, err := hd.DerivePrivateKeyForPath(master, ch, "44'/118'/0'/0/0")
|
||||
priv, err := hd.DerivePrivateKeyForPath(master, ch, "m/44'/118'/0'/0/0")
|
||||
require.NoError(t, err)
|
||||
|
||||
privKey := &secp256k1.PrivKey{Key: priv}
|
||||
pub := privKey.PubKey()
|
||||
|
||||
|
|
|
@ -36,52 +36,66 @@ func NewParams(purpose, coinType, account uint32, change bool, addressIdx uint32
|
|||
}
|
||||
}
|
||||
|
||||
// Parse the BIP44 path and unmarshal into the struct.
|
||||
// NewParamsFromPath parses the BIP44 path and unmarshals it into a BIP44Params. It supports both
|
||||
// absolute and relative paths.
|
||||
func NewParamsFromPath(path string) (*BIP44Params, error) {
|
||||
spl := strings.Split(path, "/")
|
||||
|
||||
// Handle absolute or relative paths
|
||||
switch {
|
||||
case spl[0] == path:
|
||||
return nil, fmt.Errorf("path %s doesn't contain '/' separators", path)
|
||||
|
||||
case strings.TrimSpace(spl[0]) == "":
|
||||
return nil, fmt.Errorf("ambiguous path %s: use 'm/' prefix for absolute paths, or no leading '/' for relative ones", path)
|
||||
|
||||
case strings.TrimSpace(spl[0]) == "m":
|
||||
spl = spl[1:]
|
||||
}
|
||||
|
||||
if len(spl) != 5 {
|
||||
return nil, fmt.Errorf("path length is wrong. Expected 5, got %d", len(spl))
|
||||
return nil, fmt.Errorf("invalid path length %s", path)
|
||||
}
|
||||
|
||||
// Check items can be parsed
|
||||
purpose, err := hardenedInt(spl[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("invalid HD path purpose %s: %w", spl[0], err)
|
||||
}
|
||||
|
||||
coinType, err := hardenedInt(spl[1])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("invalid HD path coin type %s: %w", spl[1], err)
|
||||
}
|
||||
|
||||
account, err := hardenedInt(spl[2])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("invalid HD path account %s: %w", spl[2], err)
|
||||
}
|
||||
|
||||
change, err := hardenedInt(spl[3])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("invalid HD path change %s: %w", spl[3], err)
|
||||
}
|
||||
|
||||
addressIdx, err := hardenedInt(spl[4])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("invalid HD path address index %s: %w", spl[4], err)
|
||||
}
|
||||
|
||||
// Confirm valid values
|
||||
if spl[0] != "44'" {
|
||||
return nil, fmt.Errorf("first field in path must be 44', got %v", spl[0])
|
||||
return nil, fmt.Errorf("first field in path must be 44', got %s", spl[0])
|
||||
}
|
||||
|
||||
if !isHardened(spl[1]) || !isHardened(spl[2]) {
|
||||
return nil,
|
||||
fmt.Errorf("second and third field in path must be hardened (ie. contain the suffix ', got %v and %v", spl[1], spl[2])
|
||||
fmt.Errorf("second and third field in path must be hardened (ie. contain the suffix ', got %s and %s", spl[1], spl[2])
|
||||
}
|
||||
|
||||
if isHardened(spl[3]) || isHardened(spl[4]) {
|
||||
return nil,
|
||||
fmt.Errorf("fourth and fifth field in path must not be hardened (ie. not contain the suffix ', got %v and %v", spl[3], spl[4])
|
||||
fmt.Errorf("fourth and fifth field in path must not be hardened (ie. not contain the suffix ', got %s and %s", spl[3], spl[4])
|
||||
}
|
||||
|
||||
if !(change == 0 || change == 1) {
|
||||
|
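To illustrate the absolute/relative handling added above (a sketch, not part of the diff), both spellings parse to the same params and stringify with the new m/ prefix:

package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/crypto/hd"
)

func main() {
	abs, err := hd.NewParamsFromPath("m/44'/118'/0'/0/0") // absolute, "m/" prefix
	if err != nil {
		panic(err)
	}
	rel, err := hd.NewParamsFromPath("44'/118'/0'/0/0") // relative, no leading separator
	if err != nil {
		panic(err)
	}

	fmt.Println(abs.String()) // m/44'/118'/0'/0/0
	fmt.Println(rel.String()) // m/44'/118'/0'/0/0
}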
@ -135,6 +149,8 @@ func (p BIP44Params) DerivationPath() []uint32 {
|
|||
}
|
||||
}
|
||||
|
||||
// String returns the full absolute HD path of the BIP44 (https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki) params:
|
||||
// m / purpose' / coin_type' / account' / change / address_index
|
||||
func (p BIP44Params) String() string {
|
||||
var changeStr string
|
||||
if p.Change {
|
||||
|
@ -142,8 +158,7 @@ func (p BIP44Params) String() string {
|
|||
} else {
|
||||
changeStr = "0"
|
||||
}
|
||||
// m / Purpose' / coin_type' / Account' / Change / address_index
|
||||
return fmt.Sprintf("%d'/%d'/%d'/%s/%d",
|
||||
return fmt.Sprintf("m/%d'/%d'/%d'/%s/%d",
|
||||
p.Purpose,
|
||||
p.CoinType,
|
||||
p.Account,
|
||||
|
@ -165,6 +180,13 @@ func DerivePrivateKeyForPath(privKeyBytes, chainCode [32]byte, path string) ([]b
|
|||
data := privKeyBytes
|
||||
parts := strings.Split(path, "/")
|
||||
|
||||
switch {
|
||||
case parts[0] == path:
|
||||
return nil, fmt.Errorf("path '%s' doesn't contain '/' separators", path)
|
||||
case strings.TrimSpace(parts[0]) == "m":
|
||||
parts = parts[1:]
|
||||
}
|
||||
|
||||
for _, part := range parts {
|
||||
// do we have an apostrophe?
|
||||
harden := part[len(part)-1:] == "'"
|
||||
|
@ -173,9 +195,12 @@ func DerivePrivateKeyForPath(privKeyBytes, chainCode [32]byte, path string) ([]b
|
|||
part = part[:len(part)-1]
|
||||
}
|
||||
|
||||
idx, err := strconv.ParseUint(part, 10, 32)
|
||||
// As per the extended keys specification in
|
||||
// https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#extended-keys
|
||||
// index values are in the range [0, 1<<31-1] aka [0, max(int32)]
|
||||
idx, err := strconv.ParseUint(part, 10, 31)
|
||||
if err != nil {
|
||||
return []byte{}, fmt.Errorf("invalid BIP 32 path: %s", err)
|
||||
return []byte{}, fmt.Errorf("invalid BIP 32 path %s: %w", path, err)
|
||||
}
|
||||
|
||||
data, chainCode = derivePrivateKey(data, chainCode, uint32(idx), harden)
|
||||
|
@ -185,7 +210,7 @@ func DerivePrivateKeyForPath(privKeyBytes, chainCode [32]byte, path string) ([]b
|
|||
n := copy(derivedKey, data[:])
|
||||
|
||||
if n != 32 || len(data) != 32 {
|
||||
return []byte{}, fmt.Errorf("expected a (secp256k1) key of length 32, got length: %v", len(data))
|
||||
return []byte{}, fmt.Errorf("expected a key of length 32, got length: %d", len(data))
|
||||
}
|
||||
|
||||
return derivedKey, nil
|
||||
|
|
|
@ -9,7 +9,6 @@ import (
|
|||
"github.com/cosmos/cosmos-sdk/types"
|
||||
|
||||
bip39 "github.com/cosmos/go-bip39"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
|
@ -20,26 +19,15 @@ func mnemonicToSeed(mnemonic string) []byte {
|
|||
return bip39.NewSeed(mnemonic, defaultBIP39Passphrase)
|
||||
}
|
||||
|
||||
// nolint:govet
|
||||
func ExampleStringifyPathParams() {
|
||||
path := hd.NewParams(44, 0, 0, false, 0)
|
||||
fmt.Println(path.String())
|
||||
path = hd.NewParams(44, 33, 7, true, 9)
|
||||
fmt.Println(path.String())
|
||||
// Output:
|
||||
// 44'/0'/0'/0/0
|
||||
// 44'/33'/7'/1/9
|
||||
}
|
||||
|
||||
func TestStringifyFundraiserPathParams(t *testing.T) {
|
||||
path := hd.NewFundraiserParams(4, types.CoinType, 22)
|
||||
require.Equal(t, "44'/118'/4'/0/22", path.String())
|
||||
require.Equal(t, "m/44'/118'/4'/0/22", path.String())
|
||||
|
||||
path = hd.NewFundraiserParams(4, types.CoinType, 57)
|
||||
require.Equal(t, "44'/118'/4'/0/57", path.String())
|
||||
require.Equal(t, "m/44'/118'/4'/0/57", path.String())
|
||||
|
||||
path = hd.NewFundraiserParams(4, 12345, 57)
|
||||
require.Equal(t, "44'/12345'/4'/0/57", path.String())
|
||||
require.Equal(t, "m/44'/12345'/4'/0/57", path.String())
|
||||
}
|
||||
|
||||
func TestPathToArray(t *testing.T) {
|
||||
|
@ -55,56 +43,160 @@ func TestParamsFromPath(t *testing.T) {
|
|||
params *hd.BIP44Params
|
||||
path string
|
||||
}{
|
||||
{&hd.BIP44Params{44, 0, 0, false, 0}, "44'/0'/0'/0/0"},
|
||||
{&hd.BIP44Params{44, 1, 0, false, 0}, "44'/1'/0'/0/0"},
|
||||
{&hd.BIP44Params{44, 0, 1, false, 0}, "44'/0'/1'/0/0"},
|
||||
{&hd.BIP44Params{44, 0, 0, true, 0}, "44'/0'/0'/1/0"},
|
||||
{&hd.BIP44Params{44, 0, 0, false, 1}, "44'/0'/0'/0/1"},
|
||||
{&hd.BIP44Params{44, 1, 1, true, 1}, "44'/1'/1'/1/1"},
|
||||
{&hd.BIP44Params{44, 118, 52, true, 41}, "44'/118'/52'/1/41"},
|
||||
{&hd.BIP44Params{44, 0, 0, false, 0}, "m/44'/0'/0'/0/0"},
|
||||
{&hd.BIP44Params{44, 1, 0, false, 0}, "m/44'/1'/0'/0/0"},
|
||||
{&hd.BIP44Params{44, 0, 1, false, 0}, "m/44'/0'/1'/0/0"},
|
||||
{&hd.BIP44Params{44, 0, 0, true, 0}, "m/44'/0'/0'/1/0"},
|
||||
{&hd.BIP44Params{44, 0, 0, false, 1}, "m/44'/0'/0'/0/1"},
|
||||
{&hd.BIP44Params{44, 1, 1, true, 1}, "m/44'/1'/1'/1/1"},
|
||||
{&hd.BIP44Params{44, 118, 52, true, 41}, "m/44'/118'/52'/1/41"},
|
||||
}
|
||||
|
||||
for i, c := range goodCases {
|
||||
params, err := hd.NewParamsFromPath(c.path)
|
||||
errStr := fmt.Sprintf("%d %v", i, c)
|
||||
assert.NoError(t, err, errStr)
|
||||
assert.EqualValues(t, c.params, params, errStr)
|
||||
assert.Equal(t, c.path, c.params.String())
|
||||
require.NoError(t, err, errStr)
|
||||
require.EqualValues(t, c.params, params, errStr)
|
||||
require.Equal(t, c.path, c.params.String())
|
||||
}
|
||||
|
||||
badCases := []struct {
|
||||
path string
|
||||
}{
|
||||
{"43'/0'/0'/0/0"}, // doesnt start with 44
|
||||
{"44'/1'/0'/0/0/5"}, // too many fields
|
||||
{"44'/0'/1'/0"}, // too few fields
|
||||
{"44'/0'/0'/2/0"}, // change field can only be 0/1
|
||||
{"44/0'/0'/0/0"}, // first field needs '
|
||||
{"44'/0/0'/0/0"}, // second field needs '
|
||||
{"44'/0'/0/0/0"}, // third field needs '
|
||||
{"44'/0'/0'/0'/0"}, // fourth field must not have '
|
||||
{"44'/0'/0'/0/0'"}, // fifth field must not have '
|
||||
{"44'/-1'/0'/0/0"}, // no negatives
|
||||
{"44'/0'/0'/-1/0"}, // no negatives
|
||||
{"a'/0'/0'/-1/0"}, // valid values
|
||||
{"0/X/0'/-1/0"}, // valid values
|
||||
{"44'/0'/X/-1/0"}, // valid values
|
||||
{"44'/0'/0'/%/0"}, // valid values
|
||||
{"44'/0'/0'/0/%"}, // valid values
|
||||
{"m/43'/0'/0'/0/0"}, // doesn't start with 44
|
||||
{"m/44'/1'/0'/0/0/5"}, // too many fields
|
||||
{"m/44'/0'/1'/0"}, // too few fields
|
||||
{"m/44'/0'/0'/2/0"}, // change field can only be 0/1
|
||||
{"m/44/0'/0'/0/0"}, // first field needs '
|
||||
{"m/44'/0/0'/0/0"}, // second field needs '
|
||||
{"m/44'/0'/0/0/0"}, // third field needs '
|
||||
{"m/44'/0'/0'/0'/0"}, // fourth field must not have '
|
||||
{"m/44'/0'/0'/0/0'"}, // fifth field must not have '
|
||||
{"m/44'/-1'/0'/0/0"}, // no negatives
|
||||
{"m/44'/0'/0'/-1/0"}, // no negatives
|
||||
{"m/a'/0'/0'/-1/0"}, // invalid values
|
||||
{"m/0/X/0'/-1/0"}, // invalid values
|
||||
{"m/44'/0'/X/-1/0"}, // invalid values
|
||||
{"m/44'/0'/0'/%/0"}, // invalid values
|
||||
{"m/44'/0'/0'/0/%"}, // invalid values
|
||||
{"m44'0'0'00"}, // no separators
|
||||
{" /44'/0'/0'/0/0"}, // blank first component
|
||||
}
|
||||
|
||||
for i, c := range badCases {
|
||||
params, err := hd.NewParamsFromPath(c.path)
|
||||
errStr := fmt.Sprintf("%d %v", i, c)
|
||||
assert.Nil(t, params, errStr)
|
||||
assert.Error(t, err, errStr)
|
||||
require.Nil(t, params, errStr)
|
||||
require.Error(t, err, errStr)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// nolint:govet
|
||||
func ExampleSomeBIP32TestVecs() {
|
||||
func TestCreateHDPath(t *testing.T) {
|
||||
type args struct {
|
||||
coinType uint32
|
||||
account uint32
|
||||
index uint32
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want hd.BIP44Params
|
||||
}{
|
||||
{"m/44'/0'/0'/0/0", args{0, 0, 0}, hd.BIP44Params{Purpose: 44}},
|
||||
{"m/44'/114'/0'/0/0", args{114, 0, 0}, hd.BIP44Params{Purpose: 44, CoinType: 114, Account: 0, AddressIndex: 0}},
|
||||
{"m/44'/114'/1'/1/0", args{114, 1, 1}, hd.BIP44Params{Purpose: 44, CoinType: 114, Account: 1, AddressIndex: 1}},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
tt := tt
|
||||
require.Equal(t, tt.want, *hd.CreateHDPath(tt.args.coinType, tt.args.account, tt.args.index))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Tests to ensure that any index value is in the range [0, max(int32)] as per
// the extended keys specification. If the index belongs to that of a hardened key,
// its 0x80000000 bit will be set, so we can still accept values in [0, max(int32)] and then
// increase its value as deriveKeyPath already augments.
// See issue https://github.com/cosmos/cosmos-sdk/issues/7627.
func TestDeriveHDPathRange(t *testing.T) {
seed := mnemonicToSeed("I am become Death, the destroyer of worlds!")

tests := []struct {
path string
wantErr string
}{
{
path: "m/1'/2147483648/0'/0/0",
wantErr: "out of range",
},
{
path: "m/2147483648'/1/0/0",
wantErr: "out of range",
},
{
path: "m/2147483648'/2147483648/0'/0/0",
wantErr: "out of range",
},
{
path: "m/1'/-5/0'/0/0",
wantErr: "invalid syntax",
},
{
path: "m/-2147483646'/1/0/0",
wantErr: "invalid syntax",
},
{
path: "m/-2147483648'/-2147483648/0'/0/0",
wantErr: "invalid syntax",
},
{
path: "m44'118'0'00",
wantErr: "path 'm44'118'0'00' doesn't contain '/' separators",
},
{
path: "",
wantErr: "path '' doesn't contain '/' separators",
},
{
// Should pass.
path: "m/1'/2147483647'/1/0'/0/0",
},
{
// Should pass.
path: "1'/2147483647'/1/0'/0/0",
},
}

for _, tt := range tests {
tt := tt
t.Run(tt.path, func(t *testing.T) {
master, ch := hd.ComputeMastersFromSeed(seed)
_, err := hd.DerivePrivateKeyForPath(master, ch, tt.path)

if tt.wantErr == "" {
require.NoError(t, err, "unexpected error")
} else {
require.Error(t, err, "expected a report of an int overflow")
require.Contains(t, err.Error(), tt.wantErr)
}
})
}
}
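
(Aside, not part of this commit: the hardened-offset arithmetic that the comment and test above rely on can be sketched in Go as below. The helper name hardenedIndex is hypothetical; only the 0x80000000 convention and the [0, max(int32)] bound come from the test.)

package main

import "fmt"

// hardenedIndex illustrates the convention referenced above: a hardened child
// index is the plain index with the 0x80000000 bit set, so user-supplied
// indexes must stay within [0, 2^31-1] or the hardened offset would overflow uint32.
func hardenedIndex(i uint32) (uint32, error) {
	if i > 0x7FFFFFFF { // max(int32)
		return 0, fmt.Errorf("index %d is out of range", i)
	}
	return i + 0x80000000, nil
}

func main() {
	idx, _ := hardenedIndex(2147483647) // largest accepted value
	fmt.Printf("%#x\n", idx)            // prints 0xffffffff
	_, err := hardenedIndex(2147483648) // rejected, like the "out of range" cases above
	fmt.Println(err)
}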

func ExampleStringifyPathParams() {
path := hd.NewParams(44, 0, 0, false, 0)
fmt.Println(path.String())
path = hd.NewParams(44, 33, 7, true, 9)
fmt.Println(path.String())
// Output:
// m/44'/0'/0'/0/0
// m/44'/33'/7'/1/9
}

func ExampleSomeBIP32TestVecs() {
seed := mnemonicToSeed("barrel original fuel morning among eternal " +
"filter ball stove pluck matrix mechanic")
master, ch := hd.ComputeMastersFromSeed(seed)

@@ -189,27 +281,3 @@ func ExampleSomeBIP32TestVecs() {
//
// c4c11d8c03625515905d7e89d25dfc66126fbc629ecca6db489a1a72fc4bda78
}

func TestCreateHDPath(t *testing.T) {
type args struct {
coinType uint32
account uint32
index uint32
}
tests := []struct {
name string
args args
want hd.BIP44Params
}{
{"44'/0'/0'/0/0", args{0, 0, 0}, hd.BIP44Params{Purpose: 44}},
{"44'/114'/0'/0/0", args{114, 0, 0}, hd.BIP44Params{Purpose: 44, CoinType: 114, Account: 0, AddressIndex: 0}},
{"44'/114'/1'/1/0", args{114, 1, 1}, hd.BIP44Params{Purpose: 44, CoinType: 114, Account: 1, AddressIndex: 1}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
tt := tt
require.Equal(t, tt.want, *hd.CreateHDPath(tt.args.coinType, tt.args.account, tt.args.index))
})
}
}
|
|
|
@ -32,7 +32,7 @@
|
|||
// that the keyring keyring may be kept unlocked for the whole duration of the user
|
||||
// session.
|
||||
// file This backend more closely resembles the previous keyring storage used prior to
|
||||
// v0.38.1. It stores the keyring encrypted within the apps configuration directory.
|
||||
// v0.38.1. It stores the keyring encrypted within the app's configuration directory.
|
||||
// This keyring will request a password each time it is accessed, which may occur
|
||||
// multiple times in a single command resulting in repeated password prompts.
|
||||
// kwallet This backend uses KDE Wallet Manager as a credentials management application:
|
||||
|
|
|
@ -3,11 +3,10 @@ package keyring
|
|||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
"github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
|
@ -18,7 +17,7 @@ type Info interface {
|
|||
// Name of the key
|
||||
GetName() string
|
||||
// Public key
|
||||
GetPubKey() crypto.PubKey
|
||||
GetPubKey() cryptotypes.PubKey
|
||||
// Address
|
||||
GetAddress() types.AccAddress
|
||||
// Bip44 Path
|
||||
|
@ -37,13 +36,13 @@ var (
|
|||
// localInfo is the public information about a locally stored key
|
||||
// Note: Algo must be last field in struct for backwards amino compatibility
|
||||
type localInfo struct {
|
||||
Name string `json:"name"`
|
||||
PubKey crypto.PubKey `json:"pubkey"`
|
||||
PrivKeyArmor string `json:"privkey.armor"`
|
||||
Algo hd.PubKeyType `json:"algo"`
|
||||
Name string `json:"name"`
|
||||
PubKey cryptotypes.PubKey `json:"pubkey"`
|
||||
PrivKeyArmor string `json:"privkey.armor"`
|
||||
Algo hd.PubKeyType `json:"algo"`
|
||||
}
|
||||
|
||||
func newLocalInfo(name string, pub crypto.PubKey, privArmor string, algo hd.PubKeyType) Info {
|
||||
func newLocalInfo(name string, pub cryptotypes.PubKey, privArmor string, algo hd.PubKeyType) Info {
|
||||
return &localInfo{
|
||||
Name: name,
|
||||
PubKey: pub,
|
||||
|
@ -63,7 +62,7 @@ func (i localInfo) GetName() string {
|
|||
}
|
||||
|
||||
// GetType implements Info interface
|
||||
func (i localInfo) GetPubKey() crypto.PubKey {
|
||||
func (i localInfo) GetPubKey() cryptotypes.PubKey {
|
||||
return i.PubKey
|
||||
}
|
||||
|
||||
|
@ -85,13 +84,13 @@ func (i localInfo) GetPath() (*hd.BIP44Params, error) {
|
|||
// ledgerInfo is the public information about a Ledger key
|
||||
// Note: Algo must be last field in struct for backwards amino compatibility
|
||||
type ledgerInfo struct {
|
||||
Name string `json:"name"`
|
||||
PubKey crypto.PubKey `json:"pubkey"`
|
||||
Path hd.BIP44Params `json:"path"`
|
||||
Algo hd.PubKeyType `json:"algo"`
|
||||
Name string `json:"name"`
|
||||
PubKey cryptotypes.PubKey `json:"pubkey"`
|
||||
Path hd.BIP44Params `json:"path"`
|
||||
Algo hd.PubKeyType `json:"algo"`
|
||||
}
|
||||
|
||||
func newLedgerInfo(name string, pub crypto.PubKey, path hd.BIP44Params, algo hd.PubKeyType) Info {
|
||||
func newLedgerInfo(name string, pub cryptotypes.PubKey, path hd.BIP44Params, algo hd.PubKeyType) Info {
|
||||
return &ledgerInfo{
|
||||
Name: name,
|
||||
PubKey: pub,
|
||||
|
@ -111,7 +110,7 @@ func (i ledgerInfo) GetName() string {
|
|||
}
|
||||
|
||||
// GetPubKey implements Info interface
|
||||
func (i ledgerInfo) GetPubKey() crypto.PubKey {
|
||||
func (i ledgerInfo) GetPubKey() cryptotypes.PubKey {
|
||||
return i.PubKey
|
||||
}
|
||||
|
||||
|
@ -134,12 +133,12 @@ func (i ledgerInfo) GetPath() (*hd.BIP44Params, error) {
|
|||
// offlineInfo is the public information about an offline key
|
||||
// Note: Algo must be last field in struct for backwards amino compatibility
|
||||
type offlineInfo struct {
|
||||
Name string `json:"name"`
|
||||
PubKey crypto.PubKey `json:"pubkey"`
|
||||
Algo hd.PubKeyType `json:"algo"`
|
||||
Name string `json:"name"`
|
||||
PubKey cryptotypes.PubKey `json:"pubkey"`
|
||||
Algo hd.PubKeyType `json:"algo"`
|
||||
}
|
||||
|
||||
func newOfflineInfo(name string, pub crypto.PubKey, algo hd.PubKeyType) Info {
|
||||
func newOfflineInfo(name string, pub cryptotypes.PubKey, algo hd.PubKeyType) Info {
|
||||
return &offlineInfo{
|
||||
Name: name,
|
||||
PubKey: pub,
|
||||
|
@ -158,7 +157,7 @@ func (i offlineInfo) GetName() string {
|
|||
}
|
||||
|
||||
// GetPubKey implements Info interface
|
||||
func (i offlineInfo) GetPubKey() crypto.PubKey {
|
||||
func (i offlineInfo) GetPubKey() cryptotypes.PubKey {
|
||||
return i.PubKey
|
||||
}
|
||||
|
||||
|
@ -178,20 +177,20 @@ func (i offlineInfo) GetPath() (*hd.BIP44Params, error) {
|
|||
}
|
||||
|
||||
type multisigPubKeyInfo struct {
|
||||
PubKey crypto.PubKey `json:"pubkey"`
|
||||
Weight uint `json:"weight"`
|
||||
PubKey cryptotypes.PubKey `json:"pubkey"`
|
||||
Weight uint `json:"weight"`
|
||||
}
|
||||
|
||||
// multiInfo is the public information about a multisig key
|
||||
type multiInfo struct {
|
||||
Name string `json:"name"`
|
||||
PubKey crypto.PubKey `json:"pubkey"`
|
||||
PubKey cryptotypes.PubKey `json:"pubkey"`
|
||||
Threshold uint `json:"threshold"`
|
||||
PubKeys []multisigPubKeyInfo `json:"pubkeys"`
|
||||
}
|
||||
|
||||
// NewMultiInfo creates a new multiInfo instance
|
||||
func NewMultiInfo(name string, pub crypto.PubKey) Info {
|
||||
func NewMultiInfo(name string, pub cryptotypes.PubKey) Info {
|
||||
multiPK := pub.(*multisig.LegacyAminoPubKey)
|
||||
|
||||
pubKeys := make([]multisigPubKeyInfo, len(multiPK.PubKeys))
|
||||
|
@ -219,7 +218,7 @@ func (i multiInfo) GetName() string {
|
|||
}
|
||||
|
||||
// GetPubKey implements Info interface
|
||||
func (i multiInfo) GetPubKey() crypto.PubKey {
|
||||
func (i multiInfo) GetPubKey() cryptotypes.PubKey {
|
||||
return i.PubKey
|
||||
}
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ import (
|
|||
cryptoamino "github.com/cosmos/cosmos-sdk/crypto/codec"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/ledger"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
|
||||
)
|
||||
|
@@ -76,10 +77,10 @@ type Keyring interface {
SaveLedgerKey(uid string, algo SignatureAlgo, hrp string, coinType, account, index uint32) (Info, error)

// SavePubKey stores a public key and returns the persisted Info structure.
SavePubKey(uid string, pubkey tmcrypto.PubKey, algo hd.PubKeyType) (Info, error)
SavePubKey(uid string, pubkey types.PubKey, algo hd.PubKeyType) (Info, error)

// SaveMultisig stores and returns a new multsig (offline) key reference.
SaveMultisig(uid string, pubkey tmcrypto.PubKey) (Info, error)
SaveMultisig(uid string, pubkey types.PubKey) (Info, error)

Signer

@@ -90,10 +91,10 @@ type Keyring interface {
// Signer is implemented by key stores that want to provide signing capabilities.
type Signer interface {
// Sign sign byte messages with a user key.
Sign(uid string, msg []byte) ([]byte, tmcrypto.PubKey, error)
Sign(uid string, msg []byte) ([]byte, types.PubKey, error)

// SignByAddress sign byte messages with a user key providing the address.
SignByAddress(address sdk.Address, msg []byte) ([]byte, tmcrypto.PubKey, error)
SignByAddress(address sdk.Address, msg []byte) ([]byte, types.PubKey, error)
}
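
(Aside, not part of this commit: a minimal sketch of how a caller uses the updated Signer contract, where Sign now returns the SDK's own types.PubKey rather than Tendermint's crypto.PubKey. The helper signAndVerify and the package name are hypothetical; the interface and method signatures are the ones shown in the hunk above.)

package keyringexample

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/crypto/keyring"
)

// signAndVerify signs msg with the named key and checks the signature against
// the returned SDK PubKey, which exposes VerifySignature directly.
func signAndVerify(ks keyring.Signer, uid string, msg []byte) error {
	sig, pub, err := ks.Sign(uid, msg)
	if err != nil {
		return err
	}
	if !pub.VerifySignature(msg, sig) {
		return fmt.Errorf("signature by %q did not verify", uid)
	}
	return nil
}

An in-memory keyring such as the one built with New("keybasename", "memory", "", nil) in the tests further below would satisfy this interface.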
|
||||
|
||||
// Importer is implemented by key stores that support import of public and private keys.
|
||||
|
@ -224,13 +225,13 @@ func (ks keystore) ExportPrivKeyArmor(uid, encryptPassphrase string) (armor stri
|
|||
}
|
||||
|
||||
// ExportPrivateKeyObject exports an armored private key object.
|
||||
func (ks keystore) ExportPrivateKeyObject(uid string) (tmcrypto.PrivKey, error) {
|
||||
func (ks keystore) ExportPrivateKeyObject(uid string) (types.PrivKey, error) {
|
||||
info, err := ks.Key(uid)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var priv tmcrypto.PrivKey
|
||||
var priv types.PrivKey
|
||||
|
||||
switch linfo := info.(type) {
|
||||
case localInfo:
|
||||
|
@ -301,13 +302,13 @@ func (ks keystore) ImportPubKey(uid string, armor string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (ks keystore) Sign(uid string, msg []byte) ([]byte, tmcrypto.PubKey, error) {
|
||||
func (ks keystore) Sign(uid string, msg []byte) ([]byte, types.PubKey, error) {
|
||||
info, err := ks.Key(uid)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
var priv tmcrypto.PrivKey
|
||||
var priv types.PrivKey
|
||||
|
||||
switch i := info.(type) {
|
||||
case localInfo:
|
||||
|
@ -335,7 +336,7 @@ func (ks keystore) Sign(uid string, msg []byte) ([]byte, tmcrypto.PubKey, error)
|
|||
return sig, priv.PubKey(), nil
|
||||
}
|
||||
|
||||
func (ks keystore) SignByAddress(address sdk.Address, msg []byte) ([]byte, tmcrypto.PubKey, error) {
|
||||
func (ks keystore) SignByAddress(address sdk.Address, msg []byte) ([]byte, types.PubKey, error) {
|
||||
key, err := ks.KeyByAddress(address)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
|
@ -359,7 +360,7 @@ func (ks keystore) SaveLedgerKey(uid string, algo SignatureAlgo, hrp string, coi
|
|||
return ks.writeLedgerKey(uid, priv.PubKey(), *hdPath, algo.Name())
|
||||
}
|
||||
|
||||
func (ks keystore) writeLedgerKey(name string, pub tmcrypto.PubKey, path hd.BIP44Params, algo hd.PubKeyType) (Info, error) {
|
||||
func (ks keystore) writeLedgerKey(name string, pub types.PubKey, path hd.BIP44Params, algo hd.PubKeyType) (Info, error) {
|
||||
info := newLedgerInfo(name, pub, path, algo)
|
||||
if err := ks.writeInfo(info); err != nil {
|
||||
return nil, err
|
||||
|
@ -368,11 +369,11 @@ func (ks keystore) writeLedgerKey(name string, pub tmcrypto.PubKey, path hd.BIP4
|
|||
return info, nil
|
||||
}
|
||||
|
||||
func (ks keystore) SaveMultisig(uid string, pubkey tmcrypto.PubKey) (Info, error) {
|
||||
func (ks keystore) SaveMultisig(uid string, pubkey types.PubKey) (Info, error) {
|
||||
return ks.writeMultisigKey(uid, pubkey)
|
||||
}
|
||||
|
||||
func (ks keystore) SavePubKey(uid string, pubkey tmcrypto.PubKey, algo hd.PubKeyType) (Info, error) {
|
||||
func (ks keystore) SavePubKey(uid string, pubkey types.PubKey, algo hd.PubKeyType) (Info, error) {
|
||||
return ks.writeOfflineKey(uid, pubkey, algo)
|
||||
}
|
||||
|
||||
|
@ -533,7 +534,7 @@ func (ks keystore) SupportedAlgorithms() (SigningAlgoList, SigningAlgoList) {
|
|||
// SignWithLedger signs a binary message with the ledger device referenced by an Info object
|
||||
// and returns the signed bytes and the public key. It returns an error if the device could
|
||||
// not be queried or it returned an error.
|
||||
func SignWithLedger(info Info, msg []byte) (sig []byte, pub tmcrypto.PubKey, err error) {
|
||||
func SignWithLedger(info Info, msg []byte) (sig []byte, pub types.PubKey, err error) {
|
||||
switch info.(type) {
|
||||
case *ledgerInfo, ledgerInfo:
|
||||
default:
|
||||
|
@ -691,7 +692,7 @@ func newRealPrompt(dir string, buf io.Reader) func(string) (string, error) {
|
|||
}
|
||||
}
|
||||
|
||||
func (ks keystore) writeLocalKey(name string, priv tmcrypto.PrivKey, algo hd.PubKeyType) (Info, error) {
|
||||
func (ks keystore) writeLocalKey(name string, priv types.PrivKey, algo hd.PubKeyType) (Info, error) {
|
||||
// encrypt private key using keyring
|
||||
pub := priv.PubKey()
|
||||
|
||||
|
@ -753,7 +754,7 @@ func (ks keystore) existsInDb(info Info) (bool, error) {
|
|||
return false, nil
|
||||
}
|
||||
|
||||
func (ks keystore) writeOfflineKey(name string, pub tmcrypto.PubKey, algo hd.PubKeyType) (Info, error) {
|
||||
func (ks keystore) writeOfflineKey(name string, pub types.PubKey, algo hd.PubKeyType) (Info, error) {
|
||||
info := newOfflineInfo(name, pub, algo)
|
||||
err := ks.writeInfo(info)
|
||||
if err != nil {
|
||||
|
@ -763,7 +764,7 @@ func (ks keystore) writeOfflineKey(name string, pub tmcrypto.PubKey, algo hd.Pub
|
|||
return info, nil
|
||||
}
|
||||
|
||||
func (ks keystore) writeMultisigKey(name string, pub tmcrypto.PubKey) (Info, error) {
|
||||
func (ks keystore) writeMultisigKey(name string, pub types.PubKey) (Info, error) {
|
||||
info := NewMultiInfo(name, pub)
|
||||
err := ks.writeInfo(info)
|
||||
if err != nil {
|
||||
|
|
|
@ -45,7 +45,7 @@ func TestInMemoryCreateLedger(t *testing.T) {
|
|||
|
||||
path, err := restoredKey.GetPath()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "44'/118'/3'/0/1", path.String())
|
||||
require.Equal(t, "m/44'/118'/3'/0/1", path.String())
|
||||
}
|
||||
|
||||
// TestSignVerify does some detailed checks on how we sign and validate
|
||||
|
@ -123,5 +123,5 @@ func TestAltKeyring_SaveLedgerKey(t *testing.T) {
|
|||
|
||||
path, err := restoredKey.GetPath()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "44'/118'/3'/0/1", path.String())
|
||||
require.Equal(t, "m/44'/118'/3'/0/1", path.String())
|
||||
}
|
||||
|
|
|
@ -8,13 +8,13 @@ import (
|
|||
"github.com/99designs/keyring"
|
||||
bip39 "github.com/cosmos/go-bip39"
|
||||
"github.com/stretchr/testify/require"
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
|
@ -166,7 +166,7 @@ func TestSignVerifyKeyRing(t *testing.T) {
|
|||
|
||||
// let's try to validate and make sure it only works when everything is proper
|
||||
cases := []struct {
|
||||
key tmcrypto.PubKey
|
||||
key types.PubKey
|
||||
data []byte
|
||||
sig []byte
|
||||
valid bool
|
||||
|
@ -380,7 +380,7 @@ func TestInMemoryCreateMultisig(t *testing.T) {
|
|||
kb, err := New("keybasename", "memory", "", nil)
|
||||
require.NoError(t, err)
|
||||
multi := multisig.NewLegacyAminoPubKey(
|
||||
1, []tmcrypto.PubKey{
|
||||
1, []types.PubKey{
|
||||
secp256k1.GenPrivKey().PubKey(),
|
||||
},
|
||||
)
|
||||
|
@ -521,7 +521,7 @@ func TestInMemorySignVerify(t *testing.T) {
|
|||
|
||||
// let's try to validate and make sure it only works when everything is proper
|
||||
cases := []struct {
|
||||
key tmcrypto.PubKey
|
||||
key types.PubKey
|
||||
data []byte
|
||||
sig []byte
|
||||
valid bool
|
||||
|
@ -947,7 +947,7 @@ func TestAltKeyring_SaveMultisig(t *testing.T) {
|
|||
key := "multi"
|
||||
pub := multisig.NewLegacyAminoPubKey(
|
||||
2,
|
||||
[]tmcrypto.PubKey{
|
||||
[]types.PubKey{
|
||||
&secp256k1.PubKey{Key: mnemonic1.GetPubKey().Bytes()},
|
||||
&secp256k1.PubKey{Key: mnemonic2.GetPubKey().Bytes()},
|
||||
},
|
||||
|
|
|
@ -6,11 +6,11 @@ import (
|
|||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
tmos "github.com/tendermint/tendermint/libs/os"
|
||||
dbm "github.com/tendermint/tm-db"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
|
||||
)
|
||||
|
@ -101,13 +101,13 @@ func (kb dbKeybase) Get(name string) (Info, error) {
|
|||
// ExportPrivateKeyObject returns a PrivKey object given the key name and
|
||||
// passphrase. An error is returned if the key does not exist or if the Info for
|
||||
// the key is invalid.
|
||||
func (kb dbKeybase) ExportPrivateKeyObject(name string, passphrase string) (tmcrypto.PrivKey, error) {
|
||||
func (kb dbKeybase) ExportPrivateKeyObject(name string, passphrase string) (types.PrivKey, error) {
|
||||
info, err := kb.Get(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var priv tmcrypto.PrivKey
|
||||
var priv types.PrivKey
|
||||
|
||||
switch i := info.(type) {
|
||||
case localInfo:
|
||||
|
|
|
@ -4,10 +4,10 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
kmultisig "github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
)
|
||||
|
||||
|
@ -16,7 +16,7 @@ func TestBech32KeysOutput(t *testing.T) {
|
|||
bechTmpKey := sdk.MustBech32ifyPubKey(sdk.Bech32PubKeyTypeAccPub, tmpKey)
|
||||
tmpAddr := sdk.AccAddress(tmpKey.Address().Bytes())
|
||||
|
||||
multisigPks := kmultisig.NewLegacyAminoPubKey(1, []crypto.PubKey{tmpKey})
|
||||
multisigPks := kmultisig.NewLegacyAminoPubKey(1, []types.PubKey{tmpKey})
|
||||
multiInfo := NewMultiInfo("multisig", multisigPks)
|
||||
accAddr := sdk.AccAddress(multiInfo.GetPubKey().Address().Bytes())
|
||||
bechPubKey := sdk.MustBech32ifyPubKey(sdk.Bech32PubKeyTypeAccPub, multiInfo.GetPubKey())
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
package keyring
|
||||
|
||||
import (
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
||||
// Language is a language to create the BIP 39 mnemonic in.
|
||||
|
@ -68,5 +67,5 @@ type (
|
|||
// DeriveKeyFunc defines the function to derive a new key from a seed and hd path
|
||||
DeriveKeyFunc func(mnemonic string, bip39Passphrase, hdPath string, algo hd.PubKeyType) ([]byte, error)
|
||||
// PrivKeyGenFunc defines the function to convert derived key bytes to a tendermint private key
|
||||
PrivKeyGenFunc func(bz []byte, algo hd.PubKeyType) (crypto.PrivKey, error)
|
||||
PrivKeyGenFunc func(bz []byte, algo hd.PubKeyType) (cryptotypes.PrivKey, error)
|
||||
)
|
||||
|
|
|
@ -4,11 +4,10 @@ import (
|
|||
"encoding/hex"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
sdk "github.com/cosmos/cosmos-sdk/types"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_writeReadLedgerInfo(t *testing.T) {
|
||||
|
@ -17,28 +16,27 @@ func Test_writeReadLedgerInfo(t *testing.T) {
|
|||
copy(tmpKey[:], bz)
|
||||
|
||||
lInfo := newLedgerInfo("some_name", &secp256k1.PubKey{Key: tmpKey}, *hd.NewFundraiserParams(5, sdk.CoinType, 1), hd.Secp256k1Type)
|
||||
assert.Equal(t, TypeLedger, lInfo.GetType())
|
||||
require.Equal(t, TypeLedger, lInfo.GetType())
|
||||
|
||||
path, err := lInfo.GetPath()
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "44'/118'/5'/0/1", path.String())
|
||||
assert.Equal(t,
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "m/44'/118'/5'/0/1", path.String())
|
||||
require.Equal(t,
|
||||
"cosmospub1addwnpepqddddqg2glc8x4fl7vxjlnr7p5a3czm5kcdp4239sg6yqdc4rc2r5wmxv8p",
|
||||
sdk.MustBech32ifyPubKey(sdk.Bech32PubKeyTypeAccPub, lInfo.GetPubKey()))
|
||||
|
||||
// Serialize and restore
|
||||
serialized := marshalInfo(lInfo)
|
||||
restoredInfo, err := unmarshalInfo(serialized)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, restoredInfo)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, restoredInfo)
|
||||
|
||||
// Check both keys match
|
||||
assert.Equal(t, lInfo.GetName(), restoredInfo.GetName())
|
||||
assert.Equal(t, lInfo.GetType(), restoredInfo.GetType())
|
||||
assert.Equal(t, lInfo.GetPubKey(), restoredInfo.GetPubKey())
|
||||
require.Equal(t, lInfo.GetName(), restoredInfo.GetName())
|
||||
require.Equal(t, lInfo.GetType(), restoredInfo.GetType())
|
||||
require.Equal(t, lInfo.GetPubKey(), restoredInfo.GetPubKey())
|
||||
|
||||
restoredPath, err := restoredInfo.GetPath()
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, path, restoredPath)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, path, restoredPath)
|
||||
}
|
||||
|
|
|
@ -1,14 +1,13 @@
|
|||
package ed25519
|
||||
|
||||
import (
|
||||
"crypto/ed25519"
|
||||
"crypto/subtle"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
tmed25519 "github.com/tendermint/tendermint/crypto/ed25519"
|
||||
"github.com/tendermint/tendermint/crypto/tmhash"
|
||||
"golang.org/x/crypto/ed25519"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
|
@ -18,8 +17,8 @@ import (
|
|||
//-------------------------------------
|
||||
|
||||
const (
|
||||
PrivKeyName = "cosmos/PrivKeyEd25519"
|
||||
PubKeyName = "cosmos/PubKeyEd25519"
|
||||
PrivKeyName = "tendermint/PrivKeyEd25519"
|
||||
PubKeyName = "tendermint/PubKeyEd25519"
|
||||
// PubKeySize is is the size, in bytes, of public keys as used in this package.
|
||||
PubKeySize = 32
|
||||
// PrivKeySize is the size, in bytes, of private keys as used in this package.
|
||||
|
@ -50,13 +49,13 @@ func (privKey *PrivKey) Bytes() []byte {
|
|||
// If these conditions aren't met, Sign will panic or produce an
|
||||
// incorrect signature.
|
||||
func (privKey *PrivKey) Sign(msg []byte) ([]byte, error) {
|
||||
return ed25519.Sign(ed25519.PrivateKey(privKey.Key), msg), nil
|
||||
return ed25519.Sign(privKey.Key, msg), nil
|
||||
}
|
||||
|
||||
// PubKey gets the corresponding public key from the private key.
|
||||
//
|
||||
// Panics if the private key is not initialized.
|
||||
func (privKey *PrivKey) PubKey() crypto.PubKey {
|
||||
func (privKey *PrivKey) PubKey() cryptotypes.PubKey {
|
||||
// If the latter 32 bytes of the privkey are all zero, privkey is not
|
||||
// initialized.
|
||||
initialized := false
|
||||
|
@ -78,7 +77,7 @@ func (privKey *PrivKey) PubKey() crypto.PubKey {
|
|||
|
||||
// Equals - you probably don't need to use this.
|
||||
// Runs in constant time based on length of the keys.
|
||||
func (privKey *PrivKey) Equals(other crypto.PrivKey) bool {
|
||||
func (privKey *PrivKey) Equals(other cryptotypes.LedgerPrivKey) bool {
|
||||
if privKey.Type() != other.Type() {
|
||||
return false
|
||||
}
|
||||
|
@ -150,7 +149,6 @@ func GenPrivKeyFromSecret(secret []byte) *PrivKey {
|
|||
|
||||
var _ cryptotypes.PubKey = &PubKey{}
|
||||
var _ codec.AminoMarshaler = &PubKey{}
|
||||
var _ cryptotypes.IntoTmPubKey = &PubKey{}
|
||||
|
||||
// Address is the SHA256-20 of the raw pubkey bytes.
|
||||
func (pubKey *PubKey) Address() crypto.Address {
|
||||
|
@ -171,7 +169,7 @@ func (pubKey *PubKey) VerifySignature(msg []byte, sig []byte) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
return ed25519.Verify(ed25519.PublicKey(pubKey.Key), msg, sig)
|
||||
return ed25519.Verify(pubKey.Key, msg, sig)
|
||||
}
|
||||
|
||||
func (pubKey *PubKey) String() string {
|
||||
|
@ -182,7 +180,7 @@ func (pubKey *PubKey) Type() string {
|
|||
return keyType
|
||||
}
|
||||
|
||||
func (pubKey *PubKey) Equals(other crypto.PubKey) bool {
|
||||
func (pubKey *PubKey) Equals(other cryptotypes.PubKey) bool {
|
||||
if pubKey.Type() != other.Type() {
|
||||
return false
|
||||
}
|
||||
|
@ -216,19 +214,3 @@ func (pubKey PubKey) MarshalAminoJSON() ([]byte, error) {
|
|||
func (pubKey *PubKey) UnmarshalAminoJSON(bz []byte) error {
|
||||
return pubKey.UnmarshalAmino(bz)
|
||||
}
|
||||
|
||||
// AsTmPubKey converts our own PubKey into a Tendermint ED25519 pubkey.
|
||||
func (pubKey *PubKey) AsTmPubKey() crypto.PubKey {
|
||||
return tmed25519.PubKey(pubKey.Key)
|
||||
}
|
||||
|
||||
// FromTmEd25519 converts a Tendermint ED25519 pubkey into our own ED25519
|
||||
// PubKey.
|
||||
func FromTmEd25519(pubKey crypto.PubKey) (*PubKey, error) {
|
||||
tmPk, ok := pubKey.(tmed25519.PubKey)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected %T, got %T", tmed25519.PubKey{}, pubKey)
|
||||
}
|
||||
|
||||
return &PubKey{Key: []byte(tmPk)}, nil
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package ed25519_test
|
||||
|
||||
import (
|
||||
stded25519 "crypto/ed25519"
|
||||
"encoding/base64"
|
||||
"testing"
|
||||
|
||||
|
@ -8,10 +9,10 @@ import (
|
|||
"github.com/stretchr/testify/require"
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
tmed25519 "github.com/tendermint/tendermint/crypto/ed25519"
|
||||
"github.com/tendermint/tendermint/crypto/sr25519"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
|
||||
ed25519 "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
||||
|
@ -19,17 +20,28 @@ func TestSignAndValidateEd25519(t *testing.T) {
|
|||
privKey := ed25519.GenPrivKey()
|
||||
pubKey := privKey.PubKey()
|
||||
|
||||
msg := crypto.CRandBytes(128)
|
||||
msg := crypto.CRandBytes(1000)
|
||||
sig, err := privKey.Sign(msg)
|
||||
require.Nil(t, err)
|
||||
|
||||
// Test the signature
|
||||
assert.True(t, pubKey.VerifySignature(msg, sig))
|
||||
|
||||
// ----
|
||||
// Test cross packages verification
|
||||
stdPrivKey := stded25519.PrivateKey(privKey.Key)
|
||||
stdPubKey := stdPrivKey.Public().(stded25519.PublicKey)
|
||||
|
||||
assert.Equal(t, stdPubKey, pubKey.(*ed25519.PubKey).Key)
|
||||
assert.Equal(t, stdPrivKey, privKey.Key)
|
||||
assert.True(t, stded25519.Verify(stdPubKey, msg, sig))
|
||||
sig2 := stded25519.Sign(stdPrivKey, msg)
|
||||
assert.True(t, pubKey.VerifySignature(msg, sig2))
|
||||
|
||||
// ----
|
||||
// Mutate the signature, just one bit.
|
||||
// TODO: Replace this with a much better fuzzer, tendermint/ed25519/issues/10
|
||||
sig[7] ^= byte(0x01)
|
||||
|
||||
assert.False(t, pubKey.VerifySignature(msg, sig))
|
||||
}
|
||||
|
||||
|
@ -39,7 +51,7 @@ func TestPubKeyEquals(t *testing.T) {
|
|||
testCases := []struct {
|
||||
msg string
|
||||
pubKey cryptotypes.PubKey
|
||||
other crypto.PubKey
|
||||
other cryptotypes.PubKey
|
||||
expectEq bool
|
||||
}{
|
||||
{
|
||||
|
@ -59,7 +71,7 @@ func TestPubKeyEquals(t *testing.T) {
|
|||
{
|
||||
"different types",
|
||||
ed25519PubKey,
|
||||
sr25519.GenPrivKey().PubKey(),
|
||||
secp256k1.GenPrivKey().PubKey(),
|
||||
false,
|
||||
},
|
||||
}
|
||||
|
@ -78,7 +90,7 @@ func TestPrivKeyEquals(t *testing.T) {
|
|||
testCases := []struct {
|
||||
msg string
|
||||
privKey cryptotypes.PrivKey
|
||||
other crypto.PrivKey
|
||||
other cryptotypes.PrivKey
|
||||
expectEq bool
|
||||
}{
|
||||
{
|
||||
|
@ -98,7 +110,7 @@ func TestPrivKeyEquals(t *testing.T) {
|
|||
{
|
||||
"different types",
|
||||
ed25519PrivKey,
|
||||
sr25519.GenPrivKey(),
|
||||
secp256k1.GenPrivKey(),
|
||||
false,
|
||||
},
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
package ed25519
|
||||
|
||||
import (
|
||||
crypto_ed25519 "crypto/ed25519"
|
||||
fmt "fmt"
|
||||
_ "github.com/gogo/protobuf/gogoproto"
|
||||
proto "github.com/gogo/protobuf/proto"
|
||||
|
@ -29,7 +30,7 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
|
|||
// the x-coordinate. Otherwise the first byte is a 0x03.
|
||||
// This prefix is followed with the x-coordinate.
|
||||
type PubKey struct {
|
||||
Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
|
||||
Key crypto_ed25519.PublicKey `protobuf:"bytes,1,opt,name=key,proto3,casttype=crypto/ed25519.PublicKey" json:"key,omitempty"`
|
||||
}
|
||||
|
||||
func (m *PubKey) Reset() { *m = PubKey{} }
|
||||
|
@ -64,7 +65,7 @@ func (m *PubKey) XXX_DiscardUnknown() {
|
|||
|
||||
var xxx_messageInfo_PubKey proto.InternalMessageInfo
|
||||
|
||||
func (m *PubKey) GetKey() []byte {
|
||||
func (m *PubKey) GetKey() crypto_ed25519.PublicKey {
|
||||
if m != nil {
|
||||
return m.Key
|
||||
}
|
||||
|
@ -73,7 +74,7 @@ func (m *PubKey) GetKey() []byte {
|
|||
|
||||
// PrivKey defines a ed25519 private key.
|
||||
type PrivKey struct {
|
||||
Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
|
||||
Key crypto_ed25519.PrivateKey `protobuf:"bytes,1,opt,name=key,proto3,casttype=crypto/ed25519.PrivateKey" json:"key,omitempty"`
|
||||
}
|
||||
|
||||
func (m *PrivKey) Reset() { *m = PrivKey{} }
|
||||
|
@ -109,7 +110,7 @@ func (m *PrivKey) XXX_DiscardUnknown() {
|
|||
|
||||
var xxx_messageInfo_PrivKey proto.InternalMessageInfo
|
||||
|
||||
func (m *PrivKey) GetKey() []byte {
|
||||
func (m *PrivKey) GetKey() crypto_ed25519.PrivateKey {
|
||||
if m != nil {
|
||||
return m.Key
|
||||
}
|
||||
|
@ -124,19 +125,21 @@ func init() {
|
|||
func init() { proto.RegisterFile("cosmos/crypto/ed25519/keys.proto", fileDescriptor_48fe3336771e732d) }
|
||||
|
||||
var fileDescriptor_48fe3336771e732d = []byte{
|
||||
// 183 bytes of a gzipped FileDescriptorProto
|
||||
// 221 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x48, 0xce, 0x2f, 0xce,
|
||||
0xcd, 0x2f, 0xd6, 0x4f, 0x2e, 0xaa, 0x2c, 0x28, 0xc9, 0xd7, 0x4f, 0x4d, 0x31, 0x32, 0x35, 0x35,
|
||||
0xb4, 0xd4, 0xcf, 0x4e, 0xad, 0x2c, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x12, 0x85, 0xa8,
|
||||
0xd0, 0x83, 0xa8, 0xd0, 0x83, 0xaa, 0x90, 0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0xab, 0xd0, 0x07,
|
||||
0xb1, 0x20, 0x8a, 0x95, 0x14, 0xb8, 0xd8, 0x02, 0x4a, 0x93, 0xbc, 0x53, 0x2b, 0x85, 0x04, 0xb8,
|
||||
0x98, 0xb3, 0x53, 0x2b, 0x25, 0x18, 0x15, 0x18, 0x35, 0x78, 0x82, 0x40, 0x4c, 0x2b, 0x96, 0x19,
|
||||
0x0b, 0xe4, 0x19, 0x94, 0xa4, 0xb9, 0xd8, 0x03, 0x8a, 0x32, 0xcb, 0xb0, 0x2a, 0x71, 0xf2, 0x3a,
|
||||
0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63,
|
||||
0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, 0x83, 0xf4, 0xcc, 0x92, 0x8c, 0xd2,
|
||||
0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0x7d, 0x98, 0x93, 0xc1, 0x94, 0x6e, 0x71, 0x4a, 0x36, 0xcc, 0xf5,
|
||||
0x20, 0x57, 0xc3, 0xbc, 0x90, 0xc4, 0x06, 0x76, 0x91, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0xcc,
|
||||
0xf3, 0x11, 0x99, 0xe2, 0x00, 0x00, 0x00,
|
||||
0xb1, 0x20, 0x8a, 0x95, 0xec, 0xb8, 0xd8, 0x02, 0x4a, 0x93, 0xbc, 0x53, 0x2b, 0x85, 0xf4, 0xb8,
|
||||
0x98, 0xb3, 0x53, 0x2b, 0x25, 0x18, 0x15, 0x18, 0x35, 0x78, 0x9c, 0x64, 0x7e, 0xdd, 0x93, 0x97,
|
||||
0x40, 0xb5, 0x42, 0x2f, 0xa0, 0x34, 0x29, 0x27, 0x33, 0xd9, 0x3b, 0xb5, 0x32, 0x08, 0xa4, 0xd0,
|
||||
0x8a, 0x65, 0xc6, 0x02, 0x79, 0x06, 0x25, 0x2b, 0x2e, 0xf6, 0x80, 0xa2, 0xcc, 0x32, 0x90, 0x01,
|
||||
0xfa, 0xc8, 0x06, 0xc8, 0xfe, 0xba, 0x27, 0x2f, 0x89, 0x6e, 0x40, 0x51, 0x66, 0x59, 0x62, 0x49,
|
||||
0x2a, 0xcc, 0x04, 0x27, 0xaf, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, 0x7c, 0xf0, 0x48,
|
||||
0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e, 0x3c, 0x96, 0x63, 0x88, 0x32,
|
||||
0x48, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x87, 0xf9, 0x17, 0x4c, 0xe9,
|
||||
0x16, 0xa7, 0x64, 0xc3, 0xbc, 0x0e, 0xf2, 0x32, 0xcc, 0xec, 0x24, 0x36, 0xb0, 0x77, 0x8c, 0x01,
|
||||
0x01, 0x00, 0x00, 0xff, 0xff, 0xb0, 0xd8, 0x01, 0xc0, 0x1f, 0x01, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (m *PubKey) Marshal() (dAtA []byte, err error) {
|
||||
|
|
|
@ -4,7 +4,7 @@ import (
|
|||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
||||
// The code in this file is adapted from agl/ed25519.
|
||||
|
@ -24,7 +24,7 @@ func (zeroReader) Read(buf []byte) (int, error) {
|
|||
|
||||
// BenchmarkKeyGeneration benchmarks the given key generation algorithm using
|
||||
// a dummy reader.
|
||||
func BenchmarkKeyGeneration(b *testing.B, generateKey func(reader io.Reader) crypto.PrivKey) {
|
||||
func BenchmarkKeyGeneration(b *testing.B, generateKey func(reader io.Reader) types.PrivKey) {
|
||||
var zero zeroReader
|
||||
for i := 0; i < b.N; i++ {
|
||||
generateKey(zero)
|
||||
|
@ -33,7 +33,7 @@ func BenchmarkKeyGeneration(b *testing.B, generateKey func(reader io.Reader) cry
|
|||
|
||||
// BenchmarkSigning benchmarks the given signing algorithm using
|
||||
// the provided privkey.
|
||||
func BenchmarkSigning(b *testing.B, priv crypto.PrivKey) {
|
||||
func BenchmarkSigning(b *testing.B, priv types.PrivKey) {
|
||||
message := []byte("Hello, world!")
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
|
@ -47,7 +47,7 @@ func BenchmarkSigning(b *testing.B, priv crypto.PrivKey) {
|
|||
|
||||
// BenchmarkVerification benchmarks the given verification algorithm using
|
||||
// the provided privkey on a constant message.
|
||||
func BenchmarkVerification(b *testing.B, priv crypto.PrivKey) {
|
||||
func BenchmarkVerification(b *testing.B, priv types.PrivKey) {
|
||||
pub := priv.PubKey()
|
||||
// use a short message, so this time doesn't get dominated by hashing.
|
||||
message := []byte("Hello, world!")
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
package multisig
|
||||
|
||||
import (
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
"github.com/tendermint/tendermint/crypto/sr25519"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec"
|
||||
|
@ -19,10 +18,6 @@ const (
|
|||
var AminoCdc = codec.NewLegacyAmino()
|
||||
|
||||
func init() {
|
||||
// TODO We now register both Tendermint's PubKey and our own PubKey. In the
|
||||
// long-term, we should move away from Tendermint's PubKey, and delete this
|
||||
// first line.
|
||||
AminoCdc.RegisterInterface((*crypto.PubKey)(nil), nil)
|
||||
AminoCdc.RegisterInterface((*cryptotypes.PubKey)(nil), nil)
|
||||
AminoCdc.RegisterConcrete(ed25519.PubKey{},
|
||||
ed25519.PubKeyName, nil)
|
||||
|
|
|
@ -5,10 +5,8 @@ import (
|
|||
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
|
||||
proto "github.com/gogo/protobuf/proto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec/types"
|
||||
crypto "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
multisigtypes "github.com/cosmos/cosmos-sdk/crypto/types/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/types/tx/signing"
|
||||
)
|
||||
|
@ -18,7 +16,7 @@ var _ types.UnpackInterfacesMessage = &LegacyAminoPubKey{}
|
|||
|
||||
// NewLegacyAminoPubKey returns a new LegacyAminoPubKey.
|
||||
// Panics if len(pubKeys) < k or 0 >= k.
|
||||
func NewLegacyAminoPubKey(k int, pubKeys []tmcrypto.PubKey) *LegacyAminoPubKey {
|
||||
func NewLegacyAminoPubKey(k int, pubKeys []cryptotypes.PubKey) *LegacyAminoPubKey {
|
||||
if k <= 0 {
|
||||
panic("threshold k of n multisignature: k <= 0")
|
||||
}
|
||||
|
@ -32,8 +30,8 @@ func NewLegacyAminoPubKey(k int, pubKeys []tmcrypto.PubKey) *LegacyAminoPubKey {
|
|||
return &LegacyAminoPubKey{Threshold: uint32(k), PubKeys: anyPubKeys}
|
||||
}
|
||||
|
||||
// Address implements crypto.PubKey Address method
|
||||
func (m *LegacyAminoPubKey) Address() tmcrypto.Address {
|
||||
// Address implements cryptotypes.PubKey Address method
|
||||
func (m *LegacyAminoPubKey) Address() cryptotypes.Address {
|
||||
return tmcrypto.AddressHash(m.Bytes())
|
||||
}
|
||||
|
||||
|
@ -91,7 +89,7 @@ func (m *LegacyAminoPubKey) VerifyMultisignature(getSignBytes multisigtypes.GetS
|
|||
return nil
|
||||
}
|
||||
|
||||
// VerifySignature implements crypto.PubKey VerifySignature method,
|
||||
// VerifySignature implements cryptotypes.PubKey VerifySignature method,
|
||||
// it panics because it can't handle MultiSignatureData
|
||||
// cf. https://github.com/cosmos/cosmos-sdk/issues/7109#issuecomment-686329936
|
||||
func (m *LegacyAminoPubKey) VerifySignature(msg []byte, sig []byte) bool {
|
||||
|
@ -99,11 +97,11 @@ func (m *LegacyAminoPubKey) VerifySignature(msg []byte, sig []byte) bool {
|
|||
}
|
||||
|
||||
// GetPubKeys implements the PubKey.GetPubKeys method
|
||||
func (m *LegacyAminoPubKey) GetPubKeys() []tmcrypto.PubKey {
|
||||
func (m *LegacyAminoPubKey) GetPubKeys() []cryptotypes.PubKey {
|
||||
if m != nil {
|
||||
pubKeys := make([]tmcrypto.PubKey, len(m.PubKeys))
|
||||
pubKeys := make([]cryptotypes.PubKey, len(m.PubKeys))
|
||||
for i := 0; i < len(m.PubKeys); i++ {
|
||||
pubKeys[i] = m.PubKeys[i].GetCachedValue().(tmcrypto.PubKey)
|
||||
pubKeys[i] = m.PubKeys[i].GetCachedValue().(cryptotypes.PubKey)
|
||||
}
|
||||
return pubKeys
|
||||
}
|
||||
|
@ -113,7 +111,7 @@ func (m *LegacyAminoPubKey) GetPubKeys() []tmcrypto.PubKey {
|
|||
|
||||
// Equals returns true if m and other both have the same number of keys, and
|
||||
// all constituent keys are the same, and in the same order.
|
||||
func (m *LegacyAminoPubKey) Equals(key tmcrypto.PubKey) bool {
|
||||
func (m *LegacyAminoPubKey) Equals(key cryptotypes.PubKey) bool {
|
||||
otherKey, ok := key.(multisigtypes.PubKey)
|
||||
if !ok {
|
||||
return false
|
||||
|
@ -145,7 +143,7 @@ func (m *LegacyAminoPubKey) Type() string {
|
|||
// UnpackInterfaces implements UnpackInterfacesMessage.UnpackInterfaces
|
||||
func (m *LegacyAminoPubKey) UnpackInterfaces(unpacker types.AnyUnpacker) error {
|
||||
for _, any := range m.PubKeys {
|
||||
var pk crypto.PubKey
|
||||
var pk cryptotypes.PubKey
|
||||
err := unpacker.UnpackAny(any, &pk)
|
||||
if err != nil {
|
||||
return err
|
||||
|
@ -154,11 +152,11 @@ func (m *LegacyAminoPubKey) UnpackInterfaces(unpacker types.AnyUnpacker) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func packPubKeys(pubKeys []tmcrypto.PubKey) ([]*types.Any, error) {
|
||||
func packPubKeys(pubKeys []cryptotypes.PubKey) ([]*types.Any, error) {
|
||||
anyPubKeys := make([]*types.Any, len(pubKeys))
|
||||
|
||||
for i := 0; i < len(pubKeys); i++ {
|
||||
any, err := types.NewAnyWithValue(pubKeys[i].(proto.Message))
|
||||
any, err := types.NewAnyWithValue(pubKeys[i])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
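(Aside, not part of this commit: a short sketch of constructing a multisig key with the changed constructor above, which now takes a slice of the SDK's cryptotypes.PubKey; it mirrors the usage in the multisig tests that follow.)

package main

import (
	"fmt"

	kmultisig "github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
	"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
	cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
)

func main() {
	// SDK key types implement cryptotypes.PubKey, so they can be passed to the
	// constructor without wrapping them in Tendermint's crypto.PubKey.
	pks := []cryptotypes.PubKey{
		secp256k1.GenPrivKey().PubKey(),
		secp256k1.GenPrivKey().PubKey(),
	}
	multi := kmultisig.NewLegacyAminoPubKey(2, pks) // 2-of-2 threshold key
	fmt.Println(multi.Address())
}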
@ -3,19 +3,16 @@ package multisig_test
|
|||
import (
|
||||
"testing"
|
||||
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec/types"
|
||||
crypto "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec"
|
||||
"github.com/cosmos/cosmos-sdk/codec/types"
|
||||
kmultisig "github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types/multisig"
|
||||
"github.com/cosmos/cosmos-sdk/types/tx/signing"
|
||||
"github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx"
|
||||
)
|
||||
|
||||
func TestAddress(t *testing.T) {
|
||||
|
@ -30,12 +27,12 @@ func TestEquals(t *testing.T) {
|
|||
pubKey1 := secp256k1.GenPrivKey().PubKey()
|
||||
pubKey2 := secp256k1.GenPrivKey().PubKey()
|
||||
|
||||
multisigKey := kmultisig.NewLegacyAminoPubKey(1, []tmcrypto.PubKey{pubKey1, pubKey2})
|
||||
otherMultisigKey := kmultisig.NewLegacyAminoPubKey(1, []tmcrypto.PubKey{pubKey1, multisigKey})
|
||||
multisigKey := kmultisig.NewLegacyAminoPubKey(1, []cryptotypes.PubKey{pubKey1, pubKey2})
|
||||
otherMultisigKey := kmultisig.NewLegacyAminoPubKey(1, []cryptotypes.PubKey{pubKey1, multisigKey})
|
||||
|
||||
testCases := []struct {
|
||||
msg string
|
||||
other tmcrypto.PubKey
|
||||
other cryptotypes.PubKey
|
||||
expectEq bool
|
||||
}{
|
||||
{
|
||||
|
@ -255,8 +252,8 @@ func TestPubKeyMultisigThresholdAminoToIface(t *testing.T) {
|
|||
|
||||
ab, err := kmultisig.AminoCdc.MarshalBinaryLengthPrefixed(multisigKey)
|
||||
require.NoError(t, err)
|
||||
// like other crypto.Pubkey implementations (e.g. ed25519.PubKey),
|
||||
// LegacyAminoPubKey should be deserializable into a crypto.LegacyAminoPubKey:
|
||||
// like other cryptotypes.Pubkey implementations (e.g. ed25519.PubKey),
|
||||
// LegacyAminoPubKey should be deserializable into a cryptotypes.LegacyAminoPubKey:
|
||||
var pubKey kmultisig.LegacyAminoPubKey
|
||||
err = kmultisig.AminoCdc.UnmarshalBinaryLengthPrefixed(ab, &pubKey)
|
||||
require.NoError(t, err)
|
||||
|
@ -264,8 +261,8 @@ func TestPubKeyMultisigThresholdAminoToIface(t *testing.T) {
|
|||
require.Equal(t, multisigKey.Equals(&pubKey), true)
|
||||
}
|
||||
|
||||
func generatePubKeysAndSignatures(n int, msg []byte) (pubKeys []tmcrypto.PubKey, signatures []signing.SignatureData) {
|
||||
pubKeys = make([]tmcrypto.PubKey, n)
|
||||
func generatePubKeysAndSignatures(n int, msg []byte) (pubKeys []cryptotypes.PubKey, signatures []signing.SignatureData) {
|
||||
pubKeys = make([]cryptotypes.PubKey, n)
|
||||
signatures = make([]signing.SignatureData, n)
|
||||
|
||||
for i := 0; i < n; i++ {
|
||||
|
@ -279,12 +276,12 @@ func generatePubKeysAndSignatures(n int, msg []byte) (pubKeys []tmcrypto.PubKey,
|
|||
}
|
||||
|
||||
func generateNestedMultiSignature(n int, msg []byte) (multisig.PubKey, *signing.MultiSignatureData) {
|
||||
pubKeys := make([]tmcrypto.PubKey, n)
|
||||
pubKeys := make([]cryptotypes.PubKey, n)
|
||||
signatures := make([]signing.SignatureData, n)
|
||||
bitArray := crypto.NewCompactBitArray(n)
|
||||
bitArray := cryptotypes.NewCompactBitArray(n)
|
||||
for i := 0; i < n; i++ {
|
||||
nestedPks, nestedSigs := generatePubKeysAndSignatures(5, msg)
|
||||
nestedBitArray := crypto.NewCompactBitArray(5)
|
||||
nestedBitArray := cryptotypes.NewCompactBitArray(5)
|
||||
for j := 0; j < 5; j++ {
|
||||
nestedBitArray.SetIndex(j, true)
|
||||
}
|
||||
|
|
|
@ -4,13 +4,12 @@ import (
|
|||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/internal/benchmarking"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
||||
func BenchmarkKeyGeneration(b *testing.B) {
|
||||
benchmarkKeygenWrapper := func(reader io.Reader) crypto.PrivKey {
|
||||
benchmarkKeygenWrapper := func(reader io.Reader) types.PrivKey {
|
||||
priv := genPrivKey(reader)
|
||||
return &PrivKey{Key: priv}
|
||||
}
|
||||
|
|
|
@ -9,13 +9,12 @@ import (
|
|||
"math/big"
|
||||
|
||||
secp256k1 "github.com/btcsuite/btcd/btcec"
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
"golang.org/x/crypto/ripemd160" // nolint: staticcheck // necessary for Bitcoin address format
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
"github.com/cosmos/cosmos-sdk/types/errors"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
)
|
||||
|
||||
var _ cryptotypes.PrivKey = &PrivKey{}
|
||||
|
@ -35,7 +34,7 @@ func (privKey *PrivKey) Bytes() []byte {
|
|||
|
||||
// PubKey performs the point-scalar multiplication from the privKey on the
|
||||
// generator point to get the pubkey.
|
||||
func (privKey *PrivKey) PubKey() crypto.PubKey {
|
||||
func (privKey *PrivKey) PubKey() cryptotypes.PubKey {
|
||||
_, pubkeyObject := secp256k1.PrivKeyFromBytes(secp256k1.S256(), privKey.Key)
|
||||
pk := pubkeyObject.SerializeCompressed()
|
||||
return &PubKey{Key: pk}
|
||||
|
@ -43,7 +42,7 @@ func (privKey *PrivKey) PubKey() crypto.PubKey {
|
|||
|
||||
// Equals - you probably don't need to use this.
|
||||
// Runs in constant time based on length of the
|
||||
func (privKey *PrivKey) Equals(other crypto.PrivKey) bool {
|
||||
func (privKey *PrivKey) Equals(other cryptotypes.LedgerPrivKey) bool {
|
||||
return privKey.Type() == other.Type() && subtle.ConstantTimeCompare(privKey.Bytes(), other.Bytes()) == 1
|
||||
}
|
||||
|
||||
|
@ -174,7 +173,7 @@ func (pubKey *PubKey) Type() string {
|
|||
return keyType
|
||||
}
|
||||
|
||||
func (pubKey *PubKey) Equals(other crypto.PubKey) bool {
|
||||
func (pubKey *PubKey) Equals(other cryptotypes.PubKey) bool {
|
||||
return pubKey.Type() == other.Type() && bytes.Equal(pubKey.Bytes(), other.Bytes())
|
||||
}
|
||||
|
||||
|
|
|
@ -19,6 +19,8 @@ func (privKey *PrivKey) Sign(msg []byte) ([]byte, error) {
|
|||
return rs, nil
|
||||
}
|
||||
|
||||
// VerifySignature validates the signature.
|
||||
// The msg will be hashed prior to signature verification.
|
||||
func (pubKey *PrivKey) VerifySignature(msg []byte, sig []byte) bool {
|
||||
return secp256k1.VerifySignature(pubKey.Key, crypto.Sha256(msg), sig)
|
||||
}
|
||||
|
|
|
@ -5,9 +5,8 @@ import (
|
|||
"math/big"
|
||||
"testing"
|
||||
|
||||
btcSecp256k1 "github.com/btcsuite/btcd/btcec"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
underlyingSecp256k1 "github.com/btcsuite/btcd/btcec"
|
||||
)
|
||||
|
||||
func Test_genPrivKey(t *testing.T) {
|
||||
|
@ -25,7 +24,7 @@ func Test_genPrivKey(t *testing.T) {
|
|||
shouldPanic bool
|
||||
}{
|
||||
{"empty bytes (panics because 1st 32 bytes are zero and 0 is not a valid field element)", empty, true},
|
||||
{"curve order: N", underlyingSecp256k1.S256().N.Bytes(), true},
|
||||
{"curve order: N", btcSecp256k1.S256().N.Bytes(), true},
|
||||
{"valid because 0 < 1 < N", validOne, false},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
|
@ -39,7 +38,7 @@ func Test_genPrivKey(t *testing.T) {
|
|||
}
|
||||
got := genPrivKey(bytes.NewReader(tt.notSoRand))
|
||||
fe := new(big.Int).SetBytes(got[:])
|
||||
require.True(t, fe.Cmp(underlyingSecp256k1.S256().N) < 0)
|
||||
require.True(t, fe.Cmp(btcSecp256k1.S256().N) < 0)
|
||||
require.True(t, fe.Sign() > 0)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -1,21 +1,21 @@
|
|||
package secp256k1_test
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"math/big"
|
||||
"testing"
|
||||
|
||||
btcSecp256k1 "github.com/btcsuite/btcd/btcec"
|
||||
"github.com/btcsuite/btcutil/base58"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/tendermint/tendermint/crypto"
|
||||
"github.com/tendermint/tendermint/crypto/sr25519"
|
||||
|
||||
underlyingSecp256k1 "github.com/btcsuite/btcd/btcec"
|
||||
tmsecp256k1 "github.com/tendermint/tendermint/crypto/secp256k1"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/codec"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
@ -41,7 +41,7 @@ func TestPubKeySecp256k1Address(t *testing.T) {
|
|||
addrBbz, _, _ := base58.CheckDecode(d.addr)
|
||||
addrB := crypto.Address(addrBbz)
|
||||
|
||||
var priv secp256k1.PrivKey = secp256k1.PrivKey{Key: privB}
|
||||
var priv = secp256k1.PrivKey{Key: privB}
|
||||
|
||||
pubKey := priv.PubKey()
|
||||
pubT, _ := pubKey.(*secp256k1.PubKey)
|
||||
|
@ -56,15 +56,34 @@ func TestSignAndValidateSecp256k1(t *testing.T) {
|
|||
privKey := secp256k1.GenPrivKey()
|
||||
pubKey := privKey.PubKey()
|
||||
|
||||
msg := crypto.CRandBytes(128)
|
||||
msg := crypto.CRandBytes(1000)
|
||||
sig, err := privKey.Sign(msg)
|
||||
require.Nil(t, err)
|
||||
|
||||
assert.True(t, pubKey.VerifySignature(msg, sig))
|
||||
|
||||
// ----
|
||||
// Test cross packages verification
|
||||
msgHash := crypto.Sha256(msg)
|
||||
btcPrivKey, btcPubKey := btcSecp256k1.PrivKeyFromBytes(btcSecp256k1.S256(), privKey.Key)
|
||||
// This fails: malformed signature: no header magic
|
||||
// btcSig, err := secp256k1.ParseSignature(sig, secp256k1.S256())
|
||||
// require.NoError(t, err)
|
||||
// assert.True(t, btcSig.Verify(msgHash, btcPubKey))
|
||||
// So we do a hacky way:
|
||||
r := new(big.Int)
|
||||
s := new(big.Int)
|
||||
r.SetBytes(sig[:32])
|
||||
s.SetBytes(sig[32:])
|
||||
ok := ecdsa.Verify(btcPubKey.ToECDSA(), msgHash, r, s)
|
||||
require.True(t, ok)
|
||||
|
||||
sig2, err := btcPrivKey.Sign(msgHash)
|
||||
require.NoError(t, err)
|
||||
pubKey.VerifySignature(msg, sig2.Serialize())
|
||||
|
||||
// ----
|
||||
// Mutate the signature, just one bit.
|
||||
sig[3] ^= byte(0x01)
|
||||
|
||||
assert.False(t, pubKey.VerifySignature(msg, sig))
|
||||
}
|
||||
|
||||
|
@ -79,7 +98,7 @@ func TestSecp256k1LoadPrivkeyAndSerializeIsIdentity(t *testing.T) {
|
|||
|
||||
// This function creates a private and public key in the underlying libraries format.
|
||||
// The private key is basically calling new(big.Int).SetBytes(pk), which removes leading zero bytes
|
||||
priv, _ := underlyingSecp256k1.PrivKeyFromBytes(underlyingSecp256k1.S256(), privKeyBytes[:])
|
||||
priv, _ := btcSecp256k1.PrivKeyFromBytes(btcSecp256k1.S256(), privKeyBytes[:])
|
||||
// this takes the bytes returned by `(big int).Bytes()`, and if the length is less than 32 bytes,
|
||||
// pads the bytes from the left with zero bytes. Therefore these two functions composed
|
||||
// result in the identity function on privKeyBytes, hence the following equality check
|
||||
|
@ -91,7 +110,7 @@ func TestSecp256k1LoadPrivkeyAndSerializeIsIdentity(t *testing.T) {
|
|||
|
||||
func TestGenPrivKeyFromSecret(t *testing.T) {
|
||||
// curve oder N
|
||||
N := underlyingSecp256k1.S256().N
|
||||
N := btcSecp256k1.S256().N
|
||||
tests := []struct {
|
||||
name string
|
||||
secret []byte
|
||||
|
@ -125,7 +144,7 @@ func TestPubKeyEquals(t *testing.T) {
|
|||
testCases := []struct {
|
||||
msg string
|
||||
pubKey cryptotypes.PubKey
|
||||
other crypto.PubKey
|
||||
other cryptotypes.PubKey
|
||||
expectEq bool
|
||||
}{
|
||||
{
|
||||
|
@ -145,7 +164,7 @@ func TestPubKeyEquals(t *testing.T) {
|
|||
{
|
||||
"different types",
|
||||
secp256K1PubKey,
|
||||
sr25519.GenPrivKey().PubKey(),
|
||||
ed25519.GenPrivKey().PubKey(),
|
||||
false,
|
||||
},
|
||||
}
|
||||
|
@ -164,7 +183,7 @@ func TestPrivKeyEquals(t *testing.T) {
|
|||
testCases := []struct {
|
||||
msg string
|
||||
privKey cryptotypes.PrivKey
|
||||
other crypto.PrivKey
|
||||
other cryptotypes.PrivKey
|
||||
expectEq bool
|
||||
}{
|
||||
{
|
||||
|
@ -184,7 +203,7 @@ func TestPrivKeyEquals(t *testing.T) {
|
|||
{
|
||||
"different types",
|
||||
secp256K1PrivKey,
|
||||
sr25519.GenPrivKey(),
|
||||
ed25519.GenPrivKey(),
|
||||
false,
|
||||
},
|
||||
}
|
||||
|
@ -249,3 +268,56 @@ func TestMarshalAmino(t *testing.T) {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestMarshalAmino_BackwardsCompatibility(t *testing.T) {
|
||||
aminoCdc := codec.NewLegacyAmino()
|
||||
// Create Tendermint keys.
|
||||
tmPrivKey := tmsecp256k1.GenPrivKey()
|
||||
tmPubKey := tmPrivKey.PubKey()
|
||||
// Create our own keys, with the same private key as Tendermint's.
|
||||
privKey := &secp256k1.PrivKey{Key: []byte(tmPrivKey)}
|
||||
pubKey := privKey.PubKey().(*secp256k1.PubKey)
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
tmKey interface{}
|
||||
ourKey interface{}
|
||||
marshalFn func(o interface{}) ([]byte, error)
|
||||
}{
|
||||
{
|
||||
"secp256k1 private key, binary",
|
||||
tmPrivKey,
|
||||
privKey,
|
||||
aminoCdc.MarshalBinaryBare,
|
||||
},
|
||||
{
|
||||
"secp256k1 private key, JSON",
|
||||
tmPrivKey,
|
||||
privKey,
|
||||
aminoCdc.MarshalJSON,
|
||||
},
|
||||
{
|
||||
"secp256k1 public key, binary",
|
||||
tmPubKey,
|
||||
pubKey,
|
||||
aminoCdc.MarshalBinaryBare,
|
||||
},
|
||||
{
|
||||
"secp256k1 public key, JSON",
|
||||
tmPubKey,
|
||||
pubKey,
|
||||
aminoCdc.MarshalJSON,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
// Make sure Amino encoding override is not breaking backwards compatibility.
|
||||
bz1, err := tc.marshalFn(tc.tmKey)
|
||||
require.NoError(t, err)
|
||||
bz2, err := tc.marshalFn(tc.ourKey)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, bz1, bz2)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
tcrypto "github.com/tendermint/tendermint/crypto"
|
||||
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
||||
type byter interface {
|
||||
|
@ -51,12 +51,12 @@ func TestNilEncodings(t *testing.T) {
|
|||
require.EqualValues(t, a, b)
|
||||
|
||||
// Check nil PubKey.
|
||||
var c, d tcrypto.PubKey
|
||||
var c, d cryptotypes.PubKey
|
||||
checkAminoJSON(t, &c, &d, true)
|
||||
require.EqualValues(t, c, d)
|
||||
|
||||
// Check nil PrivKey.
|
||||
var e, f tcrypto.PrivKey
|
||||
var e, f cryptotypes.PrivKey
|
||||
checkAminoJSON(t, &e, &f, true)
|
||||
require.EqualValues(t, e, f)
|
||||
|
||||
|
|
|
@ -8,10 +8,10 @@ import (
|
|||
"github.com/pkg/errors"
|
||||
|
||||
tmbtcec "github.com/tendermint/btcd/btcec"
|
||||
tmcrypto "github.com/tendermint/tendermint/crypto"
|
||||
|
||||
"github.com/cosmos/cosmos-sdk/crypto/hd"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
|
||||
"github.com/cosmos/cosmos-sdk/crypto/types"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@ -43,7 +43,7 @@ type (
|
|||
// CachedPubKey should be private, but we want to encode it via
|
||||
// go-amino so we can view the address later, even without having the
|
||||
// ledger attached.
|
||||
CachedPubKey tmcrypto.PubKey
|
||||
CachedPubKey types.PubKey
|
||||
Path hd.BIP44Params
|
||||
}
|
||||
)
|
||||
|
@ -53,7 +53,7 @@ type (
|
|||
// This function is marked as unsafe as it will retrieve a pubkey without user verification.
|
||||
// It can only be used to verify a pubkey but never to create new accounts/keys. In that case,
|
||||
// please refer to NewPrivKeySecp256k1
|
||||
func NewPrivKeySecp256k1Unsafe(path hd.BIP44Params) (tmcrypto.PrivKey, error) {
|
||||
func NewPrivKeySecp256k1Unsafe(path hd.BIP44Params) (types.LedgerPrivKey, error) {
|
||||
device, err := getDevice()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -70,7 +70,7 @@ func NewPrivKeySecp256k1Unsafe(path hd.BIP44Params) (tmcrypto.PrivKey, error) {
|
|||
|
||||
// NewPrivKeySecp256k1 will generate a new key and store the public key for later use.
|
||||
// The request will require user confirmation and will show account and index in the device
|
||||
func NewPrivKeySecp256k1(path hd.BIP44Params, hrp string) (tmcrypto.PrivKey, string, error) {
|
||||
func NewPrivKeySecp256k1(path hd.BIP44Params, hrp string) (types.LedgerPrivKey, string, error) {
|
||||
device, err := getDevice()
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
|
@ -86,7 +86,7 @@ func NewPrivKeySecp256k1(path hd.BIP44Params, hrp string) (tmcrypto.PrivKey, str
|
|||
}
|
||||
|
||||
// PubKey returns the cached public key.
|
||||
func (pkl PrivKeyLedgerSecp256k1) PubKey() tmcrypto.PubKey {
|
||||
func (pkl PrivKeyLedgerSecp256k1) PubKey() types.PubKey {
|
||||
return pkl.CachedPubKey
|
||||
}
|
||||
|
||||
|
@ -102,7 +102,7 @@ func (pkl PrivKeyLedgerSecp256k1) Sign(message []byte) ([]byte, error) {
|
|||
}
|
||||
|
||||
// ShowAddress triggers a ledger device to show the corresponding address.
|
||||
func ShowAddress(path hd.BIP44Params, expectedPubKey tmcrypto.PubKey,
|
||||
func ShowAddress(path hd.BIP44Params, expectedPubKey types.PubKey,
|
||||
accountAddressPrefix string) error {
|
||||
device, err := getDevice()
|
||||
if err != nil {
|
||||
|
@ -154,7 +154,7 @@ func (pkl PrivKeyLedgerSecp256k1) Bytes() []byte {
|
|||
|
||||
// Equals implements the PrivKey interface. It makes sure two private keys
|
||||
// refer to the same public key.
|
||||
func (pkl PrivKeyLedgerSecp256k1) Equals(other tmcrypto.PrivKey) bool {
|
||||
func (pkl PrivKeyLedgerSecp256k1) Equals(other types.LedgerPrivKey) bool {
|
||||
if otherKey, ok := other.(PrivKeyLedgerSecp256k1); ok {
|
||||
return pkl.CachedPubKey.Equals(otherKey.CachedPubKey)
|
||||
}
|
||||
|
@ -234,7 +234,7 @@ func sign(device SECP256K1, pkl PrivKeyLedgerSecp256k1, msg []byte) ([]byte, err
|
|||
//
|
||||
// since this involves IO, it may return an error, which is not exposed
|
||||
// in the PubKey interface, so this function allows better error handling
|
||||
func getPubKeyUnsafe(device SECP256K1, path hd.BIP44Params) (tmcrypto.PubKey, error) {
|
||||
func getPubKeyUnsafe(device SECP256K1, path hd.BIP44Params) (types.PubKey, error) {
|
||||
publicKey, err := device.GetPublicKeySECP256K1(path.DerivationPath())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("please open Cosmos app on the Ledger device - error: %v", err)
|
||||
|
@ -258,7 +258,7 @@ func getPubKeyUnsafe(device SECP256K1, path hd.BIP44Params) (tmcrypto.PubKey, er
|
|||
//
|
||||
// Since this involves IO, it may return an error, which is not exposed
|
||||
// in the PubKey interface, so this function allows better error handling.
|
||||
func getPubKeyAddrSafe(device SECP256K1, path hd.BIP44Params, hrp string) (tmcrypto.PubKey, string, error) {
|
||||
func getPubKeyAddrSafe(device SECP256K1, path hd.BIP44Params, hrp string) (types.PubKey, string, error) {
|
||||
publicKey, addr, err := device.GetAddressPubKeySECP256K1(path.DerivationPath(), hrp)
|
||||
if err != nil {
|
||||
return nil, "", fmt.Errorf("address %s rejected", addr)
|
||||
|
|