commit
51dca8f0a9
|
@ -9,6 +9,7 @@ examples/build/*
|
||||||
examples/basecoin/glide.lock
|
examples/basecoin/glide.lock
|
||||||
examples/basecoin/app/data
|
examples/basecoin/app/data
|
||||||
baseapp/data/*
|
baseapp/data/*
|
||||||
|
docs/_build
|
||||||
|
|
||||||
### Vagrant ###
|
### Vagrant ###
|
||||||
.vagrant/
|
.vagrant/
|
||||||
|
|
|
@ -1,14 +0,0 @@
|
||||||
FROM golang:latest
|
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y jq
|
|
||||||
|
|
||||||
RUN mkdir -p /go/src/github.com/tendermint/basecoin
|
|
||||||
WORKDIR /go/src/github.com/tendermint/basecoin
|
|
||||||
|
|
||||||
COPY Makefile /go/src/github.com/tendermint/basecoin/
|
|
||||||
COPY glide.yaml /go/src/github.com/tendermint/basecoin/
|
|
||||||
COPY glide.lock /go/src/github.com/tendermint/basecoin/
|
|
||||||
|
|
||||||
RUN make get_vendor_deps
|
|
||||||
|
|
||||||
COPY . /go/src/github.com/tendermint/basecoin
|
|
8
Makefile
8
Makefile
|
@ -68,13 +68,7 @@ test_unit:
|
||||||
@go test $(PACKAGES)
|
@go test $(PACKAGES)
|
||||||
|
|
||||||
test_cover:
|
test_cover:
|
||||||
@bash test_cover.sh
|
@bash tests/test_cover.sh
|
||||||
|
|
||||||
test_tutorial:
|
|
||||||
@shelldown ${TUTORIALS}
|
|
||||||
@for script in docs/guide/*.sh ; do \
|
|
||||||
bash $$script ; \
|
|
||||||
done
|
|
||||||
|
|
||||||
benchmark:
|
benchmark:
|
||||||
@go test -bench=. $(PACKAGES)
|
@go test -bench=. $(PACKAGES)
|
||||||
|
|
56
RELEASE.md
56
RELEASE.md
|
@ -1,56 +0,0 @@
|
||||||
# Release Process
|
|
||||||
|
|
||||||
Basecoin is the heart of most demo apps and the testnets, but the last few releases have been a little chaotic. In order to guarantee a higher, production-quality release in the future, we will work on a release process to check before the push to master. This is a work-in-progress and should be trialed on the 0.6.x patches, and used for the 0.7.0 release.
|
|
||||||
|
|
||||||
This is a rough-guide. Please add comments here, let's try it out for 0.6.1 and see what is annoying and useless, and what is missing and useful.
|
|
||||||
|
|
||||||
## Planning
|
|
||||||
|
|
||||||
* Create issues (and invite others to do so)
|
|
||||||
* Create WIP PR for release as placeholder
|
|
||||||
* Clarify scope of release in text
|
|
||||||
* Create labels, eg. (0.6.1 and 0.6.x)
|
|
||||||
* Tag all issues for this release with 0.6.1
|
|
||||||
* Other, less urgent enhancements should get the 0.6.x label
|
|
||||||
|
|
||||||
## Coding
|
|
||||||
|
|
||||||
* Freeze tagging more issues for this release
|
|
||||||
* Update PR to note this
|
|
||||||
* If you want an exception, you need a good excuse ;)
|
|
||||||
* Handle all issues
|
|
||||||
* Write code
|
|
||||||
* Update CHANGELOG
|
|
||||||
* Review and merge
|
|
||||||
* Update version
|
|
||||||
* Remove WIP flag on PR
|
|
||||||
* Organize QA
|
|
||||||
* Prepare blog post (optional for patch/bugfix releases?)
|
|
||||||
|
|
||||||
## QA
|
|
||||||
|
|
||||||
Once we have a PR for the release and think it is ready, we should test it out internally:
|
|
||||||
|
|
||||||
* Code review
|
|
||||||
* Hopefully dealt with by individual code reviews on the merged issues
|
|
||||||
* A general run-through is always good to find dead-code, things to cleanup
|
|
||||||
* Review blog post (and run-through)
|
|
||||||
* Manual run-through of tutorials (and feedback on bad UX)
|
|
||||||
* Deployment of a private testnet, multiple users test out manually (feedback on bugs, or annoying UX)
|
|
||||||
* Test out upgrading existing testnet from last version, document or add tools for easier upgrade.
|
|
||||||
* If problems arrise here:
|
|
||||||
* Create bugfix issues
|
|
||||||
* Fix them
|
|
||||||
* Repeat QA
|
|
||||||
|
|
||||||
## Release
|
|
||||||
|
|
||||||
Once QA passes, we need to orchestrate the release.
|
|
||||||
|
|
||||||
* Merge to master
|
|
||||||
* Set all glide dependencies to proper master versions of repos
|
|
||||||
* Push code with new version tag
|
|
||||||
* Link CHANGELOG to the [github release](https://github.com/tendermint/basecoin/releases)
|
|
||||||
* Package up new version as binaries (and upload to s3)
|
|
||||||
* Upgrade our public-facing testnets with the latest versions
|
|
||||||
* Release blog post
|
|
9
TODO
9
TODO
|
@ -1,9 +0,0 @@
|
||||||
|
|
||||||
* global state dumper.
|
|
||||||
for developer to list accounts, etc.
|
|
||||||
e.g. what does the world look like?
|
|
||||||
cmd cli.
|
|
||||||
|
|
||||||
* something that can list transactions ...
|
|
||||||
make all decorators actually use the logger
|
|
||||||
so you can see all the txs and see what's going on
|
|
|
@ -0,0 +1,20 @@
|
||||||
|
# Minimal makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line.
|
||||||
|
SPHINXOPTS =
|
||||||
|
SPHINXBUILD = python -msphinx
|
||||||
|
SPHINXPROJ = Cosmos-SDK
|
||||||
|
SOURCEDIR = .
|
||||||
|
BUILDDIR = _build
|
||||||
|
|
||||||
|
# Put it first so that "make" without argument is like "make help".
|
||||||
|
help:
|
||||||
|
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
.PHONY: help Makefile
|
||||||
|
|
||||||
|
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||||
|
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||||
|
%: Makefile
|
||||||
|
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
|
@ -0,0 +1,289 @@
|
||||||
|
Basecoin Basics
|
||||||
|
===============
|
||||||
|
|
||||||
|
Here we explain how to get started with a basic Basecoin blockchain, how
|
||||||
|
to send transactions between accounts using the ``basecoin`` tool, and
|
||||||
|
what is happening under the hood.
|
||||||
|
|
||||||
|
Install
|
||||||
|
-------
|
||||||
|
|
||||||
|
With go, it's one command:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
go get -u github.com/cosmos/cosmos-sdk
|
||||||
|
|
||||||
|
If you have trouble, see the `installation guide <./install.html>`__.
|
||||||
|
|
||||||
|
TODO: update all the below
|
||||||
|
|
||||||
|
Generate some keys
|
||||||
|
~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Let's generate two keys, one to receive an initial allocation of coins,
|
||||||
|
and one to send some coins to later:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli keys new cool
|
||||||
|
basecli keys new friend
|
||||||
|
|
||||||
|
You'll need to enter passwords. You can view your key names and
|
||||||
|
addresses with ``basecli keys list``, or see a particular key's address
|
||||||
|
with ``basecli keys get <NAME>``.
|
||||||
|
|
||||||
|
Initialize Basecoin
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
To initialize a new Basecoin blockchain, run:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecoin init <ADDRESS>
|
||||||
|
|
||||||
|
If you prefer not to copy-paste, you can provide the address
|
||||||
|
programatically:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecoin init $(basecli keys get cool | awk '{print $2}')
|
||||||
|
|
||||||
|
This will create the necessary files for a Basecoin blockchain with one
|
||||||
|
validator and one account (corresponding to your key) in
|
||||||
|
``~/.basecoin``. For more options on setup, see the `guide to using the
|
||||||
|
Basecoin tool </docs/guide/basecoin-tool.md>`__.
|
||||||
|
|
||||||
|
If you like, you can manually add some more accounts to the blockchain
|
||||||
|
by generating keys and editing the ``~/.basecoin/genesis.json``.
|
||||||
|
|
||||||
|
Start Basecoin
|
||||||
|
~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Now we can start Basecoin:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecoin start
|
||||||
|
|
||||||
|
You should see blocks start streaming in!
|
||||||
|
|
||||||
|
Initialize Light-Client
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
Now that Basecoin is running we can initialize ``basecli``, the
|
||||||
|
light-client utility. Basecli is used for sending transactions and
|
||||||
|
querying the state. Leave Basecoin running and open a new terminal
|
||||||
|
window. Here run:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli init --node=tcp://localhost:46657 --genesis=$HOME/.basecoin/genesis.json
|
||||||
|
|
||||||
|
If you provide the genesis file to basecli, it can calculate the proper
|
||||||
|
chainID and validator hash. Basecli needs to get this information from
|
||||||
|
some trusted source, so all queries done with ``basecli`` can be
|
||||||
|
cryptographically proven to be correct according to a known validator
|
||||||
|
set.
|
||||||
|
|
||||||
|
Note: that ``--genesis`` only works if there have been no validator set
|
||||||
|
changes since genesis. If there are validator set changes, you need to
|
||||||
|
find the current set through some other method.
|
||||||
|
|
||||||
|
Send transactions
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Now we are ready to send some transactions. First Let's check the
|
||||||
|
balance of the two accounts we setup earlier:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
ME=$(basecli keys get cool | awk '{print $2}')
|
||||||
|
YOU=$(basecli keys get friend | awk '{print $2}')
|
||||||
|
basecli query account $ME
|
||||||
|
basecli query account $YOU
|
||||||
|
|
||||||
|
The first account is flush with cash, while the second account doesn't
|
||||||
|
exist. Let's send funds from the first account to the second:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli tx send --name=cool --amount=1000mycoin --to=$YOU --sequence=1
|
||||||
|
|
||||||
|
Now if we check the second account, it should have ``1000`` 'mycoin'
|
||||||
|
coins!
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli query account $YOU
|
||||||
|
|
||||||
|
We can send some of these coins back like so:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli tx send --name=friend --amount=500mycoin --to=$ME --sequence=1
|
||||||
|
|
||||||
|
Note how we use the ``--name`` flag to select a different account to
|
||||||
|
send from.
|
||||||
|
|
||||||
|
If we try to send too much, we'll get an error:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli tx send --name=friend --amount=500000mycoin --to=$ME --sequence=2
|
||||||
|
|
||||||
|
Let's send another transaction:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli tx send --name=cool --amount=2345mycoin --to=$YOU --sequence=2
|
||||||
|
|
||||||
|
Note the ``hash`` value in the response - this is the hash of the
|
||||||
|
transaction. We can query for the transaction by this hash:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli query tx <HASH>
|
||||||
|
|
||||||
|
See ``basecli tx send --help`` for additional details.
|
||||||
|
|
||||||
|
Proof
|
||||||
|
-----
|
||||||
|
|
||||||
|
Even if you don't see it in the UI, the result of every query comes with
|
||||||
|
a proof. This is a Merkle proof that the result of the query is actually
|
||||||
|
contained in the state. And the state's Merkle root is contained in a
|
||||||
|
recent block header. Behind the scenes, ``countercli`` will not only
|
||||||
|
verify that this state matches the header, but also that the header is
|
||||||
|
properly signed by the known validator set. It will even update the
|
||||||
|
validator set as needed, so long as there have not been major changes
|
||||||
|
and it is secure to do so. So, if you wonder why the query may take a
|
||||||
|
second... there is a lot of work going on in the background to make sure
|
||||||
|
even a lying full node can't trick your client.
|
||||||
|
|
||||||
|
Accounts and Transactions
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
For a better understanding of how to further use the tools, it helps to
|
||||||
|
understand the underlying data structures.
|
||||||
|
|
||||||
|
Accounts
|
||||||
|
~~~~~~~~
|
||||||
|
|
||||||
|
The Basecoin state consists entirely of a set of accounts. Each account
|
||||||
|
contains a public key, a balance in many different coin denominations,
|
||||||
|
and a strictly increasing sequence number for replay protection. This
|
||||||
|
type of account was directly inspired by accounts in Ethereum, and is
|
||||||
|
unlike Bitcoin's use of Unspent Transaction Outputs (UTXOs). Note
|
||||||
|
Basecoin is a multi-asset cryptocurrency, so each account can have many
|
||||||
|
different kinds of tokens.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type Account struct {
|
||||||
|
PubKey crypto.PubKey `json:"pub_key"` // May be nil, if not known.
|
||||||
|
Sequence int `json:"sequence"`
|
||||||
|
Balance Coins `json:"coins"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Coins []Coin
|
||||||
|
|
||||||
|
type Coin struct {
|
||||||
|
Denom string `json:"denom"`
|
||||||
|
Amount int64 `json:"amount"`
|
||||||
|
}
|
||||||
|
|
||||||
|
If you want to add more coins to a blockchain, you can do so manually in
|
||||||
|
the ``~/.basecoin/genesis.json`` before you start the blockchain for the
|
||||||
|
first time.
|
||||||
|
|
||||||
|
Accounts are serialized and stored in a Merkle tree under the key
|
||||||
|
``base/a/<address>``, where ``<address>`` is the address of the account.
|
||||||
|
Typically, the address of the account is the 20-byte ``RIPEMD160`` hash
|
||||||
|
of the public key, but other formats are acceptable as well, as defined
|
||||||
|
in the `Tendermint crypto
|
||||||
|
library <https://github.com/tendermint/go-crypto>`__. The Merkle tree
|
||||||
|
used in Basecoin is a balanced, binary search tree, which we call an
|
||||||
|
`IAVL tree <https://github.com/tendermint/iavl>`__.
|
||||||
|
|
||||||
|
Transactions
|
||||||
|
~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Basecoin defines a transaction type, the ``SendTx``, which allows tokens
|
||||||
|
to be sent to other accounts. The ``SendTx`` takes a list of inputs and
|
||||||
|
a list of outputs, and transfers all the tokens listed in the inputs
|
||||||
|
from their corresponding accounts to the accounts listed in the output.
|
||||||
|
The ``SendTx`` is structured as follows:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type SendTx struct {
|
||||||
|
Gas int64 `json:"gas"`
|
||||||
|
Fee Coin `json:"fee"`
|
||||||
|
Inputs []TxInput `json:"inputs"`
|
||||||
|
Outputs []TxOutput `json:"outputs"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type TxInput struct {
|
||||||
|
Address []byte `json:"address"` // Hash of the PubKey
|
||||||
|
Coins Coins `json:"coins"` //
|
||||||
|
Sequence int `json:"sequence"` // Must be 1 greater than the last committed TxInput
|
||||||
|
Signature crypto.Signature `json:"signature"` // Depends on the PubKey type and the whole Tx
|
||||||
|
PubKey crypto.PubKey `json:"pub_key"` // Is present iff Sequence == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
type TxOutput struct {
|
||||||
|
Address []byte `json:"address"` // Hash of the PubKey
|
||||||
|
Coins Coins `json:"coins"` //
|
||||||
|
}
|
||||||
|
|
||||||
|
Note the ``SendTx`` includes a field for ``Gas`` and ``Fee``. The
|
||||||
|
``Gas`` limits the total amount of computation that can be done by the
|
||||||
|
transaction, while the ``Fee`` refers to the total amount paid in fees.
|
||||||
|
This is slightly different from Ethereum's concept of ``Gas`` and
|
||||||
|
``GasPrice``, where ``Fee = Gas x GasPrice``. In Basecoin, the ``Gas``
|
||||||
|
and ``Fee`` are independent, and the ``GasPrice`` is implicit.
|
||||||
|
|
||||||
|
In Basecoin, the ``Fee`` is meant to be used by the validators to inform
|
||||||
|
the ordering of transactions, like in Bitcoin. And the ``Gas`` is meant
|
||||||
|
to be used by the application plugin to control its execution. There is
|
||||||
|
currently no means to pass ``Fee`` information to the Tendermint
|
||||||
|
validators, but it will come soon...
|
||||||
|
|
||||||
|
Note also that the ``PubKey`` only needs to be sent for
|
||||||
|
``Sequence == 0``. After that, it is stored under the account in the
|
||||||
|
Merkle tree and subsequent transactions can exclude it, using only the
|
||||||
|
``Address`` to refer to the sender. Ethereum does not require public
|
||||||
|
keys to be sent in transactions as it uses a different elliptic curve
|
||||||
|
scheme which enables the public key to be derived from the signature
|
||||||
|
itself.
|
||||||
|
|
||||||
|
Finally, note that the use of multiple inputs and multiple outputs
|
||||||
|
allows us to send many different types of tokens between many different
|
||||||
|
accounts at once in an atomic transaction. Thus, the ``SendTx`` can
|
||||||
|
serve as a basic unit of decentralized exchange. When using multiple
|
||||||
|
inputs and outputs, you must make sure that the sum of coins of the
|
||||||
|
inputs equals the sum of coins of the outputs (no creating money), and
|
||||||
|
that all accounts that provide inputs have signed the transaction.
|
||||||
|
|
||||||
|
Clean Up
|
||||||
|
--------
|
||||||
|
|
||||||
|
**WARNING:** Running these commands will wipe out any existing
|
||||||
|
information in both the ``~/.basecli`` and ``~/.basecoin`` directories,
|
||||||
|
including private keys.
|
||||||
|
|
||||||
|
To remove all the files created and refresh your environment (e.g., if
|
||||||
|
starting this tutorial again or trying something new), the following
|
||||||
|
commands are run:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli reset_all
|
||||||
|
rm -rf ~/.basecoin
|
||||||
|
|
||||||
|
In this guide, we introduced the ``basecoin`` and ``basecli`` tools,
|
||||||
|
demonstrated how to start a new basecoin blockchain and how to send
|
||||||
|
tokens between accounts, and discussed the underlying data types for
|
||||||
|
accounts and transactions, specifically the ``Account`` and the
|
||||||
|
``SendTx``.
|
|
@ -0,0 +1,215 @@
|
||||||
|
Basecoin Extensions
|
||||||
|
===================
|
||||||
|
|
||||||
|
TODO: re-write for extensions
|
||||||
|
|
||||||
|
In the `previous guide <basecoin-basics.md>`__, we saw how to use the
|
||||||
|
``basecoin`` tool to start a blockchain and the ``basecli`` tools to
|
||||||
|
send transactions. We also learned about ``Account`` and ``SendTx``, the
|
||||||
|
basic data types giving us a multi-asset cryptocurrency. Here, we will
|
||||||
|
demonstrate how to extend the tools to use another transaction type, the
|
||||||
|
``AppTx``, so we can send data to a custom plugin. In this example we
|
||||||
|
explore a simple plugin named ``counter``.
|
||||||
|
|
||||||
|
Example Plugin
|
||||||
|
--------------
|
||||||
|
|
||||||
|
The design of the ``basecoin`` tool makes it easy to extend for custom
|
||||||
|
functionality. The Counter plugin is bundled with basecoin, so if you
|
||||||
|
have already `installed basecoin <install.md>`__ and run
|
||||||
|
``make install`` then you should be able to run a full node with
|
||||||
|
``counter`` and the a light-client ``countercli`` from terminal. The
|
||||||
|
Counter plugin is just like the ``basecoin`` tool. They both use the
|
||||||
|
same library of commands, including one for signing and broadcasting
|
||||||
|
``SendTx``.
|
||||||
|
|
||||||
|
Counter transactions take two custom inputs, a boolean argument named
|
||||||
|
``valid``, and a coin amount named ``countfee``. The transaction is only
|
||||||
|
accepted if both ``valid`` is set to true and the transaction input
|
||||||
|
coins is greater than ``countfee`` that the user provides.
|
||||||
|
|
||||||
|
A new blockchain can be initialized and started just like in the
|
||||||
|
`previous guide <basecoin-basics.md>`__:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
# WARNING: this wipes out data - but counter is only for demos...
|
||||||
|
rm -rf ~/.counter
|
||||||
|
countercli reset_all
|
||||||
|
|
||||||
|
countercli keys new cool
|
||||||
|
countercli keys new friend
|
||||||
|
|
||||||
|
counter init $(countercli keys get cool | awk '{print $2}')
|
||||||
|
|
||||||
|
counter start
|
||||||
|
|
||||||
|
The default files are stored in ``~/.counter``. In another window we can
|
||||||
|
initialize the light-client and send a transaction:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
countercli init --node=tcp://localhost:46657 --genesis=$HOME/.counter/genesis.json
|
||||||
|
|
||||||
|
YOU=$(countercli keys get friend | awk '{print $2}')
|
||||||
|
countercli tx send --name=cool --amount=1000mycoin --to=$YOU --sequence=1
|
||||||
|
|
||||||
|
But the Counter has an additional command, ``countercli tx counter``,
|
||||||
|
which crafts an ``AppTx`` specifically for this plugin:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
countercli tx counter --name cool
|
||||||
|
countercli tx counter --name cool --valid
|
||||||
|
|
||||||
|
The first transaction is rejected by the plugin because it was not
|
||||||
|
marked as valid, while the second transaction passes. We can build
|
||||||
|
plugins that take many arguments of different types, and easily extend
|
||||||
|
the tool to accomodate them. Of course, we can also expose queries on
|
||||||
|
our plugin:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
countercli query counter
|
||||||
|
|
||||||
|
Tada! We can now see that our custom counter plugin transactions went
|
||||||
|
through. You should see a Counter value of 1 representing the number of
|
||||||
|
valid transactions. If we send another transaction, and then query
|
||||||
|
again, we will see the value increment. Note that we need the sequence
|
||||||
|
number here to send the coins (it didn't increment when we just pinged
|
||||||
|
the counter)
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
countercli tx counter --name cool --countfee=2mycoin --sequence=2 --valid
|
||||||
|
countercli query counter
|
||||||
|
|
||||||
|
The Counter value should be 2, because we sent a second valid
|
||||||
|
transaction. And this time, since we sent a countfee (which must be less
|
||||||
|
than or equal to the total amount sent with the tx), it stores the
|
||||||
|
``TotalFees`` on the counter as well.
|
||||||
|
|
||||||
|
Keep it mind that, just like with ``basecli``, the ``countercli``
|
||||||
|
verifies a proof that the query response is correct and up-to-date.
|
||||||
|
|
||||||
|
Now, before we implement our own plugin and tooling, it helps to
|
||||||
|
understand the ``AppTx`` and the design of the plugin system.
|
||||||
|
|
||||||
|
AppTx
|
||||||
|
-----
|
||||||
|
|
||||||
|
The ``AppTx`` is similar to the ``SendTx``, but instead of sending coins
|
||||||
|
from inputs to outputs, it sends coins from one input to a plugin, and
|
||||||
|
can also send some data.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type AppTx struct {
|
||||||
|
Gas int64 `json:"gas"`
|
||||||
|
Fee Coin `json:"fee"`
|
||||||
|
Input TxInput `json:"input"`
|
||||||
|
Name string `json:"type"` // Name of the plugin
|
||||||
|
Data []byte `json:"data"` // Data for the plugin to process
|
||||||
|
}
|
||||||
|
|
||||||
|
The ``AppTx`` enables Basecoin to be extended with arbitrary additional
|
||||||
|
functionality through the use of plugins. The ``Name`` field in the
|
||||||
|
``AppTx`` refers to the particular plugin which should process the
|
||||||
|
transaction, and the ``Data`` field of the ``AppTx`` is the data to be
|
||||||
|
forwarded to the plugin for processing.
|
||||||
|
|
||||||
|
Note the ``AppTx`` also has a ``Gas`` and ``Fee``, with the same meaning
|
||||||
|
as for the ``SendTx``. It also includes a single ``TxInput``, which
|
||||||
|
specifies the sender of the transaction, and some coins that can be
|
||||||
|
forwarded to the plugin as well.
|
||||||
|
|
||||||
|
Plugins
|
||||||
|
-------
|
||||||
|
|
||||||
|
A plugin is simply a Go package that implements the ``Plugin``
|
||||||
|
interface:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type Plugin interface {
|
||||||
|
|
||||||
|
// Name of this plugin, should be short.
|
||||||
|
Name() string
|
||||||
|
|
||||||
|
// Run a transaction from ABCI DeliverTx
|
||||||
|
RunTx(store KVStore, ctx CallContext, txBytes []byte) (res abci.Result)
|
||||||
|
|
||||||
|
// Other ABCI message handlers
|
||||||
|
SetOption(store KVStore, key string, value string) (log string)
|
||||||
|
InitChain(store KVStore, vals []*abci.Validator)
|
||||||
|
BeginBlock(store KVStore, hash []byte, header *abci.Header)
|
||||||
|
EndBlock(store KVStore, height uint64) (res abci.ResponseEndBlock)
|
||||||
|
}
|
||||||
|
|
||||||
|
type CallContext struct {
|
||||||
|
CallerAddress []byte // Caller's Address (hash of PubKey)
|
||||||
|
CallerAccount *Account // Caller's Account, w/ fee & TxInputs deducted
|
||||||
|
Coins Coins // The coins that the caller wishes to spend, excluding fees
|
||||||
|
}
|
||||||
|
|
||||||
|
The workhorse of the plugin is ``RunTx``, which is called when an
|
||||||
|
``AppTx`` is processed. The ``Data`` from the ``AppTx`` is passed in as
|
||||||
|
the ``txBytes``, while the ``Input`` from the ``AppTx`` is used to
|
||||||
|
populate the ``CallContext``.
|
||||||
|
|
||||||
|
Note that ``RunTx`` also takes a ``KVStore`` - this is an abstraction
|
||||||
|
for the underlying Merkle tree which stores the account data. By passing
|
||||||
|
this to the plugin, we enable plugins to update accounts in the Basecoin
|
||||||
|
state directly, and also to store arbitrary other information in the
|
||||||
|
state. In this way, the functionality and state of a Basecoin-derived
|
||||||
|
cryptocurrency can be greatly extended. One could imagine going so far
|
||||||
|
as to implement the Ethereum Virtual Machine as a plugin!
|
||||||
|
|
||||||
|
For details on how to initialize the state using ``SetOption``, see the
|
||||||
|
`guide to using the basecoin tool <basecoin-tool.md#genesis>`__.
|
||||||
|
|
||||||
|
Implement your own
|
||||||
|
------------------
|
||||||
|
|
||||||
|
To implement your own plugin and tooling, make a copy of
|
||||||
|
``docs/guide/counter``, and modify the code accordingly. Here, we will
|
||||||
|
briefly describe the design and the changes to be made, but see the code
|
||||||
|
for more details.
|
||||||
|
|
||||||
|
First is the ``cmd/counter/main.go``, which drives the program. It can
|
||||||
|
be left alone, but you should change any occurrences of ``counter`` to
|
||||||
|
whatever your plugin tool is going to be called. You must also register
|
||||||
|
your plugin(s) with the basecoin app with ``RegisterStartPlugin``.
|
||||||
|
|
||||||
|
The light-client is located in ``cmd/countercli/main.go`` and allows for
|
||||||
|
transaction and query commands. This file can also be left mostly alone
|
||||||
|
besides replacing the application name and adding references to new
|
||||||
|
plugin commands.
|
||||||
|
|
||||||
|
Next is the custom commands in ``cmd/countercli/commands/``. These files
|
||||||
|
are where we extend the tool with any new commands and flags we need to
|
||||||
|
send transactions or queries to our plugin. You define custom ``tx`` and
|
||||||
|
``query`` subcommands, which are registered in ``main.go`` (avoiding
|
||||||
|
``init()`` auto-registration, for less magic and more control in the
|
||||||
|
main executable).
|
||||||
|
|
||||||
|
Finally is ``plugins/counter/counter.go``, where we provide an
|
||||||
|
implementation of the ``Plugin`` interface. The most important part of
|
||||||
|
the implementation is the ``RunTx`` method, which determines the meaning
|
||||||
|
of the data sent along in the ``AppTx``. In our example, we define a new
|
||||||
|
transaction type, the ``CounterTx``, which we expect to be encoded in
|
||||||
|
the ``AppTx.Data``, and thus to be decoded in the ``RunTx`` method, and
|
||||||
|
used to update the plugin state.
|
||||||
|
|
||||||
|
For more examples and inspiration, see our `repository of example
|
||||||
|
plugins <https://github.com/tendermint/basecoin-examples>`__.
|
||||||
|
|
||||||
|
Conclusion
|
||||||
|
----------
|
||||||
|
|
||||||
|
In this guide, we demonstrated how to create a new plugin and how to
|
||||||
|
extend the ``basecoin`` tool to start a blockchain with the plugin
|
||||||
|
enabled and send transactions to it. In the next guide, we introduce a
|
||||||
|
`plugin for Inter Blockchain Communication <ibc.md>`__, which allows us
|
||||||
|
to publish proofs of the state of one blockchain to another, and thus to
|
||||||
|
transfer tokens and data between them.
|
|
@ -0,0 +1,170 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Cosmos-SDK documentation build configuration file, created by
|
||||||
|
# sphinx-quickstart on Fri Sep 1 21:37:02 2017.
|
||||||
|
#
|
||||||
|
# This file is execfile()d with the current directory set to its
|
||||||
|
# containing dir.
|
||||||
|
#
|
||||||
|
# Note that not all possible configuration values are present in this
|
||||||
|
# autogenerated file.
|
||||||
|
#
|
||||||
|
# All configuration values have a default; values that are commented out
|
||||||
|
# serve to show the default.
|
||||||
|
|
||||||
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
|
#
|
||||||
|
# import os
|
||||||
|
# import sys
|
||||||
|
# sys.path.insert(0, os.path.abspath('.'))
|
||||||
|
|
||||||
|
import sphinx_rtd_theme
|
||||||
|
|
||||||
|
# -- General configuration ------------------------------------------------
|
||||||
|
|
||||||
|
# If your documentation needs a minimal Sphinx version, state it here.
|
||||||
|
#
|
||||||
|
# needs_sphinx = '1.0'
|
||||||
|
|
||||||
|
# Add any Sphinx extension module names here, as strings. They can be
|
||||||
|
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||||
|
# ones.
|
||||||
|
extensions = []
|
||||||
|
|
||||||
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
|
templates_path = ['_templates']
|
||||||
|
|
||||||
|
# The suffix(es) of source filenames.
|
||||||
|
# You can specify multiple suffix as a list of string:
|
||||||
|
#
|
||||||
|
# source_suffix = ['.rst', '.md']
|
||||||
|
source_suffix = '.rst'
|
||||||
|
|
||||||
|
# The master toctree document.
|
||||||
|
master_doc = 'index'
|
||||||
|
|
||||||
|
# General information about the project.
|
||||||
|
project = u'Cosmos-SDK'
|
||||||
|
copyright = u'2017, The Authors'
|
||||||
|
author = u'The Authors'
|
||||||
|
|
||||||
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
# built documents.
|
||||||
|
#
|
||||||
|
# The short X.Y version.
|
||||||
|
version = u''
|
||||||
|
# The full version, including alpha/beta/rc tags.
|
||||||
|
release = u''
|
||||||
|
|
||||||
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
|
# for a list of supported languages.
|
||||||
|
#
|
||||||
|
# This is also used if you do content translation via gettext catalogs.
|
||||||
|
# Usually you set "language" from the command line for these cases.
|
||||||
|
language = None
|
||||||
|
|
||||||
|
# List of patterns, relative to source directory, that match files and
|
||||||
|
# directories to ignore when looking for source files.
|
||||||
|
# This patterns also effect to html_static_path and html_extra_path
|
||||||
|
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||||
|
|
||||||
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
|
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||||
|
todo_include_todos = False
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for HTML output ----------------------------------------------
|
||||||
|
|
||||||
|
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||||
|
# a list of builtin themes.
|
||||||
|
#
|
||||||
|
html_theme = 'sphinx_rtd_theme'
|
||||||
|
# html_theme = 'alabaster'
|
||||||
|
|
||||||
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
|
# further. For a list of options available for each theme, see the
|
||||||
|
# documentation.
|
||||||
|
#
|
||||||
|
# html_theme_options = {}
|
||||||
|
|
||||||
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
|
html_static_path = ['_static']
|
||||||
|
|
||||||
|
# Custom sidebar templates, must be a dictionary that maps document names
|
||||||
|
# to template names.
|
||||||
|
#
|
||||||
|
# This is required for the alabaster theme
|
||||||
|
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
|
||||||
|
html_sidebars = {
|
||||||
|
'**': [
|
||||||
|
'about.html',
|
||||||
|
'navigation.html',
|
||||||
|
'relations.html', # needs 'show_related': True theme option to display
|
||||||
|
'searchbox.html',
|
||||||
|
'donate.html',
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for HTMLHelp output ------------------------------------------
|
||||||
|
|
||||||
|
# Output file base name for HTML help builder.
|
||||||
|
htmlhelp_basename = 'Cosmos-SDKdoc'
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for LaTeX output ---------------------------------------------
|
||||||
|
|
||||||
|
latex_elements = {
|
||||||
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
|
#
|
||||||
|
# 'papersize': 'letterpaper',
|
||||||
|
|
||||||
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
|
#
|
||||||
|
# 'pointsize': '10pt',
|
||||||
|
|
||||||
|
# Additional stuff for the LaTeX preamble.
|
||||||
|
#
|
||||||
|
# 'preamble': '',
|
||||||
|
|
||||||
|
# Latex figure (float) alignment
|
||||||
|
#
|
||||||
|
# 'figure_align': 'htbp',
|
||||||
|
}
|
||||||
|
|
||||||
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
|
# (source start file, target name, title,
|
||||||
|
# author, documentclass [howto, manual, or own class]).
|
||||||
|
latex_documents = [
|
||||||
|
(master_doc, 'Cosmos-SDK.tex', u'Cosmos-SDK Documentation',
|
||||||
|
u'The Authors', 'manual'),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for manual page output ---------------------------------------
|
||||||
|
|
||||||
|
# One entry per manual page. List of tuples
|
||||||
|
# (source start file, name, description, authors, manual section).
|
||||||
|
man_pages = [
|
||||||
|
(master_doc, 'cosmos-sdk', u'Cosmos-SDK Documentation',
|
||||||
|
[author], 1)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for Texinfo output -------------------------------------------
|
||||||
|
|
||||||
|
# Grouping the document tree into Texinfo files. List of tuples
|
||||||
|
# (source start file, target name, title, author,
|
||||||
|
# dir menu entry, description, category)
|
||||||
|
texinfo_documents = [
|
||||||
|
(master_doc, 'Cosmos-SDK', u'Cosmos-SDK Documentation',
|
||||||
|
author, 'Cosmos-SDK', 'One line description of project.',
|
||||||
|
'Miscellaneous'),
|
||||||
|
]
|
|
@ -1,96 +0,0 @@
|
||||||
## Design Document
|
|
||||||
|
|
||||||
### Object-Capability Model
|
|
||||||
|
|
||||||
When thinking about security, it's good to start with a specific threat model. Our threat model is the following:
|
|
||||||
|
|
||||||
> We want to assume a thriving ecosystem of Cosmos-SDK modules that are easy to compose into a blockchain application. Some of these modules will be faulty or malicious.
|
|
||||||
|
|
||||||
The Cosmos-SDK is designed to address this threat by being the foundation of an object capability system.
|
|
||||||
|
|
||||||
```
|
|
||||||
The structural properties of object capability systems favor
|
|
||||||
modularity in code design and ensure reliable encapsulation in
|
|
||||||
code implementation.
|
|
||||||
|
|
||||||
These structural properties facilitate the analysis of some
|
|
||||||
security properties of an object-capability program or operating
|
|
||||||
system. Some of these — in particular, information flow properties
|
|
||||||
— can be analyzed at the level of object references and
|
|
||||||
connectivity, independent of any knowledge or analysis of the code
|
|
||||||
that determines the behavior of the objects. As a consequence,
|
|
||||||
these security properties can be established and maintained in the
|
|
||||||
presence of new objects that contain unknown and possibly
|
|
||||||
malicious code.
|
|
||||||
|
|
||||||
These structural properties stem from the two rules governing
|
|
||||||
access to existing objects:
|
|
||||||
|
|
||||||
1) An object A can send a message to B only if object A holds a
|
|
||||||
reference to B.
|
|
||||||
|
|
||||||
2) An object A can obtain a reference to C only
|
|
||||||
if object A receives a message containing a reference to C. As a
|
|
||||||
consequence of these two rules, an object can obtain a reference
|
|
||||||
to another object only through a preexisting chain of references.
|
|
||||||
In short, "Only connectivity begets connectivity."
|
|
||||||
|
|
||||||
- https://en.wikipedia.org/wiki/Object-capability_model
|
|
||||||
```
|
|
||||||
|
|
||||||
Strictly speaking, Golang does not implement object capabilities completely, because of several issues:
|
|
||||||
|
|
||||||
* pervasive ability to import primitive modules (e.g. "unsafe", "os")
|
|
||||||
* pervasive ability to override module vars https://github.com/golang/go/issues/23161
|
|
||||||
* data-race vulnerability where 2+ goroutines can create illegal interface values
|
|
||||||
|
|
||||||
The first is easy to catch by auditing imports and using a proper dependency version control system like Glide. The second and third are unfortunate but they can be audited with some cost.
|
|
||||||
|
|
||||||
Perhaps [Go2 will implement the object capability model](https://github.com/golang/go/issues/23157).
|
|
||||||
|
|
||||||
### What does it look like?
|
|
||||||
|
|
||||||
Only reveal what is necessary to get the work done.
|
|
||||||
|
|
||||||
For example, the following code snippet violates the object capabilities principle:
|
|
||||||
|
|
||||||
```golang
|
|
||||||
type AppAccount struct {...}
|
|
||||||
var account := &AppAccount{
|
|
||||||
Address: pub.Address(),
|
|
||||||
Coins: sdk.Coins{{"ATM", 100}},
|
|
||||||
}
|
|
||||||
var sumValue := externalModule.ComputeSumValue(account)
|
|
||||||
```
|
|
||||||
|
|
||||||
The method "ComputeSumValue" implies a pure function, yet the implied capability of accepting a pointer value is the capability to modify that value. The preferred method signature should take a copy instead.
|
|
||||||
|
|
||||||
```golang
|
|
||||||
var sumValue := externalModule.ComputeSumValue(*account)
|
|
||||||
```
|
|
||||||
|
|
||||||
In the Cosmos SDK, you can see the application of this principle in the basecoin examples folder.
|
|
||||||
|
|
||||||
```golang
|
|
||||||
// File: cosmos-sdk/examples/basecoin/app/init_handlers.go
|
|
||||||
package app
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/cosmos/cosmos-sdk/x/bank"
|
|
||||||
"github.com/cosmos/cosmos-sdk/x/sketchy"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (app *BasecoinApp) initRouterHandlers() {
|
|
||||||
|
|
||||||
// All handlers must be added here.
|
|
||||||
// The order matters.
|
|
||||||
app.router.AddRoute("bank", bank.NewHandler(app.accountMapper))
|
|
||||||
app.router.AddRoute("sketchy", sketchy.NewHandler())
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
In the Basecoin example, the sketchy handler isn't provided an account mapper, which does provide the bank handler with the capability (in conjunction with the context of a transaction run).
|
|
||||||
|
|
||||||
### More Resources
|
|
||||||
|
|
||||||
* Read the [Cosmos SDK Guide](./guide.md).
|
|
Binary file not shown.
After Width: | Height: | Size: 186 KiB |
|
@ -4,7 +4,7 @@ If you want to see some examples, take a look at the [examples/basecoin](/exampl
|
||||||
|
|
||||||
## Design Goals
|
## Design Goals
|
||||||
|
|
||||||
The design of the Cosmos SDK is based on the principles of "cababilities systems".
|
The design of the Cosmos SDK is based on the principles of "capabilities systems".
|
||||||
|
|
||||||
## Capabilities systems
|
## Capabilities systems
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,424 @@
|
||||||
|
IBC
|
||||||
|
===
|
||||||
|
|
||||||
|
TODO: update in light of latest SDK (this document is currently out of date)
|
||||||
|
|
||||||
|
One of the most exciting elements of the Cosmos Network is the
|
||||||
|
InterBlockchain Communication (IBC) protocol, which enables
|
||||||
|
interoperability across different blockchains. We implemented IBC as a
|
||||||
|
basecoin plugin, and we'll show you how to use it to send tokens across
|
||||||
|
blockchains!
|
||||||
|
|
||||||
|
Please note: this tutorial assumes familiarity with the Cosmos SDK.
|
||||||
|
|
||||||
|
The IBC plugin defines a new set of transactions as subtypes of the
|
||||||
|
``AppTx``. The plugin's functionality is accessed by setting the
|
||||||
|
``AppTx.Name`` field to ``"IBC"``, and setting the ``Data`` field to the
|
||||||
|
serialized IBC transaction type.
|
||||||
|
|
||||||
|
We'll demonstrate exactly how this works below.
|
||||||
|
|
||||||
|
Inter BlockChain Communication
|
||||||
|
------------------------------
|
||||||
|
|
||||||
|
Let's review the IBC protocol. The purpose of IBC is to enable one
|
||||||
|
blockchain to function as a light-client of another. Since we are using
|
||||||
|
a classical Byzantine Fault Tolerant consensus algorithm, light-client
|
||||||
|
verification is cheap and easy: all we have to do is check validator
|
||||||
|
signatures on the latest block, and verify a Merkle proof of the state.
|
||||||
|
|
||||||
|
In Tendermint, validators agree on a block before processing it. This
|
||||||
|
means that the signatures and state root for that block aren't included
|
||||||
|
until the next block. Thus, each block contains a field called
|
||||||
|
``LastCommit``, which contains the votes responsible for committing the
|
||||||
|
previous block, and a field in the block header called ``AppHash``,
|
||||||
|
which refers to the Merkle root hash of the application after processing
|
||||||
|
the transactions from the previous block. So, if we want to verify the
|
||||||
|
``AppHash`` from height H, we need the signatures from ``LastCommit`` at
|
||||||
|
height H+1. (And remember that this ``AppHash`` only contains the
|
||||||
|
results from all transactions up to and including block H-1)
|
||||||
|
|
||||||
|
Unlike Proof-of-Work, the light-client protocol does not need to
|
||||||
|
download and check all the headers in the blockchain - the client can
|
||||||
|
always jump straight to the latest header available, so long as the
|
||||||
|
validator set has not changed much. If the validator set is changing,
|
||||||
|
the client needs to track these changes, which requires downloading
|
||||||
|
headers for each block in which there is a significant change. Here, we
|
||||||
|
will assume the validator set is constant, and postpone handling
|
||||||
|
validator set changes for another time.
|
||||||
|
|
||||||
|
Now we can describe exactly how IBC works. Suppose we have two
|
||||||
|
blockchains, ``chain1`` and ``chain2``, and we want to send some data
|
||||||
|
from ``chain1`` to ``chain2``. We need to do the following: 1. Register
|
||||||
|
the details (ie. chain ID and genesis configuration) of ``chain1`` on
|
||||||
|
``chain2`` 2. Within ``chain1``, broadcast a transaction that creates an
|
||||||
|
outgoing IBC packet destined for ``chain2`` 3. Broadcast a transaction
|
||||||
|
to ``chain2`` informing it of the latest state (ie. header and commit
|
||||||
|
signatures) of ``chain1`` 4. Post the outgoing packet from ``chain1`` to
|
||||||
|
``chain2``, including the proof that it was indeed committed on
|
||||||
|
``chain1``. Note ``chain2`` can only verify this proof because it has a
|
||||||
|
recent header and commit.
|
||||||
|
|
||||||
|
Each of these steps involves a separate IBC transaction type. Let's take
|
||||||
|
them up in turn.
|
||||||
|
|
||||||
|
IBCRegisterChainTx
|
||||||
|
~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The ``IBCRegisterChainTx`` is used to register one chain on another. It
|
||||||
|
contains the chain ID and genesis configuration of the chain to
|
||||||
|
register:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type IBCRegisterChainTx struct { BlockchainGenesis }
|
||||||
|
|
||||||
|
type BlockchainGenesis struct { ChainID string Genesis string }
|
||||||
|
|
||||||
|
This transaction should only be sent once for a given chain ID, and
|
||||||
|
successive sends will return an error.
|
||||||
|
|
||||||
|
IBCUpdateChainTx
|
||||||
|
~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The ``IBCUpdateChainTx`` is used to update the state of one chain on
|
||||||
|
another. It contains the header and commit signatures for some block in
|
||||||
|
the chain:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type IBCUpdateChainTx struct {
|
||||||
|
Header tm.Header
|
||||||
|
Commit tm.Commit
|
||||||
|
}
|
||||||
|
|
||||||
|
In the future, it needs to be updated to include changes to the
|
||||||
|
validator set as well. Anyone can relay an ``IBCUpdateChainTx``, and
|
||||||
|
they only need to do so as frequently as packets are being sent or the
|
||||||
|
validator set is changing.
|
||||||
|
|
||||||
|
IBCPacketCreateTx
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The ``IBCPacketCreateTx`` is used to create an outgoing packet on one
|
||||||
|
chain. The packet itself contains the source and destination chain IDs,
|
||||||
|
a sequence number (i.e. an integer that increments with every message
|
||||||
|
sent between this pair of chains), a packet type (e.g. coin, data,
|
||||||
|
etc.), and a payload.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type IBCPacketCreateTx struct {
|
||||||
|
Packet
|
||||||
|
}
|
||||||
|
|
||||||
|
type Packet struct {
|
||||||
|
SrcChainID string
|
||||||
|
DstChainID string
|
||||||
|
Sequence uint64
|
||||||
|
Type string
|
||||||
|
Payload []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
We have yet to define the format for the payload, so, for now, it's just
|
||||||
|
arbitrary bytes.
|
||||||
|
|
||||||
|
One way to think about this is that ``chain2`` has an account on
|
||||||
|
``chain1``. With a ``IBCPacketCreateTx`` on ``chain1``, we send funds to
|
||||||
|
that account. Then we can prove to ``chain2`` that there are funds
|
||||||
|
locked up for it in its account on ``chain1``. Those funds can only be
|
||||||
|
unlocked with corresponding IBC messages back from ``chain2`` to
|
||||||
|
``chain1`` sending the locked funds to another account on ``chain1``.
|
||||||
|
|
||||||
|
IBCPacketPostTx
|
||||||
|
~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The ``IBCPacketPostTx`` is used to post an outgoing packet from one
|
||||||
|
chain to another. It contains the packet and a proof that the packet was
|
||||||
|
committed into the state of the sending chain:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type IBCPacketPostTx struct {
|
||||||
|
FromChainID string // The immediate source of the packet, not always Packet.SrcChainID
|
||||||
|
FromChainHeight uint64 // The block height in which Packet was committed, to check Proof Packet
|
||||||
|
Proof *merkle.IAVLProof
|
||||||
|
}
|
||||||
|
|
||||||
|
The proof is a Merkle proof in an IAVL tree, our implementation of a
|
||||||
|
balanced, Merklized binary search tree. It contains a list of nodes in
|
||||||
|
the tree, which can be hashed together to get the Merkle root hash. This
|
||||||
|
hash must match the ``AppHash`` contained in the header at
|
||||||
|
``FromChainHeight + 1``
|
||||||
|
|
||||||
|
- note the ``+ 1`` is necessary since ``FromChainHeight`` is the height
|
||||||
|
in which the packet was committed, and the resulting state root is
|
||||||
|
not included until the next block.
|
||||||
|
|
||||||
|
IBC State
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
Now that we've seen all the transaction types, let's talk about the
|
||||||
|
state. Each chain stores some IBC state in its Merkle tree. For each
|
||||||
|
chain being tracked by our chain, we store:
|
||||||
|
|
||||||
|
- Genesis configuration
|
||||||
|
- Latest state
|
||||||
|
- Headers for recent heights
|
||||||
|
|
||||||
|
We also store all incoming (ingress) and outgoing (egress) packets.
|
||||||
|
|
||||||
|
The state of a chain is updated every time an ``IBCUpdateChainTx`` is
|
||||||
|
committed. New packets are added to the egress state upon
|
||||||
|
``IBCPacketCreateTx``. New packets are added to the ingress state upon
|
||||||
|
``IBCPacketPostTx``, assuming the proof checks out.
|
||||||
|
|
||||||
|
Merkle Queries
|
||||||
|
--------------
|
||||||
|
|
||||||
|
The Basecoin application uses a single Merkle tree that is shared across
|
||||||
|
all its state, including the built-in accounts state and all plugin
|
||||||
|
state. For this reason, it's important to use explicit key names and/or
|
||||||
|
hashes to ensure there are no collisions.
|
||||||
|
|
||||||
|
We can query the Merkle tree using the ABCI Query method. If we pass in
|
||||||
|
the correct key, it will return the corresponding value, as well as a
|
||||||
|
proof that the key and value are contained in the Merkle tree.
|
||||||
|
|
||||||
|
The results of a query can thus be used as proof in an
|
||||||
|
``IBCPacketPostTx``.
|
||||||
|
|
||||||
|
Relay
|
||||||
|
-----
|
||||||
|
|
||||||
|
While we need all these packet types internally to keep track of all the
|
||||||
|
proofs on both chains in a secure manner, for the normal work-flow, we
|
||||||
|
can run a relay node that handles the cross-chain interaction.
|
||||||
|
|
||||||
|
In this case, there are only two steps. First ``basecoin relay init``,
|
||||||
|
which must be run once to register each chain with the other one, and
|
||||||
|
make sure they are ready to send and receive. And then
|
||||||
|
``basecoin relay start``, which is a long-running process polling the
|
||||||
|
queue on each side, and relaying all new messages to the other chain.
|
||||||
|
|
||||||
|
This requires that the relay has access to accounts with some funds on
|
||||||
|
both chains to pay for all the ibc packets it will be forwarding.
|
||||||
|
|
||||||
|
Try it out
|
||||||
|
----------
|
||||||
|
|
||||||
|
Now that we have all the background knowledge, let's actually walk
|
||||||
|
through the tutorial.
|
||||||
|
|
||||||
|
Make sure you have installed `basecoin and
|
||||||
|
basecli </docs/guide/install.md>`__.
|
||||||
|
|
||||||
|
Basecoin is a framework for creating new cryptocurrency applications. It
|
||||||
|
comes with an ``IBC`` plugin enabled by default.
|
||||||
|
|
||||||
|
You will also want to install the
|
||||||
|
`jq <https://stedolan.github.io/jq/>`__ for handling JSON at the command
|
||||||
|
line.
|
||||||
|
|
||||||
|
If you have any trouble with this, you can also look at the `test
|
||||||
|
scripts </tests/cli/ibc.sh>`__ or just run ``make test_cli`` in basecoin
|
||||||
|
repo. Otherwise, open up 5 (yes 5!) terminal tabs....
|
||||||
|
|
||||||
|
Preliminaries
|
||||||
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
# first, clean up any old garbage for a fresh slate...
|
||||||
|
rm -rf ~/.ibcdemo/
|
||||||
|
|
||||||
|
Let's start by setting up some environment variables and aliases:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
export BCHOME1_CLIENT=~/.ibcdemo/chain1/client
|
||||||
|
export BCHOME1_SERVER=~/.ibcdemo/chain1/server
|
||||||
|
export BCHOME2_CLIENT=~/.ibcdemo/chain2/client
|
||||||
|
export BCHOME2_SERVER=~/.ibcdemo/chain2/server
|
||||||
|
alias basecli1="basecli --home $BCHOME1_CLIENT"
|
||||||
|
alias basecli2="basecli --home $BCHOME2_CLIENT"
|
||||||
|
alias basecoin1="basecoin --home $BCHOME1_SERVER"
|
||||||
|
alias basecoin2="basecoin --home $BCHOME2_SERVER"
|
||||||
|
|
||||||
|
This will give us some new commands to use instead of raw ``basecli``
|
||||||
|
and ``basecoin`` to ensure we're using the right configuration for the
|
||||||
|
chain we want to talk to.
|
||||||
|
|
||||||
|
We also want to set some chain IDs:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
export CHAINID1="test-chain-1"
|
||||||
|
export CHAINID2="test-chain-2"
|
||||||
|
|
||||||
|
And since we will run two different chains on one machine, we need to
|
||||||
|
maintain different sets of ports:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
export PORT_PREFIX1=1234
|
||||||
|
export PORT_PREFIX2=2345
|
||||||
|
export RPC_PORT1=${PORT_PREFIX1}7
|
||||||
|
export RPC_PORT2=${PORT_PREFIX2}7
|
||||||
|
|
||||||
|
Setup Chain 1
|
||||||
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Now, let's create some keys that we can use for accounts on
|
||||||
|
test-chain-1:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli1 keys new money
|
||||||
|
basecli1 keys new gotnone
|
||||||
|
export MONEY=$(basecli1 keys get money | awk '{print $2}')
|
||||||
|
export GOTNONE=$(basecli1 keys get gotnone | awk '{print $2}')
|
||||||
|
|
||||||
|
and create an initial configuration giving lots of coins to the $MONEY
|
||||||
|
key:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecoin1 init --chain-id $CHAINID1 $MONEY
|
||||||
|
|
||||||
|
Now start basecoin:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
sed -ie "s/4665/$PORT_PREFIX1/" $BCHOME1_SERVER/config.toml
|
||||||
|
|
||||||
|
basecoin1 start &> basecoin1.log &
|
||||||
|
|
||||||
|
Note the ``sed`` command to replace the ports in the config file. You
|
||||||
|
can follow the logs with ``tail -f basecoin1.log``
|
||||||
|
|
||||||
|
Now we can attach the client to the chain and verify the state. The
|
||||||
|
first account should have money, the second none:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli1 init --node=tcp://localhost:${RPC_PORT1} --genesis=${BCHOME1_SERVER}/genesis.json
|
||||||
|
basecli1 query account $MONEY
|
||||||
|
basecli1 query account $GOTNONE
|
||||||
|
|
||||||
|
Setup Chain 2
|
||||||
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
This is the same as above, except with ``basecli2``, ``basecoin2``, and
|
||||||
|
``$CHAINID2``. We will also need to change the ports, since we're
|
||||||
|
running another chain on the same local machine.
|
||||||
|
|
||||||
|
Let's create new keys for test-chain-2:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli2 keys new moremoney
|
||||||
|
basecli2 keys new broke
|
||||||
|
MOREMONEY=$(basecli2 keys get moremoney | awk '{print $2}')
|
||||||
|
BROKE=$(basecli2 keys get broke | awk '{print $2}')
|
||||||
|
|
||||||
|
And prepare the genesis block, and start the server:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecoin2 init --chain-id $CHAINID2 $(basecli2 keys get moremoney | awk '{print $2}')
|
||||||
|
|
||||||
|
sed -ie "s/4665/$PORT_PREFIX2/" $BCHOME2_SERVER/config.toml
|
||||||
|
|
||||||
|
basecoin2 start &> basecoin2.log &
|
||||||
|
|
||||||
|
Now attach the client to the chain and verify the state. The first
|
||||||
|
account should have money, the second none:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecli2 init --node=tcp://localhost:${RPC_PORT2} --genesis=${BCHOME2_SERVER}/genesis.json
|
||||||
|
basecli2 query account $MOREMONEY
|
||||||
|
basecli2 query account $BROKE
|
||||||
|
|
||||||
|
Connect these chains
|
||||||
|
~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
OK! So we have two chains running on your local machine, with different
|
||||||
|
keys on each. Let's hook them up together by starting a relay process to
|
||||||
|
forward messages from one chain to the other.
|
||||||
|
|
||||||
|
The relay account needs some money in it to pay for the ibc messages, so
|
||||||
|
for now, we have to transfer some cash from the rich accounts before we
|
||||||
|
start the actual relay.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
# note that this key.json file is a hardcoded demo for all chains, this will
|
||||||
|
# be updated in a future release
|
||||||
|
RELAY_KEY=$BCHOME1_SERVER/key.json
|
||||||
|
RELAY_ADDR=$(cat $RELAY_KEY | jq .address | tr -d \")
|
||||||
|
|
||||||
|
basecli1 tx send --amount=100000mycoin --sequence=1 --to=$RELAY_ADDR --name=money
|
||||||
|
basecli1 query account $RELAY_ADDR
|
||||||
|
|
||||||
|
basecli2 tx send --amount=100000mycoin --sequence=1 --to=$RELAY_ADDR --name=moremoney
|
||||||
|
basecli2 query account $RELAY_ADDR
|
||||||
|
|
||||||
|
Now we can start the relay process.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
basecoin relay init --chain1-id=$CHAINID1 --chain2-id=$CHAINID2 \
|
||||||
|
--chain1-addr=tcp://localhost:${RPC_PORT1} --chain2-addr=tcp://localhost:${RPC_PORT2} \
|
||||||
|
--genesis1=${BCHOME1_SERVER}/genesis.json --genesis2=${BCHOME2_SERVER}/genesis.json \
|
||||||
|
--from=$RELAY_KEY
|
||||||
|
|
||||||
|
basecoin relay start --chain1-id=$CHAINID1 --chain2-id=$CHAINID2 \
|
||||||
|
--chain1-addr=tcp://localhost:${RPC_PORT1} --chain2-addr=tcp://localhost:${RPC_PORT2} \
|
||||||
|
--from=$RELAY_KEY &> relay.log &
|
||||||
|
|
||||||
|
This should start up the relay, and assuming no error messages came out,
|
||||||
|
the two chains are now fully connected over IBC. Let's use this to send
|
||||||
|
our first tx across the chains...
|
||||||
|
|
||||||
|
Sending cross-chain payments
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The hard part is over, we set up two blockchains, a few private keys,
|
||||||
|
and a secure relay between them. Now we can enjoy the fruits of our
|
||||||
|
labor...
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
# Here's an empty account on test-chain-2
|
||||||
|
basecli2 query account $BROKE
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
# Let's send some funds from test-chain-1
|
||||||
|
basecli1 tx send --amount=12345mycoin --sequence=2 --to=test-chain-2/$BROKE --name=money
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
# give it time to arrive...
|
||||||
|
sleep 2
|
||||||
|
# now you should see 12345 coins!
|
||||||
|
basecli2 query account $BROKE
|
||||||
|
|
||||||
|
You're no longer broke! Cool, huh? Now have fun exploring and sending
|
||||||
|
coins across the chains. And making more accounts as you want to.
|
||||||
|
|
||||||
|
Conclusion
|
||||||
|
----------
|
||||||
|
|
||||||
|
In this tutorial we explained how IBC works, and demonstrated how to use
|
||||||
|
it to communicate between two chains. We did the simplest communication
|
||||||
|
possible: a one way transfer of data from chain1 to chain2. The most
|
||||||
|
important part was that we updated chain2 with the latest state (i.e.
|
||||||
|
header and commit) of chain1, and then were able to post a proof to
|
||||||
|
chain2 that a packet was committed to the outgoing state of chain1.
|
||||||
|
|
||||||
|
In a future tutorial, we will demonstrate how to use IBC to actually
|
||||||
|
transfer tokens between two blockchains, but we'll do it with real
|
||||||
|
testnets deployed across multiple nodes on the network. Stay tuned!
|
|
@ -0,0 +1,66 @@
|
||||||
|
.. Cosmos-SDK documentation master file, created by
|
||||||
|
sphinx-quickstart on Fri Sep 1 21:37:02 2017.
|
||||||
|
You can adapt this file completely to your liking, but it should at least
|
||||||
|
contain the root `toctree` directive.
|
||||||
|
|
||||||
|
Welcome to the Cosmos SDK!
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. image:: graphics/cosmos-sdk-image.png
|
||||||
|
:height: 250px
|
||||||
|
:width: 500px
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
SDK
|
||||||
|
---
|
||||||
|
|
||||||
|
.. One maxdepth for now
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
sdk/overview.rst
|
||||||
|
sdk/install.rst
|
||||||
|
sdk/glossary.rst
|
||||||
|
|
||||||
|
.. Basecoin
|
||||||
|
.. --------
|
||||||
|
|
||||||
|
.. .. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
|
.. basecoin/basics.rst
|
||||||
|
.. basecoin/extensions.rst
|
||||||
|
|
||||||
|
Extensions
|
||||||
|
----------
|
||||||
|
|
||||||
|
Replay Protection
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
x/replay-protection.rst
|
||||||
|
|
||||||
|
|
||||||
|
Staking
|
||||||
|
~~~~~~~
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
staking/intro.rst
|
||||||
|
staking/key-management.rst
|
||||||
|
staking/local-testnet.rst
|
||||||
|
staking/public-testnet.rst
|
||||||
|
|
||||||
|
Extras
|
||||||
|
------
|
||||||
|
|
||||||
|
.. One maxdepth for now
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
ibc.rst
|
|
@ -0,0 +1,36 @@
|
||||||
|
@ECHO OFF
|
||||||
|
|
||||||
|
pushd %~dp0
|
||||||
|
|
||||||
|
REM Command file for Sphinx documentation
|
||||||
|
|
||||||
|
if "%SPHINXBUILD%" == "" (
|
||||||
|
set SPHINXBUILD=python -msphinx
|
||||||
|
)
|
||||||
|
set SOURCEDIR=.
|
||||||
|
set BUILDDIR=_build
|
||||||
|
set SPHINXPROJ=Cosmos-SDK
|
||||||
|
|
||||||
|
if "%1" == "" goto help
|
||||||
|
|
||||||
|
%SPHINXBUILD% >NUL 2>NUL
|
||||||
|
if errorlevel 9009 (
|
||||||
|
echo.
|
||||||
|
echo.The Sphinx module was not found. Make sure you have Sphinx installed,
|
||||||
|
echo.then set the SPHINXBUILD environment variable to point to the full
|
||||||
|
echo.path of the 'sphinx-build' executable. Alternatively you may add the
|
||||||
|
echo.Sphinx directory to PATH.
|
||||||
|
echo.
|
||||||
|
echo.If you don't have Sphinx installed, grab it from
|
||||||
|
echo.http://sphinx-doc.org/
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||||
|
goto end
|
||||||
|
|
||||||
|
:help
|
||||||
|
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||||
|
|
||||||
|
:end
|
||||||
|
popd
|
|
@ -0,0 +1,231 @@
|
||||||
|
Glossary
|
||||||
|
========
|
||||||
|
|
||||||
|
This glossary defines many terms used throughout documentation of Quark.
|
||||||
|
If there is ever a concept that seems unclear, check here. This is
|
||||||
|
mainly to provide a background and general understanding of the
|
||||||
|
different words and concepts that are used. Other documents will explain
|
||||||
|
in more detail how to combine these concepts to build a particular
|
||||||
|
application.
|
||||||
|
|
||||||
|
Transaction
|
||||||
|
-----------
|
||||||
|
|
||||||
|
A transaction is a packet of binary data that contains all information
|
||||||
|
to validate and perform an action on the blockchain. The only other data
|
||||||
|
that it interacts with is the current state of the chain (key-value
|
||||||
|
store), and it must have a deterministic action. The transaction is the
|
||||||
|
main piece of one request.
|
||||||
|
|
||||||
|
We currently make heavy use of
|
||||||
|
`go-wire <https://github.com/tendermint/go-wire>`__ and
|
||||||
|
`data <https://github.com/tendermint/go-wire/tree/master/data>`__ to
|
||||||
|
provide binary and json encodings and decodings for ``struct`` or
|
||||||
|
interface objects. Here, encoding and decoding operations are designed to operate with interfaces nested any amount of times (like an onion!). There is one public ``TxMapper``
|
||||||
|
in the basecoin root package, and all modules can register their own
|
||||||
|
transaction types there. This allows us to deserialize the entire
|
||||||
|
transaction in one location (even with types defined in other repos), to
|
||||||
|
easily embed an arbitrary transaction inside another without specifying
|
||||||
|
the type, and provide an automatic json representation allowing for
|
||||||
|
users (or apps) to inspect the chain.
|
||||||
|
|
||||||
|
Note how we can wrap any other transaction, add a fee level, and not
|
||||||
|
worry about the encoding in our module any more?
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type Fee struct {
|
||||||
|
Fee coin.Coin `json:"fee"`
|
||||||
|
Payer basecoin.Actor `json:"payer"` // the address who pays the fee
|
||||||
|
Tx basecoin.Tx `json:"tx"`
|
||||||
|
}
|
||||||
|
|
||||||
|
Context (ctx)
|
||||||
|
-------------
|
||||||
|
|
||||||
|
As a request passes through the system, it may pick up information such
|
||||||
|
as the block height the request runs at. In order to carry this information
|
||||||
|
between modules it is saved to the context. Further, all information
|
||||||
|
must be deterministic from the context in which the request runs (based
|
||||||
|
on the transaction and the block it was included in) and can be used to
|
||||||
|
validate the transaction.
|
||||||
|
|
||||||
|
Data Store
|
||||||
|
----------
|
||||||
|
|
||||||
|
In order to provide proofs to Tendermint, we keep all data in one
|
||||||
|
key-value (kv) store which is indexed with a merkle tree. This allows
|
||||||
|
for the easy generation of a root hash and proofs for queries without
|
||||||
|
requiring complex logic inside each module. Standardization of this
|
||||||
|
process also allows powerful light-client tooling as any store data may
|
||||||
|
be verified on the fly.
|
||||||
|
|
||||||
|
The largest limitation of the current implementation of the kv-store is
|
||||||
|
that the interface that the application must use can only ``Get`` and
|
||||||
|
``Set`` single data points. That said, there are some data structures
|
||||||
|
like queues and range queries that are available in ``state`` package.
|
||||||
|
These provide higher-level functionality in a standard format, but have
|
||||||
|
not yet been integrated into the kv-store interface.
|
||||||
|
|
||||||
|
Isolation
|
||||||
|
---------
|
||||||
|
|
||||||
|
One of the main arguments for blockchain is security. So while we
|
||||||
|
encourage the use of third-party modules, all developers must be
|
||||||
|
vigilant against security holes. If you use the
|
||||||
|
`stack <https://github.com/cosmos/cosmos-sdk/tree/master/stack>`__
|
||||||
|
package, it will provide two different types of compartmentalization
|
||||||
|
security.
|
||||||
|
|
||||||
|
The first is to limit the working kv-store space of each module. When
|
||||||
|
``DeliverTx`` is called for a module, it is never given the entire data
|
||||||
|
store, but rather only its own prefixed subset of the store. This is
|
||||||
|
achieved by prefixing all keys transparently with
|
||||||
|
``<module name> + 0x0``, using the null byte as a separator. Since the
|
||||||
|
module name must be a string, no malicious naming scheme can ever lead
|
||||||
|
to a collision. Inside a module, we can write using any key value we
|
||||||
|
desire without the possibility that we have modified data belonging to
|
||||||
|
a separate module.
|
||||||
|
|
||||||
|
The second is to add permissions to the transaction context. The
|
||||||
|
transaction context can specify that the tx has been signed by one or
|
||||||
|
multiple specific actors.
|
||||||
|
|
||||||
|
A transaction will only be executed if the permission requirements have
|
||||||
|
been fulfilled. For example the sender of funds must have signed, or 2
|
||||||
|
out of 3 multi-signature actors must have signed a joint account. To
|
||||||
|
prevent the forgery of account signatures from unintended modules each
|
||||||
|
permission is associated with the module that granted it (in this case
|
||||||
|
`auth <https://github.com/cosmos/cosmos-sdk/tree/master/x/auth>`__),
|
||||||
|
and if a module tries to add a permission for another module, it will
|
||||||
|
panic. There is also protection if a module creates a brand new fake
|
||||||
|
context to trick the downstream modules. Each context enforces the rules
|
||||||
|
on how to make child contexts, and the stack builder enforces
|
||||||
|
that the context passed from one level to the next is a valid child of
|
||||||
|
the original one.
|
||||||
|
|
||||||
|
These security measures ensure that modules can confidently write to
|
||||||
|
their local section of the database and trust the permissions associated
|
||||||
|
with the context, without concern of interference from other modules.
|
||||||
|
(Okay, if you see a bunch of C-code in the module traversing through all
|
||||||
|
the memory space of the application, then get worried....)
|
||||||
|
|
||||||
|
Handler
|
||||||
|
-------
|
||||||
|
|
||||||
|
The ABCI interface is handled by ``app``, which translates these data
|
||||||
|
structures into an internal format that is more convenient, but unable
|
||||||
|
to travel over the wire. The basic interface for any code that modifies
|
||||||
|
state is the ``Handler`` interface, which provides four methods:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Name() string
|
||||||
|
CheckTx(ctx Context, store state.KVStore, tx Tx) (Result, error)
|
||||||
|
DeliverTx(ctx Context, store state.KVStore, tx Tx) (Result, error)
|
||||||
|
SetOption(l log.Logger, store state.KVStore, module, key, value string) (string, error)
|
||||||
|
|
||||||
|
Note the ``Context``, ``KVStore``, and ``Tx`` as principal carriers of
|
||||||
|
information. And that Result is always success, and we have a second
|
||||||
|
error return for errors (which is much more standard golang than
|
||||||
|
``res.IsErr()``)
|
||||||
|
|
||||||
|
The ``Handler`` interface is designed to be the basis for all modules
|
||||||
|
that execute transactions, and this can provide a large degree of code
|
||||||
|
interoperability, much like ``http.Handler`` does in golang web
|
||||||
|
development.
|
||||||
|
|
||||||
|
Modules
|
||||||
|
-------
|
||||||
|
|
||||||
|
TODO: update (s/Modules/handlers+mappers+stores/g) & add Msg + Tx (a signed message)
|
||||||
|
|
||||||
|
A module is a set of functionality which should be typically designed as
|
||||||
|
self-sufficient. Common elements of a module are:
|
||||||
|
|
||||||
|
- transaction types (either end transactions, or transaction wrappers)
|
||||||
|
- custom error codes
|
||||||
|
- data models (to persist in the kv-store)
|
||||||
|
- handler (to handle any end transactions)
|
||||||
|
|
||||||
|
Dispatcher
|
||||||
|
----------
|
||||||
|
|
||||||
|
We usually will want to have multiple modules working together, and need
|
||||||
|
to make sure the correct transactions get to the correct module. So we
|
||||||
|
have ``coin`` sending money, ``roles`` to create multi-sig accounts, and
|
||||||
|
``ibc`` for following other chains all working together without
|
||||||
|
interference.
|
||||||
|
|
||||||
|
We can then register a ``Dispatcher``, which
|
||||||
|
also implements the ``Handler`` interface. We then register a list of
|
||||||
|
modules with the dispatcher. Every module has a unique ``Name()``, which
|
||||||
|
is used for isolating its state space. We use this same name for routing
|
||||||
|
transactions. Each transaction implementation must be registered with
|
||||||
|
go-wire via ``TxMapper``, so we just look at the registered name of this
|
||||||
|
transaction, which should be of the form ``<module name>/xxx``. The
|
||||||
|
dispatcher grabs the appropriate module name from the tx name and routes
|
||||||
|
it if the module is present.
|
||||||
|
|
||||||
|
This all seems like a bit of magic, but really we're just making use of
|
||||||
|
go-wire magic that we are already using, rather than add another layer.
|
||||||
|
For all the transactions to be properly routed, the only thing you need
|
||||||
|
to remember is to use the following pattern:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
const (
|
||||||
|
NameCoin = "coin"
|
||||||
|
TypeSend = NameCoin + "/send"
|
||||||
|
)
|
||||||
|
|
||||||
|
Permissions
|
||||||
|
-----------
|
||||||
|
|
||||||
|
TODO: replaces perms with object capabilities/object capability keys
|
||||||
|
- get rid of IPC
|
||||||
|
|
||||||
|
IPC requires a more complex permissioning system to allow the modules to
|
||||||
|
have limited access to each other and also to allow more types of
|
||||||
|
permissions than simple public key signatures. Rather than just use an
|
||||||
|
address to identify who is performing an action, we can use a more
|
||||||
|
complex structure:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type Actor struct {
|
||||||
|
ChainID string `json:"chain"` // this is empty unless it comes from a different chain
|
||||||
|
App string `json:"app"` // the app that the actor belongs to
|
||||||
|
Address data.Bytes `json:"addr"` // arbitrary app-specific unique id
|
||||||
|
}
|
||||||
|
|
||||||
|
Here, the ``Actor`` abstracts any address that can authorize actions,
|
||||||
|
hold funds, or initiate any sort of transaction. It doesn't just have to
|
||||||
|
be a pubkey on this chain, it could stem from another app (such as
|
||||||
|
multi-sig account), or even another chain (via IBC)
|
||||||
|
|
||||||
|
``ChainID`` is for IBC, discussed below. Let's focus on ``App`` and
|
||||||
|
``Address``. For a signature, the App is ``auth``, and any modules can
|
||||||
|
check to see if a specific public key address signed like this
|
||||||
|
``ctx.HasPermission(auth.SigPerm(addr))``. However, we can also
|
||||||
|
authorize a tx with ``roles``, which handles multi-sig accounts, it
|
||||||
|
checks if there were enough signatures by checking as above, then it can
|
||||||
|
add the role permission like
|
||||||
|
``ctx= ctx.WithPermissions(NewPerm(assume.Role))``
|
||||||
|
|
||||||
|
In addition to the permissions schema, the Actors are addresses just
|
||||||
|
like public key addresses. So one can create a multi-sig role, then send
|
||||||
|
coin there, which can only be moved upon meeting the authorization
|
||||||
|
requirements from that module. ``coin`` doesn't even know the existence
|
||||||
|
of ``roles`` and one could build any other sort of module to provide
|
||||||
|
permissions (like bind the outcome of an election to move coins or to
|
||||||
|
modify the accounts on a role).
|
||||||
|
|
||||||
|
One idea - not yet implemented - is to provide scopes on the
|
||||||
|
permissions. Currently, if I sign a transaction to one module, it can
|
||||||
|
pass it on to any other module over IPC with the same permissions. It
|
||||||
|
could move coins, vote in an election, or anything else. Ideally, when
|
||||||
|
signing, one could also specify the scope(s) that this signature
|
||||||
|
authorizes. The `oauth
|
||||||
|
protocol <https://api.slack.com/docs/oauth-scopes>`__ also has to deal
|
||||||
|
with a similar problem, and maybe could provide some inspiration.
|
|
@ -0,0 +1,35 @@
|
||||||
|
Install
|
||||||
|
=======
|
||||||
|
|
||||||
|
If you aren't used to compiling Go programs and just want the released
|
||||||
|
version of the code, please head to our
|
||||||
|
`downloads <https://tendermint.com/download>`__ page to get a
|
||||||
|
pre-compiled binary for your platform.
|
||||||
|
|
||||||
|
Usually, Cosmos SDK can be installed like a normal Go program:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
go get -u github.com/cosmos/cosmos-sdk
|
||||||
|
|
||||||
|
If the dependencies have been updated with breaking changes, or if
|
||||||
|
another branch is required, ``glide`` is used for dependency management.
|
||||||
|
Thus, assuming you've already run ``go get`` or otherwise cloned the
|
||||||
|
repo, the correct way to install is:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
cd $GOPATH/src/github.com/cosmos/cosmos-sdk
|
||||||
|
git pull origin master
|
||||||
|
make all
|
||||||
|
|
||||||
|
This will create the ``basecoin`` binary in ``$GOPATH/bin``.
|
||||||
|
``make all`` implies ``make get_vendor_deps`` and uses ``glide`` to
|
||||||
|
install the correct version of all dependencies. It also tests the code,
|
||||||
|
including some cli tests to make sure your binary behaves properly.
|
||||||
|
|
||||||
|
If you need another branch, make sure to run ``git checkout <branch>``
|
||||||
|
before ``make all``. And if you switch branches a lot, especially
|
||||||
|
touching other tendermint repos, you may need to ``make fresh``
|
||||||
|
sometimes so glide doesn't get confused with all the branches and
|
||||||
|
versions lying around.
|
|
@ -0,0 +1,435 @@
|
||||||
|
Overview
|
||||||
|
========
|
||||||
|
|
||||||
|
The SDK design optimizes flexibility and security. The
|
||||||
|
framework is designed around a modular execution stack which allows
|
||||||
|
applications to mix and match elements as desired. In addition,
|
||||||
|
all modules are sandboxed for greater application security.
|
||||||
|
|
||||||
|
Framework Overview
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Object-Capability Model
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
When thinking about security, it's good to start with a specific threat model. Our threat model is the following:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
We assume that a thriving ecosystem of Cosmos-SDK modules that are easy to compose into a blockchain application will contain faulty or malicious modules.
|
||||||
|
|
||||||
|
The Cosmos-SDK is designed to address this threat by being the foundation of an object capability system.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
The structural properties of object capability systems favor
|
||||||
|
modularity in code design and ensure reliable encapsulation in
|
||||||
|
code implementation.
|
||||||
|
|
||||||
|
These structural properties facilitate the analysis of some
|
||||||
|
security properties of an object-capability program or operating
|
||||||
|
system. Some of these — in particular, information flow properties
|
||||||
|
— can be analyzed at the level of object references and
|
||||||
|
connectivity, independent of any knowledge or analysis of the code
|
||||||
|
that determines the behavior of the objects. As a consequence,
|
||||||
|
these security properties can be established and maintained in the
|
||||||
|
presence of new objects that contain unknown and possibly
|
||||||
|
malicious code.
|
||||||
|
|
||||||
|
These structural properties stem from the two rules governing
|
||||||
|
access to existing objects:
|
||||||
|
|
||||||
|
1) An object A can send a message to B only if object A holds a
|
||||||
|
reference to B.
|
||||||
|
|
||||||
|
2) An object A can obtain a reference to C only
|
||||||
|
if object A receives a message containing a reference to C. As a
|
||||||
|
consequence of these two rules, an object can obtain a reference
|
||||||
|
to another object only through a preexisting chain of references.
|
||||||
|
In short, "Only connectivity begets connectivity."
|
||||||
|
|
||||||
|
See the `wikipedia article <https://en.wikipedia.org/wiki/Object-capability_model>`__ for more information.
|
||||||
|
|
||||||
|
Strictly speaking, Golang does not implement object capabilities completely, because of several issues:
|
||||||
|
|
||||||
|
* pervasive ability to import primitive modules (e.g. "unsafe", "os")
|
||||||
|
* pervasive ability to override module vars https://github.com/golang/go/issues/23161
|
||||||
|
* data-race vulnerability where 2+ goroutines can create illegal interface values
|
||||||
|
|
||||||
|
The first is easy to catch by auditing imports and using a proper dependency version control system like Glide. The second and third are unfortunate, but they can be audited with some cost.
|
||||||
|
|
||||||
|
Perhaps `Go2 will implement the object capability model <https://github.com/golang/go/issues/23157>`__.
|
||||||
|
|
||||||
|
What does it look like?
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Only reveal what is necessary to get the work done.
|
||||||
|
|
||||||
|
For example, the following code snippet violates the object capabilities principle:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type AppAccount struct {...}
|
||||||
|
    account := &AppAccount{
|
||||||
|
Address: pub.Address(),
|
||||||
|
Coins: sdk.Coins{{"ATM", 100}},
|
||||||
|
}
|
||||||
|
    sumValue := externalModule.ComputeSumValue(account)
|
||||||
|
|
||||||
|
The method "ComputeSumValue" implies a pure function, yet the implied capability of accepting a pointer value is the capability to modify that value. The preferred method signature should take a copy instead.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
    sumValue := externalModule.ComputeSumValue(*account)
|
||||||
|
|
||||||
|
In the Cosmos SDK, you can see the application of this principle in the basecoin examples folder.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
// File: cosmos-sdk/examples/basecoin/app/init_handlers.go
|
||||||
|
package app
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/cosmos/cosmos-sdk/x/bank"
|
||||||
|
"github.com/cosmos/cosmos-sdk/x/sketchy"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (app *BasecoinApp) initRouterHandlers() {
|
||||||
|
|
||||||
|
// All handlers must be added here.
|
||||||
|
// The order matters.
|
||||||
|
app.router.AddRoute("bank", bank.NewHandler(app.accountMapper))
|
||||||
|
app.router.AddRoute("sketchy", sketchy.NewHandler())
|
||||||
|
}
|
||||||
|
|
||||||
|
In the Basecoin example, the sketchy handler isn't provided an account mapper, which does provide the bank handler with the capability (in conjunction with the context of a transaction run).
|
||||||
|
|
||||||
|
Security Overview
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
For examples, see the `examples <https://github.com/cosmos/cosmos-sdk/tree/develop/examples>`__ directory.
|
||||||
|
|
||||||
|
Design Goals
|
||||||
|
~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The design of the Cosmos SDK is based on the principles of "capabilities systems".
|
||||||
|
|
||||||
|
Capabilities systems
|
||||||
|
~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
TODO:
|
||||||
|
|
||||||
|
* Need for module isolation
|
||||||
|
* Capability is implied permission
|
||||||
|
* Link to thesis
|
||||||
|
|
||||||
|
Tx & Msg
|
||||||
|
~~~~~~~~
|
||||||
|
|
||||||
|
The SDK distinguishes between transactions (Tx) and messages
|
||||||
|
(Msg). A Tx is a Msg wrapped with authentication and fee data.
|
||||||
|
|
||||||
|
Messages
|
||||||
|
^^^^^^^^
|
||||||
|
|
||||||
|
Users can create messages containing arbitrary information by
|
||||||
|
implementing the ``Msg`` interface:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type Msg interface {
|
||||||
|
|
||||||
|
// Return the message type.
|
||||||
|
// Must be alphanumeric or empty.
|
||||||
|
Type() string
|
||||||
|
|
||||||
|
// Get some property of the Msg.
|
||||||
|
Get(key interface{}) (value interface{})
|
||||||
|
|
||||||
|
// Get the canonical byte representation of the Msg.
|
||||||
|
GetSignBytes() []byte
|
||||||
|
|
||||||
|
// ValidateBasic does a simple validation check that
|
||||||
|
// doesn't require access to any other information.
|
||||||
|
ValidateBasic() error
|
||||||
|
|
||||||
|
// Signers returns the addrs of signers that must sign.
|
||||||
|
// CONTRACT: All signatures must be present to be valid.
|
||||||
|
// CONTRACT: Returns addrs in some deterministic order.
|
||||||
|
GetSigners() []crypto.Address
|
||||||
|
}
|
||||||
|
|
||||||
|
Messages must specify their type via the ``Type()`` method. The type should
|
||||||
|
correspond to the message's handler, so there can be many messages with the same
|
||||||
|
type.
|
||||||
|
|
||||||
|
Messages must also specify how they are to be authenticated. The ``GetSigners()``
|
||||||
|
method returns a list of addresses that must sign the message, while the
|
||||||
|
``GetSignBytes()`` method returns the bytes that must be signed for a signature
|
||||||
|
to be valid.
|
||||||
|
|
||||||
|
Addresses in the SDK are arbitrary byte arrays that are hex-encoded when
|
||||||
|
displayed as a string or rendered in JSON.
|
||||||
|
|
||||||
|
Messages can specify basic self-consistency checks using the ``ValidateBasic()``
|
||||||
|
method to enforce that message contents are well formed before any actual logic
|
||||||
|
begins.
|
||||||
|
|
||||||
|
Finally, messages can provide generic access to their contents via ``Get(key)``,
|
||||||
|
but this is mostly for convenience and not type-safe.
|
||||||
|
|
||||||
|
For instance, the ``Basecoin`` message types are defined in ``x/bank/tx.go``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type SendMsg struct {
|
||||||
|
Inputs []Input `json:"inputs"`
|
||||||
|
Outputs []Output `json:"outputs"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type IssueMsg struct {
|
||||||
|
Banker crypto.Address `json:"banker"`
|
||||||
|
Outputs []Output `json:"outputs"`
|
||||||
|
}
|
||||||
|
|
||||||
|
Each specifies the addresses that must sign the message:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
func (msg SendMsg) GetSigners() []crypto.Address {
|
||||||
|
addrs := make([]crypto.Address, len(msg.Inputs))
|
||||||
|
for i, in := range msg.Inputs {
|
||||||
|
addrs[i] = in.Address
|
||||||
|
}
|
||||||
|
return addrs
|
||||||
|
}
|
||||||
|
|
||||||
|
func (msg IssueMsg) GetSigners() []crypto.Address {
|
||||||
|
return []crypto.Address{msg.Banker}
|
||||||
|
}
|
||||||
|
|
||||||
|
Transactions
|
||||||
|
^^^^^^^^^^^^
|
||||||
|
|
||||||
|
A transaction is a message with additional information for authentication:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type Tx interface {
|
||||||
|
|
||||||
|
GetMsg() Msg
|
||||||
|
|
||||||
|
// The address that pays the base fee for this message. The fee is
|
||||||
|
// deducted before the Msg is processed.
|
||||||
|
GetFeePayer() crypto.Address
|
||||||
|
|
||||||
|
// Get the canonical byte representation of the Tx.
|
||||||
|
// Includes any signatures (or empty slots).
|
||||||
|
GetTxBytes() []byte
|
||||||
|
|
||||||
|
// Signatures returns the signature of signers who signed the Msg.
|
||||||
|
// CONTRACT: Length returned is same as length of
|
||||||
|
// pubkeys returned from MsgKeySigners, and the order
|
||||||
|
// matches.
|
||||||
|
// CONTRACT: If the signature is missing (ie the Msg is
|
||||||
|
// invalid), then the corresponding signature is
|
||||||
|
// .Empty().
|
||||||
|
GetSignatures() []StdSignature
|
||||||
|
}
|
||||||
|
|
||||||
|
The ``tx.GetSignatures()`` method returns a list of signatures, which must match
|
||||||
|
the list of addresses returned by ``tx.Msg.GetSigners()``. The signatures come in
|
||||||
|
a standard form:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type StdSignature struct {
|
||||||
|
crypto.PubKey // optional
|
||||||
|
crypto.Signature
|
||||||
|
Sequence int64
|
||||||
|
}
|
||||||
|
|
||||||
|
It contains the signature itself, as well as the corresponding account's
|
||||||
|
sequence number. The sequence number is expected to increment every time a
|
||||||
|
message is signed by a given account. This prevents "replay attacks", where
|
||||||
|
the same message could be executed over and over again.
|
||||||
|
|
||||||
|
The ``StdSignature`` can also optionally include the public key for verifying the
|
||||||
|
signature. An application can store the public key for each address it knows
|
||||||
|
about, making it optional to include the public key in the transaction. In the
|
||||||
|
case of Basecoin, the public key only needs to be included in the first
|
||||||
|
transaction send by a given account - after that, the public key is forever
|
||||||
|
stored by the application and can be left out of transactions.
|
||||||
|
|
||||||
|
Transactions can also specify the address responsible for paying the
|
||||||
|
transaction's fees using the ``tx.GetFeePayer()`` method.
|
||||||
|
|
||||||
|
The standard way to create a transaction from a message is to use the ``StdTx``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type StdTx struct {
|
||||||
|
Msg
|
||||||
|
Signatures []StdSignature
|
||||||
|
}
|
||||||
|
|
||||||
|
Encoding and Decoding Transactions
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Messages and transactions are designed to be generic enough for developers to
|
||||||
|
specify their own encoding schemes. This enables the SDK to be used as the
|
||||||
|
framework for constructing already specified cryptocurrency state machines, for
|
||||||
|
instance Ethereum.
|
||||||
|
|
||||||
|
When initializing an application, a developer must specify a ``TxDecoder``
|
||||||
|
function which determines how an arbitrary byte array should be unmarshalled
|
||||||
|
into a ``Tx``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type TxDecoder func(txBytes []byte) (Tx, error)
|
||||||
|
|
||||||
|
In ``Basecoin``, we use the Tendermint wire format and the ``go-wire`` library for
|
||||||
|
encoding and decoding all message types. The ``go-wire`` library has the nice
|
||||||
|
property that it can unmarshal into interface types, but it requires the
|
||||||
|
relevant types to be registered ahead of time. Registration happens on a
|
||||||
|
``Codec`` object, so as not to taint the global name space.
|
||||||
|
|
||||||
|
For instance, in ``Basecoin``, we wish to register the ``SendMsg`` and ``IssueMsg``
|
||||||
|
types:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
cdc.RegisterInterface((*sdk.Msg)(nil), nil)
|
||||||
|
cdc.RegisterConcrete(bank.SendMsg{}, "cosmos-sdk/SendMsg", nil)
|
||||||
|
cdc.RegisterConcrete(bank.IssueMsg{}, "cosmos-sdk/IssueMsg", nil)
|
||||||
|
|
||||||
|
Note how each concrete type is given a name - these names determine the type's
|
||||||
|
unique "prefix bytes" during encoding. A registered type will always use the
|
||||||
|
same prefix-bytes, regardless of what interface it is satisfying. For more
|
||||||
|
details, see the `go-wire documentation <https://github.com/tendermint/go-wire/tree/develop>`__.
|
||||||
|
|
||||||
|
|
||||||
|
MultiStore
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
MultiStore is like a filesystem
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Mounting an IAVLStore
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
TODO:
|
||||||
|
|
||||||
|
* IAVLStore: Fast balanced dynamic Merkle store.
|
||||||
|
|
||||||
|
* supports iteration.
|
||||||
|
|
||||||
|
* MultiStore: multiple Merkle tree backends in a single store
|
||||||
|
|
||||||
|
* allows using Ethereum Patricia Trie and Tendermint IAVL in same app
|
||||||
|
|
||||||
|
* Provide caching for intermediate state during execution of blocks and transactions (including for iteration)
|
||||||
|
* Historical state pruning and snapshotting.
|
||||||
|
* Query proofs (existence, absence, range, etc.) on current and retained historical state.
|
||||||
|
|
||||||
|
Context
|
||||||
|
-------
|
||||||
|
|
||||||
|
The SDK uses a ``Context`` to propagate common information across functions. The
|
||||||
|
``Context`` is modelled after the Golang ``context.Context`` object, which has
|
||||||
|
become ubiquitous in networking middleware and routing applications as a means
|
||||||
|
to easily propagate request context through handler functions.
|
||||||
|
|
||||||
|
The main information stored in the ``Context`` includes the application
|
||||||
|
MultiStore (see below), the last block header, and the transaction bytes.
|
||||||
|
Effectively, the context contains all data that may be necessary for processing
|
||||||
|
a transaction.
|
||||||
|
|
||||||
|
Many methods on SDK objects receive a context as the first argument.
|
||||||
|
|
||||||
|
Handler
|
||||||
|
-------
|
||||||
|
|
||||||
|
Transaction processing in the SDK is defined through ``Handler`` functions:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
type Handler func(ctx Context, tx Tx) Result
|
||||||
|
|
||||||
|
A handler takes a context and a transaction and returns a result. All
|
||||||
|
information necessary for processing a transaction should be available in the
|
||||||
|
context.
|
||||||
|
|
||||||
|
While the context holds the entire application state (all referenced from the
|
||||||
|
root MultiStore), a particular handler only needs a particular kind of access
|
||||||
|
to a particular store (or two or more). Access to stores is managed using
|
||||||
|
capabilities keys and mappers. When a handler is initialized, it is passed a
|
||||||
|
key or mapper that gives it access to the relevant stores.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
// File: cosmos-sdk/examples/basecoin/app/init_stores.go
|
||||||
|
app.BaseApp.MountStore(app.capKeyMainStore, sdk.StoreTypeIAVL)
|
||||||
|
app.accountMapper = auth.NewAccountMapper(
|
||||||
|
app.capKeyMainStore, // target store
|
||||||
|
&types.AppAccount{}, // prototype
|
||||||
|
)
|
||||||
|
|
||||||
|
// File: cosmos-sdk/examples/basecoin/app/init_handlers.go
|
||||||
|
app.router.AddRoute("bank", bank.NewHandler(app.accountMapper))
|
||||||
|
|
||||||
|
// File: cosmos-sdk/x/bank/handler.go
|
||||||
|
// NOTE: Technically, NewHandler only needs a CoinMapper
|
||||||
|
func NewHandler(am sdk.AccountMapper) sdk.Handler {
|
||||||
|
return func(ctx sdk.Context, msg sdk.Msg) sdk.Result {
|
||||||
|
cm := CoinMapper{am}
|
||||||
|
...
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
AnteHandler
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Handling Fee payment
|
||||||
|
~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Handling Authentication
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Accounts and x/auth
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
sdk.Account
|
||||||
|
~~~~~~~~~~~
|
||||||
|
|
||||||
|
auth.BaseAccount
|
||||||
|
~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
auth.AccountMapper
|
||||||
|
~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Wire codec
|
||||||
|
----------
|
||||||
|
|
||||||
|
Why another codec?
|
||||||
|
~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
vs encoding/json
|
||||||
|
~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
vs protobuf
|
||||||
|
~~~~~~~~~~~
|
||||||
|
|
||||||
|
Dummy example
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Basecoin example
|
||||||
|
----------------
|
||||||
|
|
||||||
|
The quintessential SDK application is Basecoin - a simple
|
||||||
|
multi-asset cryptocurrency. Basecoin consists of a set of
|
||||||
|
accounts stored in a Merkle tree, where each account may have
|
||||||
|
many coins. There are two message types: SendMsg and IssueMsg.
|
||||||
|
SendMsg allows coins to be sent around, while IssueMsg allows a
|
||||||
|
set of predefined users to issue new coins.
|
|
@ -0,0 +1,270 @@
|
||||||
|
Using Gaia
|
||||||
|
==========
|
||||||
|
|
||||||
|
This project is a demonstration of the Cosmos Hub with staking functionality; it is
|
||||||
|
designed to get validators acquainted with staking concepts and procedures.
|
||||||
|
|
||||||
|
Potential validators will be declaring their candidacy, after which users can
|
||||||
|
delegate and, if they so wish, unbond. This can be practiced using a local or
|
||||||
|
public testnet.
|
||||||
|
|
||||||
|
Install
|
||||||
|
-------
|
||||||
|
|
||||||
|
The ``gaia`` tooling is an extension of the Cosmos-SDK; to install:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
go get github.com/cosmos/gaia
|
||||||
|
cd $GOPATH/src/github.com/cosmos/gaia
|
||||||
|
make get_vendor_deps
|
||||||
|
make install
|
||||||
|
|
||||||
|
It has three primary commands:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Available Commands:
|
||||||
|
node The Cosmos Network delegation-game blockchain test
|
||||||
|
rest-server REST client for gaia commands
|
||||||
|
client Gaia light client
|
||||||
|
|
||||||
|
version Show version info
|
||||||
|
help Help about any command
|
||||||
|
|
||||||
|
and a handful of flags that are highlighted only as necessary.
|
||||||
|
|
||||||
|
The ``gaia node`` command is a proxy for running a tendermint node. You'll be using
|
||||||
|
this command to either initialize a new node, or - using existing files - joining
|
||||||
|
the testnet.
|
||||||
|
|
||||||
|
The ``gaia rest-server`` command is used by the `cosmos UI <https://github.com/cosmos/cosmos-ui>`__.
|
||||||
|
|
||||||
|
Lastly, the ``gaia client`` command is the workhorse of the staking module. It allows
|
||||||
|
for sending various transactions and other types of interaction with a running chain.
|
||||||
|
It assumes that you've set up or joined a testnet.
|
||||||
|
|
||||||
|
Generating Keys
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Review the `key management tutorial <../key-management.html>`__ and create one key
|
||||||
|
if you'll be joining the public testnet, and three keys if you'll be trying out a local
|
||||||
|
testnet.
|
||||||
|
|
||||||
|
Setup Testnet
|
||||||
|
-------------
|
||||||
|
|
||||||
|
The first thing you'll want to do is either `create a local testnet <./local-testnet.html>`__ or
|
||||||
|
join a `public testnet <./public-testnet.html>`__. Either step is required before proceeding.
|
||||||
|
|
||||||
|
The rest of this tutorial will assume a local testnet with three participants: ``alice`` will be
|
||||||
|
the initial validator, ``bob`` will first receive tokens from ``alice`` then declare candidacy
|
||||||
|
as a validator, and ``charlie`` will bond then unbond to ``bob``. If you're joining the public
|
||||||
|
testnet, the token amounts will need to be adjusted.
|
||||||
|
|
||||||
|
Sending Tokens
|
||||||
|
--------------
|
||||||
|
|
||||||
|
We'll have ``alice`` who is currently quite rich, send some ``fermions`` to ``bob``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client tx send --amount=1000fermion --sequence=1 --name=alice --to=5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6
|
||||||
|
|
||||||
|
where the ``--sequence`` flag is to be incremented for each transaction, the ``--name`` flag names the sender, and the ``--to`` flag takes ``bob``'s address. You'll see something like:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Please enter passphrase for alice:
|
||||||
|
{
|
||||||
|
"check_tx": {
|
||||||
|
"gas": 30
|
||||||
|
},
|
||||||
|
"deliver_tx": {
|
||||||
|
"tags": [
|
||||||
|
{
|
||||||
|
"key": "height",
|
||||||
|
"value_type": 1,
|
||||||
|
"value_int": 2963
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "coin.sender",
|
||||||
|
"value_string": "5D93A6059B6592833CBC8FA3DA90EE0382198985"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "coin.receiver",
|
||||||
|
"value_string": "5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "423BD7EA3C4B36AF8AFCCA381C0771F8A698BA77",
|
||||||
|
"height": 2963
|
||||||
|
}
|
||||||
|
|
||||||
|
Check out ``bob``'s account, which should now have 992 fermions:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client query account 5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6
|
||||||
|
|
||||||
|
Adding a Second Validator
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
Next, let's add the second node as a validator.
|
||||||
|
|
||||||
|
First, we need the pub_key data:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
cat $HOME/.gaia2/priv_validator.json
|
||||||
|
|
||||||
|
the first part will look like:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
{"address":"7B78527942C831E16907F10C3263D5ED933F7E99","pub_key":{"type":"ed25519","data":"96864CE7085B2E342B0F96F2E92B54B18C6CC700186238810D5AA7DFDAFDD3B2"},
|
||||||
|
|
||||||
|
and you want the ``pub_key`` ``data`` that starts with ``96864CE``.
|
||||||
|
|
||||||
|
Now ``bob`` can declare candidacy to that pubkey:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client tx declare-candidacy --amount=10fermion --name=bob --pubkey=<pub_key data> --moniker=bobby
|
||||||
|
|
||||||
|
with an output like:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Please enter passphrase for bob:
|
||||||
|
{
|
||||||
|
"check_tx": {
|
||||||
|
"gas": 30
|
||||||
|
},
|
||||||
|
"deliver_tx": {},
|
||||||
|
"hash": "2A2A61FFBA1D7A59138E0068C82CC830E5103799",
|
||||||
|
"height": 4075
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
We should see ``bob``'s account balance decrease by 10 fermions:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client query account 5D93A6059B6592833CBC8FA3DA90EE0382198985
|
||||||
|
|
||||||
|
To confirm for certain the new validator is active, ask the tendermint node:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
curl localhost:46657/validators
|
||||||
|
|
||||||
|
If you now kill either node, blocks will stop streaming in, because
|
||||||
|
there aren't enough validators online. Turn it back on and they will
|
||||||
|
start streaming again.
|
||||||
|
|
||||||
|
Now that ``bob`` has declared candidacy, which essentially bonded 10 fermions and made him a validator, we're going to get ``charlie`` to delegate some coins to ``bob``.
|
||||||
|
|
||||||
|
Delegating
|
||||||
|
----------
|
||||||
|
|
||||||
|
First let's have ``alice`` send some coins to ``charlie``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client tx send --amount=1000fermion --sequence=2 --name=alice --to=48F74F48281C89E5E4BE9092F735EA519768E8EF
|
||||||
|
|
||||||
|
Then ``charlie`` will delegate some fermions to ``bob``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client tx delegate --amount=10fermion --name=charlie --pubkey=<pub_key data>
|
||||||
|
|
||||||
|
You'll see output like:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Please enter passphrase for charlie:
|
||||||
|
{
|
||||||
|
"check_tx": {
|
||||||
|
"gas": 30
|
||||||
|
},
|
||||||
|
"deliver_tx": {},
|
||||||
|
"hash": "C3443BA30FCCC1F6E3A3D6AAAEE885244F8554F0",
|
||||||
|
"height": 51585
|
||||||
|
}
|
||||||
|
|
||||||
|
And that's it. You can query ``charlie``'s account to see the decrease in fermions.
|
||||||
|
|
||||||
|
To get more information about the candidate, try:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client query candidate --pubkey=<pub_key data>
|
||||||
|
|
||||||
|
and you'll see output similar to:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
{
|
||||||
|
"height": 51899,
|
||||||
|
"data": {
|
||||||
|
"pub_key": {
|
||||||
|
"type": "ed25519",
|
||||||
|
"data": "52D6FCD8C92A97F7CCB01205ADF310A18411EA8FDCC10E65BF2FCDB05AD1689B"
|
||||||
|
},
|
||||||
|
"owner": {
|
||||||
|
"chain": "",
|
||||||
|
"app": "sigs",
|
||||||
|
"addr": "5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6"
|
||||||
|
},
|
||||||
|
"shares": 20,
|
||||||
|
"voting_power": 20,
|
||||||
|
"description": {
|
||||||
|
"moniker": "bobby",
|
||||||
|
"identity": "",
|
||||||
|
"website": "",
|
||||||
|
"details": ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
It's also possible the query the delegator's bond like so:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client query delegator-bond --delegator-address 48F74F48281C89E5E4BE9092F735EA519768E8EF --pubkey 52D6FCD8C92A97F7CCB01205ADF310A18411EA8FDCC10E65BF2FCDB05AD1689B
|
||||||
|
|
||||||
|
with an output similar to:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
{
|
||||||
|
"height": 325782,
|
||||||
|
"data": {
|
||||||
|
"PubKey": {
|
||||||
|
"type": "ed25519",
|
||||||
|
"data": "52D6FCD8C92A97F7CCB01205ADF310A18411EA8FDCC10E65BF2FCDB05AD1689B"
|
||||||
|
},
|
||||||
|
"Shares": 20
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
where the ``--delegator-address`` is ``charlie``'s address and the ``-pubkey`` is the same as we've been using.
|
||||||
|
|
||||||
|
|
||||||
|
Unbonding
|
||||||
|
---------
|
||||||
|
|
||||||
|
Finally, to relinquish your voting power, unbond some coins. You should see
|
||||||
|
your VotingPower reduce and your account balance increase.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client tx unbond --amount=5fermion --name=charlie --pubkey=<pub_key data>
|
||||||
|
gaia client query account 48F74F48281C89E5E4BE9092F735EA519768E8EF
|
||||||
|
|
||||||
|
See the bond decrease with ``gaia client query delegator-bond`` like above.
|
||||||
|
|
||||||
|
That concludes an overview of the ``gaia`` tooling for local testing.
|
|
@ -0,0 +1,204 @@
|
||||||
|
Key Management
|
||||||
|
==============
|
||||||
|
|
||||||
|
Here we explain a bit how to work with your keys, using the
|
||||||
|
``gaia client keys`` subcommand.
|
||||||
|
|
||||||
|
**Note:** This keys tooling is not considered production ready and is
|
||||||
|
for dev only.
|
||||||
|
|
||||||
|
We'll look at what you can do using the six sub-commands of
|
||||||
|
``gaia client keys``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
new
|
||||||
|
list
|
||||||
|
get
|
||||||
|
delete
|
||||||
|
recover
|
||||||
|
update
|
||||||
|
|
||||||
|
Create keys
|
||||||
|
-----------
|
||||||
|
|
||||||
|
``gaia client keys new`` has two inputs (name, password) and two outputs
|
||||||
|
(address, seed).
|
||||||
|
|
||||||
|
First, we name our key:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys new alice
|
||||||
|
|
||||||
|
This will prompt (10 character minimum) password entry which must be
|
||||||
|
re-typed. You'll see:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Enter a passphrase:
|
||||||
|
Repeat the passphrase:
|
||||||
|
alice A159C96AE911F68913E715ED889D211C02EC7D70
|
||||||
|
**Important** write this seed phrase in a safe place.
|
||||||
|
It is the only way to recover your account if you ever forget your password.
|
||||||
|
|
||||||
|
pelican amateur empower assist awkward claim brave process cliff save album pigeon intact asset
|
||||||
|
|
||||||
|
which shows the address of your key named ``alice``, and its recovery
|
||||||
|
seed. We'll use these shortly.
|
||||||
|
|
||||||
|
Adding the ``--output json`` flag to the above command would give this
|
||||||
|
output:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Enter a passphrase:
|
||||||
|
Repeat the passphrase:
|
||||||
|
{
|
||||||
|
"key": {
|
||||||
|
"name": "alice",
|
||||||
|
"address": "A159C96AE911F68913E715ED889D211C02EC7D70",
|
||||||
|
"pubkey": {
|
||||||
|
"type": "ed25519",
|
||||||
|
"data": "4BF22554B0F0BF2181187E5E5456E3BF3D96DB4C416A91F07F03A9C36F712B77"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"seed": "pelican amateur empower assist awkward claim brave process cliff save album pigeon intact asset"
|
||||||
|
}
|
||||||
|
|
||||||
|
To avoid the prompt, it's possible to pipe the password into the
|
||||||
|
command, e.g.:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
echo 1234567890 | gaia client keys new fred --output json
|
||||||
|
|
||||||
|
After trying each of the three ways to create a key, to look at them all, use:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys list
|
||||||
|
|
||||||
|
to list all the keys:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
All keys:
|
||||||
|
alice 6FEA9C99E2565B44FCC3C539A293A1378CDA7609
|
||||||
|
bob A159C96AE911F68913E715ED889D211C02EC7D70
|
||||||
|
charlie 784D623E0C15DE79043C126FA6449B68311339E5
|
||||||
|
|
||||||
|
Again, we can use the ``--output json`` flag:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"name": "alice",
|
||||||
|
"address": "6FEA9C99E2565B44FCC3C539A293A1378CDA7609",
|
||||||
|
"pubkey": {
|
||||||
|
"type": "ed25519",
|
||||||
|
"data": "878B297F1E863CC30CAD71E04A8B3C23DB71C18F449F39E35B954EDB2276D32D"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "bob",
|
||||||
|
"address": "A159C96AE911F68913E715ED889D211C02EC7D70",
|
||||||
|
"pubkey": {
|
||||||
|
"type": "ed25519",
|
||||||
|
"data": "2127CAAB96C08E3042C5B33C8B5A820079AAE8DD50642DCFCC1E8B74821B2BB9"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "charlie",
|
||||||
|
"address": "784D623E0C15DE79043C126FA6449B68311339E5",
|
||||||
|
"pubkey": {
|
||||||
|
"type": "ed25519",
|
||||||
|
"data": "4BF22554B0F0BF2181187E5E5456E3BF3D96DB4C416A91F07F03A9C36F712B77"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
to get machine readable output.
|
||||||
|
|
||||||
|
If we want information about one specific key, then:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys get charlie --output json
|
||||||
|
|
||||||
|
will, for example, return the info for only the "charlie" key returned
|
||||||
|
from the previous ``gaia client keys list`` command.
|
||||||
|
|
||||||
|
The keys tooling can support different types of keys with a flag:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys new bit --type secp256k1
|
||||||
|
|
||||||
|
and you'll see the difference in the ``type`` field from ``gaia client
|
||||||
|
keys get``.
|
||||||
|
|
||||||
|
Before moving on, let's set an environment variable to make
|
||||||
|
``--output json`` the default.
|
||||||
|
|
||||||
|
Either run or put in your ``~/.bash_profile`` the following line:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
export BC_OUTPUT=json
|
||||||
|
|
||||||
|
Recover a key
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Let's say, for whatever reason, you lose a key or forget the password.
|
||||||
|
On creation, you were given a seed. We'll use it to recover a lost key.
|
||||||
|
|
||||||
|
First, let's simulate the loss by deleting a key:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys delete alice
|
||||||
|
|
||||||
|
which prompts for your current password, now rendered obsolete, and
|
||||||
|
gives a warning message. The only way you can recover your key now is
|
||||||
|
using the 12 word seed given on initial creation of the key. Let's try
|
||||||
|
it:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys recover alice-again
|
||||||
|
|
||||||
|
which prompts for a new password then the seed:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
Enter the new passphrase:
|
||||||
|
Enter your recovery seed phrase:
|
||||||
|
strike alien praise vendor term left market practice junior better deputy divert front calm
|
||||||
|
alice-again CBF5D9CE6DDCC32806162979495D07B851C53451
|
||||||
|
|
||||||
|
and voila! You've recovered your key. Note that the seed can be typed
|
||||||
|
out, pasted in, or piped into the command alongside the password.
|
||||||
|
|
||||||
|
To change the password of a key, we can:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys update alice-again
|
||||||
|
|
||||||
|
and follow the prompts.
|
||||||
|
|
||||||
|
That covers most features of the keys sub command.
|
||||||
|
|
||||||
|
.. raw:: html
|
||||||
|
|
||||||
|
<!-- use later in a test script, or more advance tutorial?
|
||||||
|
SEED=$(echo 1234567890 | gaia client keys new fred -o json | jq .seed | tr -d \")
|
||||||
|
echo $SEED
|
||||||
|
(echo qwertyuiop; echo $SEED stamp) | gaia client keys recover oops
|
||||||
|
(echo qwertyuiop; echo $SEED) | gaia client keys recover derf
|
||||||
|
gaia client keys get fred -o json
|
||||||
|
gaia client keys get derf -o json
|
||||||
|
```
|
||||||
|
-->
|
|
@ -0,0 +1,83 @@
|
||||||
|
Local Testnet
|
||||||
|
=============
|
||||||
|
|
||||||
|
This tutorial demonstrates the basics of setting up a gaia
|
||||||
|
testnet locally.
|
||||||
|
|
||||||
|
If you haven't already made a key, make one now:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client keys new alice
|
||||||
|
|
||||||
|
otherwise, use an existing key.
|
||||||
|
|
||||||
|
Initialize The Chain
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
Now initialize a gaia chain, using ``alice``'s address:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia node init 5D93A6059B6592833CBC8FA3DA90EE0382198985 --home=$HOME/.gaia1 --chain-id=gaia-test
|
||||||
|
|
||||||
|
This will create all the files necessary to run a single node chain in
|
||||||
|
``$HOME/.gaia1``: a ``priv_validator.json`` file with the validators
|
||||||
|
private key, and a ``genesis.json`` file with the list of validators and
|
||||||
|
accounts.
|
||||||
|
|
||||||
|
We'll add a second node on our local machine by initiating a node in a
|
||||||
|
new directory, with the same address, and copying in the genesis:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia node init 5D93A6059B6592833CBC8FA3DA90EE0382198985 --home=$HOME/.gaia2 --chain-id=gaia-test
|
||||||
|
cp $HOME/.gaia1/genesis.json $HOME/.gaia2/genesis.json
|
||||||
|
|
||||||
|
We also need to modify ``$HOME/.gaia2/config.toml`` to set new seeds
|
||||||
|
and ports. It should look like:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
proxy_app = "tcp://127.0.0.1:46668"
|
||||||
|
moniker = "anonymous"
|
||||||
|
fast_sync = true
|
||||||
|
db_backend = "leveldb"
|
||||||
|
log_level = "state:info,*:error"
|
||||||
|
|
||||||
|
[rpc]
|
||||||
|
laddr = "tcp://0.0.0.0:46667"
|
||||||
|
|
||||||
|
[p2p]
|
||||||
|
laddr = "tcp://0.0.0.0:46666"
|
||||||
|
seeds = "0.0.0.0:46656"
|
||||||
|
|
||||||
|
Start Nodes
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Now that we've initialized the chains, we can start both nodes:
|
||||||
|
|
||||||
|
NOTE: each command below must be started in separate terminal windows. Alternatively, to run this testnet across multiple machines, you'd replace the ``seeds = "0.0.0.0"`` in ``~/.gaia2.config.toml`` with the IP of the first node, and could skip the modifications we made to the config file above because port conflicts would be avoided.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia node start --home=$HOME/.gaia1
|
||||||
|
gaia node start --home=$HOME/.gaia2
|
||||||
|
|
||||||
|
Now we can initialize a client for the first node, and look up our
|
||||||
|
account:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client init --chain-id=gaia-test --node=tcp://localhost:46657
|
||||||
|
gaia client query account 5D93A6059B6592833CBC8FA3DA90EE0382198985
|
||||||
|
|
||||||
|
To see what tendermint considers the validator set is, use:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
curl localhost:46657/validators
|
||||||
|
|
||||||
|
and compare the information in this file: ``~/.gaia1/priv_validator.json``. The ``address`` and ``pub_key`` fields should match.
|
||||||
|
|
||||||
|
To add a second validator on your testnet, you'll need to bond some tokens by declaring candidacy.
|
|
@ -0,0 +1,64 @@
|
||||||
|
Public Testnets
|
||||||
|
===============
|
||||||
|
|
||||||
|
Here we'll cover the basics of joining a public testnet. These testnets
|
||||||
|
come and go with various names as we release new versions of tendermint
|
||||||
|
core. This tutorial covers joining the ``gaia-1`` testnet. To join
|
||||||
|
other testnets, choose different initialization files, described below.
|
||||||
|
|
||||||
|
Get Tokens
|
||||||
|
----------
|
||||||
|
|
||||||
|
If you haven't already `created a key <../key-management.html>`__,
|
||||||
|
do so now. Copy your key's address and enter it into
|
||||||
|
`this utility <http://www.cosmosvalidators.com/>`__ which will send you
|
||||||
|
some ``fermion`` testnet tokens.
|
||||||
|
|
||||||
|
Get Files
|
||||||
|
---------
|
||||||
|
|
||||||
|
Now, to sync with the testnet, we need the genesis file and seeds. The
|
||||||
|
easiest way to get them is to clone and navigate to the tendermint
|
||||||
|
testnet repo:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
git clone https://github.com/tendermint/testnets ~/testnets
|
||||||
|
cd ~/testnets/gaia-1/gaia
|
||||||
|
|
||||||
|
NOTE: to join a different testnet, change the ``gaia-1/gaia`` filepath
|
||||||
|
to another directory with testnet initialization files *and* an
|
||||||
|
active testnet.
|
||||||
|
|
||||||
|
Start Node
|
||||||
|
----------
|
||||||
|
|
||||||
|
Now we can start a new node; it may take a while to sync with the
|
||||||
|
existing testnet.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia node start --home=$HOME/testnets/gaia-1/gaia
|
||||||
|
|
||||||
|
Once blocks slow down to about one per second, you're all caught up.
|
||||||
|
|
||||||
|
The ``gaia node start`` command will automatically generate a validator
|
||||||
|
private key found in ``~/testnets/gaia-1/gaia/priv_validator.json``.
|
||||||
|
|
||||||
|
Finally, let's initialize the gaia client to interact with the testnet:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client init --chain-id=gaia-1 --node=tcp://localhost:46657
|
||||||
|
|
||||||
|
and check our balance:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
gaia client query account $MYADDR
|
||||||
|
|
||||||
|
Where ``$MYADDR`` is the address originally generated by ``gaia keys new bob``.
|
||||||
|
|
||||||
|
You are now ready to declare candidacy or delegate some fermions. See the
|
||||||
|
`staking module overview <./staking-module.html>`__ for more information
|
||||||
|
on using the ``gaia client``.
|
|
@ -0,0 +1,38 @@
|
||||||
|
Replay Protection
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
In order to prevent `replay
|
||||||
|
attacks <https://en.wikipedia.org/wiki/Replay_attack>`__ a multi account
|
||||||
|
nonce system has been constructed as a module, which can be found in
|
||||||
|
``modules/nonce``. By adding the nonce module to the stack, each
|
||||||
|
transaction is verified for authenticity against replay attacks. This is
|
||||||
|
achieved by requiring that a new signed copy of the sequence number
|
||||||
|
which must be exactly 1 greater than the sequence number of the previous
|
||||||
|
transaction. A distinct sequence number is assigned per chain-id,
|
||||||
|
application, and group of signers. Each sequence number is tracked as a
|
||||||
|
nonce-store entry where the key is the marshaled list of actors after
|
||||||
|
having been sorted by chain, app, and address.
|
||||||
|
|
||||||
|
.. code:: golang
|
||||||
|
|
||||||
|
// Tx - Nonce transaction structure, contains list of signers and current sequence number
|
||||||
|
type Tx struct {
|
||||||
|
Sequence uint32 `json:"sequence"`
|
||||||
|
Signers []basecoin.Actor `json:"signers"`
|
||||||
|
Tx basecoin.Tx `json:"tx"`
|
||||||
|
}
|
||||||
|
|
||||||
|
By distinguishing sequence numbers across groups of Signers,
|
||||||
|
multi-signature Actors need not lock up use of their Address while
|
||||||
|
waiting for all the members of a multi-sig transaction to occur. Instead
|
||||||
|
only the multi-sig account will be locked, while other accounts
|
||||||
|
belonging to that signer can be used and signed with other sequence
|
||||||
|
numbers.
|
||||||
|
|
||||||
|
By abstracting out the nonce module in the stack, entire series of
|
||||||
|
transactions can occur without needing to verify the nonce for each
|
||||||
|
member of the series. A common example is a stack which will send coins
|
||||||
|
and charge a fee. Within the SDK this can be achieved using separate
|
||||||
|
modules in a stack, one to send the coins and the other to charge the
|
||||||
|
fee, however both modules do not need to check the nonce. This can occur
|
||||||
|
as a separate module earlier in the stack.
|
|
@ -1,12 +0,0 @@
|
||||||
FROM golang:1.7.4
|
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
|
||||||
zip \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# We want to ensure that release builds never have any cgo dependencies so we
|
|
||||||
# switch that off at the highest level.
|
|
||||||
ENV CGO_ENABLED 0
|
|
||||||
|
|
||||||
RUN mkdir -p $GOPATH/src/github.com/tendermint/basecoin
|
|
||||||
WORKDIR $GOPATH/src/github.com/tendermint/basecoin
|
|
|
@ -1,51 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
REPO_NAME="basecoin"
|
|
||||||
|
|
||||||
# Get the version from the environment, or try to figure it out.
|
|
||||||
if [ -z $VERSION ]; then
|
|
||||||
VERSION=$(awk -F\" '/Version =/ { print $2; exit }' < version/version.go)
|
|
||||||
fi
|
|
||||||
if [ -z "$VERSION" ]; then
|
|
||||||
echo "Please specify a version."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "==> Building version $VERSION..."
|
|
||||||
|
|
||||||
# Get the parent directory of where this script is.
|
|
||||||
SOURCE="${BASH_SOURCE[0]}"
|
|
||||||
while [ -h "$SOURCE" ] ; do SOURCE="$(readlink "$SOURCE")"; done
|
|
||||||
DIR="$( cd -P "$( dirname "$SOURCE" )/.." && pwd )"
|
|
||||||
|
|
||||||
# Change into that dir because we expect that.
|
|
||||||
cd "$DIR"
|
|
||||||
|
|
||||||
# Delete the old dir
|
|
||||||
echo "==> Removing old directory..."
|
|
||||||
rm -rf build/pkg
|
|
||||||
mkdir -p build/pkg
|
|
||||||
|
|
||||||
# Do a hermetic build inside a Docker container.
|
|
||||||
docker build -t tendermint/${REPO_NAME}-builder scripts/${REPO_NAME}-builder/
|
|
||||||
docker run --rm -e "BUILD_TAGS=$BUILD_TAGS" -v "$(pwd)":/go/src/github.com/tendermint/${REPO_NAME} tendermint/${REPO_NAME}-builder ./scripts/dist_build.sh
|
|
||||||
|
|
||||||
# Add $REPO_NAME and $VERSION prefix to package name.
|
|
||||||
rm -rf ./build/dist
|
|
||||||
mkdir -p ./build/dist
|
|
||||||
for FILENAME in $(find ./build/pkg -mindepth 1 -maxdepth 1 -type f); do
|
|
||||||
FILENAME=$(basename "$FILENAME")
|
|
||||||
cp "./build/pkg/${FILENAME}" "./build/dist/${REPO_NAME}_${VERSION}_${FILENAME}"
|
|
||||||
done
|
|
||||||
|
|
||||||
# Make the checksums.
|
|
||||||
pushd ./build/dist
|
|
||||||
shasum -a256 ./* > "./${REPO_NAME}_${VERSION}_SHA256SUMS"
|
|
||||||
popd
|
|
||||||
|
|
||||||
# Done
|
|
||||||
echo
|
|
||||||
echo "==> Results:"
|
|
||||||
ls -hl ./build/dist
|
|
||||||
|
|
||||||
exit 0
|
|
|
@ -1,60 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# Get the parent directory of where this script is.
|
|
||||||
SOURCE="${BASH_SOURCE[0]}"
|
|
||||||
while [ -h "$SOURCE" ] ; do SOURCE="$(readlink "$SOURCE")"; done
|
|
||||||
DIR="$( cd -P "$( dirname "$SOURCE" )/.." && pwd )"
|
|
||||||
|
|
||||||
# Change into that dir because we expect that.
|
|
||||||
cd "$DIR"
|
|
||||||
|
|
||||||
# Get the git commit
|
|
||||||
GIT_COMMIT="$(git rev-parse --short HEAD)"
|
|
||||||
GIT_DESCRIBE="$(git describe --tags --always)"
|
|
||||||
GIT_IMPORT="github.com/tendermint/basecoin/version"
|
|
||||||
|
|
||||||
# Determine the arch/os combos we're building for
|
|
||||||
XC_ARCH=${XC_ARCH:-"386 amd64 arm"}
|
|
||||||
XC_OS=${XC_OS:-"solaris darwin freebsd linux windows"}
|
|
||||||
|
|
||||||
# Make sure build tools are available.
|
|
||||||
make tools
|
|
||||||
|
|
||||||
# Get VENDORED dependencies
|
|
||||||
make get_vendor_deps
|
|
||||||
|
|
||||||
# Build!
|
|
||||||
echo "==> Building basecoin..."
|
|
||||||
"$(which gox)" \
|
|
||||||
-os="${XC_OS}" \
|
|
||||||
-arch="${XC_ARCH}" \
|
|
||||||
-osarch="!darwin/arm !solaris/amd64 !freebsd/amd64" \
|
|
||||||
-ldflags "-X ${GIT_IMPORT}.GitCommit='${GIT_COMMIT}' -X ${GIT_IMPORT}.GitDescribe='${GIT_DESCRIBE}'" \
|
|
||||||
-output "build/pkg/{{.OS}}_{{.Arch}}/basecoin" \
|
|
||||||
-tags="${BUILD_TAGS}" \
|
|
||||||
github.com/tendermint/basecoin/cmd/basecoin
|
|
||||||
|
|
||||||
echo "==> Building basecli..."
|
|
||||||
"$(which gox)" \
|
|
||||||
-os="${XC_OS}" \
|
|
||||||
-arch="${XC_ARCH}" \
|
|
||||||
-osarch="!darwin/arm !solaris/amd64 !freebsd/amd64" \
|
|
||||||
-ldflags "-X ${GIT_IMPORT}.GitCommit='${GIT_COMMIT}' -X ${GIT_IMPORT}.GitDescribe='${GIT_DESCRIBE}'" \
|
|
||||||
-output "build/pkg/{{.OS}}_{{.Arch}}/basecli" \
|
|
||||||
-tags="${BUILD_TAGS}" \
|
|
||||||
github.com/tendermint/basecoin/cmd/basecli
|
|
||||||
|
|
||||||
# Zip all the files.
|
|
||||||
echo "==> Packaging..."
|
|
||||||
for PLATFORM in $(find ./build/pkg -mindepth 1 -maxdepth 1 -type d); do
|
|
||||||
OSARCH=$(basename "${PLATFORM}")
|
|
||||||
echo "--> ${OSARCH}"
|
|
||||||
|
|
||||||
pushd "$PLATFORM" >/dev/null 2>&1
|
|
||||||
zip "../${OSARCH}.zip" ./*
|
|
||||||
popd >/dev/null 2>&1
|
|
||||||
done
|
|
||||||
|
|
||||||
|
|
||||||
exit 0
|
|
|
@ -1,23 +0,0 @@
|
||||||
// +build scripts
|
|
||||||
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/cosmos/cosmos-sdk/tests"
|
|
||||||
"github.com/tendermint/go-wire"
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
PrivKey: 019F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A0867D3B5EAF0C0BF6B5A602D359DAECC86A7A74053490EC37AE08E71360587C870
|
|
||||||
PubKey: 0167D3B5EAF0C0BF6B5A602D359DAECC86A7A74053490EC37AE08E71360587C870
|
|
||||||
Address: D9B727742AA29FA638DC63D70813C976014C4CE0
|
|
||||||
*/
|
|
||||||
func main() {
|
|
||||||
tAcc := tests.PrivAccountFromSecret("test")
|
|
||||||
fmt.Printf("PrivKey:%X\n", tAcc.PrivKey.Bytes())
|
|
||||||
fmt.Printf("PubKey:%X\n", tAcc.Account.PubKey.Bytes())
|
|
||||||
fmt.Printf("Address:%X\n", tAcc.Account.PubKey.Address())
|
|
||||||
fmt.Println(string(wire.JSONBytesPretty(tAcc)))
|
|
||||||
}
|
|
|
@ -1,7 +0,0 @@
|
||||||
#! /bin/bash
|
|
||||||
|
|
||||||
# Get the version from the environment, or try to figure it out.
|
|
||||||
if [ -z $VERSION ]; then
|
|
||||||
VERSION=$(awk -F\" '/Version =/ { print $2; exit }' < version/version.go)
|
|
||||||
fi
|
|
||||||
aws s3 cp --recursive build/dist s3://tendermint/binaries/basecoin/v${VERSION} --acl public-read
|
|
|
@ -1,260 +0,0 @@
|
||||||
# This is not executable, but helper functions for the other scripts
|
|
||||||
|
|
||||||
# XXX XXX XXX XXX XXX
|
|
||||||
# The following global variables must be defined before calling common functions:
|
|
||||||
# SERVER_EXE=foobar # Server binary name
|
|
||||||
# CLIENT_EXE=foobarcli # Client binary name
|
|
||||||
# ACCOUNTS=(foo bar) # List of accounts for initialization
|
|
||||||
# RICH=${ACCOUNTS[0]} # Account to assign genesis balance
|
|
||||||
|
|
||||||
|
|
||||||
# XXX Ex Usage: quickSetup $WORK_NAME $CHAIN_ID
|
|
||||||
# Desc: Start the program, use with shunit2 OneTimeSetUp()
|
|
||||||
quickSetup() {
|
|
||||||
# These are passed in as args
|
|
||||||
BASE_DIR=$HOME/$1
|
|
||||||
CHAIN_ID=$2
|
|
||||||
|
|
||||||
rm -rf $BASE_DIR 2>/dev/null
|
|
||||||
mkdir -p $BASE_DIR
|
|
||||||
|
|
||||||
# Set up client - make sure you use the proper prefix if you set
|
|
||||||
# a custom CLIENT_EXE
|
|
||||||
export BC_HOME=${BASE_DIR}/client
|
|
||||||
prepareClient
|
|
||||||
|
|
||||||
# start basecoin server (with counter)
|
|
||||||
initServer $BASE_DIR $CHAIN_ID
|
|
||||||
if [ $? != 0 ]; then return 1; fi
|
|
||||||
|
|
||||||
initClient $CHAIN_ID
|
|
||||||
if [ $? != 0 ]; then return 1; fi
|
|
||||||
|
|
||||||
printf "...Testing may begin!\n\n\n"
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage: quickTearDown
|
|
||||||
# Desc: close the test server, use with shunit2 OneTimeTearDown()
|
|
||||||
quickTearDown() {
|
|
||||||
printf "\n\nstopping $SERVER_EXE test server..."
|
|
||||||
kill -9 $PID_SERVER >/dev/null 2>&1
|
|
||||||
sleep 1
|
|
||||||
}
|
|
||||||
|
|
||||||
############################################################
|
|
||||||
|
|
||||||
prepareClient() {
|
|
||||||
echo "Preparing client keys..."
|
|
||||||
${CLIENT_EXE} reset_all
|
|
||||||
assertTrue "line=${LINENO}, prepare client" $?
|
|
||||||
|
|
||||||
for i in "${!ACCOUNTS[@]}"; do
|
|
||||||
newKey ${ACCOUNTS[$i]}
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage1: initServer $ROOTDIR $CHAINID
|
|
||||||
# XXX Ex Usage2: initServer $ROOTDIR $CHAINID $PORTPREFIX
|
|
||||||
# Desc: Grabs the Rich account and gives it all genesis money
|
|
||||||
# port-prefix default is 4665{6,7,8}
|
|
||||||
initServer() {
|
|
||||||
echo "Setting up genesis..."
|
|
||||||
SERVE_DIR=$1/server
|
|
||||||
assertNotNull "line=${LINENO}, no chain" $2
|
|
||||||
CHAIN=$2
|
|
||||||
SERVER_LOG=$1/${SERVER_EXE}.log
|
|
||||||
|
|
||||||
GENKEY=$(${CLIENT_EXE} keys get ${RICH} | awk '{print $2}')
|
|
||||||
${SERVER_EXE} init --static --chain-id $CHAIN $GENKEY --home=$SERVE_DIR >>$SERVER_LOG
|
|
||||||
|
|
||||||
# optionally set the port
|
|
||||||
if [ -n "$3" ]; then
|
|
||||||
echo "setting port $3"
|
|
||||||
sed -ie "s/4665/$3/" $SERVE_DIR/config.toml
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Starting ${SERVER_EXE} server..."
|
|
||||||
startServer $SERVE_DIR $SERVER_LOG
|
|
||||||
return $?
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage: startServer $SERVE_DIR $SERVER_LOG
|
|
||||||
startServer() {
|
|
||||||
${SERVER_EXE} start --home=$1 >>$2 2>&1 &
|
|
||||||
sleep 5
|
|
||||||
PID_SERVER=$!
|
|
||||||
disown
|
|
||||||
if ! ps $PID_SERVER >/dev/null; then
|
|
||||||
echo "**FAILED**"
|
|
||||||
cat $SERVER_LOG
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage1: initClient $CHAINID
|
|
||||||
# XXX Ex Usage2: initClient $CHAINID $PORTPREFIX
|
|
||||||
# Desc: Initialize the client program
|
|
||||||
# port-prefix default is 46657
|
|
||||||
initClient() {
|
|
||||||
echo "Attaching ${CLIENT_EXE} client..."
|
|
||||||
PORT=${2:-46657}
|
|
||||||
# hard-code the expected validator hash
|
|
||||||
${CLIENT_EXE} init --chain-id=$1 --node=tcp://localhost:${PORT} --valhash=EB168E17E45BAEB194D4C79067FFECF345C64DE6
|
|
||||||
assertTrue "line=${LINENO}, initialized light-client" $?
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage1: newKey $NAME
|
|
||||||
# XXX Ex Usage2: newKey $NAME $PASSWORD
|
|
||||||
# Desc: Generates key for given username and password
|
|
||||||
newKey(){
|
|
||||||
assertNotNull "line=${LINENO}, keyname required" "$1"
|
|
||||||
KEYPASS=${2:-qwertyuiop}
|
|
||||||
(echo $KEYPASS; echo $KEYPASS) | ${CLIENT_EXE} keys new $1 >/dev/null 2>/dev/null
|
|
||||||
assertTrue "line=${LINENO}, created $1" $?
|
|
||||||
assertTrue "line=${LINENO}, $1 doesn't exist" "${CLIENT_EXE} keys get $1"
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage: getAddr $NAME
|
|
||||||
# Desc: Gets the address for a key name
|
|
||||||
getAddr() {
|
|
||||||
assertNotNull "line=${LINENO}, keyname required" "$1"
|
|
||||||
RAW=$(${CLIENT_EXE} keys get $1)
|
|
||||||
assertTrue "line=${LINENO}, no key for $1" $?
|
|
||||||
# print the addr
|
|
||||||
echo $RAW | cut -d' ' -f2
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage: checkAccount $ADDR $AMOUNT [$HEIGHT]
|
|
||||||
# Desc: Assumes just one coin, checks the balance of first coin in any case
|
|
||||||
# pass optional height to query which block to query
|
|
||||||
checkAccount() {
|
|
||||||
# default height of 0, but accept an argument
|
|
||||||
HEIGHT=${3:-0}
|
|
||||||
|
|
||||||
# make sure sender goes down
|
|
||||||
ACCT=$(${CLIENT_EXE} query account $1 --height=$HEIGHT)
|
|
||||||
if ! assertTrue "line=${LINENO}, account must exist" $?; then
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$DEBUG" ]; then echo $ACCT; echo; fi
|
|
||||||
assertEquals "line=${LINENO}, proper money" "$2" $(echo $ACCT | jq .data.coins[0].amount)
|
|
||||||
return $?
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage: checkRole $ROLE $SIGS $NUM_SIGNERS [$HEIGHT]
|
|
||||||
# Desc: Ensures this named role exists, and has the number of members and required signatures as above
|
|
||||||
checkRole() {
|
|
||||||
# default height of 0, but accept an argument
|
|
||||||
HEIGHT=${4:-0}
|
|
||||||
|
|
||||||
# make sure sender goes down
|
|
||||||
QROLE=$(${CLIENT_EXE} query role $1 --height=$HEIGHT)
|
|
||||||
if ! assertTrue "line=${LINENO}, role must exist" $?; then
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$DEBUG" ]; then echo $QROLE; echo; fi
|
|
||||||
assertEquals "line=${LINENO}, proper sigs" "$2" $(echo $QROLE | jq .data.min_sigs)
|
|
||||||
assertEquals "line=${LINENO}, proper app" '"sigs"' $(echo $QROLE | jq '.data.signers[0].app' )
|
|
||||||
assertEquals "line=${LINENO}, proper signers" "$3" $(echo $QROLE | jq '.data.signers | length')
|
|
||||||
return $?
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# XXX Ex Usage: txSucceeded $? "$TX" "$RECIEVER"
|
|
||||||
# Desc: Must be called right after the `tx` command, makes sure it got a success response
|
|
||||||
txSucceeded() {
|
|
||||||
if (assertTrue "line=${LINENO}, sent tx ($3): $2" $1); then
|
|
||||||
TX=$2
|
|
||||||
assertEquals "line=${LINENO}, good check ($3): $TX" "0" $(echo $TX | jq .check_tx.code)
|
|
||||||
assertEquals "line=${LINENO}, good deliver ($3): $TX" "0" $(echo $TX | jq .deliver_tx.code)
|
|
||||||
else
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage: checkSendTx $HASH $HEIGHT $SENDER $AMOUNT
|
|
||||||
# Desc: This looks up the tx by hash, and makes sure the height and type match
|
|
||||||
# and that the first input was from this sender for this amount
|
|
||||||
checkSendTx() {
|
|
||||||
TX=$(${CLIENT_EXE} query tx $1)
|
|
||||||
assertTrue "line=${LINENO}, found tx" $?
|
|
||||||
if [ -n "$DEBUG" ]; then echo $TX; echo; fi
|
|
||||||
|
|
||||||
assertEquals "line=${LINENO}, proper height" $2 $(echo $TX | jq .height)
|
|
||||||
assertEquals "line=${LINENO}, type=sigs/one" '"sigs/one"' $(echo $TX | jq .data.type)
|
|
||||||
CTX=$(echo $TX | jq .data.data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=chain/tx" '"chain/tx"' $(echo $CTX | jq .type)
|
|
||||||
NTX=$(echo $CTX | jq .data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=nonce" '"nonce"' $(echo $NTX | jq .type)
|
|
||||||
STX=$(echo $NTX | jq .data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=coin/send" '"coin/send"' $(echo $STX | jq .type)
|
|
||||||
assertEquals "line=${LINENO}, proper sender" "\"$3\"" $(echo $STX | jq .data.inputs[0].address.addr)
|
|
||||||
assertEquals "line=${LINENO}, proper out amount" "$4" $(echo $STX | jq .data.outputs[0].coins[0].amount)
|
|
||||||
return $?
|
|
||||||
}
|
|
||||||
|
|
||||||
# XXX Ex Usage: checkRoleTx $HASH $HEIGHT $NAME $NUM_SIGNERS
|
|
||||||
# Desc: This looks up the tx by hash, and makes sure the height and type match
|
|
||||||
# and that the it refers to the proper role
|
|
||||||
checkRoleTx() {
|
|
||||||
TX=$(${CLIENT_EXE} query tx $1)
|
|
||||||
assertTrue "line=${LINENO}, found tx" $?
|
|
||||||
if [ -n "$DEBUG" ]; then echo $TX; echo; fi
|
|
||||||
|
|
||||||
|
|
||||||
assertEquals "line=${LINENO}, proper height" $2 $(echo $TX | jq .height)
|
|
||||||
assertEquals "line=${LINENO}, type=sigs/one" '"sigs/one"' $(echo $TX | jq .data.type)
|
|
||||||
CTX=$(echo $TX | jq .data.data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=chain/tx" '"chain/tx"' $(echo $CTX | jq .type)
|
|
||||||
NTX=$(echo $CTX | jq .data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=nonce" '"nonce"' $(echo $NTX | jq .type)
|
|
||||||
RTX=$(echo $NTX | jq .data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=role/create" '"role/create"' $(echo $RTX | jq .type)
|
|
||||||
assertEquals "line=${LINENO}, proper name" "\"$3\"" $(echo $RTX | jq .data.role)
|
|
||||||
assertEquals "line=${LINENO}, proper num signers" "$4" $(echo $RTX | jq '.data.signers | length')
|
|
||||||
return $?
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# XXX Ex Usage: checkSendFeeTx $HASH $HEIGHT $SENDER $AMOUNT $FEE
|
|
||||||
# Desc: This is like checkSendTx, but asserts a feetx wrapper with $FEE value.
|
|
||||||
# This looks up the tx by hash, and makes sure the height and type match
|
|
||||||
# and that the first input was from this sender for this amount
|
|
||||||
checkSendFeeTx() {
|
|
||||||
TX=$(${CLIENT_EXE} query tx $1)
|
|
||||||
assertTrue "line=${LINENO}, found tx" $?
|
|
||||||
if [ -n "$DEBUG" ]; then echo $TX; echo; fi
|
|
||||||
|
|
||||||
assertEquals "line=${LINENO}, proper height" $2 $(echo $TX | jq .height)
|
|
||||||
assertEquals "line=${LINENO}, type=sigs/one" '"sigs/one"' $(echo $TX | jq .data.type)
|
|
||||||
CTX=$(echo $TX | jq .data.data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=chain/tx" '"chain/tx"' $(echo $CTX | jq .type)
|
|
||||||
NTX=$(echo $CTX | jq .data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=nonce" '"nonce"' $(echo $NTX | jq .type)
|
|
||||||
FTX=$(echo $NTX | jq .data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=fee/tx" '"fee/tx"' $(echo $FTX | jq .type)
|
|
||||||
assertEquals "line=${LINENO}, proper fee" "$5" $(echo $FTX | jq .data.fee.amount)
|
|
||||||
STX=$(echo $FTX | jq .data.tx)
|
|
||||||
assertEquals "line=${LINENO}, type=coin/send" '"coin/send"' $(echo $STX | jq .type)
|
|
||||||
assertEquals "line=${LINENO}, proper sender" "\"$3\"" $(echo $STX | jq .data.inputs[0].address.addr)
|
|
||||||
assertEquals "line=${LINENO}, proper out amount" "$4" $(echo $STX | jq .data.outputs[0].coins[0].amount)
|
|
||||||
return $?
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# XXX Ex Usage: waitForBlock $port
|
|
||||||
# Desc: Waits until the block height on that node increases by one
|
|
||||||
waitForBlock() {
|
|
||||||
addr=http://localhost:$1
|
|
||||||
b1=`curl -s $addr/status | jq .result.latest_block_height`
|
|
||||||
b2=$b1
|
|
||||||
while [ "$b2" == "$b1" ]; do
|
|
||||||
echo "Waiting for node $addr to commit a block ..."
|
|
||||||
sleep 1
|
|
||||||
b2=`curl -s $addr/status | jq .result.latest_block_height`
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
1067
tests/cli/shunit2
1067
tests/cli/shunit2
File diff suppressed because it is too large
Load Diff
|
@ -1,142 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
func main() {}
|
|
||||||
|
|
||||||
// import (
|
|
||||||
// "fmt"
|
|
||||||
// "time"
|
|
||||||
|
|
||||||
// "github.com/gorilla/websocket"
|
|
||||||
// "github.com/cosmos/cosmos-sdk/types"
|
|
||||||
// wire "github.com/tendermint/go-wire"
|
|
||||||
// _ "github.com/tendermint/tendermint/rpc/core/types" // Register RPCResponse > Result types
|
|
||||||
// "github.com/tendermint/tendermint/rpc/lib/client"
|
|
||||||
// "github.com/tendermint/tendermint/rpc/lib/types"
|
|
||||||
// cmn "github.com/tendermint/tmlibs/common"
|
|
||||||
// )
|
|
||||||
|
|
||||||
// func main() {
|
|
||||||
// // ws := rpcclient.NewWSClient("127.0.0.1:46657", "/websocket")
|
|
||||||
// ws := rpcclient.NewWSClient("192.168.99.100:46657", "/websocket")
|
|
||||||
// chainID := "test_chain_id"
|
|
||||||
|
|
||||||
// _, err := ws.Start()
|
|
||||||
// if err != nil {
|
|
||||||
// cmn.Exit(err.Error())
|
|
||||||
// }
|
|
||||||
// var counter = 0
|
|
||||||
|
|
||||||
// // Read a bunch of responses
|
|
||||||
// go func() {
|
|
||||||
// for {
|
|
||||||
// res, ok := <-ws.ResultsCh
|
|
||||||
// if !ok {
|
|
||||||
// break
|
|
||||||
// }
|
|
||||||
// fmt.Println(counter, "res:", cmn.Blue(string(res)))
|
|
||||||
// }
|
|
||||||
// }()
|
|
||||||
|
|
||||||
// // Get the root account
|
|
||||||
// root := types.PrivAccountFromSecret("test")
|
|
||||||
// sequence := int(0)
|
|
||||||
// // Make a bunch of PrivAccounts
|
|
||||||
// privAccounts := types.RandAccounts(1000, 1000000, 0)
|
|
||||||
// privAccountSequences := make(map[string]int)
|
|
||||||
|
|
||||||
// // Send coins to each account
|
|
||||||
// for i := 0; i < len(privAccounts); i++ {
|
|
||||||
// privAccount := privAccounts[i]
|
|
||||||
// tx := &types.SendTx{
|
|
||||||
// Inputs: []types.TxInput{
|
|
||||||
// types.TxInput{
|
|
||||||
// Address: root.Account.PubKey.Address(),
|
|
||||||
// PubKey: root.Account.PubKey, // TODO is this needed?
|
|
||||||
// Coins: coin.Coins{{"", 1000002}},
|
|
||||||
// Sequence: sequence,
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// Outputs: []types.TxOutput{
|
|
||||||
// types.TxOutput{
|
|
||||||
// Address: privAccount.Account.PubKey.Address(),
|
|
||||||
// Coins: coin.Coins{{"", 1000000}},
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// }
|
|
||||||
// sequence += 1
|
|
||||||
|
|
||||||
// // Sign request
|
|
||||||
// signBytes := tx.SignBytes(chainID)
|
|
||||||
// sig := root.Sign(signBytes)
|
|
||||||
// tx.Inputs[0].Signature = sig
|
|
||||||
// //fmt.Println("tx:", tx)
|
|
||||||
|
|
||||||
// // Write request
|
|
||||||
// txBytes := wire.BinaryBytes(struct{ types.Tx }{tx})
|
|
||||||
// request, err := rpctypes.MapToRequest("fakeid", "broadcast_tx_sync", map[string]interface{}{"tx": txBytes})
|
|
||||||
// if err != nil {
|
|
||||||
// cmn.Exit("cannot encode request: " + err.Error())
|
|
||||||
// }
|
|
||||||
// reqBytes := wire.JSONBytes(request)
|
|
||||||
// //fmt.Print(".")
|
|
||||||
// err = ws.WriteMessage(websocket.TextMessage, reqBytes)
|
|
||||||
// if err != nil {
|
|
||||||
// cmn.Exit("writing websocket request: " + err.Error())
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
// // Now send coins between these accounts
|
|
||||||
// for {
|
|
||||||
// counter += 1
|
|
||||||
// time.Sleep(time.Millisecond * 10)
|
|
||||||
|
|
||||||
// randA := cmn.RandInt() % len(privAccounts)
|
|
||||||
// randB := cmn.RandInt() % len(privAccounts)
|
|
||||||
// if randA == randB {
|
|
||||||
// continue
|
|
||||||
// }
|
|
||||||
|
|
||||||
// privAccountA := privAccounts[randA]
|
|
||||||
// privAccountASequence := privAccountSequences[privAccountA.Account.PubKey.KeyString()]
|
|
||||||
// privAccountSequences[privAccountA.Account.PubKey.KeyString()] = privAccountASequence + 1
|
|
||||||
// privAccountB := privAccounts[randB]
|
|
||||||
|
|
||||||
// tx := &types.SendTx{
|
|
||||||
// Inputs: []types.TxInput{
|
|
||||||
// types.TxInput{
|
|
||||||
// Address: privAccountA.Account.PubKey.Address(),
|
|
||||||
// PubKey: privAccountA.Account.PubKey,
|
|
||||||
// Coins: coin.Coins{{"", 3}},
|
|
||||||
// Sequence: privAccountASequence + 1,
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// Outputs: []types.TxOutput{
|
|
||||||
// types.TxOutput{
|
|
||||||
// Address: privAccountB.Account.PubKey.Address(),
|
|
||||||
// Coins: coin.Coins{{"", 1}},
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// }
|
|
||||||
|
|
||||||
// // Sign request
|
|
||||||
// signBytes := tx.SignBytes(chainID)
|
|
||||||
// sig := privAccountA.Sign(signBytes)
|
|
||||||
// tx.Inputs[0].Signature = sig
|
|
||||||
// //fmt.Println("tx:", tx)
|
|
||||||
|
|
||||||
// // Write request
|
|
||||||
// txBytes := wire.BinaryBytes(struct{ types.Tx }{tx})
|
|
||||||
// request, err := rpctypes.MapToRequest("fakeid", "broadcast_tx_sync", map[string]interface{}{"tx": txBytes})
|
|
||||||
// if err != nil {
|
|
||||||
// cmn.Exit("cannot encode request: " + err.Error())
|
|
||||||
// }
|
|
||||||
// reqBytes := wire.JSONBytes(request)
|
|
||||||
// //fmt.Print(".")
|
|
||||||
// err = ws.WriteMessage(websocket.TextMessage, reqBytes)
|
|
||||||
// if err != nil {
|
|
||||||
// cmn.Exit("writing websocket request: " + err.Error())
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
// ws.Stop()
|
|
||||||
// }
|
|
Loading…
Reference in New Issue