algo: contracts

This commit is contained in:
Evan Gray 2022-04-29 15:53:48 -04:00 committed by Evan Gray
parent d4a4f8aab5
commit cc72c2a644
135 changed files with 16528 additions and 17512 deletions

View File

@ -48,3 +48,4 @@ FROM scratch AS const-export
COPY --from=const-build /scripts/.env.0x ethereum/.env
COPY --from=const-build /scripts/.env.hex solana/.env
COPY --from=const-build /scripts/.env.hex terra/tools/.env
COPY --from=const-build /scripts/.env.hex algorand/.env

View File

@ -1,27 +0,0 @@
# syntax=docker.io/docker/dockerfile:1.3@sha256:42399d4635eddd7a9b8a24be879d2f9a930d0ed040a61324cfdf59ef1357b3b2
FROM docker.io/fedora:34 AS teal-build
# Support additional root CAs
COPY README.md cert.pem* /certs/
# Fedora
RUN if [ -e /certs/cert.pem ]; then cp /certs/cert.pem /etc/pki/tls/certs/ca-bundle.crt; fi
RUN dnf -y install python3-pip
COPY staging/algorand/teal /teal
# Install pyTEAL dependencies
COPY third_party/algorand/Pipfile.lock Pipfile.lock
COPY third_party/algorand/Pipfile Pipfile
RUN pip install pipenv
RUN pipenv install
# Regenerate TEAL assembly
RUN pipenv run python3 /teal/wormhole/pyteal/vaa-processor.py vaa-processor-approval.teal vaa-processor-clear.teal
RUN pipenv run python3 /teal/wormhole/pyteal/vaa-verify.py 0 vaa-verify.teal
FROM scratch AS teal-export
COPY --from=teal-build /vaa-processor-approval.teal third_party/algorand/teal/
COPY --from=teal-build /vaa-processor-clear.teal third_party/algorand/teal/
COPY --from=teal-build /vaa-verify.teal third_party/algorand/teal/

View File

@ -113,17 +113,6 @@ local_resource(
trigger_mode = trigger_mode,
)
if algorand:
local_resource(
name = "teal-gen",
deps = ["staging/algorand/teal"],
cmd = "tilt docker build -- --target teal-export -f Dockerfile.teal -o type=local,dest=. .",
env = {"DOCKER_BUILDKIT": "1"},
labels = ["algorand"],
allow_parallel = True,
trigger_mode = trigger_mode,
)
# wasm
if solana:
@ -443,27 +432,6 @@ if ci_tests:
trigger_mode = trigger_mode,
)
# algorand
if algorand:
k8s_yaml_with_ns("devnet/algorand.yaml")
docker_build(
ref = "algorand",
context = "third_party/algorand",
dockerfile = "third_party/algorand/Dockerfile",
)
k8s_resource(
"algorand",
resource_deps = ["teal-gen"],
port_forwards = [
port_forward(4001, name = "Algorand RPC [:4001]", host = webHost),
port_forward(4002, name = "Algorand KMD [:4002]", host = webHost),
],
labels = ["algorand"],
trigger_mode = trigger_mode,
)
# e2e
if e2e:
k8s_yaml_with_ns("devnet/e2e.yaml")
@ -581,3 +549,38 @@ k8s_resource(
labels = ["terra"],
trigger_mode = trigger_mode,
)
if algorand:
k8s_yaml_with_ns("devnet/algorand-devnet.yaml")
docker_build(
ref = "algorand-algod",
context = "algorand/sandbox-algorand",
dockerfile = "algorand/sandbox-algorand/images/algod/Dockerfile"
)
docker_build(
ref = "algorand-indexer",
context = "algorand/sandbox-algorand",
dockerfile = "algorand/sandbox-algorand/images/indexer/Dockerfile"
)
docker_build(
ref = "algorand-contracts",
context = "algorand",
dockerfile = "algorand/Dockerfile",
ignore = ["algorand/test/*.*"]
)
k8s_resource(
"algorand",
port_forwards = [
port_forward(4001, name = "Algod [:4001]", host = webHost),
port_forward(4002, name = "KMD [:4002]", host = webHost),
port_forward(8980, name = "Indexer [:8980]", host = webHost),
],
resource_deps = ["const-gen"],
labels = ["algorand"],
trigger_mode = trigger_mode,
)

3
algorand/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
__pycache__
_sandbox
genesis.json

23
algorand/Dockerfile Normal file
View File

@ -0,0 +1,23 @@
# syntax=docker.io/docker/dockerfile:1.3@sha256:42399d4635eddd7a9b8a24be879d2f9a930d0ed040a61324cfdf59ef1357b3b2
FROM docker.io/python:3.10
# Support additional root CAs
COPY README.md cert.pem* /certs/
# Debian
RUN if [ -e /certs/cert.pem ]; then cp /certs/cert.pem /etc/ssl/certs/ca-certificates.crt; fi
RUN python3 -m pip install virtualenv
RUN apt-get update
RUN apt-get -y install netcat
COPY Pipfile.lock Pipfile.lock
COPY Pipfile Pipfile
RUN python3 -m pip install pipenv
RUN pipenv install
RUN mkdir teal
COPY *.py .
COPY deploy.sh deploy.sh
COPY .env .env

381
algorand/MEMORY.md Normal file
View File

@ -0,0 +1,381 @@
# Algorand memory allocation
# Table of Contents
1. [Background](about:blank#orgea5c5c2)
2. [The “allocator” program](about:blank#org85bc975)
1. [Instantiating template variables](about:blank#orgf176818)
1. [Instantiation, off-chain](about:blank#org2091dfe)
2. [Instantiation, on-chain](about:blank#orga6fa146)
2. [Allocating, client-side](about:blank#org74c4227)
<a id="orgea5c5c2"></a>
# Background
The Algorand blockchain has a completely different virtual machine from the other chains Wormhole currently supports. The assembly language of Algorand is called TEAL, which runs on the Algorand VM (AVM). This means that understanding the Algorand contracts will require understanding a whole new set of platform-specific features and constraints.
The purpose of this post is to investigate the way the Wormhole contracts handle (or rather, implement) memory management on Algorand. This is particularly interesting because of the unique memory constraints on this platform which require a fair amount of creativity to overcome. This code is critical, and highly non-trivial.
Like EVM bytecode, TEAL is a purpose-designed instruction set, but unlike EVM, there is currently no high-level language (like Solidity) with a compiler targeting TEAL. There is an in-between solution, called pyTEAL. pyTEAL is **not** a compiler from Python to TEAL, instead, it is an embedded domain-specific language for generating TEAL code in Python. This means that each pyTEAL program is a code generator (it's a heterogeneous two-stage programming language). The thing about multistage programming languages is that you always have to think about when a piece of code will execute - compile time or runtime? We'll discuss this in detail.
A pyTEAL program essentially constructs TEAL abstract syntax during its execution. The pyTEAL library provides a function called `compileTeal` which turns this abstract syntax into concrete syntax (not sure how much compilation is happening, but we'll roll with this name). Finally, the TEAL file has to be compiled to binary before it can be uploaded to the Algorand blockchain. Somewhat frustratingly, this part of the process requires connecting to a running Algorand node. It's unclear to me why this is the case, as we will see, this compilation step might as well be just called an assembler step. There's pretty much a 1-to-1 mapping from TEAL to the binary.
Algorand contracts can only store a fixed amount of state. This is a major limitation for us, as in order to support some of the key wormhole features like replay protection, we need an unbounded amount of storage.
<a id="org85bc975"></a>
# The “allocator” program
The main allocator code resides in [/algorand/TmplSig.py](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py). Let's work backwards in this file:
```python
if __name__ == '__main__':
core = TmplSig("sig")
with open("sig.tmpl.teal", "w") as f:
f.write(core.get_sig_tmpl())
```
[/algorand/TmplSig.py#L136-L142](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L136-L142)
If we run this program, it will generate a file called `sig.tmpl.teal` based on whatever `get_sig_tmpl()` returns from the `TmplSig` class. The first few lines of `sig.tmpl.teal` look like this:
```
#pragma version 6
intcblock 1
pushint TMPL_ADDR_IDX // TMPL_ADDR_IDX
pop
pushbytes TMPL_EMITTER_ID // TMPL_EMITTER_ID
pop
txn TypeEnum
pushint 6 // appl
==
assert
txn OnCompletion
intc_0 // OptIn
==
assert
txn ApplicationID
pushint TMPL_APP_ID // TMPL_APP_ID
==
assert
txn RekeyTo
pushbytes TMPL_APP_ADDRESS // TMPL_APP_ADDRESS
==
assert
txn Fee
pushint 0 // 0
==
assert
txn CloseRemainderTo
global ZeroAddress
==
assert
txn AssetCloseTo
global ZeroAddress
==
assert
intc_0 // 1
return
```
We'll examine this file more carefully soon. For now, the key takeaway is that it contains TEAL bytecode, which is a stack-based programming language. What's curious is that right at the beginning, we push `TMPL_ADDR_IDX` and `TMPL_EMITTER_ID`, only to immediately pop them from the stack, which seems redundant. Indeed, as we will see, these four lines of code are here just for the sake of being here, and we don't actually expect them to do anything useful. This will make more sense soon.
Let's look at the `get_sig_tmpl` function.
```python
def get_sig_tmpl(self):
```
[/algorand/TmplSig.py#L111](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L111)
Its return statement is a call to `compileTeal`:
```python
return compileTeal(sig_tmpl(), mode=Mode.Signature, version=6, assembleConstants=True)
```
[/algorand/TmplSig.py#L134](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L134)
From the `mode=Mode.Signature` argument in `compileTeal` we can see that this program is a signature program, also known as a LogicSig. LogicSigs are special programs that belong to an account, and their purpose is to authorise transactions from that account (or more generally, to act as a signing authority for the account). If the LogicSig program executes successfully (without reverting), then the transaction is authorised. Algorand allows running such programs as a way of implementing domain-specific account authorisation, such as for escrow systems, etc. The address of the LogicSig's account is deterministically derived from the hash of the LogicSig's bytecode (this will be very important very soon).
The `sig_tmpl` function returns a sequence of TEAL instructions, the first two of which are
```python
# Just putting adding this as a tmpl var to make the address unique and deterministic
# We don't actually care what the value is, pop it
Pop(Tmpl.Int("TMPL_ADDR_IDX")),
Pop(Tmpl.Bytes("TMPL_EMITTER_ID")),
```
[/algorand/TmplSig.py#L117-L120](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L117-L120)
Here are the two pop statements we looked at in the TEAL code. The pyTEAL compiler knows to generate push instructions for arguments whenever necessary, so we don't explicitly push `TMPL_ADDR_IDX` and `TMPL_EMITTER_ID`. These variables immediately get popped, because the LogicSig doesn't actually make use of them, and they're only there so when replaced with different values in the bytecode, we get an operationally equivalent, yet distinct bytecode.
`Tmpl.Int("TMPL_ADDR_IDX")` and `Tmpl.Bytes("TMPL_EMITTER_ID")` are *template variables*. Normally, they can be thought of as variables in a TEAL program that get replaced at compile time by the compiler, sort of like CPP macros. In fact, this already hints at how the LogicSig is going to be used: these variables will be programmatically replaced (albeit not just at compile time, but more on that later) with distinct values to generate distinct LogicSigs, with deterministic addresses. The wormhole contract will then be able to use the memory of the associated accounts of these LogicSigs. To see how, well first go through what the LogicSig does in the first place.
When using a LogicSig to sign a transaction (like in our case), the LogicSig program can query information about the transaction from the Algorand runtime. If the LogicSig doesn't revert, then the transaction will be executed on-chain. It is the LogicSig's responsibility to decide whether it wants to approve this transaction, so it will perform a number of checks to ensure the transaction does what's expected. Importantly, anyone can pass in their own transactions and use the LogicSig to sign it, so forgetting a check here could result in hijacking the LogicSig's associated account. That's because (by default), transactions that are signed (that is, approved) by the LogicSig can access the LogicSig's account. In fact, that's what LogicSigs were designed for in the first place: to implement arbitrary logic for deciding who can spend money out of some account.
The first instruction after the two `Pop`s above is
```python
Assert(Txn.type_enum() == TxnType.ApplicationCall),
```
[/algorand/TmplSig.py#L122](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L122)
Which asserts that the transaction being signed is an application call.
Note that `==` here is an overloaded operator, and it doesn't compare two Python values. Instead, it generates a piece of pyTEAL abstract syntax that represents an equality operation. In TEAL's concrete syntax, this looks like:
```
txn TypeEnum
pushint 6 // appl
==
assert
```
The `txn` opcode pushes a transaction field variable to the stack, in this case its type, which is made available by the AVM runtime.
`pushint` pushes an integer to the stack, here the number 6, which corresponds to application call. `==` pops the top two elements from the stack and pushes 1 if they are equal, or 0 if they are not. Finally, `assert` pops the top of the stack, and reverts the transaction if it's 0 (or if the stack is empty).
Application calls are one of the built-in transaction types defined by Algorand, another one is Payment. We require that this one is an application call, because of the next check, opting in:
```python
Assert(Txn.on_completion() == OnComplete.OptIn),
Assert(Txn.application_id() == Tmpl.Int("TMPL_APP_ID")),
```
[/algorand/TmplSig.py#L123-L124](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L123-L124)
Opting in means that the current application (which is the `"TMPL_APP_ID"`) can allocate local storage into the _sender_'s account data. When the sender is the LogicSig's associated account, then the transaction opts into the LogicSig's account data. **This is the memory allocation mechanism**. By opting the LogicSig's associated account into the wormhole contract, wormhole can now use it to store memory. Since these accounts are also limited in size, we need multiple of them, and at deterministic locations. This is why `TMPL_ADDR_IDX` and `TMPL_EMITTER_ID` are used in the program. The wormhole contracts populate these templates with actual values which will allow deriving a deterministic address (the LogicSig's associated account address) which will be known to be a writeable account by the wormhole contract (since the LogicSig opted into the wormhole contract here). This mechanism is similar to Solana's Program-Derived Addresses (PDAs). The difference is that in Solana, PDAs are always owned by the program they're derived from, whereas in Algorand, ownership of an account can be transferred using the `rekey` mechanism. At this point, the LogicSig's account is owned by the LogicSig. Next, we make sure that the transaction transfers ownership of the account to our application:
```python
Assert(Txn.rekey_to() == Tmpl.Bytes("TMPL_APP_ADDRESS")),
```
[/algorand/TmplSig.py#L125](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L125)
Rekeying is a feature that allows changing an account's key so that someone else becomes the signing authority for it. Only the current signing authority can authorize rekeying, and in the process it loses authorization. Since this transaction is signed by a LogicSig, the account in question is the LogicSig's associated account, and the current signing authority is the LogicSig itself. Once it approves a rekey on its associated account, then further transactions from the associated account do not require running the LogicSig logic, and could just use whatever the new key is (in this case, the wormhole application will be able to use this memory freely).
This means that at this stage, the LogicSig transfers ownership of its associated account to the wormhole program. This has two functions. First, a safety mechanism, because it means that the LogicSig is no longer able to sign any further transactions, the wormhole program owns it now. With this ownership assignment, the LogicSig's account truly behaves like a Solana PDA: it's an account at a deterministic address that's owned (and thus only writeable) by the program. Second, we can allow assets to get created into this account (which gets over the asset limitation issue) but the main wormhole contract can still sign for transactions against it.
Finally, 3 more checks:
```python
Assert(Txn.fee() == Int(0)),
Assert(Txn.close_remainder_to() == Global.zero_address()),
Assert(Txn.asset_close_to() == Global.zero_address()),
```
[/algorand/TmplSig.py#L127-L129](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L127-L129)
We check that the transaction fee is 0. `close_remainder_to` could request the account to be closed and the funds to be sent to another account. This has to be zero, otherwise the account would be deleted. Similarly, `asset_close_to` is also the zero address.
Finally, if all the checks succeeded, the LogicSig succeeds:
```python
Approve()
```
[/algorand/TmplSig.py#L152](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L152)
To summarize, this is how allocation works: a special program opts in to the wormhole contract, thereby allowing wormhole to write memory into the program's account, then transfers ownership of the account to wormhole. Since the address of this account is derived from the program's bytecode, the addresses are reconstructable deterministically, and a given account with such an address cannot have been created any other way than by executing that program with the expected arguments.
<a id="orgf176818"></a>
## Instantiating template variables
Now we understand how the allocator contract works. In this section, we'll review how it can be used. The placeholder variables serve as a way to generate multiple distinct programs in a deterministic way. Let's see how this works.
The construction of the bytecode has to happen both off-chain and on-chain. It needs to happen off-chain because the client has to deploy these programs (since an on-chain program cannot deploy another program, this must be done via a transaction from an off-chain entity). It also needs to happen on-chain, because the wormhole program needs to be able to derive (and validate) the addresses of thusly allocated accounts, and the address derivation works by hashing the bytecode, so the bytecode needs to be constructed in the smart contract from runtime information. The off-chain element could be done easily: we can just compile the allocator LogicSig with the template variables filled in with the appropriate values. The TEAL compiler supports template variable substitution in this way. However, the on-chain component is more complicated, because the smart contract has no access to the compiler, so there's no way to instantiate the template variables using the standard mechanism.
Instead, we turn to programmatically patching the generated binary. The LogicSig is compiled once and for all with default values standing in for the template variables, and there's some code (both off-chain and on-chain) that knows where in the bytecode the template variables are, and replaces them with the appropriate values. The off-chain counterpart can just deploy this patched bytecode, while the on-chain code can hash it to derive the address.
The constructor of the `TmplSig` class starts by initializing the following data structure:
```python
self.map = {"name":"lsig.teal","version":6,"source":"","bytecode":"BiABAYEASIAASDEQgQYSRDEZIhJEMRiBABJEMSCAABJEMQGBABJEMQkyAxJEMRUyAxJEIg==",
"template_labels":{
"TMPL_ADDR_IDX":{"source_line":3,"position":5,"bytes":False},
"TMPL_EMITTER_ID":{"source_line":5,"position":8,"bytes":True},
"TMPL_APP_ID":{"source_line":16,"position":24,"bytes":False},
"TMPL_APP_ADDRESS":{"source_line":20,"position":30,"bytes":True}
},
}
```
[/algorand/TmplSig.py#L39-L47](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L39-L47)
`bytecode` is a base64 encoded binary blob, the assembled binary of the `sig.tmpl.teal` program. The `template_labels` then encodes offsets into the binary where occurrences of the template variables are. In addition to `position`, they all store a `bytes` flag too. This stores whether the template variable is a byte array or not. The importance of this will be that byte arrays can be arbitrary length, and they have an additional byte at the beginning that describes the length of the byte array. Ints on the other hand, are encoded as varints, which are variable width integers that do not contain an additional length byte (see [https://www.sqlite.org/src4/doc/trunk/www/varint.wiki](https://www.sqlite.org/src4/doc/trunk/www/varint.wiki)). The code that patches the binary will need to make sure to write an additional length byte for byte arrays, hence the flag.
To see what the layout looks like, let's decode the first few bytes of the bytecode by hand. The TEAL opcodes are documented on the Algorand website here: [https://developer.algorand.org/docs/get-details/dapps/avm/teal/opcodes/](https://developer.algorand.org/docs/get-details/dapps/avm/teal/opcodes/). If we decode the bytecode from base64, we get the following (hex):
```
06 20 01 01 81 00 48 80 00 48 31 10 81 06 12 44 31 19 22 12 44 31 18 81 00 12 44
31 20 80 00 12 44 31 01 81 00 12 44 31 09 32 03 12 44 31 15 32 03 12 44 22
```
and the first few lines of the TEAL code as a reminder:
```
#pragma version 6
intcblock 1
pushint TMPL_ADDR_IDX // TMPL_ADDR_IDX
pop
pushbytes TMPL_EMITTER_ID // TMPL_EMITTER_ID
pop
```
The first byte (`0x06`) is the version identifier. This matches `#pragma version 6` in the TEAL file. `0x20` is the `intcblock` instruction. It takes a byte that represents how many ints are stored (1 here) in this section, and then a list of ints (here, it's just 1). `0x81` is the `pushint` instruction, and here we push `0x0`. This means that this program was compiled with the template variables filled with zeros. This 0 is at offset 5 in the bytecode, which agrees with the `'position': 5` field of the above data structure for `TMPL_ADDR_IDX`. The `0x48` opcode next is the pop instruction. Next, `0x80` is a `pushbytes` instruction, which first takes a varint for the length of the byte array, then the byte array. Here, since the length is 0, there are no bytes following, instead `0x48` pops immediately. This byte array is at position 8, which corresponds to `TMPL_EMITTER_ID` above.
<a id="org2091dfe"></a>
### Instantiation, off-chain
The python code that constructs the bytecode is defined as
```python
def populate(self, values: Dict[str, Union[str, int]]) -> LogicSigAccount:
"""populate uses the map to fill in the variable of the bytecode and returns a logic sig with the populated bytecode"""
# Get the template source
contract = list(base64.b64decode(self.map["bytecode"]))
shift = 0
for k, v in self.sorted.items():
if k in values:
pos = v["position"] + shift
if v["bytes"]:
val = bytes.fromhex(values[k])
lbyte = uvarint.encode(len(val))
# -1 to account for the existing 00 byte for length
shift += (len(lbyte) - 1) + len(val)
# +1 to overwrite the existing 00 byte for length
contract[pos : pos + 1] = lbyte + val
else:
val = uvarint.encode(values[k])
# -1 to account for existing 00 byte
shift += len(val) - 1
# +1 to overwrite existing 00 byte
contract[pos : pos + 1] = val
# Create a new LogicSigAccount given the populated bytecode,
return LogicSigAccount(bytes(contract))
```
[/algorand/TmplSig.py#L58-L85](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/TmplSig.py#L58-L85)
It loops through the template variables, and replaces them with values defined in the `values` dictionary. For byte arrays, it inserts the length byte first. The `shift` variable maintains the number of extra bytes inserted so far, as the subsequent byte offsets all shift by this amount.
<a id="orga6fa146"></a>
### Instantiation, on-chain
The on-chain program is similar to the above, but it just concatenates the byte chunks together:
```python
@Subroutine(TealType.bytes)
def get_sig_address(acct_seq_start: Expr, emitter: Expr):
# We could iterate over N items and encode them for a more general interface
# but we inline them directly here
return Sha512_256(
Concat(
Bytes("Program"),
# ADDR_IDX aka sequence start
tmpl_sig.get_bytecode_chunk(0),
encode_uvarint(acct_seq_start, Bytes("")),
# EMMITTER_ID
tmpl_sig.get_bytecode_chunk(1),
encode_uvarint(Len(emitter), Bytes("")),
emitter,
# APP_ID
tmpl_sig.get_bytecode_chunk(2),
encode_uvarint(Global.current_application_id(), Bytes("")),
# TMPL_APP_ADDRESS
tmpl_sig.get_bytecode_chunk(3),
encode_uvarint(Len(Global.current_application_address()), Bytes("")),
Global.current_application_address(),
tmpl_sig.get_bytecode_chunk(4),
)
)
```
[/algorand/wormhole_core.py#L86-L115](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/wormhole_core.py#L86-L115)
It writes the string “Program” first. That's because program addresses are derived by hashing the string “Program” prepended to the program's bytecode. The `get_sig_address` function generates exactly this hash. Notice that the arguments it takes are both of type `Expr`. That's again because `get_sig_address` is a python program that operates on TEAL expressions to construct a TEAL expression. The bytecode chunks are constructed at compile time, but the concatenation happens at runtime (since the template variables are TEAL expressions, whose values are only available at runtime). This works similarly to the off-chain Python code.
<a id="org74c4227"></a>
## Allocating, client-side
Finally, let us look at how the client-side code actually allocates these accounts. The main idea is that it constructs the allocator LogicSig with the appropriate template variables substituted, then constructs the three transactions required by the allocator.
The function that does this is `optin`:
```python
def optin(self, client, sender, app_id, idx, emitter, doCreate=True):
```
[/algorand/admin.py#L485](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L485)
First, construct the bytecode with the variables filled in
```python
lsa = self.tsig.populate(
{
"TMPL_APP_ID": app_id,
"TMPL_APP_ADDRESS": aa,
"TMPL_ADDR_IDX": idx,
"TMPL_EMITTER_ID": emitter,
}
)
```
[/algorand/admin.py#L488-L495](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L488-L495)
Then grab the address of the associated account
```python
sig_addr = lsa.address()
```
[/algorand/admin.py#L497](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L497)
Then check if we've already allocated this account
```python
if sig_addr not in self.cache and not self.account_exists(client, app_id, sig_addr):
```
[/algorand/admin.py#L499](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L499)
if not, construct the optin transaction which will also rekey the LogicSig's account to our application.
First, we construct a "seed" transaction, which will pay enough money from the user's wallet into the LogicSig's account to cover for the execution cost:
```python
seed_txn = transaction.PaymentTxn(sender = sender.getAddress(),
sp = sp,
receiver = sig_addr,
amt = self.seed_amt)
seed_txn.fee = seed_txn.fee * 2
```
[/algorand/admin.py#L506-L510](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L506-L510)
Next, the actual opt-in transaction. The sender (the first argument to `ApplicationOptInTxn`) is the `sig_addr`, so our application will allocate memory into it via opting in.
```python
optin_txn = transaction.ApplicationOptInTxn(sig_addr, sp, app_id, rekey_to=get_application_address(app_id))
optin_txn.fee = 0
```
[/algorand/admin.py#L512-L513](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L512-L513)
remember that this code is not trusted, and the LogicSig will verify this transaction is doing the correct thing.
Next, sign the transactions:
```python
signed_seed = seed_txn.sign(sender.getPrivateKey())
signed_optin = transaction.LogicSigTransaction(optin_txn, lsa)
```
[/algorand/admin.py#L517-L518](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L517-L518)
The first one is signed by the user's wallet, as it's only used to send money from the user's account. The second one is signed by the LogicSig (which has signing authority over the associated account). Next, send the transactions
```python
client.send_transactions([signed_seed, signed_optin])
self.waitForTransaction(client, signed_optin.get_txid())
```
[/algorand/admin.py#L520-L521](https://github.com/certusone/wormhole/blob/0af600ddde4f507b30ea043de66033d7383f53af/algorand/admin.py#L520-L521)
With that, an account is allocated. The client can now pass this account to wormhole, which, after validating that the address is right, will be able to read and write values to it.

283
algorand/NOTES.md Normal file
View File

@ -0,0 +1,283 @@
current algorand machine size:
https://howbigisalgorand.com/
custom indexes:
https://github.com/algorand/indexer/blob/develop/docs/PostgresqlIndexes.md
Installing node:
https://developer.algorand.org/docs/run-a-node/setup/install/
kubectl exec -it algorand-0 -c algorand-algod -- /bin/bash
docker exec -it algorand-tilt-indexer /bin/bash
to switch to sandbox, change devnet/node.yaml
- - http://algorand:8980
+ - http://host.minikube.internal:8980
put into dev/node.yaml
- --algorandAppID
- "4"
Install the algorand requirements
python3 -m pip install -r requirements.txt
install docker-compile
./sandbox down; ./sandbox clean; ./sandbox up dev -v; python3 admin.py --devnet
bring up the dev sandbox
./sandbox down; ./sandbox clean
[jsiegel@gusc1a-ossdev-jsl1 ~/.../algorand/_sandbox]{master} git diff
diff --git a/images/indexer/start.sh b/images/indexer/start.sh
index 9e224c2..f1714ea 100755
--- a/images/indexer/start.sh
+++ b/images/indexer/start.sh
@@ -28,6 +28,7 @@ start_with_algod() {
/tmp/algorand-indexer daemon \
--dev-mode \
+ --enable-all-parameters \
--server ":$PORT" \
-P "$CONNECTION_STRING" \
--algod-net "${ALGOD_ADDR}" \
./sandbox up dev
docker_compose("./algorand/sandbox-algorand/tilt-compose.yml")
dc_resource('algo-algod', labels=["algorand"])
dc_resource('algo-indexer', labels=["algorand"])
dc_resource('algo-indexer-db', labels=["algorand"])
// Solana
"01000000000100c9f4230109e378f7efc0605fb40f0e1869f2d82fda5b1dfad8a5a2dafee85e033d155c18641165a77a2db6a7afbf2745b458616cb59347e89ae0c7aa3e7cc2d400000000010000000100010000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000546f6b656e4272696467650100000001c69a1b1a65dd336bf1df6a77afb501fc25db7fc0938cb08595a9ef473265cb4f",
// Ethereum
"01000000000100e2e1975d14734206e7a23d90db48a6b5b6696df72675443293c6057dcb936bf224b5df67d32967adeb220d4fe3cb28be515be5608c74aab6adb31099a478db5c01000000010000000100010000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000546f6b656e42726964676501000000020000000000000000000000000290fb167208af455bb137780163b7b7a9a10c16",
// BSC
"01000000000100719b4ada436f614489dbf87593c38ba9aea35aa7b997387f8ae09f819806f5654c8d45b6b751faa0e809ccbc294794885efa205bd8a046669464c7cbfb03d183010000000100000001000100000000000000000000000000000000000000000000000000000000000000040000000002c8bb0600000000000000000000000000000000000000000000546f6b656e42726964676501000000040000000000000000000000000290fb167208af455bb137780163b7b7a9a10c16",
('0100000001010001ca2fbf60ac6227d47dda4fe2e7bccc087f27d22170a212b9800da5b4cbf0d64c52deb2f65ce58be2267bf5b366437c267b5c7b795cd6cea1ac2fee8a1db3ad006225f801000000010001000000000000000000000000000000000000000000000000000000000000000400000000000000012000000000000000000000000000000000000000000000000000000000436f72650200000000000001beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe',
{'Meta': 'CoreGovernance',
'NewGuardianSetIndex': 0,
'action': 2,
'chain': 1,
'chainRaw': b'\x00\x01',
'consistency': 32,
'digest': b'b%\xf8\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
b'\x00\x01 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00C'
b'ore\x02\x00\x00\x00\x00\x00\x00\x01\xbe\xfaB\x9dW\xcd\x18\xb7\xf8'
b'\xa4\xd9\x1a-\xa9\xabJ\xf0]\x0f\xbe',
'emitter': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04',
'index': 1,
'module': '00000000000000000000000000000000000000000000000000000000436f7265',
'nonce': 1,
'sequence': 1,
'siglen': 1,
'signatures': b"\x00\x01\xca/\xbf`\xacb'\xd4}\xdaO\xe2\xe7\xbc\xcc\x08\x7f'"
b'\xd2!p\xa2\x12\xb9\x80\r\xa5\xb4\xcb\xf0\xd6LR\xde'
b'\xb2\xf6\\\xe5\x8b\xe2&{\xf5\xb3fC|&{\\{y\\\xd6\xce\xa1\xac/'
b'\xee\x8a\x1d\xb3\xad\x00',
'sigs': ['0001ca2fbf60ac6227d47dda4fe2e7bccc087f27d22170a212b9800da5b4cbf0d64c52deb2f65ce58be2267bf5b366437c267b5c7b795cd6cea1ac2fee8a1db3ad00'],
'targetChain': 0,
'timestamp': 1646655489,
'version': 1})
Registering chain 1
('01000000020100c2f0b6e546e093630295e5007e8b077b1028d3aa9a72ab4c454b261306eb4f550179638597f25afd6f40a18580bc87fa315552e7294b407bd4616f0995d1cb55016225f5fd0000000300010000000000000000000000000000000000000000000000000000000000000004000000000000000320000000000000000000000000000000000000000000546f6b656e4272696467650100000001ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5',
{'EmitterChainID': 1,
'Meta': 'TokenBridge RegisterChain',
'action': 1,
'chain': 1,
'chainRaw': b'\x00\x01',
'consistency': 32,
'digest': b'b%\xf5\xfd\x00\x00\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
b'\x00\x03 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00TokenBridge\x01\x00\x00\x00\x01'
b'\xecsr\x99]\\\xc8s#\x97\xfb\n\xd3\\\x01!\xe0\xea\xa9\r&\xf8(\xa5'
b'4\xca\xb5C\x91\xb3\xa4\xf5',
'emitter': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04',
'index': 2,
'module': '000000000000000000000000000000000000000000546f6b656e427269646765',
'nonce': 3,
'sequence': 3,
'siglen': 1,
'signatures': b'\x00\xc2\xf0\xb6\xe5F\xe0\x93c\x02\x95\xe5\x00~\x8b\x07'
b'{\x10(\xd3\xaa\x9ar\xabLEK&\x13\x06\xebOU\x01yc\x85\x97\xf2Z'
b'\xfdo@\xa1\x85\x80\xbc\x87\xfa1UR\xe7)K@{\xd4ao\t\x95\xd1\xcb'
b'U\x01',
'sigs': ['00c2f0b6e546e093630295e5007e8b077b1028d3aa9a72ab4c454b261306eb4f550179638597f25afd6f40a18580bc87fa315552e7294b407bd4616f0995d1cb5501'],
'targetChain': 0,
'targetEmitter': 'ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5',
'timestamp': 1646654973,
'version': 1})
Sending 3000 algo to cover fees
[1000, 1000, 1000, 1000]
{0: 99997976000}
Registering chain 2
('010000000201008c7153db06d433e304dcb7dc029b6cb142093adf87eac7a14adff78060f9b80275479d0620612ae656f7281190ab7bbf85f31eb2ace579e77b2e7855af2a4504016225f5fe0000000400010000000000000000000000000000000000000000000000000000000000000004000000000000000420000000000000000000000000000000000000000000546f6b656e42726964676501000000020000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585',
{'EmitterChainID': 2,
'Meta': 'TokenBridge RegisterChain',
'action': 1,
'chain': 1,
'chainRaw': b'\x00\x01',
'consistency': 32,
'digest': b'b%\xf5\xfe\x00\x00\x00\x04\x00\x01\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
b'\x00\x04 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00TokenBridge\x01\x00\x00\x00\x02'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>\xe1\x8b"'
b'\x14\xaf\xf9p\x00\xd9t\xcfd~|4~\x8f\xa5\x85',
'emitter': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04',
'index': 2,
'module': '000000000000000000000000000000000000000000546f6b656e427269646765',
'nonce': 4,
'sequence': 4,
'siglen': 1,
'signatures': b'\x00\x8cqS\xdb\x06\xd43\xe3\x04\xdc\xb7\xdc\x02\x9bl\xb1B\t:'
b'\xdf\x87\xea\xc7\xa1J\xdf\xf7\x80`\xf9\xb8\x02uG\x9d\x06 a*'
b'\xe6V\xf7(\x11\x90\xab{\xbf\x85\xf3\x1e\xb2\xac\xe5y\xe7{.x'
b'U\xaf*E\x04\x01',
'sigs': ['008c7153db06d433e304dcb7dc029b6cb142093adf87eac7a14adff78060f9b80275479d0620612ae656f7281190ab7bbf85f31eb2ace579e77b2e7855af2a450401'],
'targetChain': 0,
'targetEmitter': '0000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585',
'timestamp': 1646654974,
'version': 1})
Sending 3000 algo to cover fees
[1000, 1000, 1000, 1000]
{0: 99997967000}
Registering chain 3
('010000000201006896223475308eb13bc6d279b620b167f0e4884afc56942b2199faa81e1d50d83d74f7c0700254aa78a7e8966508608f0d827969df09745ad569575136551bce006225f5ff0000000500010000000000000000000000000000000000000000000000000000000000000004000000000000000520000000000000000000000000000000000000000000546f6b656e42726964676501000000030000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2',
{'EmitterChainID': 3,
'Meta': 'TokenBridge RegisterChain',
'action': 1,
'chain': 1,
'chainRaw': b'\x00\x01',
'consistency': 32,
'digest': b'b%\xf5\xff\x00\x00\x00\x05\x00\x01\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
b'\x00\x05 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00TokenBridge\x01\x00\x00\x00\x03'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00|\xf7\xb7d'
b'\xe3\x8a\n^\x96yr\xc1\xdfw\xd42Q\x05d\xe2',
'emitter': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04',
'index': 2,
'module': '000000000000000000000000000000000000000000546f6b656e427269646765',
'nonce': 5,
'sequence': 5,
'siglen': 1,
'signatures': b'\x00h\x96"4u0\x8e\xb1;\xc6\xd2y\xb6 \xb1g\xf0\xe4\x88'
b'J\xfcV\x94+!\x99\xfa\xa8\x1e\x1dP\xd8=t\xf7\xc0p\x02T'
b'\xaax\xa7\xe8\x96e\x08`\x8f\r\x82yi\xdf\ttZ\xd5iWQ6U\x1b'
b'\xce\x00',
'sigs': ['006896223475308eb13bc6d279b620b167f0e4884afc56942b2199faa81e1d50d83d74f7c0700254aa78a7e8966508608f0d827969df09745ad569575136551bce00'],
'targetChain': 0,
'targetEmitter': '0000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2',
'timestamp': 1646654975,
'version': 1})
Sending 3000 algo to cover fees
[1000, 1000, 1000, 1000]
{0: 99997958000}
Registering chain 4
('0100000002010023b80ca2402119348543c14134218cd0e1e54428e54ecdf21acb1a1d6c01be261fcc138023955a04bcd09230a5710340251b68db080a8bbf64d06ab744624d6a016225f5ff0000000600010000000000000000000000000000000000000000000000000000000000000004000000000000000620000000000000000000000000000000000000000000546f6b656e4272696467650100000004000000000000000000000000b6f6d86a8f9879a9c87f643768d9efc38c1da6e7',
{'EmitterChainID': 4,
'Meta': 'TokenBridge RegisterChain',
'action': 1,
'chain': 1,
'chainRaw': b'\x00\x01',
'consistency': 32,
'digest': b'b%\xf5\xff\x00\x00\x00\x06\x00\x01\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
b'\x00\x06 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00TokenBridge\x01\x00\x00\x00\x04'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb6\xf6\xd8j'
b'\x8f\x98y\xa9\xc8\x7fd7h\xd9\xef\xc3\x8c\x1d\xa6\xe7',
'emitter': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04',
'index': 2,
'module': '000000000000000000000000000000000000000000546f6b656e427269646765',
'nonce': 6,
'sequence': 6,
'siglen': 1,
'signatures': b'\x00#\xb8\x0c\xa2@!\x194\x85C\xc1A4!\x8c\xd0\xe1\xe5D'
b'(\xe5N\xcd\xf2\x1a\xcb\x1a\x1dl\x01\xbe&\x1f\xcc\x13'
b'\x80#\x95Z\x04\xbc\xd0\x920\xa5q\x03@%\x1bh\xdb\x08\n\x8b'
b'\xbfd\xd0j\xb7DbMj\x01',
'sigs': ['0023b80ca2402119348543c14134218cd0e1e54428e54ecdf21acb1a1d6c01be261fcc138023955a04bcd09230a5710340251b68db080a8bbf64d06ab744624d6a01'],
'targetChain': 0,
'targetEmitter': '000000000000000000000000b6f6d86a8f9879a9c87f643768d9efc38c1da6e7',
'timestamp': 1646654975,
'version': 1})
Sending 3000 algo to cover fees
[1000, 1000, 1000, 1000]
{0: 99997949000}
Registering chain 5
('010000000201003a168d6617cc74c3a5e254a6e65441d341cec315dcd5b588e72f781f8dd9c82977ad1234732d097151a54add996a33a6e4da3a2e80c41146de0bc834d8830661006225f6000000000700010000000000000000000000000000000000000000000000000000000000000004000000000000000720000000000000000000000000000000000000000000546f6b656e42726964676501000000050000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde',
{'EmitterChainID': 5,
'Meta': 'TokenBridge RegisterChain',
'action': 1,
'chain': 1,
'chainRaw': b'\x00\x01',
'consistency': 32,
'digest': b'b%\xf6\x00\x00\x00\x00\x07\x00\x01\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
b'\x00\x07 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00TokenBridge\x01\x00\x00\x00\x05'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ZXPZ'
b'\x96\xd1\xdb\xf8\xdf\x91\xcb!\xb5D\x19\xfc6\xe9?\xde',
'emitter': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04',
'index': 2,
'module': '000000000000000000000000000000000000000000546f6b656e427269646765',
'nonce': 7,
'sequence': 7,
'siglen': 1,
'signatures': b'\x00:\x16\x8df\x17\xcct\xc3\xa5\xe2T\xa6\xe6TA\xd3A\xce\xc3'
b'\x15\xdc\xd5\xb5\x88\xe7/x\x1f\x8d\xd9\xc8)w\xad\x124s-\t'
b'qQ\xa5J\xdd\x99j3\xa6\xe4\xda:.\x80\xc4\x11F\xde\x0b\xc8'
b'4\xd8\x83\x06a\x00',
'sigs': ['003a168d6617cc74c3a5e254a6e65441d341cec315dcd5b588e72f781f8dd9c82977ad1234732d097151a54add996a33a6e4da3a2e80c41146de0bc834d883066100'],
'targetChain': 0,
'targetEmitter': '0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde',
'timestamp': 1646654976,
'version': 1})
./sandbox down; ./sandbox clean; ./sandbox up dev -v; python3 admin.py --devnet
[jsiegel@gusc1a-ossdev-jsl1 ~/.../algorand/_sandbox]{master} git diff
diff --git a/images/indexer/start.sh b/images/indexer/start.sh
index 9e224c2..f1714ea 100755
--- a/images/indexer/start.sh
+++ b/images/indexer/start.sh
@@ -28,6 +28,7 @@ start_with_algod() {
/tmp/algorand-indexer daemon \
--dev-mode \
+ --enable-all-parameters \
--server ":$PORT" \
-P "$CONNECTION_STRING" \
--algod-net "${ALGOD_ADDR}" \

37
algorand/Pipfile Normal file
View File

@@ -0,0 +1,37 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
attrs = "==21.4.0"
cffi = "==1.15.0"
colorama = "==0.4.4"
execnet = "==1.9.0"
future-fstrings = "==1.2.0"
iniconfig = "==1.1.1"
msgpack = "==1.0.3"
pluggy = "==1.0.0"
py = "==1.11.0"
pycparser = "==2.21"
pycryptodomex = "==3.12.0"
pydantic = "==1.9.0"
pyparsing = "==3.0.6"
pyteal = "==v0.11.1"
py-algorand-sdk = "==1.10.0b1"
pytest = "==6.2.5"
pytest-depends = "==1.0.1"
pytest-forked = "==1.4.0"
pytest-xdist = "==2.5.0"
toml = "==0.10.2"
typing-extensions = "==4.0.1"
uvarint = "==1.2.0"
eth_abi = "==2.1.1"
coincurve = "==16.0.0"
PyNaCl = "==1.5.0"
PyYAML = "==6.0"
[dev-packages]
[requires]
python_version = "3.10"

559
algorand/Pipfile.lock generated Normal file
View File

@@ -0,0 +1,559 @@
{
"_meta": {
"hash": {
"sha256": "d57ff41afd46f1f82ef8cfa680858a40bda1d72482b073dba8666ac42275264f"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"asn1crypto": {
"hashes": [
"sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c",
"sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"
],
"version": "==1.5.1"
},
"attrs": {
"hashes": [
"sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4",
"sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"
],
"index": "pypi",
"version": "==21.4.0"
},
"cffi": {
"hashes": [
"sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3",
"sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2",
"sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636",
"sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20",
"sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728",
"sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27",
"sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66",
"sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443",
"sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0",
"sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7",
"sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39",
"sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605",
"sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a",
"sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37",
"sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029",
"sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139",
"sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc",
"sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df",
"sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14",
"sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880",
"sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2",
"sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a",
"sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e",
"sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474",
"sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024",
"sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8",
"sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0",
"sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e",
"sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a",
"sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e",
"sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032",
"sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6",
"sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e",
"sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b",
"sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e",
"sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954",
"sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962",
"sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c",
"sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4",
"sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55",
"sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962",
"sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023",
"sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c",
"sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6",
"sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8",
"sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382",
"sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7",
"sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc",
"sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997",
"sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"
],
"index": "pypi",
"version": "==1.15.0"
},
"coincurve": {
"hashes": [
"sha256:0a07a80843094c4c1e80a39e3b2525f5fd165804226f556d564b6db513850979",
"sha256:0df23cba832bea28bc0f35176b2aaa7585cc85decce9560b79ec4991d49e0901",
"sha256:1106efb37d8209690a56d3e25acf2bf76adec692fb13cd4574544e2644220f6d",
"sha256:14824fe05f032052f9c028ed820ee542d284f0e056af3258b8fbbd2faa832a47",
"sha256:1621b1fdc5740caa27ce6c670c7134dfb471d0a430c6a62c6891cc40b5479951",
"sha256:166bd6ed3d3f3323f8f2bbada3840fde7a5e21790c33b510e666b81bebcf3b5e",
"sha256:1a8feb5b943fdd2520bcd77fde25101f20a8309a069a5d2e9648fc5156e05e0e",
"sha256:29e4e57996a12cf2a410cf61759917fdb0177a7bcf1a8ab5c8888c9da8d9a26c",
"sha256:3470d6529abc00bb6810ed05e1dd371b32a9679fa0f26d4d2fdd60784a9652cf",
"sha256:400ba504e6ab40ef906b93440866da962a956e3edd17afbc6c493a85a34740bc",
"sha256:43b1e975b83390b9f3e18b9b0ef00465a069328b9c32645201414fd8abb409fb",
"sha256:4d9d1cb54c36dfd22539bb6aeea6a6135ea34e6b84c183272a4dbcd5cf5055d4",
"sha256:4df9498894b6f3a19c5507cfe8bd945a261390bc687b9ed12c1f4671123eb172",
"sha256:5a1448dacd1a79b7c6a5e0fee49268cbf8330c2971b9577b03a0315dd40f278e",
"sha256:5d432ae55989ab4993e8aa5b888958347a947b9d61887fc5072b265329a3648f",
"sha256:5d4ae85e6125a65d38b5c0e1d1e9025ba39b49550703a07eb35efa5c41c18e56",
"sha256:5ed6cc2844e88efd03f30bae00f9114624f813f9460585aa3d57cecad197736a",
"sha256:659b77c9c8e81c35b72c3b1c1759590ed0a7f74674efb2baa9da933f3788e7f6",
"sha256:7eda756fe9e026436eeb98d739b378a739f80c1cc99c77f00d3c91aed0908ff4",
"sha256:7ffc693dde858c30ac295c261799007ddc6bf92f3b2afbfefd61b1dd42e2199f",
"sha256:8618c7b9fdca0f91808db1c5cbe5b8c583b7a1d24504fe9bb50f878e265118ca",
"sha256:8cc73c0b7903b669cc94b0775d588e13ce06df496032cc91bdeb77d79f633e07",
"sha256:8dea0e00245fd9a4f5a83c25a691a6a4651a6d20f4b63e18da5be98d47a96897",
"sha256:9539c1736812fc09cbe6b0471e90f7ffd947688705832360299e97c3dab917ae",
"sha256:9a43b256344398fe27240ff1c740057b1f62b4ccfbdc01580258f54d1f82b5d8",
"sha256:9b87cb82ce8b62f56643696f344d0ca4bf0835b72be2fa874473a889d4a263da",
"sha256:a06789f710541a4370bfca5eb21a8e73b0f1ab2ecf5a0bab0837223f7929d28c",
"sha256:a8edec78e126e106d0e1a0a052c40a77a135225fce46a56110a73f08275ed266",
"sha256:aa427eb103d370175604e628cc3c6dca90a4f3c7801914985f1500c1267b3b52",
"sha256:ae7548e8be34bc595959933ae3f29ad7a63f1d35207d3e11cbc76e6c7484190c",
"sha256:b29aa4b890066b89be2cd8f1a176a872ad79893a649d6a09b34365eae905aba5",
"sha256:b7ad945429c86de2f861e4042bc8996bad80814ecd7984e81fc461258d61c46a",
"sha256:bedf752706ce8b1fa6960d9294db24db76e28343dd0831ba8f435cfac4af973b",
"sha256:c45b3ce930ed4a9ba8690a313494fc9f05e3f313b080bf1d1695f2f7076ca016",
"sha256:c7fa11335c8518eead358c3bcfb9a66f53ce36f1bcfb4393a621207f1bcfda4f",
"sha256:c981c7cf967937c66a537a2f4ae6839189dbc28933a2b737910385788dd4d00c",
"sha256:cae66a1fa69c07cdce05f315830c2ea86528724a96507848b820ada029ac05e9",
"sha256:d0f07d9ab46d7b6178d8ae4ca94ca64ef81abfc3f4042f3b133dbef2acace2a9",
"sha256:d24dc141d15fd3dd75b114572556fe186106a0f5922ed9585cf6ae8323cd1de8",
"sha256:d7969af60e7d2a58cf5280bfc7cac16cdb52a7f7c2e57e94c40807691e96035b",
"sha256:df1888a1f268275c84ce2245d1e6fe73e0135ecc3295ab69c736c1ded0373d8d",
"sha256:e8bfb4091c796085f3d14c946aa34e767a3c947ba5da9bdaa400126ca89c8499",
"sha256:e9fbbd29be70ebc6a9a3acd48a39cbe421ab0f4074654caef6a8ef465252433a",
"sha256:f73e9f08d9da0bdacc0b4b82216ef1db3d9b37d7a69ada9b8a4e6ade665c6ade",
"sha256:fbe60c5e79dd47bbe91e096ad21c5c37e59361f1726913d2dda59680f2f111f0"
],
"index": "pypi",
"version": "==16.0.0"
},
"colorama": {
"hashes": [
"sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b",
"sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"
],
"index": "pypi",
"version": "==0.4.4"
},
"cytoolz": {
"hashes": [
"sha256:ea23663153806edddce7e4153d1d407d62357c05120a4e8485bddf1bd5ab22b4"
],
"markers": "implementation_name == 'cpython'",
"version": "==0.11.2"
},
"dataclasses": {
"hashes": [
"sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf",
"sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"
],
"markers": "python_version < '3.7'",
"version": "==0.8"
},
"decorator": {
"hashes": [
"sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760",
"sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"
],
"version": "==4.4.2"
},
"eth-abi": {
"hashes": [
"sha256:4bb1d87bb6605823379b07f6c02c8af45df01a27cc85bd6abb7cf1446ce7d188",
"sha256:78df5d2758247a8f0766a7cfcea4575bcfe568c34a33e6d05a72c328a9040444"
],
"index": "pypi",
"version": "==2.1.1"
},
"eth-hash": {
"hashes": [
"sha256:3f40cecd5ead88184aa9550afc19d057f103728108c5102f592f8415949b5a76",
"sha256:de7385148a8e0237ba1240cddbc06d53f56731140f8593bdb8429306f6b42271"
],
"markers": "python_version >= '3.5' and python_version < '4'",
"version": "==0.3.2"
},
"eth-typing": {
"hashes": [
"sha256:39cce97f401f082739b19258dfa3355101c64390914c73fe2b90012f443e0dc7",
"sha256:b7fa58635c1cb0cbf538b2f5f1e66139575ea4853eac1d6000f0961a4b277422"
],
"markers": "python_version >= '3.5' and python_version < '4'",
"version": "==2.3.0"
},
"eth-utils": {
"hashes": [
"sha256:74240a8c6f652d085ed3c85f5f1654203d2f10ff9062f83b3bad0a12ff321c7a",
"sha256:bf82762a46978714190b0370265a7148c954d3f0adaa31c6f085ea375e4c61af"
],
"markers": "python_version >= '3.5' and python_version < '4' and python_full_version != '3.5.2'",
"version": "==1.10.0"
},
"execnet": {
"hashes": [
"sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5",
"sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"
],
"index": "pypi",
"version": "==1.9.0"
},
"future-fstrings": {
"hashes": [
"sha256:6cf41cbe97c398ab5a81168ce0dbb8ad95862d3caf23c21e4430627b90844089",
"sha256:90e49598b553d8746c4dc7d9442e0359d038c3039d802c91c0a55505da318c63"
],
"index": "pypi",
"version": "==1.2.0"
},
"importlib-metadata": {
"hashes": [
"sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e",
"sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"
],
"markers": "python_version < '3.8'",
"version": "==4.8.3"
},
"iniconfig": {
"hashes": [
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3",
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"
],
"index": "pypi",
"version": "==1.1.1"
},
"msgpack": {
"hashes": [
"sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc",
"sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147",
"sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3",
"sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba",
"sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39",
"sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85",
"sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9",
"sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a",
"sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec",
"sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88",
"sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e",
"sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a",
"sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b",
"sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1",
"sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3",
"sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef",
"sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079",
"sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52",
"sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a",
"sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a",
"sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4",
"sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996",
"sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73",
"sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a",
"sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920",
"sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7",
"sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d",
"sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770",
"sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50",
"sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2",
"sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2",
"sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d",
"sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea",
"sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611"
],
"index": "pypi",
"version": "==1.0.3"
},
"networkx": {
"hashes": [
"sha256:0635858ed7e989f4c574c2328380b452df892ae85084144c73d8cd819f0c4e06",
"sha256:109cd585cac41297f71103c3c42ac6ef7379f29788eb54cb751be5a663bb235a"
],
"markers": "python_version >= '3.6'",
"version": "==2.5.1"
},
"packaging": {
"hashes": [
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb",
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"
],
"markers": "python_version >= '3.6'",
"version": "==21.3"
},
"parsimonious": {
"hashes": [
"sha256:3add338892d580e0cb3b1a39e4a1b427ff9f687858fdd61097053742391a9f6b"
],
"version": "==0.8.1"
},
"pluggy": {
"hashes": [
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159",
"sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"
],
"index": "pypi",
"version": "==1.0.0"
},
"py": {
"hashes": [
"sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719",
"sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"
],
"index": "pypi",
"version": "==1.11.0"
},
"py-algorand-sdk": {
"hashes": [
"sha256:2a029dbd0b20f23ea43c13c1113c224d6fa89d7280db9768520c7973dccb4e9c",
"sha256:59c868b6c7426f356462fcfb063a237382b86a0e5e93fb6274c8723f5a3b6aa1"
],
"index": "pypi",
"version": "==1.10.0b1"
},
"pycparser": {
"hashes": [
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
"sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
],
"index": "pypi",
"version": "==2.21"
},
"pycryptodomex": {
"hashes": [
"sha256:08c809e9f4be8d4f9948cf4d5ebc7431bbd9e1c0cd5ff478d0d5229f1bc4ad50",
"sha256:097095a7c24b9e7eec865417f620f78adf218162c03b68e4fde194bf87801a67",
"sha256:0981e8071d2ee457d842821438f833e362eed9a25a445d54ad7610b24293118f",
"sha256:1bd9d158afa33dca04748b23e7b9d4055f8c8015ace2e972a866519af02d5eed",
"sha256:1f6c370abf11546b1c9b70062918d601ac8fb7ff113554601b43175eed7480ef",
"sha256:2595b7be43b8b2da953ea3506a8d71c07fc9b479d5c118b0c44a5eca2a1664f6",
"sha256:2d173a5db4e306cd32558b1a3ceb45bd2ebeb6596525fd5945963798b3851e3d",
"sha256:33c06d6819a0204fac675d100f92aa472c704cd774a47171a5949c75c1040ed6",
"sha256:3559da56e1045ad567e69fcc74245073fe1943b07b137bfd1073c7a540a89df7",
"sha256:3bfa2936f8391bfaa17ed6a5c726e33acad56d7b47b8bf824b1908b16b140025",
"sha256:4361881388817f89aa819a553e987200a6eb664df995632b063997dd373a7cee",
"sha256:43af464dcac1ae53e6e14a0ae6f08373b538f3c49fb9e426423618571edfecff",
"sha256:44097663c62b3aa03b5b403b816dedafa592984e8c6857a061ade41f32a2666e",
"sha256:4cbaea8ab8bfa283e6219af39624d921f72f8174765a35416aab4d4b4dec370e",
"sha256:5b0fd9fc81d43cd54dc8e4b2df8730ffd1e34f1f0679920deae16f6487aa1414",
"sha256:676d9f4286f490612fa35ca8fe4b1fced8ff18e653abc1dda34fbf166129d6c2",
"sha256:79ad48096ceb5c714fbc4dc82e3e6b37f095f627b1fef90d94d85e19a19d6611",
"sha256:83379f1fd7b99c4993f5e905f2a6ddb9003996655151ea3c2ee77332ad009d08",
"sha256:88dc997e3e9199a0d67b547fba36c6d1c54fca7d83c4bfe0d3f34f55a4717a2c",
"sha256:8c5b97953130ff76500c6e8e159f2b881c737ebf00034006517b57f382d5317c",
"sha256:922e9dac0166e4617e5c7980d2cff6912a6eb5cb5c13e7ece222438650bd7f66",
"sha256:9c037aaf6affc8f7c4f6f9f6279aa57dd526734246fb5221a0fff3124f57e0b1",
"sha256:a896b41c518269c1cceb582e298a868e6c74bb3cbfd362865ea686c78aebe91d",
"sha256:b1a6f17c4ad896ed628663b021cd797b098c7e9537fd259958f6ffb3b8921081",
"sha256:b5ddaee74e1f06af9c0765a147904dddacf4ca9707f8f079e14e2b14b4f5a544",
"sha256:d55374ebc36de7a3217f2e2318886f0801dd5e486e21aba1fc4ca08e3b6637d7",
"sha256:ddac6a092b97aa11d2a21aec33e941b4453ef774da3d98f2b7c1e01da05e6d5e",
"sha256:de9832ac3c51484fe1757ca4303695423b16cfa54874dae9239bf41f50a2affa",
"sha256:e42a82c63952ed70be3c13782c6984a519b514e6b10108a9647c7576b6c86650",
"sha256:ea8e83bf4731a2369350d7771a1f2ef8d72ad3da70a37d86b1374be8c675abd0"
],
"index": "pypi",
"version": "==3.12.0"
},
"pydantic": {
"hashes": [
"sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3",
"sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398",
"sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1",
"sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65",
"sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4",
"sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16",
"sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2",
"sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c",
"sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6",
"sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce",
"sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9",
"sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3",
"sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034",
"sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c",
"sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a",
"sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77",
"sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b",
"sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6",
"sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f",
"sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721",
"sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37",
"sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032",
"sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d",
"sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed",
"sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6",
"sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054",
"sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25",
"sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46",
"sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5",
"sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c",
"sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070",
"sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1",
"sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7",
"sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d",
"sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"
],
"index": "pypi",
"version": "==1.9.0"
},
"pynacl": {
"hashes": [
"sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858",
"sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d",
"sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93",
"sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1",
"sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92",
"sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff",
"sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba",
"sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394",
"sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b",
"sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"
],
"index": "pypi",
"version": "==1.5.0"
},
"pyparsing": {
"hashes": [
"sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4",
"sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"
],
"index": "pypi",
"version": "==3.0.6"
},
"pyteal": {
"hashes": [
"sha256:6c140db6ac365f438dc979c7de4153686ef97b762b9c4efe23bba0db4ff699e5",
"sha256:a72d1f0859f2248459bbcdbdfcd8cf09023ea003b121f0dbaf58289b8167d20c"
],
"index": "pypi",
"version": "==v0.10.0"
},
"pytest": {
"hashes": [
"sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89",
"sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"
],
"index": "pypi",
"version": "==6.2.5"
},
"pytest-depends": {
"hashes": [
"sha256:90a28e2b87b75b18abd128c94015248544acac20e4392e9921e5a86f93319dfe",
"sha256:a1df072bcc93d77aca3f0946903f5fed8af2d9b0056db1dfc9ed5ac164ab0642"
],
"index": "pypi",
"version": "==1.0.1"
},
"pytest-forked": {
"hashes": [
"sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e",
"sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"
],
"index": "pypi",
"version": "==1.4.0"
},
"pytest-xdist": {
"hashes": [
"sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf",
"sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"
],
"index": "pypi",
"version": "==2.5.0"
},
"pyyaml": {
"hashes": [
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293",
"sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b",
"sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57",
"sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b",
"sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4",
"sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07",
"sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba",
"sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9",
"sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287",
"sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513",
"sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0",
"sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0",
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92",
"sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f",
"sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2",
"sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc",
"sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c",
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86",
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4",
"sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c",
"sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34",
"sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b",
"sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c",
"sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb",
"sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737",
"sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3",
"sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d",
"sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53",
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78",
"sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803",
"sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a",
"sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174",
"sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"
],
"index": "pypi",
"version": "==6.0"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"toml": {
"hashes": [
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
],
"index": "pypi",
"version": "==0.10.2"
},
"toolz": {
"hashes": [
"sha256:6b312d5e15138552f1bda8a4e66c30e236c831b612b2bf0005f8a1df10a4bc33",
"sha256:a5700ce83414c64514d82d60bcda8aabfde092d1c1a8663f9200c07fdcc6da8f"
],
"markers": "python_version >= '3.5'",
"version": "==0.11.2"
},
"typing-extensions": {
"hashes": [
"sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e",
"sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"
],
"index": "pypi",
"version": "==4.0.1"
},
"uvarint": {
"hashes": [
"sha256:7cc17481c9af8ebe3978f5357412b57ff93ce8b14712236338f782cc8ae9a989",
"sha256:9dcc98ad92b642c57494bed9370c4f93479d2d36223b2c3702823f0aa8b1a9a3"
],
"index": "pypi",
"version": "==1.2.0"
},
"zipp": {
"hashes": [
"sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832",
"sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"
],
"markers": "python_version >= '3.6'",
"version": "==3.6.0"
}
},
"develop": {}
}

108
algorand/README.md Normal file
View File

@ -0,0 +1,108 @@
Wormhole Support for Algorand
=============================
This directory contains the components needed to support full Wormhole functionality under the Algorand blockchain platform.
## Component overview
---------------------
This system is comprised of the following main components:
* **Core contract (`wormhole_core.py`)**: Algorand stateful contract with entrypoints for publishing messages (VAAs), verification of VAA signatures, and triggering of governance chores. This will be referred as _CoreContract_ in this document.
* **Token bridge contract (`token_bridge.py`)**: Algorand stateful contract supporting cross-chain bridging, exposing entrypoints for exchanging attestations, native tokens and ASAs, and triggering of governance. This will be referred as _TokenBridge_ in this document.
* **VAA verification stateless program (`vaa_verify.py`)**: Stateless program for verifying the signatures of a Wormhole VAA payload against the set of active guardian public keys. This will be referred as _VaaVerify_ in this document.
* **Dynamic storage stateless program (`TmplSig.py`)**: A stateless program that is bound to the main core and token bridge contracts to provide dynamic storage spaces addressable as a raw blob of bytes. See `local_blob.py`. This will be referred as _TmplSig_ in this document.
Helper utilities and code include support PyTEAL code, deployment tools and tests.
## System Architecture
----------------------
### _TmplSig_ details
--------------------
This stateless program code is parametrized with several values that give different output address. The stateless code will check for several transaction group parameters accordingly.
|Text |Replaced by |
|------|------------|
|`TMPL_ADDR_IDX`| Where storage starts interpreting the space as a raw array of bytes |
|`TMPL_EMITTER_ID` | Concatenation of chain Id + emitter Id in VAAs to be processed, or a hardcoded string identifying the type of information stored e.g `guardian` utf8 string stored in hex. |
|`TMPL_SEED_AMT` | The amount that is paid in Gtxn[0] transaction group fee |
|`TMPL_APP_ID` | Application Id of _CoreContract_, _TokenBridge_, etc that is specified in Gtxn[1] for **opt-in** transaction |
|`TMPL_APP_ADDRESS`| Escrow address of the stateful contract specified in `APP_ID`. Used for **rekey transaction** in Gtxn[2]|
* Local-state associated with the _Tmplsig_ accounts are used as dynamic storage. The technique is to access this local storage as a plain array of bytes instead of the typical key/value structure. With the current Algorand parameters, we have 127 * 15 ~ 2K of storage to be used random-access-like.
* The contract accounts addresses are generated by compilation of a stateless code parametrized by several parameters. In the system, the following contract accounts are generated:
* Account (`seq_addr`) for storing verified sequence number bits based on chainId,emitter,`int(vaa.sequence / MAX_BITS)` where MAX_BITS = 15240. This allows the system to reject duplicated VAAs for the last 2K sequence numbers.
Account (`guardian_addr` and `new_guardian_addr`) for storing the total guardian count, the guardian public keys, and the guardian set expiration time.
* Once generated, the accounts are opted-in and rekeyed to the core application.
Briefly, this is the transaction group when _TmplSig_ is "attached" to a stateful app:
|Tx#| Semantics |
|-|-|
|0 | Payment Txn of `TMPL_SEED_AMT` ALGOs to fund operation |
|1 | Optin of LogicSig to target stateful contract `TMPL_APP_ID` for the app to use Logicsig account local storage |
|2 | Rekey of LogicSig to escrow address for the smart contract to become the sole "governor" of the Logicsig account address |
## Core Contract: Functional Description
----------------------------------------
### Initialization stage
The initialization call needs a governance VAA to be passed in, typically to setup initial guardian list. The init call will:
* store the _VaaVerify_ hash in the `vphash` global state key
* check for the creator address, set `booted` global state to `1`
* check for duplicate VAA
* handle the governance VAA passed as argument.
See below on how governance VAAs are processed, and how duplicate detection technique is used.
### publishMessage
The `publishMessage` call will retrieve the current sequence number from the related _TmplSig_ local store, increment it by 1, store the new sequence number, and emit a Log message which can be picked up by the Wormhole network to subsequently create a guardian-signed VAA message.
### hdlGovernance
Governance messages can carry requests for:
* Update the active guardian set
* Upgrade contracts: For Algorand, an upgrade-contract governance VAA must contain the hash of the program that is approved as an upgrade (stored in global `validUpdateApproveHash`). The upgrade process itself is triggered with the **update** action, where the clear and approval program hashes are checked against what the governance VAA carried. If they differ, an assertion is thrown and the update call is aborted. A successful call writes an onchain Log with the new hashes and allows the update process to go on.
* Setting the per-message fee
* Retrieving previously paid message fees
A governance request packed in a VAA must be verified by a `verifyVaa` call in the transaction group.
### vaaVerify
The VAA verify call will work by design *only* in a transaction group structured as:
| TX | args | accounts | sender |
| ---------- | ------------------------------------------------------- | ----------------------- | -------------------- |
| verifySigs | [sigs<sub>0..n</sub>, keyset<sub>0..n</sub>, digest] | seq_addr, guardian_addr | vaa_verify_stateless |
| verifySigs | ... | seq_addr, guardian_addr | vaa_verify_stateless
| verifyVAA | vaa | seq_addr, guardian_addr | foundation |
Keep in mind that depending on the number of signatures to verify there can be one or several _verifySigs_ calls working in tandem with the _VaaVerify_ stateless program. This depends on how many signatures we can verify on a single TX. At time of this writing, considering the opcode budget limitation of AVM 1.1, a total of nine (9) signatures can be verified at once, so for the current 19 guardians three _verifySigs_ calls would be needed for verifying signatures 0..8, 9..17, 18.
A successful call must:
* Retrieve the guardian keys from the proper local dynamic storage
* Validate if the VAA passed in Argument #1 has enough guardians to be verified
* Check that it's not expired.
* Verify that each _verifySigs_ TX is validated by the correct stateless _VerifyVaa_
* Verify that each _verifySigs_ TX is verifying the expected signature subset.
* Verify that each _verifySigs_ TX is verifying against the same guardian keys.
* Verify that each _verifySigs_ TX is verifying the same VAA.
The vaaVerify call does allow *nop* (dummy) TX in the group to maximize opcode budgets and/or storage capacity. After the `verifyVAA` call, a client can issue more transactions with the fact that the VAA was verified.
## Appendix: Duplicate verification
------------------------------------
To detect duplicate VAA sequence numbers the following technique is used:
* For each key in local state, there is an associated value entry. The total space of value-entries is 127*15 bytes, giving ~2K of addressable space via the `LocalBlob` class.
* A _TmplSig_ stateless account is generated using the 2K space as a bit field, yielding 15240 bits. So for ~16K consecutive VAA numbers, the contract code sets a bit for identifying already verified VAAs. Based on setting the stateless `TMPL_ADDR_IDX` to formula `vaa_sequence_number / 15240`, we have designated storage for marking VAAs in consecutive 16k-bit blocks.

143
algorand/TmplSig.py Normal file
View File

@ -0,0 +1,143 @@
from time import time, sleep
from typing import List, Tuple, Dict, Any, Optional, Union
from base64 import b64decode
import base64
import random
import hashlib
import uuid
import sys
import json
import uvarint
import pprint
from local_blob import LocalBlob
from algosdk.v2client.algod import AlgodClient
from algosdk.kmd import KMDClient
from algosdk import account, mnemonic
from algosdk.encoding import decode_address
from algosdk.future import transaction
from pyteal import compileTeal, Mode, Expr
from pyteal import *
from algosdk.logic import get_application_address
from algosdk.future.transaction import LogicSigAccount
class TmplSig:
    """Template LogicSig ("TmplSig") builder.

    Holds the assembly/source map of a template smart signature and can
    populate its template variables (TMPL_*) to derive unique, deterministic
    LogicSig accounts whose local state the core/bridge applications use as
    dynamic storage (see README).
    """

    def __init__(self, name):
        # The source map is inlined rather than read from disk so the tool has
        # no runtime file dependency; the original file-based loader is kept
        # for reference:
        # with open("{}.json".format(name)) as f:
        #     self.map = json.loads(f.read())
        self.map = {"name":"lsig.teal","version":6,"source":"","bytecode":"BiABAYEASIAASDEQgQYSRDEZIhJEMRiBABJEMSCAABJEMQGBABJEMQkyAxJEMRUyAxJEIg==",
               "template_labels":{
                    "TMPL_ADDR_IDX":{"source_line":3,"position":5,"bytes":False},
                    "TMPL_EMITTER_ID":{"source_line":5,"position":8,"bytes":True},
                    "TMPL_APP_ID":{"source_line":16,"position":24,"bytes":False},
                    "TMPL_APP_ADDRESS":{"source_line":20,"position":30,"bytes":True}
               },
               "label_map":{},"line_map":[0,1,4,6,7,9,10,12,14,15,16,18,19,20,21,23,25,26,27,29,31,32,33,35,37,38,39,41,43,44,45,47,49,50,51]
        }

        self.src = base64.b64decode(self.map["bytecode"])
        # Template labels ordered by byte position in the bytecode so that
        # populate() can patch them left-to-right while tracking the shift.
        self.sorted = dict(
            sorted(
                self.map["template_labels"].items(),
                key=lambda item: item[1]["position"],
            )
        )

    def populate(self, values: "Dict[str, Union[str, int]]") -> "LogicSigAccount":
        """Fill the bytecode's template variables from `values` and return a
        LogicSigAccount wrapping the populated program.

        Byte-typed template values are hex strings (prefixed with a uvarint
        length on substitution); int-typed values are uvarint-encoded in place.
        """
        contract = list(base64.b64decode(self.map["bytecode"]))

        shift = 0  # cumulative growth caused by earlier substitutions
        for k, v in self.sorted.items():
            if k in values:
                pos = v["position"] + shift
                if v["bytes"]:
                    val = bytes.fromhex(values[k])
                    lbyte = uvarint.encode(len(val))
                    # -1 to account for the existing 00 byte for length
                    shift += (len(lbyte) - 1) + len(val)
                    # +1 to overwrite the existing 00 byte for length
                    contract[pos : pos + 1] = lbyte + val
                else:
                    val = uvarint.encode(values[k])
                    # -1 to account for existing 00 byte
                    shift += len(val) - 1
                    # +1 to overwrite existing 00 byte
                    contract[pos : pos + 1] = val

        # Create a new LogicSigAccount given the populated bytecode,
        #pprint.pprint({"values": values, "contract": bytes(contract).hex()})
        return LogicSigAccount(bytes(contract))

    def get_bytecode_raw(self, idx: int) -> bytes:
        """Return the constant bytecode slice between template placeholder
        idx-1 and placeholder idx (slice 0 starts at the program start; the
        last slice runs to the end of the program)."""
        start = 0
        if idx > 0:
            start = list(self.sorted.values())[idx - 1]["position"] + 1

        stop = len(self.src)
        if idx < len(self.sorted):
            stop = list(self.sorted.values())[idx]["position"]

        return self.src[start:stop]

    def get_bytecode_chunk(self, idx: int) -> "Bytes":
        """Same slice as get_bytecode_raw(), wrapped as a PyTeal Bytes literal
        for use inside contract expressions."""
        return Bytes(self.get_bytecode_raw(idx))

    def get_sig_tmpl(self):
        """Generate and assemble the TEAL source of the template LogicSig.

        The LogicSig only approves the opt-in-and-rekey group described in the
        README: an ApplicationCall opt-in to TMPL_APP_ID that rekeys the
        account to TMPL_APP_ADDRESS, with zero fee and no close-outs.
        """
        def sig_tmpl():
            # Unused scratch slots kept from the original source; they emit no
            # instructions, but are left in place to avoid any doubt about
            # perturbing the assembled program (and thus derived addresses).
            admin_app_id = ScratchVar()
            admin_address= ScratchVar()

            return Seq(
                # Just putting adding this as a tmpl var to make the address unique and deterministic
                # We don't actually care what the value is, pop it
                Pop(Tmpl.Int("TMPL_ADDR_IDX")),
                Pop(Tmpl.Bytes("TMPL_EMITTER_ID")),

                Assert(Txn.type_enum() == TxnType.ApplicationCall),
                Assert(Txn.on_completion() == OnComplete.OptIn),
                Assert(Txn.application_id() == Tmpl.Int("TMPL_APP_ID")),
                Assert(Txn.rekey_to() == Tmpl.Bytes("TMPL_APP_ADDRESS")),
                Assert(Txn.fee() == Int(0)),
                Assert(Txn.close_remainder_to() == Global.zero_address()),
                Assert(Txn.asset_close_to() == Global.zero_address()),
                Approve()
            )

        return compileTeal(sig_tmpl(), mode=Mode.Signature, version=6, assembleConstants=True)
if __name__ == '__main__':
    # Emit the template LogicSig TEAL to sig.tmpl.teal for the deploy tooling.
    core = TmplSig("sig")

    # client = AlgodClient("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "http://localhost:4001")
    # pprint.pprint(client.compile( core.get_sig_tmpl()))
    with open("sig.tmpl.teal", "w") as f:
        f.write(core.get_sig_tmpl())

1383
algorand/admin.py Normal file

File diff suppressed because it is too large Load Diff

21
algorand/deploy.sh Executable file
View File

@ -0,0 +1,21 @@
#!/bin/bash
# Requires netcat for the port probe below.

# Block until the algod node accepts connections on its REST port.
until netcat -z localhost 4001; do
  sleep 1
done

# Keep retrying the genesis download until it succeeds.
until wget http://localhost:4001/genesis -O genesis.json ; do
  sleep 15
done

if [ ! -f genesis.json ]; then
  echo "Failed to create genesis file!"
  exit 1
fi

# Give the node a moment to settle, then bootstrap the contracts.
sleep 2

pipenv run python3 admin.py --devnet --boot

237
algorand/gentest.py Normal file
View File

@ -0,0 +1,237 @@
from eth_abi import encode_single, encode_abi
import sys
import pprint
import time
from Cryptodome.Hash import keccak
import coincurve
import base64
from random import random
from algosdk.encoding import decode_address
class GenTest:
    """Builds guardian-signed Wormhole VAAs (core governance and token-bridge
    payloads) for devnet testing.

    bigSet=True selects the 19-key devnet guardian set; False selects a single
    well-known devnet guardian key.  All keys below are public devnet
    fixtures, not secrets.
    """

    def __init__(self, bigSet) -> None:
        if bigSet:
            # Guardian public addresses (ETH-style, hex without 0x prefix).
            self.guardianKeys = [
                "52A26Ce40F8CAa8D36155d37ef0D5D783fc614d2",
                "389A74E8FFa224aeAD0778c786163a7A2150768C",
                "B4459EA6482D4aE574305B239B4f2264239e7599",
                "072491bd66F63356090C11Aae8114F5372aBf12B",
                "51280eA1fd2B0A1c76Ae29a7d54dda68860A2bfF",
                "fa9Aa60CfF05e20E2CcAA784eE89A0A16C2057CB",
                "e42d59F8FCd86a1c5c4bA351bD251A5c5B05DF6A",
                "4B07fF9D5cE1A6ed58b6e9e7d6974d1baBEc087e",
                "c8306B84235D7b0478c61783C50F990bfC44cFc0",
                "C8C1035110a13fe788259A4148F871b52bAbcb1B",
                "58A2508A20A7198E131503ce26bBE119aA8c62b2",
                "8390820f04ddA22AFe03be1c3bb10f4ba6CF94A0",
                "1FD6e97387C34a1F36DE0f8341E9D409E06ec45b",
                "255a41fC2792209CB998A8287204D40996df9E54",
                "bA663B12DD23fbF4FbAC618Be140727986B3BBd0",
                "79040E577aC50486d0F6930e160A5C75FD1203C6",
                "3580D2F00309A9A85efFAf02564Fc183C0183A96",
                "3869795913D3B6dBF3B24a1C7654672c69A23c35",
                "1c0Cc52D7673c52DE99785741344662F5b2308a0",
            ]
            # Matching secp256k1 signing keys, same order as guardianKeys.
            self.guardianPrivKeys = [
                "563d8d2fd4e701901d3846dee7ae7a92c18f1975195264d676f8407ac5976757",
                "8d97f25916a755df1d9ef74eb4dbebc5f868cb07830527731e94478cdc2b9d5f",
                "9bd728ad7617c05c31382053b57658d4a8125684c0098f740a054d87ddc0e93b",
                "5a02c4cd110d20a83a7ce8d1a2b2ae5df252b4e5f6781c7855db5cc28ed2d1b4",
                "93d4e3b443bf11f99a00901222c032bd5f63cf73fc1bcfa40829824d121be9b2",
                "ea40e40c63c6ff155230da64a2c44fcd1f1c9e50cacb752c230f77771ce1d856",
                "87eaabe9c27a82198e618bca20f48f9679c0f239948dbd094005e262da33fe6a",
                "61ffed2bff38648a6d36d6ed560b741b1ca53d45391441124f27e1e48ca04770",
                "bd12a242c6da318fef8f98002efb98efbf434218a78730a197d981bebaee826e",
                "20d3597bb16525b6d09e5fb56feb91b053d961ab156f4807e37d980f50e71aff",
                "344b313ffbc0199ff6ca08cacdaf5dc1d85221e2f2dc156a84245bd49b981673",
                "848b93264edd3f1a521274ca4da4632989eb5303fd15b14e5ec6bcaa91172b05",
                "c6f2046c1e6c172497fc23bd362104e2f4460d0f61984938fa16ef43f27d93f6",
                "693b256b1ee6b6fb353ba23274280e7166ab3be8c23c203cc76d716ba4bc32bf",
                "13c41508c0da03018d61427910b9922345ced25e2bbce50652e939ee6e5ea56d",
                "460ee0ee403be7a4f1eb1c63dd1edaa815fbaa6cf0cf2344dcba4a8acf9aca74",
                "b25148579b99b18c8994b0b86e4dd586975a78fa6e7ad6ec89478d7fbafd2683",
                "90d7ac6a82166c908b8cf1b352f3c9340a8d1f2907d7146fb7cd6354a5436cca",
                "b71d23908e4cf5d6cd973394f3a4b6b164eb1065785feee612efdfd8d30005ed",
            ]
        else:
            self.guardianKeys = [
                "beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"
            ]
            self.guardianPrivKeys = [
                "cfb12303a19cde580bb4dd771639b0d26bc68353645571a8cff516ab2ee113a0"
            ]

        # 64 zero bytes as a hex string; sliced below for field padding.
        self.zeroPadBytes = "00"*64

    def encoder(self, type, val):
        """ABI-encode `val` and return only the hex of its fixed-width
        big-endian representation (the rightmost N hex chars of the padded
        32-byte ABI word)."""
        if type == 'uint8':
            return encode_single(type, val).hex()[62:64]
        if type == 'uint16':
            return encode_single(type, val).hex()[60:64]
        if type == 'uint32':
            return encode_single(type, val).hex()[56:64]
        if type == 'uint64':
            return encode_single(type, val).hex()[64-(16):64]
        if type == 'uint128':
            return encode_single(type, val).hex()[64-(32):64]
        if type == 'uint256' or type == 'bytes32':
            return encode_single(type, val).hex()[64-(64):64]
        # NOTE(review): unhelpful error message for unsupported types.
        raise Exception("you suck")

    def createSignedVAA(self, guardianSetIndex, signers, ts, nonce, emitterChainId, emitterAddress, sequence, consistencyLevel, target, payload):
        """Assemble a VAA body and sign it with each key in `signers`.

        Returned hex layout: version(1) | guardianSetIndex(4) | numSigs(1) |
        signatures | body, where body = ts | nonce | emitterChainId |
        emitterAddress | sequence | consistencyLevel | payload.
        NOTE(review): `target` is accepted but never encoded -- confirm intent.
        """
        print("createSignedVAA: " + str(signers))
        b = ""

        b += self.encoder("uint32", ts)
        b += self.encoder("uint32", nonce)
        b += self.encoder("uint16", emitterChainId)
        b += self.encoder("bytes32", emitterAddress)
        b += self.encoder("uint64", sequence)
        b += self.encoder("uint8", consistencyLevel)
        b += payload

        # Guardians sign the double keccak-256 digest of the body.
        hash = keccak.new(digest_bits=256).update(keccak.new(digest_bits=256).update(bytes.fromhex(b)).digest()).digest()

        signatures = ""

        for i in range(len(signers)):
            # Each entry: guardian index (1 byte) + recoverable secp256k1
            # signature (hex) over the raw digest (hasher=None).
            signatures += self.encoder("uint8", i)

            key = coincurve.PrivateKey(bytes.fromhex(signers[i]))
            signature = key.sign_recoverable(hash, hasher=None)
            signatures += signature.hex()

        ret = self.encoder("uint8", 1)
        ret += self.encoder("uint32", guardianSetIndex)
        ret += self.encoder("uint8", len(signers))
        ret += signatures
        ret += b

        return ret

    def genGuardianSetUpgrade(self, signers, guardianSet, targetSet, nonce, seq):
        """Core governance VAA: upgrade to guardian set `targetSet` using the
        keys currently held in self.guardianKeys."""
        # 32-byte left-padded module name "Core".
        b = self.zeroPadBytes[0:(28*2)]
        b += self.encoder("uint8", ord("C"))
        b += self.encoder("uint8", ord("o"))
        b += self.encoder("uint8", ord("r"))
        b += self.encoder("uint8", ord("e"))
        b += self.encoder("uint8", 2)   # action 2
        b += self.encoder("uint16", 0)  # target chain 0
        b += self.encoder("uint32", targetSet)
        b += self.encoder("uint8", len(self.guardianKeys))

        for i in self.guardianKeys:
            b += i

        # Governance emitter 0x..04, published on chain id 1.
        emitter = bytes.fromhex(self.zeroPadBytes[0:(31*2)] + "04")
        return self.createSignedVAA(guardianSet, signers, int(time.time()), nonce, 1, emitter, seq, 32, 0, b)

    def genGSetFee(self, signers, guardianSet, nonce, seq, amt):
        """Core governance VAA (action 3): set the per-message fee."""
        b = self.zeroPadBytes[0:(28*2)]
        b += self.encoder("uint8", ord("C"))
        b += self.encoder("uint8", ord("o"))
        b += self.encoder("uint8", ord("r"))
        b += self.encoder("uint8", ord("e"))
        b += self.encoder("uint8", 3)   # action 3
        b += self.encoder("uint16", 8)  # target chain 8 (Algorand)
        b += self.encoder("uint256", int(amt)) # a whole algo!

        emitter = bytes.fromhex(self.zeroPadBytes[0:(31*2)] + "04")
        return self.createSignedVAA(guardianSet, signers, int(time.time()), nonce, 1, emitter, seq, 32, 0, b)

    def genGFeePayout(self, signers, guardianSet, targetSet, nonce, seq, amt, dest):
        """Core governance VAA (action 4): pay out `amt` (in algos, converted
        to microalgos) of accumulated fees to address `dest`."""
        b = self.zeroPadBytes[0:(28*2)]
        b += self.encoder("uint8", ord("C"))
        b += self.encoder("uint8", ord("o"))
        b += self.encoder("uint8", ord("r"))
        b += self.encoder("uint8", ord("e"))
        b += self.encoder("uint8", 4)   # action 4
        b += self.encoder("uint16", 8)  # target chain 8 (Algorand)
        b += self.encoder("uint256", int(amt * 1000000))
        b += decode_address(dest).hex()

        emitter = bytes.fromhex(self.zeroPadBytes[0:(31*2)] + "04")
        return self.createSignedVAA(guardianSet, signers, int(time.time()), nonce, 1, emitter, seq, 32, 0, b)

    def getEmitter(self, chain):
        """Return the hardcoded devnet token-bridge emitter (hex) for a chain id."""
        if chain == 1:
            return "ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5"
        if chain == 2:
            return "0000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585"
        if chain == 3:
            return "0000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2"
        if chain == 4:
            return "000000000000000000000000b6f6d86a8f9879a9c87f643768d9efc38c1da6e7"
        if chain == 5:
            return "0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde"
        # NOTE(review): unhelpful error message for unknown chain ids.
        raise Exception("you suck")

    def genRegisterChain(self, signers, guardianSet, nonce, seq, chain, addr = None):
        """TokenBridge governance VAA (action 1): register chain `chain` with
        emitter `addr` (defaults to the hardcoded devnet emitter)."""
        # 32-byte left-padded module name "TokenBridge".
        b = self.zeroPadBytes[0:((32 -11)*2)]
        b += self.encoder("uint8", ord("T"))
        b += self.encoder("uint8", ord("o"))
        b += self.encoder("uint8", ord("k"))
        b += self.encoder("uint8", ord("e"))
        b += self.encoder("uint8", ord("n"))
        b += self.encoder("uint8", ord("B"))
        b += self.encoder("uint8", ord("r"))
        b += self.encoder("uint8", ord("i"))
        b += self.encoder("uint8", ord("d"))
        b += self.encoder("uint8", ord("g"))
        b += self.encoder("uint8", ord("e"))
        b += self.encoder("uint8", 1) # action
        b += self.encoder("uint16", 0) # target chain
        b += self.encoder("uint16", chain)
        if addr == None:
            b += self.getEmitter(chain)
        else:
            b += addr

        emitter = bytes.fromhex(self.zeroPadBytes[0:(31*2)] + "04")
        return self.createSignedVAA(guardianSet, signers, int(time.time()), nonce, 1, emitter, seq, 32, 0, b)

    def genAssetMeta(self, signers, guardianSet, nonce, seq, tokenAddress, chain, decimals, symbol, name):
        """Token-bridge attestation VAA (payload 2) describing an asset.
        symbol/name are bytes, written left-aligned then zero-padded to 32."""
        b  = self.encoder("uint8", 2)
        b += self.zeroPadBytes[0:((32-len(tokenAddress))*2)]
        b += tokenAddress.hex()
        b += self.encoder("uint16", chain)
        b += self.encoder("uint8", decimals)
        b += symbol.hex()
        b += self.zeroPadBytes[0:((32-len(symbol))*2)]
        b += name.hex()
        b += self.zeroPadBytes[0:((32-len(name))*2)]

        emitter = bytes.fromhex(self.getEmitter(chain))
        return self.createSignedVAA(guardianSet, signers, int(time.time()), nonce, 1, emitter, seq, 32, 0, b)

    def genTransfer(self, signers, guardianSet, nonce, seq, amount, tokenAddress, tokenChain, toAddress, toChain, fee):
        """Token-bridge transfer VAA (payload 1); amount and fee are scaled by
        10^8 (the bridge's normalized 8-decimal representation)."""
        b  = self.encoder("uint8", 1)
        b += self.encoder("uint256", int(amount * 100000000))

        b += self.zeroPadBytes[0:((32-len(tokenAddress))*2)]
        b += tokenAddress.hex()

        b += self.encoder("uint16", tokenChain)

        b += self.zeroPadBytes[0:((32-len(toAddress))*2)]
        b += toAddress.hex()

        b += self.encoder("uint16", toChain)

        b += self.encoder("uint256", int(fee * 100000000))

        emitter = bytes.fromhex(self.getEmitter(tokenChain))
        return self.createSignedVAA(guardianSet, signers, int(time.time()), nonce, 1, emitter, seq, 32, 0, b)

    def genVaa(self, emitter, seq, payload):
        """Generic VAA from `emitter` on chain 8 with a random nonce,
        signed by the full configured guardian set (index 1)."""
        nonce = int(random() * 4000000.0)
        return self.createSignedVAA(1, self.guardianPrivKeys, int(time.time()), nonce, 8, emitter, seq, 32, 0, payload.hex())

    def test(self):
        # Smoke test: print a sample signed transfer VAA.
        print(self.genTransfer(self.guardianPrivKeys, 1, 1, 1, 1, bytes.fromhex("4523c3F29447d1f32AEa95BEBD00383c4640F1b4"), 1, decode_address("ROOKEPZMHHBAEH75Y44OCNXQAGTXZWG3PY7IYQQCMXO7IG7DJMVHU32YVI"), 8, 0))
if __name__ == '__main__':
    # Run the smoke test against the full (19-guardian) devnet keyset.
    core = GenTest(True)
    core.test()

212
algorand/local_blob.py Normal file
View File

@ -0,0 +1,212 @@
from typing import Tuple
from pyteal import (
And,
App,
Assert,
Bytes,
BytesZero,
Concat,
Expr,
Extract,
For,
GetByte,
If,
Int,
Itob,
Len,
Or,
ScratchVar,
Seq,
SetByte,
Subroutine,
Substring,
TealType,
)
# Blob geometry: 15 local-state keys are used as "pages"; each page stores
# 127 bytes of payload (see meta() for the separate 16th "meta" key).
_max_keys = 15
_page_size = 128 - 1  # need 1 byte for key
_max_bytes = _max_keys * _page_size  # 1905 addressable bytes
_max_bits = _max_bytes * 8  # 15240 addressable bits

# PyTeal Int-wrapped copies for use inside contract expressions.
max_keys = Int(_max_keys)
page_size = Int(_page_size)
max_bytes = Int(_max_bytes)
def _key_and_offset(idx: Expr) -> Tuple[Expr, Expr]:
    # Translate a global byte index into (page key index, offset within that
    # page's 127-byte payload).
    return idx / page_size, idx % page_size
@Subroutine(TealType.bytes)
def intkey(i: Expr) -> Expr:
    # Map a uint64 page index to its 1-byte local-state key: the low-order
    # byte of itob(i).
    return Extract(Itob(i), Int(7), Int(1))
# TODO: Add Keyspace range?
class LocalBlob:
    """
    Blob is a class holding static methods to work with the local storage of an account as a binary large object

    The `zero` method must be called on an account on opt in, and the local
    schema must provide enough byte-slice entries for the 15 page keys
    written here (plus the separate "meta" key written by `meta`).
    """

    @staticmethod
    @Subroutine(TealType.none)
    def zero(acct: Expr) -> Expr:
        """
        initializes local state of an account to all zero bytes

        This allows us to be lazy later and _assume_ all the strings are the same size
        """
        i = ScratchVar()
        init = i.store(Int(0))
        cond = i.load() < max_keys
        iter = i.store(i.load() + Int(1))
        return For(init, cond, iter).Do(
            App.localPut(acct, intkey(i.load()), BytesZero(page_size))
        )

    @staticmethod
    @Subroutine(TealType.uint64)
    def get_byte(acct: Expr, idx: Expr):
        """
        Get a single byte from local storage of an account by index
        """
        key, offset = _key_and_offset(idx)
        return GetByte(App.localGet(acct, intkey(key)), offset)

    @staticmethod
    @Subroutine(TealType.none)
    def set_byte(acct: Expr, idx: Expr, byte: Expr):
        """
        Set a single byte from local storage of an account by index
        """
        key, offset = _key_and_offset(idx)
        return App.localPut(
            acct, intkey(key), SetByte(App.localGet(acct, intkey(key)), offset, byte)
        )

    @staticmethod
    @Subroutine(TealType.bytes)
    def read(
        acct: Expr, bstart: Expr, bend: Expr
    ) -> Expr:
        """
        read bytes between bstart and bend from local storage of an account by index
        """
        start_key, start_offset = _key_and_offset(bstart)
        stop_key, stop_offset = _key_and_offset(bend)

        key = ScratchVar()
        buff = ScratchVar()

        start = ScratchVar()
        stop = ScratchVar()

        init = key.store(start_key)
        cond = key.load() <= stop_key
        incr = key.store(key.load() + Int(1))

        return Seq(
            buff.store(Bytes("")),
            For(init, cond, incr).Do(
                Seq(
                    # First and last pages may be read partially; pages in
                    # between are read in full.
                    start.store(If(key.load() == start_key, start_offset, Int(0))),
                    stop.store(If(key.load() == stop_key, stop_offset, page_size)),
                    buff.store(
                        Concat(
                            buff.load(),
                            Substring(
                                App.localGet(acct, intkey(key.load())),
                                start.load(),
                                stop.load(),
                            ),
                        )
                    ),
                )
            ),
            buff.load(),
        )

    @staticmethod
    @Subroutine(TealType.none)
    def meta(
        acct: Expr, val: Expr
    ):
        # Tag this storage account under the dedicated "meta" key (separate
        # from the 15 numeric page keys above).
        return Seq(
            App.localPut(acct, Bytes("meta"), val)
        )

    @staticmethod
    @Subroutine(TealType.none)
    def checkMeta(acct: Expr, val: Expr):
        # Assert that the account's "meta" tag equals val.
        # NOTE(review): Int(145) is a constant non-zero operand of And(), so it
        # cannot change the assertion's outcome -- looks like a debug marker;
        # confirm intent.
        return Seq(Assert(And(App.localGet(acct, Bytes("meta")) == val, Int(145))))

    @staticmethod
    @Subroutine(TealType.uint64)
    def write(
        acct: Expr, bstart: Expr, buff: Expr
    ) -> Expr:
        """
        write bytes between bstart and len(buff) to local storage of an account

        Returns the number of bytes written.
        """
        start_key, start_offset = _key_and_offset(bstart)
        stop_key, stop_offset = _key_and_offset(bstart + Len(buff))

        key = ScratchVar()
        start = ScratchVar()
        stop = ScratchVar()
        written = ScratchVar()

        init = key.store(start_key)
        cond = key.load() <= stop_key
        incr = key.store(key.load() + Int(1))

        delta = ScratchVar()

        return Seq(
            written.store(Int(0)),
            For(init, cond, incr).Do(
                Seq(
                    start.store(If(key.load() == start_key, start_offset, Int(0))),
                    stop.store(If(key.load() == stop_key, stop_offset, page_size)),
                    App.localPut(
                        acct,
                        intkey(key.load()),
                        If(
                            Or(stop.load() != page_size, start.load() != Int(0))
                        )  # Its a partial write
                        .Then(
                            # Splice: keep the page's prefix, overwrite the
                            # middle from buff, keep the suffix.
                            Seq(
                                delta.store(stop.load() - start.load()),
                                Concat(
                                    Substring(
                                        App.localGet(acct, intkey(key.load())),
                                        Int(0),
                                        start.load(),
                                    ),
                                    Extract(buff, written.load(), delta.load()),
                                    Substring(
                                        App.localGet(acct, intkey(key.load())),
                                        stop.load(),
                                        page_size,
                                    ),
                                ),
                            )
                        )
                        .Else(
                            # Full-page write straight from buff.
                            Seq(
                                delta.store(page_size),
                                Extract(buff, written.load(), page_size),
                            )
                        ),
                    ),
                    written.store(written.load() + delta.load()),
                )
            ),
            written.load(),
        )

2532
algorand/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

16
algorand/package.json Normal file
View File

@ -0,0 +1,16 @@
{
"dependencies": {
"@certusone/wormhole-sdk": "file:../sdk/js",
"algosdk": "^1.14.0",
"ts-node": "^10.7.0",
"typescript": "^4.6.2",
"varint": "^6.0.0",
"web3-eth-abi": "^1.7.1"
},
"devDependencies": {
"@types/node": "^17.0.23"
},
"scripts": {
"test": "npm run --prefix=../sdk/js build-lib && ts-node test/test.ts"
}
}

27
algorand/requirements.txt Normal file
View File

@ -0,0 +1,27 @@
attrs==21.4.0
cffi==1.15.0
colorama==0.4.4
execnet==1.9.0
future-fstrings==1.2.0
iniconfig==1.1.1
msgpack==1.0.3
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycparser==2.21
pycryptodomex==3.12.0
pydantic==1.9.0
PyNaCl==1.5.0
pyparsing==3.0.6
pyteal==v0.11.1
py-algorand-sdk==1.10.0b1
pytest==6.2.5
pytest-depends==1.0.1
pytest-forked==1.4.0
pytest-xdist==2.5.0
PyYAML==6.0
toml==0.10.2
typing-extensions==4.0.1
uvarint==1.2.0
eth_abi==2.1.1
coincurve==16.0.0

19
algorand/sandbox Executable file
View File

@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -e

# Wrapper that lazily clones the official Algorand sandbox and forwards all
# arguments to its own `sandbox` script. Handy invocations:
# ./sandbox down; ./sandbox clean; ./sandbox up dev; python3 admin.py --devnet --boot
# ./sandbox reset -v; python3 admin.py --devnet --boot
# python3 admin.py --devnet --genTeal
# (cd ../sdk/js; npm run test -- token_bridge)

PARENT_DIR=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
SANDBOX_DIR=$PARENT_DIR/_sandbox

if [ ! -d "$SANDBOX_DIR" ]; then
  echo "Pulling sandbox..."
  # FIX: quote the destination so a path containing spaces doesn't word-split.
  git clone https://github.com/algorand/sandbox.git "$SANDBOX_DIR"
fi

# exec so the sandbox script replaces this wrapper and receives signals directly.
exec "$SANDBOX_DIR/sandbox" "$@"

View File

@ -0,0 +1,12 @@
export ALGOD_CHANNEL=""
export ALGOD_URL="https://github.com/algorand/go-algorand"
export ALGOD_BRANCH="master"
export ALGOD_SHA=""
export NETWORK=""
export NETWORK_TEMPLATE="images/algod/DevModeNetwork.json"
export NETWORK_BOOTSTRAP_URL=""
export NETWORK_GENESIS_FILE=""
export INDEXER_URL="https://github.com/algorand/indexer"
export INDEXER_BRANCH="develop"
export INDEXER_SHA=""
export INDEXER_DISABLED=""

View File

@ -0,0 +1,4 @@
{
"EndpointAddress": ":4001",
"DNSBootstrapID": "<network>.algodev.network"
}

View File

@ -0,0 +1,3 @@
{
"EndpointAddress": ":4001"
}

View File

@ -0,0 +1,4 @@
{
"address":"0.0.0.0:4002",
"allowed_origins":["*"]
}

View File

@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Container entrypoint: bring up kmd, then run algod in the foreground as the
# container's main process.
# Tighten permissions on the kmd directory first — presumably kmd refuses to
# start with looser modes; confirm against kmd docs.
chmod 700 /opt/algorand/node/data/kmd-v0.5
/opt/algorand/node/goal kmd start -d /opt/algorand/node/data
/opt/algorand/node/algod -d /opt/algorand/node/data

View File

@ -0,0 +1,34 @@
FROM ubuntu:18.04

# Release channel consumed by update.sh below (e.g. stable, beta).
ARG channel

ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get install -y ca-certificates curl

# Use a non-privileged user with a random UID / GID for security reasons
RUN groupadd -g 10353 algorand && \
    useradd -m -u 10353 -g algorand algorand && \
    chown -R algorand:algorand /opt && \
    ls -lha /opt

USER algorand

COPY --chown=algorand:algorand ./config/update.sh /tmp

# Install the requested channel via the official updater (-i install, -n no
# autostart), then discard the temporary data dir so the volume starts clean.
RUN \
  set -eux; \
  mkdir /opt/installer ; \
  cd /opt/installer ; \
  mv /tmp/update.sh . ; \
  ./update.sh -i -c $channel -p /opt/algorand/node -d /opt/algorand/node/data.tmp -n ; \
  rm -rf /opt/algorand/node/data.tmp ; \
  mkdir /opt/algorand/node/data

COPY ./config/start.sh /opt/algorand

VOLUME /opt/algorand/node/data

# Set up environment variable to make life easier
ENV PATH="/opt/algorand/node:${PATH}"
ENV ALGORAND_DATA="/opt/algorand/node/data"

ENTRYPOINT [ "/opt/algorand/start.sh" ]

View File

@ -0,0 +1,44 @@
{
"Genesis": {
"ConsensusProtocol": "future",
"NetworkName": "devmodenet",
"Wallets": [
{
"Name": "Wallet1",
"Stake": 40,
"Online": true
},
{
"Name": "Wallet2",
"Stake": 40,
"Online": true
},
{
"Name": "Wallet3",
"Stake": 20,
"Online": true
}
],
"DevMode": true
},
"Nodes": [
{
"Name": "Node",
"IsRelay": false,
"Wallets": [
{
"Name": "Wallet1",
"ParticipationOnly": false
},
{
"Name": "Wallet2",
"ParticipationOnly": false
},
{
"Name": "Wallet3",
"ParticipationOnly": false
}
]
}
]
}

View File

@ -0,0 +1,63 @@
ARG GO_VERSION=1.17.5
FROM golang:$GO_VERSION as algorand-algod

# Support additional root CAs
COPY config.dev cert.pem* /certs/
# Debian
RUN if [ -e /certs/cert.pem ]; then cp /certs/cert.pem /etc/ssl/certs/ca-certificates.crt; fi
# git
RUN if [ -e /certs/cert.pem ]; then git config --global http.sslCAInfo /certs/cert.pem; fi

# Source-build parameters. NOTE(review): CHANNEL is declared but the build
# below always installs from source (URL/BRANCH/SHA) — confirm whether a
# channel-based install path is still wanted.
ARG CHANNEL=master
ARG URL="https://github.com/algorand/go-algorand"
ARG BRANCH=master
ARG SHA=

# When these are set attempt to connect to a network.
ARG GENESIS_FILE=""
ARG BOOTSTRAP_URL=""

# Options for algod config
ARG ALGOD_PORT="4001"
ARG KMD_PORT="4002"
ARG TOKEN="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
ARG TEMPLATE="images/algod/DevModeNetwork.json"

RUN echo "Installing from source. ${URL} -- ${BRANCH}"
# NOTE(review): BIN_DIR expands $HOME from the base image here, before HOME is
# reassigned to /opt below — confirm this ordering is intentional.
ENV BIN_DIR="$HOME/node"
ENV ALGORAND_DATA="/opt/data"

# Basic dependencies.
ENV HOME=/opt
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y apt-utils curl git git-core bsdmainutils python3

# Copy lots of things into the container. The gitignore indicates which directories.
COPY . /tmp

# Install algod binaries.
# FIX: pass the declared build args through instead of hardcoding their default
# values, so URL/BRANCH/SHA overrides at build time actually take effect.
RUN /tmp/images/algod/install.sh \
    -d "${BIN_DIR}" \
    -u "${URL}" \
    -b "${BRANCH}" \
    -s "${SHA}"

# Configure network.
# FIX: likewise use TEMPLATE/TOKEN/ALGOD_PORT/KMD_PORT args, and drop the stray
# extra slash that was in "//tmp/images/algod/DevModeNetwork.json".
RUN /tmp/images/algod/setup.py \
 --bin-dir "$BIN_DIR" \
 --data-dir "/opt/data" \
 --start-script "/opt/start_algod.sh" \
 --network-dir "/opt/testnetwork" \
 --network-template "/tmp/${TEMPLATE}" \
 --network-token "${TOKEN}" \
 --algod-port "${ALGOD_PORT}" \
 --kmd-port "${KMD_PORT}" \
 --bootstrap-url "${BOOTSTRAP_URL}" \
 --genesis-file "/tmp/${GENESIS_FILE}"

ENV PATH="$BIN_DIR:${PATH}"
WORKDIR /opt/data

# Start algod
CMD ["/opt/start_algod.sh"]

View File

@ -0,0 +1,57 @@
{
"Genesis": {
"ConsensusProtocol": "future",
"NetworkName": "",
"Wallets": [
{
"Name": "Wallet1",
"Stake": 10,
"Online": true
},
{
"Name": "Wallet2",
"Stake": 40,
"Online": true
},
{
"Name": "Wallet3",
"Stake": 40,
"Online": false
},
{
"Name": "Wallet4",
"Stake": 10,
"Online": false
}
]
},
"Nodes": [
{
"Name": "Primary",
"IsRelay": true,
"Wallets": [
{
"Name": "Wallet1",
"ParticipationOnly": false
}
]
},
{
"Name": "Node",
"Wallets": [
{
"Name": "Wallet2",
"ParticipationOnly": false
},
{
"Name": "Wallet3",
"ParticipationOnly": false
},
{
"Name": "Wallet4",
"ParticipationOnly": false
}
]
}
]
}

View File

@ -0,0 +1,75 @@
#!/usr/bin/env bash
# Script to install algod in all sorts of different ways.
#
# Parameters:
#    -d    : Location where binaries will be installed.
#    -c    : Channel to install. Mutually exclusive with source options.
#    -u    : Git repository URL. Mutually exclusive with -c.
#    -b    : Git branch. Mutually exclusive with -c.
#    -s    : (optional) Git Commit SHA hash. Mutually exclusive with -c.
set -e

rootdir=$(dirname "$0")
pushd "$rootdir"

BINDIR=""
CHANNEL=""
URL=""
BRANCH=""
SHA=""

while getopts "d:c:u:b:s:" opt; do
  case "$opt" in
    d) BINDIR=$OPTARG; ;;
    c) CHANNEL=$OPTARG; ;;
    u) URL=$OPTARG; ;;
    b) BRANCH=$OPTARG; ;;
    s) SHA=$OPTARG; ;;
  esac
done

# BUG FIX: the original tested the literal string "BINDIR" ([ -z BINDIR ]),
# which is never empty, so a missing -d was silently accepted.
if [ -z "$BINDIR" ]; then
  echo "-d <bindir> is required."
  exit 1
fi

# Quoted operands so empty values don't break the test expressions.
if [ -n "$CHANNEL" ] && [ -n "$BRANCH" ]; then
  echo "Set only one of -c <channel> or -b <branch>"
  exit 1
fi

if [ -n "$BRANCH" ] && [ -z "$URL" ]; then
  echo "If using -b <branch>, must also set -u <git url>"
  exit 1
fi

echo "Installing algod with options:"
echo "  BINDIR = ${BINDIR}"
echo "  CHANNEL = ${CHANNEL}"
echo "  URL = ${URL}"
echo "  BRANCH = ${BRANCH}"
echo "  SHA = ${SHA}"

# Channel install: delegate to the official updater and stop.
if [ -n "$CHANNEL" ]; then
  ./update.sh -i -c "$CHANNEL" -p "$BINDIR" -d "$BINDIR/data" -n
  exit 0
fi

# Source install: clone, optionally pin to a commit, build, install.
if [ -n "$BRANCH" ]; then
  git clone --single-branch --branch "${BRANCH}" "${URL}"
  cd go-algorand
  if [ "${SHA}" != "" ]; then
    echo "Checking out ${SHA}"
    git checkout "${SHA}"
  fi
  git log -n 5
  ./scripts/configure_dev.sh
  make build
  ./scripts/dev_install.sh -p "$BINDIR"
fi

# Sanity check: the installed binary must run.
"$BINDIR"/algod -v

View File

@ -0,0 +1,144 @@
#!/usr/bin/env python3
# Script to help configure and run different algorand configurations.
# Notably this script can configure an algorand installation to run as a
# private network, or as a node connected to a long-running network.
#
# For parameter information run with './setup.py -h'
#
# Parameter error handling is not great with this script. It won't complain
# if you provide unused or extraneous parameters.
import argparse
import os
import pprint
import shutil
import subprocess
import tarfile
import time
import json
import urllib.request
from os.path import expanduser, join
from typing import List
parser = argparse.ArgumentParser(description='''\
Configure private network for SDK and prepare it to run. A start script and
symlink to data directory will be generated to make it easier to use.''')
parser.add_argument('--bin-dir', required=True, help='Location to install algod binaries.')
parser.add_argument('--data-dir', required=True, help='Location to place a symlink to the data directory.')
parser.add_argument('--start-script', required=True, help='Path to start script, including the script name.')
parser.add_argument('--network-template', required=True, help='Path to private network template file.')
parser.add_argument('--network-token', required=True, help='Valid token to use for algod/kmd.')
parser.add_argument('--algod-port', required=True, help='Port to use for algod.')
parser.add_argument('--kmd-port', required=True, help='Port to use for kmd.')
parser.add_argument('--network-dir', required=True, help='Path to create network.')
parser.add_argument('--bootstrap-url', required=True, help='DNS Bootstrap URL, empty for private networks.')
parser.add_argument('--genesis-file', required=True, help='Genesis file used by the network.')
pp = pprint.PrettyPrinter(indent=4)
def algod_directories(network_dir):
    """Return ``(data_dir, kmd_dir)`` for the node under ``network_dir``.

    ``data_dir`` is always ``<network_dir>/Node``. ``kmd_dir`` is the first
    ``kmd*`` entry found inside it; if none exists yet (algod hasn't run),
    a fresh ``kmd-v0.5`` directory is created and returned.
    """
    node_data = join(network_dir, 'Node')
    kmd_candidates = [name for name in os.listdir(node_data) if name.startswith('kmd')]
    if kmd_candidates:
        kmd_path = join(node_data, kmd_candidates[0])
    else:
        # Real-network setup: algod hasn't started yet, so the kmd dir is missing.
        kmd_path = join(node_data, 'kmd-v0.5')
        os.mkdir(kmd_path)
    return node_data, kmd_path
def create_real_network(bin_dir, network_dir, template, genesis_file) -> List[str]:
    """Prepare a node directory for joining an existing (real) network.

    Resets ``<network_dir>/Node``, copies the genesis file into it, and
    returns the shell commands that start algod and kmd against it.
    ``template`` is accepted for signature parity with the private-network
    path but is not used here.
    """
    # FIX: removed unused local `data_dir_src` (was computed and never read).
    target = join(network_dir, 'Node')
    # Reset in case it exists
    if os.path.exists(target):
        shutil.rmtree(target)
    os.makedirs(target, exist_ok=True)
    # Copy in the genesis file...
    shutil.copy(genesis_file, target)
    data_dir, kmd_dir = algod_directories(network_dir)
    return ['%s/goal node start -d %s' % (bin_dir, data_dir),
            '%s/kmd start -t 0 -d %s' % (bin_dir, kmd_dir)]
def create_private_network(bin_dir, network_dir, template) -> List[str]:
    """
    Create a private network.

    Deletes any previous network at ``network_dir``, generates a new one from
    ``template`` via ``goal network create``, and returns the shell commands
    that start the network and kmd.
    """
    # Reset network dir before creating a new one.
    # FIX: use the `network_dir` parameter instead of reaching into the global
    # `args` object, so the function behaves correctly for any caller.
    if os.path.exists(network_dir):
        shutil.rmtree(network_dir)
    # Use goal to create the private network.
    subprocess.check_call(['%s/goal network create -n sandnet -r %s -t %s' % (bin_dir, network_dir, template)], shell=True)
    data_dir, kmd_dir = algod_directories(network_dir)
    return ['%s/goal network start -r %s' % (bin_dir, network_dir),
            '%s/kmd start -t 0 -d %s' % (bin_dir, kmd_dir)]
def configure_data_dir(network_dir, token, algod_port, kmd_port, bootstrap_url):
    """Write API tokens and port/bootstrap config into the node and kmd dirs."""
    node_dir, kmd_dir = algod_directories(network_dir)

    def _write(path, contents):
        # Small helper so each config file below is a single readable line.
        with open(path, 'w') as handle:
            handle.write(contents)

    # Identical token for both algod and kmd.
    _write(join(node_dir, 'algod.token'), token)
    _write(join(kmd_dir, 'kmd.token'), token)
    # algod config with the requested port and bootstrap ID injected.
    _write(join(node_dir, 'config.json'),
           '{ "Version": 12, "GossipFanout": 1, "EndpointAddress": "0.0.0.0:%s", "DNSBootstrapID": "%s", "IncomingConnectionsLimit": 0, "Archival":false, "isIndexerActive":false, "EnableDeveloperAPI":true}' % (algod_port, bootstrap_url))
    # kmd config, CORS wide open for devnet use.
    _write(join(kmd_dir, 'kmd_config.json'),
           '{ "address":"0.0.0.0:%s", "allowed_origins":["*"]}' % kmd_port)
if __name__ == '__main__':
    args = parser.parse_args()
    print('Configuring network with the following arguments:')
    pp.pprint(vars(args))

    # Setup network: no genesis file (or a directory path) means private mode.
    # FIX: `is None` instead of `== None` (PEP 8 identity comparison).
    privateNetworkMode = args.genesis_file is None or args.genesis_file == '' or os.path.isdir(args.genesis_file)
    if privateNetworkMode:
        print('Creating a private network.')
        startCommands = create_private_network(args.bin_dir, args.network_dir, args.network_template)
    else:
        # FIX: corrected "retwork" typo in the user-facing message.
        print('Setting up real network.')
        startCommands = create_real_network(args.bin_dir, args.network_dir, args.network_template, args.genesis_file)

    # Write start script that launches the node and then idles forever.
    print(f'Start commands for {args.start_script}:')
    pp.pprint(startCommands)
    with open(args.start_script, 'w') as f:
        f.write('#!/usr/bin/env bash\n')
        for line in startCommands:
            f.write(f'{line}\n')
        f.write('sleep infinity\n')
    os.chmod(args.start_script, 0o755)

    # Create symlink so callers can reach the data dir at a stable path.
    data_dir, _ = algod_directories(args.network_dir)
    print(f'Creating symlink {args.data_dir} -> {data_dir}')
    os.symlink(data_dir, args.data_dir)

    # Configure network tokens/ports.
    configure_data_dir(args.network_dir, args.network_token, args.algod_port, args.kmd_port, args.bootstrap_url)

View File

@ -0,0 +1,56 @@
{
"Genesis": {
"NetworkName": "",
"Wallets": [
{
"Name": "Wallet1",
"Stake": 10,
"Online": true
},
{
"Name": "Wallet2",
"Stake": 40,
"Online": true
},
{
"Name": "Wallet3",
"Stake": 40,
"Online": false
},
{
"Name": "Wallet4",
"Stake": 10,
"Online": false
}
]
},
"Nodes": [
{
"Name": "Primary",
"IsRelay": true,
"Wallets": [
{
"Name": "Wallet1",
"ParticipationOnly": false
}
]
},
{
"Name": "Node",
"Wallets": [
{
"Name": "Wallet2",
"ParticipationOnly": false
},
{
"Name": "Wallet3",
"ParticipationOnly": false
},
{
"Name": "Wallet4",
"ParticipationOnly": false
}
]
}
]
}

View File

@ -0,0 +1,676 @@
#!/bin/bash
# This is a copy of the standalone update script.
# The latest version is available on github:
#
# https://github.com/algorand/go-algorand/blob/97fb6a0fd3f74bcdb3fa0ac8ee49028cecba6e4f/cmd/updater/update.sh
FILENAME=$(basename -- "$0")
SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
UPDATETYPE="update"
INSTALLOPT="-u"
RESUME_INSTALL=0
BINDIR=""
CHANNEL=""
DATADIRS=()
NOSTART=""
BINDIRSPEC="-p ${SCRIPTPATH}"
DATADIRSPEC=""
TESTROLLBACK=""
UNKNOWNARGS=()
HOSTEDFLAG=""
HOSTEDSPEC=""
BUCKET=""
GENESIS_NETWORK_DIR=""
GENESIS_NETWORK_DIR_SPEC=""
SKIP_UPDATE=0
TOOLS_OUTPUT_DIR=""
set -o pipefail
# If someone set the environment variable asking us to cleanup
# when we're done, install a trap to do so
# We use an environment variable instead of an arg because
# older scripts won't recognize it and will fail (an issue with tests)
if [ "${CLEANUP_UPDATE_TEMP_DIR}" != "" ]; then
trap "rm -rf ${CLEANUP_UPDATE_TEMP_DIR}" 0
fi
while [ "$1" != "" ]; do
case "$1" in
-i)
UPDATETYPE="install"
INSTALLOPT="-i"
;;
-u)
UPDATETYPE="update"
INSTALLOPT="-u"
;;
-m)
UPDATETYPE="migrate"
INSTALLOPT="-m"
;;
-r)
RESUME_INSTALL=1
;;
-c)
shift
CHANNEL="$1"
;;
-d)
shift
THISDIR=$1
mkdir -p ${THISDIR} >/dev/null
pushd ${THISDIR} >/dev/null
THISDIR=$(pwd -P)
popd >/dev/null
DATADIRS+=(${THISDIR})
DATADIRSPEC+="-d ${THISDIR} "
;;
-p)
shift
BINDIR="$1"
BINDIRSPEC="-p $1"
;;
-n)
NOSTART="-n"
;;
-testrollback)
TESTROLLBACK=1
;;
-hosted)
HOSTEDFLAG="-H"
HOSTEDSPEC="-hosted"
;;
-g)
shift
GENESIS_NETWORK_DIR=$1
GENESIS_NETWORK_DIR_SPEC="-g $1"
;;
-b)
shift
BUCKET="-b $1"
;;
-s)
SKIP_UPDATE=1
;;
-gettools)
shift
TOOLS_OUTPUT_DIR=$1
;;
*)
echo "Unknown option" "$1"
UNKNOWNARGS+=("$1")
;;
esac
shift
done
# If this is an update, we'll validate that before doing anything else.
# If this is an install, we'll create it.
if [ ${RESUME_INSTALL} -eq 0 ]; then
if [ "${BINDIR}" = "" ]; then
BINDIR="${SCRIPTPATH}"
fi
fi
# If -d not specified, don't default any more
if [ "${#DATADIRS[@]}" -eq 0 ]; then
echo "You must specify at least one data directory with `-d`"
exit 1
fi
CURRENTVER=0
ROLLBACK=0
ROLLBACKBIN=0
ROLLBACKDATA=()
NEW_LEDGER=0
RESTART_NODE=0
# Returns 0 when ${BINDIR} contains an algod binary (i.e. looks like a valid
# install that can be updated); prints a diagnostic and returns 1 otherwise.
function check_install_valid() {
    # Check for key files that indicate a valid install that can be updated
    if [ ! -f "${BINDIR}/algod" ]; then
        echo "Missing ${BINDIR}/algod"
        return 1
    fi

    return 0
}
function validate_channel_specified() {
if [ "${CHANNEL}" = "" ]; then
CHANNEL="$((${BINDIR}/algod -c) | head -n 1)"
if [ "${CHANNEL}" = "" ]; then
echo "Unable to determine release channel - please run again with -c <channel>"
return 1
fi
fi
}
function determine_current_version() {
CURRENTVER="$(( ${BINDIR}/algod -v 2>/dev/null || echo 0 ) | head -n 1)"
echo Current Version = ${CURRENTVER}
}
function get_updater_url() {
local UNAME
local OS
local ARCH
UNAME=$(uname)
if [[ "${UNAME}" = "Darwin" ]]; then
OS="darwin"
UNAME=$(uname -m)
if [[ "${UNAME}" = "x86_64" ]]; then
ARCH="amd64"
else
echo "This platform ${UNAME} is not supported by updater."
exit 1
fi
elif [[ "${UNAME}" = "Linux" ]]; then
OS="linux"
UNAME=$(uname -m)
if [[ "${UNAME}" = "x86_64" ]]; then
ARCH="amd64"
elif [[ "${UNAME}" = "armv6l" ]]; then
ARCH="arm"
elif [[ "${UNAME}" = "armv7l" ]]; then
ARCH="arm"
elif [[ "${UNAME}" = "aarch64" ]]; then
ARCH="arm64"
else
echo "This platform ${UNAME} is not supported by updater."
exit 1
fi
else
echo "This operation system ${UNAME} is not supported by updater."
exit 1
fi
UPDATER_FILENAME="install_master_${OS}-${ARCH}.tar.gz"
UPDATER_URL="https://github.com/algorand/go-algorand-doc/raw/master/downloads/installers/${OS}_${ARCH}/${UPDATER_FILENAME}"
}
# check to see if the binary updater exists. if not, it will automatically the correct updater binary for the current platform
function check_for_updater() {
# check if the updater binary exist.
if [ -f "${SCRIPTPATH}/updater" ]; then
return 0
fi
get_updater_url
# check the curl is available.
CURL_VER=$(curl -V 2>/dev/null || true)
if [ "${CURL_VER}" = "" ]; then
# no curl is installed.
echo "updater binary is missing and cannot be downloaded since curl is missing."
if [[ "$(uname)" = "Linux" ]]; then
echo "To install curl, run the following command:"
echo "apt-get update; apt-get install -y curl"
fi
exit 1
fi
CURL_OUT=$(curl -LJO --silent ${UPDATER_URL})
if [ "$?" != "0" ]; then
echo "failed to download updater binary from ${UPDATER_URL} using curl."
echo "${CURL_OUT}"
exit 1
fi
if [ ! -f "${SCRIPTPATH}/${UPDATER_FILENAME}" ]; then
echo "downloaded file ${SCRIPTPATH}/${UPDATER_FILENAME} is missing."
exit
fi
tar -zxvf "${SCRIPTPATH}/${UPDATER_FILENAME}" updater
if [ "$?" != "0" ]; then
echo "failed to extract updater binary from ${SCRIPTPATH}/${UPDATER_FILENAME}"
exit 1
fi
rm -f "${SCRIPTPATH}/${UPDATER_FILENAME}"
echo "updater binary was downloaded"
}
function check_for_update() {
determine_current_version
check_for_updater
LATEST="$(${SCRIPTPATH}/updater ver check -c ${CHANNEL} ${BUCKET} | sed -n '2 p')"
if [ $? -ne 0 ]; then
echo "No remote updates found"
return 1
fi
echo Latest Version = ${LATEST}
if [ ${CURRENTVER} -ge ${LATEST} ]; then
if [ "${UPDATETYPE}" = "install" ]; then
echo No new version found - forcing install anyway
else
echo No new version found
return 1
fi
fi
echo New version found
return 0
}
function download_tools_update() {
local TOOLS_SPECIFIC_VERSION=$1
echo "downloading tools update ${TOOLS_SPECIFIC_VERSION}"
TOOLS_TEMPDIR=$(mktemp -d 2>/dev/null || mktemp -d -t "tmp")
export TOOLS_CLEANUP_UPDATE_TEMP_DIR=${TOOLS_TEMPDIR}
trap "rm -rf ${TOOLS_CLEANUP_UPDATE_TEMP_DIR}" 0
TOOLS_TARFILE=${TOOLS_TEMPDIR}/${LATEST}.tar.gz
if ( ! "${SCRIPTPATH}"/updater gettools -c "${CHANNEL}" -o "${TOOLS_TARFILE}" "${BUCKET}" "${TOOLS_SPECIFIC_VERSION}" ) ; then
echo "Error downloading tools tarfile"
exit 1
fi
echo "Tools tarfile downloaded to ${TOOLS_TARFILE}"
mkdir -p "${TOOLS_OUTPUT_DIR}"
if ( ! tar -xf "${TOOLS_TARFILE}" -C "${TOOLS_OUTPUT_DIR}" ) ; then
echo "Error extracting the tools update file ${TOOLS_TARFILE}"
exit 1
fi
echo "Tools extracted to ${TOOLS_OUTPUT_DIR}"
}
TEMPDIR=""
TARFILE=""
UPDATESRCDIR=""
function download_update() {
SPECIFIC_VERSION=$1
if [ -n "${TOOLS_OUTPUT_DIR}" ]; then
download_tools_update "${SPECIFIC_VERSION}"
fi
TEMPDIR=$(mktemp -d 2>/dev/null || mktemp -d -t "tmp")
export CLEANUP_UPDATE_TEMP_DIR=${TEMPDIR}
trap "rm -rf ${CLEANUP_UPDATE_TEMP_DIR}" 0
TARFILE=${TEMPDIR}/${LATEST}.tar.gz
UPDATESRCDIR=${TEMPDIR}/a
mkdir ${UPDATESRCDIR}
${SCRIPTPATH}/updater ver get -c ${CHANNEL} -o ${TARFILE} ${BUCKET} ${SPECIFIC_VERSION}
if [ $? -ne 0 ]; then
echo Error downloading update file
exit 1
fi
echo Update Downloaded to ${TARFILE}
}
function check_and_download_update() {
check_for_update
if [ $? -ne 0 ]; then return 1; fi
download_update
}
function download_update_for_current_version() {
determine_current_version
echo "Downloading update package for current version ${CURRENTVER}..."
download_update "-v ${CURRENTVER}"
}
function expand_update() {
echo Expanding update...
tar -zxof ${TARFILE} -C ${UPDATESRCDIR}
if [ $? -ne 0 ]; then return 1; fi
validate_update
}
function validate_update() {
echo Validating update...
# We should consider including a version.info file
# that we can compare against the expected version
return 0
}
function shutdown_node() {
echo Stopping node...
if [ "$(pgrep -x algod)" != "" ] || [ "$(pgrep -x kmd)" != "" ] ; then
if [ -f ${BINDIR}/goal ]; then
for DD in ${DATADIRS[@]}; do
if [ -f ${DD}/algod.pid ] || [ -f ${DD}/**/kmd.pid ] ; then
echo Stopping node and waiting...
sudo -n systemctl stop algorand@$(systemd-escape ${DD})
${BINDIR}/goal node stop -d ${DD}
sleep 5
else
echo "Node is running but not in ${DD} - not stopping"
# Clean up zombie (algod|kmd).net files
rm -f ${DD}/algod.net ${DD}/**/kmd.net
fi
done
fi
else
echo ... node not running
fi
RESTART_NODE=1
}
function backup_binaries() {
echo Backing up current binary files...
mkdir -p ${BINDIR}/backup
BACKUPFILES="algod kmd carpenter doberman goal update.sh updater diagcfg"
# add node_exporter to the files list we're going to backup, but only we if had it previously deployed.
[ -f ${BINDIR}/node_exporter ] && BACKUPFILES="${BACKUPFILES} node_exporter"
tar -zcf ${BINDIR}/backup/bin-v${CURRENTVER}.tar.gz -C ${BINDIR} ${BACKUPFILES} >/dev/null 2>&1
}
function backup_data() {
CURDATADIR=$1
BACKUPDIR="${CURDATADIR}/backup"
echo "Backing up current data files from ${CURDATADIR}..."
mkdir -p ${BACKUPDIR}
BACKUPFILES="genesis.json wallet-genesis.id"
tar --no-recursion --exclude='*.log' --exclude='*.log.archive' --exclude='*.tar.gz' -zcf ${BACKUPDIR}/data-v${CURRENTVER}.tar.gz -C ${CURDATADIR} ${BACKUPFILES} >/dev/null 2>&1
}
function backup_current_version() {
backup_binaries
for DD in ${DATADIRS[@]}; do
backup_data ${DD}
done
}
function rollback_binaries() {
echo "Rolling back binary files..."
tar -zxof ${BINDIR}/backup/bin-v${CURRENTVER}.tar.gz -C ${BINDIR}
}
function rollback_data() {
CURDATADIR=$1
BACKUPDIR="${CURDATADIR}/backup"
echo "Rolling back data files in ${CURDATADIR}..."
rm ${CURDATADIR}/wallet-genesis.id
tar -zxof ${BACKUPDIR}/data-v${CURRENTVER}.tar.gz -C ${CURDATADIR}
}
function install_new_binaries() {
if [ ! -d ${UPDATESRCDIR}/bin ]; then
return 0
else
echo Installing new binary files...
ROLLBACKBIN=1
rm -rf ${BINDIR}/new
mkdir ${BINDIR}/new
cp ${UPDATESRCDIR}/bin/* ${BINDIR}/new
mv ${BINDIR}/new/* ${BINDIR}
rm -rf ${BINDIR}/new
fi
}
function reset_wallets_for_new_ledger() {
CURDATADIR=$1
echo "New Ledger - restoring genesis accounts in ${CURDATADIR}"
pushd ${CURDATADIR} >/dev/null
mkdir -p "${NEW_VER}"
for file in *.partkey *.rootkey; do
if [ -e "${file}" ]; then
cp "${file}" "${NEW_VER}/${file}"
echo 'Installed genesis account file: ' "${file}"
fi
done
popd >/dev/null
}
function import_rootkeys() {
CURDATADIR=$1
echo "New Ledger - importing rootkeys for genesis accounts"
${BINDIR}/goal account importrootkey -u -d ${CURDATADIR}
}
function install_new_data() {
if [ ! -d ${UPDATESRCDIR}/data ]; then
return 0
else
CURDATADIR=$1
echo "Installing new data files into ${CURDATADIR}..."
ROLLBACKDATA+=(${CURDATADIR})
cp ${UPDATESRCDIR}/data/* ${CURDATADIR}
fi
}
function copy_genesis_files() {
echo "Copying genesis files locally"
cp -rf ${UPDATESRCDIR}/genesis/ ${BINDIR}/genesisfiles/
}
function check_for_new_ledger() {
CURDATADIR=$1
echo "Checking for new ledger in ${CURDATADIR}"
EXISTING_VER=$(${UPDATESRCDIR}/bin/algod -d ${CURDATADIR} -g ${CURDATADIR}/genesis.json -G)
if [ -z $EXISTING_VER ]; then
if [ -z ${GENESIS_NETWORK_DIR} ]; then
echo "Updating genesis files for default network"
else
echo "Installing genesis files for network ${GENESIS_NETWORK_DIR}"
fi
else
GENESIS_SPLIT=(${EXISTING_VER//-/ })
GENESIS_NETWORK_DIR=${GENESIS_SPLIT[0]}
echo "Updating genesis files for network ${GENESIS_NETWORK_DIR}"
# If that genesis dir doesn't exist, use the default file - this is likely a custom network build
if [ ! -d ${UPDATESRCDIR}/genesis/${GENESIS_NETWORK_DIR} ]; then
GENESIS_NETWORK_DIR=""
fi
fi
NEW_VER=$(${UPDATESRCDIR}/bin/algod -d ${CURDATADIR} -g ${UPDATESRCDIR}/genesis/${GENESIS_NETWORK_DIR}/genesis.json -G)
if [ $? -ne 0 ]; then
echo "Cannot determine new genesis ID. Not updating. This may be a problem!"
return 1
fi
# Copy new genesis.json even if version didn't change; we might have
# changed the file itself in a compatible way.
cp ${UPDATESRCDIR}/genesis/${GENESIS_NETWORK_DIR}/genesis.json ${CURDATADIR}
echo ${NEW_VER} > ${CURDATADIR}/wallet-genesis.id
if [ "${NEW_VER}" != "${EXISTING_VER}" ]; then
echo "New genesis ID, resetting wallets"
NEW_LEDGER=1
reset_wallets_for_new_ledger ${CURDATADIR}
import_rootkeys ${CURDATADIR}
fi
}
# Delete all legacy node logs (node-*.log and archived variants) from the
# given data directory. Always succeeds; missing files are ignored via rm -f.
function clean_legacy_logs() {
    CURDATADIR=$1

    echo "Deleting existing log files in ${CURDATADIR}"
    rm -f ${CURDATADIR}/node-*.log
    rm -f ${CURDATADIR}/node-*.log.archive
    return 0
}
function startup_node() {
if [ "${NOSTART}" != "" ]; then
echo Auto-start node disabled - not starting
return
fi
CURDATADIR=$1
echo Starting node in ${CURDATADIR}...
check_install_valid
if [ $? -ne 0 ]; then
fail_and_exit "Installation does not appear to be valid"
fi
sudo -n systemctl start algorand@$(systemd-escape ${CURDATADIR})
if [ $? -ne 0 ]; then
${BINDIR}/goal node start -d ${CURDATADIR} ${HOSTEDFLAG}
fi
}
function startup_nodes() {
for DD in ${DATADIRS[@]}; do
startup_node ${DD}
done
}
function rollback() {
echo Rolling back from failed update...
if [ ${ROLLBACKBIN} -ne 0 ]; then
rollback_binaries
fi
for ROLLBACKDIR in ${ROLLBACKDATA[@]}; do
rollback_data ${ROLLBACKDIR}
done
}
function fail_and_exit() {
echo "*** UPDATE FAILED: $1 ***"
if [ ${ROLLBACK} -ne 0 ]; then
ROLLBACK=0
rollback
check_install_valid
if [ ${RESTART_NODE} -ne 0 ]; then
startup_nodes
fi
exit 0
fi
exit 1
}
function apply_fixups() {
echo "Applying migration fixups..."
# Delete obsolete algorand binary - renamed to 'goal'
rm ${BINDIR}/algorand >/dev/null 2>&1
for DD in ${DATADIRS[@]}; do
clean_legacy_logs ${DD}
# Purge obsolete cadaver files (now agreement.cdv[.archive])
rm -f ${DD}/service*.cadaver
done
}
#--------------------------------------------
# Main Update Driver
# Need to verify the bindir was specified (with -p)
# and that it's a valid directory.
# Unless it's an install
if [ ! -d "${BINDIR}" ]; then
if [ "${UPDATETYPE}" = "install" ]; then
mkdir -p ${BINDIR}
else
fail_and_exit "Missing or invalid binaries path specified '${BINDIR}'"
fi
fi
if [ "${UPDATETYPE}" != "install" ]; then
check_install_valid
if [ $? -ne 0 ]; then
echo "Unable to perform an update - installation does not appear valid"
exit 1
fi
fi
# If we're initiating an update/install, check for an update and if we have a new one,
# expand it and invoke the new update.sh script.
if [ ${RESUME_INSTALL} -eq 0 ]; then
validate_channel_specified
if [ "${UPDATETYPE}" = "migrate" ]; then
download_update_for_current_version
else
check_and_download_update
fi
if [ $? -ne 0 ]; then
# No update - stop here
exit $?
fi
expand_update
if [ $? -ne 0 ]; then
fail_and_exit "Error expanding update"
fi
# Spawn the new update script and exit - this allows us to push update.sh changes that take effect immediately
# Note that the SCRIPTPATH we're passing in should be our binaries directory, which is what we expect to be
# passed as the last argument (if any)
echo "Starting the new update script to complete the installation..."
exec "${UPDATESRCDIR}/bin/${FILENAME}" ${INSTALLOPT} -r -c ${CHANNEL} ${DATADIRSPEC} ${NOSTART} ${BINDIRSPEC} ${HOSTEDSPEC} ${GENESIS_NETWORK_DIR_SPEC} "${UNKNOWNARGS[@]}"
# If we're still here, exec failed.
fail_and_exit "Error executing the new update script - unable to continue"
else
# We're running the script from our expanded update, which is located in the last script's ${TEMPDIR}/a/bin
# We need to define our TEMPDIR and UPDATESRCDIR to match those values; we do so by making them relative
# to where our resuming script lives.
TEMPDIR=${SCRIPTPATH}/../..
UPDATESRCDIR=${SCRIPTPATH}/..
echo "... Resuming installation from the latest update script"
determine_current_version
fi
# Shutdown node before backing up so data is consistent and files aren't locked / in-use.
shutdown_node
if [ ${SKIP_UPDATE} -eq 0 ]; then
backup_current_version
fi
# We don't care about return code - doesn't matter if we failed to archive
ROLLBACK=1
install_new_binaries
if [ $? -ne 0 ]; then
fail_and_exit "Error installing new files"
fi
for DD in ${DATADIRS[@]}; do
install_new_data ${DD}
if [ $? -ne 0 ]; then
fail_and_exit "Error installing data files into ${DD}"
fi
done
copy_genesis_files
for DD in ${DATADIRS[@]}; do
check_for_new_ledger ${DD}
if [ $? -ne 0 ]; then
fail_and_exit "Error updating ledger in ${DD}"
fi
done
if [ "${TESTROLLBACK}" != "" ]; then
fail_and_exit "Simulating update failure - rolling back"
fi
apply_fixups
if [ "${NOSTART}" != "" ]; then
echo "Install complete - restart node manually"
else
startup_nodes
fi
exit 0

View File

@ -0,0 +1,34 @@
ARG GO_VERSION=1.17.5
FROM golang:$GO_VERSION-alpine

# Support additional root CAs
COPY config.dev cert.pem* /certs/
# Debian-style CA bundle path (original note: takes top priority for Go)
RUN if [ -e /certs/cert.pem ]; then cp /certs/cert.pem /etc/ssl/certs/ca-certificates.crt; fi
# Alpine
RUN if [ -e /certs/cert.pem ]; then cp /certs/cert.pem /etc/ssl/cert.pem; fi

# Environment variables used by install.sh
ARG URL=https://github.com/algorand/indexer
ARG BRANCH=master
ARG SHA=""

ENV HOME /opt/indexer
WORKDIR /opt/indexer

ENV DEBIAN_FRONTEND noninteractive
RUN apk add --no-cache git bzip2 make bash libtool boost-dev autoconf automake g++ postgresql

# Support additional root CAs
# git
RUN if [ -e /certs/cert.pem ]; then git config --global http.sslCAInfo /certs/cert.pem; fi

# Copy files to container.
COPY images/indexer/disabled.go /tmp/disabled.go
COPY images/indexer/start.sh /tmp/start.sh
COPY images/indexer/install.sh /tmp/install.sh

# Install indexer binaries.
RUN /tmp/install.sh

CMD ["/tmp/start.sh"]

View File

@ -0,0 +1,32 @@
package main
import (
"flag"
"fmt"
"net/http"
"os"
)
// main runs a tiny HTTP server that answers every request with a fixed
// status code and message — used as a stand-in when the real indexer is
// disabled. Flags: -port (default 8980), -code (default 400), -message.
func main() {
	var port int
	var code int
	var message string

	flag.IntVar(&port, "port", 8980, "port to start the server on.")
	flag.IntVar(&code, "code", 400, "response code to use.")
	flag.StringVar(&message, "message", "this is all that happens", "response message to display.")
	flag.Parse()

	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(code)
		fmt.Fprintf(w, "%s\n", message)
	})

	fmt.Printf("Starting server at port %d\nResponse code (%d)\nmessage (%s)\n", port, code, message)
	if err := http.ListenAndServe(fmt.Sprintf(":%d", port), nil); err != nil {
		// BUG FIX: the error was previously passed as the format string
		// (fmt.Fprintf(os.Stderr, err.Error())), which misrenders any '%'
		// verbs in the error text and fails `go vet`.
		fmt.Fprintln(os.Stderr, err)
	}
}

View File

@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Build indexer and put binary in /tmp.
#
# Configured with environment variables:
#   URL    - Git repository URL.
#   BRANCH - Git branch to clone.
#   SHA    - (optional) Specific commit hash to checkout.
set -e

# Sometimes indexer is disabled, detect the missing build config.
if [ -z "${BRANCH}" ] || [ -z "${URL}" ]; then
  echo "Missing BRANCH or URL environment variable. Skipping install."
  exit 0
fi

git clone --single-branch --branch "${BRANCH}" "${URL}" indexer-git
cd indexer-git

# BUG FIX: the checkout must run inside the freshly cloned repo; previously it
# executed before `cd indexer-git`, operating on the wrong directory.
if [ "${SHA}" != "" ]; then
  echo "Checking out ${SHA}"
  git checkout "${SHA}"
fi

make
cp cmd/algorand-indexer/algorand-indexer /tmp

View File

@ -0,0 +1,87 @@
#!/bin/bash
# Start indexer daemon. There are various configurations controlled by
# environment variables.
#
# Configuration:
#   DISABLED          - If set start a server that returns an error instead of indexer.
#   CONNECTION_STRING - the postgres connection string to use.
#   SNAPSHOT          - snapshot to import, if set don't connect to algod.
#   PORT              - port to start indexer on.
#   ALGOD_ADDR        - host:port to connect to for algod.
#   ALGOD_TOKEN       - token to use when connecting to algod.
# NOTE(review): despite the list above, the exports below unconditionally
# overwrite PORT/CONNECTION_STRING/ALGOD_ADDR/ALGOD_TOKEN — callers cannot
# actually override them via the environment. Confirm whether these should
# use ${VAR:-default} instead.
export PORT="8980"
export CONNECTION_STRING="host=localhost port=5432 user=algorand password=algorand dbname=indexer_db sslmode=disable"
export ALGOD_ADDR="localhost:4001"
export ALGOD_TOKEN="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"

set -e
set -x
# Run indexer in writer mode, following the local algod node.
start_with_algod() {
echo "Starting indexer against algod."
# Retry the genesis fetch; algod may still be starting up.
for i in 1 2 3 4 5; do
wget "${ALGOD_ADDR}"/genesis -O genesis.json && break
echo "Algod not responding... waiting."
sleep 15
done
if [ ! -f genesis.json ]; then
echo "Failed to create genesis file!"
exit 1
fi
# Kept for reference: manual DB reset commands against the compose postgres.
# PGPASSWORD=algorand psql --host=algo-indexer-db --port=5432 --username=algorand --dbname=indexer_db -c "DROP DATABASE IF EXISTS postgres"
# PGPASSWORD=algorand psql --host=algo-indexer-db --port=5432 --username=algorand --dbname=indexer_db -c "DROP DATABASE IF EXISTS template0"
# PGPASSWORD=algorand psql --host=algo-indexer-db --port=5432 --username=algorand --dbname=indexer_db -c "DROP DATABASE IF EXISTS template1"
# PGPASSWORD=algorand psql --host=algo-indexer-db --port=5432 --username=algorand -c "DROP DATABASE IF EXISTS indexer_db"
# PGPASSWORD=algorand psql --host=algo-indexer-db --port=5432 --username=algorand -c "CREATE DATABASE indexer_db"
# NOTE(review): stdout is appended to /tmp/command.txt while --logfile goes
# to /dev/stdout — confirm this capture is intentional.
/tmp/algorand-indexer daemon \
--dev-mode \
--server ":$PORT" \
--enable-all-parameters \
-P "$CONNECTION_STRING" \
--algod-net "${ALGOD_ADDR}" \
--algod-token "${ALGOD_TOKEN}" \
--genesis "genesis.json" \
--logfile "/dev/stdout" >> /tmp/command.txt
}
# Import a pre-built snapshot into postgres, then serve it read-only
# (no algod connection).
import_and_start_readonly() {
echo "Starting indexer with DB."
# Extract the correct dataset
ls -lh /tmp
mkdir -p /tmp/indexer-snapshot
echo "Extracting ${SNAPSHOT}"
tar -xf "${SNAPSHOT}" -C /tmp/indexer-snapshot
# One-shot import of the snapshot's genesis + block tars into the DB.
/tmp/algorand-indexer import \
-P "$CONNECTION_STRING" \
--genesis "/tmp/indexer-snapshot/algod/genesis.json" \
/tmp/indexer-snapshot/blocktars/* \
--logfile "/tmp/indexer-log.txt" >> /tmp/command.txt
# Serve the imported data.
/tmp/algorand-indexer daemon \
--dev-mode \
--server ":$PORT" \
-P "$CONNECTION_STRING" \
--logfile "/tmp/indexer-log.txt" >> /tmp/command.txt
}
# Serve a fixed 400 response instead of running the indexer.
disabled() {
  go run /tmp/disabled.go -port "$PORT" -code 400 -message "Indexer disabled for this configuration."
}

# Choose a startup mode from the environment: explicitly disabled, live
# against algod, or read-only from an imported snapshot.
if [ -n "$DISABLED" ]; then
  disabled
elif [ -z "${SNAPSHOT}" ]; then
  start_with_algod
else
  import_and_start_readonly
fi

# Keep the container alive once the chosen mode returns.
sleep infinity

View File

@ -0,0 +1,58 @@
version: '3'
services:
  # Local Algorand node built in dev mode (instant blocks).
  algo-algod:
    container_name: "algorand-tilt-algod"
    build:
      context: .
      dockerfile: ./images/algod/Dockerfile
      args:
        CHANNEL: ""
        URL: "https://github.com/algorand/go-algorand"
        BRANCH: "master"
        SHA: ""
        BOOTSTRAP_URL: ""
        GENESIS_FILE: ""
        TEMPLATE: "images/algod/DevModeNetwork.json"
        TOKEN: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
        ALGOD_PORT: "4001"
        KMD_PORT: "4002"
        CDT_PORT: "9392"
    ports:
      - 4001:4001
      - 4002:4002
      - 9392:9392
  # Indexer following the local algod node, backed by postgres.
  algo-indexer:
    container_name: "algorand-tilt-indexer"
    build:
      context: .
      dockerfile: ./images/indexer/Dockerfile
      args:
        URL: "https://github.com/algorand/indexer"
        BRANCH: "develop"
        SHA: ""
    ports:
      - 8980:8980
    restart: unless-stopped
    environment:
      DISABLED: ""
      PORT: "8980"
      SNAPSHOT: ""
      CONNECTION_STRING: "host=algo-indexer-db port=5432 user=algorand password=algorand dbname=indexer_db sslmode=disable"
      ALGOD_ADDR: "algo-algod:4001"
      ALGOD_TOKEN: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
    depends_on:
      - algo-indexer-db
      - algo-algod
  # Postgres instance for the indexer (host port 5433 to avoid clashes).
  algo-indexer-db:
    image: "postgres:13-alpine"
    container_name: "algorand-tilt-postgres"
    ports:
      - 5433:5432
    user: postgres
    environment:
      POSTGRES_USER: algorand
      POSTGRES_PASSWORD: algorand
      POSTGRES_DB: indexer_db

8
algorand/teal/README Normal file
View File

@ -0,0 +1,8 @@
While these are generated files, we cannot reliably recreate the exact
same TEAL file from the exact same PyTEAL source, due to the
non-deterministic behavior of the PyTEAL compiler based on what has been
compiled before.
As a result, we need to check the generated files in so that we can
verify the hash of the compiled code while generating upgrade VAAs.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,3 @@
#pragma version 6
// Clear-state program: always approve.
pushint 1 // 1
return

View File

@ -0,0 +1,65 @@
#pragma version 6
// Template program for a per-emitter account. The TMPL_* placeholders are
// substituted per instance before assembly (see foo.js populate()).
// NOTE(review): generated file (see teal/README) — comments only, do not
// hand-edit instructions.
intcblock 1
// The template values are pushed and popped so that each instance hashes
// to a unique program without changing behavior.
pushint TMPL_ADDR_IDX // TMPL_ADDR_IDX
pop
pushbytes TMPL_EMITTER_ID // TMPL_EMITTER_ID
pop
callsub init_0
return

// init: validate the 3-transaction bootstrap group.
init_0:
global GroupSize
pushint 3 // 3
==
assert
// txn 0: seed payment of exactly TMPL_SEED_AMT, no rekey/close.
gtxn 0 TypeEnum
intc_0 // pay
==
assert
gtxn 0 Amount
pushint TMPL_SEED_AMT // TMPL_SEED_AMT
==
assert
gtxn 0 RekeyTo
global ZeroAddress
==
assert
gtxn 0 CloseRemainderTo
global ZeroAddress
==
assert
// txn 1: OptIn call to application TMPL_APP_ID, no rekey.
gtxn 1 TypeEnum
pushint 6 // appl
==
assert
gtxn 1 OnCompletion
intc_0 // OptIn
==
assert
gtxn 1 ApplicationID
pushint TMPL_APP_ID // TMPL_APP_ID
==
assert
gtxn 1 RekeyTo
global ZeroAddress
==
assert
// txn 2: zero-amount payment that rekeys this account to the app address.
gtxn 2 TypeEnum
intc_0 // pay
==
assert
gtxn 2 Amount
pushint 0 // 0
==
assert
gtxn 2 RekeyTo
pushbytes TMPL_APP_ADDRESS // TMPL_APP_ADDRESS
==
assert
gtxn 2 CloseRemainderTo
global ZeroAddress
==
assert
intc_0 // 1
return

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,3 @@
#pragma version 6
// Clear-state program: always approve.
pushint 1 // 1
return

View File

@ -0,0 +1,88 @@
#pragma version 6
// Stateless VAA signature verification. Iterates 66-byte signature chunks,
// recovers each secp256k1 public key, and checks that keccak256(pubkey)'s
// last 20 bytes match the expected guardian address.
// NOTE(review): generated file (see teal/README) — comments only.
txn RekeyTo
global ZeroAddress
==
assert
// Fee must be 0: this txn is fee-pooled by the rest of the group.
txn Fee
int 0
==
assert
txn TypeEnum
int appl
==
assert
// args: [1] signatures blob, [3] digest, [2] guardian addresses blob.
txna ApplicationArgs 1
txna ApplicationArgs 3
txna ApplicationArgs 2
callsub sigcheck_0
assert
int 1
return

// sig_check
sigcheck_0:
// scratch 2 = guardian addresses, 1 = digest, 0 = signatures.
store 2
store 1
store 0
byte ""
store 240
byte ""
store 241
// scratch 5 = total signature bytes, 3 = sig offset, 4 = address offset.
load 0
len
store 5
int 0
store 3
int 0
store 4
sigcheck_0_l1:
load 3
load 5
<
bz sigcheck_0_l3
// ecdsa_pk_recover(digest, recovery id at +65, r at +1, s at +33).
load 1
load 0
load 3
int 65
+
int 1
extract3
btoi
load 0
load 3
int 1
+
int 32
extract3
load 0
load 3
int 33
+
int 32
extract3
ecdsa_pk_recover Secp256k1
store 241
store 240
// Compare the 20-byte guardian address with keccak256(X || Y)[12:32].
load 2
load 4
int 20
extract3
load 240
load 241
concat
keccak256
extract 12 20
==
assert
// Advance: 66 bytes per signature, 20 per guardian address.
load 3
int 66
+
store 3
load 4
int 20
+
store 4
b sigcheck_0_l1
sigcheck_0_l3:
int 1
retsub

65
algorand/test/NOTES Normal file
View File

@ -0,0 +1,65 @@
index 0
appId 4
textToHexString("guardian");
guardianAddr CLAOUBJPZ5WNLM7ZU237TCOV2WODAGNUX3536PZ3JSWMBN7M46UADLN3GY
('CLAOUBJPZ5WNLM7ZU237TCOV2WODAGNUX3536PZ3JSWMBN7M46UADLN3GY',
'01befa429d57cd18b7f8a4d91a2da9ab4af05d0fbe00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000677561726469616e')
illness illegal arrive clip fork palm skull south impose verify toss ocean shrug vital swift similar depend margin climb uniform risk pizza canal absorb forward
# Locally in your project.
npm install -D typescript
npm install -D ts-node
# Or globally with TypeScript.
npm install -g typescript
npm install -g ts-node
# Depending on configuration, you may also need these
npm install -D tslib @types/node
Go to .../ethereum/ and run:
npm ci
Then go to .../sdk/js and run:
npm ci; npm run build
Then, in a new window, you can run
tsc -p tsconfig-cjs.json --watch
which will watch for changes and recompile, and run the test with
ts-node foo2.ts
https://github.com/barnjamin/sdk-extras/blob/master/py/block_fetcher.py
package main
import (
"context"
"strings"
"github.com/algorand/go-algorand/rpcs"
"github.com/algorand/indexer/fetcher"
"github.com/sirupsen/logrus"
)
var log = logrus.New()
func main() {
f, err := fetcher.ForNetAndToken("http://localhost:4001", strings.Repeat("a", 64), log)
if err != nil {
log.Fatalf("Failed to create fetcher: %+v", err)
}
f.SetBlockHandler(handler)
f.Run(context.Background())
}
func handler(ctx context.Context, cert *rpcs.EncodedBlockCert) error {
for _, stxn := range cert.Block.Payset {
log.Printf("%+v", stxn.SignedTxn.Txn.Type)
}
return nil
}

550
algorand/test/attest.py Normal file
View File

@ -0,0 +1,550 @@
# python3 -m pip install pycryptodomex uvarint pyteal web3 coincurve
import sys
sys.path.append("..")
from admin import PortalCore, Account
from gentest import GenTest
from base64 import b64decode
from typing import List, Tuple, Dict, Any, Optional, Union
import base64
import random
import time
import hashlib
import uuid
import json
from algosdk.v2client.algod import AlgodClient
from algosdk.kmd import KMDClient
from algosdk import account, mnemonic
from algosdk.encoding import decode_address, encode_address
from algosdk.future import transaction
from pyteal import compileTeal, Mode, Expr
from pyteal import *
from algosdk.logic import get_application_address
from vaa_verify import get_vaa_verify
from algosdk.future.transaction import LogicSig
from test_contract import get_test_app
from algosdk.v2client import indexer
import pprint
# Integration-test harness layered on the PortalCore admin helpers.
class AlgoTest(PortalCore):
def __init__(self) -> None:
# All configuration and connection state comes from PortalCore.
super().__init__()
def getBalances(self, client: AlgodClient, account: str) -> Dict[int, int]:
    """Return a mapping of asset id -> held amount for `account`.

    Key 0 holds the account's microAlgo balance; every other key is an
    ASA id from the account's asset holdings.
    """
    info = client.account_info(account)
    result: Dict[int, int] = {0: info["amount"]}
    for holding in info.get("assets", []):
        result[holding["asset-id"]] = holding["amount"]
    return result
def createTestApp(
self,
client: AlgodClient,
sender: Account,
) -> int:
# Deploy the throwaway "test" application used to exercise the bridge,
# then seed its application account with 300000 microAlgos so it can
# issue inner transactions. Returns the new application id.
approval, clear = get_test_app(client)
globalSchema = transaction.StateSchema(num_uints=4, num_byte_slices=30)
localSchema = transaction.StateSchema(num_uints=0, num_byte_slices=16)
app_args = []
txn = transaction.ApplicationCreateTxn(
sender=sender.getAddress(),
on_complete=transaction.OnComplete.NoOpOC,
approval_program=b64decode(approval["result"]),
clear_program=b64decode(clear["result"]),
global_schema=globalSchema,
local_schema=localSchema,
app_args=app_args,
sp=client.suggested_params(),
)
signedTxn = txn.sign(sender.getPrivateKey())
client.send_transaction(signedTxn)
response = self.waitForTransaction(client, signedTxn.get_txid())
# Creation must yield a real application id.
assert response.applicationIndex is not None and response.applicationIndex > 0
# Fund the app's escrow address.
txn = transaction.PaymentTxn(sender = sender.getAddress(), sp = client.suggested_params(),
receiver = get_application_address(response.applicationIndex), amt = 300000)
signedTxn = txn.sign(sender.getPrivateKey())
client.send_transaction(signedTxn)
return response.applicationIndex
def parseSeqFromLog(self, txn):
    """Return the sequence number (big-endian uint) logged by the last
    inner transaction of `txn`; dump the txn for debugging on failure."""
    try:
        raw = b64decode(txn.innerTxns[-1]["logs"][0])
        return int.from_bytes(raw, "big")
    except Exception:
        pprint.pprint(txn.__dict__)
        raise
def getVAA(self, client, sender, sid, app):
# Poll the indexer until the core contract's publishMessage with sequence
# `sid`, emitted by `app`'s escrow address, appears; then have the local
# guardian set (self.gt) sign and return the matching VAA hex.
if sid == None:
raise Exception("getVAA called with a sid of None")
saddr = get_application_address(app)
# SOOO, we send a nop txn through to push the block forward
# one
# This is ONLY needed on a local net... the indexer will sit
# on the last block for 30 to 60 seconds... we don't want this
# log in prod since it is wasteful of gas
if (self.INDEXER_ROUND > 512 and not self.args.testnet): # until they fix it
print("indexer is broken in local net... stop/clean/restart the sandbox")
sys.exit(0)
txns = []
txns.append(
transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.tokenid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"nop"],
sp=client.suggested_params(),
)
)
self.sendTxn(client, sender, txns, False)
# Lazily create the indexer client on first use.
if self.myindexer == None:
print("indexer address: " + self.INDEXER_ADDRESS)
self.myindexer = indexer.IndexerClient(indexer_token=self.INDEXER_TOKEN, indexer_address=self.INDEXER_ADDRESS)
while True:
nexttoken = ""
# Page through all note-prefixed transactions since INDEXER_ROUND.
while True:
response = self.myindexer.search_transactions( min_round=self.INDEXER_ROUND, note_prefix=self.NOTE_PREFIX, next_page=nexttoken)
# pprint.pprint(response)
for x in response["transactions"]:
# pprint.pprint(x)
for y in x["inner-txns"]:
# Only core-app publishMessage calls with a logged sequence count.
if "application-transaction" not in y:
continue
if y["application-transaction"]["application-id"] != self.coreid:
continue
if len(y["logs"]) == 0:
continue
args = y["application-transaction"]["application-args"]
if len(args) < 2:
continue
if base64.b64decode(args[0]) != b'publishMessage':
continue
seq = int.from_bytes(base64.b64decode(y["logs"][0]), "big")
if seq != sid:
continue
if y["sender"] != saddr:
continue;
emitter = decode_address(y["sender"])
payload = base64.b64decode(args[1])
# pprint.pprint([seq, y["sender"], payload.hex()])
# sys.exit(0)
return self.gt.genVaa(emitter, seq, payload)
if 'next-token' in response:
nexttoken = response['next-token']
else:
# Caught up; remember the round and wait for new blocks.
self.INDEXER_ROUND = response['current-round'] + 1
break
time.sleep(1)
def publishMessage(self, client, sender, vaa, appid):
# Call the test app's "test1" method, which publishes `vaa` through the
# core contract; returns the sequence number logged by that inner call.
aa = decode_address(get_application_address(appid)).hex()
emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
txns = []
sp = client.suggested_params()
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=appid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"test1", vaa, self.coreid],
foreign_apps = [self.coreid],
accounts=[emitter_addr],
sp=sp
)
# Double fee covers the inner publishMessage transaction.
a.fee = a.fee * 2
txns.append(a)
resp = self.sendTxn(client, sender, txns, True)
self.INDEXER_ROUND = resp.confirmedRound
return self.parseSeqFromLog(resp)
def createTestAsset(self, client, sender):
# Have the test app create ("setup") and mint a new ASA, opting `sender`
# into it along the way. Returns the new asset id.
txns = []
a = transaction.PaymentTxn(
sender = sender.getAddress(),
sp = client.suggested_params(),
receiver = get_application_address(self.testid),
amt = 300000
)
txns.append(a)
sp = client.suggested_params()
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.testid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"setup"],
sp=sp
)
# Extra fee for the inner asset-create transaction.
a.fee = a.fee * 2
txns.append(a)
transaction.assign_group_id(txns)
grp = []
pk = sender.getPrivateKey()
for t in txns:
grp.append(t.sign(pk))
client.send_transactions(grp)
resp = self.waitForTransaction(client, grp[-1].get_txid())
# The app logs the created asset id as a big-endian uint.
aid = int.from_bytes(resp.__dict__["logs"][0], "big")
print("Opting " + sender.getAddress() + " into " + str(aid))
self.asset_optin(client, sender, aid, sender.getAddress())
txns = []
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.testid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"mint"],
foreign_assets = [aid],
sp=sp
)
a.fee = a.fee * 2
txns.append(a)
resp = self.sendTxn(client, sender, txns, True)
# self.INDEXER_ROUND = resp.confirmedRound
return aid
def getCreator(self, client, sender, asset_id):
    """Look up the creator address of `asset_id` via algod."""
    info = client.asset_info(asset_id)
    return info["params"]["creator"]
def testAttest(self, client, sender, asset_id):
# Attest `asset_id` through the token bridge; returns the sequence number
# of the resulting core-app publishMessage.
taddr = get_application_address(self.tokenid)
aa = decode_address(taddr).hex()
emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
creator = self.getCreator(client, sender, asset_id)
c = client.account_info(creator)
# Wrapped (wormhole-created) assets have a creator rekeyed to the bridge.
wormhole = c.get("auth-addr") == taddr
if not wormhole:
# Native assets attest out of the per-asset "native" escrow account.
creator = self.optin(client, sender, self.tokenid, asset_id, b"native".hex())
txns = []
sp = client.suggested_params()
txns.append(transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.tokenid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"nop"],
sp=sp
))
mfee = self.getMessageFee()
if (mfee > 0):
txns.append(transaction.PaymentTxn(sender = sender.getAddress(), sp = sp, receiver = get_application_address(self.tokenid), amt = mfee))
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.tokenid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"attestToken", asset_id],
foreign_apps = [self.coreid],
foreign_assets = [asset_id],
accounts=[emitter_addr, creator, c["address"], get_application_address(self.coreid)],
sp=sp
)
# Extra fee covers the inner txns (one more when a message fee was paid).
if (mfee > 0):
a.fee = a.fee * 3
else:
a.fee = a.fee * 2
txns.append(a)
resp = self.sendTxn(client, sender, txns, True)
# Point us at the correct round
self.INDEXER_ROUND = resp.confirmedRound
# print(encode_address(resp.__dict__["logs"][0]))
# print(encode_address(resp.__dict__["logs"][1]))
# pprint.pprint(resp.__dict__)
return self.parseSeqFromLog(resp)
def transferAsset(self, client, sender, asset_id, quantity, receiver, chain, fee, payload = None):
    """Bridge `quantity` of `asset_id` (0 == ALGO) to `receiver` on `chain`.

    Builds the escrow payment/asset-transfer plus the token bridge's
    "sendTransfer" application call, submits the group, and returns the
    core-app sequence number of the resulting publishMessage so the caller
    can fetch the VAA. `payload` enables payload-3 transfers.
    """
    taddr = get_application_address(self.tokenid)
    aa = decode_address(taddr).hex()
    emitter_addr = self.optin(client, sender, self.coreid, 0, aa)

    # asset_id 0 is ALGO; it can never be a wormhole-wrapped asset.
    if asset_id == 0:
        wormhole = False
    else:
        creator = self.getCreator(client, sender, asset_id)
        c = client.account_info(creator)
        # Wrapped assets are created by an account rekeyed to the bridge.
        wormhole = c.get("auth-addr") == taddr

    txns = []
    # Bug fix: fetch suggested params before their first use. The original
    # referenced `sp` in the message-fee payment below before assigning it,
    # raising NameError whenever the message fee was non-zero.
    sp = client.suggested_params()

    mfee = self.getMessageFee()
    if (mfee > 0):
        txns.append(transaction.PaymentTxn(sender = sender.getAddress(), sp = sp, receiver = get_application_address(self.tokenid), amt = mfee))

    if not wormhole:
        # Native assets escrow into the per-asset "native" account.
        creator = self.optin(client, sender, self.tokenid, asset_id, b"native".hex())
        print("non wormhole account " + creator)

    if (asset_id != 0) and (not self.asset_optin_check(client, sender, asset_id, creator)):
        # Fund the escrow account and have the token bridge opt it in.
        print("Looks like we need to optin")
        txns.append(
            transaction.PaymentTxn(
                sender=sender.getAddress(),
                receiver=creator,
                amt=100000,
                sp=sp
            )
        )
        # The tokenid app needs to do the optin since it has signature authority
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.tokenid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"optin", asset_id],
            foreign_assets = [asset_id],
            accounts=[creator],
            sp=sp
        )
        a.fee = a.fee * 2
        txns.append(a)
        self.sendTxn(client, sender, txns, False)
        txns = []

    # Leading nop pushes the group past the local indexer's round lag.
    txns.insert(0,
        transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.tokenid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"nop"],
            sp=client.suggested_params(),
        )
    )

    if asset_id == 0:
        print("asset_id == 0")
        # ALGO moves into escrow with a plain payment.
        txns.append(transaction.PaymentTxn(
            sender=sender.getAddress(),
            receiver=creator,
            amt=quantity,
            sp=sp,
        ))
        accounts=[emitter_addr, creator, creator]
    else:
        print("asset_id != 0")
        txns.append(
            transaction.AssetTransferTxn(
                sender = sender.getAddress(),
                sp = sp,
                receiver = creator,
                amt = quantity,
                index = asset_id
            ))
        accounts=[emitter_addr, creator, c["address"]]

    args = [b"sendTransfer", asset_id, quantity, decode_address(receiver), chain, fee]
    if None != payload:
        args.append(payload)

    a = transaction.ApplicationCallTxn(
        sender=sender.getAddress(),
        index=self.tokenid,
        on_complete=transaction.OnComplete.NoOpOC,
        app_args=args,
        foreign_apps = [self.coreid],
        foreign_assets = [asset_id],
        accounts=accounts,
        sp=sp
    )
    # Double fee covers the inner publishMessage transaction.
    a.fee = a.fee * 2
    txns.append(a)
    resp = self.sendTxn(client, sender, txns, True)
    self.INDEXER_ROUND = resp.confirmedRound
    return self.parseSeqFromLog(resp)
def asset_optin_check(self, client, sender, asset, receiver):
    """Return True if `receiver` already holds `asset`.

    Positive results are memoized in self.asset_cache to avoid repeated
    account_info round-trips.
    """
    cache = self.asset_cache.setdefault(receiver, {})
    if asset in cache:
        return True
    info = client.account_info(receiver)
    for holding in info.get("assets", []):
        if holding["asset-id"] == asset:
            cache[asset] = True
            return True
    return False
def asset_optin(self, client, sender, asset, receiver):
# Ensure `receiver` is opted in to `asset` via a 0-amount asset transfer;
# no-op when the holding already exists.
# NOTE(review): the 0-amount transfer is signed and sent by `sender`, so
# this only performs an opt-in when receiver == sender (as used from
# createTestAsset) — confirm before reusing with other receivers.
if self.asset_optin_check(client, sender, asset, receiver):
return
pprint.pprint(["asset_optin", asset, receiver])
sp = client.suggested_params()
optin_txn = transaction.AssetTransferTxn(
sender = sender.getAddress(),
sp = sp,
receiver = receiver,
amt = 0,
index = asset
)
transaction.assign_group_id([optin_txn])
signed_optin = optin_txn.sign(sender.getPrivateKey())
client.send_transactions([signed_optin])
resp = self.waitForTransaction(client, signed_optin.get_txid())
# Re-check on-chain state to confirm the opt-in landed.
assert self.asset_optin_check(client, sender, asset, receiver), "The optin failed"
print("woah! optin succeeded")
def simple_test(self):
# End-to-end smoke test: set up accounts, deploy the test app, create a
# native asset, attest it through the token bridge, and verify the
# generated VAA can be found and parsed.
# q = bytes.fromhex(gt.genAssetMeta(gt.guardianPrivKeys, 1, 1, 1, bytes.fromhex("4523c3F29447d1f32AEa95BEBD00383c4640F1b4"), 1, 8, b"USDC", b"CircleCoin"))
# pprint.pprint(self.parseVAA(q))
# sys.exit(0)
# vaa = self.parseVAA(bytes.fromhex("01000000010100e1232697de3681d67ca0c46fbbc9ea5d282c473daae8fda2b23145e7b7167f9a35888acf80ed9d091af3069108c25324a22d8665241db884dda53ca53a8212d100625436600000000100020000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585000000000000000120010000000000000000000000000000000000000000000000000000000005f5e1000000000000000000000000000000000000000000000000004523c3F29447d1f32AEa95BEBD00383c4640F1b400020000000000000000000000000000000000000000000000000000aabbcc00080000000000000000000000000000000000000000000000000000000000000000"))
# pprint.pprint(vaa)
# sys.exit(0)
# Local guardian-set generator used to sign test VAAs.
gt = GenTest(True)
self.gt = gt
self.setup_args()
if self.args.testnet:
self.testnet()
client = self.client = self.getAlgodClient()
self.genTeal()
# Compile the stateless VAA-verify program and wrap it as a LogicSig.
self.vaa_verify = self.client.compile(get_vaa_verify())
self.vaa_verify["lsig"] = LogicSig(base64.b64decode(self.vaa_verify["result"]))
vaaLogs = []
args = self.args
if self.args.mnemonic:
self.foundation = Account.FromMnemonic(self.args.mnemonic)
if self.foundation == None:
print("Generating the foundation account...")
self.foundation = self.getTemporaryAccount(self.client)
if self.foundation == None:
print("We dont have a account? ")
sys.exit(0)
foundation = self.foundation
seq = int(time.time())
# NOTE(review): core/token-bridge app ids are hard-coded for the devnet
# deployment order — confirm they match the local deployment.
self.coreid = 4
self.tokenid = 6
player = self.getTemporaryAccount(client)
print("token bridge " + str(self.tokenid) + " address " + get_application_address(self.tokenid))
player2 = self.getTemporaryAccount(client)
player3 = self.getTemporaryAccount(client)
# This creates a asset by using another app... you can also just creat the asset from the client sdk like we do in the typescript test
self.testid = self.createTestApp(client, player2)
print("Lets create a brand new non-wormhole asset and try to attest and send it out")
self.testasset = self.createTestAsset(client, player2)
print("test asset id: " + str(self.testasset))
print("Lets try to create an attest for a non-wormhole thing with a huge number of decimals")
# paul - attestFromAlgorand
sid = self.testAttest(client, player2, self.testasset)
print("... track down the generated VAA")
vaa = self.getVAA(client, player, sid, self.tokenid)
v = self.parseVAA(bytes.fromhex(vaa))
print("We got a " + v["Meta"])
# Entry point: run the end-to-end smoke test.
if __name__ == "__main__":
    AlgoTest().simple_test()

48
algorand/test/foo Normal file
View File

@ -0,0 +1,48 @@
{'applicationIndex': None,
'assetIndex': None,
'closeRewards': None,
'closingAmount': None,
'confirmedRound': 69,
'globalStateDelta': None,
'innerTxns': [{'local-state-delta': [{'address': 'RYXQG6MGNASBPG7CHP5B2R4HOPYJGY5S25SZ6YLAMBJ5LRB6DJQZUAQ7QE',
'delta': [{'key': 'AA==',
'value': {'action': 1,
'bytes': 'AAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=='}},
{'key': 'bWV0YQ==',
'value': {'action': 1,
'bytes': 'cHVibGlzaE1lc3NhZ2U='}}]}],
'logs': ['AAAAAAAAAAE='],
'pool-error': '',
'txn': {'txn': {'apaa': ['cHVibGlzaE1lc3NhZ2U=',
'YjnPtmI5z7wAAexzcpldXMhzI5f7CtNcASHg6qkNJvgopTTKtUORs6T1AAAAAGI5z7wgAgAAAAAAAAAAAAAAAEUjw/KUR9HzKuqVvr0AODxGQPG0AAEIVVNEMkMAAAAAAAAAAA==',
'AAAAAAAAAAA='],
'apat': ['RYXQG6MGNASBPG7CHP5B2R4HOPYJGY5S25SZ6YLAMBJ5LRB6DJQZUAQ7QE'],
'apid': 150,
'fv': 68,
'lv': 1068,
'note': 'cHVibGlzaE1lc3NhZ2U=',
'snd': 'AZV2EHAFODMFHNTJ3NHDJKYF4ZZRSUUKI5O472GDD3JYBXZFXZIANBFWVE',
'type': 'appl'}}}],
'localStateDelta': None,
'logs': [],
'poolError': '',
'receiverRewards': None,
'senderRewards': None,
'txn': {'sig': 'WUQv35J6KTg/kFKvU6ateUR0Skkq85+9xDK57oZ/IQHUQkU2NoYraRNrLTwb5Sf/y9PRZGCO0xzEMzLr3TK9DA==',
'txn': {'apaa': ['YXR0ZXN0VG9rZW4=', 'AAAAAAAAAPA='],
'apas': [240],
'apat': ['RYXQG6MGNASBPG7CHP5B2R4HOPYJGY5S25SZ6YLAMBJ5LRB6DJQZUAQ7QE',
'Y55KVPNTESVJFIZXCHI7BPQLQK65CNB3J5V4RQW4W4QTPXC7KH3BOIICKI',
'Y55KVPNTESVJFIZXCHI7BPQLQK65CNB3J5V4RQW4W4QTPXC7KH3BOIICKI',
'YC6MOPFZT6GSQOYKYMQCUVTQ7KCIF4QAUA32Q6EBMSQQ4DAGRYJ2P3NFT4'],
'apfa': [150],
'apid': 171,
'fee': 2000,
'fv': 68,
'gen': 'sandnet-v1',
'gh': 'DyppoDq/KVGRmtli693nOkDgN6/RadREW+ZiHPsEIsE=',
'grp': 'jsuD9/MGrNF458IXz671THTBRHetESK9Z44Yl4nxvRg=',
'lv': 1068,
'snd': 'VDTSI73333EKLV4ZZLOWSF3LFMLJ7E42Q7CAURHIRI4FT7WUPGVR7WH5GI',
'type': 'appl'}}}
1

60
algorand/test/foo.js Normal file
View File

@ -0,0 +1,60 @@
var varint = require('varint')
//
//
// Reference vector: known-good assembled contract hex plus the TMPL_*
// inputs that produced it, used to self-check populate() below.
t = {
'contract': '0620010181004880220001000000000000000000000000000000000000000000000000000000000000000448880001433204810312443300102212443300088190943d124433002032031244330009320312443301108106124433011922124433011881df0412443301203203124433021022124433020881001244330220802050b9d5cd33b835f53649f25be3ba6e6b8271b6d16c0af8aa97cc11761e417feb1244330209320312442243',
'TMPL_ADDR_IDX': 0,
'TMPL_APP_ADDRESS': '50b9d5cd33b835f53649f25be3ba6e6b8271b6d16c0af8aa97cc11761e417feb',
'TMPL_APP_ID': 607,
'TMPL_EMITTER_ID': '00010000000000000000000000000000000000000000000000000000000000000004',
'TMPL_SEED_AMT': 1002000
}
// Second vector: same template with the ASCII "guardian" emitter id.
t2 = {
'contract': '062001018101488008677561726469616e48880001433204810312443300102212443300088190943d124433002032031244330009320312443301108106124433011922124433011881df0412443301203203124433021022124433020881001244330220802050b9d5cd33b835f53\
649f25be3ba6e6b8271b6d16c0af8aa97cc11761e417feb1244330209320312442243',
'TMPL_ADDR_IDX': 1,
'TMPL_APP_ADDRESS': '50b9d5cd33b835f53649f25be3ba6e6b8271b6d16c0af8aa97cc11761e417feb',
'TMPL_APP_ID': 607,
'TMPL_EMITTER_ID': '677561726469616e',
'TMPL_SEED_AMT': 1002000
}
// Convert a byte value (0-255) to a two-character, zero-padded hex string.
// Fix: the original only padded values < 10, so bytes 10..15 (0x0a..0x0f)
// produced a single hex digit and would corrupt the assembled contract hex.
function properHex(v) {
    if (v < 16)
        return '0' + v.toString(16)
    else
        return v.toString(16)
}
// Reassemble the emitter LogicSig bytecode by splicing the varint-encoded
// TMPL_* values between fixed opcode runs (mirrors the TEAL template).
// NOTE(review): assumes varint.encode returns an array of byte values —
// confirm against the varint package docs.
function populate(v) {
foo = [
'0620010181',
varint.encode(v["TMPL_ADDR_IDX"]).map (n => properHex(n)).join(''),
'4880',
varint.encode(v["TMPL_EMITTER_ID"].length / 2).map (n => properHex(n)).join(''),
v["TMPL_EMITTER_ID"],
'488800014332048103124433001022124433000881',
varint.encode(v["TMPL_SEED_AMT"]).map (n => properHex(n)).join(''),
'124433002032031244330009320312443301108106124433011922124433011881',
varint.encode(v["TMPL_APP_ID"]).map (n => properHex(n)).join(''),
'1244330120320312443302102212443302088100124433022080',
varint.encode(v["TMPL_APP_ADDRESS"].length/2).map (n => properHex(n)).join(''),
v["TMPL_APP_ADDRESS"],
'1244330209320312442243'
].join('')
return foo
}
// Self-check: populate() must reproduce both known-good contract byte strings.
if (t["contract"] == populate(t)) {
console.log("omg it works!")
} else {
console.log("You are weak")
}
if (t2["contract"] == populate(t2)) {
console.log("omg it works!")
} else {
console.log("You are weak")
}

63
algorand/test/foo2.ts Normal file
View File

@ -0,0 +1,63 @@
const algosdk = require('algosdk');
const TestLib = require('./testlib.ts')
const testLib = new TestLib.TestLib()
const fs = require('fs');
const path = require('path');
import {
submitVAA,
submitVAAHdr,
simpleSignVAA,
parseVAA,
// Account,
} from "../../sdk/js/src/token_bridge/Algorand";
//const AlgorandLib = require('../../sdk/js/src/token_bridge/Algorand.ts')
//const algorandLib = new AlgorandLib.AlgorandLib()
// Devnet guardian keypair used to sign the test VAA.
const guardianKeys = [
"beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"
]
const guardianPrivKeys = [
"cfb12303a19cde580bb4dd771639b0d26bc68353645571a8cff516ab2ee113a0"
]
// Canned Pyth emitter and payload for the test VAA.
const PYTH_EMITTER = '0x3afda841c1f43dd7d546c8a581ba1f92a139f4133f9f6ab095558f6a359df5d4'
const PYTH_PAYLOAD = '0x50325748000101230abfe0ec3b460bd55fc4fb36356716329915145497202b8eb8bf1af6a0a3b9fe650f0367d4a7ef9815a593ea15d36593f0643aaaf0149bb04be67ab851decd010000002f17254388fffffff70000002eed73d9000000000070d3b43f0000000037faa03d000000000e9e555100000000894af11c0000000037faa03d000000000dda6eb801000000000061a5ff9a'
// Sign the canned VAA with the devnet guardian and submit its header
// through the bridge SDK against a local algod node.
async function firstTransaction() {
try {
// This is a funded account...
let myAccount = algosdk.mnemonicToSecretKey("intact frozen tooth wealth syrup elevator list book property census imitate attend draft silly fortune afford injury poem section wait main bench feel absent giraffe")
console.log(myAccount)
// Connect your client
const algodToken = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa';
const algodServer = 'http://localhost';
const algodPort = 4001;
let algodClient = new algosdk.Algodv2(algodToken, algodServer, algodPort);
//Check your balance
let accountInfo = await algodClient.accountInformation(myAccount.addr).do();
console.log("Account balance: %d microAlgos", accountInfo.amount);
// let vaa = testLib.genGuardianSetUpgrade(guardianPrivKeys, 0, 1, 1, 1, guardianKeys)
// console.log(vaa)
// console.log(parseVAA(new Uint8Array(Buffer.from(vaa, "hex"))))
// process.exit(0)
let vaa = testLib.createSignedVAA(0, guardianPrivKeys, 1, 1, 1, PYTH_EMITTER, 0, 0, PYTH_PAYLOAD)
console.log(vaa)
let evaa = new Uint8Array(Buffer.from(vaa, "hex"))
// Submit the VAA header and sign/submit the generated transactions.
let sstate = await submitVAAHdr(evaa, algodClient, myAccount, 4);
console.log(await simpleSignVAA(algodClient, myAccount, sstate.txns));
}
catch (err) {
console.log("err", err);
}
process.exit();
};
firstTransaction();

578
algorand/test/simple.py Normal file
View File

@ -0,0 +1,578 @@
# python3 -m pip install pycryptodomex uvarint pyteal web3 coincurve
import sys
sys.path.append("..")
from admin import PortalCore, Account
from gentest import GenTest
from base64 import b64decode
from typing import List, Tuple, Dict, Any, Optional, Union
import base64
import random
import time
import hashlib
import uuid
import json
from algosdk.v2client.algod import AlgodClient
from algosdk.kmd import KMDClient
from algosdk import account, mnemonic
from algosdk.encoding import decode_address, encode_address
from algosdk.future import transaction
from pyteal import compileTeal, Mode, Expr
from pyteal import *
from algosdk.logic import get_application_address
from vaa_verify import get_vaa_verify
from algosdk.future.transaction import LogicSig
from test_contract import get_test_app
from algosdk.v2client import indexer
import pprint
# Test harness layered on the PortalCore admin helpers (simple.py variant).
class AlgoTest(PortalCore):
def __init__(self) -> None:
# All configuration and connection state comes from PortalCore.
super().__init__()
def getBalances(self, client: AlgodClient, account: str) -> Dict[int, int]:
    """Return {asset_id: amount} for `account`; key 0 is the microAlgo balance."""
    info = client.account_info(account)
    balances: Dict[int, int] = {0: info["amount"]}
    for holding in info.get("assets", []):
        balances[holding["asset-id"]] = holding["amount"]
    return balances
def createTestApp(
self,
client: AlgodClient,
sender: Account,
) -> int:
# Deploy the throwaway "test" application, then seed its application
# account with 300000 microAlgos. Returns the new application id.
approval, clear = get_test_app(client)
globalSchema = transaction.StateSchema(num_uints=4, num_byte_slices=30)
localSchema = transaction.StateSchema(num_uints=0, num_byte_slices=16)
app_args = []
txn = transaction.ApplicationCreateTxn(
sender=sender.getAddress(),
on_complete=transaction.OnComplete.NoOpOC,
approval_program=b64decode(approval["result"]),
clear_program=b64decode(clear["result"]),
global_schema=globalSchema,
local_schema=localSchema,
app_args=app_args,
sp=client.suggested_params(),
)
signedTxn = txn.sign(sender.getPrivateKey())
client.send_transaction(signedTxn)
response = self.waitForTransaction(client, signedTxn.get_txid())
# Creation must yield a real application id.
assert response.applicationIndex is not None and response.applicationIndex > 0
# Fund the app's escrow address.
txn = transaction.PaymentTxn(sender = sender.getAddress(), sp = client.suggested_params(),
receiver = get_application_address(response.applicationIndex), amt = 300000)
signedTxn = txn.sign(sender.getPrivateKey())
client.send_transaction(signedTxn)
return response.applicationIndex
def parseSeqFromLog(self, txn):
    """Return the sequence number (big-endian uint) logged by the first
    inner transaction of `txn`."""
    encoded = txn.innerTxns[0]["logs"][0]
    return int.from_bytes(b64decode(encoded), "big")
def getVAA(self, client, sender, sid, app):
if sid == None:
raise Exception("getVAA called with a sid of None")
saddr = get_application_address(app)
# SOOO, we send a nop txn through to push the block forward
# one
# This is ONLY needed on a local net... the indexer will sit
# on the last block for 30 to 60 seconds... we don't want this
# log in prod since it is wasteful of gas
if (self.INDEXER_ROUND > 512): # until they fix it
print("indexer is broken in local net... stop/clean/restart the sandbox")
sys.exit(0)
txns = []
txns.append(
transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.tokenid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"nop"],
sp=client.suggested_params(),
)
)
self.sendTxn(client, sender, txns, False)
while True:
nexttoken = ""
while True:
response = self.myindexer.search_transactions( min_round=self.INDEXER_ROUND, note_prefix=self.NOTE_PREFIX, next_page=nexttoken)
# pprint.pprint(response)
for x in response["transactions"]:
# pprint.pprint(x)
for y in x["inner-txns"]:
if y["application-transaction"]["application-id"] != self.coreid:
continue
if len(y["logs"]) == 0:
continue
args = y["application-transaction"]["application-args"]
if len(args) < 2:
continue
if base64.b64decode(args[0]) != b'publishMessage':
continue
seq = int.from_bytes(base64.b64decode(y["logs"][0]), "big")
if seq != sid:
# print(str(seq) + " != " + str(sid))
continue
if y["sender"] != saddr:
continue;
emitter = decode_address(y["sender"])
payload = base64.b64decode(args[1])
# pprint.pprint([seq, y["sender"], payload.hex()])
# sys.exit(0)
return self.gt.genVaa(emitter, seq, payload)
if 'next-token' in response:
nexttoken = response['next-token']
else:
self.INDEXER_ROUND = response['current-round'] + 1
break
time.sleep(1)
def publishMessage(self, client, sender, vaa, appid):
aa = decode_address(get_application_address(appid)).hex()
emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
txns = []
sp = client.suggested_params()
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=appid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"test1", vaa, self.coreid],
foreign_apps = [self.coreid],
accounts=[emitter_addr],
sp=sp
)
a.fee = a.fee * 2
txns.append(a)
resp = self.sendTxn(client, sender, txns, True)
self.INDEXER_ROUND = resp.confirmedRound
return self.parseSeqFromLog(resp)
    def createTestAsset(self, client, sender):
        """Use the test app to create and mint a test ASA; returns its asset id."""
        txns = []
        # Fund the test app's escrow so it can create the ASA.
        a = transaction.PaymentTxn(
            sender = sender.getAddress(),
            sp = client.suggested_params(),
            receiver = get_application_address(self.testid),
            amt = 300000
        )
        txns.append(a)
        sp = client.suggested_params()
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.testid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"setup"],
            sp=sp
        )
        a.fee = a.fee * 2  # pays for the inner asset-create transaction
        txns.append(a)
        transaction.assign_group_id(txns)
        grp = []
        pk = sender.getPrivateKey()
        for t in txns:
            grp.append(t.sign(pk))
        client.send_transactions(grp)
        resp = self.waitForTransaction(client, grp[-1].get_txid())
        # The app logs the new asset id as big-endian bytes.
        aid = int.from_bytes(resp.__dict__["logs"][0], "big")
        print("Opting " + sender.getAddress() + " into " + str(aid))
        self.asset_optin(client, sender, aid, sender.getAddress())
        txns = []
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.testid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"mint"],
            foreign_assets = [aid],
            sp=sp  # NOTE(review): reuses the earlier suggested params -- confirm intentional
        )
        a.fee = a.fee * 2
        txns.append(a)
        resp = self.sendTxn(client, sender, txns, True)
        # self.INDEXER_ROUND = resp.confirmedRound
        return aid
def getCreator(self, client, sender, asset_id):
return client.asset_info(asset_id)["params"]["creator"]
    def testAttest(self, client, sender, asset_id):
        """Attest `asset_id` through the token bridge; returns the sequence
        number of the resulting core-bridge message."""
        taddr = get_application_address(self.tokenid)
        aa = decode_address(taddr).hex()
        emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
        creator = self.getCreator(client, sender, asset_id)
        c = client.account_info(creator)
        # Wormhole-wrapped assets are created by an account rekeyed to the
        # token bridge; natives are not.
        wormhole = c.get("auth-addr") == taddr
        if not wormhole:
            # Native asset: use (or create) the bridge's "native" holding account.
            creator = self.optin(client, sender, self.tokenid, asset_id, b"native".hex())
        txns = []
        sp = client.suggested_params()
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.tokenid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"attestToken", asset_id],
            foreign_apps = [self.coreid],
            foreign_assets = [asset_id],
            accounts=[emitter_addr, creator, c["address"]],
            sp=sp
        )
        a.fee = a.fee * 2  # covers the inner publishMessage transaction
        txns.append(a)
        resp = self.sendTxn(client, sender, txns, True)
        # Point us at the correct round
        self.INDEXER_ROUND = resp.confirmedRound
        # print(encode_address(resp.__dict__["logs"][0]))
        # print(encode_address(resp.__dict__["logs"][1]))
        return self.parseSeqFromLog(resp)
    def transferAsset(self, client, sender, asset_id, quantity, receiver, chain, fee, payload = None):
        """Send `quantity` of `asset_id` (0 == ALGO) through the token bridge to
        `receiver` on `chain`, paying `fee` to the relayer; optional payload-3
        `payload`.  Returns the core message sequence number."""
        taddr = get_application_address(self.tokenid)
        aa = decode_address(taddr).hex()
        emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
        # asset_id 0 is ALGO
        if asset_id == 0:
            wormhole = False
        else:
            creator = self.getCreator(client, sender, asset_id)
            c = client.account_info(creator)
            # Wrapped assets are created by an account rekeyed to the token bridge.
            wormhole = c.get("auth-addr") == taddr
        txns = []
        if not wormhole:
            # Native asset: route through the bridge's "native" holding account.
            creator = self.optin(client, sender, self.tokenid, asset_id, b"native".hex())
            print("non wormhole account " + creator)
        sp = client.suggested_params()
        if (asset_id != 0) and (not self.asset_optin_check(client, sender, asset_id, creator)):
            print("Looks like we need to optin")
            # Fund the holding account to cover the opt-in minimum balance.
            txns.append(
                transaction.PaymentTxn(
                    sender=sender.getAddress(),
                    receiver=creator,
                    amt=100000,
                    sp=sp
                )
            )
            # The tokenid app needs to do the optin since it has signature authority
            a = transaction.ApplicationCallTxn(
                sender=sender.getAddress(),
                index=self.tokenid,
                on_complete=transaction.OnComplete.NoOpOC,
                app_args=[b"optin", asset_id],
                foreign_assets = [asset_id],
                accounts=[creator],
                sp=sp
            )
            a.fee = a.fee * 2
            txns.append(a)
            # Send the opt-in group separately, then rebuild for the transfer.
            self.sendTxn(client, sender, txns, False)
            txns = []
        if asset_id == 0:
            print("asset_id == 0")
            # ALGO moves as a plain payment into the bridge's holding account.
            txns.append(transaction.PaymentTxn(
                sender=sender.getAddress(),
                receiver=creator,
                amt=quantity,
                sp=sp,
            ))
            accounts=[emitter_addr, creator, creator]
        else:
            print("asset_id != 0")
            txns.append(
                transaction.AssetTransferTxn(
                    sender = sender.getAddress(),
                    sp = sp,
                    receiver = creator,
                    amt = quantity,
                    index = asset_id
                ))
            accounts=[emitter_addr, creator, c["address"]]
        args = [b"sendTransfer", asset_id, quantity, decode_address(receiver), chain, fee]
        if None != payload:
            args.append(payload)
        # pprint.pprint(args)
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.tokenid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=args,
            foreign_apps = [self.coreid],
            foreign_assets = [asset_id],
            accounts=accounts,
            sp=sp
        )
        a.fee = a.fee * 2  # covers the inner publishMessage transaction
        txns.append(a)
        resp = self.sendTxn(client, sender, txns, True)
        self.INDEXER_ROUND = resp.confirmedRound
        # pprint.pprint(resp.__dict__)
        # print(encode_address(resp.__dict__["logs"][0]))
        # print(encode_address(resp.__dict__["logs"][1]))
        return self.parseSeqFromLog(resp)
def asset_optin_check(self, client, sender, asset, receiver):
if receiver not in self.asset_cache:
self.asset_cache[receiver] = {}
if asset in self.asset_cache[receiver]:
return True
ai = client.account_info(receiver)
if "assets" in ai:
for x in ai["assets"]:
if x["asset-id"] == asset:
self.asset_cache[receiver][asset] = True
return True
return False
def asset_optin(self, client, sender, asset, receiver):
if self.asset_optin_check(client, sender, asset, receiver):
return
pprint.pprint(["asset_optin", asset, receiver])
sp = client.suggested_params()
optin_txn = transaction.AssetTransferTxn(
sender = sender.getAddress(),
sp = sp,
receiver = receiver,
amt = 0,
index = asset
)
transaction.assign_group_id([optin_txn])
signed_optin = optin_txn.sign(sender.getPrivateKey())
client.send_transactions([signed_optin])
resp = self.waitForTransaction(client, signed_optin.get_txid())
assert self.asset_optin_check(client, sender, asset, receiver), "The optin failed"
print("woah! optin succeeded")
    def simple_test(self):
        """End-to-end devnet scenario: deploy the test app, create a test ASA,
        attest it through the token bridge, and track down the generated VAA.
        (The large commented-out sections below exercise transfers, relaying,
        and payload-3 flows and are kept for manual debugging.)"""
        # q = bytes.fromhex(gt.genAssetMeta(gt.guardianPrivKeys, 1, 1, 1, bytes.fromhex("4523c3F29447d1f32AEa95BEBD00383c4640F1b4"), 1, 8, b"USDC", b"CircleCoin"))
        # pprint.pprint(self.parseVAA(q))
        # sys.exit(0)
        # vaa = self.parseVAA(bytes.fromhex("01000000011300c412b9e5b304bde8f8633a41568991ca56b7c11a925847f0059e95010ec5241b761719f12d3f4a79d1515e08152b2e8584cd1e8217dd7743c2bf863b78b2bf040001aebade2f601a4e9083585b1bb5f98d421f116e0393f525b95d51afbe69051587531771dc127a5e9d7b74662bb7ac378d44181522dc748b1b0cbfe1b1de6ed39d01024b4e9fc86ac64aaeef84ea14e4265c3c186042a3ae9ab2933bf06c0cbf326b3c2b89e7d9854fc5204a447bd202592a72d1d6db3d007bef9fea0e35953afbd9f1010342e4446ac94545a0447851eda5d5e3b8c97c6f4ef338977562cd4ecbee2b8fea42d536d7655c28a7f7fb2ff5fc8e5775e892d853c9b2e4969f9ce054ede801700104af0d783996ccfd31d6fc6f86b634288cd2f9cc29695cfcbf12d915c1b9c383dc792c7abbe8126cd917fb8658a8de843d64171122db182453584c0c330e8889730105f34d45ec63ec0a0c4535303fd9c83a0fad6b0a112b27306a288c1b46f2a78399754536ecb07f1ab6c32d92ed50b11fef3668b23d5c1ca010ec4c924441367eac0006566671ff859eec8429874ba9e07dd107b22859cf5029928bebec6eb73cdca6752f91bb252bca76cb15ede1121a84a9a54dad126f50f282a47f7d30880ef86a3900076d0d1241e1fc9d039810a8aebd7cab863017c9420eb67f4578577c5ec4d37162723dcd6213ff6895f280a88ba70de1a5b9257fe2937cbdea007e84886abc46dd0108b24dcddaae10f5e12b7085a0c3885a050640af17ba265a448102401854183e9f3ae9a14cad1af64eb57c6f145c6f709d7ed6bb8712a6b315dc2780c9eb42812e0109df696bf506dfcd8fce57968a84d5f773706b117fad31f86bbb089ede77d71a6e54b7729f79a82e7d6e4a6797380796fbcb9ba9428e8fcdf0400515f8205b31c5010a90a03c76fdec510712b2a6ee52cc0b6df5c921437896756f34b3782aa486eb5b5d02df783664257539233502ec25bbda7dd754afc139823da8a43c0d3c91c279000b33549edd8353c4d577cb273b88b545ae547ad01e85161a4fbbbb371cff453d6311c787254e2852c3b874ea60c67d40efc3ee3f24b51bc3fe95cc0a873e8a3fb6000ce2e206214ae2b4b048857f061ed3cf8cef060c67a85ad863f266145238c5d2a85e38b4eb9b3be4d33f502df4c45762504eb43a6bf78f01363d1399b67c354df8000d2d362d64a2e3d1583e1299238829cc11d81e9b9820121c0a2eb91d542aa54c993861e8225bc3e8d028dc128d284118703a4ec69144d69402efd72a29bb9f6b8f000e6bf56fa3ae6303f495f1379b450eb52580d7d9098dd909762e6186d19e06480d2bba8f06602dbd6d3d5deac7080fc2e61bd1b
e97e442b63435c91fa72b33534c000fad870b47c86f6997286bd4def4bacc5a8abbfef3f730f62183c638131004ea2f706ab73ebfe8f4879bf54f580444acec212e96e41abaf4acfc3383f05478e528001089599974feaab33862cd881af13f1645079bd2fa2ff07ca744674c8556aaf97c5c9c90df332d5b4ad1428776b68612f0b1ecb98c2ebc83f44f42426f180062cd00116aa93eecb4d528afaa07b72484acd5b79ad20e9ad8e55ce37cb9138b4c12a8eb3d10fa7d932b06ac441905e0226d3420101971a72c5488e4bfef222de8c3acd1011203a3e3d8ec938ffbc3a27d8caf50fc925bd25bd286d5ad6077dffd7e205ce0806e166b661d502f8c49acf88d42fde20e6015830d5517a0bfd40f79963ded4d2d006227697a000f68690008a0ae83030f1423aa97121527f65bbbb97925b43b95231bb0478fd650a057cc4b00000000000000072003000000000000000000000000000000000000000000000000000000000007a12000000000000000000000000000000000000000000000000000000000000000000008cf9ee7255420a50c55ef35d4bdcdd8048dee5c3c1333ecd97aff98869ea280780008000000000000000000000000000000000000000000000000000000000007a1206869206d6f6d"))
        # pprint.pprint(vaa)
        # sys.exit(0)
        gt = GenTest(False)
        self.gt = gt
        client = self.getAlgodClient()
        print("Generating the foundation account...")
        foundation = self.getTemporaryAccount(client)
        player = self.getTemporaryAccount(client)
        player2 = self.getTemporaryAccount(client)
        player3 = self.getTemporaryAccount(client)
        # NOTE(review): core/token bridge app ids are hard-coded for the local
        # devnet deployment -- confirm they match the running sandbox.
        self.coreid = 4
        print("coreid = " + str(self.coreid))
        self.tokenid = 6
        print("token bridge " + str(self.tokenid) + " address " + get_application_address(self.tokenid))
        self.testid = self.createTestApp(client, player2)
        print("testid " + str(self.testid) + " address " + get_application_address(self.testid))
        print("Lets create a brand new non-wormhole asset and try to attest and send it out")
        self.testasset = self.createTestAsset(client, player2)
        print("test asset id: " + str(self.testasset))
        print("Lets try to create an attest for a non-wormhole thing with a huge number of decimals")
        # paul - attestFromAlgorand
        sid = self.testAttest(client, player2, self.testasset)
        print("... track down the generated VAA")
        vaa = self.getVAA(client, player, sid, self.tokenid)
        v = self.parseVAA(bytes.fromhex(vaa))
        print("We got a " + v["Meta"])
        # pprint.pprint(self.getBalances(client, player.getAddress()))
        # pprint.pprint(self.getBalances(client, player2.getAddress()))
        # pprint.pprint(self.getBalances(client, player3.getAddress()))
        #
        # print("Lets transfer that asset to one of our other accounts... first lets create the vaa")
        # # paul - transferFromAlgorand
        # sid = self.transferAsset(client, player2, self.testasset, 100, player3.getAddress(), 8, 0)
        # print("... track down the generated VAA")
        # vaa = self.getVAA(client, player, sid, self.tokenid)
        # print(".. and lets pass that to player3")
        # self.submitVAA(bytes.fromhex(vaa), client, player3)
        #
        # pprint.pprint(self.getBalances(client, player.getAddress()))
        # pprint.pprint(self.getBalances(client, player2.getAddress()))
        # pprint.pprint(self.getBalances(client, player3.getAddress()))
        #
        # # Lets split it into two parts... the payload and the fee
        # print("Lets split it into two parts... the payload and the fee")
        # sid = self.transferAsset(client, player2, self.testasset, 1000, player3.getAddress(), 8, 500)
        # print("... track down the generated VAA")
        # vaa = self.getVAA(client, player, sid, self.tokenid)
        # print(".. and lets pass that to player3 with fees being passed to player acting as a relayer")
        # self.submitVAA(bytes.fromhex(vaa), client, player)
        #
        # pprint.pprint(self.getBalances(client, player.getAddress()))
        # pprint.pprint(self.getBalances(client, player2.getAddress()))
        # pprint.pprint(self.getBalances(client, player3.getAddress()))
        #
        # # Now it gets tricky, lets create a virgin account...
        # pk, addr = account.generate_account()
        # emptyAccount = Account(pk)
        #
        # print("How much is in the empty account? (" + addr + ")")
        # pprint.pprint(self.getBalances(client, emptyAccount.getAddress()))
        #
        # # paul - transferFromAlgorand
        # print("Lets transfer algo this time.... first lets create the vaa")
        # sid = self.transferAsset(client, player2, 0, 1000000, emptyAccount.getAddress(), 8, 0)
        # print("... track down the generated VAA")
        # vaa = self.getVAA(client, player, sid, self.tokenid)
        ## pprint.pprint(vaa)
        # print(".. and lets pass that to the empty account.. but use somebody else to relay since we cannot pay for it")
        #
        # # paul - redeemOnAlgorand
        # self.submitVAA(bytes.fromhex(vaa), client, player)
        #
        # print("=================================================")
        #
        # print("How much is in the source account now?")
        # pprint.pprint(self.getBalances(client, player2.getAddress()))
        #
        # print("How much is in the empty account now?")
        # pprint.pprint(self.getBalances(client, emptyAccount.getAddress()))
        #
        # print("How much is in the player3 account now?")
        # pprint.pprint(self.getBalances(client, player3.getAddress()))
        #
        # print("Lets transfer more algo.. splut 50/50 with the relayer.. going to player3")
        # sid = self.transferAsset(client, player2, 0, 1000000, player3.getAddress(), 8, 500000)
        # print("... track down the generated VAA")
        # vaa = self.getVAA(client, player, sid, self.tokenid)
        # print(".. and lets pass that to player3.. but use the previously empty account to relay it")
        # self.submitVAA(bytes.fromhex(vaa), client, emptyAccount)
        #
        # print("How much is in the source account now?")
        # pprint.pprint(self.getBalances(client, player2.getAddress()))
        #
        # print("How much is in the empty account now?")
        # pprint.pprint(self.getBalances(client, emptyAccount.getAddress()))
        #
        # print("How much is in the player3 account now?")
        # pprint.pprint(self.getBalances(client, player3.getAddress()))
        #
        # print("How about a payload3")
        # sid = self.transferAsset(client, player2, 0, 100, player3.getAddress(), 8, 0, b'hi mom')
        # print("... track down the generated VAA")
        # vaa = self.getVAA(client, player, sid, self.tokenid)
        #
        # print(".. and lets pass that to the wrong account")
        # try:
        #    self.submitVAA(bytes.fromhex(vaa), client, emptyAccount)
        # except:
        #    print("Exception thrown... nice")
        #
        # print(".. and lets pass that to the right account")
        # self.submitVAA(bytes.fromhex(vaa), client, player3)
        # print("player account: " + player.getAddress())
        # pprint.pprint(client.account_info(player.getAddress()))
        # print("player2 account: " + player2.getAddress())
        # pprint.pprint(client.account_info(player2.getAddress()))
        # print("foundation account: " + foundation.getAddress())
        # pprint.pprint(client.account_info(foundation.getAddress()))
        #
        # print("core app: " + get_application_address(self.coreid))
        # pprint.pprint(client.account_info(get_application_address(self.coreid))),
        #
        # print("token app: " + get_application_address(self.tokenid))
        # pprint.pprint(client.account_info(get_application_address(self.tokenid))),
        #
        # print("asset app: " + chain_addr)
        # pprint.pprint(client.account_info(chain_addr))
# Entry point: instantiate the harness and run the scenario end-to-end.
core = AlgoTest()
core.simple_test()

1
algorand/test/teal Symbolic link
View File

@ -0,0 +1 @@
../teal

786
algorand/test/test.py Normal file
View File

@ -0,0 +1,786 @@
# python3 -m pip install pycryptodomex uvarint pyteal web3 coincurve
import sys
sys.path.append("..")
from admin import PortalCore, Account
from gentest import GenTest
from base64 import b64decode
from typing import List, Tuple, Dict, Any, Optional, Union
import base64
import random
import time
import hashlib
import uuid
import json
from algosdk.v2client.algod import AlgodClient
from algosdk.kmd import KMDClient
from algosdk import account, mnemonic
from algosdk.encoding import decode_address, encode_address
from algosdk.future import transaction
from pyteal import compileTeal, Mode, Expr
from pyteal import *
from algosdk.logic import get_application_address
from vaa_verify import get_vaa_verify
from algosdk.future.transaction import LogicSig
from test_contract import get_test_app
from algosdk.v2client import indexer
import pprint
class AlgoTest(PortalCore):
    """Integration-test harness for the Algorand portal (core + token bridge) contracts."""

    def __init__(self) -> None:
        super().__init__()
def getBalances(self, client: AlgodClient, account: str) -> Dict[int, int]:
balances: Dict[int, int] = dict()
accountInfo = client.account_info(account)
# set key 0 to Algo balance
balances[0] = accountInfo["amount"]
assets: List[Dict[str, Any]] = accountInfo.get("assets", [])
for assetHolding in assets:
assetID = assetHolding["asset-id"]
amount = assetHolding["amount"]
balances[assetID] = amount
return balances
    def createTestApp(
        self,
        client: AlgodClient,
        sender: Account,
    ) -> int:
        """Deploy the test application and pre-fund its escrow address.

        Returns the new application id; asserts the creation transaction
        actually produced one.
        """
        approval, clear = get_test_app(client)
        globalSchema = transaction.StateSchema(num_uints=4, num_byte_slices=30)
        localSchema = transaction.StateSchema(num_uints=0, num_byte_slices=16)
        app_args = []
        txn = transaction.ApplicationCreateTxn(
            sender=sender.getAddress(),
            on_complete=transaction.OnComplete.NoOpOC,
            approval_program=b64decode(approval["result"]),
            clear_program=b64decode(clear["result"]),
            global_schema=globalSchema,
            local_schema=localSchema,
            app_args=app_args,
            sp=client.suggested_params(),
        )
        signedTxn = txn.sign(sender.getPrivateKey())
        client.send_transaction(signedTxn)
        response = self.waitForTransaction(client, signedTxn.get_txid())
        assert response.applicationIndex is not None and response.applicationIndex > 0
        # Fund the app's escrow account so it can meet minimum balance and
        # pay for inner transactions.
        txn = transaction.PaymentTxn(sender = sender.getAddress(), sp = client.suggested_params(),
                                     receiver = get_application_address(response.applicationIndex), amt = 300000)
        signedTxn = txn.sign(sender.getPrivateKey())
        client.send_transaction(signedTxn)
        return response.applicationIndex
def parseSeqFromLog(self, txn):
try:
return int.from_bytes(b64decode(txn.innerTxns[-1]["logs"][0]), "big")
except Exception as err:
pprint.pprint(txn.__dict__)
raise
    def getVAA(self, client, sender, sid, app):
        """Poll the indexer until the VAA for sequence `sid` emitted by `app`
        appears; returns the guardian-signed VAA produced by the test guardian
        set.  Raises Exception when called with sid == None."""
        if sid == None:
            raise Exception("getVAA called with a sid of None")
        saddr = get_application_address(app)
        # SOOO, we send a nop txn through to push the block forward
        # one
        # This is ONLY needed on a local net... the indexer will sit
        # on the last block for 30 to 60 seconds... we don't want this
        # log in prod since it is wasteful of gas
        if (self.INDEXER_ROUND > 512 and not self.args.testnet): # until they fix it
            print("indexer is broken in local net... stop/clean/restart the sandbox")
            sys.exit(0)
        txns = []
        txns.append(
            transaction.ApplicationCallTxn(
                sender=sender.getAddress(),
                index=self.tokenid,
                on_complete=transaction.OnComplete.NoOpOC,
                app_args=[b"nop"],
                sp=client.suggested_params(),
            )
        )
        self.sendTxn(client, sender, txns, False)
        # Lazily construct the indexer client on first use.
        if self.myindexer == None:
            print("indexer address: " + self.INDEXER_ADDRESS)
            self.myindexer = indexer.IndexerClient(indexer_token=self.INDEXER_TOKEN, indexer_address=self.INDEXER_ADDRESS)
        while True:
            nexttoken = ""
            while True:
                # Page through every notarized transaction since INDEXER_ROUND.
                response = self.myindexer.search_transactions( min_round=self.INDEXER_ROUND, note_prefix=self.NOTE_PREFIX, next_page=nexttoken)
                # pprint.pprint(response)
                for x in response["transactions"]:
                    # pprint.pprint(x)
                    for y in x["inner-txns"]:
                        # Skip inner txns that are not application calls.
                        if "application-transaction" not in y:
                            continue
                        if y["application-transaction"]["application-id"] != self.coreid:
                            continue
                        if len(y["logs"]) == 0:
                            continue
                        args = y["application-transaction"]["application-args"]
                        if len(args) < 2:
                            continue
                        if base64.b64decode(args[0]) != b'publishMessage':
                            continue
                        # The core contract logs the sequence number big-endian.
                        seq = int.from_bytes(base64.b64decode(y["logs"][0]), "big")
                        if seq != sid:
                            continue
                        if y["sender"] != saddr:
                            continue;
                        emitter = decode_address(y["sender"])
                        payload = base64.b64decode(args[1])
                        # pprint.pprint([seq, y["sender"], payload.hex()])
                        # sys.exit(0)
                        return self.gt.genVaa(emitter, seq, payload)
                if 'next-token' in response:
                    nexttoken = response['next-token']
                else:
                    # Page set exhausted; advance the cursor and retry.
                    self.INDEXER_ROUND = response['current-round'] + 1
                    break
            time.sleep(1)
def publishMessage(self, client, sender, vaa, appid):
aa = decode_address(get_application_address(appid)).hex()
emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
txns = []
sp = client.suggested_params()
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=appid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"test1", vaa, self.coreid],
foreign_apps = [self.coreid],
accounts=[emitter_addr],
sp=sp
)
a.fee = a.fee * 2
txns.append(a)
resp = self.sendTxn(client, sender, txns, True)
self.INDEXER_ROUND = resp.confirmedRound
return self.parseSeqFromLog(resp)
    def createTestAsset(self, client, sender):
        """Use the test app to create and mint a test ASA; returns its asset id."""
        txns = []
        # Fund the test app's escrow so it can create the ASA.
        a = transaction.PaymentTxn(
            sender = sender.getAddress(),
            sp = client.suggested_params(),
            receiver = get_application_address(self.testid),
            amt = 300000
        )
        txns.append(a)
        sp = client.suggested_params()
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.testid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"setup"],
            sp=sp
        )
        a.fee = a.fee * 2  # pays for the inner asset-create transaction
        txns.append(a)
        transaction.assign_group_id(txns)
        grp = []
        pk = sender.getPrivateKey()
        for t in txns:
            grp.append(t.sign(pk))
        client.send_transactions(grp)
        resp = self.waitForTransaction(client, grp[-1].get_txid())
        # The app logs the new asset id as big-endian bytes.
        aid = int.from_bytes(resp.__dict__["logs"][0], "big")
        print("Opting " + sender.getAddress() + " into " + str(aid))
        self.asset_optin(client, sender, aid, sender.getAddress())
        txns = []
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.testid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"mint"],
            foreign_assets = [aid],
            sp=sp  # NOTE(review): reuses the earlier suggested params -- confirm intentional
        )
        a.fee = a.fee * 2
        txns.append(a)
        resp = self.sendTxn(client, sender, txns, True)
        # self.INDEXER_ROUND = resp.confirmedRound
        return aid
def getCreator(self, client, sender, asset_id):
return client.asset_info(asset_id)["params"]["creator"]
    def testAttest(self, client, sender, asset_id):
        """Attest `asset_id` (0 == ALGO) through the token bridge; returns the
        sequence number of the resulting core-bridge message."""
        taddr = get_application_address(self.tokenid)
        aa = decode_address(taddr).hex()
        emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
        if asset_id != 0:
            creator = self.getCreator(client, sender, asset_id)
            c = client.account_info(creator)
            # Wrapped assets are created by an account rekeyed to the token bridge.
            wormhole = c.get("auth-addr") == taddr
        else:
            c = None
            wormhole = False
        if not wormhole:
            # Native asset: use (or create) the bridge's "native" holding account.
            creator = self.optin(client, sender, self.tokenid, asset_id, b"native".hex())
        txns = []
        sp = client.suggested_params()
        txns.append(transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.tokenid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"nop"],
            sp=sp
        ))
        # If the core bridge charges a message fee, pay it to the token bridge.
        mfee = self.getMessageFee()
        if (mfee > 0):
            txns.append(transaction.PaymentTxn(sender = sender.getAddress(), sp = sp, receiver = get_application_address(self.tokenid), amt = mfee))
        accts = [emitter_addr, creator, get_application_address(self.coreid)]
        if c != None:
            accts.append(c["address"])
        a = transaction.ApplicationCallTxn(
            sender=sender.getAddress(),
            index=self.tokenid,
            on_complete=transaction.OnComplete.NoOpOC,
            app_args=[b"attestToken", asset_id],
            foreign_apps = [self.coreid],
            foreign_assets = [asset_id],
            accounts=accts,
            sp=sp
        )
        # Extra fee covers the inner txns (one more when the fee payment is present).
        if (mfee > 0):
            a.fee = a.fee * 3
        else:
            a.fee = a.fee * 2
        txns.append(a)
        resp = self.sendTxn(client, sender, txns, True)
        # Point us at the correct round
        self.INDEXER_ROUND = resp.confirmedRound
        # print(encode_address(resp.__dict__["logs"][0]))
        # print(encode_address(resp.__dict__["logs"][1]))
        # pprint.pprint(resp.__dict__)
        return self.parseSeqFromLog(resp)
def transferFromAlgorand(self, client, sender, asset_id, quantity, receiver, chain, fee, payload = None):
# pprint.pprint(["transferFromAlgorand", asset_id, quantity, receiver, chain, fee])
taddr = get_application_address(self.tokenid)
aa = decode_address(taddr).hex()
emitter_addr = self.optin(client, sender, self.coreid, 0, aa)
# asset_id 0 is ALGO
if asset_id == 0:
wormhole = False
else:
creator = self.getCreator(client, sender, asset_id)
c = client.account_info(creator)
wormhole = c.get("auth-addr") == taddr
txns = []
mfee = self.getMessageFee()
if (mfee > 0):
txns.append(transaction.PaymentTxn(sender = sender.getAddress(), sp = sp, receiver = get_application_address(self.tokenid), amt = mfee))
if not wormhole:
creator = self.optin(client, sender, self.tokenid, asset_id, b"native".hex())
print("non wormhole account " + creator)
sp = client.suggested_params()
if (asset_id != 0) and (not self.asset_optin_check(client, sender, asset_id, creator)):
print("Looks like we need to optin")
txns.append(
transaction.PaymentTxn(
sender=sender.getAddress(),
receiver=creator,
amt=100000,
sp=sp
)
)
# The tokenid app needs to do the optin since it has signature authority
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.tokenid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"optin", asset_id],
foreign_assets = [asset_id],
accounts=[creator],
sp=sp
)
a.fee = a.fee * 2
txns.append(a)
self.sendTxn(client, sender, txns, False)
txns = []
txns.insert(0,
transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.tokenid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=[b"nop"],
sp=client.suggested_params(),
)
)
if asset_id == 0:
print("asset_id == 0")
txns.append(transaction.PaymentTxn(
sender=sender.getAddress(),
receiver=creator,
amt=quantity,
sp=sp,
))
accounts=[emitter_addr, creator, creator]
else:
print("asset_id != 0")
txns.append(
transaction.AssetTransferTxn(
sender = sender.getAddress(),
sp = sp,
receiver = creator,
amt = quantity,
index = asset_id
))
accounts=[emitter_addr, creator, c["address"]]
args = [b"sendTransfer", asset_id, quantity, decode_address(receiver), chain, fee]
if None != payload:
args.append(payload)
#pprint.pprint(args)
# print(self.tokenid)
a = transaction.ApplicationCallTxn(
sender=sender.getAddress(),
index=self.tokenid,
on_complete=transaction.OnComplete.NoOpOC,
app_args=args,
foreign_apps = [self.coreid],
foreign_assets = [asset_id],
accounts=accounts,
sp=sp
)
a.fee = a.fee * 2
txns.append(a)
resp = self.sendTxn(client, sender, txns, True)
self.INDEXER_ROUND = resp.confirmedRound
# pprint.pprint([self.coreid, self.tokenid, resp.__dict__,
# int.from_bytes(resp.__dict__["logs"][1], "big"),
# int.from_bytes(resp.__dict__["logs"][2], "big"),
# int.from_bytes(resp.__dict__["logs"][3], "big"),
# int.from_bytes(resp.__dict__["logs"][4], "big"),
# int.from_bytes(resp.__dict__["logs"][5], "big")
# ])
# print(encode_address(resp.__dict__["logs"][0]))
# print(encode_address(resp.__dict__["logs"][1]))
return self.parseSeqFromLog(resp)
def asset_optin_check(self, client, sender, asset, receiver):
if receiver not in self.asset_cache:
self.asset_cache[receiver] = {}
if asset in self.asset_cache[receiver]:
return True
ai = client.account_info(receiver)
if "assets" in ai:
for x in ai["assets"]:
if x["asset-id"] == asset:
self.asset_cache[receiver][asset] = True
return True
return False
    def asset_optin(self, client, sender, asset, receiver):
        """Opt `receiver` into `asset` via a zero-amount asset transfer; no-op
        when already opted in.

        NOTE(review): the transfer is signed by `sender`; an ASA opt-in needs
        txn sender == receiver, and callers here pass sender's own address --
        confirm before reusing with a different receiver.
        """
        if self.asset_optin_check(client, sender, asset, receiver):
            return
        pprint.pprint(["asset_optin", asset, receiver])
        sp = client.suggested_params()
        optin_txn = transaction.AssetTransferTxn(
            sender = sender.getAddress(),
            sp = sp,
            receiver = receiver,
            amt = 0,
            index = asset
        )
        transaction.assign_group_id([optin_txn])
        signed_optin = optin_txn.sign(sender.getPrivateKey())
        client.send_transactions([signed_optin])
        resp = self.waitForTransaction(client, signed_optin.get_txid())
        assert self.asset_optin_check(client, sender, asset, receiver), "The optin failed"
        print("woah! optin succeeded")
def simple_test(self):
    """End-to-end devnet scenario: deploys core + token bridge, upgrades the
    guardian set, registers chains, attests/transfers assets (including ALGO
    and a freshly created ASA), exercises relayer fee splits and payload-3
    transfers, and finally raises the message fee and verifies it is charged.

    Heavily order-dependent: `seq` is a shared, monotonically increasing VAA
    sequence number; many later steps depend on state created by earlier ones.
    """
    # Commented-out scratch/debug probes kept for reference:
    # q = bytes.fromhex(gt.genAssetMeta(gt.guardianPrivKeys, 1, 1, 1, bytes.fromhex("4523c3F29447d1f32AEa95BEBD00383c4640F1b4"), 1, 8, b"USDC", b"CircleCoin"))
    # pprint.pprint(self.parseVAA(q))
    # sys.exit(0)
    # vaa = self.parseVAA(bytes.fromhex("0100000001010001ca2fbf60ac6227d47dda4fe2e7bccc087f27d22170a212b9800da5b4cbf0d64c52deb2f65ce58be2267bf5b366437c267b5c7b795cd6cea1ac2fee8a1db3ad006225f801000000010001000000000000000000000000000000000000000000000000000000000000000400000000000000012000000000000000000000000000000000000000000000000000000000436f72650200000000000001beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"))
    # pprint.pprint(vaa)
    # vaa = self.parseVAA(bytes.fromhex("01000000010100c22ce0a3c995fca993cb0e91af74d745b6ec1a04b3adf0bb3e432746b3e2ab5e635b65d34d5148726cac10e84bf5932a7f21b9545c362bd512617aa980e0fbf40062607566000000010001000000000000000000000000000000000000000000000000000000000000000400000000000000012000000000000000000000000000000000000000000000000000000000436f72650200000000000101beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"))
    # pprint.pprint(vaa)
    # sys.exit(0)

    # --- environment / client setup ---
    gt = GenTest(True)
    self.gt = gt
    self.setup_args()
    if self.args.testnet:
        self.testnet()
    client = self.client = self.getAlgodClient()
    self.genTeal()
    # Compile the stateless VAA-verify program and wrap it as a LogicSig.
    self.vaa_verify = self.client.compile(get_vaa_verify())
    self.vaa_verify["lsig"] = LogicSig(base64.b64decode(self.vaa_verify["result"]))
    vaaLogs = []
    args = self.args
    if self.args.mnemonic:
        self.foundation = Account.FromMnemonic(self.args.mnemonic)
    if self.foundation == None:
        print("Generating the foundation account...")
        self.foundation = self.getTemporaryAccount(self.client)
    if self.foundation == None:
        print("We dont have a account? ")
        sys.exit(0)
    foundation = self.foundation
    # Use wall-clock seconds as the starting VAA sequence number so reruns
    # against the same chain state don't collide with old sequences.
    seq = int(time.time())

    # --- deploy the core bridge and bootstrap guardians ---
    print("Creating the PortalCore app")
    self.coreid = self.createPortalCoreApp(client=client, sender=foundation)
    print("coreid = " + str(self.coreid) + " " + get_application_address(self.coreid))
    print("bootstrapping the guardian set...")
    bootVAA = bytes.fromhex(gt.genGuardianSetUpgrade(gt.guardianPrivKeys, 1, 1, seq, seq))
    self.bootGuardians(bootVAA, client, foundation, self.coreid)
    seq += 1
    print("grabbing a untrusted account")
    player = self.getTemporaryAccount(client)
    print(player.getAddress())
    print("")
    bal = self.getBalances(client, player.getAddress())
    pprint.pprint(bal)

    # Guardian-set upgrade submitted by an account with no special rights --
    # authorization must come from the VAA signatures, not the submitter.
    print("upgrading the the guardian set using untrusted account...")
    upgradeVAA = bytes.fromhex(gt.genGuardianSetUpgrade(gt.guardianPrivKeys, 1, 2, seq, seq))
    vaaLogs.append(["guardianUpgrade", upgradeVAA.hex()])
    self.submitVAA(upgradeVAA, client, player, self.coreid)
    bal = self.getBalances(client, player.getAddress())
    pprint.pprint(bal)
    seq += 1

    # --- deploy the token bridge and upgrade both apps via VAAs ---
    print("Create the token bridge")
    self.tokenid = self.createTokenBridgeApp(client, foundation)
    print("token bridge " + str(self.tokenid) + " address " + get_application_address(self.tokenid))
    ret = self.devnetUpgradeVAA()
    # pprint.pprint(ret)
    print("Submitting core")
    self.submitVAA(bytes.fromhex(ret[0]), self.client, foundation, self.coreid)
    print("Submitting token")
    self.submitVAA(bytes.fromhex(ret[1]), self.client, foundation, self.tokenid)
    print("successfully sent upgrade requests")

    # --- register foreign chains 1..5 with the token bridge ---
    for r in range(1, 6):
        print("Registering chain " + str(r))
        v = gt.genRegisterChain(gt.guardianPrivKeys, 2, seq, seq, r)
        vaa = bytes.fromhex(v)
        # pprint.pprint((v, self.parseVAA(vaa)))
        if r == 2:
            vaaLogs.append(["registerChain", v])
        self.submitVAA(vaa, client, player, self.tokenid)
        seq += 1
        bal = self.getBalances(client, player.getAddress())
        pprint.pprint(bal)

    # --- attest a wrapped asset, then re-attest (update) and transfer it in ---
    print("Create a asset")
    attestVAA = bytes.fromhex(gt.genAssetMeta(gt.guardianPrivKeys, 2, seq, seq, bytes.fromhex("4523c3F29447d1f32AEa95BEBD00383c4640F1b4"), 1, 8, b"USDC", b"CircleCoin"))
    # paul - createWrappedOnAlgorand
    vaaLogs.append(["createWrappedOnAlgorand", attestVAA.hex()])
    self.submitVAA(attestVAA, client, player, self.tokenid)
    seq += 1
    p = self.parseVAA(attestVAA)
    chain_addr = self.optin(client, player, self.tokenid, p["FromChain"], p["Contract"])
    print("Create the same asset " + str(seq))
    # paul - updateWrappedOnAlgorand
    attestVAA = bytes.fromhex(gt.genAssetMeta(gt.guardianPrivKeys, 2, seq, seq, bytes.fromhex("4523c3F29447d1f32AEa95BEBD00383c4640F1b4"), 1, 8, b"USD2C", b"Circle2Coin"))
    self.submitVAA(attestVAA, client, player, self.tokenid)
    seq += 1
    print("Transfer the asset " + str(seq))
    transferVAA = bytes.fromhex(gt.genTransfer(gt.guardianPrivKeys, 1, 1, 1, 1, bytes.fromhex("4523c3F29447d1f32AEa95BEBD00383c4640F1b4"), 1, decode_address(player.getAddress()), 8, 0))
    # paul - redeemOnAlgorand
    vaaLogs.append(["redeemOnAlgorand", transferVAA.hex()])
    self.submitVAA(transferVAA, client, player, self.tokenid)
    seq += 1
    aid = client.account_info(player.getAddress())["assets"][0]["asset-id"]
    print("generate an attest of the asset we just received: " + str(aid))
    # paul - attestFromAlgorand
    self.testAttest(client, player, aid)

    # --- test app, message publishing, and a native (non-wormhole) ASA ---
    print("Create the test app we will use to torture ourselves using a new player")
    player2 = self.getTemporaryAccount(client)
    print("player2 address " + player2.getAddress())
    player3 = self.getTemporaryAccount(client)
    print("player3 address " + player3.getAddress())
    self.testid = self.createTestApp(client, player2)
    print("testid " + str(self.testid) + " address " + get_application_address(self.testid))
    print("Sending a message payload to the core contract")
    sid = self.publishMessage(client, player, b"you also suck", self.testid)
    self.publishMessage(client, player2, b"second suck", self.testid)
    self.publishMessage(client, player3, b"last message", self.testid)
    print("Lets create a brand new non-wormhole asset and try to attest and send it out")
    self.testasset = self.createTestAsset(client, player2)
    print("test asset id: " + str(self.testasset))
    print("Now lets create an attest of ALGO")
    # Asset id 0 == native ALGO.
    sid = self.testAttest(client, player2, 0)
    vaa = self.getVAA(client, player, sid, self.tokenid)
    v = self.parseVAA(bytes.fromhex(vaa))
    print("We got a " + v["Meta"])
    print("Lets try to create an attest for a non-wormhole thing with a huge number of decimals")
    # paul - attestFromAlgorand
    sid = self.testAttest(client, player2, self.testasset)
    print("... track down the generated VAA")
    vaa = self.getVAA(client, player, sid, self.tokenid)
    v = self.parseVAA(bytes.fromhex(vaa))
    print("We got a " + v["Meta"])
    pprint.pprint(self.getBalances(client, player.getAddress()))
    pprint.pprint(self.getBalances(client, player2.getAddress()))
    pprint.pprint(self.getBalances(client, player3.getAddress()))

    # --- round-trip the native asset, with and without a relayer fee ---
    print("Lets transfer that asset to one of our other accounts... first lets create the vaa")
    # paul - transferFromAlgorand
    sid = self.transferFromAlgorand(client, player2, self.testasset, 100, player3.getAddress(), 8, 0)
    print("... track down the generated VAA")
    vaa = self.getVAA(client, player, sid, self.tokenid)
    print(".. and lets pass that to player3")
    vaaLogs.append(["transferFromAlgorand", vaa])
    #pprint.pprint(vaaLogs)
    self.submitVAA(bytes.fromhex(vaa), client, player3, self.tokenid)
    pprint.pprint(["player", self.getBalances(client, player.getAddress())])
    pprint.pprint(["player2", self.getBalances(client, player2.getAddress())])
    pprint.pprint(["player3", self.getBalances(client, player3.getAddress())])
    # Lets split it into two parts... the payload and the fee
    print("Lets split it into two parts... the payload and the fee (400 should go to player, 600 should go to player3)")
    sid = self.transferFromAlgorand(client, player2, self.testasset, 1000, player3.getAddress(), 8, 400)
    print("... track down the generated VAA")
    vaa = self.getVAA(client, player, sid, self.tokenid)
    # pprint.pprint(self.parseVAA(bytes.fromhex(vaa)))
    print(".. and lets pass that to player3 with fees being passed to player acting as a relayer (" + str(self.tokenid) + ")")
    self.submitVAA(bytes.fromhex(vaa), client, player, self.tokenid)
    pprint.pprint(["player", self.getBalances(client, player.getAddress())])
    pprint.pprint(["player2", self.getBalances(client, player2.getAddress())])
    pprint.pprint(["player3", self.getBalances(client, player3.getAddress())])
    # sys.exit(0)

    # --- relaying into a brand-new (unfunded) account ---
    # Now it gets tricky, lets create a virgin account...
    pk, addr = account.generate_account()
    emptyAccount = Account(pk)
    print("How much is in the empty account? (" + addr + ")")
    pprint.pprint(self.getBalances(client, emptyAccount.getAddress()))
    # paul - transferFromAlgorand
    print("Lets transfer algo this time.... first lets create the vaa")
    sid = self.transferFromAlgorand(client, player2, 0, 1000000, emptyAccount.getAddress(), 8, 0)
    print("... track down the generated VAA")
    vaa = self.getVAA(client, player, sid, self.tokenid)
    # pprint.pprint(vaa)
    print(".. and lets pass that to the empty account.. but use somebody else to relay since we cannot pay for it")
    # paul - redeemOnAlgorand
    self.submitVAA(bytes.fromhex(vaa), client, player, self.tokenid)
    print("=================================================")
    print("How much is in the source account now?")
    pprint.pprint(self.getBalances(client, player2.getAddress()))
    print("How much is in the empty account now?")
    pprint.pprint(self.getBalances(client, emptyAccount.getAddress()))
    print("How much is in the player3 account now?")
    pprint.pprint(self.getBalances(client, player3.getAddress()))
    print("Lets transfer more algo.. split 40/60 with the relayer.. going to player3")
    sid = self.transferFromAlgorand(client, player2, 0, 1000000, player3.getAddress(), 8, 400000)
    print("... track down the generated VAA")
    vaa = self.getVAA(client, player, sid, self.tokenid)
    print(".. and lets pass that to player3.. but use the previously empty account to relay it")
    self.submitVAA(bytes.fromhex(vaa), client, emptyAccount, self.tokenid)
    print("How much is in the source account now?")
    pprint.pprint(self.getBalances(client, player2.getAddress()))
    print("How much is in the empty account now?")
    pprint.pprint(self.getBalances(client, emptyAccount.getAddress()))
    print("How much is in the player3 account now?")
    pprint.pprint(self.getBalances(client, player3.getAddress()))

    # --- payload-3 transfer: deliver funds + payload to a contract ---
    print("How about a payload3")
    sid = self.transferFromAlgorand(client, player2, 0, 100, get_application_address(self.testid), 8, 0, self.testid.to_bytes(8, "big")+b'hi mom')
    print("... track down the generated VAA")
    vaa = self.getVAA(client, player, sid, self.tokenid)
    print("testid balance before = ", self.getBalances(client, get_application_address(self.testid)))
    print(".. Lets let player3 relay it for us")
    self.submitVAA(bytes.fromhex(vaa), client, player3, self.tokenid)
    print("testid balance after = ", self.getBalances(client, get_application_address(self.testid)))

    # --- raise the core message fee and confirm it gets collected ---
    print(".. Ok, now it is time to up the message fees")
    bal = self.getBalances(client, get_application_address(self.coreid))
    print("core contract has " + str(bal) + " algo (" + get_application_address(self.coreid) + ")")
    print("core contract has a MessageFee set to " + str(self.getMessageFee()))
    seq += 1
    v = gt.genGSetFee(gt.guardianPrivKeys, 2, seq, seq, 2000000)
    self.submitVAA(bytes.fromhex(v), client, player, self.coreid)
    seq += 1
    print("core contract now has a MessageFee set to " + str(self.getMessageFee()))
    # v = gt.genGSetFee(gt.guardianPrivKeys, 2, seq, seq, 0)
    # self.submitVAA(bytes.fromhex(v), client, player, self.coreid)
    # seq += 1
    # print("core contract is back to " + str(self.getMessageFee()))
    print("Generating an attest.. This will cause a message to get published .. which should cause fees to get sent to the core contract")
    sid = self.testAttest(client, player2, self.testasset)
    print("... track down the generated VAA")
    vaa = self.getVAA(client, player, sid, self.tokenid)
    v = self.parseVAA(bytes.fromhex(vaa))
    print("We got a " + v["Meta"])
    bal = self.getBalances(client, get_application_address(self.coreid))
    print("core contract has " + str(bal) + " algo (" + get_application_address(self.coreid) + ")")
    # print("player account: " + player.getAddress())
    # pprint.pprint(client.account_info(player.getAddress()))
    # print("player2 account: " + player2.getAddress())
    # pprint.pprint(client.account_info(player2.getAddress()))
    # print("foundation account: " + foundation.getAddress())
    # pprint.pprint(client.account_info(foundation.getAddress()))
    #
    # print("core app: " + get_application_address(self.coreid))
    # pprint.pprint(client.account_info(get_application_address(self.coreid))),
    #
    # print("token app: " + get_application_address(self.tokenid))
    # pprint.pprint(client.account_info(get_application_address(self.tokenid))),
    #
    # print("asset app: " + chain_addr)
    # pprint.pprint(client.account_info(chain_addr))
if __name__ == "__main__":
    # Script entry point: run the full end-to-end devnet test scenario.
    AlgoTest().simple_test()

126
algorand/test/test.ts Normal file
View File

@ -0,0 +1,126 @@
const algosdk = require('algosdk');
const TestLib = require('./testlib.ts')
const testLib = new TestLib.TestLib()
const fs = require('fs');
const path = require('path');
import {
getAlgoClient,
submitVAA,
submitVAAHdr,
simpleSignVAA,
getIsTransferCompletedAlgorand,
parseVAA,
CORE_ID,
TOKEN_BRIDGE_ID,
attestFromAlgorand,
AccountToSigner
} from "@certusone/wormhole-sdk/lib/cjs/algorand/Algorand";
import {
hexStringToUint8Array,
uint8ArrayToHexString,
} from "@certusone/wormhole-sdk/lib/cjs/algorand/TmplSig";
import {
getTempAccounts,
} from "@certusone/wormhole-sdk/lib/cjs/algorand/Helpers";
// Devnet guardian public key (Ethereum-style address hex, no 0x prefix).
const guardianKeys = [
  "beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"
]
// Matching secp256k1 private key -- well-known devnet key, not a real secret.
const guardianPrivKeys = [
  "cfb12303a19cde580bb4dd771639b0d26bc68353645571a8cff516ab2ee113a0"
]
// Ad-hoc devnet test driver for the Algorand wormhole SDK bindings.
// NOTE(review): contains `process.exit(0)` calls mid-flow (debug scaffolding),
// so most of runTests() is currently unreachable.
class AlgoTests {
  constructor() {
  }
  // Runs the (partially disabled) test sequence against a local devnet.
  async runTests() {
    // Wall-clock seconds double as the VAA sequence number to avoid collisions.
    let seq = Math.floor(new Date().getTime() / 1000.0);
    // let t = "01000000000100bc942f5b6da266078844b26cb01bb541e0b5963da5bae9aadfe717ed5376efa711224796fc9e893dbf6f19ef6472a62f9af9241ece016e42da8a076bbf1ffe3c006250770b625077090001000000000000000000000000000000000000000000000000000000000000000400000000625077092000000000000000000000000000000000000000000000000000000000436f72650200000000000101beFA429d57cD18b7F8A4d91A29AB4AF05d0FBe"
    // console.log(t)
    // console.log(parseVAA(hexStringToUint8Array(t)))
    // process.exit(0)
    console.log("test start");
    let client = getAlgoClient();
    let accounts = await getTempAccounts();
    let player = AccountToSigner(accounts[0])
    console.log("attesting some ALGO");
    // Asset id 0 == native ALGO.
    console.log(await attestFromAlgorand(client, player, 0))
    // NOTE(review): early exit -- everything below this line never runs.
    process.exit(0);
    // let t = testLib.genTransfer(guardianPrivKeys, 1, 1, 1, 1, "4523c3F29447d1f32AEa95BEBD00383c4640F1b4", 2, uint8ArrayToHexString(algosdk.decodeAddress(player.addr).publicKey, false), 8, 0)
    // console.log(t)
    // console.log(parseVAA(hexStringToUint8Array(t)))
    // process.exit(0)
    console.log("seq = ", seq);
    console.log("XXX upgrading the the guardian set using untrusted account...", seq)
    let upgradeVAA = testLib.genGuardianSetUpgrade(guardianPrivKeys, 0, 1, seq, seq, guardianKeys)
    console.log(upgradeVAA)
    console.log(parseVAA(hexStringToUint8Array(upgradeVAA)))
    let vaa = hexStringToUint8Array(upgradeVAA);
    // The VAA must not read as "completed" before submission...
    if (await getIsTransferCompletedAlgorand(client, vaa, CORE_ID, player) != false) {
      console.log("assert failed 1");
      process.exit(-1);
    }
    await submitVAA(vaa, client, player, CORE_ID)
    // ...and must read as "completed" afterwards.
    if (await getIsTransferCompletedAlgorand(client, vaa, CORE_ID, player) != true) {
      console.log("assert failed 2");
      process.exit(-1);
    }
    process.exit(0)
    seq = seq + 1
    console.log("XXX upgrading again...", seq)
    upgradeVAA = testLib.genGuardianSetUpgrade(guardianPrivKeys, 1, 2, seq, seq, guardianKeys)
    console.log(upgradeVAA)
    await submitVAA(hexStringToUint8Array(upgradeVAA), client, player, CORE_ID)
    seq = seq + 1
    console.log("XXX registering chain 2", seq)
    let reg = testLib.genRegisterChain(guardianPrivKeys, 2, 1, seq, 2)
    console.log(reg)
    await submitVAA(hexStringToUint8Array(reg), client, player, TOKEN_BRIDGE_ID)
    seq = seq + 1
    console.log("XXX gen asset meta", seq)
    let a = testLib.genAssetMeta(guardianPrivKeys, 2, seq, seq, "4523c3F29447d1f32AEa95BEBD00383c4640F1b4", 2, 8, "USDC", "CircleCoin")
    console.log(a)
    await submitVAA(hexStringToUint8Array(a), client, player, TOKEN_BRIDGE_ID)
    seq = seq + 1
    console.log("XXX Transfer the asset ")
    let transferVAA = testLib.genTransfer(guardianPrivKeys, 2, 1, seq, 1, "4523c3F29447d1f32AEa95BEBD00383c4640F1b4", 2, uint8ArrayToHexString(algosdk.decodeAddress(player.addr).publicKey, false), 8, 0)
    await submitVAA(hexStringToUint8Array(transferVAA), client, player, TOKEN_BRIDGE_ID)
    seq = seq + 1
    console.log("test complete");
  }
};
// Kick off the test suite immediately on module load.
const runner = new AlgoTests();
runner.runTests();

133
algorand/test/test2.ts Normal file
View File

@ -0,0 +1,133 @@
const algosdk = require('algosdk');
const TestLib = require('./testlib.ts')
const testLib = new TestLib.TestLib()
const fs = require('fs');
const path = require('path');
import {
getAlgoClient,
submitVAA,
submitVAAHdr,
simpleSignVAA,
getIsTransferCompletedAlgorand,
parseVAA,
CORE_ID,
TOKEN_BRIDGE_ID
} from "@certusone/wormhole-sdk/lib/cjs/algorand/Algorand";
import {
hexStringToUint8Array,
uint8ArrayToHexString,
} from "@certusone/wormhole-sdk/lib/cjs/algorand/TmplSig";
import {
getTempAccounts,
} from "@certusone/wormhole-sdk/lib/cjs/algorand/Helpers";
// Devnet guardian public key (Ethereum-style address hex, no 0x prefix).
const guardianKeys = [
  "beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"
]
// Matching secp256k1 private key -- well-known devnet key, not a real secret.
const guardianPrivKeys = [
  "cfb12303a19cde580bb4dd771639b0d26bc68353645571a8cff516ab2ee113a0"
]
// Second ad-hoc devnet test driver (variant of test.ts).
// NOTE(review): `process.exit(0)` calls mid-flow leave most of runTests()
// unreachable -- debug scaffolding left in place.
class AlgoTests {
  constructor() {
  }
  // Runs the (partially disabled) test sequence against a local devnet.
  async runTests() {
    // Wall-clock seconds double as the VAA sequence number to avoid collisions.
    let seq = Math.floor(new Date().getTime() / 1000.0);
    // let t = "01000000000100bc942f5b6da266078844b26cb01bb541e0b5963da5bae9aadfe717ed5376efa711224796fc9e893dbf6f19ef6472a62f9af9241ece016e42da8a076bbf1ffe3c006250770b625077090001000000000000000000000000000000000000000000000000000000000000000400000000625077092000000000000000000000000000000000000000000000000000000000436f72650200000000000101beFA429d57cD18b7F8A4d91A29AB4AF05d0FBe"
    // console.log(t)
    // console.log(parseVAA(hexStringToUint8Array(t)))
    // process.exit(0)
    console.log("test start");
    let client = getAlgoClient();
    let accounts = await getTempAccounts();
    let player = accounts[0]
    let t = testLib.genAssetMeta(guardianPrivKeys, 0, seq, seq, "4523c3F29447d1f32AEa95BEBD00383c4640F1b4", 1, 8, "USDC", "CircleCoin")
    console.log(t)
    console.log(parseVAA(hexStringToUint8Array(t)))
    await submitVAA(hexStringToUint8Array(t), client, player, TOKEN_BRIDGE_ID)
    // NOTE(review): early exit -- everything below this line never runs.
    process.exit(0)
    // vaaLogs.append(["createWrappedOnAlgorand", attestVAA.hex()])
    // self.submitVAA(attestVAA, client, player, self.tokenid)
    t = testLib.genTransfer(guardianPrivKeys, 1, 1, 1, 1, "4523c3F29447d1f32AEa95BEBD00383c4640F1b4", 2, uint8ArrayToHexString(algosdk.decodeAddress(player.addr).publicKey, false), 8, 0)
    console.log(t)
    console.log(parseVAA(hexStringToUint8Array(t)))
    process.exit(0)
    console.log("seq = ", seq);
    console.log("XXX upgrading the the guardian set using untrusted account...", seq)
    let upgradeVAA = testLib.genGuardianSetUpgrade(guardianPrivKeys, 0, 1, seq, seq, guardianKeys)
    console.log(upgradeVAA)
    console.log(parseVAA(hexStringToUint8Array(upgradeVAA)))
    let vaa = hexStringToUint8Array(upgradeVAA);
    // The VAA must not read as "completed" before submission...
    if (await getIsTransferCompletedAlgorand(client, vaa, CORE_ID, player) != false) {
      console.log("assert failed 1");
      process.exit(-1);
    }
    await submitVAA(vaa, client, player, CORE_ID)
    // ...and must read as "completed" afterwards.
    if (await getIsTransferCompletedAlgorand(client, vaa, CORE_ID, player) != true) {
      console.log("assert failed 2");
      process.exit(-1);
    }
    process.exit(0)
    seq = seq + 1
    console.log("XXX upgrading again...", seq)
    upgradeVAA = testLib.genGuardianSetUpgrade(guardianPrivKeys, 1, 2, seq, seq, guardianKeys)
    console.log(upgradeVAA)
    await submitVAA(hexStringToUint8Array(upgradeVAA), client, player, CORE_ID)
    seq = seq + 1
    console.log("XXX registering chain 2", seq)
    let reg = testLib.genRegisterChain(guardianPrivKeys, 2, 1, seq, 2)
    console.log(reg)
    await submitVAA(hexStringToUint8Array(reg), client, player, TOKEN_BRIDGE_ID)
    seq = seq + 1
    console.log("XXX gen asset meta", seq)
    let a = testLib.genAssetMeta(guardianPrivKeys, 2, seq, seq, "4523c3F29447d1f32AEa95BEBD00383c4640F1b4", 2, 8, "USDC", "CircleCoin")
    console.log(a)
    await submitVAA(hexStringToUint8Array(a), client, player, TOKEN_BRIDGE_ID)
    seq = seq + 1
    console.log("XXX Transfer the asset ")
    let transferVAA = testLib.genTransfer(guardianPrivKeys, 2, 1, seq, 1, "4523c3F29447d1f32AEa95BEBD00383c4640F1b4", 2, uint8ArrayToHexString(algosdk.decodeAddress(player.addr).publicKey, false), 8, 0)
    await submitVAA(hexStringToUint8Array(transferVAA), client, player, TOKEN_BRIDGE_ID)
    seq = seq + 1
    console.log("test complete");
  }
};
// Kick off the test suite immediately on module load.
const runner = new AlgoTests();
runner.runTests();

View File

@ -0,0 +1,163 @@
#!/usr/bin/python3
"""
Copyright 2022 Wormhole Project Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import List, Tuple, Dict, Any, Optional, Union
from pyteal.ast import *
from pyteal.types import *
from pyteal.compiler import *
from pyteal.ir import *
from globals import *
from inlineasm import *
from algosdk.v2client.algod import AlgodClient
from TmplSig import TmplSig
from local_blob import LocalBlob
import sys
def fullyCompileContract(client: AlgodClient, contract: Expr) -> Dict[str, Any]:
    """Compile a PyTEAL application expression to TEAL (version 6) via algod.

    Returns the raw algod compile response (a dict containing the base64
    "result" and program "hash"), not raw bytes -- the previous `-> bytes`
    annotation was inaccurate.
    """
    teal = compileTeal(contract, mode=Mode.Application, version=6)
    response = client.compile(teal)
    return response
def clear_app():
    """Clear-state program: unconditionally approve."""
    return Int(1)
def approve_app():
    """Approval program for the torture-test application.

    NoOp calls are dispatched on the first application argument:
      nop              -- approve and do nothing
      test1            -- proxy a publishMessage inner call to the core app
      setup            -- create the test ASA and record its id in global state
      mint             -- send 100000 units of the test ASA to the caller
      completeTransfer -- approve (payload-3 delivery hook; no-op here)
    Create/Update/Delete/OptIn are all unconditionally approved (test-only app).
    """
    # NOTE(review): `me` is computed but never used below.
    me = Global.current_application_address()

    def nop():
        return Seq([Approve()])

    def test1():
        # Look! a proxy contract that sends message to the core
        return Seq(
            InnerTxnBuilder.Begin(),
            InnerTxnBuilder.SetFields(
                {
                    TxnField.type_enum: TxnType.ApplicationCall,
                    # Core app id arrives as the third argument, big-endian bytes.
                    TxnField.application_id: Btoi(Txn.application_args[2]),
                    TxnField.application_args: [Bytes("publishMessage"), Txn.application_args[1]],
                    TxnField.accounts: [Txn.accounts[1]],
                    TxnField.note: Bytes("publishMessage"),
                    # Fee 0: the outer transaction must cover the inner fee.
                    TxnField.fee: Int(0),
                }
            ),
            InnerTxnBuilder.Submit(),
            Approve()
        )

    def setup():
        aid = ScratchVar()
        return Seq([
            # Create a test asset
            InnerTxnBuilder.Begin(),
            InnerTxnBuilder.SetFields(
                {
                    TxnField.sender: Global.current_application_address(),
                    TxnField.type_enum: TxnType.AssetConfig,
                    TxnField.config_asset_name: Bytes("TestAsset"),
                    TxnField.config_asset_unit_name: Bytes("testAsse"),
                    TxnField.config_asset_total: Int(int(1e17)),
                    TxnField.config_asset_decimals: Int(10),
                    TxnField.config_asset_manager: Global.current_application_address(),
                    TxnField.config_asset_reserve: Global.current_application_address(),
                    # We cannot freeze or clawback assets... per the spirit of
                    TxnField.config_asset_freeze: Global.zero_address(),
                    TxnField.config_asset_clawback: Global.zero_address(),
                    TxnField.fee: Int(0),
                }
            ),
            InnerTxnBuilder.Submit(),
            # Store the new asset id (as 8 big-endian bytes) in global state
            # and log it so the caller can recover it from the tx results.
            aid.store(Itob(InnerTxn.created_asset_id())),
            App.globalPut(Bytes("asset"), aid.load()),
            Log(aid.load()),
            Approve()
        ])

    def completeTransfer():
        return Seq([
            Approve()
        ])

    def mint():
        return Seq([
            InnerTxnBuilder.Begin(),
            InnerTxnBuilder.SetFields(
                {
                    TxnField.sender: Global.current_application_address(),
                    TxnField.type_enum: TxnType.AssetTransfer,
                    # Asset id was stored as bytes by setup(); convert back.
                    TxnField.xfer_asset: Btoi(App.globalGet(Bytes("asset"))),
                    TxnField.asset_amount: Int(100000),
                    TxnField.asset_receiver: Txn.sender(),
                    TxnField.fee: Int(0),
                }
            ),
            InnerTxnBuilder.Submit(),
            Approve()
        ])

    # Method router keyed on the first application argument.
    METHOD = Txn.application_args[0]
    router = Cond(
        [METHOD == Bytes("nop"), nop()],
        [METHOD == Bytes("test1"), test1()],
        [METHOD == Bytes("setup"), setup()],
        [METHOD == Bytes("mint"), mint()],
        [METHOD == Bytes("completeTransfer"), completeTransfer()],
    )
    # Lifecycle handlers: everything is allowed (test-only application).
    on_create = Seq( [
        Return(Int(1))
    ])
    on_update = Seq( [
        Return(Int(1))
    ] )
    on_delete = Seq( [
        Return(Int(1))
    ] )
    on_optin = Seq( [
        Return(Int(1))
    ] )
    return Cond(
        [Txn.application_id() == Int(0), on_create],
        [Txn.on_completion() == OnComplete.UpdateApplication, on_update],
        [Txn.on_completion() == OnComplete.DeleteApplication, on_delete],
        [Txn.on_completion() == OnComplete.OptIn, on_optin],
        [Txn.on_completion() == OnComplete.NoOp, router]
    )
def get_test_app(client: AlgodClient) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """Compile the test app and return (approval, clear) programs.

    Each element is the algod compile-response dict produced by
    fullyCompileContract (not raw bytes -- the previous `Tuple[bytes, bytes]`
    annotation was inaccurate).
    """
    APPROVAL_PROGRAM = fullyCompileContract(client, approve_app())
    CLEAR_STATE_PROGRAM = fullyCompileContract(client, clear_app())
    return APPROVAL_PROGRAM, CLEAR_STATE_PROGRAM

495
algorand/test/testlib.ts Normal file
View File

@ -0,0 +1,495 @@
/**
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import {
ChainId,
CHAIN_ID_AVAX,
CHAIN_ID_BSC,
CHAIN_ID_ETH,
CHAIN_ID_FANTOM,
CHAIN_ID_OASIS,
CHAIN_ID_POLYGON,
CHAIN_ID_SOLANA,
CHAIN_ID_TERRA,
} from "@certusone/wormhole-sdk";
const web3EthAbi = require("web3-eth-abi");
const web3Utils = require("web3-utils");
const elliptic = require("elliptic");
// Helper library for fabricating guardian-signed VAAs in devnet tests.
export class TestLib {
  // 64 hex characters of zeros (32 zero bytes), used for payload padding.
  zeroBytes: string;
  // Well-known single-guardian devnet keypair (not a real secret).
  singleGuardianKey: string[] = ["beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"];
  singleGuardianPrivKey: string[] = [
    "cfb12303a19cde580bb4dd771639b0d26bc68353645571a8cff516ab2ee113a0",
  ];
  constructor() {
    this.zeroBytes =
      "0000000000000000000000000000000000000000000000000000000000000000";
  }
hexStringToUint8Array(hs: string): Uint8Array {
if (hs.length % 2 === 1) {
// prepend a 0
hs = "0" + hs;
}
const buf = Buffer.from(hs, "hex");
const retval = Uint8Array.from(buf);
return retval;
}
uint8ArrayToHexString(arr: Uint8Array, add0x: boolean) {
const ret: string = Buffer.from(arr).toString("hex");
if (!add0x) {
return ret;
}
return "0x" + ret;
}
  // 19-guardian devnet set: public keys (Ethereum-style addresses, no 0x)...
  guardianKeys: string[] = [
    "52A26Ce40F8CAa8D36155d37ef0D5D783fc614d2",
    "389A74E8FFa224aeAD0778c786163a7A2150768C",
    "B4459EA6482D4aE574305B239B4f2264239e7599",
    "072491bd66F63356090C11Aae8114F5372aBf12B",
    "51280eA1fd2B0A1c76Ae29a7d54dda68860A2bfF",
    "fa9Aa60CfF05e20E2CcAA784eE89A0A16C2057CB",
    "e42d59F8FCd86a1c5c4bA351bD251A5c5B05DF6A",
    "4B07fF9D5cE1A6ed58b6e9e7d6974d1baBEc087e",
    "c8306B84235D7b0478c61783C50F990bfC44cFc0",
    "C8C1035110a13fe788259A4148F871b52bAbcb1B",
    "58A2508A20A7198E131503ce26bBE119aA8c62b2",
    "8390820f04ddA22AFe03be1c3bb10f4ba6CF94A0",
    "1FD6e97387C34a1F36DE0f8341E9D409E06ec45b",
    "255a41fC2792209CB998A8287204D40996df9E54",
    "bA663B12DD23fbF4FbAC618Be140727986B3BBd0",
    "79040E577aC50486d0F6930e160A5C75FD1203C6",
    "3580D2F00309A9A85efFAf02564Fc183C0183A96",
    "3869795913D3B6dBF3B24a1C7654672c69A23c35",
    "1c0Cc52D7673c52DE99785741344662F5b2308a0",
  ];
  // ...and the matching secp256k1 private keys (devnet only, index-aligned).
  guardianPrivKeys: string[] = [
    "563d8d2fd4e701901d3846dee7ae7a92c18f1975195264d676f8407ac5976757",
    "8d97f25916a755df1d9ef74eb4dbebc5f868cb07830527731e94478cdc2b9d5f",
    "9bd728ad7617c05c31382053b57658d4a8125684c0098f740a054d87ddc0e93b",
    "5a02c4cd110d20a83a7ce8d1a2b2ae5df252b4e5f6781c7855db5cc28ed2d1b4",
    "93d4e3b443bf11f99a00901222c032bd5f63cf73fc1bcfa40829824d121be9b2",
    "ea40e40c63c6ff155230da64a2c44fcd1f1c9e50cacb752c230f77771ce1d856",
    "87eaabe9c27a82198e618bca20f48f9679c0f239948dbd094005e262da33fe6a",
    "61ffed2bff38648a6d36d6ed560b741b1ca53d45391441124f27e1e48ca04770",
    "bd12a242c6da318fef8f98002efb98efbf434218a78730a197d981bebaee826e",
    "20d3597bb16525b6d09e5fb56feb91b053d961ab156f4807e37d980f50e71aff",
    "344b313ffbc0199ff6ca08cacdaf5dc1d85221e2f2dc156a84245bd49b981673",
    "848b93264edd3f1a521274ca4da4632989eb5303fd15b14e5ec6bcaa91172b05",
    "c6f2046c1e6c172497fc23bd362104e2f4460d0f61984938fa16ef43f27d93f6",
    "693b256b1ee6b6fb353ba23274280e7166ab3be8c23c203cc76d716ba4bc32bf",
    "13c41508c0da03018d61427910b9922345ced25e2bbce50652e939ee6e5ea56d",
    "460ee0ee403be7a4f1eb1c63dd1edaa815fbaa6cf0cf2344dcba4a8acf9aca74",
    "b25148579b99b18c8994b0b86e4dd586975a78fa6e7ad6ec89478d7fbafd2683",
    "90d7ac6a82166c908b8cf1b352f3c9340a8d1f2907d7146fb7cd6354a5436cca",
    "b71d23908e4cf5d6cd973394f3a4b6b164eb1065785feee612efdfd8d30005ed",
  ];
encoder(type: string, val: any) {
if (type == "uint8")
return web3EthAbi.encodeParameter("uint8", val).substring(2 + (64 - 2));
if (type == "uint16")
return web3EthAbi.encodeParameter("uint16", val).substring(2 + (64 - 4));
if (type == "uint32")
return web3EthAbi.encodeParameter("uint32", val).substring(2 + (64 - 8));
if (type == "uint64")
return web3EthAbi.encodeParameter("uint64", val).substring(2 + (64 - 16));
if (type == "uint128")
return web3EthAbi
.encodeParameter("uint128", val)
.substring(2 + (64 - 32));
if (type == "uint256" || type == "bytes32")
return web3EthAbi.encodeParameter(type, val).substring(2 + (64 - 64));
}
  // Numeric char code of the first character (e.g. "C" -> 67).
  ord(c: any) {
    return c.charCodeAt(0);
  }
  // Build a signed governance VAA that upgrades the guardian set to
  // `targetSet` containing `guardianKeys`, signed by `signers` of
  // set index `guardianSet`.
  genGuardianSetUpgrade(
    signers: any,
    guardianSet: number,
    targetSet: number,
    nonce: number,
    seq: number,
    guardianKeys: string[]
  ): string {
    // Payload: 28 zero bytes + "Core" module + action 2 (set upgrade)
    // + chain 0 (all chains) + new set index + key count + the keys.
    const b = [
      "0x",
      this.zeroBytes.slice(0, 28 * 2),
      this.encoder("uint8", this.ord("C")),
      this.encoder("uint8", this.ord("o")),
      this.encoder("uint8", this.ord("r")),
      this.encoder("uint8", this.ord("e")),
      this.encoder("uint8", 2),
      this.encoder("uint16", 0),
      this.encoder("uint32", targetSet),
      this.encoder("uint8", guardianKeys.length),
    ];
    guardianKeys.forEach((x) => {
      b.push(x);
    });
    // Governance emitter: 31 zero bytes + 0x04, on emitter chain 1.
    let emitter = "0x" + this.zeroBytes.slice(0, 31 * 2) + "04";
    let seconds = Math.floor(new Date().getTime() / 1000.0);
    return this.createSignedVAA(
      guardianSet,
      signers,
      seconds,
      nonce,
      1,
      emitter,
      seq,
      32,
      b.join("")
    );
  }
  // Build a signed governance VAA setting the core message fee to `amt`.
  genGSetFee(
    signers: any,
    guardianSet: number,
    nonce: number,
    seq: number,
    amt: number
  ) {
    // Payload: 28 zero bytes + "Core" module + action 3 (set fee)
    // + uint16 8 (presumably the target chain id, Algorand -- confirm)
    // + fee amount as uint256.
    const b = [
      "0x",
      this.zeroBytes.slice(0, 28 * 2),
      this.encoder("uint8", this.ord("C")),
      this.encoder("uint8", this.ord("o")),
      this.encoder("uint8", this.ord("r")),
      this.encoder("uint8", this.ord("e")),
      this.encoder("uint8", 3),
      this.encoder("uint16", 8),
      this.encoder("uint256", Math.floor(amt)),
    ];
    // Governance emitter: 31 zero bytes + 0x04, on emitter chain 1.
    let emitter = "0x" + this.zeroBytes.slice(0, 31 * 2) + "04";
    var seconds = Math.floor(new Date().getTime() / 1000.0);
    return this.createSignedVAA(
      guardianSet,
      signers,
      seconds,
      nonce,
      1,
      emitter,
      seq,
      32,
      b.join("")
    );
  }
  // Build a signed governance VAA paying out `amt` of collected fees to
  // the 32-byte `dest` address.
  genGFeePayout(
    signers: any,
    guardianSet: number,
    nonce: number,
    seq: number,
    amt: number,
    dest: Uint8Array
  ) {
    // Payload: 28 zero bytes + "Core" module + action 4 (fee payout)
    // + uint16 8 (presumably the target chain id -- confirm)
    // + amount as uint256 + destination address bytes.
    const b = [
      "0x",
      this.zeroBytes.slice(0, 28 * 2),
      this.encoder("uint8", this.ord("C")),
      this.encoder("uint8", this.ord("o")),
      this.encoder("uint8", this.ord("r")),
      this.encoder("uint8", this.ord("e")),
      this.encoder("uint8", 4),
      this.encoder("uint16", 8),
      this.encoder("uint256", Math.floor(amt)),
      this.uint8ArrayToHexString(dest, false),
    ];
    // Governance emitter: 31 zero bytes + 0x04, on emitter chain 1.
    let emitter = "0x" + this.zeroBytes.slice(0, 31 * 2) + "04";
    var seconds = Math.floor(new Date().getTime() / 1000.0);
    return this.createSignedVAA(
      guardianSet,
      signers,
      seconds,
      nonce,
      1,
      emitter,
      seq,
      32,
      b.join("")
    );
  }
getTokenEmitter(chain: number): string {
if (chain === CHAIN_ID_SOLANA) {
return "c69a1b1a65dd336bf1df6a77afb501fc25db7fc0938cb08595a9ef473265cb4f";
}
if (chain === CHAIN_ID_ETH) {
return "0000000000000000000000000290fb167208af455bb137780163b7b7a9a10c16";
}
if (chain === CHAIN_ID_TERRA) {
return "000000000000000000000000784999135aaa8a3ca5914468852fdddbddd8789d";
}
if (chain === CHAIN_ID_BSC) {
return "0000000000000000000000000290fb167208af455bb137780163b7b7a9a10c16";
}
if (chain === CHAIN_ID_POLYGON) {
return "0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde";
}
if (chain === CHAIN_ID_AVAX) {
return "0000000000000000000000000e082f06ff657d94310cb8ce8b0d9a04541d8052";
}
if (chain === CHAIN_ID_OASIS) {
return "0000000000000000000000005848c791e09901b40a9ef749f2a6735b418d7564";
}
if (chain === CHAIN_ID_FANTOM) {
return "0000000000000000000000007C9Fc5741288cDFdD83CeB07f3ea7e22618D79D2";
}
return "";
}
getNftEmitter(chain: ChainId): string {
if (chain === CHAIN_ID_SOLANA) {
return "0def15a24423e1edd1a5ab16f557b9060303ddbab8c803d2ee48f4b78a1cfd6b";
}
if (chain === CHAIN_ID_ETH) {
return "0000000000000000000000006ffd7ede62328b3af38fcd61461bbfc52f5651fe";
}
if (chain === CHAIN_ID_BSC) {
return "0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde";
}
if (chain === CHAIN_ID_POLYGON) {
return "00000000000000000000000090bbd86a6fe93d3bc3ed6335935447e75fab7fcf";
}
if (chain === CHAIN_ID_AVAX) {
return "000000000000000000000000f7b6737ca9c4e08ae573f75a97b73d7a813f5de5";
}
if (chain === CHAIN_ID_OASIS) {
return "00000000000000000000000004952D522Ff217f40B5Ef3cbF659EcA7b952a6c1";
}
if (chain === CHAIN_ID_FANTOM) {
return "000000000000000000000000A9c7119aBDa80d4a4E0C06C8F4d8cF5893234535";
}
return "";
}
  // Build a signed governance VAA registering `chain`'s token-bridge emitter.
  // NOTE(review): `chain` is typed string but is passed to encoder("uint16")
  // and parseInt, and call sites pass numbers -- the annotation looks wrong;
  // confirm before tightening.
  genRegisterChain(
    signers: any,
    guardianSet: number,
    nonce: number,
    seq: number,
    chain: string
  ) {
    // Payload: zero padding + "TokenBridge" module + action 1 (register chain)
    // + chain 0 (all chains) + registered chain id + its emitter address.
    const b = [
      "0x",
      this.zeroBytes.slice(0, (32 - 11) * 2),
      this.encoder("uint8", this.ord("T")),
      this.encoder("uint8", this.ord("o")),
      this.encoder("uint8", this.ord("k")),
      this.encoder("uint8", this.ord("e")),
      this.encoder("uint8", this.ord("n")),
      this.encoder("uint8", this.ord("B")),
      this.encoder("uint8", this.ord("r")),
      this.encoder("uint8", this.ord("i")),
      this.encoder("uint8", this.ord("d")),
      this.encoder("uint8", this.ord("g")),
      this.encoder("uint8", this.ord("e")),
      this.encoder("uint8", 1),
      this.encoder("uint16", 0),
      this.encoder("uint16", chain),
      this.getTokenEmitter(parseInt(chain)),
    ];
    // Governance emitter: 31 zero bytes + 0x04, on emitter chain 1.
    let emitter = "0x" + this.zeroBytes.slice(0, 31 * 2) + "04";
    var seconds = Math.floor(new Date().getTime() / 1000.0);
    return this.createSignedVAA(
      guardianSet,
      signers,
      seconds,
      nonce,
      1,
      emitter,
      seq,
      32,
      b.join("")
    );
  }
genAssetMeta(
  signers: any,
  guardianSet: number,
  nonce: number,
  seq: number,
  tokenAddress: string,
  chain: number,
  decimals: number,
  symbol: string,
  name: string
) {
  // Payload 2 (AttestMeta): token address left-padded to 32 bytes, then
  // origin chain, decimals, and symbol/name each hex-encoded and
  // right-padded to 32 bytes.
  const symbolHex = Buffer.from(symbol).toString("hex");
  const nameHex = Buffer.from(name).toString("hex");
  const payload =
    "0x" +
    this.encoder("uint8", 2) +
    this.zeroBytes.slice(0, 64 - tokenAddress.length) +
    tokenAddress +
    this.encoder("uint16", chain) +
    this.encoder("uint8", decimals) +
    symbolHex +
    this.zeroBytes.slice(0, (32 - symbol.length) * 2) +
    nameHex +
    this.zeroBytes.slice(0, (32 - name.length) * 2);
  const emitter = "0x" + this.getTokenEmitter(chain);
  const timestamp = Math.floor(Date.now() / 1000);
  return this.createSignedVAA(
    guardianSet,
    signers,
    timestamp,
    nonce,
    chain,
    emitter,
    seq,
    32,
    payload
  );
}
genTransfer(
  signers: any,
  guardianSet: number,
  nonce: number,
  seq: number,
  amount: number,
  tokenAddress: string,
  tokenChain: number,
  toAddress: string,
  toChain: number,
  fee: number
) {
  // Payload 1 (Transfer): amount and fee scaled by 1e8, token and recipient
  // addresses left-padded to 32 bytes, with their respective chain ids.
  const payload =
    "0x" +
    this.encoder("uint8", 1) +
    this.encoder("uint256", Math.floor(amount * 100000000)) +
    this.zeroBytes.slice(0, 64 - tokenAddress.length) +
    tokenAddress +
    this.encoder("uint16", tokenChain) +
    this.zeroBytes.slice(0, 64 - toAddress.length) +
    toAddress +
    this.encoder("uint16", toChain) +
    this.encoder("uint256", Math.floor(fee * 100000000));
  const emitter = "0x" + this.getTokenEmitter(tokenChain);
  const timestamp = Math.floor(Date.now() / 1000);
  return this.createSignedVAA(
    guardianSet,
    signers,
    timestamp,
    nonce,
    tokenChain,
    emitter,
    seq,
    32,
    payload
  );
}
/**
 * Create a packed and signed VAA for testing.
 * See https://github.com/certusone/wormhole/blob/dev.v2/design/0001_generic_message_passing.md
 *
 * @param {number} guardianSetIndex The guardian set index
 * @param {*} signers The list of private keys for signing the VAA
 * @param {number} timestamp The timestamp of VAA
 * @param {number} nonce The nonce.
 * @param {number} emitterChainId The emitter chain identifier
 * @param {string} emitterAddress The emitter chain address, prefixed with 0x
 * @param {number} sequence The sequence.
 * @param {number} consistencyLevel The reported consistency level
 * @param {string} payload This VAA Payload hex string, prefixed with 0x
 * @returns {string} The packed signed VAA as a hex string (no 0x prefix)
 */
createSignedVAA(
  guardianSetIndex: number,
  signers: any,
  timestamp: number,
  nonce: number,
  emitterChainId: number,
  emitterAddress: string,
  sequence: number,
  consistencyLevel: number,
  payload: string
) {
  // Observation body — this is what the guardians sign (double keccak256).
  const body = [
    this.encoder("uint32", timestamp),
    this.encoder("uint32", nonce),
    this.encoder("uint16", emitterChainId),
    this.encoder("bytes32", emitterAddress),
    this.encoder("uint64", sequence),
    this.encoder("uint8", consistencyLevel),
    payload.substring(2),
  ];
  const hash = web3Utils.keccak256(web3Utils.keccak256("0x" + body.join("")));

  // The curve context is loop-invariant; build it once instead of per signer.
  // eslint-disable-next-line new-cap
  const ec = new elliptic.ec("secp256k1");
  let signatures = "";
  for (const i in signers) {
    const key = ec.keyFromPrivate(signers[i]);
    const signature = key.sign(hash.substr(2), { canonical: true });
    // Each packed signature is 66 bytes: guardian index (1), R (32), S (32),
    // recovery id (1).
    const packSig = [
      this.encoder("uint8", i),
      this.zeroPadBytes(signature.r.toString(16), 32),
      this.zeroPadBytes(signature.s.toString(16), 32),
      this.encoder("uint8", signature.recoveryParam),
    ];
    signatures += packSig.join("");
  }

  // VAA header: version 1, guardian set index, signature count, signatures,
  // then the signed body.
  const vm = [
    this.encoder("uint8", 1),
    this.encoder("uint32", guardianSetIndex),
    this.encoder("uint8", signers.length),
    signatures,
    body.join(""),
  ].join("");
  return vm;
}
zeroPadBytes(value: string, length: number) {
  // Left-pad a hex string with zeros until it spans `length` bytes
  // (two hex characters per byte). Longer inputs are returned unchanged.
  return value.padStart(2 * length, "0");
}
}
// Expose the TestLib VAA helper to the test suites.
module.exports = {
  TestLib,
};

10
algorand/test/tests.vaa Normal file
View File

@ -0,0 +1,10 @@
[['guardianUpgrade',
'010000000113006a0fd5b12c71afea701d8cf2bf13aed7511b6f1e239b26a28e2635fe5a68af1902f1eec1e085c754cbebdfb5262f78287e7d110d25cd4ad6504bc36e776d5f4d0001500b3dec9b3c712ea9cf40070ef8616f73966f26845894d796ef2a94a42396121192ceb8cca26c575f648d1b384e9d21689751c73295c424a93a4ac2094c57e70102752dce8a19611490b84e80606a40286cb45bbaea2a8f818a8c9c46e763528de971d45d7aabfafda92e494e120f9d478b7eeecb9c654509f0351f22645f90c83100034966869ea35c27670a4993fba23d8c3e7dc7868c956ec4f78069145e1598c5e96b29894ef82b8a7000bf7e17c6a2c125aaca5fae9c1af30e617d0a3b25e562c20004c51abb12086b1dba7cf129e463fcf4bd0d42720abfd0ac93da2bbc5335496f5a18cb952ad9688c0c91ac083e22df5a4cb55470d6bf107ef1d741698b2a6093c20105e97e64fb124345a9feb9a715b701cfbb3a828d0917f255a94e45a11bcc5ed200049a062a686f202a6e1d8cd3424c5d47d0b2cf302094eef25421ca403a68099c010677a8989bc4eefe4ca35d0944077da1a8b751cc27ac1d8075187438d38c3c7dda28fab818efdc8b9514c3b753d23f2ef58af1dcff743d22cebe4baa3fdd2ace8401072bb8f606278bd301d5c43172626d66bd475f74f5d1966f364b6e2a66e587c94c7f5ddd2710bff83337c88920b9207e1ccbb34aeaf20707bf5ede700bc2a245470008e798887c16ead032eac3a9002a306dfd3650432be1ee2023c673aaec1cde884642cb728ca67e27d10df4fb8bad8e0d1590894e2e4f2e69930c380d7476c0fcc400098bee13bd73fe668d66caea447de67a243f45832b4144070716def14f45b3c39733caa04f0f2feeab698de2b938539c6236239f9753b433a8989e33205a20ce9e010aad6dbe3e46e9bc7b49d6a9cd5efd87c4ec4f478847b197695d553b12c0354999282413948e6515039f19ed5298724ddea9a013a14de68246cc0dbf0412b7b5cd010b2f48337c8a4b09e20bede3fca7c05adfe3c9051eb24651e759daa4a20c701f8363ed7a206faf61880e50c0b2c6167b844cb4ee995750255335cc10a05bf9c75f010c5dbcca21cd3bc92b7d24c25de3ee49eadcc314df34cc2a577a3a9960c13f6317291598fd6b73aebbb24af51d916870757fd8193d8d7b2799e8cdeb0fe3d47433000de95a94e0a4b72c471c92b699fa70b25f70db10e7bac3910eaea5a2b60b7ba13a5b0328ab4d3256452b426a761a2aa919d2695f852d4a6134c733530fce9ab84e000e780ebd790827b19ec30beb6a079d530ace0a98b11378ed9b86af99d7ca1f4b0b2c789574fe59b66648196350b1165916e4067185d6ec3b0a7c715a1efe5c5265000f87d21
37693af524bf9aebc735e386bf2a8f3ed383ebeb6ffe0c17652ee59d88b19ad688b0270451269217efb5a0f23047627a8ee21862a77a5bf9ca55c53cde50010eaa77d3646bd79b549ce6919266fd7a9e0f64a2faaa5d61965c1f57bbd41ea4264b1fdf4abcb4e396feccdaf4193b8691119f8b2807a020a8badfb64546ddf94011135252460395af21ada39a775a3c48feb1c2462dfea215f96b974e6c27ac1233e517201697e519ca58a79d27227ef37c1a35997f377825ab6452a2f399287b6c2011229ebc2abc2be958ca1bb0551a1ae4cd9420c17289f9885ece52309f0766034000615684bf8d3ce0d217e8060a60e1940bca6f726dc1564c1467d719a6dcf58e60062319c37000000020001000000000000000000000000000000000000000000000000000000000000000400000000000000022000000000000000000000000000000000000000000000000000000000436f7265020000000000021352a26ce40f8caa8d36155d37ef0d5d783fc614d2389a74e8ffa224aead0778c786163a7a2150768cb4459ea6482d4ae574305b239b4f2264239e7599072491bd66f63356090c11aae8114f5372abf12b51280ea1fd2b0a1c76ae29a7d54dda68860a2bfffa9aa60cff05e20e2ccaa784ee89a0a16c2057cbe42d59f8fcd86a1c5c4ba351bd251a5c5b05df6a4b07ff9d5ce1a6ed58b6e9e7d6974d1babec087ec8306b84235d7b0478c61783c50f990bfc44cfc0c8c1035110a13fe788259a4148f871b52babcb1b58a2508a20a7198e131503ce26bbe119aa8c62b28390820f04dda22afe03be1c3bb10f4ba6cf94a01fd6e97387c34a1f36de0f8341e9d409e06ec45b255a41fc2792209cb998a8287204d40996df9e54ba663b12dd23fbf4fbac618be140727986b3bbd079040e577ac50486d0f6930e160a5c75fd1203c63580d2f00309a9a85effaf02564fc183c0183a963869795913d3b6dbf3b24a1c7654672c69a23c351c0cc52d7673c52de99785741344662f5b2308a0'],
['registerChain',
'01000000021300d655215e841c8402dd5a5a59cde64d1cdcf4d630794a950572154c419445bd6b0b55a2867128dbdb07a8af7023586c07ee4c2343b436fd4a7bae4c00175eee89010139205169195a576f34f840f5efec49a0c8cfcb15889b8fa7612260444bef8bdc0fd274edfc7c6a3ae9d3f0f560fe39dee3cb2cef578c4acde75d4524c9569c52010204242bf606b8ebd8bba252e9e37e0f3352c044367f8f58c681164cbd024563e604f7836a59e04713c3c351540eb84514bee8983aac088b565bdeb1588facbf240103f334f37f6054f6b35e10531377f9b60dd0ed8153333fd1ac1577ca7f9f849c092c78f980d474947227fc543ea4b8e0a0156dce70024ae123852b6569a972bc3f0004190eaa4d1d635d74563163061a7febab0f560ea5ef772e9b499abbcc8198641e152b9872e395667c3469457ea7b13474597f571a8dd43198ebf924c4d81f6860000590106ff897e2808d3e6591a2939f597855cb8b9edf9b6cf01c571d06d1015bcd43dcc26384167c09ba110a414fc0a37137bcbf8378b7da55e58c86eec2425e9e0106c6658a3ecb9355b4e4699316918903f2b51ea36c5e9eec7d2243c930b16885056e4c9b60bef18c8f311abfac9b4993ca5a5a5563ef70d8ed4643e8a0294a7cee010726d4b97eb4280a87028fb4246ac91e2278045ce9727ac960b45aa0f2f125efe72f8ecc3d5378cb5974575d1ee292595bbcac71237fe25d194bf28ee64a6ed6d30108dc8377d7ae5f922a004f85124f63afc4559a8b29446612e09bb4e22d3e8e1603286d2264aee4745803da7010505c243523593045d0b814cbfbb7a703d92c2aaf000927757c0421497409138a184ba6405fcf669abee706f2d6dce525460f9926f1e0234d420062e95d69e72253898ec7a9bd54a31b09a67a5e6a3080da419102229e010af7b9de76c6be33ac09227a8067b4e82fcae95f1919f4ed8f902b1a735cc3ebc27e217e96c76662b9d5cd614f0cf78b283e5088ecfb939c7556601bcfecf6a6c3000b1574118db342f783559846872ec5f99347b959ab39c934ab40f4285320243de351e2cf6edf7596249cb4a33a2132200f48ee351a9c4d3ab2cedc052994d8702b010cf6944222b35d390b71a6eb09bc96ccac5cef9e7dcae7604e8db11cb050c59f3514acfb6ec06fce2814c8d7509cd24d4225c2cb0666ea879ddabc86e8946d84d6010d5cccc13fa483890fb5ac0d2929d105d7f8c7aa0f9d8c4ef2e7fed502ae02505f1553d7be46ea8c86aa1ecd6106d48cf3b0e38465dc47af137f72d302c71fb623000e44bf3f46476f30e240468592f23877774b775e16d1182e5df6aea08301f716283b8517673e2a295170dc3810b50831d29617c440aff68702e0d94a95d22dee3a010f95b05
c152c60079b1b056638ab61a26452b04d44dacd0bba36d7c65a462b27720f97bb310303e320ef75ab043dd80a2fb1e4869f16689665bf076dcf754be84501105c6a4901e6ead756e6a796d6209cf830fe90acf186aa792297f75b751d13accf4004351ec3d6e49ea795be260112be51cf050a49b4213fb4467ba6e5bd8e4ce10011ab3b325b996897ab7fa7231c5eb8f04533fc1acec6429ad2cbd66a1c8e5b916f1775371411be693eb1fef8a89ec60086ca600a7fff8ffe3fc328aba62e84caaa0012f672dda335d096b2a873ce05c9b6b0759282475900678efc7b3b36a5d3fa3cab7fd4165345f61753376f039aa9207ab263786831d82206bf00f7bbb78b7c3fe80162319c3a0000000400010000000000000000000000000000000000000000000000000000000000000004000000000000000420000000000000000000000000000000000000000000546f6b656e42726964676501000000020000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585'],
['createWrappedOnAlgorand',
'010000000213001169470caf4d5f7a27049c1852e6429c0fa7db56584e1514b8cc3d7242b7710f5d15600562f068cd5bb0d515bd038929b62f14396730d06e99c6a7e14025a62900015157522a094c006c8ceb4b2a8b6e5bfaa613663f34d9ce534d4e4b5f581c720b21eb96062324eac0e9b28cf526698cc70b013436f6341312bc3dc2cd2c5e3b6f0102f1c5fcf6b6df200dbaedb7a923b244fd4563ce727b17d39f27c7fd3b9fb5c76e27f827779e47c98c0fb82bb59c40009d94afc68cec2066e6765292d5274399e20003bd48cbcf25a2ae733077f719f48812dcb5db45551308f98281839f9d1b5ebb2728ad199925b5451a2ac90caaf19eb9f8dbf83880cddd4380ccbd7556f3776beb010460d01f541b5c8664e8163db5ff67e553115a5adbe977e896e00bfe558f15e75771eb6e334827f42738120ff474597770b1dd843ae7bae96e2ebc2b2f111ff3850005577275e235cdc0e95e08b3efa44fd8085551b6f7b59aa0f7f091bf7002bcb9480d6be6196bac66f270a1018a5388b8174548a425c624bb65f06b0a0b2f41ba5c01064e9637c161de6e8588fe50022f4da14726733be790b84cce174a80e5142e17a6266381aa2b84fa3df7608430a5ba27aa188b2d389cbd0c73a4b64593d37bccc50007b70a66f4663afae8759c3639d8213e5094db416404b8058b32d10b42521b458b13d19e7ae8f5a4b9be2c47215d12037c74d5501bc5e1b1c5fa62957f8717e9d00108136ef73c58764c22391a8410e9811dd8a978f52b73c2564938357aab095e476761118053a09e0cad08735629360528ef4362823e1956833bcd8eda5482a03df801097ccf894c000cbc28587ab52627a7b8d05c2dc5434cda6f5cd55ae58e979cfb6e01303ad8ac1697dec213a5fbfeec6e77d483d09ee747a9de32618ea9dfc9a5f7010a1cc64605a4de382441c60a889d5a74f77b3328d9d8cd9e5b02ec69597f87cb60085f1cc6e4ffdb48f125c375c00d7e7d93804e9ea6b589f5a9e92bed9dfdca14000b9330b10a041438d52ccfe3b64bf4fd3a5ea5686b755f356d26d954e86b4384a1523b6e469fc61880324a0774650a5eb6502d677c11457a37674cca0cda76f688000cf1c276efcf8662bd0d59d1ca33ebe08121c76e31b0d74620bce9102ae02a078b4a52f8d626a71c4e5ce43b6524424594d8d0c70eb7cc01cc7ea734e48e85797d010d5a2617b7894ac14e15df109101e2779f6dc6027820613e2f4501a26ca850684a05785d7e02372a8184821cfc23d22ddec4631f2574d1338cafb963a58e1503dc010e674380d092aa52e8f6e237cba930551d9d7493427f9958e59d32eb796ab76a2140ca698e78a7198399935a5a73d5c8e363087f3f8f49722b237e455a15554af3000fcb79e
a3c70a75238f841f7c539bfb3ab8c00b539ed9f72b92b26b0edcaf6211a298812b07e52fb9a3a4d1c52aec35453a9c65e48be569b4e6533a22c7d679060011004d7b9d453ddb5834bfb60564d673a1ed2b51aad9db321d72ab3e1089df778467bab44624e4aa2916585cfad0f42ba03b146a57cc40f61a2ea4862a700b7945c0011257311c376bab7646f83a81b85cc3e18545e5a980eef0d8e6a217861bdb34b18342148527c9018811a46a8e2eb38cb8ab25c66dd4d4a817cc031ff163d4f32eb0112d071f99d8b4cf54b034b2276ab76fd324965f9b9a35efc56109b99e637948c024c97988497b61d169eee4f27b649d6dbdc770452be51bc01315ce3f831c9ff000162319c3c000000080001ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5000000000000000820020000000000000000000000004523c3f29447d1f32aea95bebd00383c4640f1b40001085553444300000000000000000000000000000000000000000000000000000000436972636c65436f696e00000000000000000000000000000000000000000000'],
['redeemOnAlgorand',
'01000000011300d3f66cd953524cd3dd33396bdd24a6e5deb3df3c0404be778b4b939579212e1c66f479a85a3a00c812c775e58ca34878931d955d1939877e2a5e3c4419cd11960101c47a513100d6540304c245f310b6832eaf2477afce3bf2909ea5447732e6bcb651746141998c1de50306ee94eb65b711888c65a43a42528cee0be005151716360002c124c4012062c9b1cbf5f31ca551f50c553f7a97bbd0b1a50fe9e5d5589a7f101ca550bda30503b1b3bb4c092ef9a605ddfc0be08c3634c4b292399a54d63e4200032263333ae6daa49f05ec9deca9122755c9206bd72dfdac3d9c8dcdfb2db284b0265c77f9ee1b1c00e4f298869b1c492bff805045c0d0eef8fc3c0fdfbb82f01b0104a40415447bfadfffaf7d9bca8492dfc7c94dcc15e7ae3cc4f20cb34531ed17977e7711284010d71c868b69a87547b6faa09685aff8048cfc6fffc839d0be0b6a0105dfc1b0de5216abb57b524ce51f336060c3d1625ed28b55c5cd5cf1632c38f9c062f168f3d0f0838daf9c13a4ef48a8c159207045c3cb8e43d4a76baf591223c2000686180a55def6a45e8ea71595df89a69583c671a83e9e89bc5b73c55ed88c49fc5e32c47b8abdf1552bfc311d2dac988811f40c74eef3eb9437cf35b52d30239a0007b082d16d31d8e06d8ad494834b18d3b3d316a399b4d36bfb426e5b60ff81fd2e632da3466d2fa68fc0ec7bb0964cf969cd125966873ce52a1e846d4d8f0495440108487ff6b4956cea68fc8aa225ce47f93a15477f66db4362d0602d62a5b790428578253062d34b8c61665a6637572b095da9037025f426274f3ef40e7ecc493b5b0009b39dbed5560d5de7075fb43496e52b061313c6eb591ece2cde513de818881c30672e4fc180f6e3fbabf6a94813adb0d66a99b38ee27dfc71ad3fc4058c3c976e000a0bfd12ece2d2c845dc13293380784695e98e8ae598c3053426baaf65c9c962960271c8a69e52458f385be5617dde1b571d341a82112fe9aef1468140faebca00000bc0a81d0a8e9278011a0a4e0257401f35bcec9d98222ae2e9f44a39b6899d54551482f68c89040b60315a806c1f7cf18cb65bed3e18c8cf1059999e5107e720f2000c1939897f4dc63ec9ef38844ceb665caabab1f758a8543feff43b4bdd151dc6a56b93ff56d25fc4c0fe0c78d01df793fb4e3bdc2dea55f18694a40bc43dadc654010d1f41f8d5067eccdbf047180de85521ed3bbffd91ec2f157833e9514444d39e8c65c9f9897dcefc5e44469b080021e1715d85688369d24ae89b4858eaf2c89faf000e3310037efb7a37917725a6575f471427686d56e641bd4aa21394f8202f3f52db2f45fcb718ab4550a468bceed0c13ee8040c0da173caf87a7a5c1908f7270811000f66734
60a92bce84565571f2b398f1692eb393adbf47b70bc60ca7755208aaf771010726fd66f2953a33fb472dbdcd91fb78b2993b279050d2d15b3b37c347c6e01108421b71f1568cf2f12d25bfbf2755561772c661a6f2cdeae02bbe8204fa8a53632536c4e580af27551a3304f2c87bff489b516e1d9570c1e203c8e0eea8c86520011186b2e2f0a42f554c604679085aeb7226ec2a9807d867ce51f40af5d58ce6a433981cd00f843b5223ebd764026c8a729c32ae92013dd27a8bed271d85a09733600127765a06677c2a9b060e15b361917eeefda093f3951fef93cb9787b05230dad9207635fee6f987a50202cbe3613da1f1e9c51ef09b885004d763da72403a231bd0062319c3e000000010001ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5000000000000000120010000000000000000000000000000000000000000000000000000000005f5e1000000000000000000000000004523c3f29447d1f32aea95bebd00383c4640f1b40001e6b2b620e5c346848e30a40310ef30a359b466148ab5c04f96e024e40de143ac00080000000000000000000000000000000000000000000000000000000000000000'],
['transferFromAlgorand',
'01000000011300aec87f926f0d7bbe5c37dd580f5e94e7aa9ee8837741ce9a2964efae9f977dac79492e7ed5f84d489aa6d1f3d752c9febaf846589927d1dbe8cd69e7d01e9d44010156195e8b1383bf1d76e5c715f27caa7a6d29cf2aaf10c67eae3ecb855be013f500981e56c346c2122ff2a0a274745093adc6e263ca8b220649fdaedd041be47a0102aeb73afa6c75122d52666da208b1b29221ff5f894da8fd11f9d8ab9c9fee9f6e238231f4a48200637d29fe70b327f7b6cc536d326635d944e80aca84aaf2908000035612a27bb95f0e280502ea7d4631afd2b24d41428f22f58bcd5ba9477be4de8536ab20b4bad90f3f95f3303a1c116ca78c4cce9478c2769aaad74c04b53f17f40104b1fc79501049806997bc793694601566f5d56c3abf5ea3126b0c1c24a97e9e8e592ed505de471ad372f679beee35a5e467144627d72343834c5f96b1afa1b9b400050af609532bb0ea524a35a76c4a46dea2d6a31c545bbba705d99b42259acb77d90f19e2866d7dc12468691988291c154e900fc90d7b6f666ff9f63a66d163331100063bec6164adc6529b2f5d02c9a0da90d3656d4d9bb7a6f9b31f2c00e914d2001f3724cab7fb6a914ee2de2ef622c129e0ccf7a9d9784b6a70deff5cdb0c6bc8a50107179150acf55b02e0a12e484c8f274e411d82333428dfe06440ec7cf51373fc6f2fc5241225c06c8c3ad662ca236e07ce06fc4988c1e2620dcd07e977d3c872480008e476380cdcc35d61881e372f1b6643a8aaa577fdbfc253626997e38773185bf50ae33a4c6127b77369670718fa473a52d6b593e3cb3af479c451bdfeefcf2bd30009674551361eaaff891d590ca699d5b03504d866fe91a8118bb1681e0759edbfca1f2b358398d4d46e6916d9378bcab01e7ff21fd770e63222b2ef6a98c262d83e010a8ad0bc1d8eb89b4a8d7b119d62d737202895e3d80a0ca7367a21bd6587cc6aba0cdb19c089b03d622a1ad6a939ee285f78c3f52e87a81fa51d0fea7238280aba010b13857b3845216d66fe84985bd8d5a16b2440ea0a3e5afc43c19c802fe7c360dd5534c1df47e8ba0c5b4166e13becf5454ac3567fdbdcadcaf625d46ae7a188d7010c70e3859ea2d4c05769bfb72649f0266eef24e310558a7136d931161cafea91674cc3ded48c6715f3326e365ad862444cb867688b834ac12bf0b651d81764951e010dd03c3f06009a37a2c716259c1e86793a5bd9b9bab9cc3d2cc2983a766e87aeb275a9a9a047d81b57bb2b9b747138c391b77768a639b284e662ae5c15504d48a1000e9ccfade4aea4f1d5e1e053a32d1a93c39e855044a09cfd41f974f61986e42ab21d103c1c2c82a1cff1d2f0f79ad64ad8c214f88c1a84518bc6ad3656f827c6f0010f09ed9
5a03909d4a40041676a7f460c35ea7b9464a855a3aa1df9e2b4841e5ec42b08ca2c1f9997274b9f07120180e8d5b8eeceada584c79cbfc39d983b6dfaba00101c48e160de738770847abef02cdd0a3594dbc80de68403004f987e7676f954df2aa24e5280f5ecf5a61f865af1185220a805a64211b732e3672071801f75a86901117004184bcfa26f19efed445980f65e2450101bf20bb70d36c66e244bc10c1ff33aee94550a5af547e346ffd913335b33b374776b81d408b54dad046a18afcc9f0112420f47b6b5bd0c4b5c5e5c8db36e490c8d250e1e2087a7b43b63fac0f7f6bc6f28593877c492e27c560e39afcd8aa99847feb0193759be61fae2e8130a8993150062319c42002f37460008a8f52a34e2b19ac916d3cc27d3b07bfbfb03f31e0075a2a85ffe99be89d3887c00000000000000032001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000006150008f888ac4b7756ad193876ff85decf58eaa82f5094d528d7cd9b3119737b1e8d2700080000000000000000000000000000000000000000000000000000000000000000']]

10
algorand/testnet-update Executable file
View File

@ -0,0 +1,10 @@
#!/bin/bash -f
# Push regenerated contracts to Algorand testnet.
# Usage: ./testnet-update "<deployer mnemonic>"  ($1 is passed as --mnemonic)
set -x
#./sandbox down; ./sandbox clean; ./sandbox up dev; python3 admin.py --devnet --boot
# Regenerate the TEAL sources and submit the upgrade VAA, then update the
# core and token bridge applications to the new code.
python3 admin.py --testnet --genTeal --upgradeVAA --submit --mnemonic="$1"
python3 admin.py --testnet --updateCore --mnemonic="$1"
python3 admin.py --testnet --updateToken --mnemonic="$1"
# python3 admin.py --devnet --upgradeVAA --submit --updateToken

1033
algorand/token_bridge.py Normal file

File diff suppressed because it is too large Load Diff

126
algorand/vaa_verify.py Normal file
View File

@ -0,0 +1,126 @@
#!/usr/bin/python3
"""
================================================================================================
The VAA Signature Verify Stateless Program
Copyright 2022 Wormhole Project Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------------------------------------------------------------
This program verifies a subset of the signatures in a VAA against the guardian set. This
program works in tandem with the VAA Processor stateful program.
The difference between this version and the Randlabs version is that I removed
most of the asserts, since we are going to have to completely validate the
arguments again in the TokenBridge contract.
We also cannot retroactively see/verify what arguments were passed into this
function unless all the arguments are in the Txn.application_args so
everything has to get moved out of the lsig args and into the txn_args
================================================================================================
"""
from pyteal.ast import *
from pyteal.types import *
from pyteal.compiler import *
from pyteal.ir import *
from globals import *
from inlineasm import *
import sys
SLOTID_RECOVERED_PK_X = 240
SLOTID_RECOVERED_PK_Y = 241
@Subroutine(TealType.uint64)
def sig_check(signatures, dhash, keys):
    """
    Verifies some signatures of a VAA. Due to computation budget limitations,
    this can't verify all signatures in one go. Instead, it just makes sure that
    whatever signatures it's given correspond to the given keys.

    In addition, none of the arguments are validated here beyond the fact that
    the signatures are valid given the keys and the message hash. In particular,
    the message hash is also not validated here. Thus, the proper way to use
    this function is by calling it (by the client) before the token bridge
    program. Then the token bridge program verifies each input + that the right
    program was called. If it failed to verify any of these, then signature
    verification could be bypassed.
    """
    si = ScratchVar(TealType.uint64)  # signature index (zero-based)
    ki = ScratchVar(TealType.uint64)  # key index
    slen = ScratchVar(TealType.uint64)  # signature length
    # The recovered public key halves live in fixed scratch slots so the
    # inline "store" instructions below can address them by number.
    rec_pk_x = ScratchVar(TealType.bytes, SLOTID_RECOVERED_PK_X)
    rec_pk_y = ScratchVar(TealType.bytes, SLOTID_RECOVERED_PK_Y)
    return Seq(
        [
            rec_pk_x.store(Bytes("")),
            rec_pk_y.store(Bytes("")),
            slen.store(Len(signatures)),
            # Walk the packed signatures in 66-byte strides (1 guardian-index
            # byte, 32-byte R, 32-byte S, 1 recovery-id byte) and the key set
            # in 20-byte strides (Ethereum-style addresses).
            For(Seq([
                si.store(Int(0)),
                ki.store(Int(0))
            ]),
                si.load() < slen.load(),
                Seq([
                    si.store(si.load() + Int(66)),
                    ki.store(ki.load() + Int(20))
                ])).Do(
                Seq([
                    # Recover the signer's secp256k1 public key from the
                    # message digest, recovery id, R and S.
                    InlineAssembly(
                        "ecdsa_pk_recover Secp256k1",
                        dhash,
                        Btoi(Extract(signatures, si.load() + Int(65), Int(1))),
                        Extract(signatures, si.load() + Int(1), Int(32)),  # R
                        Extract(signatures, si.load() + Int(33), Int(32)),  # S
                        type=TealType.none),
                    # returned values in stack, pass to scratch-vars
                    InlineAssembly("store " + str(SLOTID_RECOVERED_PK_Y)),
                    InlineAssembly("store " + str(SLOTID_RECOVERED_PK_X)),
                    # Generate Ethereum-type public key, compare with guardian key.
                    Assert(Extract(keys, ki.load(), Int(20)) == Substring(Keccak256(Concat(rec_pk_x.load(), rec_pk_y.load())), Int(12), Int(32)))
                ])
            ),
            Return(Int(1))
        ]
    )
def vaa_verify_program():
    """
    Stateless (logic-sig) program that checks VAA guardian signatures.

    Reads from the application-call transaction's args:
      [1] packed signatures, [2] concatenated guardian keys, [3] message hash.
    """
    signatures = Txn.application_args[1]
    keys = Txn.application_args[2]
    dhash = Txn.application_args[3]
    return Seq([
        # Refuse rekeying of the lsig account.
        Assert(Txn.rekey_to() == Global.zero_address()),
        # Fee must be zero — NOTE(review): presumably covered by fee pooling
        # elsewhere in the group; confirm against the caller.
        Assert(Txn.fee() == Int(0)),
        Assert(Txn.type_enum() == TxnType.ApplicationCall),
        # Reject the transaction unless every supplied signature matches the
        # corresponding guardian key.
        Assert(sig_check(signatures, dhash, keys)),
        Approve()]
    )
def get_vaa_verify():
    """
    Assemble the VAA-verify program to TEAL (signature mode, version 6),
    persist it to teal/vaa_verify.teal, and return the TEAL source.
    """
    source = compileTeal(vaa_verify_program(), mode=Mode.Signature, version=6)
    with open("teal/vaa_verify.teal", "w") as out:
        out.write(source)
    return source

592
algorand/wormhole_core.py Normal file
View File

@ -0,0 +1,592 @@
from time import time, sleep
from typing import List, Tuple, Dict, Any, Optional, Union
from base64 import b64decode
import base64
import random
import hashlib
import uuid
import sys
import json
import uvarint
from local_blob import LocalBlob
from TmplSig import TmplSig
from algosdk.v2client.algod import AlgodClient
from algosdk.kmd import KMDClient
from algosdk import account, mnemonic
from algosdk.encoding import decode_address
from algosdk.future import transaction
from pyteal import compileTeal, Mode, Expr
from pyteal import *
from algosdk.logic import get_application_address
from algosdk.future.transaction import LogicSigAccount
from inspect import currentframe
import pprint
# Storage geometry for the bitfield kept in account storage (see LocalBlob).
# NOTE(review): 15 keys x 127 bytes presumably mirrors Algorand local-state
# limits — confirm against LocalBlob.
max_keys = 15
max_bytes_per_key = 127
bits_per_byte = 8

bits_per_key = max_bytes_per_key * bits_per_byte
max_bytes = max_bytes_per_key * max_keys  # total bytes addressable per account
max_bits = bits_per_byte * max_bytes  # total bits addressable per account
def fullyCompileContract(genTeal, client: AlgodClient, contract: Expr, name) -> bytes:
    """
    Produce compiled bytecode for a PyTeal contract via algod's compile
    endpoint.

    When genTeal is truthy, the PyTeal expression is assembled to TEAL and
    written to the file `name`; otherwise previously generated TEAL is read
    back from that file. Either way, the TEAL source is submitted to
    client.compile and its response returned.
    """
    if genTeal:
        teal = compileTeal(contract, mode=Mode.Application, version=6, assembleConstants=True, optimize=OptimizeOptions(scratch_slots=True))
        with open(name, "w") as out:
            print("Writing " + name)
            out.write(teal)
    else:
        with open(name, "r") as src:
            print("Reading " + name)
            teal = src.read()

    return client.compile(teal)
def getCoreContracts( genTeal, approve_name, clear_name,
client: AlgodClient,
seed_amt: int,
tmpl_sig: TmplSig,
devMode: bool
) -> Tuple[bytes, bytes]:
def vaa_processor_program(seed_amt: int, tmpl_sig: TmplSig):
blob = LocalBlob()
def MagicAssert(a) -> Expr:
if devMode:
return Assert(And(a, Int(currentframe().f_back.f_lineno)))
else:
return Assert(a)
@Subroutine(TealType.bytes)
def encode_uvarint(val: Expr, b: Expr):
buff = ScratchVar()
return Seq(
buff.store(b),
Concat(
buff.load(),
If(
val >= Int(128),
encode_uvarint(
val >> Int(7),
Extract(Itob((val & Int(255)) | Int(128)), Int(7), Int(1)),
),
Extract(Itob(val & Int(255)), Int(7), Int(1)),
),
),
)
@Subroutine(TealType.bytes)
def get_sig_address(acct_seq_start: Expr, emitter: Expr):
# We could iterate over N items and encode them for a more general interface
# but we inline them directly here
return Sha512_256(
Concat(
Bytes("Program"),
# ADDR_IDX aka sequence start
tmpl_sig.get_bytecode_chunk(0),
encode_uvarint(acct_seq_start, Bytes("")),
# EMMITTER_ID
tmpl_sig.get_bytecode_chunk(1),
encode_uvarint(Len(emitter), Bytes("")),
emitter,
# APP_ID
tmpl_sig.get_bytecode_chunk(2),
encode_uvarint(Global.current_application_id(), Bytes("")),
# TMPL_APP_ADDRESS
tmpl_sig.get_bytecode_chunk(3),
encode_uvarint(Len(Global.current_application_address()), Bytes("")),
Global.current_application_address(),
tmpl_sig.get_bytecode_chunk(4),
)
)
@Subroutine(TealType.uint64)
def optin():
# Alias for readability
algo_seed = Gtxn[0]
optin = Gtxn[1]
well_formed_optin = And(
# Check that we're paying it
algo_seed.type_enum() == TxnType.Payment,
algo_seed.amount() == Int(seed_amt),
# Check that its an opt in to us
optin.type_enum() == TxnType.ApplicationCall,
optin.on_completion() == OnComplete.OptIn,
# Not strictly necessary since we wouldn't be seeing this unless it was us, but...
optin.application_id() == Global.current_application_id(),
)
return Seq(
# Make sure its a valid optin
MagicAssert(well_formed_optin),
# Init by writing to the full space available for the sender (Int(0))
blob.zero(Int(0)),
# we gucci
Int(1)
)
def nop():
return Seq([Approve()])
def publishMessage():
seq = ScratchVar()
fee = ScratchVar()
pmt = Gtxn[Txn.group_index() - Int(1)]
return Seq([
# Lets see if we were handed the correct account to store the sequence number in
MagicAssert(Txn.accounts[1] == get_sig_address(Int(0), Txn.sender())),
fee.store(App.globalGet(Bytes("MessageFee"))),
If(fee.load() > Int(0), Seq([
MagicAssert(And(
pmt.type_enum() == TxnType.Payment,
pmt.amount() >= fee.load(),
pmt.receiver() == Global.current_application_address(),
pmt.rekey_to() == Global.zero_address()
)),
])),
# emitter sequence number
seq.store(Itob(Btoi(blob.read(Int(1), Int(0), Int(8))) + Int(1))),
Pop(blob.write(Int(1), Int(0), seq.load())),
# Log it so that we can look for this on the guardian network
Log(seq.load()),
blob.meta(Int(1), Bytes("publishMessage")),
Approve()
])
def hdlGovernance(isBoot: Expr):
off = ScratchVar()
a = ScratchVar()
emitter = ScratchVar()
dest = ScratchVar()
fee = ScratchVar()
idx = ScratchVar()
set = ScratchVar()
len = ScratchVar()
v = ScratchVar()
tchain = ScratchVar()
return Seq([
# All governance must be done with the most recent guardian set
set.store(App.globalGet(Bytes("currentGuardianSetIndex"))),
If(set.load() != Int(0), Seq([
idx.store(Extract(Txn.application_args[1], Int(1), Int(4))),
MagicAssert(Btoi(idx.load()) == set.load()),
])),
# The offset of the chain
off.store(Btoi(Extract(Txn.application_args[1], Int(5), Int(1))) * Int(66) + Int(14)),
# Correct source chain?
MagicAssert(Extract(Txn.application_args[1], off.load(), Int(2)) == Bytes("base16", "0001")),
# Correct emitter?
MagicAssert(Extract(Txn.application_args[1], off.load() + Int(2), Int(32)) == Bytes("base16", "0000000000000000000000000000000000000000000000000000000000000004")),
# Get us to the payload
off.store(off.load() + Int(43)),
# Is this a governance message?
MagicAssert(Extract(Txn.application_args[1], off.load(), Int(32)) == Bytes("base16", "00000000000000000000000000000000000000000000000000000000436f7265")),
off.store(off.load() + Int(32)),
# What is the target of this governance message?
tchain.store(Extract(Txn.application_args[1], off.load() + Int(1), Int(2))),
# Needs to point at us or to all chains
MagicAssert(Or(tchain.load() == Bytes("base16", "0008"), tchain.load() == Bytes("base16", "0000"))),
a.store(Btoi(Extract(Txn.application_args[1], off.load(), Int(1)))),
Cond(
[a.load() == Int(1), Seq([
# ContractUpgrade is a VAA that instructs an implementation on a specific chain to upgrade itself
#
# In the case of Algorand, it contains the hash of the program that we are allowed to upgrade ourselves to. We would then run the upgrade program itself
# to perform the actual upgrade
off.store(off.load() + Int(3)),
App.globalPut(Bytes("validUpdateApproveHash"), Extract(Txn.application_args[1], off.load(), Int(32)))
])],
[a.load() == Int(2), Seq([
# We are updating the guardian set
# This should point at all chains
# move off to point at the NewGuardianSetIndex and grab it
off.store(off.load() + Int(3)),
v.store(Extract(Txn.application_args[1], off.load(), Int(4))),
idx.store(Btoi(v.load())),
# Lets see if the user handed us the correct memory... no hacky hacky
MagicAssert(Txn.accounts[3] == get_sig_address(idx.load(), Bytes("guardian"))),
# Make sure it is different and we can only walk forward
If(isBoot == Int(0), Seq(
MagicAssert(Txn.accounts[3] != Txn.accounts[2]),
MagicAssert(idx.load() > (set.load()))
)),
# Write this away till the next time
App.globalPut(Bytes("currentGuardianSetIndex"), idx.load()),
# Write everything out to the auxilliary storage
off.store(off.load() + Int(4)),
len.store(Btoi(Extract(Txn.application_args[1], off.load(), Int(1)))),
# Lets not let us get bricked by somebody submitting a stupid guardian set...
MagicAssert(len.load() > Int(0)),
Pop(blob.write(Int(3), Int(0), Extract(Txn.application_args[1], off.load(), Int(1) + (Int(20) * len.load())))),
# Make this block expire.. as long as it is
# not being used to sign itself. We stick the
# expiration 1000 bytes into the account...
#
# 19200 is approx 24 hours assuming a 4.5 seconds per block (24 * 3600 / 4.5) = 19200
If(Txn.accounts[3] != Txn.accounts[2],
Pop(blob.write(Int(2), Int(1000), Itob(Txn.first_valid() + Int(19200))))),
blob.meta(Int(3), Bytes("guardian"))
])],
[a.load() == Int(3), Seq([
off.store(off.load() + Int(1)),
MagicAssert(tchain.load() == Bytes("base16", "0008")),
off.store(off.load() + Int(2) + Int(24)),
fee.store(Btoi(Extract(Txn.application_args[1], off.load(), Int(8)))),
App.globalPut(Bytes("MessageFee"), fee.load()),
])],
[a.load() == Int(4), Seq([
off.store(off.load() + Int(1)),
MagicAssert(tchain.load() == Bytes("base16", "0008")),
off.store(off.load() + Int(26)),
fee.store(Btoi(Extract(Txn.application_args[1], off.load(), Int(8)))),
off.store(off.load() + Int(8)),
dest.store(Extract(Txn.application_args[1], off.load(), Int(32))),
InnerTxnBuilder.Begin(),
InnerTxnBuilder.SetFields(
{
TxnField.type_enum: TxnType.Payment,
TxnField.receiver: dest.load(),
TxnField.amount: fee.load(),
TxnField.fee: Int(0),
}
),
InnerTxnBuilder.Submit(),
])]
),
Approve()
])
def init():
    # One-time bootstrap of the core bridge application. Records the hash of
    # the stateless signature-verification program and replays the initial
    # guardian-set governance VAA. Callable exactly once, only by the creator.
    return Seq([
        # Stateless verifier program hash; verifyVAA later requires each
        # verifySigs txn to be sent from this address (see STATELESS_LOGIC_HASH).
        App.globalPut(Bytes("vphash"), Txn.application_args[2]),
        # Only the application creator may bootstrap.
        MagicAssert(Txn.sender() == Global.creator_address()),
        # Refuse to run twice: "booted" flips from uninitialized (0) below.
        MagicAssert(App.globalGet(Bytes("booted")) == Int(0)),
        App.globalPut(Bytes("booted"), Bytes("true")),
        # The bootstrap VAA itself gets replay protection.
        checkForDuplicate(),
        # isBoot=1: hdlGovernance skips the forward-only guardian-set-index
        # checks that apply to post-boot upgrades.
        hdlGovernance(Int(1))
    ])
def verifySigs():
    # Application-call side of signature verification: it approves
    # unconditionally. This is intentional — the actual ECDSA checks run in
    # the stateless logic-sig, and verifyVAA later asserts that every
    # verifySigs txn in the group was sent from STATELESS_LOGIC_HASH with
    # the expected signature bytes, keyset, and digest arguments.
    return Seq([
        Approve(),
    ])
@Subroutine(TealType.none)
def checkForDuplicate():
    # Replay protection. Parses (emitter, sequence) out of the raw VAA in
    # application_args[1], locates the bit for this sequence in a per-emitter
    # bitfield blob (local state of the sig-account in Txn.accounts[1]),
    # rejects if the bit is already set, then sets it.
    off = ScratchVar()          # byte offset of the emitter field in the VAA
    emitter = ScratchVar()      # emitter chain id (2 bytes) + address (32 bytes)
    sequence = ScratchVar()     # 64-bit VAA sequence number
    b = ScratchVar()            # bitfield byte containing this sequence's bit
    byte_offset = ScratchVar()  # reused: bitfield block index, then byte-in-blob
    return Seq(
        # Only VAA version 1 is supported
        MagicAssert(Btoi(Extract(Txn.application_args[1], Int(0), Int(1))) == Int(1)),
        off.store(Btoi(Extract(Txn.application_args[1], Int(5), Int(1))) * Int(66) + Int(14)), # Skip the header plus 66 bytes per signature to reach the emitter
        # emitter is chain/contract-address
        emitter.store(Extract(Txn.application_args[1], off.load(), Int(34))),
        sequence.store(Btoi(Extract(Txn.application_args[1], off.load() + Int(34), Int(8)))),
        # Txn.accounts[1] must be the sig-account for this emitter's bitfield
        # block; each block covers max_bits sequence numbers.
        byte_offset.store(sequence.load() / Int(max_bits)),
        MagicAssert(Txn.accounts[1] == get_sig_address(byte_offset.load(), emitter.load())),
        # Now locate the byte within the blob that holds this sequence's bit
        byte_offset.store((sequence.load() / Int(8)) % Int(max_bytes)),
        b.store(blob.get_byte(Int(1), byte_offset.load())),
        # Reject if this (emitter, sequence) has been seen before...
        MagicAssert(GetBit(b.load(), sequence.load() % Int(8)) == Int(0)),
        # ...and mark the bit so it is never accepted again
        blob.set_byte(Int(1), byte_offset.load(), SetBit(b.load(), sequence.load() % Int(8), Int(1))),
        blob.meta(Int(1), Bytes("duplicate"))
    )
# Address of the stateless signature-verification program, as stored in
# global state by init()/governance. verifyVAA requires every verifySigs
# transaction in the group to be sent from this address.
STATELESS_LOGIC_HASH = App.globalGet(Bytes("vphash"))
def verifyVAA():
    # Audits the transaction group to confirm that the preceding 'verifySigs'
    # calls — each signed by the stateless verifier — checked every signature
    # in the VAA (application_args[1]) against the current guardian set and
    # this VAA's digest. Approves only if the whole signature section was
    # covered. Account layout:
    #   Txn.accounts[1] = duplicate-suppression blob (shared with callers)
    #   Txn.accounts[2] = guardian-set blob for the VAA's set index
    i = ScratchVar()                # group-index cursor
    a = ScratchVar()                # application_args[0] of the txn at index i
    total_guardians = ScratchVar()  # number of guardians in the stored set
    guardian_keys = ScratchVar()    # concatenated 20-byte guardian addresses
    num_sigs = ScratchVar()         # signature count declared in the VAA
    off = ScratchVar()              # cursor into the VAA's signature section
    digest = ScratchVar()           # keccak256(keccak256(VAA body))
    hits = ScratchVar()             # 32-bit bitmap of guardian indices used
    s = ScratchVar()                # scratch: expiry, sig batch, then keyset
    eoff = ScratchVar()             # end offset of the current signature batch
    guardian = ScratchVar()         # guardian index of the current signature
    return Seq([
        # Txn.accounts[2] must be the sig-account derived from the guardian
        # set index embedded in the VAA (bytes 1..4).
        MagicAssert(Txn.accounts[2] == get_sig_address(Btoi(Extract(Txn.application_args[1], Int(1), Int(4))), Bytes("guardian"))),
        blob.checkMeta(Int(2), Bytes("guardian")),
        # First byte of the blob is the key count; the keys follow it.
        total_guardians.store(blob.get_byte(Int(2), Int(0))),
        guardian_keys.store(blob.read(Int(2), Int(1), Int(1) + Int(20) * total_guardians.load())),
        # Offset 1000 holds the set's expiration round (0 = never expires).
        s.store(Btoi(blob.read(Int(2), Int(1000), Int(1008)))),
        If(s.load() != Int(0),
            MagicAssert(Txn.first_valid() < s.load())),
        hits.store(Bytes("base16", "0x00000000")),
        # How many signatures are in this vaa?
        num_sigs.store(Btoi(Extract(Txn.application_args[1], Int(5), Int(1)))),
        # Digest = double keccak256 of the body, which starts after the
        # 66-byte-per-signature section.
        off.store(Int(6) + (num_sigs.load() * Int(66))),
        digest.store(Keccak256(Keccak256(Extract(Txn.application_args[1], off.load(), Len(Txn.application_args[1]) - off.load())))),
        # Quorum: strictly more than 2/3 of the guardian set must have signed.
        MagicAssert(And(
            total_guardians.load() > Int(0),
            num_sigs.load() <= total_guardians.load(),
            num_sigs.load() > ((total_guardians.load() * Int(2)) / Int(3)),
        )),
        # Point it at the start of the signatures in the VAA
        off.store(Int(6)),
        # We'll check that the preceding transactions properly verify
        # all of the signatures. Due to size limitations, there will be
        # multiple 'verifySigs' calls to achieve this. First we walk
        # backwards from the current instruction to find all the
        # 'verifySigs' calls. We do it this way because it's possible
        # that the VAA transactions are composed with some other
        # contract calls, so we do not rely on absolute transaction
        # indices.
        #
        #  |  | ...            |
        #  |  | something else |
        #  |  |----------------|
        #  |  | verifySigs     |
        #  |  | verifySigs     |
        #  |  | verifySigs     |
        #  |  | verifyVAA      | <- we are here now
        #  |  |----------------|
        #  v  | ...            |
        MagicAssert(Txn.group_index() > Int(0)),
        # the first 'verifySigs' tx is the one before us
        i.store(Txn.group_index() - Int(1)),
        MagicAssert(Gtxn[i.load()].application_args.length() > Int(0)),
        a.store(Gtxn[i.load()].application_args[0]),
        # Go back until we hit 'something else' or run out of
        # transactions (we allow nops too)
        While (And(i.load() > Int(0), Or(a.load() == Bytes("verifySigs"), a.load() == Bytes("nop")))).Do(Seq([
            i.store(i.load() - Int(1)),
            If (Gtxn[i.load()].application_args.length() > Int(0),
                a.store(Gtxn[i.load()].application_args[0]),
                Seq([
                    a.store(Bytes("")),
                    Break()
                ]))
        ])),
        # If the walk stopped on a foreign txn, step forward to the first
        # verifySigs/nop of our run.
        If(And(a.load() != Bytes("verifySigs"), a.load() != Bytes("nop")), i.store(i.load() + Int(1))),
        # Now look through the whole group of 'verifySigs'
        While(i.load() <= Txn.group_index()).Do(Seq([
            # Every txn in the run must be a non-rekeying call to this app
            # using the same duplicate/guardian accounts as us.
            MagicAssert(And(
                Gtxn[i.load()].type_enum() == TxnType.ApplicationCall,
                Gtxn[i.load()].rekey_to() == Global.zero_address(),
                Gtxn[i.load()].application_id() == Txn.application_id(),
                Gtxn[i.load()].accounts[1] == Txn.accounts[1],
                Gtxn[i.load()].accounts[2] == Txn.accounts[2],
            )),
            a.store(Gtxn[i.load()].application_args[0]),
            Cond(
                [a.load() == Bytes("verifySigs"), Seq([
                    # What signatures did this verifySigs call claim to check?
                    s.store(Gtxn[i.load()].application_args[1]),
                    # They must be exactly the next signature bytes expected
                    # at this point in the VAA.
                    MagicAssert(Extract(Txn.application_args[1], off.load(), Len(s.load())) == s.load()),
                    # Where is the end pointer...
                    eoff.store(off.load() + Len(s.load())),
                    # Now we will reset s and collect the keys
                    s.store(Bytes("")),
                    While(off.load() < eoff.load()).Do(Seq( [
                        # Each guardian index may be used at most once per VAA
                        guardian.store(Btoi(Extract(Txn.application_args[1], off.load(), Int(1)))),
                        MagicAssert(GetBit(hits.load(), guardian.load()) == Int(0)),
                        hits.store(SetBit(hits.load(), guardian.load(), Int(1))),
                        # This extracts out of the keys THIS guardian's public key
                        s.store(Concat(s.load(), Extract(guardian_keys.load(), guardian.load() * Int(20), Int(20)))),
                        off.store(off.load() + Int(66))
                    ])),
                    MagicAssert(And(
                        Gtxn[i.load()].application_args[2] == s.load(), # Does the keyset passed into the verify routines match what it should be?
                        Gtxn[i.load()].sender() == STATELESS_LOGIC_HASH, # Was it signed with our code?
                        Gtxn[i.load()].application_args[3] == digest.load() # Was it verifying the same vaa?
                    )),
                ])],
                [a.load() == Bytes("nop"), Seq([])], # if there is a function call not listed here, it will throw an error
                [a.load() == Bytes("verifyVAA"), Seq([])],
                [Int(1) == Int(1), Seq([Reject()])] # Nothing should get snuck in between...
            ),
            i.store(i.load() + Int(1))
        ])
        ),
        # Did we verify all the signatures? If the answer is no, something is sus
        MagicAssert(off.load() == Int(6) + (num_sigs.load() * Int(66))),
        Approve(),
    ])
def governance():
    # Entry point for governance VAAs. Requires the immediately preceding
    # group transaction to be a 'verifyVAA' call on this same application,
    # from the same sender and with the same accounts, over the exact same
    # VAA bytes — i.e. the VAA we are about to act on was actually verified.
    return Seq([
        checkForDuplicate(), # Verify this is not a duplicate message and then make sure we never see it again
        MagicAssert(And(
            Gtxn[Txn.group_index() - Int(1)].type_enum() == TxnType.ApplicationCall,
            Gtxn[Txn.group_index() - Int(1)].application_id() == Txn.application_id(),
            Gtxn[Txn.group_index() - Int(1)].application_args[0] == Bytes("verifyVAA"),
            Gtxn[Txn.group_index() - Int(1)].sender() == Txn.sender(),
            Gtxn[Txn.group_index() - Int(1)].rekey_to() == Global.zero_address(),
            # The verified VAA must be byte-identical to the one we process
            Gtxn[Txn.group_index() - Int(1)].application_args[1] == Txn.application_args[1],
            # This transaction must not rekey either
            Gtxn[Txn.group_index()].rekey_to() == Global.zero_address(),
            Gtxn[Txn.group_index()].sender() == Txn.sender(),
            # We all opted into the same accounts?
            Gtxn[Txn.group_index() - Int(1)].accounts[0] == Txn.accounts[0],
            Gtxn[Txn.group_index() - Int(1)].accounts[1] == Txn.accounts[1],
            Gtxn[Txn.group_index() - Int(1)].accounts[2] == Txn.accounts[2],
        )),
        hdlGovernance(Int(0)), # isBoot=0: forward-only guardian-set checks apply
        Approve(),
    ])
# NoOp dispatch key: the first application argument selects the method.
METHOD = Txn.application_args[0]
# The core application can never be deleted.
on_delete = Seq([Reject()])
router = Cond(
    [METHOD == Bytes("publishMessage"), publishMessage()],
    [METHOD == Bytes("nop"), nop()],
    [METHOD == Bytes("init"), init()],
    [METHOD == Bytes("verifySigs"), verifySigs()],
    [METHOD == Bytes("verifyVAA"), verifyVAA()],
    [METHOD == Bytes("governance"), governance()],
)
# Global state at creation time; init() completes the bootstrap later.
on_create = Seq( [
    App.globalPut(Bytes("MessageFee"), Int(0)),
    App.globalPut(Bytes("vphash"), Bytes("")),
    App.globalPut(Bytes("currentGuardianSetIndex"), Int(0)),
    App.globalPut(Bytes("validUpdateApproveHash"), Bytes("")),
    App.globalPut(Bytes("validUpdateClearHash"), Bytes("base16", "73be5fd7cd378289177bf4a7ca5433ab30d91b417381bba8bd704aff2dec424f")), # empty clear state program
    Return(Int(1))
])
progHash = ScratchVar()
progSet = ScratchVar()
clearHash = ScratchVar()
clearSet = ScratchVar()
def getOnUpdate():
    # Update policy. In devMode the creator may update freely; otherwise the
    # proposed approval/clear programs must hash (Sha512_256 over "Program" ||
    # bytecode) to the values pre-approved via governance in
    # validUpdateApproveHash / validUpdateClearHash.
    if devMode:
        return Seq( [
            Return(Txn.sender() == Global.creator_address()),
        ])
    else:
        return Seq( [
            MagicAssert(Sha512_256(Concat(Bytes("Program"), Txn.approval_program())) == App.globalGet(Bytes("validUpdateApproveHash"))),
            MagicAssert(Sha512_256(Concat(Bytes("Program"), Txn.clear_state_program())) == App.globalGet(Bytes("validUpdateClearHash"))),
            Return(Int(1))
        ] )
on_update = getOnUpdate()
on_optin = Seq( [
    Return(optin())
])
# Lifecycle routing: creation, update, delete, opt-in, then NoOp methods.
return Cond(
    [Txn.application_id() == Int(0), on_create],
    [Txn.on_completion() == OnComplete.UpdateApplication, on_update],
    [Txn.on_completion() == OnComplete.DeleteApplication, on_delete],
    [Txn.on_completion() == OnComplete.OptIn, on_optin],
    [Txn.on_completion() == OnComplete.NoOp, router]
)
def clear_state_program():
    # Clear-state always succeeds; no local-state cleanup is required here.
    return Int(1)
if not devMode:
client = AlgodClient("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "https://testnet-api.algonode.cloud")
APPROVAL_PROGRAM = fullyCompileContract(genTeal, client, vaa_processor_program(seed_amt, tmpl_sig), approve_name)
CLEAR_STATE_PROGRAM = fullyCompileContract(genTeal, client, clear_state_program(), clear_name)
return APPROVAL_PROGRAM, CLEAR_STATE_PROGRAM

View File

@ -0,0 +1,97 @@
apiVersion: v1
kind: Service
metadata:
labels:
app: algorand
name: algorand
spec:
ports:
- name: postgres
port: 5432
protocol: TCP
- name: algod
port: 4001
targetPort: algod
- name: kmd
port: 4002
targetPort: kmd
- name: indexer
port: 8980
targetPort: indexer
selector:
app: algorand
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
labels:
app: algorand
name: algorand
spec:
replicas: 1
selector:
matchLabels:
app: algorand
serviceName: algorand
template:
metadata:
labels:
app: algorand
spec:
containers:
- image: postgres:13-alpine
name: algorand-postgres
ports:
- containerPort: 5432
resources: {}
env:
- name: POSTGRES_USER
value: algorand
- name: POSTGRES_PASSWORD
value: algorand
- name: POSTGRES_DB
value: indexer_db
- name: algorand-algod
image: algorand-algod
command:
- /bin/sh
- -c
- /opt/start_algod.sh
ports:
- containerPort: 4001
name: algod
protocol: TCP
- containerPort: 4002
name: kmd
protocol: TCP
readinessProbe:
tcpSocket:
port: 4001
- name: algorand-indexer
image: algorand-indexer
command:
- /bin/sh
- -c
- /tmp/start.sh
ports:
- containerPort: 8980
name: indexer
protocol: TCP
readinessProbe:
tcpSocket:
port: 8980
- name: algorand-contracts
image: algorand-contracts
command:
- /bin/sh
- -c
- "sh deploy.sh && touch success && sleep infinity"
readinessProbe:
exec:
command:
- test
- -e
- "success"
initialDelaySeconds: 5
periodSeconds: 5
restartPolicy: Always

View File

@ -1,62 +0,0 @@
---
apiVersion: v1
kind: Service
metadata:
name: algorand
labels:
app: algorand
spec:
clusterIP: None
selector:
app: algorand
ports:
- name: algod
port: 4001
targetPort: algod
- name: kmd
port: 4002
targetPort: kmd
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: algorand
spec:
selector:
matchLabels:
app: algorand
serviceName: algorand
template:
metadata:
labels:
app: algorand
spec:
restartPolicy: Always
terminationGracePeriodSeconds: 0
containers:
- name: algod
image: algorand
command:
- /bin/sh
- -c
- ./goal network start -r /network && sleep infinity
ports:
- containerPort: 4001
name: algod
protocol: TCP
readinessProbe:
tcpSocket:
port: 4001
- name: goal-kmd
image: algorand
command:
- /bin/sh
- -c
- ./goal kmd start -d /network/Node && ./goal account list && /setup/setup.sh && sleep infinity
ports:
- containerPort: 4002
name: kmd
protocol: TCP
readinessProbe:
tcpSocket:
port: 4002

View File

@ -69,7 +69,7 @@ spec:
command:
- /bin/sh
- -c
- "npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_solana_chain.js && npx truffle exec scripts/register_terra_chain.js && npx truffle exec scripts/register_bsc_chain.js && nc -lkp 2000 0.0.0.0"
- "npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_solana_chain.js && npx truffle exec scripts/register_terra_chain.js && npx truffle exec scripts/register_bsc_chain.js && npx truffle exec scripts/register_algo_chain.js && nc -lkp 2000 0.0.0.0"
readinessProbe:
periodSeconds: 1
failureThreshold: 300
@ -122,7 +122,7 @@ spec:
command:
- /bin/sh
- -c
- "sed -i 's/CHAIN_ID=0x2/CHAIN_ID=0x4/g' .env && npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_solana_chain.js && npx truffle exec scripts/register_terra_chain.js && npx truffle exec scripts/register_eth_chain.js && nc -lkp 2000 0.0.0.0"
- "sed -i 's/CHAIN_ID=0x2/CHAIN_ID=0x4/g' .env && npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_solana_chain.js && npx truffle exec scripts/register_terra_chain.js && npx truffle exec scripts/register_eth_chain.js && npx truffle exec scripts/register_algo_chain.js && nc -lkp 2000 0.0.0.0"
readinessProbe:
periodSeconds: 1
failureThreshold: 300

View File

@ -0,0 +1,32 @@
// run this script with truffle exec
const jsonfile = require("jsonfile");
const TokenBridge = artifacts.require("TokenBridge");
const TokenImplementation = artifacts.require("TokenImplementation");
const BridgeImplementationFullABI = jsonfile.readFileSync(
"../build/contracts/BridgeImplementation.json"
).abi;
const algoTokenBridgeVAA = process.env.REGISTER_ALGO_TOKEN_BRIDGE_VAA;
module.exports = async function(callback) {
try {
const accounts = await web3.eth.getAccounts();
const initialized = new web3.eth.Contract(
BridgeImplementationFullABI,
TokenBridge.address
);
// Register the ALGO endpoint
await initialized.methods
.registerChain("0x" + algoTokenBridgeVAA)
.send({
value: 0,
from: accounts[0],
gasLimit: 2000000,
});
callback();
} catch (e) {
callback(e);
}
};

View File

@ -197,6 +197,11 @@
"nftBridgeEmitterAddress": "00000000000000000000000026b4afb60d6c903165150c6f0aa14f8016be4aec",
"nftBridgeAddress": "0x26b4afb60d6c903165150c6f0aa14f8016be4aec"
}
},
"8": {
"contracts": {
"tokenBridgeEmitterAddress": "8edf5b0e108c3a1a0a4b704cc89591f2ad8d50df24e991567e640ed720a94be2"
}
}
},
"gancheDefaults": [

View File

@ -75,6 +75,7 @@ solTokenBridge=$(jq --raw-output '.chains."1".contracts.tokenBridgeEmitterAddres
ethTokenBridge=$(jq --raw-output '.chains."2".contracts.tokenBridgeEmitterAddress' $addressesJson)
terraTokenBridge=$(jq --raw-output '.chains."3".contracts.tokenBridgeEmitterAddress' $addressesJson)
bscTokenBridge=$(jq --raw-output '.chains."4".contracts.tokenBridgeEmitterAddress' $addressesJson)
algoTokenBridge=$(jq --raw-output '.chains."8".contracts.tokenBridgeEmitterAddress' $addressesJson)
solNFTBridge=$(jq --raw-output '.chains."1".contracts.nftBridgeEmitterAddress' $addressesJson)
ethNFTBridge=$(jq --raw-output '.chains."2".contracts.nftBridgeEmitterAddress' $addressesJson)
@ -93,6 +94,7 @@ solTokenBridgeVAA=$(npm --prefix clients/token_bridge run --silent main -- gener
ethTokenBridgeVAA=$(npm --prefix clients/token_bridge run --silent main -- generate_register_chain_vaa 2 0x${ethTokenBridge} --guardian_secret ${guardiansPrivateCSV} )
terraTokenBridgeVAA=$(npm --prefix clients/token_bridge run --silent main -- generate_register_chain_vaa 3 0x${terraTokenBridge} --guardian_secret ${guardiansPrivateCSV})
bscTokenBridgeVAA=$(npm --prefix clients/token_bridge run --silent main -- generate_register_chain_vaa 4 0x${bscTokenBridge} --guardian_secret ${guardiansPrivateCSV})
algoTokenBridgeVAA=$(npm --prefix clients/token_bridge run --silent main -- generate_register_chain_vaa 8 0x${algoTokenBridge} --guardian_secret ${guardiansPrivateCSV})
# 5) create nft bridge registration VAAs
@ -115,6 +117,7 @@ solTokenBridge="REGISTER_SOL_TOKEN_BRIDGE_VAA"
ethTokenBridge="REGISTER_ETH_TOKEN_BRIDGE_VAA"
terraTokenBridge="REGISTER_TERRA_TOKEN_BRIDGE_VAA"
bscTokenBridge="REGISTER_BSC_TOKEN_BRIDGE_VAA"
algoTokenBridge="REGISTER_ALGO_TOKEN_BRIDGE_VAA"
solNFTBridge="REGISTER_SOL_NFT_BRIDGE_VAA"
ethNFTBridge="REGISTER_ETH_NFT_BRIDGE_VAA"
@ -149,6 +152,10 @@ upsert_env_file $envFile $terraNFTBridge $terraNFTBridgeVAA
upsert_env_file $ethFile $bscTokenBridge $bscTokenBridgeVAA
upsert_env_file $envFile $bscTokenBridge $bscTokenBridgeVAA
# algo token bridge
upsert_env_file $ethFile $algoTokenBridge $algoTokenBridgeVAA
upsert_env_file $envFile $algoTokenBridge $algoTokenBridgeVAA
# 7) copy the local .env file to the solana & terra dirs, if the script is running on the host machine
# chain dirs will not exist if running in docker for Tilt, only if running locally. check before copying.
@ -159,7 +166,7 @@ if [[ -d ./ethereum ]]; then
fi
# copy the hex envFile to each of the non-EVM chains
for envDest in ./solana/.env ./terra/tools/.env; do
for envDest in ./solana/.env ./terra/tools/.env ./algorand/.env; do
dirname=$(dirname $envDest)
if [[ -d "$dirname" ]]; then
echo "copying $envFile to $envDest"

View File

@ -98,6 +98,7 @@ pushd /usr/src/clients/token_bridge
node main.js solana execute_governance_vaa "$REGISTER_ETH_TOKEN_BRIDGE_VAA"
node main.js solana execute_governance_vaa "$REGISTER_TERRA_TOKEN_BRIDGE_VAA"
node main.js solana execute_governance_vaa "$REGISTER_BSC_TOKEN_BRIDGE_VAA"
node main.js solana execute_governance_vaa "$REGISTER_ALGO_TOKEN_BRIDGE_VAA"
popd
pushd /usr/src/clients/nft_bridge

View File

@ -1,20 +0,0 @@
// ESLint configuration: StandardJS base rules parsed with the TypeScript
// parser, for browser/CommonJS/ES2021 sources and mocha tests.
module.exports = {
  env: {
    browser: true,
    commonjs: true,
    es2021: true,
    mocha: true
  },
  extends: [
    'standard'
  ],
  parser: '@typescript-eslint/parser',
  parserOptions: {
    ecmaVersion: 12
  },
  plugins: [
    '@typescript-eslint'
  ],
  rules: {
  }
}

View File

@ -1,108 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and *not* Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
teal/wormhole/pyteal/__pycache__/*
teal/wormhole/build/*.teal
test/temp/*.teal

View File

@ -1,341 +0,0 @@
# Pricecaster Service V2
## Introduction
This service consumes prices from "price fetchers" and feeds blockchain publishers.
The current implementation is a Wormhole client that uses the JS SDK to get VAAs from Pyth network and feed the payload and cryptographic verification data to a transaction group for validation. Subsequently, the data is optionally processed and stored, either price or metrics. For details regarding Wormhole VAAs see design documents:
https://github.com/certusone/wormhole/tree/dev.v2/whitepapers
## System Overview
**The objective is to receive signed messages — named Verifiable Attestations (VAAs) in Wormhole jargon — from our relayer backend (Pricecaster), verify them against a fixed (and upgradeable) set of "guardian public keys" and process them, publishing on-chain price information or doing governance chores depending on the VAA payload.**
The design is based on two contracts that work in tandem, a **Stateful contract (VAA_Processor)** that accepts calls for verifying and committing VAAs, and also maintains the global guardian set; and a **verifier stateless contract** that does the computational work of ECDSA signature verification.
Due to computation and space limits, the validation of the 19 guardian signatures against the payload is partitioned so each stateless contract validates a subset of the guardian signatures. If ECDSA decompress and validation opcodes are used, that yields 650+1750 = 2400 computation units * 7 = 16800, leaving 3200 free units for remaining opcodes.
In our design, we use the term **verification step** for each of the app calls + stateless logic involved in verifying a block of signatures.
Keep in mind that *not all* the 19 signatures must be present in a VAA verification, but at least 1 + (2/3) of the current guardian set.
The maximum number of signatures in each verification step is fixed at contract compilation stage, so with this in mind and example values:
* let $N_S$ be the total signatures to verify $(19)$
* let $N_V$ be the number of signatures per verification step $(7)$,
* the required number of transactions $N_T = \lceil{N_S/N_V}\rceil = \lceil{19/7}\rceil = 3$
* Each transaction-step $T_i$ will verify signatures $[j..k]$ where $j = i \times N_V$, $k = min(N_S-1, j+N_V-1)$, so for $T_0 = [0..6]$, $T_1 = [7..13]$, $T_2 = [14..18]$.
The verification process inputs consist of:
1. the set of current guardian public keys,
2. the signed message digest (VAA information fields + generic payload),
3. the set of signatures in the VAA header.
With the above in mind, and considering the space and computation limits in the current Algorand protocol, the typical flow for verifying a VAA for 19 guardians using step-size of 7, would be based on the following transaction group:
| TX# | App calls | Stateless logic |
| --- | --------- | --------------- |
| 0 | _args_: guardian_pk[0..6], _txnote_: signed_digest | _args_: sig[0..6] |
| 1 | _args_: guardian_pk[7..13], _txnote_: signed_digest | _args_: sig[7..13] |
| 2 | _args_: guardian_pk[14..18], _txnote_: signed_digest | _args_: sig[14..18] |
| 3 | VAA consume call | N/A |
The current design requires the last call to be a call to an authorized application. This is intended to process VAA price data. The authorized appid must be set accordingly using the `setauthid` call in the VAA Processor contract after deployment.
If no call is going to be made, a dummy app call must be inserted in group for the transaction group to succeed.
To keep the long-term transaction costs predictable, when not all signatures are provided (but more than TRUNC(N_S*2/3)+1 are), the number of transactions in the group does not change; a transaction may instead have zero signatures as input, e.g. for a VAA with 14 signatures:
| TX# | App calls | Stateless logic |
| --- | --------- | --------------- |
| 0 | _args_: guardian_pk[0..6], _txnote_: signed_digest | _args_: sig[0..6] |
| 1 | _args_: guardian_pk[7..13], _txnote_: signed_digest | _args_: sig[7..13] |
| 2 | _args_: guardian_pk[14..18], _txnote_: signed_digest | _args_: **empty** |
| 3 | VAA consume call | N/A |
The backend will currently **call the Pricekeeper V2 contract to store data** as the last TX group. See below for details on how Pricekeeper works.
Regarding stateless logic we can say that,
* Its code is constant and its known program hash is validated by the stateful program.
* Asserts that the appropriate stateful program is called using known AppId embedded at compile stage.
* Passing signature subset through arguments does not pose any higher risk since any tampered signature will make the operation fail;
* The signed digest and public keys are retrieved through transaction note field and argument. This limits for the current design the maximum digest size to 1000 bytes and the maximum number of public keys -and guardians to ~64.
* Verification is performed using TEAL5 ECDSA opcodes. If any signature does not verify, the transaction fails and, subsequently, the entire transaction group aborts.
For the stateful app-calls we consider,
* Global state stores guardian public-keys, entry count (set size) and guardian set expiration time.
* Initial state after deployment could be set through a bootstrap call, using last guardian-set-change governance VAA if available.
* Sender must be stateless logic
* Argument 1 must contain guardian public keys for guardians $[k..j]$
* Argument 2 must contain current guardian size set
* Note field must contain signed digest.
* Passed guardian keys $[k..j]$ must match the current global state.
* Passed guardian size set must match the current global state.
* Last TX in the verification step (total group size-1) triggers VAA processing according to fields (e.g: do governance chores, unpack Pyth price ticker, etc). Last TX in the entire group must be an authorized application call.
**VAA Structure**
VAA structure is defined in:
https://github.com/certusone/wormhole/blob/dev.v2/whitepapers/0001_generic_message_passing.md
Governance VAAs:
https://github.com/certusone/wormhole/blob/dev.v2/whitepapers/0002_governance_messaging.md
Sample Ethereum Struct Reference:
https://github.com/certusone/wormhole/blob/dev.v2/ethereum/contracts/Structs.sol
```
VAA
i Bytes Field
0 1 Version
1 4 GuardianSetIndex
5 1 LenSignatures (LN)
6 66*LN Signatures where each S = { guardianIndex (1),r(32),s(32),v(1) }
-------------------------------------< hashed/signed body starts here.
4 timestamp
4 Nonce
2 emitterChainId
32 emitterAddress
8 sequence
1 consistencyLevel
N payload
--------------------------------------< hashed/signed body ends here.
```
**VAA Commitment**
Each VAA is uniquely identified by tuple (emitter_chain_id, emitter_address, sequence). We are currently interested in VAAs for:
* Governance operations:
* Upgrade guardian set
* Upgrade contract [this is necessary for non-publishers?]
* Pyth Ticker Data
## Pricekeeper V2 App
The Pricekeeper V2 App maintains a record of product/asset symbols (e.g. ALGO/USD, BTC/USDT) and the price and metrics information associated. As the original Pyth Payload is 150-bytes long and it wouldn't fit in the key-value entry of the global state, the Pricekeeper contract slices the Pyth fields to a more compact format, discarding unneeded information.
The Pricekeeper V2 App will allow storage to succeed only if:
* Sender is the contract owner.
* Call is part of a group where all application calls are from the expected VAA processor Appid,
* Call is part of a group where the verification slot has all bits set.
At deployment, the priceKeeper V2 contract must have the "vaapid" global field set accordingly.
Consumers must interpret the stored bytes as fields organized as:
```
Bytes
32 productId
32 priceId
8 price
1 price_type
4 exponent
8 twap value
8 twac value
8 confidence
8 timestamp (based on Solana contract call time)
```
## Installation
Prepare all Node packages with:
```
npm install
```
## Deployment of Applications
Use the deployment tools in `tools` subdirectory.
* To deploy the VAA processor and Pricekeeper V2 app to use with Wormhole, make sure you have Python environment running (preferably >=3.7.0), and `pyteal` installed with `pip3`.
* The deployment program will: generate all TEAL files from PyTEAL sources, deploy the VAA Processor application, deploy the Pricekeeper V2 contract, compile the stateless program and set the correct parameters for the contracts: authid, vphash in VAA Processor and vaapid in the Pricekeeper app.
For example, using `deploy-wh` with sample output:
```
$ node tools\deploy-wh.js tools\gkeys.test 1000 OPDM7ACAW64Q4VBWAL77Z5SHSJVZZ44V3BAN7W44U43SUXEOUENZMZYOQU testnet keys\owner.key
Pricecaster v2 Apps Deployment Tool
Copyright 2022 Wormhole Project Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Parameters for deployment:
From: OPDM7ACAW64Q4VBWAL77Z5SHSJVZZ44V3BAN7W44U43SUXEOUENZMZYOQU
Network: testnet
Guardian expiration time: 1000
Guardian Keys: (1) 13947Bd48b18E53fdAeEe77F3473391aC727C638
Enter YES to confirm parameters, anything else to abort. YES
Compiling programs ...
,VAA Processor Program
Compiling approval program...
Written to teal/wormhole/build/vaa-processor-approval.teal
Compiling clear state program...
Written to teal/wormhole/build/vaa-processor-clear.teal
,
,Pricekeeper V2 Program
Compiling approval program...
Written to teal/wormhole/build/pricekeeper-v2-approval.teal
Compiling clear state program...
Written to teal/wormhole/build/pricekeeper-v2-clear.teal
,
Creating VAA Processor...
txId: WS7GE5A6YAADHVNH5OU337MK7T325AE2GML5S3RWK2VTNCQ23HWA
Deployment App Id: 52438261
Creating Pricekeeper V2...
txId: FICS3HFALLJTMFGEVC65IQ67NCYRJATR32QWZS5VMKGEXHBJJUVA
Deployment App Id: 52438280
Setting VAA Processor authid parameter...
txId: 5NVJGG32DRWAURD3LUHPELJAZTFMM6HLAJPPGNPXNDC5FJFDNVUQ
Compiling verify VAA stateless code...
,VAA Verify Stateless Program
Compiling...
Written to teal/wormhole/build/vaa-verify.teal
,
Stateless program address: KRNYKVVWZDCNOPLL63ZHFOKG2IIY7REBYTPVR5TJLD67JR6FMRJXYW63TI
Setting VAA Processor stateless code...
txId: 5NVJGG32DRWAURD3LUHPELJAZTFMM6HLAJPPGNPXNDC5FJFDNVUQ
Writing deployment results file DEPLOY-1639769594911...
Writing stateless code binary file VAA-VERIFY-1639769594911.BIN...
Bye.
```
* To operate, the stateless contract address must be supplied with funds to pay fees when submitting transactions.
* Use the generated `DEPLOY-XXX` file to set values in the `settings-worm.ts` file (or your current one): app ids and stateless hash.
* Copy the generated `VAA-VERIFY-xxx` file as `vaa-verify.bin` under the `bin` directory.
## Backend Configuration
The backend will read configuration from a `settings.ts` file pointed by the `PRICECASTER_SETTINGS` environment variable.
### Diagnosing failed transactions
If a transaction fails, a diagnostic system is available where the group TX is dumped in a directory. To use this, set the relevant settings file:
```
algo: {
...
dumpFailedTx: true,
dumpFailedTxDirectory: './dump'
},
```
The dump directory will be filled with files named `failed-xxxx.stxn`. You can use this file and `goal clerk` to trigger the stateless logic checks:
```
root@47d99e4cfffc:~/testnetwork/Node# goal clerk dryrun -t failed-1641324602942.stxn
tx[0] trace:
1 intcblock 1 8 0 32 66 20 => <empty stack>
9 bytecblock 0x => <empty stack>
12 txn Fee => (1000 0x3e8)
14 pushint 1000 => (1000 0x3e8)
.
.
.
47 txn ApplicationID => (622608992 0x251c4260)
49 pushint 596576475 => (596576475 0x238f08db)
55 == => (0 0x0)
56 assert =>
56 assert failed pc=56
REJECT
ERROR: assert failed pc=56
```
In this example output, this means the logic failed due to mismatched stateful application id.
For a stateful run, you must do a remote dryrun. This is done by:
```
goal clerk dryrun -t failed-1641324602942.stxn --dryrun-dump -o dump.dr
goal clerk dryrun-remote -D dump.dr -v
```
## Running the system
Check the `package.json` file for `npm run start-xxx` automated commands.
## Tests
Tests can be run for the old `Pricekeeper` contract, and for the new set of Wormhole client contracts:
`npm run pkeeper-sc-test`
`npm run wormhole-sc-test`
Backend tests will come shortly.
## Appendix
### Common errors
**TransactionPool.Remember: transaction XMGXHGC4GVEHQD2T7MZDKTFJWFRY5TFXX2WECCXBWTOZVHC7QLAA: overspend, account X**
If account X is the stateless program address, this means that this account is without enough balance to pay the fees for each TX group.
### Sample Pyth VAA
This is a sample signed VAA from Pyth that we process.
**Base64**
```
AQAAAAABAFv4FwzmQ+mPX0PYbc4TC5rX/z0B5OxZSJ80YZyjJN+CZLespNQSyq/qJHqvqjbM09AoCYQCzFv5oz9Sv8hnwaYBYaX/mgAACFkAATr9qEHB9D3X1UbIpYG6H5KhOfQTP59qsJVVj2o1nfXUAAAAAAAAABIgUDJXSAABASMKv+DsO0YL1V/E+zY1ZxYymRUUVJcgK464vxr2oKO5/mUPA2fUp++YFaWT6hXTZZPwZDqq8BSbsEvmerhR3s0BAAAALxclQ4j////3AAAALu1z2QAAAAAAcNO0PwAAAAA3+qA9AAAAAA6eVVEAAAAAiUrxHAAAAAA3+qA9AAAAAA3abrgBAAAAAABhpf+a
```
**Hex-Decoded**
```
010000000001005bf8170ce643e98f5f43d86dce130b9ad7ff3d01e4ec59489f34619ca324df8264b7aca4d412caafea247aafaa36ccd3d028098402cc5bf9a33f52bfc867c1a60161a5ff9a0000085900013afda841c1f43dd7d546c8a581ba1f92a139f4133f9f6ab095558f6a359df5d400000000000000122050325748000101230abfe0ec3b460bd55fc4fb36356716329915145497202b8eb8bf1af6a0a3b9fe650f0367d4a7ef9815a593ea15d36593f0643aaaf0149bb04be67ab851decd010000002f17254388fffffff70000002eed73d9000000000070d3b43f0000000037faa03d000000000e9e555100000000894af11c0000000037faa03d000000000dda6eb801000000000061a5ff9a
```
**Field-Decoded**
```
01 version
00000000 guardian-set-index
01 signature-count
00 sig index 0
5bf8170ce643e98f5f43d86dce130b9ad7ff3d01e4ec59489f34619ca324df8264b7aca4d412caafea247aafaa36ccd3d028098402cc5bf9a33f52bfc867c1a601 sig 0
61a5ff9a timestamp
00000859 nonce
0001 chain-id
3afda841c1f43dd7d546c8a581ba1f92a139f4133f9f6ab095558f6a359df5d4 emitter-address
0000000000000012 sequence
20 consistency-level
payload:
503257480001 header
01 payload-id
230abfe0ec3b460bd55fc4fb36356716329915145497202b8eb8bf1af6a0a3b9 product_id
fe650f0367d4a7ef9815a593ea15d36593f0643aaaf0149bb04be67ab851decd price_id
01 price_type
0000002f17254388 price
fffffff7 exponent
0000002eed73d900 twap value
0000000070d3b43f twap numerator for next upd
0000000037faa03d twap denom for next upd
000000000e9e5551 twac value
00000000894af11c twac numerator for next upd
0000000037faa03d twac denom for next upd
000000000dda6eb8 confidence
01 status
00 corporate_act
0000000061a5ff9a timestamp (based on Solana contract call time)
```

View File

@ -1,64 +0,0 @@
/* eslint-disable no-unused-vars */
/* eslint-disable camelcase */
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Configuration entry describing one Pyth symbol the service tracks and publishes. */
export type Symbol = {
  name: string, // human-readable symbol name, used for logging
  productId: string, // Pyth product identifier (hex string; concatenated with priceId as a map key)
  priceId: string, // Pyth price identifier (hex string)
  publishIntervalSecs: number, // how often a worker publishes this symbol, in seconds
  pubCount: number // running publication counter, mutated by the engine
}
/** Parsed VAA structure as produced by the wormhole core WASM `parse_vaa` call. */
export type VAA = {
  version: number, // VAA format version
  guardian_set_index: number, // index of the guardian set that signed this VAA
  signatures: [], // guardian signatures (66 bytes each on the wire: 1 index byte + 65 signature bytes)
  timestamp: number,
  nonce: number,
  emitter_chain: number, // Wormhole chain id of the emitter
  emitter_address: [], // 32-byte emitter address
  sequence: number,
  consistency_level: number,
  payload: [] // raw message payload bytes
}
/**
 * Pyth price data extracted from a VAA, split into the pieces the
 * publisher needs (body and signatures) plus decoded payload fields.
 */
export type PythData = {
  vaaBody: Buffer, // VAA bytes after the signature block (header + body)
  signatures: Buffer, // raw guardian signature block (66 bytes per signature)
  // Informational fields.
  symbol?: string, // symbol name resolved from the product/price id pair
  price_type?: number,
  price?: BigInt, // integer price; scale given by `exponent`
  exponent?: number, // signed power-of-ten exponent applied to price fields
  twap?: BigInt, // time-weighted average price value
  twap_num_upd?: BigInt, // TWAP numerator for next update
  twap_denom_upd?: BigInt, // TWAP denominator for next update
  twac?: BigInt, // time-weighted average confidence value
  twac_num_upd?: BigInt,
  twac_denom_upd?: BigInt,
  confidence?: BigInt, // confidence interval around `price`
  status?: number, // trading status reported by Pyth
  corporate_act?: number,
  timestamp?: BigInt // timestamp carried in the payload
}

View File

@ -1,132 +0,0 @@
/* eslint-disable camelcase */
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A generic Price ticker information class
*/
/**
 * A generic price ticker information class.
 *
 * Plain value holder: every field supplied at construction time is exposed
 * through a matching get/set accessor pair.
 */
export class PriceTicker {
  /** symbol name */
  private _symbol: string
  /** integer price (scale given by exponent) */
  private _price: BigInt
  /** price type discriminator */
  private _price_type: number
  /** a confidence interval */
  private _confidence: BigInt
  /** exponent (fixed point) */
  private _exponent: number
  /** time-weighted average price */
  private _twap: BigInt
  /** time-weighted average confidence */
  private _twac: BigInt
  /** time in blockchain network units */
  private _timestamp: BigInt
  /** opaque caller-supplied payload */
  private _user_data: any

  constructor (
    symbol: string,
    price: BigInt,
    price_type: number,
    confidence: BigInt,
    exponent: number,
    twap: BigInt,
    twac: BigInt,
    timestamp: BigInt,
    user_data?: any) {
    this._symbol = symbol
    this._price = price
    this._price_type = price_type
    this._confidence = confidence
    this._exponent = exponent
    this._twap = twap
    this._twac = twac
    this._timestamp = timestamp
    this._user_data = user_data
  }

  public get symbol (): string { return this._symbol }
  public set symbol (value: string) { this._symbol = value }

  public get price (): BigInt { return this._price }
  public set price (value: BigInt) { this._price = value }

  public get price_type (): number { return this._price_type }
  public set price_type (value: number) { this._price_type = value }

  public get confidence (): BigInt { return this._confidence }
  public set confidence (value: BigInt) { this._confidence = value }

  public get exponent (): number { return this._exponent }
  public set exponent (value: number) { this._exponent = value }

  public get timestamp (): BigInt { return this._timestamp }
  public set timestamp (value: BigInt) { this._timestamp = value }

  public get twac (): BigInt { return this._twac }
  public set twac (value: BigInt) { this._twac = value }

  public get twap (): BigInt { return this._twap }
  public set twap (value: BigInt) { this._twap = value }

  public get user_data (): any { return this._user_data }
  public set user_data (value: any) { this._user_data = value }
}

View File

@ -1,55 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Options } from '@randlabs/js-logger'
import { Symbol } from './basetypes'
/** Full application configuration, loaded via js-config-reader from the PRICECASTER_SETTINGS file. */
export interface IAppSettings extends Record<string, unknown> {
  log: Options, // logger configuration (passed straight to @randlabs/js-logger)
  algo: {
    token: string, // algod API token
    api: string, // algod API server URL
    port: string, // algod API port
    dumpFailedTx: boolean, // when true, dump failed transaction groups to disk
    dumpFailedTxDirectory?: string // output directory for failed-tx dumps
  },
  apps: {
    priceKeeperV2AppId: number, // Pricekeeper V2 application id
    ownerAddress: string, // owner account address
    ownerKeyFile: string, // path to file holding the owner mnemonic
    vaaVerifyProgramBinFile: string, // path to compiled stateless VAA-verify program
    vaaVerifyProgramHash: string, // hash (address) of the verify program
    vaaProcessorAppId: number, // VAA processor application id
  },
  pyth: {
    chainId: number, // Wormhole chain id of the Pyth emitter
    emitterAddress: string, // hex-encoded Pyth emitter address
  },
  debug?: {
    logAllVaa?: boolean,
  }
  wormhole: {
    spyServiceHost: string // host:port of the Wormhole spy RPC service
  },
  strategy: {
    bufferSize: number // price buffer size for the aggregation strategy
  },
  symbols: Symbol[] // the set of Pyth symbols to track and publish
}

View File

@ -1,25 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Asynchronously pause execution.
 * @param ms Delay in milliseconds before the returned promise resolves.
 * @returns A promise that resolves (with no value) after the delay.
 */
export function sleep (ms: number): Promise<void> {
  return new Promise<void>(resolve => setTimeout(resolve, ms))
}

View File

@ -1,33 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint-disable no-unused-vars */
/** Result codes reported by fetch/publish operations across the backend. */
export enum StatusCode {
  OK, // operation completed successfully
  NO_TICKER, // no price ticker available from the fetcher data source
  ERROR_CREATE_MESSAGE, // error while creating the outbound message
  ERROR_SUBMIT_MESSAGE, // error while submitting the message (see PublishInfo.reason)
  GENERAL_ERROR // unspecified failure
}
// export const StatusToString = {
// StatusCode.OK: 'Operation successful'
// }

View File

@ -1,23 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Contract for the service's top-level engine. */
export interface IEngine {
  /** Boot all components and run; the returned promise resolves when the engine stops. */
  start(): Promise<void>
}

View File

@ -1,124 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { IEngine } from './IEngine'
import { StrategyLastPrice } from '../strategy/strategyLastPrice'
import { IAppSettings } from '../common/settings'
import { IPriceFetcher } from '../fetcher/IPriceFetcher'
import { IPublisher, PublishInfo } from '../publisher/IPublisher'
import { PriceTicker } from '../common/priceTicker'
import { StatusCode } from '../common/statusCodes'
import { WormholePythPriceFetcher } from '../fetcher/WormholePythPriceFetcher'
import { Symbol } from 'backend/common/basetypes'
import { Pricekeeper2Publisher } from '../publisher/Pricekeeper2Publisher'
import * as Logger from '@randlabs/js-logger'
import { sleep } from '../common/sleep'
const fs = require('fs')
const algosdk = require('algosdk')
/** Outcome of one fetch-and-publish worker pass. */
type WorkerRoutineStatus = {
  status: StatusCode, // overall result code
  reason?: string, // failure detail, when available
  tick?: PriceTicker, // the data returned by the fetcher (present when one was available)
  pub?: PublishInfo // publisher result (present when a publish was attempted)
}
/**
 * Run one fetch-and-publish pass for a symbol: query the fetcher for the
 * latest data and, if any is available, hand it to the publisher.
 * @param sym Symbol to process (looked up by productId + priceId).
 * @param fetcher Source of price data.
 * @param publisher Sink that submits the data on-chain.
 * @returns Status of the pass, including ticker and publish info on success.
 */
async function workerRoutine (sym: Symbol, fetcher: IPriceFetcher, publisher: IPublisher): Promise<WorkerRoutineStatus> {
  const ticker = fetcher.queryData(sym.productId + sym.priceId)
  if (ticker === undefined) {
    return { status: StatusCode.NO_TICKER }
  }
  const pubInfo = await publisher.publish(ticker)
  return { status: pubInfo.status, reason: pubInfo.reason, tick: ticker, pub: pubInfo }
}
/**
 * Main engine: wires a WormholePythPriceFetcher to a Pricekeeper2Publisher
 * and schedules one periodic worker per configured symbol.
 */
export class WormholeClientEngine implements IEngine {
  private settings: IAppSettings
  private shouldQuit: boolean // set by the SIGINT handler; start() polls it
  constructor (settings: IAppSettings) {
    this.settings = settings
    this.shouldQuit = false
  }
  /**
   * Boot publisher and fetcher, start one setInterval worker per symbol,
   * then idle until SIGINT flips shouldQuit.
   */
  async start () {
    process.on('SIGINT', () => {
      console.log('Received SIGINT')
      // NOTE(review): Logger.finalize() is not awaited here — if it returns a
      // promise, pending log writes may be lost on shutdown; confirm.
      Logger.finalize()
      this.shouldQuit = true
    })
    let mnemo, verifyProgramBinary
    try {
      // Owner mnemonic and compiled stateless verify program are read from disk.
      // NOTE(review): mnemo is passed through toString() below without trimming —
      // a trailing newline in the key file may break mnemonicToSecretKey; verify.
      mnemo = fs.readFileSync(this.settings.apps.ownerKeyFile)
      verifyProgramBinary = Uint8Array.from(fs.readFileSync(this.settings.apps.vaaVerifyProgramBinFile))
    } catch (e) {
      throw new Error('Cannot read account and/or verify program source: ' + e)
    }
    const publisher = new Pricekeeper2Publisher(this.settings.apps.vaaProcessorAppId,
      this.settings.apps.priceKeeperV2AppId,
      this.settings.apps.ownerAddress,
      verifyProgramBinary,
      this.settings.apps.vaaVerifyProgramHash,
      algosdk.mnemonicToSecretKey(mnemo.toString()),
      this.settings.algo.token,
      this.settings.algo.api,
      this.settings.algo.port,
      this.settings.algo.dumpFailedTx,
      this.settings.algo.dumpFailedTxDirectory
    )
    const fetcher = new WormholePythPriceFetcher(this.settings.wormhole.spyServiceHost,
      this.settings.pyth.chainId,
      this.settings.pyth.emitterAddress,
      this.settings.symbols,
      new StrategyLastPrice(this.settings.strategy.bufferSize))
    Logger.info('Waiting for fetcher to boot...')
    await fetcher.start()
    Logger.info('Waiting for publisher to boot...')
    await publisher.start()
    for (const sym of this.settings.symbols) {
      sym.pubCount = 0
      Logger.info(`Starting worker for symbol ${sym.name}, interval ${sym.publishIntervalSecs}s`)
      // callWorkerRoutine is passed unbound; it does not reference `this`,
      // so losing the receiver inside setInterval is harmless.
      setInterval(this.callWorkerRoutine, sym.publishIntervalSecs * 1000, sym, fetcher, publisher)
    }
    // Keep the process alive; intervals above do the actual work.
    while (!this.shouldQuit) {
      await sleep(1000)
    }
  }
  /** Interval callback: run one worker pass and log the outcome. */
  async callWorkerRoutine (sym: Symbol, fetcher: IPriceFetcher, publisher: IPublisher) {
    const wrs = await workerRoutine(sym, fetcher, publisher)
    switch (wrs.status) {
      case StatusCode.OK: {
        Logger.info(`${sym.name} [#${sym.pubCount++}] price: ${wrs.tick!.price} ± ${wrs.tick!.confidence} exp: ${wrs.tick!.exponent} t: ${wrs.tick!.timestamp} TxID: ${wrs.pub!.txid}`)
        break
      }
      case StatusCode.NO_TICKER:
        Logger.warn(`${sym.name}: No ticker available from fetcher data source`)
        break
      default:
        Logger.error(`${sym.name}: Error. Reason: ` + wrs.reason)
    }
  }
}

View File

@ -1,38 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { IStrategy } from '../strategy/strategy'
/** Contract for components that stream price data into the service. */
export interface IPriceFetcher {
  /** Begin receiving price data. */
  start(): void
  /** Stop receiving price data. */
  stop(): void
  /** True once at least one price update has been received. */
  hasData(): boolean
  /**
   * Set price aggregation strategy for this fetcher.
   * @param s The local price aggregation strategy
   */
  setStrategy(s: IStrategy): void
  /**
   * Get the current price of a symbol, according to running strategy.
   * @param id Symbol identifier (concatenation of product id and price id).
   */
  queryData(id: string): any | undefined
}

View File

@ -1,152 +0,0 @@
/* eslint-disable camelcase */
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
importCoreWasm,
setDefaultWasm
} from '@certusone/wormhole-sdk/lib/cjs/solana/wasm'
import {
createSpyRPCServiceClient, subscribeSignedVAA
} from '@certusone/wormhole-spydk'
import { SpyRPCServiceClient } from '@certusone/wormhole-spydk/lib/cjs/proto/spy/v1/spy'
import { PythData, Symbol, VAA } from 'backend/common/basetypes'
import { IStrategy } from '../strategy/strategy'
import { IPriceFetcher } from './IPriceFetcher'
import * as Logger from '@randlabs/js-logger'
/**
 * Price fetcher that subscribes to signed Pyth VAAs through a Wormhole spy
 * service, decodes the payload, and caches the latest data per symbol.
 */
export class WormholePythPriceFetcher implements IPriceFetcher {
  // Latest decoded data per symbol, keyed by productId+priceId (hex).
  private symbolMap: Map<string, {
    name: string,
    publishIntervalSecs: number,
    pythData: PythData | undefined
  }>
  private client: SpyRPCServiceClient
  // Pyth emitter address kept both as hex string (for the spy filter)
  // and as a raw byte array.
  private pythEmitterAddress: { s: string, data: number[] }
  private pythChainId: number
  private strategy: IStrategy
  private stream: any // gRPC stream returned by subscribeSignedVAA
  private _hasData: boolean
  private coreWasm: any // wormhole core WASM module (provides parse_vaa)
  /**
   * @param spyRpcServiceHost Host:port of the Wormhole spy RPC service.
   * @param pythChainId Wormhole chain id of the Pyth emitter.
   * @param pythEmitterAddress Hex-encoded Pyth emitter address.
   * @param symbols Symbols to track; others are ignored on arrival.
   * @param strategy Local price aggregation strategy.
   */
  constructor (spyRpcServiceHost: string, pythChainId: number, pythEmitterAddress: string, symbols: Symbol[], strategy: IStrategy) {
    setDefaultWasm('node')
    this._hasData = false
    this.client = createSpyRPCServiceClient(spyRpcServiceHost)
    this.pythChainId = pythChainId
    this.pythEmitterAddress = {
      data: Buffer.from(pythEmitterAddress, 'hex').toJSON().data,
      s: pythEmitterAddress
    }
    this.strategy = strategy
    this.symbolMap = new Map()
    symbols.forEach((sym) => {
      this.symbolMap.set(sym.productId + sym.priceId, {
        name: sym.name,
        publishIntervalSecs: sym.publishIntervalSecs,
        pythData: undefined
      })
    })
  }
  /** Load the core WASM and subscribe to signed VAAs from the Pyth emitter. */
  async start () {
    this.coreWasm = await importCoreWasm()
    // eslint-disable-next-line camelcase
    this.stream = await subscribeSignedVAA(this.client,
      {
        filters:
          [{
            emitterFilter: {
              chainId: this.pythChainId,
              emitterAddress: this.pythEmitterAddress.s
            }
          }]
      })
    this.stream.on('data', (data: { vaaBytes: Buffer }) => {
      try {
        this._hasData = true
        this.onPythData(data.vaaBytes)
      } catch (e) {
        Logger.error(`Failed to parse VAA data. \nReason: ${e}\nData: ${data}`)
      }
    })
    this.stream.on('error', (e: Error) => {
      Logger.error('Stream error: ' + e)
    })
  }
  // NOTE(review): stop() only clears the data flag; the gRPC stream is not
  // closed here — confirm whether that is intentional.
  stop (): void {
    this._hasData = false
  }
  setStrategy (s: IStrategy) {
    this.strategy = s
  }
  hasData (): boolean {
    // Return when any price is ready
    return this._hasData
  }
  /**
   * Return the cached PythData for the given productId+priceId key,
   * or undefined (after logging) for an unknown symbol.
   */
  queryData (id: string): any | undefined {
    const v = this.symbolMap.get(id)
    if (v === undefined) {
      Logger.error(`Unsupported symbol with identifier ${id}`)
    } else {
      return v.pythData
    }
  }
  /**
   * Decode an incoming VAA and cache its payload fields for the symbol.
   *
   * Payload layout (fixed offsets): bytes 0-6 header+payload-id, 7-38
   * product_id, 39-70 price_id, 71 price_type, 72-79 price, 80-83 exponent,
   * 84-131 twap/twac fields (not extracted here), 132-139 confidence,
   * 140 status, 141 corporate_act, 142-149 timestamp.
   * The signature block starts at byte 6 of the raw VAA (66 bytes per signature).
   */
  private async onPythData (vaaBytes: Buffer) {
    // console.log(vaaBytes.toString('hex'))
    const v: VAA = this.coreWasm.parse_vaa(new Uint8Array(vaaBytes))
    const payload = Buffer.from(v.payload)
    const productId = payload.slice(7, 7 + 32)
    const priceId = payload.slice(7 + 32, 7 + 32 + 32)
    // console.log(productId.toString('hex'), priceId.toString('hex'))
    const k = productId.toString('hex') + priceId.toString('hex')
    const sym = this.symbolMap.get(k)
    // Unknown product/price pairs are silently ignored.
    if (sym !== undefined) {
      sym.pythData = {
        symbol: sym.name,
        vaaBody: vaaBytes.slice(6 + v.signatures.length * 66),
        signatures: vaaBytes.slice(6, 6 + v.signatures.length * 66),
        price_type: payload.readInt8(71),
        price: payload.readBigUInt64BE(72),
        exponent: payload.readInt32BE(80),
        confidence: payload.readBigUInt64BE(132),
        status: payload.readInt8(140),
        corporate_act: payload.readInt8(141),
        timestamp: payload.readBigUInt64BE(142)
      }
    }
    // if (pythPayload.status === 0) {
    //   console.log('WARNING: Symbol trading status currently halted (0). Publication will be skipped.')
    // } else
    // eslint-disable-next-line no-lone-blocks
  }
}

View File

@ -1,49 +0,0 @@
/* eslint-disable func-call-spacing */
/* eslint-disable no-unused-vars */
/**
* Pricecaster Service.
*
* Main program file.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as Config from '@randlabs/js-config-reader'
import { IAppSettings } from './common/settings'
import { exit } from 'process'
import { WormholeClientEngine } from './engine/WormholeEngine'
import * as Logger from '@randlabs/js-logger'
const charm = require('charm')();
// Entry point: print the banner, load configuration, then run the engine.
(async () => {
  // Banner output via charm terminal styling.
  charm.pipe(process.stdout)
  charm.reset()
  charm.foreground('cyan').display('bright')
  console.log('Pricecaster Service Backend -- (c) 2022 Wormhole Project Contributors\n')
  charm.foreground('white')
  let settings: IAppSettings
  try {
    // Settings file location comes from the PRICECASTER_SETTINGS env var.
    await Config.initialize<IAppSettings>({ envVar: 'PRICECASTER_SETTINGS' })
    settings = Config.get<IAppSettings>()
    await Logger.initialize(settings.log)
  } catch (e: any) {
    console.error('Cannot initialize configuration: ' + e.toString())
    exit(1)
  }
  const engine = new WormholeClientEngine(settings)
  await engine.start()
})()

View File

@ -1,37 +0,0 @@
/* eslint-disable no-unused-vars */
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { PriceTicker } from '../common/priceTicker'
import { StatusCode } from '../common/statusCodes'
/** Result of a publish operation, returned by IPublisher.publish. */
export type PublishInfo = {
  status: StatusCode, // overall result code
  // Fix: these were typed `''` (the empty-string *literal* type), which makes
  // any real assignment — e.g. the error text set by Pricekeeper2Publisher —
  // a type error. They are plain strings.
  reason?: string, // failure detail, when available
  msgb64?: string, // base64-encoded message, when available
  block?: BigInt // block in which the transaction landed, when available
  txid?: string // transaction id of the submitted group
}
/** Contract for components that submit price data on-chain. */
export interface IPublisher {
  /** Perform any startup work before publishing. */
  start(): void
  /** Stop publishing. */
  stop(): void
  /**
   * Submit one piece of price data.
   * @param data The data to publish (e.g. PythData).
   * @returns Publish outcome, including status and transaction id on success.
   */
  publish(data: any): Promise<PublishInfo>
}

View File

@ -1,16 +0,0 @@
import { IPublisher, PublishInfo } from '../publisher/IPublisher'
import { PriceTicker } from '../common/priceTicker'
/**
 * Stub IPublisher: every operation throws. Serves as a placeholder where
 * no real publishing backend is wired in.
 */
export class NullPublisher implements IPublisher {
  /** Shared unimplemented-operation failure. */
  private fail (): never {
    throw new Error('Method not implemented.')
  }

  start (): void {
    this.fail()
  }

  stop (): void {
    this.fail()
  }

  publish (tick: PriceTicker): Promise<PublishInfo> {
    return this.fail()
  }
}

View File

@ -1,112 +0,0 @@
import algosdk from 'algosdk'
import { IPublisher, PublishInfo } from './IPublisher'
import { StatusCode } from '../common/statusCodes'
import { PythData } from 'backend/common/basetypes'
const PricecasterLib = require('../../lib/pricecaster')
const tools = require('../../tools/app-tools')
/**
 * Publishes Pyth price data to the Pricekeeper V2 app by building a
 * transaction group: N stateless VAA-verify transactions (one per guardian
 * subset of size `vssize`) followed by a price-store call.
 */
export class Pricekeeper2Publisher implements IPublisher {
  private algodClient: algosdk.Algodv2
  private pclib: any // PricecasterLib instance wrapping tx-group construction
  private account: algosdk.Account // signing account
  private vaaProcessorAppId: number
  private vaaProcessorOwner: string
  private numOfVerifySteps: number = 0 // ceil(guardianCount / stepSize)
  private guardianCount: number = 0 // read from VAA-processor global state ('gscount')
  private stepSize: number = 0 // guardians verified per tx, from global state ('vssize')
  private dumpFailedTx: boolean
  private dumpFailedTxDirectory: string | undefined
  private compiledVerifyProgram: { bytes: Uint8Array, hash: string } = { bytes: new Uint8Array(), hash: '' }
  /**
   * @param vaaProcessorAppId Application id of the VAA-processor contract.
   * @param priceKeeperAppId Application id of the Pricekeeper V2 contract.
   * @param vaaProcessorOwner Owner address of the VAA processor.
   * @param verifyProgramBinary Compiled stateless VAA-verify program bytes.
   * @param verifyProgramHash Hash (address) of the verify program.
   * @param signKey Account used to sign the transaction group.
   * @param algoClientToken Algod API token.
   * @param algoClientServer Algod API server URL.
   * @param algoClientPort Algod API port.
   * @param dumpFailedTx When true, dump failed tx groups to disk.
   * @param dumpFailedTxDirectory Output directory for failed-tx dumps.
   */
  constructor (vaaProcessorAppId: number,
    priceKeeperAppId: number,
    vaaProcessorOwner: string,
    verifyProgramBinary: Uint8Array,
    verifyProgramHash: string,
    signKey: algosdk.Account,
    algoClientToken: string,
    algoClientServer: string,
    algoClientPort: string,
    dumpFailedTx: boolean = false,
    dumpFailedTxDirectory: string = './') {
    this.account = signKey
    this.compiledVerifyProgram.bytes = verifyProgramBinary
    this.compiledVerifyProgram.hash = verifyProgramHash
    this.vaaProcessorAppId = vaaProcessorAppId
    this.vaaProcessorOwner = vaaProcessorOwner
    this.dumpFailedTx = dumpFailedTx
    this.dumpFailedTxDirectory = dumpFailedTxDirectory
    this.algodClient = new algosdk.Algodv2(algoClientToken, algoClientServer, algoClientPort)
    this.pclib = new PricecasterLib.PricecasterLib(this.algodClient)
    this.pclib.setAppId('vaaProcessor', vaaProcessorAppId)
    this.pclib.setAppId('pricekeeper', priceKeeperAppId)
    this.pclib.enableDumpFailedTx(this.dumpFailedTx)
    this.pclib.setDumpFailedTxDirectory(this.dumpFailedTxDirectory)
  }
  async start () {
  }
  stop () {
  }
  /** Sign a transaction with the configured account. */
  signCallback (sender: string, tx: algosdk.Transaction) {
    const txSigned = tx.signTxn(this.account.sk)
    return txSigned
  }
  /**
   * Build, sign and submit the verify + price-store transaction group.
   * @param data Decoded Pyth data (VAA body, signatures, symbol).
   * @returns PublishInfo with the group's txid, or an error status/reason.
   */
  async publish (data: PythData): Promise<PublishInfo> {
    const publishInfo: PublishInfo = { status: StatusCode.OK }
    const txParams = await this.algodClient.getTransactionParams().do()
    txParams.fee = 1000
    txParams.flatFee = true
    this.guardianCount = await tools.readAppGlobalStateByKey(this.algodClient, this.vaaProcessorAppId, this.vaaProcessorOwner, 'gscount')
    this.stepSize = await tools.readAppGlobalStateByKey(this.algodClient, this.vaaProcessorAppId, this.vaaProcessorOwner, 'vssize')
    // Validate the global-state reads before deriving the step count.
    if (this.guardianCount === 0 || this.stepSize === 0) {
      throw new Error('cannot get guardian count and/or step-size from global state')
    }
    this.numOfVerifySteps = Math.ceil(this.guardianCount / this.stepSize)
    //
    // (!)
    // Stateless programs cannot access state nor stack from stateful programs, so
    // for the VAA Verify program to use the guardian set, we pass the global state as TX argument,
    // (and check it against the current global list to be sure it's ok). This way it can be read by
    // VAA verifier as a stateless program CAN DO READS of call transaction arguments in a group.
    // The same technique is used for the note field, where the payload is set.
    //
    try {
      const guardianKeys = []
      const buf = Buffer.alloc(8)
      for (let i = 0; i < this.guardianCount; i++) {
        // Fix: write BigInt(i), not BigInt(i++) — the extra increment advanced
        // the loop index twice per pass, skipping every other guardian key.
        buf.writeBigUInt64BE(BigInt(i))
        const gk = await tools.readAppGlobalStateByKey(this.algodClient, this.vaaProcessorAppId, this.vaaProcessorOwner, buf.toString())
        guardianKeys.push(Buffer.from(gk, 'base64').toString('hex'))
      }
      const strSig = data.signatures.toString('hex')
      const gid = this.pclib.beginTxGroup()
      const sigSubsets = []
      for (let i = 0; i < this.numOfVerifySteps; i++) {
        const st = this.stepSize * i
        const sigSetLen = 132 * this.stepSize // 66 sig bytes = 132 hex chars per guardian
        // The last subset takes whatever keys/signatures remain.
        const keySubset = guardianKeys.slice(st, i < this.numOfVerifySteps - 1 ? st + this.stepSize : undefined)
        sigSubsets.push(strSig.slice(i * sigSetLen, i < this.numOfVerifySteps - 1 ? ((i * sigSetLen) + sigSetLen) : undefined))
        this.pclib.addVerifyTx(gid, this.compiledVerifyProgram.hash, txParams, data.vaaBody, keySubset, this.guardianCount)
      }
      this.pclib.addPriceStoreTx(gid, this.vaaProcessorOwner, txParams, data.symbol, data.vaaBody.slice(51))
      const txId = await this.pclib.commitVerifyTxGroup(gid, this.compiledVerifyProgram.bytes, sigSubsets, this.vaaProcessorOwner, this.signCallback.bind(this))
      publishInfo.txid = txId
    } catch (e: any) {
      publishInfo.status = StatusCode.ERROR_SUBMIT_MESSAGE
      // Fix: errors without an HTTP response (e.g. network failures) previously
      // crashed here dereferencing e.response; guard with optional chaining.
      publishInfo.reason = e?.response?.text ? e.response.text : e.toString()
      return publishInfo
    }
    return publishInfo
  }
}

View File

@ -1,55 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { PriceTicker } from '../common/priceTicker'
/**
 * Implements a strategy for obtaining an asset price from
 * a set of received prices in a buffer.
 */
export interface IStrategy {
  /**
   * Create (or recreate) the internal price buffer.
   * @param size The maximum size of the buffer
   */
  createBuffer(size: number): void
  /**
   * Clear price buffer
   */
  clearBuffer(): void
  /**
   * Returns the current number of items in buffer
   */
  bufferCount(): number
  /**
   * Put a new price in buffer.
   * @param ticker The price ticker to store
   * @returns true if successful.
   */
  put(ticker: PriceTicker): boolean
  /**
   * Get the calculated price according to selected strategy.
   */
  getPrice(): PriceTicker | undefined
}

View File

@ -1,57 +0,0 @@
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { PriceTicker } from '../common/priceTicker'
import { IStrategy } from './strategy'
/**
 * A base class for queue-based buffer strategies.
 *
 * Maintains a FIFO buffer of tickers capped at `bufSize`; once full, the
 * oldest entry is evicted to make room for each new one. Concrete
 * strategies implement getPrice() over the buffered values.
 */
export abstract class StrategyBase implements IStrategy {
  protected buffer!: PriceTicker[]
  protected bufSize!: number

  constructor (bufSize: number = 10) {
    this.createBuffer(bufSize)
  }

  /** Allocate a fresh, empty buffer with the given capacity. */
  createBuffer (maxSize: number): void {
    this.bufSize = maxSize
    this.buffer = []
  }

  /** Drop all buffered tickers in place. */
  clearBuffer (): void {
    this.buffer.length = 0
  }

  /** Number of tickers currently buffered. */
  bufferCount (): number {
    return this.buffer.length
  }

  /**
   * Append a ticker, evicting the oldest entry when at capacity.
   * @returns always true.
   */
  put (priceEntry: PriceTicker): boolean {
    const atCapacity = this.buffer.length === this.bufSize
    if (atCapacity) {
      this.buffer.shift()
    }
    this.buffer.push(priceEntry)
    return true
  }

  abstract getPrice(): PriceTicker | undefined
}

View File

@ -1,33 +0,0 @@
import { PriceTicker } from '../common/priceTicker'
import { StrategyBase } from './strategyBase'
/**
* Pricecaster Service.
*
* Fetcher backend component.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * This strategy just caches the last provided price,
 * acting as a single-item buffer.
 */
export class StrategyLastPrice extends StrategyBase {
  /** Return the newest buffered ticker (undefined if empty) and flush the buffer. */
  getPrice (): PriceTicker | undefined {
    const latest = this.buffer.pop()
    this.clearBuffer()
    return latest
  }
}

View File

@ -1 +0,0 @@
Output failed transaction dumps here.

View File

@ -1 +0,0 @@
REPLACE WITH MNEMONIC WORDS.

View File

@ -1 +0,0 @@
declare module 'PricecasterLib';

View File

@ -1,587 +0,0 @@
/**
*
* Pricecaster Service Utility Library.
*
* Copyright 2022 Wormhole Project Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
const algosdk = require('algosdk')
const fs = require('fs')
// eslint-disable-next-line camelcase
const tools = require('../tools/app-tools')
const crypto = require('crypto')
// Static registry of the on-chain contracts this library manages:
// TEAL source file locations, compiled program bytes/hashes (filled in by
// the compile* helpers), and the deployed application id for each contract.
const ContractInfo = {
  pricekeeper: {
    approvalProgramFile: 'teal/wormhole/build/pricekeeper-v2-approval.teal',
    clearStateProgramFile: 'teal/wormhole/build/pricekeeper-v2-clear.teal',
    compiledApproval: {
      bytes: undefined, // set by compileApprovalProgram
      hash: undefined
    },
    compiledClearState: {
      bytes: undefined, // set by compileClearProgram
      hash: undefined
    },
    appId: 0 // set via setAppId once deployed
  },
  vaaProcessor: {
    approvalProgramFile: 'teal/wormhole/build/vaa-processor-approval.teal',
    clearStateProgramFile: 'teal/wormhole/build/vaa-processor-clear.teal',
    approvalProgramHash: '',
    compiledApproval: {
      bytes: undefined,
      hash: undefined
    },
    compiledClearState: {
      bytes: undefined,
      hash: undefined
    },
    appId: 0
  }
}
// --------------------------------------------------------------------------------------
class PricecasterLib {
constructor(algodClient, ownerAddr = undefined) {
this.algodClient = algodClient
this.ownerAddr = ownerAddr
this.minFee = 1000
this.groupTxSet = {}
this.lsigs = {}
this.dumpFailedTx = false
this.dumpFailedTxDirectory = './'
/** Set the file dumping feature on failed group transactions
* @param {boolean} f Set to true to enable function, false to disable.
*/
this.enableDumpFailedTx = function (f) {
this.dumpFailedTx = f
}
/** Set the file dumping feature output directory
* @param {string} dir The output directory.
*/
this.setDumpFailedTxDirectory = function (dir) {
this.dumpFailedTxDirectory = dir
}
/** Sets a contract approval program filename
* @param {string} filename New file name to use.
*/
this.setApprovalProgramFile = function (contract, filename) {
ContractInfo[contract].approvalProgramFile = filename
}
/** Sets a contract clear state program filename
* @param {string} filename New file name to use.
*/
this.setClearStateProgramFile = function (contract, filename) {
ContractInfo[contract].clearStateProgramFile = filename
}
/**
* Set Application Id for a contract.
* @param {number} applicationId application id
* @returns {void}
*/
this.setAppId = function (contract, applicationId) {
ContractInfo[contract].appId = applicationId
}
/**
* Get the Application id for a specific contract
* @returns The requested application Id
*/
this.getAppId = function (contract) {
return ContractInfo[contract].appId
}
/**
* Get minimum fee to pay for transactions.
* @return {Number} minimum transaction fee
*/
this.minTransactionFee = function () {
return this.minFee
}
/**
* Internal function.
* Read application local state related to the account.
* @param {String} accountAddr account to retrieve local state
* @return {Array} an array containing all the {key: value} pairs of the local state
*/
this.readLocalState = function (accountAddr) {
return tools.readAppLocalState(this.algodClient, this.appId, accountAddr)
}
/**
* Internal function.
* Read application global state.
* @return {Array} an array containing all the {key: value} pairs of the global state
* @returns {void}
*/
this.readGlobalState = function () {
return tools.readAppGlobalState(this.algodClient, this.appId, this.ownerAddr)
}
/**
* Print local state of accountAddr on stdout.
* @param {String} accountAddr account to retrieve local state
* @returns {void}
*/
this.printLocalState = async function (accountAddr) {
await tools.printAppLocalState(this.algodClient, this.appId, accountAddr)
}
/**
* Print application global state on stdout.
* @returns {void}
*/
this.printGlobalState = async function () {
await tools.printAppGlobalState(this.algodClient, this.appId, this.ownerAddr)
}
/**
* Internal function.
* Read application local state variable related to accountAddr.
* @param {String} accountAddr account to retrieve local state
* @param {String} key variable key to get the value associated
* @return {String/Number} it returns the value associated to the key that could be an address, a number or a
* base64 string containing a ByteArray
*/
this.readLocalStateByKey = function (accountAddr, key) {
return tools.readAppLocalStateByKey(this.algodClient, this.appId, accountAddr, key)
}
/**
* Internal function.
* Read application global state variable.
* @param {String} key variable key to get the value associated
* @return {String/Number} it returns the value associated to the key that could be an address,
* a number or a base64 string containing a ByteArray
*/
this.readGlobalStateByKey = function (key) {
return tools.readAppGlobalStateByKey(this.algodClient, this.appId, this.ownerAddr, key)
}
/**
* Compile program that programFilename contains.
* @param {String} programFilename filepath to the program to compile
* @return {String} base64 string containing the compiled program
*/
this.compileProgram = async function (programBytes) {
const compileResponse = await this.algodClient.compile(programBytes).do()
const compiledBytes = new Uint8Array(Buffer.from(compileResponse.result, 'base64'))
return { bytes: compiledBytes, hash: compileResponse.hash }
}
/**
* Compile clear state program.
*/
this.compileClearProgram = async function (contract) {
const program = fs.readFileSync(ContractInfo[contract].clearStateProgramFile, 'utf8')
ContractInfo[contract].compiledClearState = await this.compileProgram(program)
}
/**
* Compile approval program.
*/
this.compileApprovalProgram = async function (contract) {
const program = fs.readFileSync(ContractInfo[contract].approvalProgramFile, 'utf8')
ContractInfo[contract].compiledApproval = await this.compileProgram(program)
}
/**
* Helper function to retrieve the application id from a createApp transaction response.
* @param {Object} txResponse object containig the transactionResponse of the createApp call
* @return {Number} application id of the created application
*/
this.appIdFromCreateAppResponse = function (txResponse) {
return txResponse['application-index']
}
/**
* Create an application based on the default approval and clearState programs or based on the specified files.
* @param {String} sender account used to sign the createApp transaction
* @param {Function} signCallback callback with prototype signCallback(sender, tx) used to sign transactions
* @return {String} transaction id of the created application
*/
this.createApp = async function (sender, contract, localInts, localBytes, globalInts, globalBytes, appArgs, signCallback) {
const onComplete = algosdk.OnApplicationComplete.NoOpOC
// get node suggested parameters
const params = await algodClient.getTransactionParams().do()
params.fee = this.minFee
params.flatFee = true
await this.compileApprovalProgram(contract)
await this.compileClearProgram(contract)
// create unsigned transaction
const txApp = algosdk.makeApplicationCreateTxn(
sender, params, onComplete,
ContractInfo[contract].compiledApproval.bytes,
ContractInfo[contract].compiledClearState.bytes,
localInts, localBytes, globalInts, globalBytes, appArgs
)
const txId = txApp.txID().toString()
// Sign the transaction
const txAppSigned = signCallback(sender, txApp)
// Submit the transaction
await algodClient.sendRawTransaction(txAppSigned).do()
return txId
}
/**
* Create the VAA Processor application based on the default approval and clearState programs or based on the specified files.
* @param {String} sender account used to sign the createApp transaction
* @param {String} gexpTime Guardian key set expiration time
* @param {String} gsindex Index of the guardian key set
* @param {String} gkeys Guardian keys listed as a single array
* @param {Function} signCallback callback with prototype signCallback(sender, tx) used to sign transactions
* @return {String} transaction id of the created application
*/
this.createVaaProcessorApp = async function (sender, gexpTime, gsindex, gkeys, signCallback) {
return await this.createApp(sender, 'vaaProcessor', 0, 0, 5, 20,
[new Uint8Array(Buffer.from(gkeys, 'hex')),
algosdk.encodeUint64(parseInt(gexpTime)),
algosdk.encodeUint64(parseInt(gsindex))], signCallback)
}
/**
* Create the Pricekeeper application based on the default approval and clearState programs or based on the specified files.
* @param {String} sender account used to sign the createApp transaction
* @param {String} vaaProcessorAppId The application id of the VAA Processor program associated.
* @param {Function} signCallback callback with prototype signCallback(sender, tx) used to sign transactions
* @return {String} transaction id of the created application
*/
this.createPricekeeperApp = async function (sender, vaaProcessorAppId, signCallback) {
return await this.createApp(sender, 'pricekeeper', 0, 0, 1, 63,
[algosdk.encodeUint64(parseInt(vaaProcessorAppId))], signCallback)
}
/**
* Internal function.
* Call application specifying args and accounts.
* @param {String} sender caller address
* @param {Array} appArgs array of arguments to pass to application call
* @param {Array} appAccounts array of accounts to pass to application call
* @param {Function} signCallback callback with prototype signCallback(sender, tx) used to sign transactions
* @return {String} transaction id of the transaction
*/
this.callApp = async function (sender, contract, appArgs, appAccounts, signCallback) {
// get node suggested parameters
const params = await this.algodClient.getTransactionParams().do()
params.fee = this.minFee
params.flatFee = true
// create unsigned transaction
const txApp = algosdk.makeApplicationNoOpTxn(sender, params, ContractInfo[contract].appId, appArgs, appAccounts.length === 0 ? undefined : appAccounts)
const txId = txApp.txID().toString()
// Sign the transaction
const txAppSigned = signCallback(sender, txApp)
// Submit the transaction
await this.algodClient.sendRawTransaction(txAppSigned).do()
return txId
}
/**
* ClearState sender. Remove all the sender associated local data.
* @param {String} sender account to ClearState
* @param {Function} signCallback callback with prototype signCallback(sender, tx) used to sign transactions
* @return {[String]} transaction id of one of the transactions of the group
*/
this.clearApp = async function (sender, signCallback, forcedAppId) {
// get node suggested parameters
const params = await this.algodClient.getTransactionParams().do()
params.fee = this.minFee
params.flatFee = true
let appId = this.appId
if (forcedAppId) {
appId = forcedAppId
}
// create unsigned transaction
const txApp = algosdk.makeApplicationClearStateTxn(sender, params, appId)
const txId = txApp.txID().toString()
// Sign the transaction
const txAppSigned = signCallback(sender, txApp)
// Submit the transaction
await this.algodClient.sendRawTransaction(txAppSigned).do()
return txId
}
/**
* Permanent delete the application.
* @param {String} sender owner account
* @param {Function} signCallback callback with prototype signCallback(sender, tx) used to sign transactions
* @param {Function} applicationId use this application id instead of the one set
* @return {String} transaction id of one of the transactions of the group
*/
this.deleteApp = async function (sender, signCallback, applicationId) {
// get node suggested parameters
const params = await this.algodClient.getTransactionParams().do()
params.fee = this.minFee
params.flatFee = true
if (!applicationId) {
applicationId = this.appId
}
// create unsigned transaction
const txApp = algosdk.makeApplicationDeleteTxn(sender, params, applicationId)
const txId = txApp.txID().toString()
// Sign the transaction
const txAppSigned = signCallback(sender, txApp)
// Submit the transaction
await this.algodClient.sendRawTransaction(txAppSigned).do()
return txId
}
/**
* Helper function to wait until transaction txId is included in a block/round.
* @param {String} txId transaction id to wait for
* @return {VOID} VOID
*/
this.waitForConfirmation = async function (txId) {
const status = (await this.algodClient.status().do())
let lastRound = status['last-round']
// eslint-disable-next-line no-constant-condition
while (true) {
const pendingInfo = await this.algodClient.pendingTransactionInformation(txId).do()
if (pendingInfo['confirmed-round'] !== null && pendingInfo['confirmed-round'] > 0) {
// Got the completed Transaction
return pendingInfo['confirmed-round']
}
lastRound += 1
await this.algodClient.statusAfterBlock(lastRound).do()
}
}
/**
* Helper function to wait until transaction txId is included in a block/round
* and returns the transaction response associated to the transaction.
* @param {String} txId transaction id to get transaction response
* @return {Object} returns an object containing response information
*/
this.waitForTransactionResponse = async function (txId) {
// Wait for confirmation
await this.waitForConfirmation(txId)
// display results
return this.algodClient.pendingTransactionInformation(txId).do()
}
/**
* VAA Processor: Sets the stateless logic program hash
* @param {*} sender Sender account
* @param {*} hash The stateless logic program hash
* @returns Transaction identifier.
*/
this.setVAAVerifyProgramHash = async function (sender, hash, signCallback) {
if (!algosdk.isValidAddress(sender)) {
throw new Error('Invalid sender address: ' + sender)
}
const appArgs = []
appArgs.push(new Uint8Array(Buffer.from('setvphash')),
algosdk.decodeAddress(hash).publicKey)
return await this.callApp(sender, 'vaaProcessor', appArgs, [], signCallback)
}
/**
* VAA Processor: Sets the authorized application id for last call
* @param {*} sender Sender account
* @param {*} appId The assigned appId
* @returns Transaction identifier.
*/
this.setAuthorizedAppId = async function (sender, appId, signCallback) {
if (!algosdk.isValidAddress(sender)) {
throw new Error('Invalid sender address: ' + sender)
}
const appArgs = []
appArgs.push(new Uint8Array(Buffer.from('setauthid')),
algosdk.encodeUint64(appId))
return await this.callApp(sender, 'vaaProcessor', appArgs, [], signCallback)
}
/**
* Starts a begin...commit section for commiting grouped transactions.
*/
this.beginTxGroup = function () {
const gid = crypto.randomBytes(16).toString('hex')
this.groupTxSet[gid] = []
return gid
}
/**
* Adds a transaction to the group.
* @param {} tx Transaction to add.
*/
this.addTxToGroup = function (gid, tx) {
if (this.groupTxSet[gid] === undefined) {
throw new Error('unknown tx group id')
}
this.groupTxSet[gid].push(tx)
}
/**
* @param {*} sender The sender account.
* @param {function} signCallback The sign callback routine.
* @returns Transaction id.
*/
this.commitTxGroup = async function (gid, sender, signCallback) {
if (this.groupTxSet[gid] === undefined) {
throw new Error('unknown tx group id')
}
algosdk.assignGroupID(this.groupTxSet[gid])
// Sign the transactions
const signedTxns = []
for (const tx of this.groupTxSet[gid]) {
signedTxns.push(signCallback(sender, tx))
}
// Submit the transaction
const tx = await this.algodClient.sendRawTransaction(signedTxns).do()
delete this.groupTxSet[gid]
return tx.txId
}
/**
* @param {*} sender The sender account.
* @param {*} programBytes Compiled program bytes.
* @param {*} totalSignatureCount Total signatures present in the VAA.
* @param {*} sigSubsets An hex string with the signature subsets i..j for logicsig arguments.
* @param {*} lastTxSender The sender of the last TX in the group.
* @param {*} signCallback The signing callback function to use in the last TX of the group.
* @returns Transaction id.
*/
this.commitVerifyTxGroup = async function (gid, programBytes, totalSignatureCount, sigSubsets, lastTxSender, signCallback) {
if (this.groupTxSet[gid] === undefined) {
throw new Error('unknown group id')
}
algosdk.assignGroupID(this.groupTxSet[gid])
const signedGroup = []
let i = 0
for (const tx of this.groupTxSet[gid]) {
// All transactions except last must be signed by stateless code.
// console.log(`sigSubsets[${i}]: ${sigSubsets[i])
if (i === this.groupTxSet[gid].length - 1) {
signedGroup.push(signCallback(lastTxSender, tx))
} else {
const lsig = new algosdk.LogicSigAccount(programBytes, [Buffer.from(sigSubsets[i], 'hex'), algosdk.encodeUint64(totalSignatureCount)])
const stxn = algosdk.signLogicSigTransaction(tx, lsig)
signedGroup.push(stxn.blob)
}
i++
}
// Submit the transaction
let tx
try {
tx = await this.algodClient.sendRawTransaction(signedGroup).do()
} catch (e) {
if (this.dumpFailedTx) {
const id = tx ? tx.txId : Date.now().toString()
const filename = `${this.dumpFailedTxDirectory}/failed-${id}.stxn`
if (fs.existsSync(filename)) {
fs.unlinkSync(filename)
}
for (let i = 0; i < signedGroup.length; ++i) {
fs.appendFileSync(filename, signedGroup[i])
}
}
throw e
}
delete this.groupTxSet[gid]
return tx.txId
}
/**
* VAA Processor: Add a verification step to a transaction group.
* @param {*} sender The sender account (typically the VAA verification stateless program)
* @param {*} payload The VAA payload.
* @param {*} gksubset An hex string containing the keys for the guardian subset in this step.
* @param {*} totalguardians The total number of known guardians.
*/
this.addVerifyTx = function (gid, sender, params, payload, gksubset, totalguardians) {
if (this.groupTxSet[gid] === undefined) {
throw new Error('unknown group id')
}
const appArgs = []
appArgs.push(new Uint8Array(Buffer.from('verify')),
new Uint8Array(Buffer.from(gksubset.join(''), 'hex')),
algosdk.encodeUint64(parseInt(totalguardians)))
const tx = algosdk.makeApplicationNoOpTxn(sender,
params,
ContractInfo.vaaProcessor.appId,
appArgs, undefined, undefined, undefined,
new Uint8Array(payload))
this.groupTxSet[gid].push(tx)
return tx.txID()
}
/**
* Pricekeeper-V2: Add store price transaction to TX Group.
* @param {*} sender The sender account (typically the VAA verification stateless program)
* @param {*} sym The symbol identifying the product to store price for.
* @param {*} payload The VAA payload.
*/
this.addPriceStoreTx = function (gid, sender, params, sym, payload) {
if (this.groupTxSet[gid] === undefined) {
throw new Error('unknown group id')
}
const appArgs = []
appArgs.push(new Uint8Array(Buffer.from('store')),
new Uint8Array(Buffer.from(sym)),
new Uint8Array(payload))
const tx = algosdk.makeApplicationNoOpTxn(sender,
params,
ContractInfo.pricekeeper.appId,
appArgs)
this.groupTxSet[gid].push(tx)
return tx.txID()
}
}
}
// Public entry point of this module: the PricecasterLib constructor.
module.exports = {
  PricecasterLib
}

View File

@ -1 +0,0 @@
Log file output in this directory.

File diff suppressed because it is too large Load Diff

View File

@ -1,47 +0,0 @@
{
"name": "pricecaster",
"version": "2.0.0",
"description": "Pricecaster V2 Service",
"main": "index.js",
"scripts": {
"compile": "tsc",
"build": "rimraf build && npm run compile",
"prepack": "npm run build",
"start": "npm run compile && cross-env PRICECASTER_SETTINGS=./settings/settings-worm.js node build/main.js",
"test-wormhole-sc": "mocha test/wormhole-sc-test.js --timeout 60000 --bail --allow-uncaught"
},
"author": "Randlabs inc",
"license": "ISC",
"dependencies": {
"@certusone/wormhole-sdk": "^0.1.3",
"@certusone/wormhole-spydk": "^0.0.1",
"@improbable-eng/grpc-web-node-http-transport": "^0.15.0",
"@pythnetwork/client": "^2.3.1",
"@randlabs/js-config-reader": "^1.1.0",
"@randlabs/js-logger": "^1.2.0",
"algosdk": "^1.12.0",
"base58-universal": "^1.0.0",
"charm": "^1.0.2",
"elliptic": "^6.5.4",
"esm": "^3.2.25",
"ethers": "^5.5.1",
"js-sha512": "^0.8.0",
"web3-eth-abi": "^1.6.1",
"web3-utils": "^1.6.1"
},
"devDependencies": {
"@types/superagent": "^4.1.13",
"@typescript-eslint/eslint-plugin": "^4.32.0",
"@typescript-eslint/parser": "^4.32.0",
"chai": "^4.3.4",
"chai-as-promised": "^7.1.1",
"cross-env": "^7.0.3",
"eslint": "^7.32.0",
"eslint-config-standard": "^16.0.3",
"eslint-plugin-import": "^2.24.2",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^5.1.0",
"mocha": "^9.1.2",
"rimraf": "^3.0.2"
}
}

View File

@ -1,5 +0,0 @@
#!/bin/bash
goal app create --creator "$OWNER_ADDR" --global-ints 4 --global-byteslices 20 --local-byteslices 0 --local-ints 0 --approval-prog vaa-processor-approval.teal --clear-prog vaa-processor-clear.teal --app-arg "b64:$GKEYSBASE64" --app-arg int:0 --app-arg int:0 -o create.txn
algokey -t create.txn -o create.stxn sign -m "$OWNER_MNEMO"
goal clerk rawsend -f create.stxn

View File

@ -1,4 +0,0 @@
#!/bin/bash
goal app delete --app-id $1 --from "$OWNER_ADDR" -o delete.txn
algokey -t delete.txn -o delete.stxn sign -m "$OWNER_MNEMO"
goal clerk rawsend -f delete.stxn

Some files were not shown because too many files have changed in this diff Show More