Added proper json marshalling of UniqueTxs in the AVM

StephenButtolph 2020-03-27 16:54:47 -04:00
parent 1fe5092e9b
commit 4b36f9d041
28 changed files with 239 additions and 490 deletions
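The tag additions throughout this commit rely on Go's encoding/json behavior: a json:"name" struct tag sets the field name in the marshalled output, while untagged exported fields fall back to their Go names. A minimal standalone sketch of that behavior (the Asset type here is a simplified stand-in for the real one, with a plain string ID rather than ids.ID):

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-in for the AVM's Asset type: the json tag renames the
// field from "ID" to "assetID" in the marshalled output; the serialize tag
// is read by gecko's own codec and ignored by encoding/json.
type Asset struct {
	ID string `serialize:"true" json:"assetID"`
}

func main() {
	b, err := json.Marshal(Asset{ID: "exampleAssetID"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"assetID":"exampleAssetID"}
}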

View File

@ -16,7 +16,7 @@ var (
// Asset ...
type Asset struct {
ID ids.ID `serialize:"true"`
ID ids.ID `serialize:"true" json:"assetID"`
}
// AssetID returns the ID of the contained asset

View File

@ -10,6 +10,7 @@ import (
"github.com/ava-labs/gecko/snow"
"github.com/ava-labs/gecko/utils/math"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
)
var (
@ -29,10 +30,10 @@ var (
type BaseTx struct {
metadata
NetID uint32 `serialize:"true"` // ID of the network this chain lives on
BCID ids.ID `serialize:"true"` // ID of the chain on which this transaction exists (prevents replay attacks)
Outs []*TransferableOutput `serialize:"true"` // The outputs of this transaction
Ins []*TransferableInput `serialize:"true"` // The inputs to this transaction
NetID uint32 `serialize:"true" json:"networkID"` // ID of the network this chain lives on
BCID ids.ID `serialize:"true" json:"blockchainID"` // ID of the chain on which this transaction exists (prevents replay attacks)
Outs []*TransferableOutput `serialize:"true" json:"outputs"` // The outputs of this transaction
Ins []*TransferableInput `serialize:"true" json:"inputs"` // The inputs to this transaction
}
// NetworkID is the ID of the network on which this transaction exists
@ -155,11 +156,11 @@ func (t *BaseTx) SyntacticVerify(ctx *snow.Context, c codec.Codec, _ int) error
}
// SemanticVerify that this transaction is valid to be spent.
func (t *BaseTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential) error {
func (t *BaseTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []verify.Verifiable) error {
for i, in := range t.Ins {
cred := creds[i]
fxIndex, err := vm.getFx(cred.Cred)
fxIndex, err := vm.getFx(cred)
if err != nil {
return err
}
@ -178,7 +179,7 @@ func (t *BaseTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential) erro
return errIncompatibleFx
}
err = fx.VerifyTransfer(uTx, utxo.Out, in.In, cred.Cred)
err = fx.VerifyTransfer(uTx, utxo.Out, in.In, cred)
if err == nil {
continue
}
@ -215,7 +216,7 @@ func (t *BaseTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential) erro
return errIncompatibleFx
}
if err := fx.VerifyTransfer(uTx, utxo.Out, in.In, cred.Cred); err != nil {
if err := fx.VerifyTransfer(uTx, utxo.Out, in.In, cred); err != nil {
return err
}
}
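The signature change above (creds []*Credential becoming creds []verify.Verifiable) removes the avm.Credential wrapper: fx credentials are now passed straight through to vm.getFx and fx.VerifyTransfer instead of being unwrapped via cred.Cred. A rough sketch of the pattern with stand-in types, not the real gecko interfaces:

package main

import "fmt"

// Stand-in for gecko's verify.Verifiable interface.
type Verifiable interface {
	Verify() error
}

// Stand-in for an fx credential such as secp256k1fx.Credential.
type fxCredential struct{ sigs int }

func (c *fxCredential) Verify() error {
	if c.sigs == 0 {
		return fmt.Errorf("no signatures")
	}
	return nil
}

// Previously each credential was wrapped as &Credential{Cred: c} and callers
// had to unwrap cred.Cred; with a []Verifiable the values are used directly.
func verifyAll(creds []Verifiable) error {
	for i, cred := range creds {
		if err := cred.Verify(); err != nil {
			return fmt.Errorf("credential %d: %w", i, err)
		}
	}
	return nil
}

func main() {
	creds := []Verifiable{&fxCredential{sigs: 1}}
	fmt.Println(verifyAll(creds)) // <nil>
}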

View File

@ -834,11 +834,9 @@ func TestBaseTxSemanticVerify(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -849,11 +847,11 @@ func TestBaseTxSemanticVerify(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err != nil {
@ -913,9 +911,7 @@ func TestBaseTxSemanticVerifyUnknownFx(t *testing.T) {
},
}}}
tx.Creds = append(tx.Creds, &Credential{
Cred: &testVerifiable{},
})
tx.Creds = append(tx.Creds, &testVerifiable{})
b, err := vm.codec.Marshal(tx)
if err != nil {
@ -924,11 +920,11 @@ func TestBaseTxSemanticVerifyUnknownFx(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1001,11 +997,9 @@ func TestBaseTxSemanticVerifyWrongAssetID(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1016,11 +1010,11 @@ func TestBaseTxSemanticVerifyWrongAssetID(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1098,11 +1092,9 @@ func TestBaseTxSemanticVerifyUnauthorizedFx(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1113,11 +1105,11 @@ func TestBaseTxSemanticVerifyUnauthorizedFx(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1175,11 +1167,9 @@ func TestBaseTxSemanticVerifyInvalidSignature(t *testing.T) {
},
}}}
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
[crypto.SECP256K1RSigLen]byte{},
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
[crypto.SECP256K1RSigLen]byte{},
},
})
@ -1190,11 +1180,11 @@ func TestBaseTxSemanticVerifyInvalidSignature(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1265,11 +1255,9 @@ func TestBaseTxSemanticVerifyMissingUTXO(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1280,11 +1268,11 @@ func TestBaseTxSemanticVerifyMissingUTXO(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1355,11 +1343,9 @@ func TestBaseTxSemanticVerifyInvalidUTXO(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1370,11 +1356,11 @@ func TestBaseTxSemanticVerifyInvalidUTXO(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1459,11 +1445,9 @@ func TestBaseTxSemanticVerifyPendingInvalidUTXO(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
pendingTx.Creds = append(pendingTx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
pendingTx.Creds = append(pendingTx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1522,11 +1506,9 @@ func TestBaseTxSemanticVerifyPendingInvalidUTXO(t *testing.T) {
fixedSig = [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1537,11 +1519,11 @@ func TestBaseTxSemanticVerifyPendingInvalidUTXO(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1626,11 +1608,9 @@ func TestBaseTxSemanticVerifyPendingWrongAssetID(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
pendingTx.Creds = append(pendingTx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
pendingTx.Creds = append(pendingTx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1689,11 +1669,9 @@ func TestBaseTxSemanticVerifyPendingWrongAssetID(t *testing.T) {
fixedSig = [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1704,11 +1682,11 @@ func TestBaseTxSemanticVerifyPendingWrongAssetID(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1807,11 +1785,9 @@ func TestBaseTxSemanticVerifyPendingUnauthorizedFx(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
pendingTx.Creds = append(pendingTx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
pendingTx.Creds = append(pendingTx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -1858,9 +1834,7 @@ func TestBaseTxSemanticVerifyPendingUnauthorizedFx(t *testing.T) {
},
}}}
tx.Creds = append(tx.Creds, &Credential{
Cred: &testVerifiable{},
})
tx.Creds = append(tx.Creds, &testVerifiable{})
b, err = vm.codec.Marshal(tx)
if err != nil {
@ -1869,11 +1843,11 @@ func TestBaseTxSemanticVerifyPendingUnauthorizedFx(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {
@ -1972,11 +1946,9 @@ func TestBaseTxSemanticVerifyPendingInvalidSignature(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
pendingTx.Creds = append(pendingTx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
pendingTx.Creds = append(pendingTx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -2023,11 +1995,9 @@ func TestBaseTxSemanticVerifyPendingInvalidSignature(t *testing.T) {
},
}}}
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
[crypto.SECP256K1RSigLen]byte{},
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
[crypto.SECP256K1RSigLen]byte{},
},
})
@ -2038,11 +2008,11 @@ func TestBaseTxSemanticVerifyPendingInvalidSignature(t *testing.T) {
tx.Initialize(b)
uTx := &UniqueTx{
TxState: &TxState{
Tx: tx,
},
vm: vm,
txID: tx.ID(),
t: &txState{
tx: tx,
},
}
if err := tx.UnsignedTx.SemanticVerify(vm, uTx, tx.Creds); err == nil {

View File

@ -11,6 +11,7 @@ import (
"github.com/ava-labs/gecko/snow"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
)
const (
@ -32,10 +33,10 @@ var (
// CreateAssetTx is a transaction that creates a new asset.
type CreateAssetTx struct {
BaseTx `serialize:"true"`
Name string `serialize:"true"`
Symbol string `serialize:"true"`
Denomination byte `serialize:"true"`
States []*InitialState `serialize:"true"`
Name string `serialize:"true" json:"name"`
Symbol string `serialize:"true" json:"symbol"`
Denomination byte `serialize:"true" json:"denomination"`
States []*InitialState `serialize:"true" json:"initialStates"`
}
// InitialStates track which virtual machines, and the initial state of these
@ -111,7 +112,7 @@ func (t *CreateAssetTx) SyntacticVerify(ctx *snow.Context, c codec.Codec, numFxs
}
// SemanticVerify that this transaction is well-formed.
func (t *CreateAssetTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential) error {
func (t *CreateAssetTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []verify.Verifiable) error {
return t.BaseTx.SemanticVerify(vm, uTx, creds)
}

View File

@ -1,36 +0,0 @@
// (c) 2019-2020, Ava Labs, Inc. All rights reserved.
// See the file LICENSE for licensing terms.
package avm
import (
"errors"
"github.com/ava-labs/gecko/vms/components/verify"
)
var (
errNilCredential = errors.New("nil credential is not valid")
errNilFxCredential = errors.New("nil feature extension credential is not valid")
)
// Credential ...
type Credential struct {
Cred verify.Verifiable `serialize:"true"`
}
// Credential returns the feature extension credential that this Credential is
// using.
func (cred *Credential) Credential() verify.Verifiable { return cred.Cred }
// Verify implements the verify.Verifiable interface
func (cred *Credential) Verify() error {
switch {
case cred == nil:
return errNilCredential
case cred.Cred == nil:
return errNilFxCredential
default:
return cred.Cred.Verify()
}
}

View File

@ -1,36 +0,0 @@
// (c) 2019-2020, Ava Labs, Inc. All rights reserved.
// See the file LICENSE for licensing terms.
package avm
import (
"testing"
)
func TestCredentialVerifyNil(t *testing.T) {
cred := (*Credential)(nil)
if err := cred.Verify(); err == nil {
t.Fatalf("Should have errored due to nil credential")
}
}
func TestCredentialVerifyNilFx(t *testing.T) {
cred := &Credential{}
if err := cred.Verify(); err == nil {
t.Fatalf("Should have errored due to nil fx credential")
}
}
func TestCredential(t *testing.T) {
cred := &Credential{
Cred: &testVerifiable{},
}
if err := cred.Verify(); err != nil {
t.Fatal(err)
}
if cred.Credential() != cred.Cred {
t.Fatalf("Should have returned the fx credential")
}
}

View File

@ -19,8 +19,8 @@ var (
// InitialState ...
type InitialState struct {
FxID uint32 `serialize:"true"`
Outs []verify.Verifiable `serialize:"true"`
FxID uint32 `serialize:"true" json:"fxID"`
Outs []verify.Verifiable `serialize:"true" json:"outputs"`
}
// Verify implements the verify.Verifiable interface

View File

@ -9,72 +9,19 @@ import (
"sort"
"github.com/ava-labs/gecko/utils"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
)
var (
errNilOperableOutput = errors.New("nil operable output is not valid")
errNilOperableFxOutput = errors.New("nil operable feature extension output is not valid")
errNilOperableInput = errors.New("nil operable input is not valid")
errNilOperableFxInput = errors.New("nil operable feature extension input is not valid")
)
// OperableOutput ...
type OperableOutput struct {
Out verify.Verifiable `serialize:"true"`
}
// Output returns the feature extension output that this Output is using.
func (out *OperableOutput) Output() verify.Verifiable { return out.Out }
// Verify implements the verify.Verifiable interface
func (out *OperableOutput) Verify() error {
switch {
case out == nil:
return errNilOperableOutput
case out.Out == nil:
return errNilOperableFxOutput
default:
return out.Out.Verify()
}
}
type innerSortOperableOutputs struct {
outs []*OperableOutput
codec codec.Codec
}
func (outs *innerSortOperableOutputs) Less(i, j int) bool {
iOut := outs.outs[i]
jOut := outs.outs[j]
iBytes, err := outs.codec.Marshal(&iOut.Out)
if err != nil {
return false
}
jBytes, err := outs.codec.Marshal(&jOut.Out)
if err != nil {
return false
}
return bytes.Compare(iBytes, jBytes) == -1
}
func (outs *innerSortOperableOutputs) Len() int { return len(outs.outs) }
func (outs *innerSortOperableOutputs) Swap(i, j int) { o := outs.outs; o[j], o[i] = o[i], o[j] }
func sortOperableOutputs(outs []*OperableOutput, c codec.Codec) {
sort.Sort(&innerSortOperableOutputs{outs: outs, codec: c})
}
func isSortedOperableOutputs(outs []*OperableOutput, c codec.Codec) bool {
return sort.IsSorted(&innerSortOperableOutputs{outs: outs, codec: c})
}
// OperableInput ...
type OperableInput struct {
UTXOID `serialize:"true"`
In verify.Verifiable `serialize:"true"`
In verify.Verifiable `serialize:"true" json:"input"`
}
// Input returns the feature extension input that this Input is using.

View File

@ -7,76 +7,8 @@ import (
"testing"
"github.com/ava-labs/gecko/ids"
"github.com/ava-labs/gecko/vms/components/codec"
)
func TestOperableOutputVerifyNil(t *testing.T) {
oo := (*OperableOutput)(nil)
if err := oo.Verify(); err == nil {
t.Fatalf("Should have errored due to nil operable output")
}
}
func TestOperableOutputVerifyNilFx(t *testing.T) {
oo := &OperableOutput{}
if err := oo.Verify(); err == nil {
t.Fatalf("Should have errored due to nil operable fx output")
}
}
func TestOperableOutputVerify(t *testing.T) {
oo := &OperableOutput{
Out: &testVerifiable{},
}
if err := oo.Verify(); err != nil {
t.Fatal(err)
}
if oo.Output() != oo.Out {
t.Fatalf("Should have returned the fx output")
}
}
func TestOperableOutputSorting(t *testing.T) {
c := codec.NewDefault()
c.RegisterType(&TestTransferable{})
c.RegisterType(&testVerifiable{})
outs := []*OperableOutput{
&OperableOutput{
Out: &TestTransferable{Val: 1},
},
&OperableOutput{
Out: &TestTransferable{Val: 0},
},
&OperableOutput{
Out: &TestTransferable{Val: 0},
},
&OperableOutput{
Out: &testVerifiable{},
},
}
if isSortedOperableOutputs(outs, c) {
t.Fatalf("Shouldn't be sorted")
}
sortOperableOutputs(outs, c)
if !isSortedOperableOutputs(outs, c) {
t.Fatalf("Should be sorted")
}
if result := outs[0].Out.(*TestTransferable).Val; result != 0 {
t.Fatalf("Val expected: %d ; result: %d", 0, result)
}
if result := outs[1].Out.(*TestTransferable).Val; result != 0 {
t.Fatalf("Val expected: %d ; result: %d", 0, result)
}
if result := outs[2].Out.(*TestTransferable).Val; result != 1 {
t.Fatalf("Val expected: %d ; result: %d", 0, result)
}
if _, ok := outs[3].Out.(*testVerifiable); !ok {
t.Fatalf("testVerifiable expected")
}
}
func TestOperableInputVerifyNil(t *testing.T) {
oi := (*OperableInput)(nil)
if err := oi.Verify(); err == nil {

View File

@ -10,6 +10,7 @@ import (
"github.com/ava-labs/gecko/utils"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
)
var (
@ -21,8 +22,8 @@ var (
type Operation struct {
Asset `serialize:"true"`
Ins []*OperableInput `serialize:"true"`
Outs []*OperableOutput `serialize:"true"`
Ins []*OperableInput `serialize:"true" json:"inputs"`
Outs []verify.Verifiable `serialize:"true" json:"outputs"`
}
// Verify implements the verify.Verifiable interface
@ -48,7 +49,7 @@ func (op *Operation) Verify(c codec.Codec) error {
return err
}
}
if !isSortedOperableOutputs(op.Outs, c) {
if !isSortedVerifiables(op.Outs, c) {
return errOutputsNotSorted
}
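Operation.Verify above now checks isSortedVerifiables(op.Outs, c), a helper whose definition is not part of this diff. Judging from the innerSortOperableOutputs code removed in the operables.go hunk earlier, it presumably orders outputs by their codec-serialized bytes; the sketch below assumes that and substitutes encoding/json for the gecko codec:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"sort"
)

type Verifiable interface{ Verify() error }

type out struct{ Val uint64 }

func (o *out) Verify() error { return nil }

// Presumed shape of isSortedVerifiables, mirroring the removed
// innerSortOperableOutputs: elements are ordered by their serialized bytes.
func isSortedVerifiables(vs []Verifiable) bool {
	return sort.SliceIsSorted(vs, func(i, j int) bool {
		ib, _ := json.Marshal(vs[i])
		jb, _ := json.Marshal(vs[j])
		return bytes.Compare(ib, jb) < 0
	})
}

func main() {
	vs := []Verifiable{&out{Val: 1}, &out{Val: 0}}
	fmt.Println(isSortedVerifiables(vs)) // false
}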

View File

@ -8,6 +8,7 @@ import (
"github.com/ava-labs/gecko/ids"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
)
func TestOperationVerifyNil(t *testing.T) {
@ -45,21 +46,6 @@ func TestOperationVerifyInvalidInput(t *testing.T) {
}
}
func TestOperationVerifyInvalidOutput(t *testing.T) {
c := codec.NewDefault()
op := &Operation{
Asset: Asset{
ID: ids.Empty,
},
Outs: []*OperableOutput{
&OperableOutput{},
},
}
if err := op.Verify(c); err == nil {
t.Fatalf("Should have errored due to an invalid output")
}
}
func TestOperationVerifyInputsNotSorted(t *testing.T) {
c := codec.NewDefault()
op := &Operation{
@ -96,13 +82,9 @@ func TestOperationVerifyOutputsNotSorted(t *testing.T) {
Asset: Asset{
ID: ids.Empty,
},
Outs: []*OperableOutput{
&OperableOutput{
Out: &TestTransferable{Val: 1},
},
&OperableOutput{
Out: &TestTransferable{Val: 0},
},
Outs: []verify.Verifiable{
&TestTransferable{Val: 1},
&TestTransferable{Val: 0},
},
}
if err := op.Verify(c); err == nil {
@ -116,10 +98,8 @@ func TestOperationVerify(t *testing.T) {
Asset: Asset{
ID: ids.Empty,
},
Outs: []*OperableOutput{
&OperableOutput{
Out: &testVerifiable{},
},
Outs: []verify.Verifiable{
&testVerifiable{},
},
}
if err := op.Verify(c); err != nil {

View File

@ -9,6 +9,7 @@ import (
"github.com/ava-labs/gecko/ids"
"github.com/ava-labs/gecko/snow"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
)
var (
@ -20,7 +21,7 @@ var (
// OperationTx is a transaction with no credentials.
type OperationTx struct {
BaseTx `serialize:"true"`
Ops []*Operation `serialize:"true"`
Ops []*Operation `serialize:"true" json:"operations"`
}
// Operations track which ops this transaction is performing. The returned array
@ -63,7 +64,7 @@ func (t *OperationTx) UTXOs() []*UTXO {
Asset: Asset{
ID: asset,
},
Out: out.Out,
Out: out,
})
}
}
@ -106,7 +107,7 @@ func (t *OperationTx) SyntacticVerify(ctx *snow.Context, c codec.Codec, numFxs i
}
// SemanticVerify that this transaction is well-formed.
func (t *OperationTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential) error {
func (t *OperationTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []verify.Verifiable) error {
if err := t.BaseTx.SemanticVerify(vm, uTx, creds); err != nil {
return err
}
@ -123,7 +124,7 @@ func (t *OperationTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential)
ins = append(ins, in.In)
cred := creds[i+offset]
credIntfs = append(credIntfs, cred.Cred)
credIntfs = append(credIntfs, cred)
utxoID := in.InputID()
utxo, err := vm.state.UTXO(utxoID)
@ -165,7 +166,7 @@ func (t *OperationTx) SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential)
}
offset += len(op.Ins)
for _, out := range op.Outs {
outs = append(outs, out.Out)
outs = append(outs, out)
}
var fxObj interface{}

View File

@ -66,11 +66,9 @@ func TestPrefixedSetsAndGets(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})

View File

@ -163,7 +163,7 @@ func (service *Service) GetAssetDescription(_ *http.Request, args *GetAssetDescr
if status := tx.Status(); !status.Fetched() {
return errUnknownAssetID
}
createAssetTx, ok := tx.t.tx.UnsignedTx.(*CreateAssetTx)
createAssetTx, ok := tx.UnsignedTx.(*CreateAssetTx)
if !ok {
return errTxNotCreateAsset
}
@ -700,7 +700,7 @@ func (service *Service) Send(r *http.Request, args *SendArgs, reply *SendReply)
cred.Sigs = append(cred.Sigs, fixedSig)
}
tx.Creds = append(tx.Creds, &Credential{Cred: cred})
tx.Creds = append(tx.Creds, cred)
}
b, err := service.vm.codec.Marshal(tx)
@ -849,19 +849,15 @@ func (service *Service) CreateMintTx(r *http.Request, args *CreateMintTxArgs, re
},
},
},
Outs: []*OperableOutput{
&OperableOutput{
&secp256k1fx.MintOutput{
OutputOwners: out.OutputOwners,
},
Outs: []verify.Verifiable{
&secp256k1fx.MintOutput{
OutputOwners: out.OutputOwners,
},
&OperableOutput{
&secp256k1fx.TransferOutput{
Amt: uint64(args.Amount),
OutputOwners: secp256k1fx.OutputOwners{
Threshold: 1,
Addrs: []ids.ShortID{to},
},
&secp256k1fx.TransferOutput{
Amt: uint64(args.Amount),
OutputOwners: secp256k1fx.OutputOwners{
Threshold: 1,
Addrs: []ids.ShortID{to},
},
},
},
@ -963,11 +959,11 @@ func (service *Service) SignMintTx(r *http.Request, args *SignMintTxArgs, reply
}
if len(tx.Creds) == 0 {
tx.Creds = append(tx.Creds, &Credential{Cred: &secp256k1fx.Credential{}})
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{})
}
cred := tx.Creds[0]
switch cred := cred.Cred.(type) {
switch cred := cred.(type) {
case *secp256k1fx.Credential:
if len(cred.Sigs) != size {
cred.Sigs = make([][crypto.SECP256K1RSigLen]byte, size)

View File

@ -287,11 +287,9 @@ func TestStateTXs(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
tx.Creds = append(tx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
tx.Creds = append(tx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})

View File

@ -25,7 +25,7 @@ var (
type TransferableOutput struct {
Asset `serialize:"true"`
Out FxTransferable `serialize:"true"`
Out FxTransferable `serialize:"true" json:"output"`
}
// Output returns the feature extension output that this Output is using.
@ -90,7 +90,7 @@ type TransferableInput struct {
UTXOID `serialize:"true"`
Asset `serialize:"true"`
In FxTransferable `serialize:"true"`
In FxTransferable `serialize:"true" json:"input"`
}
// Input returns the feature extension input that this Input is using.

View File

@ -7,9 +7,9 @@ import (
"errors"
"github.com/ava-labs/gecko/ids"
"github.com/ava-labs/gecko/snow"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
)
var (
@ -31,7 +31,7 @@ type UnsignedTx interface {
InputUTXOs() []*UTXOID
UTXOs() []*UTXO
SyntacticVerify(ctx *snow.Context, c codec.Codec, numFxs int) error
SemanticVerify(vm *VM, uTx *UniqueTx, creds []*Credential) error
SemanticVerify(vm *VM, uTx *UniqueTx, creds []verify.Verifiable) error
}
// Tx is the core operation that can be performed. The tx uses the UTXO model.
@ -40,14 +40,14 @@ type UnsignedTx interface {
// attempting to consume and the inputs consume sufficient state to produce the
// outputs.
type Tx struct {
UnsignedTx `serialize:"true"`
UnsignedTx `serialize:"true" json:"unsignedTx"`
Creds []*Credential `serialize:"true"` // The credentials of this transaction
Creds []verify.Verifiable `serialize:"true" json:"credentials"` // The credentials of this transaction
}
// Credentials describes the authorization that allows the Inputs to consume the
// specified UTXOs. The returned array should not be modified.
func (t *Tx) Credentials() []*Credential { return t.Creds }
func (t *Tx) Credentials() []verify.Verifiable { return t.Creds }
// SyntacticVerify verifies that this transaction is well-formed.
func (t *Tx) SyntacticVerify(ctx *snow.Context, c codec.Codec, numFxs int) error {
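With the tags above, a marshalled Tx nests whatever concrete UnsignedTx it holds under "unsignedTx" and the credential slice under "credentials"; encoding/json serializes the concrete value stored in an interface-typed field. A small sketch with stand-in types, not the real avm structs:

package main

import (
	"encoding/json"
	"fmt"
)

type UnsignedTx interface {
	ID() string
}

type baseTx struct {
	NetID uint32 `json:"networkID"`
}

func (b *baseTx) ID() string { return "example" }

type credential struct {
	Sigs [][]byte `json:"signatures"`
}

// The interface-typed embedded field marshals as whatever concrete value it
// holds, under the name given by its json tag.
type tx struct {
	UnsignedTx `json:"unsignedTx"`
	Creds      []interface{} `json:"credentials"`
}

func main() {
	b, err := json.Marshal(&tx{
		UnsignedTx: &baseTx{NetID: 12345},
		Creds:      []interface{}{&credential{}},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
	// {"unsignedTx":{"networkID":12345},"credentials":[{"signatures":null}]}
}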

View File

@ -9,6 +9,7 @@ import (
"github.com/ava-labs/gecko/ids"
"github.com/ava-labs/gecko/utils/units"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
"github.com/ava-labs/gecko/vms/secp256k1fx"
)
@ -68,10 +69,8 @@ func TestTxInvalidCredential(t *testing.T) {
},
},
}},
Creds: []*Credential{
&Credential{
Cred: &testVerifiable{err: errUnneededAddress},
},
Creds: []verify.Verifiable{
&testVerifiable{err: errUnneededAddress},
},
}
@ -139,13 +138,9 @@ func TestTxInvalidUnsignedTx(t *testing.T) {
},
},
}},
Creds: []*Credential{
&Credential{
Cred: &testVerifiable{},
},
&Credential{
Cred: &testVerifiable{},
},
Creds: []verify.Verifiable{
&testVerifiable{},
&testVerifiable{},
},
}
@ -214,10 +209,8 @@ func TestTxInvalidNumberOfCredentials(t *testing.T) {
},
},
},
Creds: []*Credential{
&Credential{
Cred: &testVerifiable{},
},
Creds: []verify.Verifiable{
&testVerifiable{},
},
}

View File

@ -21,16 +21,18 @@ var (
// UniqueTx provides a de-duplication service for txs. This only provides a
// performance boost
type UniqueTx struct {
*TxState
vm *VM
txID ids.ID
t *txState
}
type txState struct {
// TxState ...
type TxState struct {
*Tx
unique, verifiedTx, verifiedState bool
validity error
tx *Tx
inputs ids.Set
inputUTXOs []*UTXOID
utxos []*UTXO
@ -42,51 +44,51 @@ type txState struct {
}
func (tx *UniqueTx) refresh() {
if tx.t == nil {
tx.t = &txState{}
if tx.TxState == nil {
tx.TxState = &TxState{}
}
if tx.t.unique {
if tx.unique {
return
}
unique := tx.vm.state.UniqueTx(tx)
prevTx := tx.t.tx
prevTx := tx.Tx
if unique == tx {
// If no one was in the cache, make sure that there wasn't an
// intermediate object whose state I must reflect
if status, err := tx.vm.state.Status(tx.ID()); err == nil {
tx.t.status = status
tx.t.unique = true
tx.status = status
tx.unique = true
}
} else {
// If someone is in the cache, they must be up to date
// This ensures that every unique tx object points to the same tx state
tx.t = unique.t
tx.TxState = unique.TxState
}
if tx.t.tx != nil {
if tx.Tx != nil {
return
}
if prevTx == nil {
if innerTx, err := tx.vm.state.Tx(tx.ID()); err == nil {
tx.t.tx = innerTx
tx.Tx = innerTx
}
} else {
tx.t.tx = prevTx
tx.Tx = prevTx
}
}
// Evict is called when this UniqueTx will no longer be returned from a cache
// lookup
func (tx *UniqueTx) Evict() { tx.t.unique = false } // Lock is already held here
func (tx *UniqueTx) Evict() { tx.unique = false } // Lock is already held here
func (tx *UniqueTx) setStatus(status choices.Status) error {
tx.refresh()
if tx.t.status == status {
if tx.status == status {
return nil
}
tx.t.status = status
tx.status = status
return tx.vm.state.SetStatus(tx.ID(), status)
}
@ -125,10 +127,10 @@ func (tx *UniqueTx) Accept() {
tx.vm.pubsub.Publish("accepted", txID)
tx.t.deps = nil // Needed to prevent a memory leak
tx.deps = nil // Needed to prevent a memory leak
if tx.t.onDecide != nil {
tx.t.onDecide(choices.Accepted)
if tx.onDecide != nil {
tx.onDecide(choices.Accepted)
}
}
@ -148,24 +150,24 @@ func (tx *UniqueTx) Reject() {
tx.vm.pubsub.Publish("rejected", txID)
tx.t.deps = nil // Needed to prevent a memory leak
tx.deps = nil // Needed to prevent a memory leak
if tx.t.onDecide != nil {
tx.t.onDecide(choices.Rejected)
if tx.onDecide != nil {
tx.onDecide(choices.Rejected)
}
}
// Status returns the current status of this transaction
func (tx *UniqueTx) Status() choices.Status {
tx.refresh()
return tx.t.status
return tx.status
}
// Dependencies returns the set of transactions this transaction builds on
func (tx *UniqueTx) Dependencies() []snowstorm.Tx {
tx.refresh()
if tx.t.tx == nil || len(tx.t.deps) != 0 {
return tx.t.deps
if tx.Tx == nil || len(tx.deps) != 0 {
return tx.deps
}
txIDs := ids.Set{}
@ -173,61 +175,61 @@ func (tx *UniqueTx) Dependencies() []snowstorm.Tx {
txID, _ := in.InputSource()
if !txIDs.Contains(txID) {
txIDs.Add(txID)
tx.t.deps = append(tx.t.deps, &UniqueTx{
tx.deps = append(tx.deps, &UniqueTx{
vm: tx.vm,
txID: txID,
})
}
}
for _, assetID := range tx.t.tx.AssetIDs().List() {
for _, assetID := range tx.Tx.AssetIDs().List() {
if !txIDs.Contains(assetID) {
txIDs.Add(assetID)
tx.t.deps = append(tx.t.deps, &UniqueTx{
tx.deps = append(tx.deps, &UniqueTx{
vm: tx.vm,
txID: assetID,
})
}
}
return tx.t.deps
return tx.deps
}
// InputIDs returns the set of utxoIDs this transaction consumes
func (tx *UniqueTx) InputIDs() ids.Set {
tx.refresh()
if tx.t.tx == nil || tx.t.inputs.Len() != 0 {
return tx.t.inputs
if tx.Tx == nil || tx.inputs.Len() != 0 {
return tx.inputs
}
for _, utxo := range tx.InputUTXOs() {
tx.t.inputs.Add(utxo.InputID())
tx.inputs.Add(utxo.InputID())
}
return tx.t.inputs
return tx.inputs
}
// InputUTXOs returns the utxos that will be consumed on tx acceptance
func (tx *UniqueTx) InputUTXOs() []*UTXOID {
tx.refresh()
if tx.t.tx == nil || len(tx.t.inputUTXOs) != 0 {
return tx.t.inputUTXOs
if tx.Tx == nil || len(tx.inputUTXOs) != 0 {
return tx.inputUTXOs
}
tx.t.inputUTXOs = tx.t.tx.InputUTXOs()
return tx.t.inputUTXOs
tx.inputUTXOs = tx.Tx.InputUTXOs()
return tx.inputUTXOs
}
// UTXOs returns the utxos that will be added to the UTXO set on tx acceptance
func (tx *UniqueTx) UTXOs() []*UTXO {
tx.refresh()
if tx.t.tx == nil || len(tx.t.utxos) != 0 {
return tx.t.utxos
if tx.Tx == nil || len(tx.utxos) != 0 {
return tx.utxos
}
tx.t.utxos = tx.t.tx.UTXOs()
return tx.t.utxos
tx.utxos = tx.Tx.UTXOs()
return tx.utxos
}
// Bytes returns the binary representation of this transaction
func (tx *UniqueTx) Bytes() []byte {
tx.refresh()
return tx.t.tx.Bytes()
return tx.Tx.Bytes()
}
// Verify the validity of this transaction
@ -248,39 +250,39 @@ func (tx *UniqueTx) Verify() error {
func (tx *UniqueTx) SyntacticVerify() error {
tx.refresh()
if tx.t.tx == nil {
if tx.Tx == nil {
return errUnknownTx
}
if tx.t.verifiedTx {
return tx.t.validity
if tx.verifiedTx {
return tx.validity
}
tx.t.verifiedTx = true
tx.t.validity = tx.t.tx.SyntacticVerify(tx.vm.ctx, tx.vm.codec, len(tx.vm.fxs))
return tx.t.validity
tx.verifiedTx = true
tx.validity = tx.Tx.SyntacticVerify(tx.vm.ctx, tx.vm.codec, len(tx.vm.fxs))
return tx.validity
}
// SemanticVerify the validity of this transaction
func (tx *UniqueTx) SemanticVerify() error {
tx.SyntacticVerify()
if tx.t.validity != nil || tx.t.verifiedState {
return tx.t.validity
if tx.validity != nil || tx.verifiedState {
return tx.validity
}
tx.t.verifiedState = true
tx.t.validity = tx.t.tx.SemanticVerify(tx.vm, tx)
tx.verifiedState = true
tx.validity = tx.Tx.SemanticVerify(tx.vm, tx)
if tx.t.validity == nil {
if tx.validity == nil {
tx.vm.pubsub.Publish("verified", tx.ID())
}
return tx.t.validity
return tx.validity
}
// UnsignedBytes returns the unsigned bytes of the transaction
func (tx *UniqueTx) UnsignedBytes() []byte {
b, err := tx.vm.codec.Marshal(&tx.t.tx.UnsignedTx)
b, err := tx.vm.codec.Marshal(&tx.Tx.UnsignedTx)
tx.vm.ctx.Log.AssertNoError(err)
return b
}
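The refactor above is what makes a UniqueTx produce meaningful JSON: the transaction state moves from the unexported t *txState field into an exported, embedded *TxState, so the embedded Tx's exported fields are promoted into the output, while unexported fields such as vm and txID are skipped by encoding/json. A minimal sketch of that promotion, with simplified stand-in types:

package main

import (
	"encoding/json"
	"fmt"
)

type Tx struct {
	Name string `json:"name"`
}

// Exported, embedded state: its exported fields are promoted into the
// JSON of the outer struct.
type TxState struct {
	*Tx
	unique bool // unexported: ignored by encoding/json
}

type UniqueTx struct {
	*TxState
	vm   interface{} // unexported: ignored by encoding/json
	txID string      // unexported: ignored by encoding/json
}

func main() {
	utx := &UniqueTx{TxState: &TxState{Tx: &Tx{Name: "example"}}, txID: "hidden"}
	b, _ := json.Marshal(utx)
	fmt.Println(string(b)) // {"name":"example"}
}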

View File

@ -19,7 +19,7 @@ type UTXO struct {
UTXOID `serialize:"true"`
Asset `serialize:"true"`
Out verify.Verifiable `serialize:"true"`
Out verify.Verifiable `serialize:"true" json:"output"`
}
// Verify implements the verify.Verifiable interface

View File

@ -17,8 +17,8 @@ var (
// UTXOID ...
type UTXOID struct {
// Serialized:
TxID ids.ID `serialize:"true"`
OutputIndex uint32 `serialize:"true"`
TxID ids.ID `serialize:"true" json:"txID"`
OutputIndex uint32 `serialize:"true" json:"outputIndex"`
// Cached:
id ids.ID

View File

@ -264,7 +264,7 @@ func (vm *VM) IssueTx(b []byte, onDecide func(choices.Status)) (ids.ID, error) {
return ids.ID{}, err
}
vm.issueTx(tx)
tx.t.onDecide = onDecide
tx.onDecide = onDecide
return tx.ID(), nil
}
@ -402,18 +402,18 @@ func (vm *VM) parseTx(b []byte) (*UniqueTx, error) {
rawTx.Initialize(b)
tx := &UniqueTx{
TxState: &TxState{
Tx: rawTx,
},
vm: vm,
txID: rawTx.ID(),
t: &txState{
tx: rawTx,
},
}
if err := tx.SyntacticVerify(); err != nil {
return nil, err
}
if tx.Status() == choices.Unknown {
if err := vm.state.SetTx(tx.ID(), tx.t.tx); err != nil {
if err := vm.state.SetTx(tx.ID(), tx.Tx); err != nil {
return nil, err
}
tx.setStatus(choices.Processing)
@ -449,7 +449,7 @@ func (vm *VM) verifyFxUsage(fxID int, assetID ids.ID) bool {
if status := tx.Status(); !status.Fetched() {
return false
}
createAssetTx, ok := tx.t.tx.UnsignedTx.(*CreateAssetTx)
createAssetTx, ok := tx.UnsignedTx.(*CreateAssetTx)
if !ok {
return false
}

View File

@ -5,6 +5,7 @@ package avm
import (
"bytes"
"encoding/json"
"testing"
"github.com/ava-labs/gecko/database/memdb"
@ -16,6 +17,7 @@ import (
"github.com/ava-labs/gecko/utils/hashing"
"github.com/ava-labs/gecko/utils/units"
"github.com/ava-labs/gecko/vms/components/codec"
"github.com/ava-labs/gecko/vms/components/verify"
"github.com/ava-labs/gecko/vms/secp256k1fx"
)
@ -300,13 +302,11 @@ func TestTxSerialization(t *testing.T) {
Asset: Asset{
ID: asset,
},
Outs: []*OperableOutput{
&OperableOutput{
Out: &secp256k1fx.MintOutput{
OutputOwners: secp256k1fx.OutputOwners{
Threshold: 1,
Addrs: []ids.ShortID{keys[0].PublicKey().Address()},
},
Outs: []verify.Verifiable{
&secp256k1fx.MintOutput{
OutputOwners: secp256k1fx.OutputOwners{
Threshold: 1,
Addrs: []ids.ShortID{keys[0].PublicKey().Address()},
},
},
},
@ -478,11 +478,9 @@ func TestIssueTx(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
newTx.Creds = append(newTx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
newTx.Creds = append(newTx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -622,11 +620,9 @@ func TestIssueDependentTx(t *testing.T) {
fixedSig := [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
firstTx.Creds = append(firstTx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
firstTx.Creds = append(firstTx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -675,11 +671,9 @@ func TestIssueDependentTx(t *testing.T) {
fixedSig = [crypto.SECP256K1RSigLen]byte{}
copy(fixedSig[:], sig)
secondTx.Creds = append(secondTx.Creds, &Credential{
Cred: &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
secondTx.Creds = append(secondTx.Creds, &secp256k1fx.Credential{
Sigs: [][crypto.SECP256K1RSigLen]byte{
fixedSig,
},
})
@ -701,7 +695,14 @@ func TestIssueDependentTx(t *testing.T) {
t.Fatalf("Wrong message")
}
if txs := vm.PendingTxs(); len(txs) != 2 {
txs := vm.PendingTxs()
if len(txs) != 2 {
t.Fatalf("Should have returned %d tx(s)", 2)
}
jsonBytes, err := json.Marshal(txs[0])
if err != nil {
t.Fatal(err)
}
t.Fatalf("%s", jsonBytes)
}

View File

@ -15,7 +15,7 @@ var (
// Credential ...
type Credential struct {
Sigs [][crypto.SECP256K1RSigLen]byte `serialize:"true"`
Sigs [][crypto.SECP256K1RSigLen]byte `serialize:"true" json:"signatures"`
}
// Verify ...

View File

@ -16,7 +16,7 @@ var (
// Input ...
type Input struct {
SigIndices []uint32 `serialize:"true"`
SigIndices []uint32 `serialize:"true" json:"signatureIndices"`
}
// Verify this input is syntactically valid

View File

@ -18,8 +18,8 @@ var (
// OutputOwners ...
type OutputOwners struct {
Threshold uint32 `serialize:"true"`
Addrs []ids.ShortID `serialize:"true"`
Threshold uint32 `serialize:"true" json:"threshold"`
Addrs []ids.ShortID `serialize:"true" json:"addresses"`
}
// Addresses returns the addresses that manage this output

View File

@ -13,7 +13,7 @@ var (
// TransferInput ...
type TransferInput struct {
Amt uint64 `serialize:"true"`
Amt uint64 `serialize:"true" json:"amount"`
Input `serialize:"true"`
}

View File

@ -13,8 +13,8 @@ var (
// TransferOutput ...
type TransferOutput struct {
Amt uint64 `serialize:"true"`
Locktime uint64 `serialize:"true"`
Amt uint64 `serialize:"true" json:"amount"`
Locktime uint64 `serialize:"true" json:"locktime"`
OutputOwners `serialize:"true"`
}