2022-11-17 07:37:29 -08:00
|
|
|
package vaa
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
2022-11-23 04:06:29 -08:00
|
|
|
"fmt"
|
2022-11-17 07:37:29 -08:00
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
"github.com/pkg/errors"
|
2023-04-05 06:33:28 -07:00
|
|
|
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/transactions"
|
2022-11-23 04:06:29 -08:00
|
|
|
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
|
2023-01-23 06:45:09 -08:00
|
|
|
"github.com/wormhole-foundation/wormhole/sdk/vaa"
|
2022-11-17 07:37:29 -08:00
|
|
|
"go.mongodb.org/mongo-driver/bson"
|
|
|
|
"go.mongodb.org/mongo-driver/mongo"
|
|
|
|
"go.mongodb.org/mongo-driver/mongo/options"
|
|
|
|
"go.uber.org/zap"
|
|
|
|
)
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// Repository provides read access to the MongoDB collections backing the
// VAA endpoints: vaas, pythnet VAAs, invalid VAAs, VAA counts, and global
// transaction metadata.
type Repository struct {
	db     *mongo.Database
	logger *zap.Logger
	// collections groups the handles to every collection this repository
	// reads from; they are initialized once in NewRepository.
	collections struct {
		vaas               *mongo.Collection // collection "vaas": VAAs for all chains except Pythnet
		vaasPythnet        *mongo.Collection // collection "vaasPythnet": Pythnet VAAs are stored separately
		invalidVaas        *mongo.Collection // collection "invalid_vaas" (not used in this chunk)
		vaaCount           *mongo.Collection // collection "vaaCounts": per-chain VAA statistics
		globalTransactions *mongo.Collection // collection "globalTransactions": origin-tx metadata keyed by VAA ID
	}
}
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// NewRepository create a new Repository.
|
2022-11-17 07:37:29 -08:00
|
|
|
func NewRepository(db *mongo.Database, logger *zap.Logger) *Repository {
|
|
|
|
return &Repository{db: db,
|
|
|
|
logger: logger.With(zap.String("module", "VaaRepository")),
|
|
|
|
collections: struct {
|
2023-04-05 06:33:28 -07:00
|
|
|
vaas *mongo.Collection
|
|
|
|
vaasPythnet *mongo.Collection
|
|
|
|
invalidVaas *mongo.Collection
|
|
|
|
vaaCount *mongo.Collection
|
|
|
|
globalTransactions *mongo.Collection
|
|
|
|
}{
|
|
|
|
vaas: db.Collection("vaas"),
|
|
|
|
vaasPythnet: db.Collection("vaasPythnet"),
|
|
|
|
invalidVaas: db.Collection("invalid_vaas"),
|
|
|
|
vaaCount: db.Collection("vaaCounts"),
|
|
|
|
globalTransactions: db.Collection("globalTransactions"),
|
|
|
|
},
|
|
|
|
}
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
|
|
|
|
2023-04-05 06:33:28 -07:00
|
|
|
// FindVaasBySolanaTxHash searches the database for VAAs that match a given Solana transaction hash.
|
|
|
|
func (r *Repository) FindVaasBySolanaTxHash(
|
|
|
|
ctx context.Context,
|
|
|
|
txHash string,
|
|
|
|
includeParsedPayload bool,
|
|
|
|
) ([]*VaaDoc, error) {
|
2023-02-03 11:46:02 -08:00
|
|
|
|
2023-04-05 06:33:28 -07:00
|
|
|
// Find globalTransactions that match the given Solana TxHash
|
|
|
|
cur, err := r.collections.globalTransactions.Find(
|
|
|
|
ctx,
|
|
|
|
bson.D{bson.E{"originTx.nativeTxHash", txHash}},
|
|
|
|
nil,
|
|
|
|
)
|
2022-11-17 07:37:29 -08:00
|
|
|
if err != nil {
|
2022-11-23 04:06:29 -08:00
|
|
|
requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
|
2023-04-05 06:33:28 -07:00
|
|
|
r.logger.Error("failed to find globalTransactions by Solana TxHash",
|
|
|
|
zap.Error(err),
|
|
|
|
zap.String("requestID", requestID),
|
|
|
|
)
|
2022-11-23 04:06:29 -08:00
|
|
|
return nil, errors.WithStack(err)
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
2023-02-03 11:46:02 -08:00
|
|
|
|
2023-04-05 06:33:28 -07:00
|
|
|
// Read results from cursor
|
|
|
|
var globalTxs []transactions.GlobalTransactionDoc
|
|
|
|
err = cur.All(ctx, &globalTxs)
|
2022-11-17 07:37:29 -08:00
|
|
|
if err != nil {
|
2022-11-23 04:06:29 -08:00
|
|
|
requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
|
2023-04-05 06:33:28 -07:00
|
|
|
r.logger.Error("failed to decode cursor to []GlobalTransactionDoc",
|
|
|
|
zap.Error(err),
|
|
|
|
zap.String("requestID", requestID),
|
|
|
|
)
|
2022-11-23 04:06:29 -08:00
|
|
|
return nil, errors.WithStack(err)
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
2023-02-03 11:46:02 -08:00
|
|
|
|
2023-03-22 10:18:11 -07:00
|
|
|
// If no results were found, return an empty slice instead of nil.
|
2023-04-05 06:33:28 -07:00
|
|
|
if len(globalTxs) == 0 {
|
|
|
|
result := make([]*VaaDoc, 0)
|
|
|
|
return result, nil
|
2023-03-22 10:18:11 -07:00
|
|
|
}
|
2023-04-05 06:33:28 -07:00
|
|
|
if len(globalTxs) > 1 {
|
|
|
|
return nil, fmt.Errorf("expected at most one transaction, but found %d", len(globalTxs))
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
2022-11-23 04:06:29 -08:00
|
|
|
|
2023-04-05 06:33:28 -07:00
|
|
|
// Find VAAs that match the given VAA ID
|
|
|
|
q := Query().
|
|
|
|
SetID(globalTxs[0].ID).
|
|
|
|
IncludeParsedPayload(includeParsedPayload)
|
|
|
|
return r.FindVaas(ctx, q)
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
|
|
|
|
2023-04-05 06:33:28 -07:00
|
|
|
// FindVaas searches the database for VAAs matching the given filters.
//
// The query runs as an aggregation pipeline that sorts, applies each
// requested filter as a $match stage, left-joins the parsed payload
// (`parsedVaa`) and origin transaction (`globalTransactions`) collections,
// filters by appId, and finally applies pagination. Pythnet VAAs live in a
// separate collection, so q.chainId decides which collection the pipeline
// runs against.
func (r *Repository) FindVaas(
	ctx context.Context,
	q *VaaQuery,
) ([]*VaaDoc, error) {

	// build a query pipeline based on input parameters
	var pipeline mongo.Pipeline
	{
		// specify sorting criteria (by timestamp, direction taken from pagination)
		pipeline = append(pipeline, bson.D{
			{"$sort", bson.D{q.getSortPredicate()}},
		})

		// filter by _id
		if q.id != "" {
			pipeline = append(pipeline, bson.D{
				{"$match", bson.D{bson.E{"_id", q.id}}},
			})
		}

		// filter by emitterChain
		if q.chainId != 0 {
			pipeline = append(pipeline, bson.D{
				{"$match", bson.D{bson.E{"emitterChain", q.chainId}}},
			})
		}

		// filter by emitterAddr
		if q.emitter != "" {
			pipeline = append(pipeline, bson.D{
				{"$match", bson.D{bson.E{"emitterAddr", q.emitter}}},
			})
		}

		// filter by sequence
		if q.sequence != "" {
			pipeline = append(pipeline, bson.D{
				{"$match", bson.D{bson.E{"sequence", q.sequence}}},
			})
		}

		// filter by txHash
		if q.txHash != "" {
			pipeline = append(pipeline, bson.D{
				{"$match", bson.D{bson.E{"txHash", q.txHash}}},
			})
		}

		// left outer join on the `parsedVaa` collection (matched by VAA ID)
		pipeline = append(pipeline, bson.D{
			{"$lookup", bson.D{
				{"from", "parsedVaa"},
				{"localField", "_id"},
				{"foreignField", "_id"},
				{"as", "payload"},
			}},
		})

		// add parsed payload fields; $lookup produced an array, so take its
		// first (and, by _id equality, only) element
		pipeline = append(pipeline, bson.D{
			{"$addFields", bson.D{
				{"payload", bson.M{"$arrayElemAt": []interface{}{"$payload.result", 0}}},
				{"appId", bson.M{"$arrayElemAt": []interface{}{"$payload.appId", 0}}},
			}},
		})

		// left outer join on the `globalTransactions` collection (matched by VAA ID)
		pipeline = append(pipeline, bson.D{
			{"$lookup", bson.D{
				{"from", "globalTransactions"},
				{"localField", "_id"},
				{"foreignField", "_id"},
				{"as", "globalTransaction"},
			}},
		})

		// add globalTransaction fields (origin tx hash of the joined document)
		pipeline = append(pipeline, bson.D{
			{"$addFields", bson.D{
				{"nativeTxHash", bson.M{"$arrayElemAt": []interface{}{"$globalTransaction.originTx.nativeTxHash", 0}}},
			}},
		})

		// filter by appId; must come after the parsedVaa join, which is
		// where the appId field is materialized
		if q.appId != "" {
			pipeline = append(pipeline, bson.D{
				{"$match", bson.D{bson.E{"appId", q.appId}}},
			})
		}

		// skip initial results
		if q.Pagination.Skip != 0 {
			pipeline = append(pipeline, bson.D{
				{"$skip", q.Pagination.Skip},
			})
		}

		// limit size of results
		pipeline = append(pipeline, bson.D{
			{"$limit", q.Pagination.Limit},
		})
	}

	// execute the aggregation pipeline against the collection that holds
	// this chain's VAAs (Pythnet is stored separately)
	var err error
	var cur *mongo.Cursor
	if q.chainId == vaa.ChainIDPythNet {
		cur, err = r.collections.vaasPythnet.Aggregate(ctx, pipeline)
	} else {
		cur, err = r.collections.vaas.Aggregate(ctx, pipeline)
	}
	if err != nil {
		requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
		r.logger.Error("failed execute Aggregate command to get vaa with payload",
			zap.Error(err), zap.Any("q", q), zap.String("requestID", requestID))
		return nil, errors.WithStack(err)
	}

	// read results from cursor
	var vaasWithPayload []*VaaDoc
	err = cur.All(ctx, &vaasWithPayload)
	if err != nil {
		requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
		r.logger.Error("failed decoding cursor to []*VaaDoc",
			zap.Error(err),
			zap.Any("q", q),
			zap.String("requestID", requestID),
		)
		return nil, errors.WithStack(err)
	}

	// If no results were found, return an empty slice instead of nil.
	if vaasWithPayload == nil {
		vaasWithPayload = make([]*VaaDoc, 0)
	}

	// If the payload field was not requested, remove it from the results.
	// When filtering by appId the payload was needed by the pipeline, so it
	// is kept in that case even if not explicitly requested.
	if !q.includeParsedPayload && q.appId == "" {
		for i := range vaasWithPayload {
			vaasWithPayload[i].Payload = nil
		}
	}

	return vaasWithPayload, nil
}
|
|
|
|
|
2022-11-23 05:15:16 -08:00
|
|
|
// GetVaaCount get a count of vaa by chainID.
|
|
|
|
func (r *Repository) GetVaaCount(ctx context.Context, q *VaaQuery) ([]*VaaStats, error) {
|
2023-02-09 09:28:39 -08:00
|
|
|
|
|
|
|
cur, err := r.collections.vaaCount.Find(ctx, q.toBSON(), q.findOptions())
|
2022-11-17 07:37:29 -08:00
|
|
|
if err != nil {
|
2022-11-23 04:06:29 -08:00
|
|
|
requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
|
2022-11-23 05:15:16 -08:00
|
|
|
r.logger.Error("failed execute Find command to get vaaCount",
|
2022-11-23 04:06:29 -08:00
|
|
|
zap.Error(err), zap.String("requestID", requestID))
|
2022-11-23 05:15:16 -08:00
|
|
|
return nil, errors.WithStack(err)
|
|
|
|
}
|
|
|
|
var varCounts []*VaaStats
|
|
|
|
err = cur.All(ctx, &varCounts)
|
|
|
|
if err != nil {
|
|
|
|
requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
|
|
|
|
r.logger.Error("failed decoding cursor to []*VaaStats", zap.Error(err), zap.Any("q", q),
|
|
|
|
zap.String("requestID", requestID))
|
|
|
|
return nil, errors.WithStack(err)
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
2022-11-23 05:15:16 -08:00
|
|
|
return varCounts, nil
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// VaaQuery represents a query for the vaa mongodb document.
// Fields are populated through the fluent Set* methods; a zero-valued
// field means "no filter on this attribute".
type VaaQuery struct {
	pagination.Pagination
	id                   string      // matches the document _id
	chainId              vaa.ChainID // matches emitterChain; also selects the Pythnet collection in FindVaas
	emitter              string      // matches emitterAddr
	sequence             string      // matches sequence
	txHash               string      // matches txHash
	appId                string      // matches appId (joined in from the parsed payload by FindVaas)
	includeParsedPayload bool        // keep the parsed payload in FindVaas results
}
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// Query create a new VaaQuery with default pagination vaues.
|
2022-11-17 07:37:29 -08:00
|
|
|
func Query() *VaaQuery {
|
2023-02-10 10:55:45 -08:00
|
|
|
p := pagination.Default()
|
|
|
|
return &VaaQuery{Pagination: *p}
|
2022-11-17 07:37:29 -08:00
|
|
|
}
|
|
|
|
|
2023-04-05 06:33:28 -07:00
|
|
|
// SetChain sets the id field of the VaaQuery struct.
|
|
|
|
func (q *VaaQuery) SetID(id string) *VaaQuery {
|
|
|
|
q.id = id
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// SetChain set the chainId field of the VaaQuery struct.
|
2022-11-17 07:37:29 -08:00
|
|
|
func (q *VaaQuery) SetChain(chainID vaa.ChainID) *VaaQuery {
|
|
|
|
q.chainId = chainID
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// SetEmitter set the emitter field of the VaaQuery struct.
|
2022-11-17 07:37:29 -08:00
|
|
|
func (q *VaaQuery) SetEmitter(emitter string) *VaaQuery {
|
|
|
|
q.emitter = emitter
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// SetSequence set the sequence field of the VaaQuery struct.
|
2022-12-14 11:26:45 -08:00
|
|
|
func (q *VaaQuery) SetSequence(seq string) *VaaQuery {
|
2022-11-17 07:37:29 -08:00
|
|
|
q.sequence = seq
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2022-11-23 04:06:29 -08:00
|
|
|
// SetPagination set the pagination field of the VaaQuery struct.
|
2022-11-17 07:37:29 -08:00
|
|
|
func (q *VaaQuery) SetPagination(p *pagination.Pagination) *VaaQuery {
|
|
|
|
q.Pagination = *p
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2023-01-26 06:54:41 -08:00
|
|
|
// SetTxHash set the txHash field of the VaaQuery struct.
|
|
|
|
func (q *VaaQuery) SetTxHash(txHash string) *VaaQuery {
|
|
|
|
q.txHash = txHash
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2023-02-01 04:59:51 -08:00
|
|
|
func (q *VaaQuery) SetAppId(appId string) *VaaQuery {
|
|
|
|
q.appId = appId
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2023-04-05 06:33:28 -07:00
|
|
|
func (q *VaaQuery) IncludeParsedPayload(val bool) *VaaQuery {
|
|
|
|
q.includeParsedPayload = val
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2022-11-17 07:37:29 -08:00
|
|
|
func (q *VaaQuery) toBSON() *bson.D {
|
|
|
|
r := bson.D{}
|
2023-04-05 06:33:28 -07:00
|
|
|
if q.id != "" {
|
|
|
|
r = append(r, bson.E{"_id", q.id})
|
|
|
|
}
|
2022-11-17 07:37:29 -08:00
|
|
|
if q.chainId > 0 {
|
|
|
|
r = append(r, bson.E{"emitterChain", q.chainId})
|
|
|
|
}
|
|
|
|
if q.emitter != "" {
|
|
|
|
r = append(r, bson.E{"emitterAddr", q.emitter})
|
|
|
|
}
|
2022-12-14 11:26:45 -08:00
|
|
|
if q.sequence != "" {
|
2022-11-17 07:37:29 -08:00
|
|
|
r = append(r, bson.E{"sequence", q.sequence})
|
|
|
|
}
|
2023-01-26 06:54:41 -08:00
|
|
|
if q.txHash != "" {
|
|
|
|
r = append(r, bson.E{"txHash", q.txHash})
|
|
|
|
}
|
2022-11-17 07:37:29 -08:00
|
|
|
return &r
|
|
|
|
}
|
2023-02-09 09:28:39 -08:00
|
|
|
|
2023-04-28 06:18:07 -07:00
|
|
|
func (q *VaaQuery) getSortPredicate() bson.E {
|
|
|
|
return bson.E{"timestamp", q.GetSortInt()}
|
|
|
|
}
|
|
|
|
|
2023-02-09 09:28:39 -08:00
|
|
|
func (q *VaaQuery) findOptions() *options.FindOptions {
|
|
|
|
|
2023-04-28 06:18:07 -07:00
|
|
|
sort := bson.D{q.getSortPredicate()}
|
2023-02-09 09:28:39 -08:00
|
|
|
|
|
|
|
return options.
|
|
|
|
Find().
|
|
|
|
SetSort(sort).
|
|
|
|
SetLimit(q.Limit).
|
|
|
|
SetSkip(q.Skip)
|
|
|
|
}
|