Integrating blockchain-watcher data into MongoDB (#820)

* Handle log-message-published events generated by blockchain-watcher in the parser, tx-tracker, and analytics services

* Add deployment for staging-testnet environment
Improve logs

* Add operation endpoints

---------

Co-authored-by: Agustin Pazos <agpazos85@gmail.com>
ftocal 2023-11-27 12:31:35 -03:00 committed by GitHub
parent 54b450e86f
commit 7c467f5267
91 changed files with 1994 additions and 704 deletions


@ -105,11 +105,17 @@ func Run() {
logger.Fatal("failed to create metrics instance", zap.Error(err))
}
// create and start a consumer.
logger.Info("initializing metrics consumer...")
vaaConsumeFunc := newVAAConsume(rootCtx, config, logger)
consumer := consumer.New(vaaConsumeFunc, metric.Push, logger, config.P2pNetwork)
consumer.Start(rootCtx)
// create and start a vaa consumer.
logger.Info("initializing vaa consumer...")
vaaConsumeFunc := newVAAConsumeFunc(rootCtx, config, logger)
vaaConsumer := consumer.New(vaaConsumeFunc, metric.Push, logger, config.P2pNetwork)
vaaConsumer.Start(rootCtx)
// create and start a notification consumer.
logger.Info("initializing notification consumer...")
notificationConsumeFunc := newNotificationConsumeFunc(rootCtx, config, logger)
notificationConsumer := consumer.New(notificationConsumeFunc, metric.Push, logger, config.P2pNetwork)
notificationConsumer.Start(rootCtx)
// create and start server.
logger.Info("initializing infrastructure server...")
@ -146,23 +152,34 @@ func Run() {
}
// Creates a callback depending on whether the execution is local (memory queue) or not (SQS queue)
func newVAAConsume(appCtx context.Context, config *config.Configuration, logger *zap.Logger) queue.VAAConsumeFunc {
sqsConsumer, err := newSQSConsumer(appCtx, config)
func newVAAConsumeFunc(appCtx context.Context, config *config.Configuration, logger *zap.Logger) queue.ConsumeFunc {
sqsConsumer, err := newSQSConsumer(appCtx, config, config.PipelineSQSUrl)
if err != nil {
logger.Fatal("failed to create sqs consumer", zap.Error(err))
}
vaaQueue := queue.NewVaaSqs(sqsConsumer, logger)
vaaQueue := queue.NewEventSqs(sqsConsumer, queue.NewVaaConverter(logger), logger)
return vaaQueue.Consume
}
func newSQSConsumer(appCtx context.Context, config *config.Configuration) (*sqs_client.Consumer, error) {
func newNotificationConsumeFunc(ctx context.Context, cfg *config.Configuration, logger *zap.Logger) queue.ConsumeFunc {
sqsConsumer, err := newSQSConsumer(ctx, cfg, cfg.NotificationsSQSUrl)
if err != nil {
logger.Fatal("failed to create sqs consumer", zap.Error(err))
}
vaaQueue := queue.NewEventSqs(sqsConsumer, queue.NewNotificationEvent(logger), logger)
return vaaQueue.Consume
}
func newSQSConsumer(appCtx context.Context, config *config.Configuration, sqsUrl string) (*sqs_client.Consumer, error) {
awsconfig, err := newAwsConfig(appCtx, config)
if err != nil {
return nil, err
}
return sqs_client.NewConsumer(awsconfig, config.SQSUrl,
return sqs_client.NewConsumer(awsconfig, sqsUrl,
sqs_client.WithMaxMessages(10),
sqs_client.WithVisibilityTimeout(120))
}
@ -211,7 +228,8 @@ func newHealthChecks(
}
healthChecks := []health.Check{
health.SQS(awsConfig, config.SQSUrl),
health.SQS(awsConfig, config.PipelineSQSUrl),
health.SQS(awsConfig, config.NotificationsSQSUrl),
health.Influx(influxCli),
health.Mongo(db),
}


@ -94,7 +94,7 @@ func (r *TokenResolver) GetTransferredTokenByVaa(ctx context.Context, vaa *sdk.V
token, err := createToken(result.StandardizedProperties, vaa.EmitterChain)
if err != nil {
r.logger.Error("Creating transferred token",
r.logger.Debug("Creating transferred token",
zap.String("vaaId", vaa.MessageID()),
zap.Error(err))
return nil, ErrUnknownToken


@ -17,7 +17,8 @@ type Configuration struct {
AwsAccessKeyID string `env:"AWS_ACCESS_KEY_ID"`
AwsSecretAccessKey string `env:"AWS_SECRET_ACCESS_KEY"`
AwsRegion string `env:"AWS_REGION"`
SQSUrl string `env:"SQS_URL"`
PipelineSQSUrl string `env:"PIPELINE_SQS_URL"`
NotificationsSQSUrl string `env:"NOTIFICATIONS_SQS_URL"`
InfluxUrl string `env:"INFLUX_URL"`
InfluxToken string `env:"INFLUX_TOKEN"`
InfluxOrganization string `env:"INFLUX_ORGANIZATION"`


@ -11,14 +11,14 @@ import (
// Consumer consumer struct definition.
type Consumer struct {
consume queue.VAAConsumeFunc
consume queue.ConsumeFunc
pushMetric metric.MetricPushFunc
logger *zap.Logger
p2pNetwork string
}
// New creates a new vaa consumer.
func New(consume queue.VAAConsumeFunc, pushMetric metric.MetricPushFunc, logger *zap.Logger, p2pNetwork string) *Consumer {
func New(consume queue.ConsumeFunc, pushMetric metric.MetricPushFunc, logger *zap.Logger, p2pNetwork string) *Consumer {
return &Consumer{consume: consume, pushMetric: pushMetric, logger: logger, p2pNetwork: p2pNetwork}
}
@ -44,7 +44,7 @@ func (c *Consumer) Start(ctx context.Context) {
}
// push vaa metrics.
err = c.pushMetric(ctx, vaa)
err = c.pushMetric(ctx, &metric.Params{TrackID: event.TrackID, Vaa: vaa, VaaIsSigned: event.VaaIsSigned})
if err != nil {
msg.Failed()
continue


@ -1,46 +0,0 @@
package infrastructure
import (
"fmt"
"github.com/gofiber/fiber/v2"
health "github.com/wormhole-foundation/wormhole-explorer/common/health"
"go.uber.org/zap"
)
// Controller definition.
type Controller struct {
checks []health.Check
logger *zap.Logger
}
// NewController creates a Controller instance.
func NewController(checks []health.Check, logger *zap.Logger) *Controller {
return &Controller{checks: checks, logger: logger}
}
// HealthCheck handler for the endpoint /health.
func (c *Controller) HealthCheck(ctx *fiber.Ctx) error {
return ctx.JSON(struct {
Status string `json:"status"`
}{Status: "OK"})
}
// ReadyCheck handler for the endpoint /ready.
func (c *Controller) ReadyCheck(ctx *fiber.Ctx) error {
rctx := ctx.Context()
requestID := fmt.Sprintf("%v", rctx.Value("requestid"))
for _, check := range c.checks {
if err := check(rctx); err != nil {
c.logger.Error("Ready check failed", zap.Error(err), zap.String("requestID", requestID))
return ctx.Status(fiber.StatusInternalServerError).JSON(struct {
Ready string `json:"ready"`
Error string `json:"error"`
}{Ready: "NO", Error: err.Error()})
}
}
return ctx.Status(fiber.StatusOK).JSON(struct {
Ready string `json:"ready"`
}{Ready: "OK"})
}


@ -4,7 +4,6 @@ import (
"github.com/ansrivas/fiberprometheus/v2"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/pprof"
"github.com/wormhole-foundation/wormhole-explorer/analytics/http/infrastructure"
"github.com/wormhole-foundation/wormhole-explorer/analytics/http/vaa"
health "github.com/wormhole-foundation/wormhole-explorer/common/health"
"go.uber.org/zap"
@ -29,7 +28,7 @@ func NewServer(logger *zap.Logger, port string, pprofEnabled bool, vaaController
app.Use(pprof.New())
}
ctrl := infrastructure.NewController(checks, logger)
ctrl := health.NewController(checks, logger)
api := app.Group("/api")
api.Get("/health", ctrl.HealthCheck)
api.Get("/ready", ctrl.ReadyCheck)


@ -1,6 +1,8 @@
package vaa
import (
"fmt"
"github.com/gofiber/fiber/v2"
"github.com/wormhole-foundation/wormhole-explorer/analytics/metric"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
@ -44,7 +46,8 @@ func (c *Controller) PushVAAMetrics(ctx *fiber.Ctx) error {
return err
}
err = c.pushMetric(ctx.Context(), vaa)
trackID := fmt.Sprintf("controller-%s", vaa.MessageID())
err = c.pushMetric(ctx.Context(), &metric.Params{TrackID: trackID, Vaa: vaa})
if err != nil {
c.logger.Error("Error pushing metric", zap.Error(err))
return err


@ -77,39 +77,61 @@ func New(
}
// Push implement MetricPushFunc definition.
func (m *Metric) Push(ctx context.Context, vaa *sdk.VAA) error {
func (m *Metric) Push(ctx context.Context, params *Params) error {
err1 := m.vaaCountMeasurement(ctx, vaa)
var err1, err2, err3, err4 error
err2 := m.vaaCountAllMessagesMeasurement(ctx, vaa)
isVaaSigned := params.VaaIsSigned
transferredToken, err := m.getTransferredTokenByVaa(ctx, vaa)
if err != nil {
m.logger.Warn("failed to obtain transferred token for this VAA",
zap.String("vaaId", vaa.MessageID()),
zap.Error(err))
if err != token.ErrUnknownToken {
return err
}
if isVaaSigned {
err1 = m.vaaCountMeasurement(ctx, params)
err2 = m.vaaCountAllMessagesMeasurement(ctx, params)
}
err3 := m.volumeMeasurement(ctx, vaa, transferredToken.Clone())
if params.Vaa.EmitterChain != sdk.ChainIDPythNet {
err4 := upsertTransferPrices(
ctx,
m.logger,
vaa,
m.transferPrices,
func(tokenID string, timestamp time.Time) (decimal.Decimal, error) {
priceData, err := m.notionalCache.Get(tokenID)
if err != nil {
return decimal.NewFromInt(0), err
transferredToken, err := m.getTransferredTokenByVaa(ctx, params.Vaa)
if err != nil {
if err != token.ErrUnknownToken {
m.logger.Error("Failed to obtain transferred token for this VAA",
zap.String("trackId", params.TrackID),
zap.String("vaaId", params.Vaa.MessageID()),
zap.Error(err))
return err
}
return priceData.NotionalUsd, nil
},
transferredToken.Clone(),
)
}
if transferredToken != nil {
if isVaaSigned {
err3 = m.volumeMeasurement(ctx, params.Vaa, transferredToken.Clone())
}
err4 = upsertTransferPrices(
ctx,
m.logger,
params.Vaa,
m.transferPrices,
func(tokenID string, timestamp time.Time) (decimal.Decimal, error) {
priceData, err := m.notionalCache.Get(tokenID)
if err != nil {
return decimal.NewFromInt(0), err
}
return priceData.NotionalUsd, nil
},
transferredToken.Clone(),
)
} else {
m.logger.Warn("Cannot obtain transferred token for this VAA",
zap.String("trackId", params.TrackID),
zap.String("vaaId", params.Vaa.MessageID()),
)
}
}
//TODO if we had go 1.20, we could just use `errors.Join(err1, err2, err3, ...)` here.
if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
@ -135,10 +157,10 @@ func (m *Metric) Close() {
}
// vaaCountMeasurement creates a new point for the `vaa_count` measurement.
func (m *Metric) vaaCountMeasurement(ctx context.Context, vaa *sdk.VAA) error {
func (m *Metric) vaaCountMeasurement(ctx context.Context, p *Params) error {
// Create a new point
point, err := MakePointForVaaCount(vaa)
point, err := MakePointForVaaCount(p.Vaa)
if err != nil {
return fmt.Errorf("failed to generate data point for vaa count measurement: %w", err)
}
@ -151,8 +173,9 @@ func (m *Metric) vaaCountMeasurement(ctx context.Context, vaa *sdk.VAA) error {
err = m.apiBucket30Days.WritePoint(ctx, point)
if err != nil {
m.logger.Error("failed to write metric",
zap.String("trackId", p.TrackID),
zap.String("measurement", point.Name()),
zap.Uint16("chain_id", uint16(vaa.EmitterChain)),
zap.Uint16("chain_id", uint16(p.Vaa.EmitterChain)),
zap.Error(err),
)
m.metrics.IncFailedMeasurement(VaaCountMeasurement)
@ -164,15 +187,16 @@ func (m *Metric) vaaCountMeasurement(ctx context.Context, vaa *sdk.VAA) error {
}
// vaaCountAllMessagesMeasurement creates a new point for the `vaa_count_all_messages` measurement.
func (m *Metric) vaaCountAllMessagesMeasurement(ctx context.Context, vaa *sdk.VAA) error {
func (m *Metric) vaaCountAllMessagesMeasurement(ctx context.Context, params *Params) error {
// Quite often we get VAAs that are older than 24 hours.
// We do not want to generate metrics for those, and moreover influxDB
// returns an error when we try to do so.
if time.Since(vaa.Timestamp) > time.Hour*24 {
if time.Since(params.Vaa.Timestamp) > time.Hour*24 {
m.logger.Debug("vaa is older than 24 hours, skipping",
zap.Time("timestamp", vaa.Timestamp),
zap.String("vaaId", vaa.UniqueID()),
zap.String("trackId", params.TrackID),
zap.Time("timestamp", params.Vaa.Timestamp),
zap.String("vaaId", params.Vaa.UniqueID()),
)
return nil
}
@ -180,16 +204,16 @@ func (m *Metric) vaaCountAllMessagesMeasurement(ctx context.Context, vaa *sdk.VA
// Create a new point
point := influxdb2.
NewPointWithMeasurement(VaaAllMessagesMeasurement).
AddTag("chain_id", strconv.Itoa(int(vaa.EmitterChain))).
AddTag("chain_id", strconv.Itoa(int(params.Vaa.EmitterChain))).
AddField("count", 1).
SetTime(generateUniqueTimestamp(vaa))
SetTime(generateUniqueTimestamp(params.Vaa))
// Write the point to influx
err := m.apiBucket24Hours.WritePoint(ctx, point)
if err != nil {
m.logger.Error("failed to write metric",
zap.String("measurement", VaaAllMessagesMeasurement),
zap.Uint16("chain_id", uint16(vaa.EmitterChain)),
zap.Uint16("chain_id", uint16(params.Vaa.EmitterChain)),
zap.Error(err),
)
m.metrics.IncFailedMeasurement(VaaAllMessagesMeasurement)
@ -310,7 +334,7 @@ func MakePointForVaaVolume(params *MakePointForVaaVolumeParams) (*write.Point, e
// Do not generate this metric when the TransferredToken is undefined
if params.TransferredToken == nil {
if params.Logger != nil {
params.Logger.Warn("transferred token is undefined",
params.Logger.Warn("Transferred token is undefined",
zap.String("vaaId", params.Vaa.MessageID()),
)
}
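The TODO above notes that errors.Join would simplify the error aggregation once Go 1.20 is available; a sketch of how the err1/err2/err3/err4 check could collapse (assumes the "errors" package is imported):

// Hypothetical replacement for the err1/err2/err3/err4 check under Go 1.20+.
if err := errors.Join(err1, err2, err3, err4); err != nil {
	return err
}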


@ -29,6 +29,14 @@ type TransferPriceDoc struct {
TokenAmount string `bson:"tokenAmount"`
// UsdAmount is the value in USD of the token being transferred.
UsdAmount string `bson:"usdAmount"`
// TokenChain is the chain ID of the token being transferred.
TokenChain uint16 `bson:"tokenChain"`
// TokenAddress is the address of the token being transferred.
TokenAddress string `bson:"tokenAddress"`
// CoinGeckoID is the CoinGecko ID of the token being transferred.
CoinGeckoID string `bson:"coinGeckoId"`
// UpdatedAt is the timestamp the document was updated.
UpdatedAt time.Time `bson:"updatedAt"`
}
func upsertTransferPrices(
@ -92,10 +100,14 @@ func upsertTransferPrices(
SymbolPriceUsd: notionalUSD.Truncate(8).String(),
TokenAmount: tokenAmount.Truncate(8).String(),
UsdAmount: usdAmount.Truncate(8).String(),
TokenChain: uint16(transferredToken.TokenChain),
TokenAddress: transferredToken.TokenAddress.String(),
CoinGeckoID: tokenMeta.CoingeckoID,
UpdatedAt: time.Now(),
},
}
_, err = transferPrices.UpdateByID(
context.Background(),
ctx,
vaa.MessageID(),
update,
options.Update().SetUpsert(true),


@ -6,5 +6,11 @@ import (
"github.com/wormhole-foundation/wormhole/sdk/vaa"
)
type Params struct {
TrackID string
Vaa *vaa.VAA
VaaIsSigned bool
}
// MetricPushFunc is a function to push metrics
type MetricPushFunc func(context.Context, *vaa.VAA) error
type MetricPushFunc func(context.Context, *Params) error
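A minimal sketch of a function satisfying the new MetricPushFunc signature, e.g. for a test; the import path follows this repository's layout and the stand-in body is an assumption:

package metric_test

import (
	"context"
	"fmt"

	"github.com/wormhole-foundation/wormhole-explorer/analytics/metric"
)

// push is an illustrative MetricPushFunc that only logs instead of writing InfluxDB points.
var push metric.MetricPushFunc = func(ctx context.Context, p *metric.Params) error {
	fmt.Printf("pushing metrics: trackId=%s signed=%t vaa=%s\n", p.TrackID, p.VaaIsSigned, p.Vaa.MessageID())
	return nil
}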


@ -0,0 +1,115 @@
package queue
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/wormhole-foundation/wormhole-explorer/common/events"
"go.uber.org/zap"
)
// VaaEvent represents the VAA data to be handled by the pipeline.
type VaaEvent struct {
ID string `json:"id"`
ChainID uint16 `json:"emitterChain"`
EmitterAddress string `json:"emitterAddr"`
Sequence string `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Vaa []byte `json:"vaas"`
IndexedAt time.Time `json:"indexedAt"`
Timestamp *time.Time `json:"timestamp"`
UpdatedAt *time.Time `json:"updatedAt"`
TxHash string `json:"txHash"`
Version uint16 `json:"version"`
Revision uint16 `json:"revision"`
}
// NewVaaConverter returns a ConverterFunc that converts a VaaEvent message into an Event.
func NewVaaConverter(log *zap.Logger) ConverterFunc {
return func(msg string) (*Event, error) {
// unmarshal message to vaaEvent
var vaaEvent VaaEvent
err := json.Unmarshal([]byte(msg), &vaaEvent)
if err != nil {
return nil, err
}
return &Event{
TrackID: fmt.Sprintf("pipeline-%s", vaaEvent.ID),
ID: vaaEvent.ID,
ChainID: vaaEvent.ChainID,
EmitterAddress: vaaEvent.EmitterAddress,
Sequence: vaaEvent.Sequence,
Vaa: vaaEvent.Vaa,
Timestamp: vaaEvent.Timestamp,
VaaIsSigned: true,
}, nil
}
}
func NewNotificationEvent(log *zap.Logger) ConverterFunc {
return func(msg string) (*Event, error) {
// unmarshal message to NotificationEvent
var notification events.NotificationEvent
err := json.Unmarshal([]byte(msg), &notification)
if err != nil {
return nil, err
}
if notification.Event != events.SignedVaaType && notification.Event != events.LogMessagePublishedMesageType {
log.Debug("Skip event type", zap.String("trackId", notification.TrackID), zap.String("type", notification.Event))
return nil, nil
}
switch notification.Event {
case events.SignedVaaType:
signedVaa, err := events.GetEventData[events.SignedVaa](&notification)
if err != nil {
log.Error("Error decoding signedVAA from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, nil
}
return &Event{
TrackID: notification.TrackID,
ID: signedVaa.ID,
ChainID: signedVaa.EmitterChain,
EmitterAddress: signedVaa.EmitterAddress,
Sequence: strconv.FormatUint(signedVaa.Sequence, 10),
Timestamp: &signedVaa.Timestamp,
VaaIsSigned: false,
}, nil
case events.LogMessagePublishedMesageType:
plm, err := events.GetEventData[events.LogMessagePublished](&notification)
if err != nil {
log.Error("Error decoding publishedLogMessage from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, nil
}
vaa, err := events.CreateUnsignedVAA(&plm)
if err != nil {
log.Error("Error creating unsigned vaa", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, err
}
vaaBytes, err := vaa.MarshalBinary()
if err != nil {
return nil, err
}
return &Event{
TrackID: notification.TrackID,
ID: vaa.MessageID(),
ChainID: plm.ChainID,
EmitterAddress: plm.Attributes.Sender,
Sequence: strconv.FormatUint(plm.Attributes.Sequence, 10),
Timestamp: &plm.BlockTime,
Vaa: vaaBytes,
VaaIsSigned: false,
}, nil
}
return nil, nil
}
}
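A sketch of feeding a blockchain-watcher log-message-published notification through the new converter; the JSON body, its values, and the import path are assumptions for illustration:

package queue_test

import (
	"fmt"

	"github.com/wormhole-foundation/wormhole-explorer/analytics/queue"
	"go.uber.org/zap"
)

func ExampleNewNotificationEvent() {
	// Sample SQS message body in the notification format (all values are made up).
	body := `{
	  "trackId": "chain-event-0xcbde-18708316",
	  "source": "blockchain-watcher",
	  "event": "log-message-published",
	  "version": "1",
	  "timestamp": "2023-11-27T12:00:00Z",
	  "data": {
	    "chainId": 2,
	    "emitter": "0x3ee18b2214aff97000d974cf647e7c347e8fa585",
	    "txHash": "0xcbde0000000000000000000000000000000000000000000000000000deadbeef",
	    "blockHeight": "18708316",
	    "blockTime": "2023-11-27T11:59:59Z",
	    "attributes": {
	      "sender": "000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7",
	      "sequence": 162727,
	      "nonce": 0,
	      "payload": "0x01",
	      "consistencyLevel": 15
	    }
	  }
	}`

	convert := queue.NewNotificationEvent(zap.NewNop())
	event, err := convert(body)
	if err != nil {
		panic(err)
	}
	// The converter builds an unsigned VAA, so VaaIsSigned is false and the ID is the usual chain/emitter/sequence.
	fmt.Println(event.ID, event.VaaIsSigned)
}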


@ -16,19 +16,24 @@ type SQSOption func(*SQS)
// SQS represents a VAA queue in SQS.
type SQS struct {
consumer *sqs_client.Consumer
ch chan ConsumerMessage
chSize int
wg sync.WaitGroup
logger *zap.Logger
consumer *sqs_client.Consumer
ch chan ConsumerMessage
converter ConverterFunc
chSize int
wg sync.WaitGroup
logger *zap.Logger
}
// NewVaaSqs creates a VAA queue in SQS instances.
func NewVaaSqs(consumer *sqs_client.Consumer, logger *zap.Logger, opts ...SQSOption) *SQS {
// ConverterFunc converts an SQS message body into an Event.
type ConverterFunc func(string) (*Event, error)
// NewEventSqs creates an event queue consuming from SQS.
func NewEventSqs(consumer *sqs_client.Consumer, converter ConverterFunc, logger *zap.Logger, opts ...SQSOption) *SQS {
s := &SQS{
consumer: consumer,
chSize: 10,
logger: logger}
consumer: consumer,
converter: converter,
chSize: 10,
logger: logger}
for _, opt := range opts {
opt(s)
}
@ -62,18 +67,17 @@ func (q *SQS) Consume(ctx context.Context) <-chan ConsumerMessage {
continue
}
// unmarshal message to vaaEvent
var vaaEvent VaaEvent
err = json.Unmarshal([]byte(sqsEvent.Message), &vaaEvent)
// converts message to event
event, err := q.converter(sqsEvent.Message)
if err != nil {
q.logger.Error("Error decoding vaaEvent message from SQSEvent", zap.Error(err))
q.logger.Error("Error converting event message from SQSEvent", zap.Error(err))
continue
}
q.wg.Add(1)
q.ch <- &sqsConsumerMessage{
id: msg.ReceiptHandle,
data: &vaaEvent,
data: event,
wg: &q.wg,
logger: q.logger,
consumer: q.consumer,
@ -94,7 +98,7 @@ func (q *SQS) Close() {
}
type sqsConsumerMessage struct {
data *VaaEvent
data *Event
consumer *sqs_client.Consumer
wg *sync.WaitGroup
id *string
@ -103,7 +107,7 @@ type sqsConsumerMessage struct {
ctx context.Context
}
func (m *sqsConsumerMessage) Data() *VaaEvent {
func (m *sqsConsumerMessage) Data() *Event {
return m.data
}


@ -1,38 +0,0 @@
package queue
import (
"context"
"time"
)
type sqsEvent struct {
MessageID string `json:"MessageId"`
Message string `json:"Message"`
}
// VaaEvent represents a vaa data to be handle by the pipeline.
type VaaEvent struct {
ID string `json:"id"`
ChainID uint16 `json:"emitterChain"`
EmitterAddress string `json:"emitterAddr"`
Sequence string `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Vaa []byte `json:"vaas"`
IndexedAt time.Time `json:"indexedAt"`
Timestamp *time.Time `json:"timestamp"`
UpdatedAt *time.Time `json:"updatedAt"`
TxHash string `json:"txHash"`
Version uint16 `json:"version"`
Revision uint16 `json:"revision"`
}
// ConsumerMessage defition.
type ConsumerMessage interface {
Data() *VaaEvent
Done()
Failed()
IsExpired() bool
}
// VAAConsumeFunc is a function to consume VAAEvent.
type VAAConsumeFunc func(context.Context) <-chan ConsumerMessage

analytics/queue/types.go (new file, 34 lines)

@ -0,0 +1,34 @@
package queue
import (
"context"
"time"
)
type sqsEvent struct {
MessageID string `json:"MessageId"`
Message string `json:"Message"`
}
// Event represents event data to be handled.
type Event struct {
TrackID string
ID string
ChainID uint16
EmitterAddress string
Sequence string
Vaa []byte
Timestamp *time.Time
VaaIsSigned bool
}
// ConsumerMessage definition.
type ConsumerMessage interface {
Data() *Event
Done()
Failed()
IsExpired() bool
}
// ConsumeFunc is a function to consume Events.
type ConsumeFunc func(context.Context) <-chan ConsumerMessage
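The comment in main.go mentions a memory queue for local execution; a minimal sketch of an in-memory ConsumerMessage/ConsumeFunc pair that satisfies these interfaces (hypothetical helper, not part of the change):

package queue

import "context"

// memoryMessage is an illustrative in-memory ConsumerMessage.
type memoryMessage struct {
	event *Event
}

func (m *memoryMessage) Data() *Event    { return m.event }
func (m *memoryMessage) Done()           {}
func (m *memoryMessage) Failed()         {}
func (m *memoryMessage) IsExpired() bool { return false }

// NewMemoryConsumeFunc returns a ConsumeFunc that drains a fixed slice of events.
func NewMemoryConsumeFunc(events []*Event) ConsumeFunc {
	return func(ctx context.Context) <-chan ConsumerMessage {
		ch := make(chan ConsumerMessage, len(events))
		for _, e := range events {
			ch <- &memoryMessage{event: e}
		}
		close(ch)
		return ch
	}
}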


@ -0,0 +1,86 @@
package operations
import (
"time"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
)
// OperationDto operation data transfer object.
type OperationDto struct {
ID string `bson:"_id"`
TxHash string `bson:"txHash"`
Symbol string `bson:"symbol"`
UsdAmount string `bson:"usdAmount"`
TokenAmount string `bson:"tokenAmount"`
Vaa *VaaDto `bson:"vaa"`
SourceTx *OriginTx `bson:"originTx" json:"originTx"`
DestinationTx *DestinationTx `bson:"destinationTx" json:"destinationTx"`
Payload map[string]any `bson:"payload"`
StandardizedProperties *StandardizedProperties `bson:"standardizedProperties"`
}
// StandardizedProperties represents the standardized properties of an operation.
type StandardizedProperties struct {
AppIds []string `json:"appIds" bson:"appIds"`
FromChain sdk.ChainID `json:"fromChain" bson:"fromChain"`
FromAddress string `json:"fromAddress" bson:"fromAddress"`
ToChain sdk.ChainID `json:"toChain" bson:"toChain"`
ToAddress string `json:"toAddress" bson:"toAddress"`
TokenChain sdk.ChainID `json:"tokenChain" bson:"tokenChain"`
TokenAddress string `json:"tokenAddress" bson:"tokenAddress"`
Amount string `json:"amount" bson:"amount"`
FeeAddress string `json:"feeAddress" bson:"feeAddress"`
FeeChain sdk.ChainID `json:"feeChain" bson:"feeChain"`
Fee string `json:"fee" bson:"fee"`
}
// VaaDto vaa data transfer object.
type VaaDto struct {
ID string `bson:"_id" json:"id"`
Version uint8 `bson:"version" json:"version"`
EmitterChain sdk.ChainID `bson:"emitterChain" json:"emitterChain"`
EmitterAddr string `bson:"emitterAddr" json:"emitterAddr"`
EmitterNativeAddr string `json:"emitterNativeAddr,omitempty"`
Sequence string `bson:"sequence" json:"-"`
GuardianSetIndex uint32 `bson:"guardianSetIndex" json:"guardianSetIndex"`
Vaa []byte `bson:"vaas" json:"vaa"`
Timestamp *time.Time `bson:"timestamp" json:"timestamp"`
UpdatedAt *time.Time `bson:"updatedAt" json:"updatedAt"`
IndexedAt *time.Time `bson:"indexedAt" json:"indexedAt"`
}
// GlobalTransactionDoc definitions.
type GlobalTransactionDoc struct {
ID string `bson:"_id" json:"id"`
OriginTx *OriginTx `bson:"originTx" json:"originTx"`
DestinationTx *DestinationTx `bson:"destinationTx" json:"destinationTx"`
}
// OriginTx represents an origin transaction.
type OriginTx struct {
TxHash string `bson:"nativeTxHash" json:"txHash"`
From string `bson:"from" json:"from"`
Status string `bson:"status" json:"status"`
Timestamp *time.Time `bson:"timestamp" json:"timestamp"`
Attribute *AttributeDoc `bson:"attribute" json:"attribute"`
}
// AttributeDoc represents a custom attribute for an origin transaction.
type AttributeDoc struct {
Type string `bson:"type" json:"type"`
Value map[string]any `bson:"value" json:"value"`
}
// DestinationTx represents a destination transaction.
type DestinationTx struct {
ChainID sdk.ChainID `bson:"chainId" json:"chainId"`
Status string `bson:"status" json:"status"`
Method string `bson:"method" json:"method"`
TxHash string `bson:"txHash" json:"txHash"`
From string `bson:"from" json:"from"`
To string `bson:"to" json:"to"`
BlockNumber string `bson:"blockNumber" json:"blockNumber"`
Timestamp *time.Time `bson:"timestamp" json:"timestamp"`
UpdatedAt *time.Time `bson:"updatedAt" json:"updatedAt"`
}


@ -0,0 +1,238 @@
package operations
import (
"context"
"fmt"
"regexp"
"strings"
"github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
"github.com/wormhole-foundation/wormhole-explorer/common/utils"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"go.uber.org/zap"
)
// Repository definition
type Repository struct {
db *mongo.Database
logger *zap.Logger
collections struct {
vaas *mongo.Collection
parsedVaa *mongo.Collection
globalTransactions *mongo.Collection
}
}
// NewRepository creates a new Repository.
func NewRepository(db *mongo.Database, logger *zap.Logger) *Repository {
return &Repository{db: db,
logger: logger.With(zap.String("module", "OperationRepository")),
collections: struct {
vaas *mongo.Collection
parsedVaa *mongo.Collection
globalTransactions *mongo.Collection
}{
vaas: db.Collection("vaas"),
parsedVaa: db.Collection("parsedVaa"),
globalTransactions: db.Collection("globalTransactions"),
},
}
}
// FindById returns the operation for the given chainID/emitter/seq.
func (r *Repository) FindById(ctx context.Context, id string) (*OperationDto, error) {
var pipeline mongo.Pipeline
// filter vaas by id
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.D{{Key: "_id", Value: id}}}})
// lookup vaas
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "vaas"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "vaas"}}}})
// lookup globalTransactions
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "globalTransactions"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "globalTransactions"}}}})
// lookup transferPrices
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "transferPrices"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "transferPrices"}}}})
// lookup parsedVaa
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "parsedVaa"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "parsedVaa"}}}})
// add fields
pipeline = append(pipeline, bson.D{{Key: "$addFields", Value: bson.D{
{Key: "payload", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.parsedPayload", 0}}}},
{Key: "vaa", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$vaas", 0}}}},
{Key: "standardizedProperties", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.standardizedProperties", 0}}}},
{Key: "symbol", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.symbol", 0}}}},
{Key: "usdAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.usdAmount", 0}}}},
{Key: "tokenAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.tokenAmount", 0}}}},
}}})
// unset
pipeline = append(pipeline, bson.D{{Key: "$unset", Value: bson.A{"transferPrices", "parsedVaa"}}})
// Execute the aggregation pipeline
cur, err := r.collections.globalTransactions.Aggregate(ctx, pipeline)
if err != nil {
r.logger.Error("failed execute aggregation pipeline", zap.Error(err))
return nil, err
}
// Read results from cursor
var operations []*OperationDto
err = cur.All(ctx, &operations)
if err != nil {
r.logger.Error("failed to decode cursor", zap.Error(err))
return nil, err
}
// Check if there is only one operation
if len(operations) > 1 {
r.logger.Error("invalid number of operations", zap.Int("count", len(operations)))
return nil, fmt.Errorf("invalid number of operations")
}
if len(operations) == 0 {
return nil, errors.ErrNotFound
}
return operations[0], nil
}
type mongoID struct {
Id string `bson:"_id"`
}
// findOperationsIdByAddressOrTxHash returns all operation IDs that match the given address or txHash.
func findOperationsIdByAddressOrTxHash(ctx context.Context, db *mongo.Database, q string, pagination *pagination.Pagination) ([]string, error) {
qHexa := strings.ToLower(q)
if !utils.StartsWith0x(q) {
qHexa = "0x" + strings.ToLower(qHexa)
}
matchGlobalTransactions := bson.D{{Key: "$match", Value: bson.D{{Key: "$or", Value: bson.A{
bson.D{{Key: "originTx.from", Value: bson.M{"$eq": qHexa}}},
bson.D{{Key: "originTx.from", Value: bson.M{"$eq": q}}},
bson.D{{Key: "originTx.nativeTxHash", Value: bson.M{"$eq": qHexa}}},
bson.D{{Key: "originTx.nativeTxHash", Value: bson.M{"$eq": q}}},
bson.D{{Key: "originTx.attribute.value.originTxHash", Value: bson.M{"$eq": qHexa}}},
bson.D{{Key: "originTx.attribute.value.originTxHash", Value: bson.M{"$eq": q}}},
bson.D{{Key: "destinationTx.txHash", Value: bson.M{"$eq": qHexa}}},
bson.D{{Key: "destinationTx.txHash", Value: bson.M{"$eq": q}}},
}}}}}
matchParsedVaa := bson.D{{Key: "$match", Value: bson.D{{Key: "$or", Value: bson.A{
bson.D{{Key: "standardizedProperties.toAddress", Value: bson.M{"$eq": qHexa}}},
bson.D{{Key: "standardizedProperties.toAddress", Value: bson.M{"$eq": q}}},
}}}}}
globalTransactionFilter := bson.D{{Key: "$unionWith", Value: bson.D{{Key: "coll", Value: "globalTransactions"}, {Key: "pipeline", Value: bson.A{matchGlobalTransactions}}}}}
parserFilter := bson.D{{Key: "$unionWith", Value: bson.D{{Key: "coll", Value: "parsedVaa"}, {Key: "pipeline", Value: bson.A{matchParsedVaa}}}}}
group := bson.D{{Key: "$group", Value: bson.D{{Key: "_id", Value: "$_id"}}}}
pipeline := []bson.D{globalTransactionFilter, parserFilter, group}
cur, err := db.Collection("_operationsTemporal").Aggregate(ctx, pipeline)
if err != nil {
return nil, err
}
var documents []mongoID
err = cur.All(ctx, &documents)
if err != nil {
return nil, err
}
var ids []string
for _, doc := range documents {
ids = append(ids, doc.Id)
}
return ids, nil
}
// QueryFilterIsVaaID checks if q is a vaaID.
func QueryFilterIsVaaID(ctx context.Context, q string) []string {
// check if q is a vaaID
isVaaID := regexp.MustCompile(`\d+/\w+/\d+`).MatchString(q)
if isVaaID {
return []string{q}
}
return []string{}
}
// FindAll returns all operations filtered by q.
func (r *Repository) FindAll(ctx context.Context, q string, pagination *pagination.Pagination) ([]*OperationDto, error) {
var pipeline mongo.Pipeline
// get all ids by that match q
if q != "" {
var ids []string
// find all ids that match q (vaaID)
ids = QueryFilterIsVaaID(ctx, q)
if len(ids) == 0 {
// find all ids that match q (address or txHash)
var err error
ids, err = findOperationsIdByAddressOrTxHash(ctx, r.db, q, pagination)
if err != nil {
return nil, err
}
if len(ids) == 0 {
return []*OperationDto{}, nil
}
}
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: ids}}}}}})
}
// sort
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: bson.D{bson.E{Key: "originTx.timestamp", Value: pagination.GetSortInt()}}}})
// Skip initial results
pipeline = append(pipeline, bson.D{{Key: "$skip", Value: pagination.Skip}})
// Limit size of results
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: pagination.Limit}})
// lookup vaas
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "vaas"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "vaas"}}}})
// lookup globalTransactions
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "globalTransactions"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "globalTransactions"}}}})
// lookup transferPrices
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "transferPrices"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "transferPrices"}}}})
// lookup parsedVaa
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "parsedVaa"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "parsedVaa"}}}})
// add fields
pipeline = append(pipeline, bson.D{{Key: "$addFields", Value: bson.D{
{Key: "payload", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.parsedPayload", 0}}}},
{Key: "vaa", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$vaas", 0}}}},
{Key: "standardizedProperties", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.standardizedProperties", 0}}}},
{Key: "symbol", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.symbol", 0}}}},
{Key: "usdAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.usdAmount", 0}}}},
{Key: "tokenAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.tokenAmount", 0}}}},
}}})
// unset
pipeline = append(pipeline, bson.D{{Key: "$unset", Value: bson.A{"transferPrices", "parsedVaa"}}})
// Execute the aggregation pipeline
cur, err := r.collections.globalTransactions.Aggregate(ctx, pipeline)
if err != nil {
r.logger.Error("failed execute aggregation pipeline", zap.Error(err))
return nil, err
}
// Read results from cursor
var operations []*OperationDto
err = cur.All(ctx, &operations)
if err != nil {
r.logger.Error("failed to decode cursor", zap.Error(err))
return nil, err
}
return operations, nil
}


@ -0,0 +1,41 @@
package operations
import (
"context"
"fmt"
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
"github.com/wormhole-foundation/wormhole-explorer/api/types"
"github.com/wormhole-foundation/wormhole/sdk/vaa"
"go.uber.org/zap"
)
type Service struct {
repo *Repository
logger *zap.Logger
}
// NewService creates a new Service.
func NewService(repo *Repository, logger *zap.Logger) *Service {
return &Service{repo: repo, logger: logger.With(zap.String("module", "OperationService"))}
}
// FindById returns the operation for the given chainID/emitter/seq.
func (s *Service) FindById(ctx context.Context, chainID vaa.ChainID,
emitter *types.Address, seq string) (*OperationDto, error) {
id := fmt.Sprintf("%d/%s/%s", chainID, emitter.Hex(), seq)
operation, err := s.repo.FindById(ctx, id)
if err != nil {
return nil, err
}
return operation, nil
}
// FindAll returns all operations filtered by q.
func (s *Service) FindAll(ctx context.Context, q string, pagination *pagination.Pagination) ([]*OperationDto, error) {
operations, err := s.repo.FindAll(ctx, q, pagination)
if err != nil {
return nil, err
}
return operations, nil
}


@ -29,6 +29,7 @@ import (
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/heartbeats"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/infrastructure"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/observations"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/relays"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/transactions"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/vaa"
@ -151,6 +152,7 @@ func main() {
rootLogger,
)
relaysRepo := relays.NewRepository(db.Database, rootLogger)
operationsRepo := operations.NewRepository(db.Database, rootLogger)
// Set up services
rootLogger.Info("initializing services")
@ -162,6 +164,7 @@ func main() {
heartbeatsService := heartbeats.NewService(heartbeatsRepo, rootLogger)
transactionsService := transactions.NewService(transactionsRepo, cache, time.Duration(cfg.Cache.MetricExpiration)*time.Second, rootLogger)
relaysService := relays.NewService(relaysRepo, rootLogger)
operationsService := operations.NewService(operationsRepo, rootLogger)
// Set up a custom error handler
response.SetEnableStackTrace(*cfg)
@ -203,7 +206,7 @@ func main() {
// Set up route handlers
app.Get("/swagger.json", GetSwagger)
wormscan.RegisterRoutes(app, rootLogger, addressService, vaaService, obsService, governorService, infrastructureService, transactionsService, relaysService)
wormscan.RegisterRoutes(app, rootLogger, addressService, vaaService, obsService, governorService, infrastructureService, transactionsService, relaysService, operationsService)
guardian.RegisterRoutes(cfg, app, rootLogger, vaaService, governorService, heartbeatsService)
// Set up gRPC handlers


@ -212,6 +212,11 @@ func ExtractAddressFromPath(c *fiber.Ctx, l *zap.Logger) string {
return c.Params("id")
}
// ExtractQueryParam parses the `q` parameter from query params.
func ExtractQueryParam(c *fiber.Ctx, l *zap.Logger) string {
return c.Query("q")
}
// GetTxHash parses the `txHash` parameter from query params.
func GetTxHash(c *fiber.Ctx, l *zap.Logger) (*types.TxHash, error) {


@ -0,0 +1,88 @@
package operations
import (
"strconv"
"github.com/gofiber/fiber/v2"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
"github.com/wormhole-foundation/wormhole-explorer/api/middleware"
"go.uber.org/zap"
)
// Controller is the controller for the operation resource.
type Controller struct {
srv *operations.Service
logger *zap.Logger
}
// NewController creates a new controller.
func NewController(operationService *operations.Service, logger *zap.Logger) *Controller {
return &Controller{
srv: operationService,
logger: logger.With(zap.String("module", "OperationsController")),
}
}
// FindAll godoc
// @Description Find all operations.
// @Tags wormholescan
// @ID get-operations
// @Param q query string false "search query"
// @Param page query integer false "page number"
// @Param size query integer false "page size"
// @Success 200 {object} []OperationResponse
// @Failure 400
// @Failure 500
// @Router /api/v1/operations [get]
func (c *Controller) FindAll(ctx *fiber.Ctx) error {
// Extract query parameters
pagination, err := middleware.ExtractPagination(ctx)
if err != nil {
return err
}
// Extract q search query parameter
q := middleware.ExtractQueryParam(ctx, c.logger)
// Find operations by q search param.
operations, err := c.srv.FindAll(ctx.Context(), q, pagination)
if err != nil {
return err
}
// build response
response := toListOperationResponse(operations, q, c.logger)
return ctx.JSON(response)
}
// FindById godoc
// @Description Find operations by ID (chainID/emitter/sequence).
// @Tags wormholescan
// @ID get-operation-by-id
// @Param chain_id path integer true "id of the blockchain"
// @Param emitter path string true "address of the emitter"
// @Param seq path integer true "sequence of the VAA"
// @Success 200 {object} OperationResponse
// @Failure 400
// @Failure 500
// @Router /api/v1/operations/{chain_id}/{emitter}/{seq} [get]
func (c *Controller) FindById(ctx *fiber.Ctx) error {
// Extract query params
chainID, emitter, seq, err := middleware.ExtractVAAParams(ctx, c.logger)
if err != nil {
return err
}
// Find operations by chainID, emitter and sequence.
operation, err := c.srv.FindById(ctx.Context(), chainID, emitter, strconv.FormatUint(seq, 10))
if err != nil {
return err
}
// build response
response, err := toOperationResponse(operation, c.logger)
if err != nil {
return err
}
return ctx.JSON(response)
}
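A sketch of exercising the two new endpoints from a client using only the standard library; the host, query value, and IDs are placeholders:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	base := "https://api.example.com" // placeholder host

	// List operations matching an address, tx hash, or VAA id (q is optional).
	listURL := base + "/api/v1/operations?q=0x3ee18b2214aff97000d974cf647e7c347e8fa585"

	// Fetch a single operation by chain/emitter/sequence.
	byIDURL := base + "/api/v1/operations/2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727"

	for _, url := range []string{listURL, byIDURL} {
		resp, err := http.Get(url)
		if err != nil {
			panic(err)
		}
		body, _ := io.ReadAll(resp.Body)
		resp.Body.Close()
		fmt.Println(url, resp.Status, len(body), "bytes")
	}
}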


@ -0,0 +1,298 @@
package operations
import (
"strconv"
"strings"
"time"
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
"github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
"github.com/wormhole-foundation/wormhole-explorer/common/domain"
"github.com/wormhole-foundation/wormhole-explorer/common/utils"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
"go.uber.org/zap"
)
// OperationResponse definition.
type OperationResponse struct {
ID string `json:"id"`
EmitterChain sdk.ChainID `json:"emitterChain"`
EmitterAddress EmitterAddress `json:"emitterAddress"`
Sequence string `json:"sequence"`
Vaa []byte `json:"vaa,omitempty"`
Content *Content `json:"content,omitempty"`
SourceChain *SourceChain `json:"sourceChain,omitempty"`
TargetChain *TargetChain `json:"targetChain,omitempty"`
Data map[string]any `json:"data,omitempty"`
}
// EmitterAddress definition.
type EmitterAddress struct {
Hex string `json:"hex,omitempty"`
Native string `json:"native,omitempty"`
}
// Content definition.
type Content struct {
Payload map[string]any `json:"payload,omitempty"`
StandardizedProperties *operations.StandardizedProperties `json:"standardizedProperties,omitempty"`
}
// SourceChain definition.
type SourceChain struct {
ChainId sdk.ChainID `json:"chainId"`
Timestamp *time.Time `json:"timestamp"`
Transaction Transaction `json:"transaction"`
From string `json:"from"`
Status string `json:"status"`
Data *Data `json:"attribute,omitempty"`
}
// Transaction definition.
type Transaction struct {
TxHash string `json:"txHash"`
SecondTxHash *string `json:"secondTxHash,omitempty"`
}
// TargetChain definition.
type TargetChain struct {
ChainId sdk.ChainID `json:"chainId"`
Timestamp *time.Time `json:"timestamp"`
Transaction Transaction `json:"transaction"`
Status string `json:"status"`
From string `json:"from"`
To string `json:"to"`
}
// Data represents a custom attribute for an origin transaction.
type Data struct {
Type string `bson:"type" json:"type"`
Value map[string]any `bson:"value" json:"value"`
}
type ListOperationResponse struct {
Operations []*OperationResponse `json:"operations"`
Match string `json:"matched"`
}
// toOperationResponse converts an operations.OperationDto to an OperationResponse.
func toOperationResponse(operation *operations.OperationDto, log *zap.Logger) (*OperationResponse, error) {
// Get emitter chain, address and sequence from operation.
chainID, address, sequence, err := getChainEmitterSequence(operation)
if err != nil {
log.Error("Error parsing chainId, address, sequence from operation ID",
zap.Error(err),
zap.String("operationID", operation.ID))
return nil, err
}
// Get emitter native address from chainID and address.
emitterNativeAddress, err := domain.TranslateEmitterAddress(chainID, address)
if err != nil {
log.Warn("failed to translate emitter address",
zap.Stringer("chain", chainID),
zap.String("address", address),
zap.Error(err),
)
return nil, err
}
// Get rawVAA from operation.
var rawVAA []byte
if operation.Vaa != nil {
rawVAA = operation.Vaa.Vaa
}
// Get content from operation.
var content Content
if len(operation.Payload) > 0 || operation.StandardizedProperties != nil {
content = Content{
Payload: operation.Payload,
StandardizedProperties: operation.StandardizedProperties,
}
}
// Get sourceChain and targetChain events
sourceChain, targetChain := getChainEvents(chainID, operation)
r := OperationResponse{
ID: operation.ID,
EmitterChain: chainID,
EmitterAddress: EmitterAddress{
Hex: address,
Native: emitterNativeAddress,
},
Sequence: sequence,
Vaa: rawVAA,
Content: &content,
Data: getAdditionalData(operation),
SourceChain: sourceChain,
TargetChain: targetChain,
}
return &r, nil
}
// getChainEmitterSequence returns the chainID, address, sequence for the given operation.
func getChainEmitterSequence(operation *operations.OperationDto) (sdk.ChainID, string, string, error) {
if operation.Vaa != nil {
return operation.Vaa.EmitterChain, operation.Vaa.EmitterAddr, operation.Vaa.Sequence, nil
} else {
// Get emitter chain, address, sequence by operation ID.
id := strings.Split(operation.ID, "/")
if len(id) != 3 {
return 0, "", "", errors.ErrInternalError
}
chainID, err := strconv.ParseUint(id[0], 10, 16)
if err != nil {
return 0, "", "", err
}
return sdk.ChainID(chainID), id[1], id[2], nil
}
}
func getAdditionalData(operation *operations.OperationDto) map[string]interface{} {
ok := operation.Symbol == "" && operation.TokenAmount == "" && operation.UsdAmount == ""
if ok {
return nil
}
return map[string]interface{}{
"symbol": operation.Symbol,
"tokenAmount": operation.TokenAmount,
"usdAmount": operation.UsdAmount,
}
}
// getChainEvents returns the sourceChain and targetChain events for the given operation.
func getChainEvents(chainID sdk.ChainID, operation *operations.OperationDto) (*SourceChain, *TargetChain) {
if operation.SourceTx == nil && operation.DestinationTx == nil {
return nil, nil
}
// if len(operation.GlobalTransations) == 0 {
// return nil, nil
// }
// build sourceChain
var sourceChain *SourceChain
if operation.SourceTx != nil {
var data *Data
if operation.SourceTx.Attribute != nil {
data = &Data{
Type: operation.SourceTx.Attribute.Type,
Value: operation.SourceTx.Attribute.Value,
}
}
// transactions
var secondTxHash *string
if data != nil {
attributeTxHash, ok := data.Value["originTxHash"]
if ok {
txHash, ok := attributeTxHash.(string)
if ok {
secondTxHash = &txHash
}
}
}
transaction := Transaction{
TxHash: operation.SourceTx.TxHash,
SecondTxHash: secondTxHash,
}
sourceChain = &SourceChain{
ChainId: chainID,
Timestamp: operation.SourceTx.Timestamp,
Transaction: transaction,
From: operation.SourceTx.From,
Status: operation.SourceTx.Status,
Data: data,
}
}
// build targetChain
var targetChain *TargetChain
if operation.DestinationTx != nil {
targetChain = &TargetChain{
ChainId: operation.DestinationTx.ChainID,
Timestamp: operation.DestinationTx.Timestamp,
Transaction: Transaction{
TxHash: operation.DestinationTx.TxHash,
},
Status: operation.DestinationTx.Status,
From: operation.DestinationTx.From,
To: operation.DestinationTx.To,
}
}
return sourceChain, targetChain
}
func toListOperationResponse(operations []*operations.OperationDto, q string, log *zap.Logger) ListOperationResponse {
response := ListOperationResponse{
Operations: make([]*OperationResponse, 0, len(operations)),
}
for i := range operations {
r, err := toOperationResponse(operations[i], log)
if err == nil {
response.Operations = append(response.Operations, r)
}
}
response.Match = buildMatchedField(response, q)
return response
}
func buildMatchedField(operations ListOperationResponse, q string) string {
if q == "" {
return ""
}
if len(operations.Operations) == 0 {
return ""
}
operation := operations.Operations[0]
if operation.ID == q {
return "vaaId"
}
// format q to match values
qHexa := strings.ToLower(q)
if !utils.StartsWith0x(q) {
qHexa = "0x" + strings.ToLower(qHexa)
}
// matched by sourceChain txHash
if operation.SourceChain != nil {
if operation.SourceChain.Transaction.TxHash == q || operation.SourceChain.Transaction.TxHash == qHexa {
return "txHash"
}
if operation.SourceChain.Data != nil {
if operation.SourceChain.Data.Value["OriginTxHash"] == q || operation.SourceChain.Data.Value["originTxHash"] == qHexa {
return "txHash"
}
}
}
// matched by targetChain txHash
if operation.TargetChain != nil {
if operation.TargetChain.Transaction.TxHash == q || operation.TargetChain.Transaction.TxHash == qHexa {
return "txHash"
}
}
// matched by sourceChain from address
if operation.SourceChain != nil {
if operation.SourceChain.From == q || operation.SourceChain.From == qHexa {
return "address"
}
}
// matched by standardizedProperties to address
if operation.Content.StandardizedProperties.ToAddress == q || operation.Content.StandardizedProperties.ToAddress == qHexa {
return "address"
}
return ""
}
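To visualize the response shape these mappers produce, a small sketch that marshals a hand-built OperationResponse; all field values are invented and the import path follows the routes file below:

package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/operations"
	sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
)

func main() {
	now := time.Now()
	op := operations.OperationResponse{
		ID:           "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727",
		EmitterChain: sdk.ChainIDEthereum,
		EmitterAddress: operations.EmitterAddress{
			Hex:    "000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7",
			Native: "0xf890982f9310df57d00f659cf4fd87e65aded8d7",
		},
		Sequence: "162727",
		SourceChain: &operations.SourceChain{
			ChainId:     sdk.ChainIDEthereum,
			Timestamp:   &now,
			Transaction: operations.Transaction{TxHash: "0xcbde0000000000000000000000000000000000000000000000000000deadbeef"},
			From:        "0x3ee18b2214aff97000d974cf647e7c347e8fa585",
			Status:      "confirmed",
		},
	}
	out, _ := json.MarshalIndent(op, "", "  ")
	fmt.Println(string(out))
}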


@ -10,6 +10,7 @@ import (
govsvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/governor"
infrasvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/infrastructure"
obssvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/observations"
opsvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
relayssvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/relays"
trxsvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/transactions"
vaasvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/vaa"
@ -17,6 +18,7 @@ import (
"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/governor"
"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/infrastructure"
"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/observations"
"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/operations"
"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/relays"
"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/transactions"
"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/vaa"
@ -44,6 +46,7 @@ func RegisterRoutes(
infrastructureService *infrasvc.Service,
transactionsService *trxsvc.Service,
relaysService *relayssvc.Service,
operationsService *opsvc.Service,
) {
// Set up controllers
@ -54,6 +57,7 @@ func RegisterRoutes(
infrastructureCtrl := infrastructure.NewController(infrastructureService)
transactionCtrl := transactions.NewController(transactionsService, rootLogger)
relaysCtrl := relays.NewController(relaysService, rootLogger)
opsCtrl := operations.NewController(operationsService, rootLogger)
// Set up route handlers
api := app.Group("/api/v1")
@ -78,6 +82,11 @@ func RegisterRoutes(
api.Get("/transactions", transactionCtrl.ListTransactions)
api.Get("/transactions/:chain/:emitter/:sequence", transactionCtrl.GetTransactionByID)
// operations resource
operations := api.Group("/operations")
operations.Get("/", opsCtrl.FindAll)
operations.Get("/:chain/:emitter/:sequence", opsCtrl.FindById)
// vaas resource
vaas := api.Group("/vaas")
vaas.Use(cache.New(cacheConfig))


@ -26,13 +26,13 @@ func TestNotionalCache_renderRegexp(t *testing.T) {
}
key := nc.renderRegExp()
assert.Equal(t, "*staging-mainnet:WORMSCAN:NOTIONAL:SYMBOL:*", key)
assert.Equal(t, "*staging-mainnet:WORMSCAN:NOTIONAL:TOKEN:*", key)
nc = &NotionalCache{
client: nil,
prefix: "",
}
key = nc.renderRegExp()
assert.Equal(t, "*WORMSCAN:NOTIONAL:SYMBOL:*", key)
assert.Equal(t, "*WORMSCAN:NOTIONAL:TOKEN:*", key)
}


@ -1,63 +0,0 @@
package domain
import (
"encoding/json"
"time"
)
const (
SignedVaaType = "signed-vaa"
PublishedLogMessageType = "published-log-message"
)
type NotificationEvent struct {
TrackID string `json:"trackId"`
Source string `json:"source"`
Type string `json:"type"`
Payload json.RawMessage `json:"payload"`
}
func NewNotificationEvent[T EventPayload](trackID, source, _type string, payload T) (*NotificationEvent, error) {
p, err := json.Marshal(payload)
if err != nil {
return nil, err
}
return &NotificationEvent{
TrackID: trackID,
Source: source,
Type: _type,
Payload: json.RawMessage(p),
}, nil
}
type EventPayload interface {
SignedVaa | PublishedLogMessage
}
func GetEventPayload[T EventPayload](e *NotificationEvent) (T, error) {
var payload T
err := json.Unmarshal(e.Payload, &payload)
return payload, err
}
type SignedVaa struct {
ID string `json:"id"`
EmitterChain uint16 `json:"emitterChain"`
EmitterAddr string `json:"emitterAddr"`
Sequence uint64 `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Timestamp time.Time `json:"timestamp"`
Vaa []byte `json:"vaa"`
TxHash string `json:"txHash"`
Version int `json:"version"`
}
type PublishedLogMessage struct {
ID string `json:"id"`
EmitterChain uint16 `json:"emitterChain"`
EmitterAddr string `json:"emitterAddr"`
Sequence uint64 `json:"sequence"`
Timestamp time.Time `json:"timestamp"`
Vaa []byte `json:"vaa"`
TxHash string `json:"txHash"`
}

common/events/types.go (new file, 74 lines)

@ -0,0 +1,74 @@
package events
import (
"encoding/json"
"time"
)
const (
SignedVaaType = "signed-vaa"
LogMessagePublishedMesageType = "log-message-published"
)
type NotificationEvent struct {
TrackID string `json:"trackId"`
Source string `json:"source"`
Event string `json:"event"`
Version string `json:"version"`
Timestamp time.Time `json:"timestamp"`
Data json.RawMessage `json:"data"`
}
func NewNotificationEvent[T EventData](trackID, source, _type string, data T) (*NotificationEvent, error) {
p, err := json.Marshal(data)
if err != nil {
return nil, err
}
return &NotificationEvent{
TrackID: trackID,
Source: source,
Event: _type,
Data: json.RawMessage(p),
Version: "1",
Timestamp: time.Now(),
}, nil
}
type EventData interface {
SignedVaa | LogMessagePublished
}
func GetEventData[T EventData](e *NotificationEvent) (T, error) {
var data T
err := json.Unmarshal(e.Data, &data)
return data, err
}
type SignedVaa struct {
ID string `json:"id"`
EmitterChain uint16 `json:"emitterChain"`
EmitterAddress string `json:"emitterAddress"`
Sequence uint64 `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Timestamp time.Time `json:"timestamp"`
Vaa []byte `json:"vaa"`
TxHash string `json:"txHash"`
Version int `json:"version"`
}
type LogMessagePublished struct {
ChainID uint16 `json:"chainId"`
Emitter string `json:"emitter"`
TxHash string `json:"txHash"`
BlockHeight string `json:"blockHeight"`
BlockTime time.Time `json:"blockTime"`
Attributes PublishedLogMessageAttributes `json:"attributes"`
}
type PublishedLogMessageAttributes struct {
Sender string `json:"sender"`
Sequence uint64 `json:"sequence"`
Nonce uint32 `json:"nonce"`
Payload string `json:"payload"`
ConsistencyLevel uint8 `json:"consistencyLevel"`
}
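A sketch of round-tripping a signed-vaa event through the new generic helpers; values are illustrative and the import path follows this repository's layout:

package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/wormhole-foundation/wormhole-explorer/common/events"
)

func main() {
	payload := events.SignedVaa{
		ID:             "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727",
		EmitterChain:   2,
		EmitterAddress: "000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7",
		Sequence:       162727,
		Timestamp:      time.Now(),
	}
	evt, err := events.NewNotificationEvent[events.SignedVaa]("track-123", "fly", events.SignedVaaType, payload)
	if err != nil {
		panic(err)
	}

	// Simulate transport: marshal the envelope, unmarshal it, then decode the typed data back out.
	raw, _ := json.Marshal(evt)
	var decoded events.NotificationEvent
	_ = json.Unmarshal(raw, &decoded)
	signed, err := events.GetEventData[events.SignedVaa](&decoded)
	if err != nil {
		panic(err)
	}
	fmt.Println(decoded.Event, signed.ID)
}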


@ -1,4 +1,4 @@
package domain
package events
import (
"encoding/json"
@ -13,8 +13,8 @@ func Test_GetEventPayload(t *testing.T) {
body := `{
"trackId": "63e16082da939a263512a307",
"source": "fly",
"type": "signed-vaa",
"payload": {
"event": "signed-vaa",
"data": {
"id": "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727",
"emitterChain": 2,
"emitterAddr": "000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7",
@ -32,8 +32,8 @@ func Test_GetEventPayload(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, "63e16082da939a263512a307", event.TrackID)
assert.Equal(t, "fly", event.Source)
assert.Equal(t, SignedVaaType, event.Type)
signedVaa, err := GetEventPayload[SignedVaa](&event)
assert.Equal(t, SignedVaaType, event.Event)
signedVaa, err := GetEventData[SignedVaa](&event)
assert.NoError(t, err)
assert.Equal(t, "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727", signedVaa.ID)
}
@ -43,7 +43,7 @@ func Test_GetEventPayload_Error(t *testing.T) {
body := `{
"trackId": "63e16082da939a263512a307",
"source": "fly",
"type": "signed-vaa"
"event": "signed-vaa"
}`
event := NotificationEvent{}
@ -51,7 +51,7 @@ func Test_GetEventPayload_Error(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, "63e16082da939a263512a307", event.TrackID)
assert.Equal(t, "fly", event.Source)
assert.Equal(t, SignedVaaType, event.Type)
_, err = GetEventPayload[SignedVaa](&event)
assert.Equal(t, SignedVaaType, event.Event)
_, err = GetEventData[SignedVaa](&event)
assert.Error(t, err)
}

common/events/vaa.go (new file, 35 lines)

@ -0,0 +1,35 @@
package events
import (
"encoding/hex"
"fmt"
"strings"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
)
func CreateUnsignedVAA(plm *LogMessagePublished) (*sdk.VAA, error) {
address, err := sdk.StringToAddress(plm.Attributes.Sender)
if err != nil {
return nil, fmt.Errorf("error converting emitter address: %w", err)
}
payload, err := hex.DecodeString(strings.TrimPrefix(plm.Attributes.Payload, "0x"))
if err != nil {
return nil, fmt.Errorf("error converting payload: %w", err)
}
vaa := sdk.VAA{
Version: sdk.SupportedVAAVersion,
GuardianSetIndex: 1,
EmitterChain: sdk.ChainID(plm.ChainID),
EmitterAddress: address,
Sequence: plm.Attributes.Sequence,
Timestamp: plm.BlockTime,
Payload: payload,
Nonce: plm.Attributes.Nonce,
ConsistencyLevel: plm.Attributes.ConsistencyLevel,
}
return &vaa, nil
}
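A short sketch of what CreateUnsignedVAA yields: the VAA carries no signatures and a fixed guardian-set index, so callers only rely on its identifying fields. Illustrative values, assuming the events, fmt, and time packages are imported:

plm := events.LogMessagePublished{
	ChainID:   2,
	BlockTime: time.Now(),
	Attributes: events.PublishedLogMessageAttributes{
		Sender:           "000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7",
		Sequence:         162727,
		Payload:          "0x01",
		ConsistencyLevel: 15,
	},
}
unsigned, err := events.CreateUnsignedVAA(&plm)
if err != nil {
	panic(err)
}
fmt.Println(unsigned.MessageID(), len(unsigned.Signatures)) // prints the chain/emitter/sequence id and 0 signatures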


@ -1,21 +1,20 @@
package infrastructure
package health
import (
"fmt"
"github.com/gofiber/fiber/v2"
"github.com/wormhole-foundation/wormhole-explorer/common/health"
"go.uber.org/zap"
)
// Controller definition.
type Controller struct {
checks []health.Check
checks []Check
logger *zap.Logger
}
// NewController creates a Controller instance.
func NewController(checks []health.Check, logger *zap.Logger) *Controller {
func NewController(checks []Check, logger *zap.Logger) *Controller {
return &Controller{checks: checks, logger: logger}
}


@ -45,10 +45,12 @@ require (
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
github.com/deepmap/oapi-codegen v1.12.4 // indirect
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/ethereum/go-ethereum v1.11.3 // indirect
github.com/fatih/color v1.9.0 // indirect
github.com/gagliardetto/binary v0.7.7 // indirect
github.com/gagliardetto/treeout v0.1.4 // indirect
github.com/go-redis/redis/v8 v8.11.5 // indirect
github.com/gofiber/adaptor/v2 v2.1.31 // indirect
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
github.com/golang/protobuf v1.5.2 // indirect


@ -133,6 +133,8 @@ github.com/deepmap/oapi-codegen v1.12.4/go.mod h1:3lgHGMu6myQ2vqbbTXH2H1o4eXFTGn
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79 h1:+HRtcJejUYA/2rnyTMbOaZ4g7f4aVuFduTV/03dbpLY=
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79/go.mod h1:V+ED4kT/t/lKtH99JQmKIb0v9WL3VaYkJ36CfHlVECI=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@ -144,6 +146,7 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
github.com/gagliardetto/binary v0.7.7 h1:QZpT38+sgoPg+TIQjH94sLbl/vX+nlIRA37pEyOsjfY=
github.com/gagliardetto/binary v0.7.7/go.mod h1:mUuay5LL8wFVnIlecHakSZMvcdqfs+CsotR5n77kyjM=
github.com/gagliardetto/gofuzz v1.2.2/go.mod h1:bkH/3hYLZrMLbfYWA0pWzXmi5TTRZnu4pMGZBkqMKvY=
@ -163,6 +166,8 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY=
github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@ -364,12 +369,14 @@ github.com/near/borsh-go v0.3.1 h1:ukNbhJlPKxfua0/nIuMZhggSU8zvtRP/VyC25LLqPUA=
github.com/near/borsh-go v0.3.1/go.mod h1:NeMochZp7jN/pYFuxLkrZtmLqbADmnp/y1+/dL+AsyQ=
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19 h1:JernwK3Bgd5x+UJPV6S2LPYoBF+DFOYBoQ5JeJPVBNc=
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19/go.mod h1:4OjcxgwdXzezqytxN534MooNmrxRD50geWZxTD7845s=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
@ -807,7 +814,6 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df h1:5Pf6pFKu98ODmgnpvkJ3kFUOQGGLIzLIkbzUHp47618=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
@ -899,6 +905,7 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View File

@ -198,7 +198,7 @@ func TestCheckTxShouldBeUpdated(t *testing.T) {
inputGetGlobalTransactionByIDFunc: func(ctx context.Context, id string) (storage.TransactionUpdate, error) {
return storage.TransactionUpdate{}, storage.ErrDocNotFound
},
expectedUpdate: true,
expectedUpdate: false,
expectedError: ErrInvalidTxStatus,
},
}
@ -208,7 +208,7 @@ func TestCheckTxShouldBeUpdated(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
update, err := checkTxShouldBeUpdated(context.Background(), tc.inputTx, tc.inputGetGlobalTransactionByIDFunc)
if update != tc.expectedUpdate {
t.Errorf("expected update %v, got %v", tc.expectedUpdate, update)
t.Errorf("%s - expected update %v, got %v", tc.name, tc.expectedUpdate, update)
}
if err != tc.expectedError {
t.Errorf("expected error %v, got %v", tc.expectedError, err)

View File

@ -46,10 +46,21 @@ spec:
value: "8000"
- name: LOG_LEVEL
value: "INFO"
- name: SQS_URL
value: {{ .SQS_URL }}
- name: PIPELINE_SQS_URL
valueFrom:
configMapKeyRef:
name: analytics
key: pipeline-sqs-url
- name: NOTIFICATIONS_SQS_URL
valueFrom:
configMapKeyRef:
name: analytics
key: notifications-sqs-url
- name: AWS_REGION
value: {{ .SQS_AWS_REGION }}
valueFrom:
configMapKeyRef:
name: analytics
key: aws-region
- name: PPROF_ENABLED
value: "{{ .PPROF_ENABLED }}"
- name: P2P_NETWORK

View File

@ -0,0 +1,10 @@
---
kind: ConfigMap
apiVersion: v1
metadata:
name: analytics
namespace: {{ .NAMESPACE }}
data:
aws-region: {{ .SQS_AWS_REGION }}
pipeline-sqs-url: {{ .PIPELINE_SQS_URL }}
notifications-sqs-url: {{ .NOTIFICATIONS_SQS_URL }}

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=64Mi
RESOURCES_LIMITS_CPU=200m
RESOURCES_REQUESTS_MEMORY=32Mi
RESOURCES_REQUESTS_CPU=100m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=mainnet
PPROF_ENABLED=false

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=128Mi
RESOURCES_LIMITS_CPU=200m
RESOURCES_REQUESTS_MEMORY=64Mi
RESOURCES_REQUESTS_CPU=100m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=testnet
PPROF_ENABLED=false

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=64Mi
RESOURCES_LIMITS_CPU=200m
RESOURCES_REQUESTS_MEMORY=32Mi
RESOURCES_REQUESTS_CPU=100m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=mainnet
PPROF_ENABLED=true

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=128Mi
RESOURCES_LIMITS_CPU=200m
RESOURCES_REQUESTS_MEMORY=64Mi
RESOURCES_REQUESTS_CPU=100m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=testnet
PPROF_ENABLED=false

View File

@ -0,0 +1,10 @@
---
kind: ConfigMap
apiVersion: v1
metadata:
name: parser
namespace: {{ .NAMESPACE }}
data:
aws-region: {{ .SQS_AWS_REGION }}
pipeline-sqs-url: {{ .PIPELINE_SQS_URL }}
notifications-sqs-url: {{ .NOTIFICATIONS_SQS_URL }}

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=256Mi
RESOURCES_LIMITS_CPU=500m
RESOURCES_REQUESTS_MEMORY=128Mi
RESOURCES_REQUESTS_CPU=250m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan
VAA_PAYLOAD_PARSER_TIMEOUT=10

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=30Mi
RESOURCES_LIMITS_CPU=20m
RESOURCES_REQUESTS_MEMORY=15Mi
RESOURCES_REQUESTS_CPU=10m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan-testnet
VAA_PAYLOAD_PARSER_TIMEOUT=10

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=256Mi
RESOURCES_LIMITS_CPU=500m
RESOURCES_REQUESTS_MEMORY=128Mi
RESOURCES_REQUESTS_CPU=250m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan
VAA_PAYLOAD_PARSER_TIMEOUT=10

View File

@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=30Mi
RESOURCES_LIMITS_CPU=20m
RESOURCES_REQUESTS_MEMORY=15Mi
RESOURCES_REQUESTS_CPU=10m
SQS_URL=
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan-testnet
VAA_PAYLOAD_PARSER_TIMEOUT=10

View File

@ -56,10 +56,21 @@ spec:
configMapKeyRef:
name: config
key: mongo-database
- name: SQS_URL
value: {{ .SQS_URL }}
- name: PIPELINE_SQS_URL
valueFrom:
configMapKeyRef:
name: parser
key: pipeline-sqs-url
- name: NOTIFICATIONS_SQS_URL
valueFrom:
configMapKeyRef:
name: parser
key: notifications-sqs-url
- name: AWS_REGION
value: {{ .SQS_AWS_REGION }}
valueFrom:
configMapKeyRef:
name: parser
key: aws-region
- name: VAA_PAYLOAD_PARSER_URL
value: {{ .VAA_PAYLOAD_PARSER_URL }}
- name: VAA_PAYLOAD_PARSER_TIMEOUT

View File

@ -6,4 +6,5 @@ metadata:
namespace: {{ .NAMESPACE }}
data:
aws-region: {{ .SQS_AWS_REGION }}
pipeline-sqs-url: {{ .PIPELINE_SQS_URL }}
pipeline-sqs-url: {{ .PIPELINE_SQS_URL }}
notifications-sqs-url: {{ .NOTIFICATIONS_SQS_URL }}

View File

@ -8,6 +8,7 @@ RESOURCES_LIMITS_CPU=500m
RESOURCES_REQUESTS_MEMORY=128Mi
RESOURCES_REQUESTS_CPU=250m
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=mainnet
AWS_IAM_ROLE=

View File

@ -8,6 +8,7 @@ RESOURCES_LIMITS_CPU=20m
RESOURCES_REQUESTS_MEMORY=15Mi
RESOURCES_REQUESTS_CPU=10m
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=testnet
AWS_IAM_ROLE=

View File

@ -8,6 +8,7 @@ RESOURCES_LIMITS_CPU=60m
RESOURCES_REQUESTS_MEMORY=15Mi
RESOURCES_REQUESTS_CPU=40m
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=mainnet
AWS_IAM_ROLE=

View File

@ -8,6 +8,7 @@ RESOURCES_LIMITS_CPU=20m
RESOURCES_REQUESTS_MEMORY=15Mi
RESOURCES_REQUESTS_CPU=10m
PIPELINE_SQS_URL=
NOTIFICATIONS_SQS_URL=
SQS_AWS_REGION=
P2P_NETWORK=testnet
AWS_IAM_ROLE=

View File

@ -61,6 +61,11 @@ spec:
configMapKeyRef:
name: tx-tracker
key: pipeline-sqs-url
- name: NOTIFICATIONS_SQS_URL
valueFrom:
configMapKeyRef:
name: tx-tracker
key: notifications-sqs-url
- name: AWS_REGION
valueFrom:
configMapKeyRef:

View File

@ -69,7 +69,7 @@ func newSNSProducer(ctx context.Context, cfg WorkerConfiguration, alertClient al
// newVAATopicProducerFunc creates a new VAA topic producer function from the given configuration.
func newVAATopicProducerFunc(ctx context.Context, cfg WorkerConfiguration, alertClient alert.AlertClient, metricsClient metrics.Metrics, logger *zap.Logger) (producer.PushFunc, error) {
if !cfg.NotifyEnabled {
return func(context.Context, *producer.NotificationEvent) error {
return func(context.Context, *producer.Notification) error {
return nil
}, nil
}

View File

@ -216,7 +216,7 @@ func newMetrics(enviroment string) metrics.Metrics {
// Creates a callback to publish VAA messages to a redis pubsub
func newVAARedisProducerFunc(ctx context.Context, isLocal bool, logger *zap.Logger) (producer.PushFunc, error) {
if isLocal {
return func(context.Context, *producer.NotificationEvent) error {
return func(context.Context, *producer.Notification) error {
return nil
}, nil
}

View File

@ -1,40 +0,0 @@
package producer
import (
"context"
"time"
)
// PushFunc is a function to push VAAEvent.
type PushFunc func(context.Context, *NotificationEvent) error
type NotificationEvent struct {
TrackID string `json:"trackId"`
Source string `json:"source"`
Type string `json:"type"`
Payload SignedVaa `json:"payload"`
}
type SignedVaa struct {
ID string `json:"id"`
EmitterChain uint16 `json:"emitterChain"`
EmitterAddr string `json:"emitterAddr"`
Sequence uint64 `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Timestamp time.Time `json:"timestamp"`
Vaa []byte `json:"vaa"`
TxHash string `json:"txHash"`
Version int `json:"version"`
}
// NewComposite returns a PushFunc that calls all the given producers.
func NewComposite(producers ...PushFunc) PushFunc {
return func(ctx context.Context, event *NotificationEvent) error {
for _, producer := range producers {
if err := producer(ctx, event); err != nil {
return err
}
}
return nil
}
}

fly/producer/types.go (new file, 29 lines)
View File

@ -0,0 +1,29 @@
package producer
import (
"context"
"github.com/wormhole-foundation/wormhole-explorer/common/events"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
)
// PushFunc is a function that pushes a Notification.
type PushFunc func(context.Context, *Notification) error
type Notification struct {
ID string
Event *events.NotificationEvent
EmitterChain sdk.ChainID
}
// NewComposite returns a PushFunc that calls all the given producers.
func NewComposite(producers ...PushFunc) PushFunc {
return func(ctx context.Context, event *Notification) error {
for _, producer := range producers {
if err := producer(ctx, event); err != nil {
return err
}
}
return nil
}
}
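
A usage sketch for the new Notification type and NewComposite. The log producer below is hypothetical and stands in for the Redis and SNS producers wired up elsewhere in this commit; ctx and event (an *events.NotificationEvent) are assumed to be in scope.

// Sketch: fan a single notification out to several producers.
logPush := func(ctx context.Context, n *producer.Notification) error {
	log.Printf("notify %s (chain %d)", n.ID, n.EmitterChain)
	return nil
}
push := producer.NewComposite(logPush /*, redisProducer.Push, snsProducer.Push */)

err := push(ctx, &producer.Notification{
	ID:           "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727",
	Event:        event,
	EmitterChain: sdk.ChainIDEthereum,
})
if err != nil {
	log.Printf("notification failed: %v", err)
}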

View File

@ -18,6 +18,6 @@ func NewVAAInMemory(logger *zap.Logger) *VAAInMemory {
}
// Push pushes a VAAEvent to memory.
func (m *VAAInMemory) Push(ctx context.Context, event *NotificationEvent) error {
func (m *VAAInMemory) Push(context.Context, *Notification) error {
return nil
}

View File

@ -23,8 +23,8 @@ func NewRedisProducer(c *redis.Client, channel string) *RedisProducer {
}
// Push pushes a NotificationEvent to redis.
func (p *RedisProducer) Push(ctx context.Context, event *NotificationEvent) error {
body, err := json.Marshal(event)
func (p *RedisProducer) Push(ctx context.Context, n *Notification) error {
body, err := json.Marshal(n.Event)
if err != nil {
return err
}

View File

@ -31,17 +31,16 @@ func NewSNSProducer(producer *sns.Producer, alertClient alert.AlertClient, metri
}
// Push pushes a VAAEvent to SNS.
func (p *SNSProducer) Push(ctx context.Context, event *NotificationEvent) error {
body, err := json.Marshal(event)
func (p *SNSProducer) Push(ctx context.Context, n *Notification) error {
body, err := json.Marshal(n.Event)
if err != nil {
return err
}
deduplicationID := fmt.Sprintf("gossip-event-%s", event.Payload.ID)
p.logger.Debug("Publishing signedVaa event", zap.String("groupID", event.Payload.ID))
err = p.producer.SendMessage(ctx, event.Payload.ID, deduplicationID, string(body))
deduplicationID := fmt.Sprintf("gossip-event-%s", n.ID)
p.logger.Debug("Publishing signedVaa event", zap.String("groupID", n.ID))
err = p.producer.SendMessage(ctx, n.ID, deduplicationID, string(body))
if err == nil {
p.metrics.IncVaaSendNotification(vaa.ChainID(event.Payload.EmitterChain))
p.metrics.IncVaaSendNotification(vaa.ChainID(n.EmitterChain))
}
return err
}

View File

@ -12,6 +12,7 @@ import (
eth_common "github.com/ethereum/go-ethereum/common"
"github.com/wormhole-foundation/wormhole-explorer/common/client/alert"
"github.com/wormhole-foundation/wormhole-explorer/common/domain"
"github.com/wormhole-foundation/wormhole-explorer/common/events"
flyAlert "github.com/wormhole-foundation/wormhole-explorer/fly/internal/alert"
"github.com/wormhole-foundation/wormhole-explorer/fly/internal/metrics"
"github.com/wormhole-foundation/wormhole-explorer/fly/internal/track"
@ -120,24 +121,23 @@ func (s *Repository) UpsertVaa(ctx context.Context, v *vaa.VAA, serializedVaa []
s.updateVAACount(v.EmitterChain)
// send signedvaa event to topic.
event := &producer.NotificationEvent{
TrackID: track.GetTrackID(v.MessageID()),
Source: "fly",
Type: domain.SignedVaaType,
Payload: producer.SignedVaa{
event, newErr := events.NewNotificationEvent[events.SignedVaa](
track.GetTrackID(v.MessageID()), "fly", events.SignedVaaType,
events.SignedVaa{
ID: v.MessageID(),
EmitterChain: uint16(v.EmitterChain),
EmitterAddr: v.EmitterAddress.String(),
EmitterAddress: v.EmitterAddress.String(),
Sequence: v.Sequence,
GuardianSetIndex: v.GuardianSetIndex,
Timestamp: v.Timestamp,
Vaa: serializedVaa,
TxHash: vaaDoc.TxHash,
Version: int(v.Version),
},
})
if newErr != nil {
return newErr
}
err = s.afterUpdate(ctx, event)
err = s.afterUpdate(ctx, &producer.Notification{ID: v.MessageID(), Event: event, EmitterChain: v.EmitterChain})
}
return err
}
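
For reference, a minimal sketch of the generic events.NewNotificationEvent constructor used above, assuming it JSON-encodes the typed payload and stores it on the event (the Data field name and the plain string parameter types are assumptions; only TrackID, Source and Event are confirmed by the tests in this commit):

// Sketch only; the real constructor lives in common/events.
func NewNotificationEvent[T any](trackID, source, eventType string, data T) (*NotificationEvent, error) {
	raw, err := json.Marshal(data)
	if err != nil {
		return nil, err
	}
	return &NotificationEvent{
		TrackID: trackID,
		Source:  source,
		Event:   eventType,
		Data:    raw,
	}, nil
}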

View File

@ -2,6 +2,7 @@ package backfiller
import (
"context"
"fmt"
"time"
"github.com/wormhole-foundation/wormhole-explorer/common/client/alert"
@ -22,7 +23,7 @@ func Run(config *config.BackfillerConfiguration) {
logger := logger.New("wormhole-explorer-parser", logger.WithLevel(config.LogLevel))
logger.Info("Starting wormhole-explorer-parser as backfiller ...")
logger.Info("Starting wormhole-explorer-parser as backfiller ...")
startTime, err := time.Parse(time.RFC3339, config.StartTime)
if err != nil {
@ -58,7 +59,7 @@ func Run(config *config.BackfillerConfiguration) {
vaaRepository := vaa.NewRepository(db.Database, logger)
//create a processor
processor := processor.New(parserVAAAPIClient, parserRepository, alert.NewDummyClient(), metrics.NewDummyMetrics(), logger)
eventProcessor := processor.New(parserVAAAPIClient, parserRepository, alert.NewDummyClient(), metrics.NewDummyMetrics(), logger)
logger.Info("Started wormhole-explorer-parser as backfiller")
@ -81,7 +82,8 @@ func Run(config *config.BackfillerConfiguration) {
}
for _, v := range vaas {
logger.Debug("Processing vaa", zap.String("id", v.ID))
_, err := processor.Process(rootCtx, v.Vaa)
p := &processor.Params{Vaa: v.Vaa, TrackID: fmt.Sprintf("backfiller-%s", v.ID)}
_, err := eventProcessor.Process(rootCtx, p)
if err != nil {
logger.Error("Failed to process vaa", zap.String("id", v.ID), zap.Error(err))
}

View File

@ -14,6 +14,7 @@ import (
"github.com/wormhole-foundation/wormhole-explorer/common/client/alert"
vaaPayloadParser "github.com/wormhole-foundation/wormhole-explorer/common/client/parser"
"github.com/wormhole-foundation/wormhole-explorer/common/dbutil"
"github.com/wormhole-foundation/wormhole-explorer/common/health"
"github.com/wormhole-foundation/wormhole-explorer/common/logger"
"github.com/wormhole-foundation/wormhole-explorer/parser/config"
"github.com/wormhole-foundation/wormhole-explorer/parser/consumer"
@ -26,6 +27,7 @@ import (
"github.com/wormhole-foundation/wormhole-explorer/parser/parser"
"github.com/wormhole-foundation/wormhole-explorer/parser/processor"
"github.com/wormhole-foundation/wormhole-explorer/parser/queue"
"go.mongodb.org/mongo-driver/mongo"
"go.uber.org/zap"
)
@ -82,20 +84,36 @@ func Run() {
logger.Fatal("failed to create parse vaa api client")
}
// get consumer function.
sqsConsumer, vaaConsumeFunc := newVAAConsume(rootCtx, config, metrics, logger)
// get vaa consumer function.
vaaConsumeFunc := newVAAConsume(rootCtx, config, metrics, logger)
// get notification consumer function.
notificationConsumeFunc := newNotificationConsume(rootCtx, config, metrics, logger)
// create a repository
repository := parser.NewRepository(db.Database, logger)
// get health check functions.
logger.Info("creating health check functions...")
healthChecks, err := newHealthChecks(rootCtx, config, db.Database)
if err != nil {
logger.Fatal("failed to create health checks", zap.Error(err))
}
//create a processor
processor := processor.New(parserVAAAPIClient, repository, alertClient, metrics, logger)
// create and start a consumer
consumer := consumer.New(vaaConsumeFunc, processor.Process, metrics, logger)
consumer.Start(rootCtx)
// create and start a vaaConsumer
vaaConsumer := consumer.New(vaaConsumeFunc, processor.Process, metrics, logger)
vaaConsumer.Start(rootCtx)
// create and start a notificationConsumer
notificationConsumer := consumer.New(notificationConsumeFunc, processor.Process, metrics, logger)
notificationConsumer.Start(rootCtx)
vaaRepository := vaa.NewRepository(db.Database, logger)
vaaController := vaa.NewController(vaaRepository, processor.Process, logger)
server := infrastructure.NewServer(logger, config.Port, config.PprofEnabled, config.IsQueueConsumer(), sqsConsumer, db.Database, vaaController)
server := infrastructure.NewServer(logger, config.Port, config.PprofEnabled, vaaController, healthChecks...)
server.Start()
logger.Info("Started wormhole-explorer-parser")
@ -149,25 +167,36 @@ func newAwsConfig(appCtx context.Context, cfg *config.ServiceConfiguration) (aws
return awsconfig.LoadDefaultConfig(appCtx, awsconfig.WithRegion(region))
}
func newVAAConsume(appCtx context.Context, config *config.ServiceConfiguration, metrics metrics.Metrics, logger *zap.Logger) (*sqs.Consumer, queue.VAAConsumeFunc) {
sqsConsumer, err := newSQSConsumer(appCtx, config)
func newVAAConsume(appCtx context.Context, config *config.ServiceConfiguration, metrics metrics.Metrics, logger *zap.Logger) queue.ConsumeFunc {
sqsConsumer, err := newSQSConsumer(appCtx, config, config.PipelineSQSUrl)
if err != nil {
logger.Fatal("failed to create sqs consumer", zap.Error(err))
}
filterConsumeFunc := newFilterFunc(config)
vaaQueue := queue.NewVAASQS(sqsConsumer, filterConsumeFunc, metrics, logger)
return sqsConsumer, vaaQueue.Consume
vaaQueue := queue.NewEventSQS(sqsConsumer, queue.NewVaaConverter(logger), filterConsumeFunc, metrics, logger)
return vaaQueue.Consume
}
func newNotificationConsume(appCtx context.Context, config *config.ServiceConfiguration, metrics metrics.Metrics, logger *zap.Logger) queue.ConsumeFunc {
sqsConsumer, err := newSQSConsumer(appCtx, config, config.NotificationsSQSUrl)
if err != nil {
logger.Fatal("failed to create sqs consumer", zap.Error(err))
}
filterConsumeFunc := newFilterFunc(config)
vaaQueue := queue.NewEventSQS(sqsConsumer, queue.NewNotificationEvent(logger), filterConsumeFunc, metrics, logger)
return vaaQueue.Consume
}
// Create a new SQS consumer.
func newSQSConsumer(appCtx context.Context, config *config.ServiceConfiguration) (*sqs.Consumer, error) {
func newSQSConsumer(appCtx context.Context, config *config.ServiceConfiguration, sqsUrl string) (*sqs.Consumer, error) {
awsconfig, err := newAwsConfig(appCtx, config)
if err != nil {
return nil, err
}
return sqs.NewConsumer(awsconfig, config.SQSUrl,
return sqs.NewConsumer(awsconfig, sqsUrl,
sqs.WithMaxMessages(10),
sqs.WithVisibilityTimeout(120))
}
@ -201,3 +230,22 @@ func newAlertClient(cfg *config.ServiceConfiguration) (alert.AlertClient, error)
return alert.NewAlertService(alertConfig, parserAlert.LoadAlerts)
}
func newHealthChecks(
ctx context.Context,
config *config.ServiceConfiguration,
db *mongo.Database,
) ([]health.Check, error) {
awsConfig, err := newAwsConfig(ctx, config)
if err != nil {
return nil, err
}
healthChecks := []health.Check{
health.SQS(awsConfig, config.PipelineSQSUrl),
health.SQS(awsConfig, config.NotificationsSQSUrl),
health.Mongo(db),
}
return healthChecks, nil
}

View File

@ -26,7 +26,8 @@ type ServiceConfiguration struct {
AwsAccessKeyID string `env:"AWS_ACCESS_KEY_ID"`
AwsSecretAccessKey string `env:"AWS_SECRET_ACCESS_KEY"`
AwsRegion string `env:"AWS_REGION"`
SQSUrl string `env:"SQS_URL"`
PipelineSQSUrl string `env:"PIPELINE_SQS_URL"`
NotificationsSQSUrl string `env:"NOTIFICATIONS_SQS_URL"`
VaaPayloadParserURL string `env:"VAA_PAYLOAD_PARSER_URL, required"`
VaaPayloadParserTimeout int64 `env:"VAA_PAYLOAD_PARSER_TIMEOUT, required"`
PprofEnabled bool `env:"PPROF_ENABLED,default=false"`
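
Both queue URLs are now read from the environment at startup. A minimal loading sketch using the sethvargo/go-envconfig library already listed in the parser's go.mod (the service may construct its configuration differently in practice):

// Sketch: load the parser configuration and log the queue split.
var cfg config.ServiceConfiguration
if err := envconfig.Process(ctx, &cfg); err != nil {
	log.Fatalf("load configuration: %v", err)
}
log.Printf("pipeline queue: %s, notifications queue: %s", cfg.PipelineSQSUrl, cfg.NotificationsSQSUrl)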

View File

@ -11,14 +11,14 @@ import (
// Consumer consumer struct definition.
type Consumer struct {
consume queue.VAAConsumeFunc
consume queue.ConsumeFunc
process processor.ProcessorFunc
metrics metrics.Metrics
logger *zap.Logger
}
// New creates a new vaa consumer.
func New(consume queue.VAAConsumeFunc, process processor.ProcessorFunc, metrics metrics.Metrics, logger *zap.Logger) *Consumer {
func New(consume queue.ConsumeFunc, process processor.ProcessorFunc, metrics metrics.Metrics, logger *zap.Logger) *Consumer {
return &Consumer{consume: consume, process: process, metrics: metrics, logger: logger}
}
@ -30,19 +30,28 @@ func (c *Consumer) Start(ctx context.Context) {
// check if the message is expired.
if msg.IsExpired() {
c.logger.Warn("Message with vaa expired", zap.String("id", event.ID))
c.logger.Warn("Event expired", zap.String("id", event.ID))
msg.Failed()
continue
}
c.metrics.IncVaaUnexpired(event.ChainID)
_, err := c.process(ctx, event.Vaa)
params := &processor.Params{
TrackID: event.TrackID,
Vaa: event.Vaa,
}
_, err := c.process(ctx, params)
if err != nil {
c.logger.Error("Error processing parsed vaa",
c.logger.Error("Error processing event",
zap.String("trackId", event.TrackID),
zap.String("id", event.ID),
zap.Error(err))
msg.Failed()
continue
} else {
c.logger.Debug("Event processed",
zap.String("trackId", event.TrackID),
zap.String("id", event.ID))
}
msg.Done()
}

View File

@ -7,7 +7,7 @@ require (
github.com/joho/godotenv v1.4.0 // Configuration environment
github.com/pkg/errors v0.9.1
github.com/sethvargo/go-envconfig v0.6.0 // Configuration environment
github.com/stretchr/testify v1.8.1 // indirect; Testing
github.com/stretchr/testify v1.8.1 // Testing
github.com/wormhole-foundation/wormhole/sdk v0.0.0-20230426150516-e695fad0bed8
go.mongodb.org/mongo-driver v1.11.2
go.uber.org/zap v1.24.0
@ -33,6 +33,7 @@ require (
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.22 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.3.28 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.0.2 // indirect
github.com/aws/aws-sdk-go-v2/service/sns v1.20.2 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.1.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.1.1 // indirect
github.com/aws/smithy-go v1.13.5 // indirect
@ -41,8 +42,12 @@ require (
github.com/btcsuite/btcd/btcec/v2 v2.2.0 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cosmos/btcutil v1.0.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
github.com/deepmap/oapi-codegen v1.8.2 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/ethereum/go-ethereum v1.10.21 // indirect
github.com/go-redis/redis/v8 v8.11.5 // indirect
github.com/gofiber/adaptor/v2 v2.1.31 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/snappy v0.0.4 // indirect
@ -51,6 +56,8 @@ require (
github.com/hashicorp/go-retryablehttp v0.5.1 // indirect
github.com/holiman/uint256 v1.2.1 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/influxdata/influxdb-client-go/v2 v2.12.2 // indirect
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097 // indirect
github.com/klauspost/compress v1.16.3 // indirect
github.com/konsorten/go-windows-terminal-sequences v1.0.3 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
@ -61,6 +68,7 @@ require (
github.com/mr-tron/base58 v1.2.0 // indirect
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19 // indirect
github.com/philhofer/fwd v1.1.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.3.0 // indirect
github.com/prometheus/common v0.42.0 // indirect
github.com/prometheus/procfs v0.10.1 // indirect
@ -83,10 +91,12 @@ require (
go.uber.org/goleak v1.1.12 // indirect
go.uber.org/multierr v1.8.0 // indirect
golang.org/x/crypto v0.7.0 // indirect
golang.org/x/net v0.8.0 // indirect
golang.org/x/sync v0.2.0 // indirect
golang.org/x/sys v0.9.0 // indirect
golang.org/x/text v0.8.0 // indirect
google.golang.org/protobuf v1.30.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
// Needed for cosmos-sdk based chains. See

View File

@ -66,6 +66,8 @@ github.com/aws/aws-sdk-go-v2/internal/ini v1.3.28 h1:KeTxcGdNnQudb46oOl4d90f2I33
github.com/aws/aws-sdk-go-v2/internal/ini v1.3.28/go.mod h1:yRZVr/iT0AqyHeep00SZ4YfBAKojXz08w3XMBscdi0c=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.0.2 h1:4AH9fFjUlVktQMznF+YN33aWNXaR4VgDXyP28qokJC0=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.0.2/go.mod h1:45MfaXZ0cNbeuT0KQ1XJylq8A6+OpVV2E5kvY/Kq+u8=
github.com/aws/aws-sdk-go-v2/service/sns v1.20.2 h1:MU/v2qtfGjKexJ09BMqE8pXo9xYMhT13FXjKgFc0cFw=
github.com/aws/aws-sdk-go-v2/service/sns v1.20.2/go.mod h1:VN2n9SOMS1lNbh5YD7o+ho0/rgfifSrK//YYNiVVF5E=
github.com/aws/aws-sdk-go-v2/service/sqs v1.20.2 h1:CSNIo1jiw7KrkdgZjCOnotu6yuB3IybhKLuSQrTLNfo=
github.com/aws/aws-sdk-go-v2/service/sqs v1.20.2/go.mod h1:1ttxGjUHZliCQMpPss1sU5+Ph/5NvdMFRzr96bv8gm0=
github.com/aws/aws-sdk-go-v2/service/sso v1.1.1 h1:37QubsarExl5ZuCBlnRP+7l1tNwZPBSTqpTBrPH98RU=
@ -97,18 +99,28 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk=
github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 h1:HbphB4TFFXpv7MNrT52FGrrgVXF1owhMVTHFZIlnvd4=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0/go.mod h1:DZGJHZMqrU4JJqFAWUS2UO1+lbSKsdiOoYi9Zzey7Fc=
github.com/deepmap/oapi-codegen v1.8.2 h1:SegyeYGcdi0jLLrpbCMoJxnUUn8GBXHsvr4rbzjuhfU=
github.com/deepmap/oapi-codegen v1.8.2/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/ethereum/go-ethereum v1.10.21 h1:5lqsEx92ZaZzRyOqBEXux4/UR06m296RGzN3ol3teJY=
github.com/ethereum/go-ethereum v1.10.21/go.mod h1:EYFyF19u3ezGLD4RqOkLq+ZCXzYbLoNDdZlMt7kyKFg=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/getkin/kin-openapi v0.61.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@ -120,6 +132,10 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gofiber/adaptor/v2 v2.1.31 h1:E7LJre4uBc+RDsQfHCE+LKVkFcciSMYu4KhzbvoWgKU=
github.com/gofiber/adaptor/v2 v2.1.31/go.mod h1:vdSG9JhOhOLYjE4j14fx6sJvLJNFVf9o6rSyB5GkU4s=
@ -159,6 +175,7 @@ github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
@ -188,6 +205,7 @@ github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/hashicorp/go-cleanhttp v0.5.0 h1:wvCrVc9TjDls6+YGAF2hAifE1E5U1+b4tH6KdvN3Gig=
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-retryablehttp v0.5.1 h1:Vsx5XKPqPs3M6sM4U4GWyUqFS8aBiL9U5gkgvpkg4SE=
@ -199,6 +217,10 @@ github.com/holiman/uint256 v1.2.1/go.mod h1:y4ga/t+u+Xwd7CpDgZESaRcWy0I7XMlTMA25
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/influxdata/influxdb-client-go/v2 v2.12.2 h1:uYABKdrEKlYm+++qfKdbgaHKBPmoWR5wpbmj6MBB/2g=
github.com/influxdata/influxdb-client-go/v2 v2.12.2/go.mod h1:YteV91FiQxRdccyJ2cHvj2f/5sq4y4Njqu1fQzsQCOU=
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097 h1:vilfsDSy7TDxedi9gyBkMvAirat/oRcL0lFdJBf6tdM=
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg=
@ -227,8 +249,19 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
@ -249,6 +282,9 @@ github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o=
github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19 h1:JernwK3Bgd5x+UJPV6S2LPYoBF+DFOYBoQ5JeJPVBNc=
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19/go.mod h1:4OjcxgwdXzezqytxN534MooNmrxRD50geWZxTD7845s=
github.com/philhofer/fwd v1.1.1/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
@ -318,6 +354,7 @@ github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpE
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@ -337,6 +374,8 @@ github.com/valyala/fasthttp v1.43.0/go.mod h1:f6VbjjoI3z1NDOZOv17o6RvtRSWxC77seB
github.com/valyala/fasthttp v1.44.0/go.mod h1:f6VbjjoI3z1NDOZOv17o6RvtRSWxC77seBFc2uWtgiY=
github.com/valyala/fasthttp v1.47.0 h1:y7moDoxYzMooFpT5aHgNgVOQDrS3qlkfiP9mDtGGK9c=
github.com/valyala/fasthttp v1.47.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA=
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
github.com/wormhole-foundation/wormhole/sdk v0.0.0-20230426150516-e695fad0bed8 h1:rrOyHd+H9a6Op1iUyZNCaI5v9D1syq8jDAYyX/2Q4L8=
@ -378,6 +417,8 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
@ -447,6 +488,7 @@ golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81R
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
@ -457,6 +499,8 @@ golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -484,6 +528,7 @@ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -492,11 +537,14 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -511,6 +559,7 @@ golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -530,6 +579,7 @@ golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
@ -538,6 +588,7 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
@ -546,6 +597,8 @@ golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@ -680,13 +733,16 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@ -1,39 +0,0 @@
package infrastructure
import (
"github.com/gofiber/fiber/v2"
"go.uber.org/zap"
)
// Controller definition.
type Controller struct {
srv *Service
logger *zap.Logger
}
// NewController creates a Controller instance.
func NewController(serv *Service, logger *zap.Logger) *Controller {
return &Controller{srv: serv, logger: logger}
}
// HealthCheck handler for the endpoint /health.
func (c *Controller) HealthCheck(ctx *fiber.Ctx) error {
return ctx.JSON(struct {
Status string `json:"status"`
}{Status: "OK"})
}
// ReadyCheck handler for the endpoint /ready.
func (c *Controller) ReadyCheck(ctx *fiber.Ctx) error {
ready, err := c.srv.CheckIsReady(ctx.Context())
if ready {
return ctx.Status(fiber.StatusOK).JSON(struct {
Ready string `json:"ready"`
}{Ready: "OK"})
}
c.logger.Error("Ready check failed", zap.Error(err))
return ctx.Status(fiber.StatusInternalServerError).JSON(struct {
Ready string `json:"ready"`
Error string `json:"error"`
}{Ready: "NO", Error: err.Error()})
}

View File

@ -1,19 +0,0 @@
package infrastructure
// MongoStatus represent a mongo server status.
type MongoStatus struct {
Ok int32 `bson:"ok"`
Host string `bson:"host"`
Version string `bson:"version"`
Process string `bson:"process"`
Pid int32 `bson:"pid"`
Uptime int32 `bson:"uptime"`
Connections *MongoConnections `bson:"connections"`
}
// MongoConnections represents a mongo server connection.
type MongoConnections struct {
Current int32 `bson:"current"`
Available int32 `bson:"available"`
TotalCreated int32 `bson:"totalCreated"`
}

View File

@ -1,46 +0,0 @@
package infrastructure
import (
"context"
"fmt"
"github.com/pkg/errors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"go.uber.org/zap"
)
// Repository definition.
type Repository struct {
db *mongo.Database
logger *zap.Logger
}
// NewRepository create a new Repository instance.
func NewRepository(db *mongo.Database, logger *zap.Logger) *Repository {
return &Repository{db: db,
logger: logger.With(zap.String("module", "InfraestructureRepository")),
}
}
// GetMongoStatus get mongo server status.
func (r *Repository) GetMongoStatus(ctx context.Context) (*MongoStatus, error) {
command := bson.D{{Key: "serverStatus", Value: 1}}
result := r.db.RunCommand(ctx, command)
if result.Err() != nil {
requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
r.logger.Error("failed execute command mongo serverStatus",
zap.Error(result.Err()), zap.String("requestID", requestID))
return nil, errors.WithStack(result.Err())
}
var mongoStatus MongoStatus
err := result.Decode(&mongoStatus)
if err != nil {
requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
r.logger.Error("failed decoding cursor to *MongoStatus", zap.Error(err),
zap.String("requestID", requestID))
return nil, errors.WithStack(err)
}
return &mongoStatus, nil
}

View File

@ -4,9 +4,8 @@ import (
"github.com/ansrivas/fiberprometheus/v2"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/pprof"
"github.com/wormhole-foundation/wormhole-explorer/common/health"
"github.com/wormhole-foundation/wormhole-explorer/parser/http/vaa"
"github.com/wormhole-foundation/wormhole-explorer/parser/internal/sqs"
"go.mongodb.org/mongo-driver/mongo"
"go.uber.org/zap"
)
@ -16,11 +15,8 @@ type Server struct {
logger *zap.Logger
}
func NewServer(logger *zap.Logger, port string, pprofEnabled bool, isQueueConsumer bool, consumer *sqs.Consumer,
db *mongo.Database, vaaController *vaa.Controller) *Server {
repository := NewRepository(db, logger)
service := NewService(repository, consumer, isQueueConsumer, logger)
ctrl := NewController(service, logger)
func NewServer(logger *zap.Logger, port string, pprofEnabled bool, vaaController *vaa.Controller, checks ...health.Check) *Server {
ctrl := health.NewController(checks, logger)
app := fiber.New(fiber.Config{DisableStartupMessage: true})
// configure middleware.

View File

@ -1,83 +0,0 @@
package infrastructure
import (
"context"
"errors"
"fmt"
"github.com/wormhole-foundation/wormhole-explorer/parser/internal/sqs"
"go.uber.org/zap"
)
// Service definition.
type Service struct {
repo *Repository
consumer *sqs.Consumer
isQueueConsumer bool
logger *zap.Logger
}
// NewService create a new Service instance.
func NewService(dao *Repository, consumer *sqs.Consumer, isQueueConsumer bool, logger *zap.Logger) *Service {
return &Service{repo: dao, consumer: consumer, isQueueConsumer: isQueueConsumer, logger: logger.With(zap.String("module", "Infraestructureervice"))}
}
// CheckIsReady check if the service is ready.
func (s *Service) CheckIsReady(ctx context.Context) (bool, error) {
// check if mongodb is ready
isMongoReady, err := s.CheckMongoServerStatus(ctx)
if err != nil {
return false, err
}
// check if aws sqs is ready
isAwsSQSReady, err := s.CheckAwsSQS(ctx)
if err != nil {
return false, err
}
if !(isMongoReady && isAwsSQSReady) {
return false, errors.New("error services not ready")
}
return true, nil
}
// CheckMongoServerStatus check mongodb status.
func (s *Service) CheckMongoServerStatus(ctx context.Context) (bool, error) {
mongoStatus, err := s.repo.GetMongoStatus(ctx)
if err != nil {
return false, err
}
// check mongo server status
mongoStatusCheck := (mongoStatus.Ok == 1 && mongoStatus.Pid > 0 && mongoStatus.Uptime > 0)
if !mongoStatusCheck {
return false, fmt.Errorf("mongo server not ready (Ok = %v, Pid = %v, Uptime = %v)", mongoStatus.Ok, mongoStatus.Pid, mongoStatus.Uptime)
}
// check mongo connections
if mongoStatus.Connections.Available <= 0 {
return false, fmt.Errorf("mongo server without available connections (availableConection = %v)", mongoStatus.Connections.Available)
}
return true, nil
}
// CheckAwsSQS check aws sqs status.
func (s *Service) CheckAwsSQS(ctx context.Context) (bool, error) {
// vaa queue handle in memory [local enviroment]
if !s.isQueueConsumer {
return true, nil
}
// get queue attributes
queueAttributes, err := s.consumer.GetQueueAttributes(ctx)
if err != nil || queueAttributes == nil {
return false, err
}
// check queue created
createdTimestamp := queueAttributes.Attributes["CreatedTimestamp"]
if createdTimestamp == "" {
return false, errors.New("error createdTimestamp attributes does not exist")
}
return createdTimestamp != "", nil
}

View File

@ -1,6 +1,8 @@
package vaa
import (
"fmt"
"github.com/gofiber/fiber/v2"
"github.com/wormhole-foundation/wormhole-explorer/parser/processor"
"go.uber.org/zap"
@ -34,7 +36,9 @@ func (c *Controller) Parse(ctx *fiber.Ctx) error {
return err
}
vaaParsed, err := c.processor(ctx.Context(), vaa.Vaa)
trackID := fmt.Sprintf("controller-%s", payload.ID)
vaaParsed, err := c.processor(ctx.Context(), &processor.Params{Vaa: vaa.Vaa, TrackID: trackID})
if err != nil {
return err
}

View File

@ -36,9 +36,9 @@ func New(parser vaaPayloadParser.ParserVAAAPIClient, repository *parser.Reposito
}
}
func (p *Processor) Process(ctx context.Context, vaaBytes []byte) (*parser.ParsedVaaUpdate, error) {
func (p *Processor) Process(ctx context.Context, params *Params) (*parser.ParsedVaaUpdate, error) {
// unmarshal vaa.
vaa, err := sdk.Unmarshal(vaaBytes)
vaa, err := sdk.Unmarshal(params.Vaa)
if err != nil {
return nil, err
}
@ -63,6 +63,7 @@ func (p *Processor) Process(ctx context.Context, vaaBytes []byte) (*parser.Parse
// send alert when exists and error calling vaa-payload-parser component.
alertContext := alert.AlertContext{
Details: map[string]string{
"trackID": params.TrackID,
"chainID": vaa.EmitterChain.String(),
"emitterAddress": emitterAddress,
"sequence": sequence,
@ -74,7 +75,8 @@ func (p *Processor) Process(ctx context.Context, vaaBytes []byte) (*parser.Parse
}
p.logger.Info("VAA cannot be parsed", zap.Error(err),
zap.Uint16("chainID", chainID),
zap.String("trackId", params.TrackID),
zap.Uint16("chainId", chainID),
zap.String("address", emitterAddress),
zap.String("sequence", sequence))
return nil, nil
@ -82,7 +84,7 @@ func (p *Processor) Process(ctx context.Context, vaaBytes []byte) (*parser.Parse
p.metrics.IncVaaPayloadParserSuccessCount(chainID)
p.metrics.IncVaaParsed(chainID)
standardizedProperties := p.transformStandarizedProperties(vaa.MessageID(), vaaParseResponse.StandardizedProperties)
standardizedProperties := p.transformStandarizedProperties(params.TrackID, vaa.MessageID(), vaaParseResponse.StandardizedProperties)
// create ParsedVaaUpdate to upsert.
now := time.Now()
@ -102,11 +104,13 @@ func (p *Processor) Process(ctx context.Context, vaaBytes []byte) (*parser.Parse
err = p.repository.UpsertParsedVaa(ctx, vaaParsed)
if err != nil {
p.logger.Error("Error inserting vaa in repository",
zap.String("trackId", params.TrackID),
zap.String("id", vaaParsed.ID),
zap.Error(err))
// send alert when exists and error inserting parsed vaa.
alertContext := alert.AlertContext{
Details: map[string]string{
"trackID": params.TrackID,
"chainID": vaa.EmitterChain.String(),
"emitterAddress": emitterAddress,
"sequence": sequence,
@ -118,16 +122,16 @@ func (p *Processor) Process(ctx context.Context, vaaBytes []byte) (*parser.Parse
}
p.metrics.IncVaaParsedInserted(chainID)
p.logger.Info("parsed VAA was successfully persisted", zap.String("id", vaaParsed.ID))
p.logger.Info("parsed VAA was successfully persisted", zap.String("trackId", params.TrackID), zap.String("id", vaaParsed.ID))
return &vaaParsed, nil
}
// transformStandarizedProperties transform amount and fee amount.
func (p *Processor) transformStandarizedProperties(vaaID string, sp vaaPayloadParser.StandardizedProperties) vaaPayloadParser.StandardizedProperties {
func (p *Processor) transformStandarizedProperties(trackID, vaaID string, sp vaaPayloadParser.StandardizedProperties) vaaPayloadParser.StandardizedProperties {
// transform amount.
amount := p.transformAmount(sp.TokenChain, sp.TokenAddress, sp.Amount, vaaID)
amount := p.transformAmount(sp.TokenChain, trackID, sp.TokenAddress, sp.Amount, vaaID)
// transform fee amount.
feeAmount := p.transformAmount(sp.FeeChain, sp.FeeAddress, sp.Fee, vaaID)
feeAmount := p.transformAmount(sp.FeeChain, trackID, sp.FeeAddress, sp.Fee, vaaID)
// create StandardizedProperties.
return vaaPayloadParser.StandardizedProperties{
AppIds: sp.AppIds,
@ -145,7 +149,7 @@ func (p *Processor) transformStandarizedProperties(vaaID string, sp vaaPayloadPa
}
// transformAmount transform amount and fee amount.
func (p *Processor) transformAmount(chainID sdk.ChainID, nativeAddress, amount, vaaID string) string {
func (p *Processor) transformAmount(chainID sdk.ChainID, trackID, nativeAddress, amount, vaaID string) string {
if chainID == sdk.ChainIDUnset || nativeAddress == "" || amount == "" {
return ""
@ -154,6 +158,7 @@ func (p *Processor) transformAmount(chainID sdk.ChainID, nativeAddress, amount,
nativeHex, err := domain.DecodeNativeAddressToHex(sdk.ChainID(chainID), nativeAddress)
if err != nil {
p.logger.Warn("Native address cannot be transformed to hex",
zap.String("trackId", trackID),
zap.String("vaaId", vaaID),
zap.String("nativeAddress", nativeAddress),
zap.Uint16("chain", uint16(chainID)))
@ -163,6 +168,7 @@ func (p *Processor) transformAmount(chainID sdk.ChainID, nativeAddress, amount,
addr, err := sdk.StringToAddress(nativeHex)
if err != nil {
p.logger.Warn("Address cannot be parsed",
zap.String("trackId", trackID),
zap.String("vaaId", vaaID),
zap.String("nativeAddress", nativeAddress),
zap.Uint16("chain", uint16(chainID)))
@ -174,6 +180,7 @@ func (p *Processor) transformAmount(chainID sdk.ChainID, nativeAddress, amount,
tokenMeta, ok := domain.GetTokenByAddress(sdk.ChainID(chainID), addr.String())
if !ok {
p.logger.Warn("Token metadata not found",
zap.String("trackId", trackID),
zap.String("vaaId", vaaID),
zap.String("nativeAddress", nativeAddress),
zap.Uint16("chain", uint16(chainID)))
@ -184,6 +191,7 @@ func (p *Processor) transformAmount(chainID sdk.ChainID, nativeAddress, amount,
bigAmount, ok = bigAmount.SetString(amount, 10)
if !ok {
p.logger.Error("Cannot parse amount",
zap.String("trackId", trackID),
zap.String("vaaId", vaaID),
zap.String("amount", amount),
zap.String("nativeAddress", nativeAddress),

View File

@ -6,5 +6,10 @@ import (
"github.com/wormhole-foundation/wormhole-explorer/parser/parser"
)
type Params struct {
TrackID string
Vaa []byte
}
// ProcessorFunc is a function to process vaa message.
type ProcessorFunc func(context.Context, []byte) (*parser.ParsedVaaUpdate, error)
type ProcessorFunc func(context.Context, *Params) (*parser.ParsedVaaUpdate, error)
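For illustration, a minimal sketch of how a caller propagates a track ID through the new processor.Params argument; the helper name below is assumed, and only ProcessorFunc and Params come from this commit.

package example

import (
	"context"

	"github.com/wormhole-foundation/wormhole-explorer/parser/processor"
)

// handleVaa passes the track ID and raw VAA bytes through processor.Params.
// Sketch only; the real call sites are the queue consumer and the HTTP controller.
func handleVaa(ctx context.Context, process processor.ProcessorFunc, trackID string, vaaBytes []byte) error {
	_, err := process(ctx, &processor.Params{TrackID: trackID, Vaa: vaaBytes})
	return err
}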

116 parser/queue/converter.go Normal file
View File

@ -0,0 +1,116 @@
package queue
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/wormhole-foundation/wormhole-explorer/common/events"
"go.uber.org/zap"
)
// VaaEvent represents VAA data to be handled by the pipeline.
type VaaEvent struct {
ID string `json:"id"`
ChainID uint16 `json:"emitterChain"`
EmitterAddress string `json:"emitterAddr"`
Sequence string `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Vaa []byte `json:"vaas"`
IndexedAt time.Time `json:"indexedAt"`
Timestamp *time.Time `json:"timestamp"`
UpdatedAt *time.Time `json:"updatedAt"`
TxHash string `json:"txHash"`
Version uint16 `json:"version"`
Revision uint16 `json:"revision"`
}
// NewVaaConverter returns a ConverterFunc that builds an Event from a VaaEvent message.
func NewVaaConverter(log *zap.Logger) ConverterFunc {
return func(msg string) (*Event, error) {
// unmarshal message to vaaEvent
var vaaEvent VaaEvent
err := json.Unmarshal([]byte(msg), &vaaEvent)
if err != nil {
return nil, err
}
return &Event{
TrackID: fmt.Sprintf("pipeline-%s", vaaEvent.ID),
ID: vaaEvent.ID,
ChainID: vaaEvent.ChainID,
EmitterAddress: vaaEvent.EmitterAddress,
Sequence: vaaEvent.Sequence,
Vaa: vaaEvent.Vaa,
Timestamp: vaaEvent.Timestamp,
TxHash: vaaEvent.TxHash,
}, nil
}
}
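// NewNotificationEvent returns a ConverterFunc that builds an Event from a blockchain-watcher notification (signed-vaa or log-message-published); other event types are skipped.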
func NewNotificationEvent(log *zap.Logger) ConverterFunc {
return func(msg string) (*Event, error) {
// unmarshal message to NotificationEvent
var notification events.NotificationEvent
err := json.Unmarshal([]byte(msg), &notification)
if err != nil {
return nil, err
}
if notification.Event != events.SignedVaaType && notification.Event != events.LogMessagePublishedMesageType {
log.Debug("Skip event type", zap.String("trackId", notification.TrackID), zap.String("type", notification.Event))
return nil, nil
}
switch notification.Event {
case events.SignedVaaType:
signedVaaEvent, err := events.GetEventData[events.SignedVaa](&notification)
if err != nil {
log.Error("Error decoding signedVAA from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, nil
}
return &Event{
TrackID: notification.TrackID,
ID: signedVaaEvent.ID,
ChainID: signedVaaEvent.EmitterChain,
EmitterAddress: signedVaaEvent.EmitterAddress,
Sequence: strconv.FormatUint(signedVaaEvent.Sequence, 10),
Vaa: signedVaaEvent.Vaa,
Timestamp: &signedVaaEvent.Timestamp,
TxHash: signedVaaEvent.TxHash,
}, nil
case events.LogMessagePublishedMesageType:
plm, err := events.GetEventData[events.LogMessagePublished](&notification)
if err != nil {
log.Error("Error decoding publishedLogMessage from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, nil
}
vaa, err := events.CreateUnsignedVAA(&plm)
if err != nil {
log.Error("Error creating unsigned vaa", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, err
}
vaaBytes, err := vaa.MarshalBinary()
if err != nil {
return nil, err
}
return &Event{
TrackID: notification.TrackID,
ID: vaa.MessageID(),
ChainID: plm.ChainID,
EmitterAddress: plm.Attributes.Sender,
Sequence: strconv.FormatUint(plm.Attributes.Sequence, 10),
Vaa: vaaBytes,
Timestamp: &plm.BlockTime,
TxHash: plm.TxHash,
}, nil
}
return nil, nil
}
}
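The converters above lean on events.GetEventData from the common module, which is outside this diff. A minimal sketch of what such a generic decoder could look like, assuming the notification keeps its payload as raw JSON in a Data field; the real type and helper in common/events may differ.

package events

import "encoding/json"

// NotificationEvent is reduced here to the fields the sketch needs.
type NotificationEvent struct {
	TrackID string          `json:"trackId"`
	Event   string          `json:"event"`
	Data    json.RawMessage `json:"data"`
}

// GetEventData decodes the typed payload of a NotificationEvent.
func GetEventData[T any](e *NotificationEvent) (T, error) {
	var data T
	err := json.Unmarshal(e.Data, &data)
	return data, err
}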

View File

@ -0,0 +1,87 @@
package queue
import (
"encoding/json"
"testing"
"time"
"github.com/stretchr/testify/assert"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
"go.uber.org/zap"
)
func TestNotificationEvent(t *testing.T) {
log := zap.NewExample()
converter := NewNotificationEvent(log)
msg := `{
"trackId":"chain-event-0xb437df51c6c9af58eff89e328f84d8bece25d718baf088899c9692782fe29c43-10012515",
"source":"blockchain-watcher",
"event":"log-message-published",
"timestamp":"2023-11-10T14:20:45.159Z",
"version":"1",
"data":{
"chainId":2,
"emitter":"0x706abc4e45d419950511e474c7b9ed348a4a716c",
"txHash":"0xb437df51c6c9af58eff89e328f84d8bece25d718baf088899c9692782fe29c43",
"blockHeight":"10012515",
"blockTime":"2023-11-09T09:06:24.000Z",
"attributes":{
"sender":"0xe9d87dD072B0bcE6aA9335d590cfB0342870d7B0",
"sequence":1,
"payload":"0x7b226e65766572223a7b22676f6e6e61223a7b2267697665223a7b22796f75223a227570227d7d7d7d",
"nonce":1699520760,
"consistencyLevel":200
}
}
}`
event, err := converter(msg)
assert.NoError(t, err)
assert.NotNil(t, event)
assert.Equal(t, "chain-event-0xb437df51c6c9af58eff89e328f84d8bece25d718baf088899c9692782fe29c43-10012515", event.TrackID)
assert.Equal(t, "2/000000000000000000000000e9d87dd072b0bce6aa9335d590cfb0342870d7b0/1", event.ID)
assert.Equal(t, uint16(2), event.ChainID)
assert.Equal(t, "0xe9d87dD072B0bcE6aA9335d590cfB0342870d7B0", event.EmitterAddress)
vaa, err := sdk.Unmarshal(event.Vaa)
assert.NoError(t, err)
assert.NotNil(t, vaa)
expectedPayload := []byte{123, 34, 110, 101, 118, 101, 114, 34, 58, 123, 34, 103, 111, 110, 110, 97, 34, 58, 123, 34, 103, 105, 118, 101, 34, 58, 123, 34, 121, 111, 117, 34, 58, 34, 117, 112, 34, 125, 125, 125, 125}
assert.Equal(t, expectedPayload, vaa.Payload)
}
func TestSqsEvent(t *testing.T) {
log := zap.NewExample()
converter := NewNotificationEvent(log)
msg := `
{
"Type" : "Notification",
"MessageId" : "14d855ca-ad78-59c5-b30e-0802e1362944",
"SequenceNumber" : "10000000000040190002",
"TopicArn" : "arn:aws:sns:us-east-2:581679387567:notification-chain-events-dev-testnet.fifo",
"Subject" : "blockchain-watcher",
"Message" : "{\"trackId\":\"chain-event-0xb6b7af602aa098fbd8c88da2c2e4a316eef22f0ee621c5ca7616992c3fd9d3fe-10012893\",\"source\":\"blockchain-watcher\",\"event\":\"log-message-published\",\"timestamp\":\"2023-11-10T15:19:42.320Z\",\"version\":\"1\",\"data\":{\"chainId\":2,\"emitter\":\"0x706abc4e45d419950511e474c7b9ed348a4a716c\",\"txHash\":\"0xb6b7af602aa098fbd8c88da2c2e4a316eef22f0ee621c5ca7616992c3fd9d3fe\",\"blockHeight\":\"10012893\",\"blockTime\":\"2023-11-09T10:41:24.000Z\",\"attributes\":{\"sender\":\"0x28D8F1Be96f97C1387e94A53e00eCcFb4E75175a\",\"sequence\":3418,\"payload\":\"0x010017000000000000000000000000b5b6bf4224f75762dae40c862dc899431ea1778300000040000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000654cb74d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007a12000000000000000000000000000000000000000000000000000000006a6fd66db0002000000000000000000000000e310c47fa3e3f011a6e3108e3c725cff4900199b00000000000000000000000090995dbd1aae85872451b50a569de947d34ac4ee000000000000000000000000d1463b4fe86166768d2ff51b1a928bebb5c9f375000000000000000000000000e310c47fa3e3f011a6e3108e3c725cff4900199b00\",\"nonce\":0,\"consistencyLevel\":200}}}",
"Timestamp" : "2023-11-10T15:19:42.548Z",
"UnsubscribeURL" : "https://sns.us-east-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-east-2:581679387567:notification-chain-events-dev-testnet.fifo:2e1cc196-afd8-4efb-b9b3-27c38e688494"
}
`
// unmarshal body to sqsEvent from sns/sqs subscription
var sqsEvent sqsEvent
err := json.Unmarshal([]byte(msg), &sqsEvent)
assert.NoError(t, err)
event, err := converter(sqsEvent.Message)
assert.NoError(t, err)
assert.NotNil(t, event)
assert.Equal(t, "2/00000000000000000000000028d8f1be96f97c1387e94a53e00eccfb4e75175a/3418", event.ID)
assert.Equal(t, uint16(2), event.ChainID)
assert.Equal(t, "0x28D8F1Be96f97C1387e94A53e00eCcFb4E75175a", event.EmitterAddress)
assert.Equal(t, "0xb6b7af602aa098fbd8c88da2c2e4a316eef22f0ee621c5ca7616992c3fd9d3fe", event.TxHash)
vaa, err := sdk.Unmarshal(event.Vaa)
assert.NoError(t, err)
assert.NotNil(t, vaa)
expectedTimestamp, err := time.Parse(time.RFC3339, "2023-11-09T10:41:24.000Z")
assert.NoError(t, err)
assert.Equal(t, uint64(3418), vaa.Sequence)
assert.Equal(t, sdk.ChainIDEthereum, vaa.EmitterChain)
assert.Equal(t, expectedTimestamp.UTC(), vaa.Timestamp.UTC())
assert.Equal(t, uint8(200), vaa.ConsistencyLevel)
}

View File

@ -21,18 +21,23 @@ type SQS struct {
chSize int
wg sync.WaitGroup
filterConsume FilterConsumeFunc
converter ConverterFunc
metrics metrics.Metrics
logger *zap.Logger
}
// FilterConsumeFunc is a function definition for filtering events.
type FilterConsumeFunc func(vaaEvent *VaaEvent) bool
type FilterConsumeFunc func(*Event) bool
// NewVAASQS creates a VAA queue in SQS instances.
func NewVAASQS(consumer *sqs.Consumer, filterConsume FilterConsumeFunc, metrics metrics.Metrics, logger *zap.Logger, opts ...SQSOption) *SQS {
// ConverterFunc converts an SQS message body into an Event.
type ConverterFunc func(string) (*Event, error)
// NewEventSQS creates an event queue backed by SQS instances.
func NewEventSQS(consumer *sqs.Consumer, converter ConverterFunc, filterConsume FilterConsumeFunc, metrics metrics.Metrics, logger *zap.Logger, opts ...SQSOption) *SQS {
s := &SQS{
consumer: consumer,
chSize: 10,
converter: converter,
filterConsume: filterConsume,
metrics: metrics,
logger: logger}
@ -70,28 +75,32 @@ func (q *SQS) Consume(ctx context.Context) <-chan ConsumerMessage {
continue
}
// unmarshal message to vaaEvent
var vaaEvent VaaEvent
err = json.Unmarshal([]byte(sqsEvent.Message), &vaaEvent)
// unmarshal message to event
event, err := q.converter(sqsEvent.Message)
if err != nil {
q.logger.Error("Error decoding vaaEvent message from SQSEvent", zap.Error(err))
q.logger.Error("Error decoding event message from SQSEvent", zap.Error(err))
continue
}
q.metrics.IncVaaConsumedQueue(vaaEvent.ChainID)
if event == nil {
continue
}
q.metrics.IncVaaConsumedQueue(event.ChainID)
// filter event by p2p network.
if q.filterConsume(&vaaEvent) {
if q.filterConsume(event) {
if err := q.consumer.DeleteMessage(ctx, msg.ReceiptHandle); err != nil {
q.logger.Error("Error deleting message from SQS", zap.Error(err))
}
continue
}
q.metrics.IncVaaUnfiltered(vaaEvent.ChainID)
q.metrics.IncVaaUnfiltered(event.ChainID)
q.wg.Add(1)
q.ch <- &sqsConsumerMessage{
id: msg.ReceiptHandle,
data: &vaaEvent,
data: event,
wg: &q.wg,
logger: q.logger,
consumer: q.consumer,
@ -112,7 +121,7 @@ func (q *SQS) Close() {
}
type sqsConsumerMessage struct {
data *VaaEvent
data *Event
consumer *sqs.Consumer
wg *sync.WaitGroup
id *string
@ -121,7 +130,7 @@ type sqsConsumerMessage struct {
ctx context.Context
}
func (m *sqsConsumerMessage) Data() *VaaEvent {
func (m *sqsConsumerMessage) Data() *Event {
return m.data
}
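For illustration, the receive side of the refactored queue keeps the same shape, only typed around *Event. A minimal sketch of a consumer loop, assuming the parser/queue import path; the processing body is elided.

package example

import (
	"context"

	"github.com/wormhole-foundation/wormhole-explorer/parser/queue"
)

// drain acks or nacks every message delivered by a queue.ConsumeFunc.
// Sketch only; the real loop lives in the parser consumer package.
func drain(ctx context.Context, consume queue.ConsumeFunc) {
	for msg := range consume(ctx) {
		if msg.IsExpired() {
			msg.Failed()
			continue
		}
		event := msg.Data()
		// ... parse event.Vaa and upsert the result in MongoDB ...
		_ = event
		msg.Done()
	}
}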

View File

@ -3,11 +3,11 @@ package queue
import "github.com/wormhole-foundation/wormhole/sdk/vaa"
// PythFilter filters out VAA events from the Pyth chain.
func PythFilter(vaaEvent *VaaEvent) bool {
func PythFilter(vaaEvent *Event) bool {
return vaaEvent.ChainID == uint16(vaa.ChainIDPythNet)
}
// NonFilter does not filter out any event.
func NonFilter(vaaEvent *VaaEvent) bool {
func NonFilter(vaaEvent *Event) bool {
return false
}

View File

@ -1,38 +0,0 @@
package queue
import (
"context"
"time"
)
type sqsEvent struct {
MessageID string `json:"MessageId"`
Message string `json:"Message"`
}
// VaaEvent represents a vaa data to be handle by the pipeline.
type VaaEvent struct {
ID string `json:"id"`
ChainID uint16 `json:"emitterChain"`
EmitterAddress string `json:"emitterAddr"`
Sequence string `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Vaa []byte `json:"vaas"`
IndexedAt time.Time `json:"indexedAt"`
Timestamp *time.Time `json:"timestamp"`
UpdatedAt *time.Time `json:"updatedAt"`
TxHash string `json:"txHash"`
Version uint16 `json:"version"`
Revision uint16 `json:"revision"`
}
// ConsumerMessage defition.
type ConsumerMessage interface {
Data() *VaaEvent
Done()
Failed()
IsExpired() bool
}
// VAAConsumeFunc is a function to consume VAAEvent.
type VAAConsumeFunc func(context.Context) <-chan ConsumerMessage

34 parser/queue/types.go Normal file
View File

@ -0,0 +1,34 @@
package queue
import (
"context"
"time"
)
type sqsEvent struct {
MessageID string `json:"MessageId"`
Message string `json:"Message"`
}
// Event represents event data to be handled.
type Event struct {
TrackID string
ID string
ChainID uint16
EmitterAddress string
Sequence string
Vaa []byte
Timestamp *time.Time
TxHash string
}
// ConsumerMessage definition.
type ConsumerMessage interface {
Data() *Event
Done()
Failed()
IsExpired() bool
}
// ConsumeFunc is a function to consume Event.
type ConsumeFunc func(context.Context) <-chan ConsumerMessage
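Because ConsumeFunc is just a function returning a channel, a local environment without SQS can satisfy it in memory. A minimal sketch, not part of this commit; the import path is assumed.

package example

import (
	"context"

	"github.com/wormhole-foundation/wormhole-explorer/parser/queue"
)

// memoryMessage is a trivial ConsumerMessage for local runs without SQS.
type memoryMessage struct{ event *queue.Event }

func (m *memoryMessage) Data() *queue.Event { return m.event }
func (m *memoryMessage) Done()              {}
func (m *memoryMessage) Failed()            {}
func (m *memoryMessage) IsExpired() bool    { return false }

// newMemoryConsumeFunc adapts a fixed slice of events to queue.ConsumeFunc.
func newMemoryConsumeFunc(events []*queue.Event) queue.ConsumeFunc {
	return func(ctx context.Context) <-chan queue.ConsumerMessage {
		ch := make(chan queue.ConsumerMessage, len(events))
		for _, e := range events {
			ch <- &memoryMessage{event: e}
		}
		close(ch)
		return ch
	}
}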

View File

@ -22,8 +22,6 @@ require (
)
require (
github.com/algorand/go-algorand-sdk v1.23.0 // indirect
github.com/algorand/go-codec/codec v1.1.8 // indirect
github.com/andybalholm/brotli v1.0.5 // indirect
github.com/aws/aws-sdk-go-v2 v1.17.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.28 // indirect
@ -36,7 +34,6 @@ require (
github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect
github.com/cenkalti/backoff/v4 v4.1.3 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/cosmos/btcutil v1.0.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
github.com/deepmap/oapi-codegen v1.8.2 // indirect

View File

@ -38,11 +38,6 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/algorand/go-algorand-sdk v1.23.0 h1:wlEV6OgDVc/sLeF2y41bwNG/Lr8EoMnN87Ur8N2Gyyo=
github.com/algorand/go-algorand-sdk v1.23.0/go.mod h1:7i2peZBcE48kfoxNZnLA+mklKh812jBKvQ+t4bn0KBQ=
github.com/algorand/go-codec v1.1.8/go.mod h1:XhzVs6VVyWMLu6cApb9/192gBjGRVGm5cX5j203Heg4=
github.com/algorand/go-codec/codec v1.1.8 h1:lsFuhcOH2LiEhpBH3BVUUkdevVmwCRyvb7FCAAPeY6U=
github.com/algorand/go-codec/codec v1.1.8/go.mod h1:tQ3zAJ6ijTps6V+wp8KsGDnPC2uhHVC7ANyrtkIY0bA=
github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs=
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
@ -82,8 +77,6 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk=
github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis=
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@ -368,7 +361,6 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/test-go/testify v1.1.4 h1:Tf9lntrKUMHiXQ07qBScBTSA0dhYQlu83hswqelv1iE=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tinylib/msgp v1.1.6/go.mod h1:75BAfg2hauQhs3qedfdDZmWAPcFMAvJE5b9rGOMufyw=
@ -397,7 +389,6 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.mongodb.org/mongo-driver v1.11.2 h1:+1v2rDQUWNcGW7/7E0Jvdz51V38XXxJfhzbV17aNHCw=
go.mongodb.org/mongo-driver v1.11.2/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8=
@ -461,7 +452,6 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -497,7 +487,6 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
@ -571,7 +560,6 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -646,7 +634,6 @@ golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View File

@ -7,7 +7,7 @@ import (
"fmt"
"github.com/go-redis/redis/v8"
"github.com/wormhole-foundation/wormhole-explorer/common/domain"
"github.com/wormhole-foundation/wormhole-explorer/common/events"
"go.uber.org/zap"
)
@ -49,16 +49,16 @@ func (r *RedisSubscriber) subscribe(ctx context.Context) {
ch := r.pubSub.Channel()
go func() {
for msg := range ch {
var notification domain.NotificationEvent
var notification events.NotificationEvent
err := json.Unmarshal([]byte(msg.Payload), &notification)
if err != nil {
r.logger.Error("Error decoding vaaEvent message from SQSEvent", zap.Error(err))
continue
}
switch notification.Type {
case domain.SignedVaaType:
signedVaa, err := domain.GetEventPayload[domain.SignedVaa](&notification)
switch notification.Event {
case events.SignedVaaType:
signedVaa, err := events.GetEventData[events.SignedVaa](&notification)
if err != nil {
r.logger.Error("Error decoding signedVAA from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
continue

View File

@ -33,10 +33,11 @@ func fetchEthTx(
}
defer client.Close()
nativeTxHash := txHashLowerCaseWith0x(txHash)
// query transaction data
var txReply ethGetTransactionByHashResponse
{
err = client.CallContext(ctx, rateLimiter, &txReply, "eth_getTransactionByHash", "0x"+txHash)
err = client.CallContext(ctx, rateLimiter, &txReply, "eth_getTransactionByHash", nativeTxHash)
if err != nil {
return nil, fmt.Errorf("failed to get tx by hash: %w", err)
}
@ -48,7 +49,7 @@ func fetchEthTx(
// build results and return
txDetail := &TxDetail{
From: strings.ToLower(txReply.From),
NativeTxHash: fmt.Sprintf("0x%s", strings.ToLower(txHash)),
NativeTxHash: nativeTxHash,
}
return txDetail, nil
}
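Both the RPC call and the stored NativeTxHash now go through txHashLowerCaseWith0x, defined outside this hunk. A plausible minimal sketch of that helper, assuming the input hash may or may not already carry a 0x prefix.

package chains

import "strings"

// txHashLowerCaseWith0x normalizes a transaction hash to lower case with a 0x prefix.
// Sketch only; the real helper lives elsewhere in the tx-tracker chains package.
func txHashLowerCaseWith0x(h string) string {
	h = strings.ToLower(h)
	if !strings.HasPrefix(h, "0x") {
		h = "0x" + h
	}
	return h
}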

View File

@ -68,10 +68,15 @@ func main() {
server := infrastructure.NewServer(logger, cfg.MonitoringPort, cfg.PprofEnabled, vaaController, healthChecks...)
server.Start()
// create and start a consumer.
// create and start a pipeline consumer.
vaaConsumeFunc := newVAAConsumeFunc(rootCtx, cfg, metrics, logger)
consumer := consumer.New(vaaConsumeFunc, &cfg.RpcProviderSettings, rootCtx, logger, repository, metrics, cfg.P2pNetwork)
consumer.Start(rootCtx)
vaaConsumer := consumer.New(vaaConsumeFunc, &cfg.RpcProviderSettings, rootCtx, logger, repository, metrics, cfg.P2pNetwork)
vaaConsumer.Start(rootCtx)
// create and start a notification consumer.
notificationConsumeFunc := newNotificationConsumeFunc(rootCtx, cfg, metrics, logger)
notificationConsumer := consumer.New(notificationConsumeFunc, &cfg.RpcProviderSettings, rootCtx, logger, repository, metrics, cfg.P2pNetwork)
notificationConsumer.Start(rootCtx)
logger.Info("Started wormhole-explorer-tx-tracker")
@ -103,18 +108,34 @@ func newVAAConsumeFunc(
cfg *config.ServiceSettings,
metrics metrics.Metrics,
logger *zap.Logger,
) queue.VAAConsumeFunc {
) queue.ConsumeFunc {
sqsConsumer, err := newSqsConsumer(ctx, cfg)
sqsConsumer, err := newSqsConsumer(ctx, cfg, cfg.PipelineSqsUrl)
if err != nil {
logger.Fatal("failed to create sqs consumer", zap.Error(err))
}
vaaQueue := queue.NewVaaSqs(sqsConsumer, metrics, logger)
vaaQueue := queue.NewEventSqs(sqsConsumer, queue.NewVaaConverter(logger), metrics, logger)
return vaaQueue.Consume
}
func newSqsConsumer(ctx context.Context, cfg *config.ServiceSettings) (*sqs.Consumer, error) {
func newNotificationConsumeFunc(
ctx context.Context,
cfg *config.ServiceSettings,
metrics metrics.Metrics,
logger *zap.Logger,
) queue.ConsumeFunc {
sqsConsumer, err := newSqsConsumer(ctx, cfg, cfg.NotificationsSqsUrl)
if err != nil {
logger.Fatal("failed to create sqs consumer", zap.Error(err))
}
vaaQueue := queue.NewEventSqs(sqsConsumer, queue.NewNotificationEvent(logger), metrics, logger)
return vaaQueue.Consume
}
func newSqsConsumer(ctx context.Context, cfg *config.ServiceSettings, sqsUrl string) (*sqs.Consumer, error) {
awsconfig, err := newAwsConfig(ctx, cfg)
if err != nil {
@ -123,7 +144,7 @@ func newSqsConsumer(ctx context.Context, cfg *config.ServiceSettings) (*sqs.Cons
consumer, err := sqs.NewConsumer(
awsconfig,
cfg.PipelineSqsUrl,
sqsUrl,
sqs.WithMaxMessages(10),
sqs.WithVisibilityTimeout(4*60),
)
@ -174,6 +195,7 @@ func makeHealthChecks(
plugins := []health.Check{
health.SQS(awsConfig, config.PipelineSqsUrl),
health.SQS(awsConfig, config.NotificationsSqsUrl),
health.Mongo(db),
}

View File

@ -49,11 +49,12 @@ type ServiceSettings struct {
}
type AwsSettings struct {
AwsEndpoint string `split_words:"true" required:"false"`
AwsAccessKeyID string `split_words:"true" required:"false"`
AwsSecretAccessKey string `split_words:"true" required:"false"`
AwsRegion string `split_words:"true" required:"true"`
PipelineSqsUrl string `split_words:"true" required:"true"`
AwsEndpoint string `split_words:"true" required:"false"`
AwsAccessKeyID string `split_words:"true" required:"false"`
AwsSecretAccessKey string `split_words:"true" required:"false"`
AwsRegion string `split_words:"true" required:"true"`
PipelineSqsUrl string `split_words:"true" required:"true"`
NotificationsSqsUrl string `split_words:"true" required:"true"`
}
type MongodbSettings struct {
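The new NotificationsSqsUrl field implies one more required environment variable for the tx-tracker. Assuming the settings are loaded with kelseyhightower/envconfig, which the split_words tags suggest, the variables would be PIPELINE_SQS_URL and NOTIFICATIONS_SQS_URL plus whatever prefix the service passes to envconfig; a minimal sketch.

package example

import "github.com/kelseyhightower/envconfig"

// awsSettings mirrors only the two queue URLs, to make the implied
// environment variables explicit; the full struct is shown in the diff above.
type awsSettings struct {
	PipelineSqsUrl      string `split_words:"true" required:"true"`
	NotificationsSqsUrl string `split_words:"true" required:"true"`
}

func loadAwsSettings() (awsSettings, error) {
	var s awsSettings
	err := envconfig.Process("", &s)
	return s, err
}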

View File

@ -14,7 +14,7 @@ import (
// Consumer consumer struct definition.
type Consumer struct {
consumeFunc queue.VAAConsumeFunc
consumeFunc queue.ConsumeFunc
rpcProviderSettings *config.RpcProviderSettings
logger *zap.Logger
repository *Repository
@ -24,7 +24,7 @@ type Consumer struct {
// New creates a new vaa consumer.
func New(
consumeFunc queue.VAAConsumeFunc,
consumeFunc queue.ConsumeFunc,
rpcProviderSettings *config.RpcProviderSettings,
ctx context.Context,
logger *zap.Logger,
@ -68,7 +68,7 @@ func (c *Consumer) process(ctx context.Context, msg queue.ConsumerMessage) {
// Do not process messages from PythNet
if event.ChainID == sdk.ChainIDPythNet {
c.logger.Debug("Skipping expired PythNet message", zap.String("vaaId", event.ID))
c.logger.Debug("Skipping expired PythNet message", zap.String("trackId", event.TrackID), zap.String("vaaId", event.ID))
return
}
@ -76,6 +76,7 @@ func (c *Consumer) process(ctx context.Context, msg queue.ConsumerMessage) {
// Process the VAA
p := ProcessSourceTxParams{
TrackID: event.TrackID,
Timestamp: event.Timestamp,
VaaId: event.ID,
ChainId: event.ChainID,
@ -89,19 +90,23 @@ func (c *Consumer) process(ctx context.Context, msg queue.ConsumerMessage) {
// Log a message informing the processing status
if errors.Is(err, chains.ErrChainNotSupported) {
c.logger.Info("Skipping VAA - chain not supported",
zap.String("trackId", event.TrackID),
zap.String("vaaId", event.ID),
)
} else if errors.Is(err, ErrAlreadyProcessed) {
c.logger.Warn("Message already processed - skipping",
zap.String("trackId", event.TrackID),
zap.String("vaaId", event.ID),
)
} else if err != nil {
c.logger.Error("Failed to process originTx",
zap.String("trackId", event.TrackID),
zap.String("vaaId", event.ID),
zap.Error(err),
)
} else {
c.logger.Info("Transaction processed successfully",
zap.String("trackId", event.TrackID),
zap.String("id", event.ID),
)
c.metrics.IncOriginTxInserted(uint16(event.ChainID))

View File

@ -23,6 +23,7 @@ const (
// ProcessSourceTxParams is a struct that contains the parameters for the ProcessSourceTx method.
type ProcessSourceTxParams struct {
TrackID string
Timestamp *time.Time
ChainId sdk.ChainID
VaaId string
@ -85,6 +86,7 @@ func ProcessSourceTx(
return nil, fmt.Errorf("failed to process transaction: %w", err)
} else {
logger.Warn("failed to process transaction",
zap.String("trackId", params.TrackID),
zap.String("vaaId", params.VaaId),
zap.Any("vaaTimestamp", params.Timestamp),
zap.Int("retries", retries),
@ -98,10 +100,11 @@ func ProcessSourceTx(
// Store source transaction details in the database
p := UpsertDocumentParams{
VaaId: params.VaaId,
ChainId: params.ChainId,
TxDetail: txDetail,
TxStatus: domain.SourceTxStatusConfirmed,
VaaId: params.VaaId,
ChainId: params.ChainId,
Timestamp: params.Timestamp,
TxDetail: txDetail,
TxStatus: domain.SourceTxStatusConfirmed,
}
err = repository.UpsertDocument(ctx, &p)

View File

@ -38,10 +38,11 @@ func NewRepository(logger *zap.Logger, db *mongo.Database) *Repository {
// UpsertDocumentParams is a struct that contains the parameters for the upsertDocument method.
type UpsertDocumentParams struct {
VaaId string
ChainId sdk.ChainID
TxDetail *chains.TxDetail
TxStatus domain.SourceTxStatus
VaaId string
ChainId sdk.ChainID
TxDetail *chains.TxDetail
TxStatus domain.SourceTxStatus
Timestamp *time.Time
}
func (r *Repository) UpsertDocument(ctx context.Context, params *UpsertDocumentParams) error {
@ -53,6 +54,9 @@ func (r *Repository) UpsertDocument(ctx context.Context, params *UpsertDocumentP
if params.TxDetail != nil {
fields = append(fields, primitive.E{Key: "nativeTxHash", Value: params.TxDetail.NativeTxHash})
fields = append(fields, primitive.E{Key: "from", Value: params.TxDetail.From})
if params.Timestamp != nil {
fields = append(fields, primitive.E{Key: "timestamp", Value: params.Timestamp})
}
if params.TxDetail.Attribute != nil {
fields = append(fields, primitive.E{Key: "attribute", Value: params.TxDetail.Attribute})
}

View File

@ -26,7 +26,7 @@ func NewServer(logger *zap.Logger, port string, pprofEnabled bool, vaaController
}
app.Use(prometheus.Middleware)
ctrl := NewController(checks, logger)
ctrl := health.NewController(checks, logger)
api := app.Group("/api")
api.Get("/health", ctrl.HealthCheck)
api.Get("/ready", ctrl.ReadyCheck)

View File

@ -0,0 +1,109 @@
package queue
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/wormhole-foundation/wormhole-explorer/common/events"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
"go.uber.org/zap"
)
// VaaEvent represents VAA data to be handled by the pipeline.
type VaaEvent struct {
ID string `json:"id"`
ChainID sdk.ChainID `json:"emitterChain"`
EmitterAddress string `json:"emitterAddr"`
Sequence string `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Vaa []byte `json:"vaas"`
IndexedAt time.Time `json:"indexedAt"`
Timestamp *time.Time `json:"timestamp"`
UpdatedAt *time.Time `json:"updatedAt"`
TxHash string `json:"txHash"`
Version uint16 `json:"version"`
Revision uint16 `json:"revision"`
}
// NewVaaConverter returns a ConverterFunc that builds an Event from a VaaEvent message.
func NewVaaConverter(log *zap.Logger) ConverterFunc {
return func(msg string) (*Event, error) {
// unmarshal message to vaaEvent
var vaaEvent VaaEvent
err := json.Unmarshal([]byte(msg), &vaaEvent)
if err != nil {
return nil, err
}
return &Event{
TrackID: fmt.Sprintf("pipeline-%s", vaaEvent.ID),
ID: vaaEvent.ID,
ChainID: vaaEvent.ChainID,
EmitterAddress: vaaEvent.EmitterAddress,
Sequence: vaaEvent.Sequence,
Timestamp: vaaEvent.Timestamp,
TxHash: vaaEvent.TxHash,
}, nil
}
}
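// NewNotificationEvent returns a ConverterFunc that builds an Event from a blockchain-watcher notification (signed-vaa or log-message-published); other event types are skipped.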
func NewNotificationEvent(log *zap.Logger) ConverterFunc {
return func(msg string) (*Event, error) {
// unmarshal message to NotificationEvent
var notification events.NotificationEvent
err := json.Unmarshal([]byte(msg), &notification)
if err != nil {
return nil, err
}
if notification.Event != events.SignedVaaType && notification.Event != events.LogMessagePublishedMesageType {
log.Debug("Skip event type", zap.String("trackId", notification.TrackID), zap.String("type", notification.Event))
return nil, nil
}
switch notification.Event {
case events.SignedVaaType:
signedVaa, err := events.GetEventData[events.SignedVaa](&notification)
if err != nil {
log.Error("Error decoding signedVAA from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, nil
}
return &Event{
TrackID: notification.TrackID,
ID: signedVaa.ID,
ChainID: sdk.ChainID(signedVaa.EmitterChain),
EmitterAddress: signedVaa.EmitterAddress,
Sequence: strconv.FormatUint(signedVaa.Sequence, 10),
Timestamp: &signedVaa.Timestamp,
TxHash: signedVaa.TxHash,
}, nil
case events.LogMessagePublishedMesageType:
plm, err := events.GetEventData[events.LogMessagePublished](&notification)
if err != nil {
log.Error("Error decoding publishedLogMessage from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, nil
}
vaa, err := events.CreateUnsignedVAA(&plm)
if err != nil {
log.Error("Error creating unsigned vaa", zap.String("trackId", notification.TrackID), zap.Error(err))
return nil, err
}
return &Event{
TrackID: notification.TrackID,
ID: vaa.MessageID(),
ChainID: sdk.ChainID(plm.ChainID),
EmitterAddress: plm.Attributes.Sender,
Sequence: strconv.FormatUint(plm.Attributes.Sequence, 10),
Timestamp: &plm.BlockTime,
TxHash: plm.TxHash,
}, nil
}
return nil, nil
}
}

View File

@ -17,24 +17,29 @@ type SQSOption func(*SQS)
// SQS represents a VAA queue in SQS.
type SQS struct {
consumer *sqs_client.Consumer
ch chan ConsumerMessage
chSize int
wg sync.WaitGroup
metrics metrics.Metrics
logger *zap.Logger
consumer *sqs_client.Consumer
ch chan ConsumerMessage
converter ConverterFunc
chSize int
wg sync.WaitGroup
metrics metrics.Metrics
logger *zap.Logger
}
// FilterConsumeFunc is a function definition for filtering events.
type FilterConsumeFunc func(vaaEvent *VaaEvent) bool
// NewVaaSqs creates a VAA queue in SQS instances.
func NewVaaSqs(consumer *sqs_client.Consumer, metrics metrics.Metrics, logger *zap.Logger, opts ...SQSOption) *SQS {
// ConverterFunc converts an SQS message body into an Event.
type ConverterFunc func(string) (*Event, error)
// NewEventSqs creates an event queue backed by SQS instances.
func NewEventSqs(consumer *sqs_client.Consumer, converter ConverterFunc, metrics metrics.Metrics, logger *zap.Logger, opts ...SQSOption) *SQS {
s := &SQS{
consumer: consumer,
chSize: 10,
metrics: metrics,
logger: logger}
consumer: consumer,
chSize: 10,
metrics: metrics,
converter: converter,
logger: logger}
for _, opt := range opts {
opt(s)
}
@ -68,19 +73,18 @@ func (q *SQS) Consume(ctx context.Context) <-chan ConsumerMessage {
continue
}
// unmarshal message to vaaEvent
var vaaEvent VaaEvent
err = json.Unmarshal([]byte(sqsEvent.Message), &vaaEvent)
// unmarshal message to event
event, err := q.converter(sqsEvent.Message)
if err != nil {
q.logger.Error("Error decoding vaaEvent message from SQSEvent", zap.Error(err))
continue
}
if event == nil {
continue
}

q.metrics.IncVaaConsumedQueue(uint16(event.ChainID))
q.wg.Add(1)
q.ch <- &sqsConsumerMessage{
id: msg.ReceiptHandle,
data: &vaaEvent,
data: event,
wg: &q.wg,
logger: q.logger,
consumer: q.consumer,
@ -101,7 +105,7 @@ func (q *SQS) Close() {
}
type sqsConsumerMessage struct {
data *VaaEvent
data *Event
consumer *sqs_client.Consumer
wg *sync.WaitGroup
id *string
@ -110,7 +114,7 @@ type sqsConsumerMessage struct {
ctx context.Context
}
func (m *sqsConsumerMessage) Data() *VaaEvent {
func (m *sqsConsumerMessage) Data() *Event {
return m.data
}

View File

@ -1,40 +0,0 @@
package queue
import (
"context"
"time"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
)
type sqsEvent struct {
MessageID string `json:"MessageId"`
Message string `json:"Message"`
}
// VaaEvent represents a vaa data to be handle by the pipeline.
type VaaEvent struct {
ID string `json:"id"`
ChainID sdk.ChainID `json:"emitterChain"`
EmitterAddress string `json:"emitterAddr"`
Sequence string `json:"sequence"`
GuardianSetIndex uint32 `json:"guardianSetIndex"`
Vaa []byte `json:"vaas"`
IndexedAt time.Time `json:"indexedAt"`
Timestamp *time.Time `json:"timestamp"`
UpdatedAt *time.Time `json:"updatedAt"`
TxHash string `json:"txHash"`
Version uint16 `json:"version"`
Revision uint16 `json:"revision"`
}
// ConsumerMessage defition.
type ConsumerMessage interface {
Data() *VaaEvent
Done()
Failed()
IsExpired() bool
}
// VAAConsumeFunc is a function to consume VAAEvent.
type VAAConsumeFunc func(context.Context) <-chan ConsumerMessage

35 tx-tracker/queue/types.go Normal file
View File

@ -0,0 +1,35 @@
package queue
import (
"context"
"time"
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
)
type sqsEvent struct {
MessageID string `json:"MessageId"`
Message string `json:"Message"`
}
// Event represents event data to be handled.
type Event struct {
TrackID string
ID string
ChainID sdk.ChainID
EmitterAddress string
Sequence string
Timestamp *time.Time
TxHash string
}
// ConsumerMessage definition.
type ConsumerMessage interface {
Data() *Event
Done()
Failed()
IsExpired() bool
}
// ConsumeFunc is a function to consume Event.
type ConsumeFunc func(context.Context) <-chan ConsumerMessage