Merge to base branch
Commit: cda84a0aee
@@ -72,14 +72,14 @@ func addVaaCountCommand(parent *cobra.Command) {
 }
 
 func addVaaVolumeFromFileCommand(parent *cobra.Command) {
-    var input, output, prices, vaaPayloadParserURL string
+    var input, output, prices, vaaPayloadParserURL, p2pNetwork string
 
     //vaa-volume from csv file
     vaaVolumeFileCmd := &cobra.Command{
         Use:   "file",
         Short: "Generate volume metrics from a VAA csv file",
         Run: func(_ *cobra.Command, _ []string) {
-            metrics.RunVaaVolumeFromFile(input, output, prices, vaaPayloadParserURL)
+            metrics.RunVaaVolumeFromFile(input, output, prices, vaaPayloadParserURL, p2pNetwork)
         },
     }
 
@@ -96,17 +96,21 @@ func addVaaVolumeFromFileCommand(parent *cobra.Command) {
     vaaVolumeFileCmd.Flags().StringVar(&vaaPayloadParserURL, "vaa-payload-parser-url", "", "VAA payload parser URL")
     vaaVolumeFileCmd.MarkFlagRequired("vaa-payload-parser-url")
 
+    //p2p-network flag
+    vaaVolumeFileCmd.Flags().StringVar(&p2pNetwork, "p2p-network", "", "P2P network")
+    vaaVolumeFileCmd.MarkFlagRequired("p2p-network")
+
     parent.AddCommand(vaaVolumeFileCmd)
 }
 
 func addVaaVolumeFromMongoCommand(parent *cobra.Command) {
-    var mongoUri, mongoDb, output, prices, vaaPayloadParserURL string
+    var mongoUri, mongoDb, output, prices, vaaPayloadParserURL, p2pNetwork string
     //vaa-volume from MongoDB
     vaaVolumeMongoCmd := &cobra.Command{
         Use:   "mongo",
         Short: "Generate volume metrics from MongoDB",
         Run: func(_ *cobra.Command, _ []string) {
-            metrics.RunVaaVolumeFromMongo(mongoUri, mongoDb, output, prices, vaaPayloadParserURL)
+            metrics.RunVaaVolumeFromMongo(mongoUri, mongoDb, output, prices, vaaPayloadParserURL, p2pNetwork)
         },
     }
 
@@ -124,6 +128,10 @@ func addVaaVolumeFromMongoCommand(parent *cobra.Command) {
     vaaVolumeMongoCmd.Flags().StringVar(&vaaPayloadParserURL, "vaa-payload-parser-url", "", "VAA payload parser URL")
     vaaVolumeMongoCmd.MarkFlagRequired("vaa-payload-parser-url")
 
+    //p2p-network flag
+    vaaVolumeMongoCmd.Flags().StringVar(&p2pNetwork, "p2p-network", "", "P2P network")
+    vaaVolumeMongoCmd.MarkFlagRequired("p2p-network")
+
     parent.AddCommand(vaaVolumeMongoCmd)
 
 }
@@ -141,16 +149,20 @@ func addVaaVolumeCommand(parent *cobra.Command) {
 }
 
 func addPricesCommand(root *cobra.Command) {
-    var output string
+    var output, p2pNetwork string
     vaaCountCmd := &cobra.Command{
         Use:   "history",
         Short: "Generate notional price history for symbol",
         Run: func(_ *cobra.Command, _ []string) {
-            prices.RunPrices(output)
+            prices.RunPrices(output, p2pNetwork)
         },
     }
     // output flag
     vaaCountCmd.Flags().StringVar(&output, "output", "", "path to output file")
     vaaCountCmd.MarkFlagRequired("output")
 
+    //p2p-network flag
+    vaaCountCmd.Flags().StringVar(&p2pNetwork, "p2p-network", "", "P2P network")
+    vaaCountCmd.MarkFlagRequired("p2p-network")
     root.AddCommand(vaaCountCmd)
 }
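The three commands above now require a --p2p-network flag alongside their existing flags. A minimal, hypothetical sketch of how such a cobra command receives the flag; the binary name, the main wrapper, and the flag values are illustrative, not part of this commit:

    package main

    import (
        "fmt"

        "github.com/spf13/cobra"
    )

    func main() {
        var input, p2pNetwork string

        fileCmd := &cobra.Command{
            Use:   "file",
            Short: "Generate volume metrics from a VAA csv file",
            Run: func(_ *cobra.Command, _ []string) {
                // the real command would call metrics.RunVaaVolumeFromFile(...)
                fmt.Println("input:", input, "p2p-network:", p2pNetwork)
            },
        }
        fileCmd.Flags().StringVar(&input, "input", "", "path to input file")
        fileCmd.Flags().StringVar(&p2pNetwork, "p2p-network", "", "P2P network")
        fileCmd.MarkFlagRequired("p2p-network")

        root := &cobra.Command{Use: "analytics"}
        root.AddCommand(fileCmd)

        // equivalent to: analytics file --input vaas.csv --p2p-network mainnet
        root.SetArgs([]string{"file", "--input", "vaas.csv", "--p2p-network", "mainnet"})
        if err := root.Execute(); err != nil {
            fmt.Println(err)
        }
    }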
@@ -22,15 +22,17 @@ type VaaConverter struct {
     PriceCache *prices.CoinPricesCache
     Metrics metrics.Metrics
     GetTransferredTokenByVaa token.GetTransferredTokenByVaa
+    TokenProvider *domain.TokenProvider
 }
 
-func NewVaaConverter(priceCache *prices.CoinPricesCache, GetTransferredTokenByVaa token.GetTransferredTokenByVaa) *VaaConverter {
+func NewVaaConverter(priceCache *prices.CoinPricesCache, GetTransferredTokenByVaa token.GetTransferredTokenByVaa, tokenProvider *domain.TokenProvider) *VaaConverter {
     return &VaaConverter{
         MissingTokens:            make(map[sdk.Address]sdk.ChainID),
         MissingTokensCounter:     make(map[sdk.Address]int),
         PriceCache:               priceCache,
         Metrics:                  metrics.NewNoopMetrics(),
         GetTransferredTokenByVaa: GetTransferredTokenByVaa,
+        TokenProvider:            tokenProvider,
     }
 }
 
@@ -47,7 +49,7 @@ func (c *VaaConverter) Convert(ctx context.Context, vaaBytes []byte) (string, er
     }
 
     // Look up token metadata
-    tokenMetadata, ok := domain.GetTokenByAddress(transferredToken.TokenChain, transferredToken.TokenAddress.String())
+    tokenMetadata, ok := c.TokenProvider.GetTokenByAddress(transferredToken.TokenChain, transferredToken.TokenAddress.String())
     if !ok {
 
         // if not found, add to missing tokens
@@ -74,6 +76,7 @@ func (c *VaaConverter) Convert(ctx context.Context, vaaBytes []byte) (string, er
         },
         Metrics:          c.Metrics,
         TransferredToken: transferredToken,
+        TokenProvider:    c.TokenProvider,
     }
 
     var err error
@@ -12,6 +12,7 @@ import (
     "github.com/wormhole-foundation/wormhole-explorer/analytics/cmd/token"
     "github.com/wormhole-foundation/wormhole-explorer/analytics/prices"
     "github.com/wormhole-foundation/wormhole-explorer/common/client/parser"
+    "github.com/wormhole-foundation/wormhole-explorer/common/domain"
     "github.com/wormhole-foundation/wormhole-explorer/common/logger"
     "go.uber.org/zap"
 )
@@ -22,7 +23,7 @@ type LineParser struct {
 
 // read a csv file with VAAs and convert into a decoded csv file
 // ready to upload to the database
-func RunVaaVolumeFromFile(inputFile, outputFile, pricesFile, vaaPayloadParserURL string) {
+func RunVaaVolumeFromFile(inputFile, outputFile, pricesFile, vaaPayloadParserURL, p2pNetwork string) {
 
     ctx := context.Background()
     // build logger
@@ -39,6 +40,9 @@ func RunVaaVolumeFromFile(inputFile, outputFile, pricesFile, vaaPayloadParserURL
     // create a token resolver
     tokenResolver := token.NewTokenResolver(parserVAAAPIClient, logger)
 
+    // create a token provider
+    tokenProvider := domain.NewTokenProvider(p2pNetwork)
+
     // open input file
     f, err := os.Open(inputFile)
     if err != nil {
@@ -65,7 +69,7 @@ func RunVaaVolumeFromFile(inputFile, outputFile, pricesFile, vaaPayloadParserURL
     logger.Info("loading historical prices...")
     priceCache := prices.NewCoinPricesCache(pricesFile)
     priceCache.InitCache()
-    converter := NewVaaConverter(priceCache, tokenResolver.GetTransferredTokenByVaa)
+    converter := NewVaaConverter(priceCache, tokenResolver.GetTransferredTokenByVaa, tokenProvider)
     lp := NewLineParser(converter)
     logger.Info("loaded historical prices")
 
@@ -10,6 +10,7 @@ import (
     "github.com/wormhole-foundation/wormhole-explorer/analytics/prices"
     "github.com/wormhole-foundation/wormhole-explorer/common/client/parser"
     "github.com/wormhole-foundation/wormhole-explorer/common/dbutil"
+    "github.com/wormhole-foundation/wormhole-explorer/common/domain"
     "github.com/wormhole-foundation/wormhole-explorer/common/logger"
     "github.com/wormhole-foundation/wormhole-explorer/common/repository"
     "go.uber.org/zap"
@@ -17,7 +18,7 @@ import (
 
 // read a csv file with VAAs and convert into a decoded csv file
 // ready to upload to the database
-func RunVaaVolumeFromMongo(mongoUri, mongoDb, outputFile, pricesFile, vaaPayloadParserURL string) {
+func RunVaaVolumeFromMongo(mongoUri, mongoDb, outputFile, pricesFile, vaaPayloadParserURL, p2pNetwork string) {
 
     rootCtx := context.Background()
 
@@ -44,6 +45,9 @@ func RunVaaVolumeFromMongo(mongoUri, mongoDb, outputFile, pricesFile, vaaPayload
     // create a token resolver
     tokenResolver := token.NewTokenResolver(parserVAAAPIClient, logger)
 
+    // create a token provider
+    tokenProvider := domain.NewTokenProvider(p2pNetwork)
+
     // create missing tokens file
     missingTokensFile := "missing_tokens.csv"
     fmissingTokens, err := os.Create(missingTokensFile)
@@ -63,7 +67,7 @@ func RunVaaVolumeFromMongo(mongoUri, mongoDb, outputFile, pricesFile, vaaPayload
     logger.Info("loading historical prices...")
     priceCache := prices.NewCoinPricesCache(pricesFile)
     priceCache.InitCache()
-    converter := NewVaaConverter(priceCache, tokenResolver.GetTransferredTokenByVaa)
+    converter := NewVaaConverter(priceCache, tokenResolver.GetTransferredTokenByVaa, tokenProvider)
     logger.Info("loaded historical prices")
 
     endTime := time.Now()
@@ -14,7 +14,7 @@ import (
 // go througth the symbol list provided by wormhole
 // and fetch the history from coingecko
 // and save it to a file
-func RunPrices(output string) {
+func RunPrices(output, p2pNetwork string) {
 
     // build logger
     logger := logger.New("wormhole-explorer-analytics")
@@ -29,7 +29,9 @@ func RunPrices(output string) {
     }
     defer pricesOutput.Close()
 
-    tokens := domain.GetAllTokens()
+    // create token provider
+    tokenProvider := domain.NewTokenProvider(p2pNetwork)
+    tokens := tokenProvider.GetAllTokens()
     logger.Info("found tokens", zap.Int("count", len(tokens)))
     for index, token := range tokens {
         logger.Info("processing token",
@@ -27,6 +27,7 @@ import (
     "github.com/wormhole-foundation/wormhole-explorer/common/client/parser"
     sqs_client "github.com/wormhole-foundation/wormhole-explorer/common/client/sqs"
     "github.com/wormhole-foundation/wormhole-explorer/common/dbutil"
+    "github.com/wormhole-foundation/wormhole-explorer/common/domain"
     health "github.com/wormhole-foundation/wormhole-explorer/common/health"
     "github.com/wormhole-foundation/wormhole-explorer/common/logger"
     "go.mongodb.org/mongo-driver/mongo"
@@ -97,19 +98,28 @@ func Run() {
     // create a token resolver
     tokenResolver := token.NewTokenResolver(parserVAAAPIClient, logger)
 
+    // create a token provider
+    tokenProvider := domain.NewTokenProvider(config.P2pNetwork)
+
     // create a metrics instance
     logger.Info("initializing metrics instance...")
     metric, err := metric.New(rootCtx, db.Database, influxCli, config.InfluxOrganization, config.InfluxBucketInfinite,
-        config.InfluxBucket30Days, config.InfluxBucket24Hours, notionalCache, metrics, tokenResolver.GetTransferredTokenByVaa, logger)
+        config.InfluxBucket30Days, config.InfluxBucket24Hours, notionalCache, metrics, tokenResolver.GetTransferredTokenByVaa, tokenProvider, logger)
     if err != nil {
         logger.Fatal("failed to create metrics instance", zap.Error(err))
     }
 
-    // create and start a consumer.
-    logger.Info("initializing metrics consumer...")
-    vaaConsumeFunc := newVAAConsume(rootCtx, config, logger)
-    consumer := consumer.New(vaaConsumeFunc, metric.Push, logger, config.P2pNetwork)
-    consumer.Start(rootCtx)
+    // create and start a vaa consumer.
+    logger.Info("initializing vaa consumer...")
+    vaaConsumeFunc := newVAAConsumeFunc(rootCtx, config, logger)
+    vaaConsumer := consumer.New(vaaConsumeFunc, metric.Push, logger, config.P2pNetwork)
+    vaaConsumer.Start(rootCtx)
+
+    // create and start a notification consumer.
+    logger.Info("initializing notification consumer...")
+    notificationConsumeFunc := newNotificationConsumeFunc(rootCtx, config, logger)
+    notificationConsumer := consumer.New(notificationConsumeFunc, metric.Push, logger, config.P2pNetwork)
+    notificationConsumer.Start(rootCtx)
 
     // create and start server.
     logger.Info("initializing infrastructure server...")
@@ -146,23 +156,34 @@ func Run() {
 }
 
 // Creates a callbacks depending on whether the execution is local (memory queue) or not (SQS queue)
-func newVAAConsume(appCtx context.Context, config *config.Configuration, logger *zap.Logger) queue.VAAConsumeFunc {
-    sqsConsumer, err := newSQSConsumer(appCtx, config)
+func newVAAConsumeFunc(appCtx context.Context, config *config.Configuration, logger *zap.Logger) queue.ConsumeFunc {
+    sqsConsumer, err := newSQSConsumer(appCtx, config, config.PipelineSQSUrl)
     if err != nil {
         logger.Fatal("failed to create sqs consumer", zap.Error(err))
     }
 
-    vaaQueue := queue.NewVaaSqs(sqsConsumer, logger)
+    vaaQueue := queue.NewEventSqs(sqsConsumer, queue.NewVaaConverter(logger), logger)
     return vaaQueue.Consume
 }
 
-func newSQSConsumer(appCtx context.Context, config *config.Configuration) (*sqs_client.Consumer, error) {
+func newNotificationConsumeFunc(ctx context.Context, cfg *config.Configuration, logger *zap.Logger) queue.ConsumeFunc {
+    sqsConsumer, err := newSQSConsumer(ctx, cfg, cfg.NotificationsSQSUrl)
+    if err != nil {
+        logger.Fatal("failed to create sqs consumer", zap.Error(err))
+    }
+
+    vaaQueue := queue.NewEventSqs(sqsConsumer, queue.NewNotificationEvent(logger), logger)
+    return vaaQueue.Consume
+}
+
+func newSQSConsumer(appCtx context.Context, config *config.Configuration, sqsUrl string) (*sqs_client.Consumer, error) {
     awsconfig, err := newAwsConfig(appCtx, config)
     if err != nil {
         return nil, err
     }
 
-    return sqs_client.NewConsumer(awsconfig, config.SQSUrl,
+    return sqs_client.NewConsumer(awsconfig, sqsUrl,
         sqs_client.WithMaxMessages(10),
         sqs_client.WithVisibilityTimeout(120))
 }
@@ -211,7 +232,8 @@ func newHealthChecks(
     }
 
     healthChecks := []health.Check{
-        health.SQS(awsConfig, config.SQSUrl),
+        health.SQS(awsConfig, config.PipelineSQSUrl),
+        health.SQS(awsConfig, config.NotificationsSQSUrl),
         health.Influx(influxCli),
         health.Mongo(db),
     }
@@ -94,7 +94,7 @@ func (r *TokenResolver) GetTransferredTokenByVaa(ctx context.Context, vaa *sdk.V
 
     token, err := createToken(result.StandardizedProperties, vaa.EmitterChain)
     if err != nil {
-        r.logger.Error("Creating transferred token",
+        r.logger.Debug("Creating transferred token",
             zap.String("vaaId", vaa.MessageID()),
             zap.Error(err))
         return nil, ErrUnknownToken
@@ -17,7 +17,8 @@ type Configuration struct {
     AwsAccessKeyID     string `env:"AWS_ACCESS_KEY_ID"`
     AwsSecretAccessKey string `env:"AWS_SECRET_ACCESS_KEY"`
     AwsRegion          string `env:"AWS_REGION"`
-    SQSUrl             string `env:"SQS_URL"`
+    PipelineSQSUrl      string `env:"PIPELINE_SQS_URL"`
+    NotificationsSQSUrl string `env:"NOTIFICATIONS_SQS_URL"`
     InfluxUrl          string `env:"INFLUX_URL"`
     InfluxToken        string `env:"INFLUX_TOKEN"`
     InfluxOrganization string `env:"INFLUX_ORGANIZATION"`
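Because SQS_URL is replaced by PIPELINE_SQS_URL and a second queue is introduced, deployments now have to provide both URLs. A hedged sketch of a local-only setup; the queue URLs, account id, and region below are placeholders, not values from this change:

    package main

    import "os"

    // setLocalEnv sets the two queue URLs the analytics service now reads
    // through the `env` tags on Configuration. Placeholder values only.
    func setLocalEnv() {
        os.Setenv("PIPELINE_SQS_URL", "https://sqs.us-east-1.amazonaws.com/000000000000/pipeline")
        os.Setenv("NOTIFICATIONS_SQS_URL", "https://sqs.us-east-1.amazonaws.com/000000000000/notifications")
        os.Setenv("AWS_REGION", "us-east-1")
    }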
@@ -11,14 +11,14 @@ import (
 
 // Consumer consumer struct definition.
 type Consumer struct {
-    consume queue.VAAConsumeFunc
+    consume queue.ConsumeFunc
     pushMetric metric.MetricPushFunc
     logger *zap.Logger
     p2pNetwork string
 }
 
 // New creates a new vaa consumer.
-func New(consume queue.VAAConsumeFunc, pushMetric metric.MetricPushFunc, logger *zap.Logger, p2pNetwork string) *Consumer {
+func New(consume queue.ConsumeFunc, pushMetric metric.MetricPushFunc, logger *zap.Logger, p2pNetwork string) *Consumer {
     return &Consumer{consume: consume, pushMetric: pushMetric, logger: logger, p2pNetwork: p2pNetwork}
 }
 
@@ -44,7 +44,7 @@ func (c *Consumer) Start(ctx context.Context) {
     }
 
     // push vaa metrics.
-    err = c.pushMetric(ctx, vaa)
+    err = c.pushMetric(ctx, &metric.Params{TrackID: event.TrackID, Vaa: vaa, VaaIsSigned: event.VaaIsSigned})
     if err != nil {
         msg.Failed()
         continue
@@ -1,46 +0,0 @@
-package infrastructure
-
-import (
-    "fmt"
-
-    "github.com/gofiber/fiber/v2"
-    health "github.com/wormhole-foundation/wormhole-explorer/common/health"
-    "go.uber.org/zap"
-)
-
-// Controller definition.
-type Controller struct {
-    checks []health.Check
-    logger *zap.Logger
-}
-
-// NewController creates a Controller instance.
-func NewController(checks []health.Check, logger *zap.Logger) *Controller {
-    return &Controller{checks: checks, logger: logger}
-}
-
-// HealthCheck handler for the endpoint /health.
-func (c *Controller) HealthCheck(ctx *fiber.Ctx) error {
-    return ctx.JSON(struct {
-        Status string `json:"status"`
-    }{Status: "OK"})
-}
-
-// ReadyCheck handler for the endpoint /ready.
-func (c *Controller) ReadyCheck(ctx *fiber.Ctx) error {
-    rctx := ctx.Context()
-    requestID := fmt.Sprintf("%v", rctx.Value("requestid"))
-    for _, check := range c.checks {
-        if err := check(rctx); err != nil {
-            c.logger.Error("Ready check failed", zap.Error(err), zap.String("requestID", requestID))
-            return ctx.Status(fiber.StatusInternalServerError).JSON(struct {
-                Ready string `json:"ready"`
-                Error string `json:"error"`
-            }{Ready: "NO", Error: err.Error()})
-        }
-    }
-    return ctx.Status(fiber.StatusOK).JSON(struct {
-        Ready string `json:"ready"`
-    }{Ready: "OK"})
-
-}
@@ -4,7 +4,6 @@ import (
     "github.com/ansrivas/fiberprometheus/v2"
     "github.com/gofiber/fiber/v2"
     "github.com/gofiber/fiber/v2/middleware/pprof"
-    "github.com/wormhole-foundation/wormhole-explorer/analytics/http/infrastructure"
     "github.com/wormhole-foundation/wormhole-explorer/analytics/http/vaa"
     health "github.com/wormhole-foundation/wormhole-explorer/common/health"
     "go.uber.org/zap"
@@ -29,7 +28,7 @@ func NewServer(logger *zap.Logger, port string, pprofEnabled bool, vaaController
         app.Use(pprof.New())
     }
 
-    ctrl := infrastructure.NewController(checks, logger)
+    ctrl := health.NewController(checks, logger)
     api := app.Group("/api")
     api.Get("/health", ctrl.HealthCheck)
     api.Get("/ready", ctrl.ReadyCheck)
@@ -1,6 +1,8 @@
 package vaa
 
 import (
+    "fmt"
+
     "github.com/gofiber/fiber/v2"
     "github.com/wormhole-foundation/wormhole-explorer/analytics/metric"
     sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
@@ -44,7 +46,8 @@ func (c *Controller) PushVAAMetrics(ctx *fiber.Ctx) error {
         return err
     }
 
-    err = c.pushMetric(ctx.Context(), vaa)
+    trackID := fmt.Sprintf("controller-%s", vaa.MessageID())
+    err = c.pushMetric(ctx.Context(), &metric.Params{TrackID: trackID, Vaa: vaa})
     if err != nil {
         c.logger.Error("Error pushing metric", zap.Error(err))
         return err
@@ -38,6 +38,7 @@ type Metric struct {
     notionalCache wormscanNotionalCache.NotionalLocalCacheReadable
     metrics metrics.Metrics
     getTransferredTokenByVaa token.GetTransferredTokenByVaa
+    tokenProvider *domain.TokenProvider
     logger *zap.Logger
 }
 
@@ -53,6 +54,7 @@ func New(
     notionalCache wormscanNotionalCache.NotionalLocalCacheReadable,
     metrics metrics.Metrics,
     getTransferredTokenByVaa token.GetTransferredTokenByVaa,
+    tokenProvider *domain.TokenProvider,
     logger *zap.Logger,
 ) (*Metric, error) {
 
@@ -72,33 +74,47 @@ func New(
     notionalCache: notionalCache,
     metrics: metrics,
     getTransferredTokenByVaa: getTransferredTokenByVaa,
+    tokenProvider: tokenProvider,
     }
     return &m, nil
 }
 
 // Push implement MetricPushFunc definition.
-func (m *Metric) Push(ctx context.Context, vaa *sdk.VAA) error {
+func (m *Metric) Push(ctx context.Context, params *Params) error {
 
-    err1 := m.vaaCountMeasurement(ctx, vaa)
+    var err1, err2, err3, err4 error
 
-    err2 := m.vaaCountAllMessagesMeasurement(ctx, vaa)
+    isVaaSigned := params.VaaIsSigned
 
-    transferredToken, err := m.getTransferredTokenByVaa(ctx, vaa)
+    if isVaaSigned {
+        err1 = m.vaaCountMeasurement(ctx, params)
+
+        err2 = m.vaaCountAllMessagesMeasurement(ctx, params)
+    }
+
+    if params.Vaa.EmitterChain != sdk.ChainIDPythNet {
+
+        transferredToken, err := m.getTransferredTokenByVaa(ctx, params.Vaa)
     if err != nil {
-        m.logger.Warn("failed to obtain transferred token for this VAA",
-            zap.String("vaaId", vaa.MessageID()),
-            zap.Error(err))
         if err != token.ErrUnknownToken {
+            m.logger.Error("Failed to obtain transferred token for this VAA",
+                zap.String("trackId", params.TrackID),
+                zap.String("vaaId", params.Vaa.MessageID()),
+                zap.Error(err))
             return err
         }
     }
 
-    err3 := m.volumeMeasurement(ctx, vaa, transferredToken.Clone())
+    if transferredToken != nil {
 
-    err4 := upsertTransferPrices(
+        if isVaaSigned {
+            err3 = m.volumeMeasurement(ctx, params, transferredToken.Clone())
+        }
+
+        err4 = upsertTransferPrices(
         ctx,
         m.logger,
-        vaa,
+        params.Vaa,
         m.transferPrices,
         func(tokenID string, timestamp time.Time) (decimal.Decimal, error) {
 
@@ -109,13 +125,29 @@ func (m *Metric) Push(ctx context.Context, vaa *sdk.VAA) error {
            return priceData.NotionalUsd, nil
         },
         transferredToken.Clone(),
+        m.tokenProvider,
     )
 
+    } else {
+
+        m.logger.Warn("Cannot obtain transferred token for this VAA",
+            zap.String("trackId", params.TrackID),
+            zap.String("vaaId", params.Vaa.MessageID()),
+        )
+    }
+    }
 
     //TODO if we had go 1.20, we could just use `errors.Join(err1, err2, err3, ...)` here.
     if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
         return fmt.Errorf("err1=%w, err2=%w, err3=%w err4=%w", err1, err2, err3, err4)
     }
 
+    if params.Vaa.EmitterChain != sdk.ChainIDPythNet {
+        m.logger.Info("Transaction processed successfully",
+            zap.String("trackId", params.TrackID),
+            zap.String("vaaId", params.Vaa.MessageID()))
+    }
+
     return nil
 }
 
@@ -135,10 +167,10 @@ func (m *Metric) Close() {
 }
 
 // vaaCountMeasurement creates a new point for the `vaa_count` measurement.
-func (m *Metric) vaaCountMeasurement(ctx context.Context, vaa *sdk.VAA) error {
+func (m *Metric) vaaCountMeasurement(ctx context.Context, p *Params) error {
 
     // Create a new point
-    point, err := MakePointForVaaCount(vaa)
+    point, err := MakePointForVaaCount(p.Vaa)
     if err != nil {
         return fmt.Errorf("failed to generate data point for vaa count measurement: %w", err)
     }
@@ -150,9 +182,9 @@ func (m *Metric) vaaCountMeasurement(ctx context.Context, vaa *sdk.VAA) error {
     // Write the point to influx
     err = m.apiBucket30Days.WritePoint(ctx, point)
     if err != nil {
-        m.logger.Error("failed to write metric",
+        m.logger.Error("Failed to write metric",
             zap.String("measurement", point.Name()),
-            zap.Uint16("chain_id", uint16(vaa.EmitterChain)),
+            zap.Uint16("chain_id", uint16(p.Vaa.EmitterChain)),
             zap.Error(err),
         )
         m.metrics.IncFailedMeasurement(VaaCountMeasurement)
@@ -164,15 +196,16 @@ func (m *Metric) vaaCountMeasurement(ctx context.Context, vaa *sdk.VAA) error {
 }
 
 // vaaCountAllMessagesMeasurement creates a new point for the `vaa_count_all_messages` measurement.
-func (m *Metric) vaaCountAllMessagesMeasurement(ctx context.Context, vaa *sdk.VAA) error {
+func (m *Metric) vaaCountAllMessagesMeasurement(ctx context.Context, params *Params) error {
 
     // Quite often we get VAAs that are older than 24 hours.
     // We do not want to generate metrics for those, and moreover influxDB
     // returns an error when we try to do so.
-    if time.Since(vaa.Timestamp) > time.Hour*24 {
+    if time.Since(params.Vaa.Timestamp) > time.Hour*24 {
         m.logger.Debug("vaa is older than 24 hours, skipping",
-            zap.Time("timestamp", vaa.Timestamp),
-            zap.String("vaaId", vaa.UniqueID()),
+            zap.String("trackId", params.TrackID),
+            zap.Time("timestamp", params.Vaa.Timestamp),
+            zap.String("vaaId", params.Vaa.UniqueID()),
         )
         return nil
     }
@@ -180,16 +213,16 @@ func (m *Metric) vaaCountAllMessagesMeasurement(ctx context.Context, vaa *sdk.VA
     // Create a new point
     point := influxdb2.
         NewPointWithMeasurement(VaaAllMessagesMeasurement).
-        AddTag("chain_id", strconv.Itoa(int(vaa.EmitterChain))).
+        AddTag("chain_id", strconv.Itoa(int(params.Vaa.EmitterChain))).
         AddField("count", 1).
-        SetTime(generateUniqueTimestamp(vaa))
+        SetTime(generateUniqueTimestamp(params.Vaa))
 
     // Write the point to influx
     err := m.apiBucket24Hours.WritePoint(ctx, point)
     if err != nil {
-        m.logger.Error("failed to write metric",
+        m.logger.Error("Failed to write metric",
            zap.String("measurement", VaaAllMessagesMeasurement),
-            zap.Uint16("chain_id", uint16(vaa.EmitterChain)),
+            zap.Uint16("chain_id", uint16(params.Vaa.EmitterChain)),
            zap.Error(err),
        )
        m.metrics.IncFailedMeasurement(VaaAllMessagesMeasurement)
@@ -201,12 +234,12 @@ func (m *Metric) vaaCountAllMessagesMeasurement(ctx context.Context, vaa *sdk.VA
 }
 
 // volumeMeasurement creates a new point for the `vaa_volume_v2` measurement.
-func (m *Metric) volumeMeasurement(ctx context.Context, vaa *sdk.VAA, token *token.TransferredToken) error {
+func (m *Metric) volumeMeasurement(ctx context.Context, params *Params, token *token.TransferredToken) error {
 
     // Generate a data point for the volume metric
     p := MakePointForVaaVolumeParams{
         Logger: m.logger,
-        Vaa: vaa,
+        Vaa: params.Vaa,
         TokenPriceFunc: func(tokenID string, timestamp time.Time) (decimal.Decimal, error) {
 
             priceData, err := m.notionalCache.Get(tokenID)
 
@@ -218,6 +251,7 @@ func (m *Metric) volumeMeasurement(ctx context.Context, vaa *sdk.VAA, token *tok
         },
         Metrics: m.metrics,
         TransferredToken: token,
+        TokenProvider: m.tokenProvider,
     }
     point, err := MakePointForVaaVolume(&p)
     if err != nil {
 
@@ -234,8 +268,9 @@ func (m *Metric) volumeMeasurement(ctx context.Context, vaa *sdk.VAA, token *tok
         m.metrics.IncFailedMeasurement(VaaVolumeMeasurement)
         return err
     }
-    m.logger.Info("Wrote a data point for the volume metric",
-        zap.String("vaaId", vaa.MessageID()),
+    m.logger.Debug("Wrote a data point for the volume metric",
+        zap.String("vaaId", params.Vaa.MessageID()),
+        zap.String("trackId", params.TrackID),
         zap.String("measurement", point.Name()),
         zap.Any("tags", point.TagList()),
         zap.Any("fields", point.FieldList()),
@@ -283,6 +318,9 @@ type MakePointForVaaVolumeParams struct {
 
     // TransferredToken is the token that was transferred in the VAA.
     TransferredToken *token.TransferredToken
 
+    // TokenProvider is used to obtain token metadata.
+    TokenProvider *domain.TokenProvider
 }
 
 // MakePointForVaaVolume builds the InfluxDB volume metric for a given VAA
@@ -299,7 +337,7 @@ func MakePointForVaaVolume(params *MakePointForVaaVolumeParams) (*write.Point, e
     // Do not generate this metric when the emitter chain is unset
     if params.Vaa.EmitterChain.String() == sdk.ChainIDUnset.String() {
         if params.Logger != nil {
-            params.Logger.Warn("emitter chain is unset",
+            params.Logger.Warn("Emitter chain is unset",
                 zap.String("vaaId", params.Vaa.MessageID()),
                 zap.Uint16("emitterChain", uint16(params.Vaa.EmitterChain)),
             )
@@ -310,7 +348,7 @@ func MakePointForVaaVolume(params *MakePointForVaaVolumeParams) (*write.Point, e
     // Do not generate this metric when the TransferredToken is undefined
     if params.TransferredToken == nil {
         if params.Logger != nil {
-            params.Logger.Warn("transferred token is undefined",
+            params.Logger.Warn("Transferred token is undefined",
                 zap.String("vaaId", params.Vaa.MessageID()),
             )
         }
@@ -335,7 +373,7 @@ func MakePointForVaaVolume(params *MakePointForVaaVolumeParams) (*write.Point, e
     // Get the token metadata
     //
     // This is complementary data about the token that is not present in the VAA itself.
-    tokenMeta, ok := domain.GetTokenByAddress(params.TransferredToken.TokenChain, params.TransferredToken.TokenAddress.String())
+    tokenMeta, ok := params.TokenProvider.GetTokenByAddress(params.TransferredToken.TokenChain, params.TransferredToken.TokenAddress.String())
     if !ok {
         params.Metrics.IncMissingToken(params.TransferredToken.TokenChain.String(), params.TransferredToken.TokenAddress.String())
         // We don't have metadata for this token, so we can't compute the volume-related fields
 
@@ -346,6 +384,12 @@ func MakePointForVaaVolume(params *MakePointForVaaVolumeParams) (*write.Point, e
         //
         // Moreover, many flux queries depend on the existence of the `volume` field,
         // and would break if we had measurements without it.
+        params.Logger.Warn("Cannot obtain this token",
+            zap.String("vaaId", params.Vaa.MessageID()),
+            zap.String("tokenAddress", params.TransferredToken.TokenAddress.String()),
+            zap.Uint16("tokenChain", uint16(params.TransferredToken.TokenChain)),
+            zap.Any("tokenMetadata", tokenMeta),
+        )
         point.AddField("volume", uint64(0))
         return point, nil
     }
 
@@ -367,7 +411,7 @@ func MakePointForVaaVolume(params *MakePointForVaaVolumeParams) (*write.Point, e
     if err != nil {
         params.Metrics.IncMissingNotional(tokenMeta.Symbol.String())
         if params.Logger != nil {
-            params.Logger.Warn("failed to obtain notional for this token",
+            params.Logger.Warn("Failed to obtain notional for this token",
                 zap.String("vaaId", params.Vaa.MessageID()),
                 zap.String("tokenAddress", params.TransferredToken.TokenAddress.String()),
                 zap.Uint16("tokenChain", uint16(params.TransferredToken.TokenChain)),
@@ -29,6 +29,14 @@ type TransferPriceDoc struct {
     TokenAmount string `bson:"tokenAmount"`
     // UsdAmount is the value in USD of the token being transferred.
     UsdAmount string `bson:"usdAmount"`
+    // TokenChain is the chain ID of the token being transferred.
+    TokenChain uint16 `bson:"tokenChain"`
+    // TokenAddress is the address of the token being transferred.
+    TokenAddress string `bson:"tokenAddress"`
+    // CoinGeckoID is the CoinGecko ID of the token being transferred.
+    CoinGeckoID string `bson:"coinGeckoId"`
+    // UpdatedAt is the timestamp the document was updated.
+    UpdatedAt time.Time `bson:"updatedAt"`
 }
 
 func upsertTransferPrices(
@@ -38,6 +46,7 @@ func upsertTransferPrices(
     transferPrices *mongo.Collection,
     tokenPriceFunc func(tokenID string, timestamp time.Time) (decimal.Decimal, error),
     transferredToken *token.TransferredToken,
+    tokenProvider *domain.TokenProvider,
 ) error {
 
     // Do not generate this metric for PythNet VAAs
@@ -53,7 +62,7 @@ func upsertTransferPrices(
     // Get the token metadata
     //
     // This is complementary data about the token that is not present in the VAA itself.
-    tokenMeta, ok := domain.GetTokenByAddress(transferredToken.TokenChain, transferredToken.TokenAddress.String())
+    tokenMeta, ok := tokenProvider.GetTokenByAddress(transferredToken.TokenChain, transferredToken.TokenAddress.String())
     if !ok {
         return nil
     }
 
@@ -92,10 +101,14 @@ func upsertTransferPrices(
         SymbolPriceUsd: notionalUSD.Truncate(8).String(),
         TokenAmount: tokenAmount.Truncate(8).String(),
         UsdAmount: usdAmount.Truncate(8).String(),
+        TokenChain: uint16(transferredToken.TokenChain),
+        TokenAddress: transferredToken.TokenAddress.String(),
+        CoinGeckoID: tokenMeta.CoingeckoID,
+        UpdatedAt: time.Now(),
         },
     }
     _, err = transferPrices.UpdateByID(
-        context.Background(),
+        ctx,
         vaa.MessageID(),
         update,
         options.Update().SetUpsert(true),
@@ -6,5 +6,11 @@ import (
     "github.com/wormhole-foundation/wormhole/sdk/vaa"
 )
 
+type Params struct {
+    TrackID     string
+    Vaa         *vaa.VAA
+    VaaIsSigned bool
+}
+
 // MetricPushFunc is a function to push metrics
-type MetricPushFunc func(context.Context, *vaa.VAA) error
+type MetricPushFunc func(context.Context, *Params) error
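Push now receives a *Params envelope instead of a bare VAA. A sketch of the caller side, modeled on the two call sites in this diff (the SQS consumer and the HTTP controller); the helper name below is illustrative, not part of the commit:

    package metric

    import (
        "context"
        "fmt"

        "github.com/wormhole-foundation/wormhole/sdk/vaa"
    )

    // pushExample builds the new Params envelope and hands it to any function
    // satisfying MetricPushFunc (illustrative helper only).
    func pushExample(ctx context.Context, push MetricPushFunc, v *vaa.VAA) error {
        params := &Params{
            TrackID:     fmt.Sprintf("controller-%s", v.MessageID()), // trackId travels with the event
            Vaa:         v,
            VaaIsSigned: true, // pipeline VAAs are signed; notification-derived ones may not be
        }
        return push(ctx, params)
    }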
@@ -0,0 +1,115 @@
+package queue
+
+import (
+    "encoding/json"
+    "fmt"
+    "strconv"
+    "time"
+
+    "github.com/wormhole-foundation/wormhole-explorer/common/events"
+    "go.uber.org/zap"
+)
+
+// VaaEvent represents a vaa data to be handle by the pipeline.
+type VaaEvent struct {
+    ID               string     `json:"id"`
+    ChainID          uint16     `json:"emitterChain"`
+    EmitterAddress   string     `json:"emitterAddr"`
+    Sequence         string     `json:"sequence"`
+    GuardianSetIndex uint32     `json:"guardianSetIndex"`
+    Vaa              []byte     `json:"vaas"`
+    IndexedAt        time.Time  `json:"indexedAt"`
+    Timestamp        *time.Time `json:"timestamp"`
+    UpdatedAt        *time.Time `json:"updatedAt"`
+    TxHash           string     `json:"txHash"`
+    Version          uint16     `json:"version"`
+    Revision         uint16     `json:"revision"`
+}
+
+// VaaConverter converts a message from a VAAEvent.
+func NewVaaConverter(log *zap.Logger) ConverterFunc {
+
+    return func(msg string) (*Event, error) {
+        // unmarshal message to vaaEvent
+        var vaaEvent VaaEvent
+        err := json.Unmarshal([]byte(msg), &vaaEvent)
+        if err != nil {
+            return nil, err
+        }
+        return &Event{
+            TrackID:        fmt.Sprintf("pipeline-%s", vaaEvent.ID),
+            ID:             vaaEvent.ID,
+            ChainID:        vaaEvent.ChainID,
+            EmitterAddress: vaaEvent.EmitterAddress,
+            Sequence:       vaaEvent.Sequence,
+            Vaa:            vaaEvent.Vaa,
+            Timestamp:      vaaEvent.Timestamp,
+            VaaIsSigned:    true,
+        }, nil
+    }
+}
+
+func NewNotificationEvent(log *zap.Logger) ConverterFunc {
+
+    return func(msg string) (*Event, error) {
+        // unmarshal message to NotificationEvent
+        var notification events.NotificationEvent
+        err := json.Unmarshal([]byte(msg), &notification)
+        if err != nil {
+            return nil, err
+        }
+
+        if notification.Event != events.SignedVaaType && notification.Event != events.LogMessagePublishedMesageType {
+            log.Debug("Skip event type", zap.String("trackId", notification.TrackID), zap.String("type", notification.Event))
+            return nil, nil
+        }
+
+        switch notification.Event {
+        case events.SignedVaaType:
+            signedVaa, err := events.GetEventData[events.SignedVaa](&notification)
+            if err != nil {
+                log.Error("Error decoding signedVAA from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
+                return nil, nil
+            }
+
+            return &Event{
+                TrackID:        notification.TrackID,
+                ID:             signedVaa.ID,
+                ChainID:        signedVaa.EmitterChain,
+                EmitterAddress: signedVaa.EmitterAddress,
+                Sequence:       strconv.FormatUint(signedVaa.Sequence, 10),
+                Timestamp:      &signedVaa.Timestamp,
+                VaaIsSigned:    false,
+            }, nil
+        case events.LogMessagePublishedMesageType:
+            plm, err := events.GetEventData[events.LogMessagePublished](&notification)
+            if err != nil {
+                log.Error("Error decoding publishedLogMessage from notification event", zap.String("trackId", notification.TrackID), zap.Error(err))
+                return nil, nil
+            }
+
+            vaa, err := events.CreateUnsignedVAA(&plm)
+            if err != nil {
+                log.Error("Error creating unsigned vaa", zap.String("trackId", notification.TrackID), zap.Error(err))
+                return nil, err
+            }
+
+            vaaBytes, err := vaa.MarshalBinary()
+            if err != nil {
+                return nil, err
+            }
+
+            return &Event{
+                TrackID:        notification.TrackID,
+                ID:             vaa.MessageID(),
+                ChainID:        plm.ChainID,
+                EmitterAddress: plm.Attributes.Sender,
+                Sequence:       strconv.FormatUint(plm.Attributes.Sequence, 10),
+                Timestamp:      &plm.BlockTime,
+                Vaa:            vaaBytes,
+                VaaIsSigned:    false,
+            }, nil
+        }
+        return nil, nil
+    }
+}
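Both converters return the queue-agnostic Event, so the same SQS plumbing can serve the pipeline and notification queues. A small, self-contained sketch that feeds a fabricated pipeline message through NewVaaConverter; the import path of the queue package and the JSON fixture are assumptions for illustration:

    package main

    import (
        "fmt"

        "github.com/wormhole-foundation/wormhole-explorer/analytics/queue"
        "go.uber.org/zap"
    )

    func main() {
        logger := zap.NewNop()

        // Convert a raw pipeline message (the JSON body of the SQS notification)
        // into the generic Event; the payload below is a minimal fake fixture.
        convert := queue.NewVaaConverter(logger)
        event, err := convert(`{"id":"2/0000000000000000000000000000000000000000000000000000000000000001/42","emitterChain":2,"sequence":"42"}`)
        if err != nil {
            panic(err)
        }
        fmt.Println(event.TrackID, event.VaaIsSigned) // prints the "pipeline-<id>" track id and true
    }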
@@ -18,15 +18,20 @@ type SQSOption func(*SQS)
 type SQS struct {
     consumer *sqs_client.Consumer
     ch chan ConsumerMessage
+    converter ConverterFunc
     chSize int
     wg sync.WaitGroup
     logger *zap.Logger
 }
 
-// NewVaaSqs creates a VAA queue in SQS instances.
-func NewVaaSqs(consumer *sqs_client.Consumer, logger *zap.Logger, opts ...SQSOption) *SQS {
+// ConverterFunc converts a message from a sqs message.
+type ConverterFunc func(string) (*Event, error)
+
+// NewEventSqs creates a VAA queue in SQS instances.
+func NewEventSqs(consumer *sqs_client.Consumer, converter ConverterFunc, logger *zap.Logger, opts ...SQSOption) *SQS {
     s := &SQS{
         consumer: consumer,
+        converter: converter,
         chSize: 10,
         logger: logger}
     for _, opt := range opts {
@@ -62,18 +67,17 @@ func (q *SQS) Consume(ctx context.Context) <-chan ConsumerMessage {
             continue
         }
 
-        // unmarshal message to vaaEvent
-        var vaaEvent VaaEvent
-        err = json.Unmarshal([]byte(sqsEvent.Message), &vaaEvent)
+        // converts message to event
+        event, err := q.converter(sqsEvent.Message)
         if err != nil {
-            q.logger.Error("Error decoding vaaEvent message from SQSEvent", zap.Error(err))
+            q.logger.Error("Error converting event message from SQSEvent", zap.Error(err))
             continue
         }
 
         q.wg.Add(1)
         q.ch <- &sqsConsumerMessage{
             id: msg.ReceiptHandle,
-            data: &vaaEvent,
+            data: event,
             wg: &q.wg,
             logger: q.logger,
             consumer: q.consumer,
@@ -94,7 +98,7 @@ func (q *SQS) Close() {
 }
 
 type sqsConsumerMessage struct {
-    data *VaaEvent
+    data *Event
     consumer *sqs_client.Consumer
     wg *sync.WaitGroup
     id *string
@@ -103,7 +107,7 @@ type sqsConsumerMessage struct {
     ctx context.Context
 }
 
-func (m *sqsConsumerMessage) Data() *VaaEvent {
+func (m *sqsConsumerMessage) Data() *Event {
     return m.data
 }
@@ -1,38 +0,0 @@
-package queue
-
-import (
-    "context"
-    "time"
-)
-
-type sqsEvent struct {
-    MessageID string `json:"MessageId"`
-    Message   string `json:"Message"`
-}
-
-// VaaEvent represents a vaa data to be handle by the pipeline.
-type VaaEvent struct {
-    ID               string     `json:"id"`
-    ChainID          uint16     `json:"emitterChain"`
-    EmitterAddress   string     `json:"emitterAddr"`
-    Sequence         string     `json:"sequence"`
-    GuardianSetIndex uint32     `json:"guardianSetIndex"`
-    Vaa              []byte     `json:"vaas"`
-    IndexedAt        time.Time  `json:"indexedAt"`
-    Timestamp        *time.Time `json:"timestamp"`
-    UpdatedAt        *time.Time `json:"updatedAt"`
-    TxHash           string     `json:"txHash"`
-    Version          uint16     `json:"version"`
-    Revision         uint16     `json:"revision"`
-}
-
-// ConsumerMessage defition.
-type ConsumerMessage interface {
-    Data() *VaaEvent
-    Done()
-    Failed()
-    IsExpired() bool
-}
-
-// VAAConsumeFunc is a function to consume VAAEvent.
-type VAAConsumeFunc func(context.Context) <-chan ConsumerMessage
@@ -0,0 +1,34 @@
+package queue
+
+import (
+    "context"
+    "time"
+)
+
+type sqsEvent struct {
+    MessageID string `json:"MessageId"`
+    Message   string `json:"Message"`
+}
+
+// Event represents a event data to be handle.
+type Event struct {
+    TrackID        string
+    ID             string
+    ChainID        uint16
+    EmitterAddress string
+    Sequence       string
+    Vaa            []byte
+    Timestamp      *time.Time
+    VaaIsSigned    bool
+}
+
+// ConsumerMessage defition.
+type ConsumerMessage interface {
+    Data() *Event
+    Done()
+    Failed()
+    IsExpired() bool
+}
+
+// ConsumeFunc is a function to consume VAAEvent.
+type ConsumeFunc func(context.Context) <-chan ConsumerMessage
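A hedged sketch of how a ConsumeFunc and ConsumerMessage are typically driven; the real loop lives in the consumer package, and this helper is illustrative only:

    package queue

    import "context"

    // drain ranges over the channel returned by a ConsumeFunc and acknowledges
    // or releases each message depending on the handler result (sketch).
    func drain(ctx context.Context, consume ConsumeFunc, handle func(context.Context, *Event) error) {
        for msg := range consume(ctx) {
            if msg.IsExpired() {
                msg.Failed() // visibility timeout elapsed; let SQS redeliver
                continue
            }
            if err := handle(ctx, msg.Data()); err != nil {
                msg.Failed()
                continue
            }
            msg.Done() // acknowledge and delete from the queue
        }
    }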
api/docs/docs.go (163)
@@ -1,4 +1,5 @@
-// Code generated by swaggo/swag. DO NOT EDIT
+// Code generated by swaggo/swag. DO NOT EDIT.
+
 package docs
 
 import "github.com/swaggo/swag"
@@ -574,11 +575,6 @@
     "200": {
         "description": "OK",
         "schema": {
-            "allOf": [
-                {
-                    "type": "object"
-                },
-                {
             "type": "object",
             "properties": {
                 "status": {
@@ -586,8 +582,6 @@
                 }
             }
         }
-        ]
-    }
     },
     "400": {
         "description": "Bad Request"
@@ -899,11 +893,6 @@
     "200": {
         "description": "OK",
         "schema": {
-            "allOf": [
-                {
-                    "type": "object"
-                },
-                {
             "type": "object",
             "properties": {
                 "ready": {
@@ -911,8 +900,6 @@
                 }
             }
         }
-        ]
-    }
     },
     "400": {
         "description": "Bad Request"
@@ -1736,11 +1723,6 @@
     "200": {
         "description": "OK",
         "schema": {
-            "allOf": [
-                {
-                    "type": "object"
-                },
-                {
             "type": "object",
             "properties": {
                 "vaaBytes": {
@@ -1751,8 +1733,6 @@
                 }
             }
         }
-        ]
-    }
     },
     "400": {
         "description": "Bad Request"
@@ -1797,11 +1777,6 @@
     "200": {
         "description": "OK",
         "schema": {
-            "allOf": [
-                {
-                    "type": "object"
-                },
-                {
             "type": "object",
             "properties": {
                 "vaaBytes": {
@@ -1812,8 +1787,6 @@
                 }
             }
         }
-        ]
-    }
     },
     "400": {
         "description": "Bad Request"
@@ -2435,9 +2408,137 @@
             }
         }
     },
+    "relays.DeliveryReponse": {
+        "type": "object",
+        "properties": {
+            "execution": {
+                "$ref": "#/definitions/relays.ResultExecutionResponse"
+            },
+            "relayGasUsed": {
+                "type": "integer"
+            }
+        }
+    },
+    "relays.InstructionsResponse": {
+        "type": "object",
+        "properties": {
+            "encodedExecutionInfo": {
+                "type": "string"
+            },
+            "extraReceiverValue": {
+                "type": "object",
+                "properties": {
+                    "_hex": {
+                        "type": "string"
+                    },
+                    "_isBigNumber": {
+                        "type": "boolean"
+                    }
+                }
+            },
+            "refundAddress": {
+                "type": "string"
+            },
+            "refundChainId": {
+                "type": "integer"
+            },
+            "refundDeliveryProvider": {
+                "type": "string"
"type": "string"
|
||||||
|
},
|
||||||
|
"requestedReceiverValue": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"_hex": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"_isBigNumber": {
|
||||||
|
"type": "boolean"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"senderAddress": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"sourceDeliveryProvider": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"targetAddress": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"targetChainId": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"vaaKeys": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"relays.RelayDataResponse": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"delivery": {
|
||||||
|
"$ref": "#/definitions/relays.DeliveryReponse"
|
||||||
|
},
|
||||||
|
"fromTxHash": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"instructions": {
|
||||||
|
"$ref": "#/definitions/relays.InstructionsResponse"
|
||||||
|
},
|
||||||
|
"toTxHash": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"relays.RelayResponse": {
|
"relays.RelayResponse": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"additionalProperties": true
|
"properties": {
|
||||||
|
"completedAt": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"data": {
|
||||||
|
"$ref": "#/definitions/relays.RelayDataResponse"
|
||||||
|
},
|
||||||
|
"failedAt": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"receivedAt": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"relayer": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"relays.ResultExecutionResponse": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"detail": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"gasUsed": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"refundStatus": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"revertString": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"transactionHash": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"response.Response-address_AddressOverview": {
|
"response.Response-address_AddressOverview": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
|
@ -3046,6 +3147,8 @@ var SwaggerInfo = &swag.Spec{
|
||||||
Description: "Wormhole Guardian API\nThis is the API for the Wormhole Guardian and Explorer.\nThe API has two namespaces: wormholescan and guardian.\nwormholescan is the namespace for the explorer and the new endpoints. The prefix is /api/v1.\nguardian is the legacy namespace backguard compatible with guardian node API. The prefix is /v1.\nThis API is public and does not require authentication although some endpoints are rate limited.\nCheck each endpoint documentation for more information.",
|
Description: "Wormhole Guardian API\nThis is the API for the Wormhole Guardian and Explorer.\nThe API has two namespaces: wormholescan and guardian.\nwormholescan is the namespace for the explorer and the new endpoints. The prefix is /api/v1.\nguardian is the legacy namespace backguard compatible with guardian node API. The prefix is /v1.\nThis API is public and does not require authentication although some endpoints are rate limited.\nCheck each endpoint documentation for more information.",
|
||||||
InfoInstanceName: "swagger",
|
InfoInstanceName: "swagger",
|
||||||
SwaggerTemplate: docTemplate,
|
SwaggerTemplate: docTemplate,
|
||||||
|
LeftDelim: "{{",
|
||||||
|
RightDelim: "}}",
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
|
|
@ -567,11 +567,6 @@
|
||||||
"200": {
|
"200": {
|
||||||
"description": "OK",
|
"description": "OK",
|
||||||
"schema": {
|
"schema": {
|
||||||
"allOf": [
|
|
||||||
{
|
|
||||||
"type": "object"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"status": {
|
"status": {
|
||||||
|
@ -579,8 +574,6 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"400": {
|
"400": {
|
||||||
"description": "Bad Request"
|
"description": "Bad Request"
|
||||||
|
@ -892,11 +885,6 @@
|
||||||
"200": {
|
"200": {
|
||||||
"description": "OK",
|
"description": "OK",
|
||||||
"schema": {
|
"schema": {
|
||||||
"allOf": [
|
|
||||||
{
|
|
||||||
"type": "object"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"ready": {
|
"ready": {
|
||||||
|
@ -904,8 +892,6 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"400": {
|
"400": {
|
||||||
"description": "Bad Request"
|
"description": "Bad Request"
|
||||||
|
@ -1729,11 +1715,6 @@
|
||||||
"200": {
|
"200": {
|
||||||
"description": "OK",
|
"description": "OK",
|
||||||
"schema": {
|
"schema": {
|
||||||
"allOf": [
|
|
||||||
{
|
|
||||||
"type": "object"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"vaaBytes": {
|
"vaaBytes": {
|
||||||
|
@ -1744,8 +1725,6 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"400": {
|
"400": {
|
||||||
"description": "Bad Request"
|
"description": "Bad Request"
|
||||||
|
@ -1790,11 +1769,6 @@
|
||||||
"200": {
|
"200": {
|
||||||
"description": "OK",
|
"description": "OK",
|
||||||
"schema": {
|
"schema": {
|
||||||
"allOf": [
|
|
||||||
{
|
|
||||||
"type": "object"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"vaaBytes": {
|
"vaaBytes": {
|
||||||
|
@ -1805,8 +1779,6 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"400": {
|
"400": {
|
||||||
"description": "Bad Request"
|
"description": "Bad Request"
|
||||||
|
@ -2428,9 +2400,137 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"relays.DeliveryReponse": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"execution": {
|
||||||
|
"$ref": "#/definitions/relays.ResultExecutionResponse"
|
||||||
|
},
|
||||||
|
"relayGasUsed": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"relays.InstructionsResponse": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"encodedExecutionInfo": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"extraReceiverValue": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"_hex": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"_isBigNumber": {
|
||||||
|
"type": "boolean"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"refundAddress": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"refundChainId": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"refundDeliveryProvider": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"requestedReceiverValue": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"_hex": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"_isBigNumber": {
|
||||||
|
"type": "boolean"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"senderAddress": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"sourceDeliveryProvider": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"targetAddress": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"targetChainId": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"vaaKeys": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"relays.RelayDataResponse": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"delivery": {
|
||||||
|
"$ref": "#/definitions/relays.DeliveryReponse"
|
||||||
|
},
|
||||||
|
"fromTxHash": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"instructions": {
|
||||||
|
"$ref": "#/definitions/relays.InstructionsResponse"
|
||||||
|
},
|
||||||
|
"toTxHash": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"relays.RelayResponse": {
|
"relays.RelayResponse": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"additionalProperties": true
|
"properties": {
|
||||||
|
"completedAt": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"data": {
|
||||||
|
"$ref": "#/definitions/relays.RelayDataResponse"
|
||||||
|
},
|
||||||
|
"failedAt": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"receivedAt": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"relayer": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"relays.ResultExecutionResponse": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"detail": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"gasUsed": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"refundStatus": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"revertString": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"transactionHash": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"response.Response-address_AddressOverview": {
|
"response.Response-address_AddressOverview": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
|
|
|
@ -396,8 +396,91 @@ definitions:
|
||||||
tokenChain:
|
tokenChain:
|
||||||
$ref: '#/definitions/vaa.ChainID'
|
$ref: '#/definitions/vaa.ChainID'
|
||||||
type: object
|
type: object
|
||||||
|
relays.DeliveryReponse:
|
||||||
|
properties:
|
||||||
|
execution:
|
||||||
|
$ref: '#/definitions/relays.ResultExecutionResponse'
|
||||||
|
relayGasUsed:
|
||||||
|
type: integer
|
||||||
|
type: object
|
||||||
|
relays.InstructionsResponse:
|
||||||
|
properties:
|
||||||
|
encodedExecutionInfo:
|
||||||
|
type: string
|
||||||
|
extraReceiverValue:
|
||||||
|
properties:
|
||||||
|
_hex:
|
||||||
|
type: string
|
||||||
|
_isBigNumber:
|
||||||
|
type: boolean
|
||||||
|
type: object
|
||||||
|
refundAddress:
|
||||||
|
type: string
|
||||||
|
refundChainId:
|
||||||
|
type: integer
|
||||||
|
refundDeliveryProvider:
|
||||||
|
type: string
|
||||||
|
requestedReceiverValue:
|
||||||
|
properties:
|
||||||
|
_hex:
|
||||||
|
type: string
|
||||||
|
_isBigNumber:
|
||||||
|
type: boolean
|
||||||
|
type: object
|
||||||
|
senderAddress:
|
||||||
|
type: string
|
||||||
|
sourceDeliveryProvider:
|
||||||
|
type: string
|
||||||
|
targetAddress:
|
||||||
|
type: string
|
||||||
|
targetChainId:
|
||||||
|
type: integer
|
||||||
|
vaaKeys:
|
||||||
|
items: {}
|
||||||
|
type: array
|
||||||
|
type: object
|
||||||
|
relays.RelayDataResponse:
|
||||||
|
properties:
|
||||||
|
delivery:
|
||||||
|
$ref: '#/definitions/relays.DeliveryReponse'
|
||||||
|
fromTxHash:
|
||||||
|
type: string
|
||||||
|
instructions:
|
||||||
|
$ref: '#/definitions/relays.InstructionsResponse'
|
||||||
|
toTxHash:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
relays.RelayResponse:
|
relays.RelayResponse:
|
||||||
additionalProperties: true
|
properties:
|
||||||
|
completedAt:
|
||||||
|
type: string
|
||||||
|
data:
|
||||||
|
$ref: '#/definitions/relays.RelayDataResponse'
|
||||||
|
failedAt:
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
receivedAt:
|
||||||
|
type: string
|
||||||
|
relayer:
|
||||||
|
type: string
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
relays.ResultExecutionResponse:
|
||||||
|
properties:
|
||||||
|
detail:
|
||||||
|
type: string
|
||||||
|
gasUsed:
|
||||||
|
type: string
|
||||||
|
refundStatus:
|
||||||
|
type: string
|
||||||
|
revertString:
|
||||||
|
type: string
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
transactionHash:
|
||||||
|
type: string
|
||||||
type: object
|
type: object
|
||||||
response.Response-address_AddressOverview:
|
response.Response-address_AddressOverview:
|
||||||
properties:
|
properties:
|
||||||
|
@ -1204,9 +1287,7 @@ paths:
|
||||||
"200":
|
"200":
|
||||||
description: OK
|
description: OK
|
||||||
schema:
|
schema:
|
||||||
allOf:
|
properties:
|
||||||
- type: object
|
|
||||||
- properties:
|
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
type: object
|
type: object
|
||||||
|
@ -1421,9 +1502,7 @@ paths:
|
||||||
"200":
|
"200":
|
||||||
description: OK
|
description: OK
|
||||||
schema:
|
schema:
|
||||||
allOf:
|
properties:
|
||||||
- type: object
|
|
||||||
- properties:
|
|
||||||
ready:
|
ready:
|
||||||
type: string
|
type: string
|
||||||
type: object
|
type: object
|
||||||
|
@ -1998,9 +2077,7 @@ paths:
|
||||||
"200":
|
"200":
|
||||||
description: OK
|
description: OK
|
||||||
schema:
|
schema:
|
||||||
allOf:
|
properties:
|
||||||
- type: object
|
|
||||||
- properties:
|
|
||||||
vaaBytes:
|
vaaBytes:
|
||||||
items:
|
items:
|
||||||
type: integer
|
type: integer
|
||||||
|
@ -2036,9 +2113,7 @@ paths:
|
||||||
"200":
|
"200":
|
||||||
description: OK
|
description: OK
|
||||||
schema:
|
schema:
|
||||||
allOf:
|
properties:
|
||||||
- type: object
|
|
||||||
- properties:
|
|
||||||
vaaBytes:
|
vaaBytes:
|
||||||
items:
|
items:
|
||||||
type: integer
|
type: integer
|
||||||
|
|
|
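The relay response is now documented as a concrete schema rather than a free-form object (additionalProperties: true). A sketch of client-side Go structs that would decode that JSON, using only field names from the definitions above; the package name and helper are illustrative, and the instructions object is omitted for brevity:

package relaysclient

import "encoding/json"

type ResultExecution struct {
	Detail          string `json:"detail"`
	GasUsed         string `json:"gasUsed"`
	RefundStatus    string `json:"refundStatus"`
	RevertString    string `json:"revertString"`
	Status          string `json:"status"`
	TransactionHash string `json:"transactionHash"`
}

type Delivery struct {
	Execution    ResultExecution `json:"execution"`
	RelayGasUsed int             `json:"relayGasUsed"`
}

type RelayData struct {
	Delivery   Delivery `json:"delivery"`
	FromTxHash string   `json:"fromTxHash"`
	ToTxHash   string   `json:"toTxHash"`
}

type Relay struct {
	ID          string    `json:"id"`
	Status      string    `json:"status"`
	ReceivedAt  string    `json:"receivedAt"`
	CompletedAt string    `json:"completedAt"`
	FailedAt    string    `json:"failedAt"`
	Relayer     string    `json:"relayer"`
	Data        RelayData `json:"data"`
}

// decodeRelay parses a relays endpoint response body into the typed shape.
func decodeRelay(body []byte) (*Relay, error) {
	var r Relay
	if err := json.Unmarshal(body, &r); err != nil {
		return nil, err
	}
	return &r, nil
}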
@ -4,8 +4,8 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/common"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/vaa"
|
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/vaa"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/api/types"
|
|
||||||
"go.mongodb.org/mongo-driver/bson"
|
"go.mongodb.org/mongo-driver/bson"
|
||||||
"go.mongodb.org/mongo-driver/mongo"
|
"go.mongodb.org/mongo-driver/mongo"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
|
@ -32,25 +32,28 @@ func NewRepository(db *mongo.Database, logger *zap.Logger) *Repository {
|
||||||
}
|
}
|
||||||
|
|
||||||
type GetAddressOverviewParams struct {
|
type GetAddressOverviewParams struct {
|
||||||
Address *types.Address
|
Address string
|
||||||
Skip int64
|
Skip int64
|
||||||
Limit int64
|
Limit int64
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *Repository) GetAddressOverview(ctx context.Context, params *GetAddressOverviewParams) (*AddressOverview, error) {
|
func (r *Repository) GetAddressOverview(ctx context.Context, params *GetAddressOverviewParams) (*AddressOverview, error) {
|
||||||
|
|
||||||
|
ids, err := common.FindVaasIdsByFromAddressOrToAddress(ctx, r.db, params.Address)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ids) == 0 {
|
||||||
|
var result []*vaa.VaaDoc
|
||||||
|
return &AddressOverview{Vaas: result}, nil
|
||||||
|
}
|
||||||
|
|
||||||
// build a query pipeline based on input parameters
|
// build a query pipeline based on input parameters
|
||||||
var pipeline mongo.Pipeline
|
var pipeline mongo.Pipeline
|
||||||
{
|
{
|
||||||
// filter by address
|
// filter by list ids
|
||||||
pipeline = append(pipeline, bson.D{
|
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: ids}}}}}})
|
||||||
{"$match", bson.D{
|
|
||||||
{"$or", bson.A{
|
|
||||||
bson.D{{"result.fromAddress", bson.D{{"$eq", "0x" + params.Address.Hex()}}}},
|
|
||||||
bson.D{{"result.toAddress", bson.M{"$eq": "0x" + params.Address.Hex()}}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// specify sorting criteria
|
// specify sorting criteria
|
||||||
pipeline = append(pipeline, bson.D{
|
pipeline = append(pipeline, bson.D{
|
||||||
|
|
|
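The rewritten stages also switch from positional to keyed bson fields (Key:/Value:). Both forms build the same document; the keyed form avoids go vet's "composite literal uses unkeyed fields" warning for structs defined in another package. A small sketch with an illustrative ids slice:

package example

import "go.mongodb.org/go-mongo-driver/bson" // illustrative; the repository imports go.mongodb.org/mongo-driver/bson

// matchByIds builds the same $match stage two ways; only the literal style differs.
func matchByIds(ids []string) (positional, keyed bson.D) {
	// Positional literal: compiles, but go vet flags the unkeyed fields.
	positional = bson.D{{"$match", bson.D{{"_id", bson.D{{"$in", ids}}}}}}
	// Keyed literal: identical BSON, no vet warning.
	keyed = bson.D{{Key: "$match", Value: bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: ids}}}}}}
	return positional, keyed
}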
@ -5,7 +5,6 @@ import (
|
||||||
|
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
|
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/api/response"
|
"github.com/wormhole-foundation/wormhole-explorer/api/response"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/api/types"
|
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -26,7 +25,7 @@ func NewService(r *Repository, logger *zap.Logger) *Service {
|
||||||
|
|
||||||
func (s *Service) GetAddressOverview(
|
func (s *Service) GetAddressOverview(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
address *types.Address,
|
address string,
|
||||||
pagination *pagination.Pagination,
|
pagination *pagination.Pagination,
|
||||||
) (*response.Response[*AddressOverview], error) {
|
) (*response.Response[*AddressOverview], error) {
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,56 @@
|
||||||
|
package common
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/common/utils"
|
||||||
|
"go.mongodb.org/mongo-driver/bson"
|
||||||
|
"go.mongodb.org/mongo-driver/mongo"
|
||||||
|
)
|
||||||
|
|
||||||
|
type mongoID struct {
|
||||||
|
Id string `bson:"_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func FindVaasIdsByFromAddressOrToAddress(
|
||||||
|
ctx context.Context,
|
||||||
|
db *mongo.Database,
|
||||||
|
address string,
|
||||||
|
) ([]string, error) {
|
||||||
|
addressHexa := strings.ToLower(address)
|
||||||
|
if !utils.StartsWith0x(address) {
|
||||||
|
addressHexa = "0x" + strings.ToLower(addressHexa)
|
||||||
|
}
|
||||||
|
|
||||||
|
matchForToAddress := bson.D{{Key: "$match", Value: bson.D{{Key: "$or", Value: bson.A{
|
||||||
|
bson.D{{Key: "standardizedProperties.toAddress", Value: bson.M{"$eq": addressHexa}}},
|
||||||
|
bson.D{{Key: "standardizedProperties.toAddress", Value: bson.M{"$eq": address}}},
|
||||||
|
}}}}}
|
||||||
|
|
||||||
|
matchForFromAddress := bson.D{{Key: "$match", Value: bson.D{{Key: "$or", Value: bson.A{
|
||||||
|
bson.D{{Key: "originTx.from", Value: bson.M{"$eq": addressHexa}}},
|
||||||
|
bson.D{{Key: "originTx.from", Value: bson.M{"$eq": address}}},
|
||||||
|
}}}}}
|
||||||
|
|
||||||
|
toAddressFilter := bson.D{{Key: "$unionWith", Value: bson.D{{Key: "coll", Value: "parsedVaa"}, {Key: "pipeline", Value: bson.A{matchForToAddress}}}}}
|
||||||
|
fromAddressFilter := bson.D{{Key: "$unionWith", Value: bson.D{{Key: "coll", Value: "globalTransactions"}, {Key: "pipeline", Value: bson.A{matchForFromAddress}}}}}
|
||||||
|
group := bson.D{{Key: "$group", Value: bson.D{{Key: "_id", Value: "$_id"}}}}
|
||||||
|
|
||||||
|
pipeline := []bson.D{fromAddressFilter, toAddressFilter, group}
|
||||||
|
|
||||||
|
cur, err := db.Collection("_temporal").Aggregate(ctx, pipeline)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
var documents []mongoID
|
||||||
|
err = cur.All(ctx, &documents)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
var ids []string
|
||||||
|
for _, doc := range documents {
|
||||||
|
ids = append(ids, doc.Id)
|
||||||
|
}
|
||||||
|
return ids, nil
|
||||||
|
}
|
|
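FindVaasIdsByFromAddressOrToAddress resolves the matching VAA ids across parsedVaa and globalTransactions first, so callers can narrow their main query with a single $in match on _id. A sketch of that calling pattern (the wrapper function is illustrative; the collection name comes from the diff above):

package example

import (
	"context"

	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/common"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

// findVaaDocsForAddress prefilters by id, then queries the vaas collection once.
func findVaaDocsForAddress(ctx context.Context, db *mongo.Database, address string) (*mongo.Cursor, error) {
	ids, err := common.FindVaasIdsByFromAddressOrToAddress(ctx, db, address)
	if err != nil {
		return nil, err
	}
	if len(ids) == 0 {
		return nil, nil // nothing references this address
	}
	return db.Collection("vaas").Find(ctx, bson.M{"_id": bson.M{"$in": ids}})
}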
@ -0,0 +1,86 @@
|
||||||
|
package operations
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
|
||||||
|
)
|
||||||
|
|
||||||
|
// OperationDto operation data transfer object.
|
||||||
|
type OperationDto struct {
|
||||||
|
ID string `bson:"_id"`
|
||||||
|
TxHash string `bson:"txHash"`
|
||||||
|
Symbol string `bson:"symbol"`
|
||||||
|
UsdAmount string `bson:"usdAmount"`
|
||||||
|
TokenAmount string `bson:"tokenAmount"`
|
||||||
|
Vaa *VaaDto `bson:"vaa"`
|
||||||
|
SourceTx *OriginTx `bson:"originTx" json:"originTx"`
|
||||||
|
DestinationTx *DestinationTx `bson:"destinationTx" json:"destinationTx"`
|
||||||
|
Payload map[string]any `bson:"payload"`
|
||||||
|
StandardizedProperties *StandardizedProperties `bson:"standardizedProperties"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// StandardizedProperties represents the standardized properties of an operation.
|
||||||
|
type StandardizedProperties struct {
|
||||||
|
AppIds []string `json:"appIds" bson:"appIds"`
|
||||||
|
FromChain sdk.ChainID `json:"fromChain" bson:"fromChain"`
|
||||||
|
FromAddress string `json:"fromAddress" bson:"fromAddress"`
|
||||||
|
ToChain sdk.ChainID `json:"toChain" bson:"toChain"`
|
||||||
|
ToAddress string `json:"toAddress" bson:"toAddress"`
|
||||||
|
TokenChain sdk.ChainID `json:"tokenChain" bson:"tokenChain"`
|
||||||
|
TokenAddress string `json:"tokenAddress" bson:"tokenAddress"`
|
||||||
|
Amount string `json:"amount" bson:"amount"`
|
||||||
|
FeeAddress string `json:"feeAddress" bson:"feeAddress"`
|
||||||
|
FeeChain sdk.ChainID `json:"feeChain" bson:"feeChain"`
|
||||||
|
Fee string `json:"fee" bson:"fee"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// VaaDto vaa data transfer object.
|
||||||
|
type VaaDto struct {
|
||||||
|
ID string `bson:"_id" json:"id"`
|
||||||
|
Version uint8 `bson:"version" json:"version"`
|
||||||
|
EmitterChain sdk.ChainID `bson:"emitterChain" json:"emitterChain"`
|
||||||
|
EmitterAddr string `bson:"emitterAddr" json:"emitterAddr"`
|
||||||
|
EmitterNativeAddr string `json:"emitterNativeAddr,omitempty"`
|
||||||
|
Sequence string `bson:"sequence" json:"-"`
|
||||||
|
GuardianSetIndex uint32 `bson:"guardianSetIndex" json:"guardianSetIndex"`
|
||||||
|
Vaa []byte `bson:"vaas" json:"vaa"`
|
||||||
|
Timestamp *time.Time `bson:"timestamp" json:"timestamp"`
|
||||||
|
UpdatedAt *time.Time `bson:"updatedAt" json:"updatedAt"`
|
||||||
|
IndexedAt *time.Time `bson:"indexedAt" json:"indexedAt"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// GlobalTransactionDoc definition.
|
||||||
|
type GlobalTransactionDoc struct {
|
||||||
|
ID string `bson:"_id" json:"id"`
|
||||||
|
OriginTx *OriginTx `bson:"originTx" json:"originTx"`
|
||||||
|
DestinationTx *DestinationTx `bson:"destinationTx" json:"destinationTx"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// OriginTx represents an origin transaction.
|
||||||
|
type OriginTx struct {
|
||||||
|
TxHash string `bson:"nativeTxHash" json:"txHash"`
|
||||||
|
From string `bson:"from" json:"from"`
|
||||||
|
Status string `bson:"status" json:"status"`
|
||||||
|
Timestamp *time.Time `bson:"timestamp" json:"timestamp"`
|
||||||
|
Attribute *AttributeDoc `bson:"attribute" json:"attribute"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AttributeDoc represents a custom attribute for an origin transaction.
|
||||||
|
type AttributeDoc struct {
|
||||||
|
Type string `bson:"type" json:"type"`
|
||||||
|
Value map[string]any `bson:"value" json:"value"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// DestinationTx represents a destination transaction.
|
||||||
|
type DestinationTx struct {
|
||||||
|
ChainID sdk.ChainID `bson:"chainId" json:"chainId"`
|
||||||
|
Status string `bson:"status" json:"status"`
|
||||||
|
Method string `bson:"method" json:"method"`
|
||||||
|
TxHash string `bson:"txHash" json:"txHash"`
|
||||||
|
From string `bson:"from" json:"from"`
|
||||||
|
To string `bson:"to" json:"to"`
|
||||||
|
BlockNumber string `bson:"blockNumber" json:"blockNumber"`
|
||||||
|
Timestamp *time.Time `bson:"timestamp" json:"timestamp"`
|
||||||
|
UpdatedAt *time.Time `bson:"updatedAt" json:"updatedAt"`
|
||||||
|
}
|
|
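Vaa, SourceTx and DestinationTx are filled in from $lookup stages, so they stay nil whenever the joined collection has no matching document; consumers of OperationDto should nil-check them before reading nested fields. A small illustrative helper (not part of this change) showing that defensive access:

// statusOf prefers the destination-chain status and falls back to the origin one.
func statusOf(op *OperationDto) string {
	if op.DestinationTx != nil {
		return op.DestinationTx.Status
	}
	if op.SourceTx != nil {
		return op.SourceTx.Status
	}
	return "unknown"
}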
@ -0,0 +1,238 @@
|
||||||
|
package operations
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/common/utils"
|
||||||
|
"go.mongodb.org/mongo-driver/bson"
|
||||||
|
"go.mongodb.org/mongo-driver/mongo"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Repository definition
|
||||||
|
type Repository struct {
|
||||||
|
db *mongo.Database
|
||||||
|
logger *zap.Logger
|
||||||
|
collections struct {
|
||||||
|
vaas *mongo.Collection
|
||||||
|
parsedVaa *mongo.Collection
|
||||||
|
globalTransactions *mongo.Collection
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewRepository create a new Repository.
|
||||||
|
func NewRepository(db *mongo.Database, logger *zap.Logger) *Repository {
|
||||||
|
return &Repository{db: db,
|
||||||
|
logger: logger.With(zap.String("module", "OperationRepository")),
|
||||||
|
collections: struct {
|
||||||
|
vaas *mongo.Collection
|
||||||
|
parsedVaa *mongo.Collection
|
||||||
|
globalTransactions *mongo.Collection
|
||||||
|
}{
|
||||||
|
vaas: db.Collection("vaas"),
|
||||||
|
parsedVaa: db.Collection("parsedVaa"),
|
||||||
|
globalTransactions: db.Collection("globalTransactions"),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FindById returns the operation for the given chainID/emitter/seq.
|
||||||
|
func (r *Repository) FindById(ctx context.Context, id string) (*OperationDto, error) {
|
||||||
|
|
||||||
|
var pipeline mongo.Pipeline
|
||||||
|
|
||||||
|
// filter vaas by id
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.D{{Key: "_id", Value: id}}}})
|
||||||
|
|
||||||
|
// lookup vaas
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "vaas"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "vaas"}}}})
|
||||||
|
|
||||||
|
// lookup globalTransactions
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "globalTransactions"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "globalTransactions"}}}})
|
||||||
|
|
||||||
|
// lookup transferPrices
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "transferPrices"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "transferPrices"}}}})
|
||||||
|
|
||||||
|
// lookup parsedVaa
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "parsedVaa"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "parsedVaa"}}}})
|
||||||
|
|
||||||
|
// add fields
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$addFields", Value: bson.D{
|
||||||
|
{Key: "payload", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.parsedPayload", 0}}}},
|
||||||
|
{Key: "vaa", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$vaas", 0}}}},
|
||||||
|
{Key: "standardizedProperties", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.standardizedProperties", 0}}}},
|
||||||
|
{Key: "symbol", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.symbol", 0}}}},
|
||||||
|
{Key: "usdAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.usdAmount", 0}}}},
|
||||||
|
{Key: "tokenAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.tokenAmount", 0}}}},
|
||||||
|
}}})
|
||||||
|
|
||||||
|
// unset
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$unset", Value: bson.A{"transferPrices", "parsedVaa"}}})
|
||||||
|
|
||||||
|
// Execute the aggregation pipeline
|
||||||
|
cur, err := r.collections.globalTransactions.Aggregate(ctx, pipeline)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Error("failed execute aggregation pipeline", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read results from cursor
|
||||||
|
var operations []*OperationDto
|
||||||
|
err = cur.All(ctx, &operations)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Error("failed to decode cursor", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if there is only one operation
|
||||||
|
if len(operations) > 1 {
|
||||||
|
r.logger.Error("invalid number of operations", zap.Int("count", len(operations)))
|
||||||
|
return nil, fmt.Errorf("invalid number of operations")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(operations) == 0 {
|
||||||
|
return nil, errors.ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
return operations[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type mongoID struct {
|
||||||
|
Id string `bson:"_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// findOperationsIdByAddressOrTxHash returns the IDs of all operations that match the given address or txHash.
|
||||||
|
func findOperationsIdByAddressOrTxHash(ctx context.Context, db *mongo.Database, q string, pagination *pagination.Pagination) ([]string, error) {
|
||||||
|
qHexa := strings.ToLower(q)
|
||||||
|
if !utils.StartsWith0x(q) {
|
||||||
|
qHexa = "0x" + strings.ToLower(qHexa)
|
||||||
|
}
|
||||||
|
|
||||||
|
matchGlobalTransactions := bson.D{{Key: "$match", Value: bson.D{{Key: "$or", Value: bson.A{
|
||||||
|
bson.D{{Key: "originTx.from", Value: bson.M{"$eq": qHexa}}},
|
||||||
|
bson.D{{Key: "originTx.from", Value: bson.M{"$eq": q}}},
|
||||||
|
bson.D{{Key: "originTx.nativeTxHash", Value: bson.M{"$eq": qHexa}}},
|
||||||
|
bson.D{{Key: "originTx.nativeTxHash", Value: bson.M{"$eq": q}}},
|
||||||
|
bson.D{{Key: "originTx.attribute.value.originTxHash", Value: bson.M{"$eq": qHexa}}},
|
||||||
|
bson.D{{Key: "originTx.attribute.value.originTxHash", Value: bson.M{"$eq": q}}},
|
||||||
|
bson.D{{Key: "destinationTx.txHash", Value: bson.M{"$eq": qHexa}}},
|
||||||
|
bson.D{{Key: "destinationTx.txHash", Value: bson.M{"$eq": q}}},
|
||||||
|
}}}}}
|
||||||
|
|
||||||
|
matchParsedVaa := bson.D{{Key: "$match", Value: bson.D{{Key: "$or", Value: bson.A{
|
||||||
|
bson.D{{Key: "standardizedProperties.toAddress", Value: bson.M{"$eq": qHexa}}},
|
||||||
|
bson.D{{Key: "standardizedProperties.toAddress", Value: bson.M{"$eq": q}}},
|
||||||
|
}}}}}
|
||||||
|
|
||||||
|
globalTransactionFilter := bson.D{{Key: "$unionWith", Value: bson.D{{Key: "coll", Value: "globalTransactions"}, {Key: "pipeline", Value: bson.A{matchGlobalTransactions}}}}}
|
||||||
|
parserFilter := bson.D{{Key: "$unionWith", Value: bson.D{{Key: "coll", Value: "parsedVaa"}, {Key: "pipeline", Value: bson.A{matchParsedVaa}}}}}
|
||||||
|
group := bson.D{{Key: "$group", Value: bson.D{{Key: "_id", Value: "$_id"}}}}
|
||||||
|
pipeline := []bson.D{globalTransactionFilter, parserFilter, group}
|
||||||
|
|
||||||
|
cur, err := db.Collection("_operationsTemporal").Aggregate(ctx, pipeline)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
var documents []mongoID
|
||||||
|
err = cur.All(ctx, &documents)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
var ids []string
|
||||||
|
for _, doc := range documents {
|
||||||
|
ids = append(ids, doc.Id)
|
||||||
|
}
|
||||||
|
return ids, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueryFilterIsVaaID checks if q is a vaaID.
|
||||||
|
func QueryFilterIsVaaID(ctx context.Context, q string) []string {
|
||||||
|
// check if q is a vaaID
|
||||||
|
isVaaID := regexp.MustCompile(`\d+/\w+/\d+`).MatchString(q)
|
||||||
|
if isVaaID {
|
||||||
|
return []string{q}
|
||||||
|
}
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FindAll returns all operations filtered by q.
|
||||||
|
func (r *Repository) FindAll(ctx context.Context, q string, pagination *pagination.Pagination) ([]*OperationDto, error) {
|
||||||
|
|
||||||
|
var pipeline mongo.Pipeline
|
||||||
|
|
||||||
|
// get all ids by that match q
|
||||||
|
if q != "" {
|
||||||
|
var ids []string
|
||||||
|
// find all ids that match q (vaaID)
|
||||||
|
ids = QueryFilterIsVaaID(ctx, q)
|
||||||
|
if len(ids) == 0 {
|
||||||
|
// find all ids that match q (address or txHash)
|
||||||
|
var err error
|
||||||
|
ids, err = findOperationsIdByAddressOrTxHash(ctx, r.db, q, pagination)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ids) == 0 {
|
||||||
|
return []*OperationDto{}, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: ids}}}}}})
|
||||||
|
}
|
||||||
|
|
||||||
|
// sort
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: bson.D{bson.E{Key: "originTx.timestamp", Value: pagination.GetSortInt()}}}})
|
||||||
|
|
||||||
|
// Skip initial results
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$skip", Value: pagination.Skip}})
|
||||||
|
|
||||||
|
// Limit size of results
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: pagination.Limit}})
|
||||||
|
|
||||||
|
// lookup vaas
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "vaas"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "vaas"}}}})
|
||||||
|
|
||||||
|
// lookup globalTransactions
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "globalTransactions"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "globalTransactions"}}}})
|
||||||
|
|
||||||
|
// lookup transferPrices
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "transferPrices"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "transferPrices"}}}})
|
||||||
|
|
||||||
|
// lookup parsedVaa
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{{Key: "from", Value: "parsedVaa"}, {Key: "localField", Value: "_id"}, {Key: "foreignField", Value: "_id"}, {Key: "as", Value: "parsedVaa"}}}})
|
||||||
|
|
||||||
|
// add fields
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$addFields", Value: bson.D{
|
||||||
|
{Key: "payload", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.parsedPayload", 0}}}},
|
||||||
|
{Key: "vaa", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$vaas", 0}}}},
|
||||||
|
{Key: "standardizedProperties", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$parsedVaa.standardizedProperties", 0}}}},
|
||||||
|
{Key: "symbol", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.symbol", 0}}}},
|
||||||
|
{Key: "usdAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.usdAmount", 0}}}},
|
||||||
|
{Key: "tokenAmount", Value: bson.D{{Key: "$arrayElemAt", Value: bson.A{"$transferPrices.tokenAmount", 0}}}},
|
||||||
|
}}})
|
||||||
|
|
||||||
|
// unset
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$unset", Value: bson.A{"transferPrices", "parsedVaa"}}})
|
||||||
|
|
||||||
|
// Execute the aggregation pipeline
|
||||||
|
cur, err := r.collections.globalTransactions.Aggregate(ctx, pipeline)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Error("failed execute aggregation pipeline", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read results from cursor
|
||||||
|
var operations []*OperationDto
|
||||||
|
err = cur.All(ctx, &operations)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Error("failed to decode cursor", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return operations, nil
|
||||||
|
}
|
|
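FindAll resolves the free-text q in two steps: a value shaped like chain/emitter/sequence is used as the operation id directly, anything else is looked up as an address or transaction hash. That dispatch in isolation, as a sketch assumed to sit next to the helpers above (the function name is illustrative and the package's existing imports are reused):

// resolveOperationIds mirrors the id-resolution branch of FindAll.
func resolveOperationIds(ctx context.Context, db *mongo.Database, q string, p *pagination.Pagination) ([]string, error) {
	// A q like "1/abc123/42" already is the operation id.
	if ids := QueryFilterIsVaaID(ctx, q); len(ids) > 0 {
		return ids, nil
	}
	// Otherwise match origin/destination tx hashes and addresses.
	return findOperationsIdByAddressOrTxHash(ctx, db, q, p)
}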
@ -0,0 +1,41 @@
|
||||||
|
package operations
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/api/types"
|
||||||
|
"github.com/wormhole-foundation/wormhole/sdk/vaa"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Service struct {
|
||||||
|
repo *Repository
|
||||||
|
logger *zap.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewService create a new Service.
|
||||||
|
func NewService(repo *Repository, logger *zap.Logger) *Service {
|
||||||
|
return &Service{repo: repo, logger: logger.With(zap.String("module", "OperationService"))}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FindById returns the operation for the given chainID/emitter/seq.
|
||||||
|
func (s *Service) FindById(ctx context.Context, chainID vaa.ChainID,
|
||||||
|
emitter *types.Address, seq string) (*OperationDto, error) {
|
||||||
|
id := fmt.Sprintf("%d/%s/%s", chainID, emitter.Hex(), seq)
|
||||||
|
operation, err := s.repo.FindById(ctx, id)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return operation, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FindAll returns all operations filtered by q.
|
||||||
|
func (s *Service) FindAll(ctx context.Context, q string, pagination *pagination.Pagination) ([]*OperationDto, error) {
|
||||||
|
operations, err := s.repo.FindAll(ctx, q, pagination)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return operations, nil
|
||||||
|
}
|
|
@ -3,6 +3,7 @@ package relays
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
errs "github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
|
errs "github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
|
||||||
|
@ -31,8 +32,8 @@ func NewRepository(db *mongo.Database, logger *zap.Logger) *Repository {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *Repository) FindOne(ctx context.Context, q *RelaysQuery) (*RelayResponse, error) {
|
func (r *Repository) FindOne(ctx context.Context, q *RelaysQuery) (*RelayDoc, error) {
|
||||||
response := make(RelayResponse)
|
var response RelayDoc
|
||||||
err := r.collections.relays.FindOne(ctx, q.toBSON()).Decode(&response)
|
err := r.collections.relays.FindOne(ctx, q.toBSON()).Decode(&response)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if errors.Is(err, mongo.ErrNoDocuments) {
|
if errors.Is(err, mongo.ErrNoDocuments) {
|
||||||
|
@ -53,7 +54,117 @@ type RelaysQuery struct {
|
||||||
sequence string
|
sequence string
|
||||||
}
|
}
|
||||||
|
|
||||||
type RelayResponse map[string]interface{}
|
type RelayDoc struct {
|
||||||
|
ID string `bson:"_id"`
|
||||||
|
Data struct {
|
||||||
|
Status string `bson:"status"`
|
||||||
|
ReceivedAt time.Time `bson:"receivedAt"`
|
||||||
|
CompletedAt *time.Time `bson:"completedAt"`
|
||||||
|
ToTxHash *string `bson:"toTxHash"`
|
||||||
|
Metadata *struct {
|
||||||
|
Attempts int `bson:"attempts"`
|
||||||
|
ExecutionStartTime int64 `bson:"executionStartTime"`
|
||||||
|
EmitterChain int `bson:"emitterChain"`
|
||||||
|
DidMatchDeliveryProvider bool `bson:"didMatchDeliveryProvider"`
|
||||||
|
DidParse bool `bson:"didParse"`
|
||||||
|
Instructions struct {
|
||||||
|
EncodedExecutionInfo string `bson:"encodedExecutionInfo"`
|
||||||
|
RefundAddress string `bson:"refundAddress"`
|
||||||
|
SourceDeliveryProvider string `bson:"sourceDeliveryProvider"`
|
||||||
|
SenderAddress string `bson:"senderAddress"`
|
||||||
|
VaaKeys []any `bson:"vaaKeys"`
|
||||||
|
ExtraReceiverValue struct {
|
||||||
|
Hex string `bson:"_hex"`
|
||||||
|
IsBigNumber bool `bson:"_isBigNumber"`
|
||||||
|
} `bson:"extraReceiverValue"`
|
||||||
|
TargetAddress string `bson:"targetAddress"`
|
||||||
|
RequestedReceiverValue struct {
|
||||||
|
Hex string `bson:"_hex"`
|
||||||
|
IsBigNumber bool `bson:"_isBigNumber"`
|
||||||
|
} `bson:"requestedReceiverValue"`
|
||||||
|
RefundChainID int `bson:"refundChainId"`
|
||||||
|
RefundDeliveryProvider string `bson:"refundDeliveryProvider"`
|
||||||
|
TargetChainID int `bson:"targetChainId"`
|
||||||
|
} `bson:"instructions"`
|
||||||
|
DeliveryRecord struct {
|
||||||
|
MaxRefund string `bson:"maxRefund"`
|
||||||
|
Budget string `bson:"budget"`
|
||||||
|
TargetChainAssetPriceUSD float64 `bson:"targetChainAssetPriceUSD"`
|
||||||
|
WalletNonce int `bson:"walletNonce"`
|
||||||
|
TransactionHashes []string `bson:"transactionHashes"`
|
||||||
|
HasAdditionalVaas bool `bson:"hasAdditionalVaas"`
|
||||||
|
AdditionalVaasDidFetch bool `bson:"additionalVaasDidFetch"`
|
||||||
|
WalletAcquisitionEndTime int64 `bson:"walletAcquisitionEndTime"`
|
||||||
|
WalletAcquisitionDidSucceed bool `bson:"walletAcquisitionDidSucceed"`
|
||||||
|
WalletBalanceAfter string `bson:"walletBalanceAfter"`
|
||||||
|
ResultLog struct {
|
||||||
|
TransactionHash string `bson:"transactionHash"`
|
||||||
|
VaaHash string `bson:"vaaHash"`
|
||||||
|
RefundStatus string `bson:"refundStatus"`
|
||||||
|
RevertString string `bson:"revertString"`
|
||||||
|
Status string `bson:"status"`
|
||||||
|
GasUsed string `bson:"gasUsed"`
|
||||||
|
SourceChain string `bson:"sourceChain"`
|
||||||
|
SourceVaaSequence string `bson:"sourceVaaSequence"`
|
||||||
|
} `bson:"resultLog"`
|
||||||
|
ResultString string `bson:"resultString"`
|
||||||
|
AdditionalVaaKeysPrintable string `bson:"additionalVaaKeysPrintable"`
|
||||||
|
BudgetUsd float64 `bson:"budgetUsd"`
|
||||||
|
WalletAcquisitionStartTime int64 `bson:"walletAcquisitionStartTime"`
|
||||||
|
GasUnitsEstimate int `bson:"gasUnitsEstimate"`
|
||||||
|
EstimatedTransactionFeeEther string `bson:"estimatedTransactionFeeEther"`
|
||||||
|
TargetChainDecimals int `bson:"targetChainDecimals"`
|
||||||
|
DeliveryInstructionsPrintable struct {
|
||||||
|
Payload string `bson:"payload"`
|
||||||
|
EncodedExecutionInfo string `bson:"encodedExecutionInfo"`
|
||||||
|
RefundDeliveryProvider string `bson:"refundDeliveryProvider"`
|
||||||
|
SourceDeliveryProvider string `bson:"sourceDeliveryProvider"`
|
||||||
|
SenderAddress string `bson:"senderAddress"`
|
||||||
|
TargetAddress string `bson:"targetAddress"`
|
||||||
|
RequestedReceiverValue string `bson:"requestedReceiverValue"`
|
||||||
|
ExtraReceiverValue string `bson:"extraReceiverValue"`
|
||||||
|
RefundChainID string `bson:"refundChainId"`
|
||||||
|
RefundAddress string `bson:"refundAddress"`
|
||||||
|
VaaKeys []any `bson:"vaaKeys"`
|
||||||
|
TargetChainID string `bson:"targetChainId"`
|
||||||
|
} `bson:"deliveryInstructionsPrintable"`
|
||||||
|
WalletAddress string `bson:"walletAddress"`
|
||||||
|
GasUsed int `bson:"gasUsed"`
|
||||||
|
GasPrice string `bson:"gasPrice"`
|
||||||
|
ReceiverValue string `bson:"receiverValue"`
|
||||||
|
MaxRefundUsd float64 `bson:"maxRefundUsd"`
|
||||||
|
GasPriceEstimate string `bson:"gasPriceEstimate"`
|
||||||
|
TransactionDidSubmit bool `bson:"transactionDidSubmit"`
|
||||||
|
EstimatedTransactionFee string `bson:"estimatedTransactionFee"`
|
||||||
|
TransactionSubmitTimeStart int64 `bson:"transactionSubmitTimeStart"`
|
||||||
|
TransactionSubmitTimeEnd int64 `bson:"transactionSubmitTimeEnd"`
|
||||||
|
ResultLogDidParse bool `bson:"resultLogDidParse"`
|
||||||
|
ChainID int `bson:"chainId"`
|
||||||
|
ReceiverValueUsd float64 `bson:"receiverValueUsd"`
|
||||||
|
WalletBalanceBefore string `bson:"walletBalanceBefore"`
|
||||||
|
} `bson:"deliveryRecord"`
|
||||||
|
RawVaaHex string `bson:"rawVaaHex"`
|
||||||
|
PayloadType int `bson:"payloadType"`
|
||||||
|
MaxAttempts int `bson:"maxAttempts"`
|
||||||
|
DidError bool `bson:"didError"`
|
||||||
|
ExecutionEndTime int64 `bson:"executionEndTime"`
|
||||||
|
EmitterAddress string `bson:"emitterAddress"`
|
||||||
|
DidSubmitTransaction bool `bson:"didSubmitTransaction"`
|
||||||
|
Sequence string `bson:"sequence"`
|
||||||
|
} `bson:"metadata"`
|
||||||
|
Sequence string `bson:"sequence"`
|
||||||
|
Vaa string `bson:"vaa"`
|
||||||
|
FromTxHash string `bson:"fromTxHash"`
|
||||||
|
MaxAttempts int `bson:"maxAttempts"`
|
||||||
|
AddedTimes int `bson:"addedTimes"`
|
||||||
|
ErrorMessage any `bson:"errorMessage"`
|
||||||
|
EmitterChain int `bson:"emitterChain"`
|
||||||
|
EmitterAddress string `bson:"emitterAddress"`
|
||||||
|
FailedAt *time.Time `bson:"failedAt"`
|
||||||
|
} `bson:"data"`
|
||||||
|
Event string `bson:"event"`
|
||||||
|
Origin string `bson:"origin"`
|
||||||
|
}
|
||||||
|
|
||||||
func (q *RelaysQuery) toBSON() *bson.D {
|
func (q *RelaysQuery) toBSON() *bson.D {
|
||||||
r := bson.D{}
|
r := bson.D{}
|
||||||
|
|
|
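Decoding into the typed RelayDoc, instead of the old map-based RelayResponse, lets callers read nested delivery data without type assertions. A sketch of such a projection, assumed rather than taken from the handler code; it only reads fields declared in RelayDoc above and guards the nullable metadata:

// toDeliveryStatus pulls the delivery outcome out of a RelayDoc.
func toDeliveryStatus(doc *RelayDoc) (status, targetTxHash string) {
	status = doc.Data.Status
	if doc.Data.Metadata != nil {
		result := doc.Data.Metadata.DeliveryRecord.ResultLog
		if result.Status != "" {
			status = result.Status
		}
		targetTxHash = result.TransactionHash
	}
	return status, targetTxHash
}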
@ -24,7 +24,7 @@ func (s *Service) FindByVAA(
|
||||||
chainID vaa.ChainID,
|
chainID vaa.ChainID,
|
||||||
emitterAddr *types.Address,
|
emitterAddr *types.Address,
|
||||||
seq string,
|
seq string,
|
||||||
) (*RelayResponse, error) {
|
) (*RelayDoc, error) {
|
||||||
|
|
||||||
query := Query().
|
query := Query().
|
||||||
SetChain(chainID).
|
SetChain(chainID).
|
||||||
|
|
|
@ -12,11 +12,11 @@ import (
|
||||||
"github.com/influxdata/influxdb-client-go/v2/api"
|
"github.com/influxdata/influxdb-client-go/v2/api"
|
||||||
"github.com/mitchellh/mapstructure"
|
"github.com/mitchellh/mapstructure"
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
|
"github.com/wormhole-foundation/wormhole-explorer/api/handlers/common"
|
||||||
errs "github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
|
errs "github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
|
"github.com/wormhole-foundation/wormhole-explorer/api/internal/pagination"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/api/internal/tvl"
|
"github.com/wormhole-foundation/wormhole-explorer/api/internal/tvl"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/common/domain"
|
"github.com/wormhole-foundation/wormhole-explorer/common/domain"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/common/utils"
|
|
||||||
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
|
sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
|
||||||
"go.mongodb.org/mongo-driver/bson"
|
"go.mongodb.org/mongo-driver/bson"
|
||||||
"go.mongodb.org/mongo-driver/mongo"
|
"go.mongodb.org/mongo-driver/mongo"
|
||||||
|
@ -893,119 +893,76 @@ func (r *Repository) ListTransactionsByAddress(
|
||||||
pagination *pagination.Pagination,
|
pagination *pagination.Pagination,
|
||||||
) ([]TransactionDto, error) {
|
) ([]TransactionDto, error) {
|
||||||
|
|
||||||
// Build the aggregation pipeline
|
ids, err := common.FindVaasIdsByFromAddressOrToAddress(ctx, r.db, address)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ids) == 0 {
|
||||||
|
return []TransactionDto{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
var pipeline mongo.Pipeline
|
var pipeline mongo.Pipeline
|
||||||
{
|
|
||||||
// filter transactions by destination address
|
|
||||||
{
|
|
||||||
const fieldName = "standardizedProperties.toAddress"
|
|
||||||
|
|
||||||
// If the address is non-EVM, it could be case sensitive (i.e. Solana), so we can't alter it.
|
// filter by ids
|
||||||
var nonEvmFilter = bson.D{{fieldName, bson.M{"$eq": address}}}
|
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: ids}}}}}})
|
||||||
|
|
||||||
// If the address is EVM, we must normalize it to the format used in the database,
|
// inner join on the `parsedVaa` collection
|
||||||
// which is a 0x prefix and all lowercase characters.
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{
|
||||||
var evmFilter bson.D
|
{Key: "from", Value: "parsedVaa"},
|
||||||
if utils.StartsWith0x(address) {
|
{Key: "localField", Value: "_id"},
|
||||||
evmFilter = bson.D{{fieldName, bson.M{"$eq": strings.ToLower(address)}}}
|
{Key: "foreignField", Value: "_id"},
|
||||||
} else {
|
{Key: "as", Value: "parsedVaa"},
|
||||||
evmFilter = bson.D{{fieldName, bson.M{"$eq": "0x" + strings.ToLower(address)}}}
|
}}})
|
||||||
}
|
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.D{{Key: "parsedVaa", Value: bson.D{{Key: "$ne", Value: []any{}}}}}}})
|
||||||
|
|
||||||
pipeline = append(pipeline, bson.D{{"$match", bson.D{{"$or", bson.A{nonEvmFilter, evmFilter}}}}})
|
// sort by timestamp
|
||||||
}
|
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: bson.D{bson.E{Key: "timestamp", Value: pagination.GetSortInt()}}}})
|
||||||
|
|
||||||
// specify sorting criteria
|
// Skip initial results
|
||||||
pipeline = append(pipeline, bson.D{
|
pipeline = append(pipeline, bson.D{{Key: "$skip", Value: pagination.Skip}})
|
||||||
{"$sort", bson.D{bson.E{"indexedAt", -1}}},
|
|
||||||
})
|
// Limit size of results
|
||||||
|
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: pagination.Limit}})
|
||||||
|
|
||||||
// left outer join on the `transferPrices` collection
|
// left outer join on the `transferPrices` collection
|
||||||
pipeline = append(pipeline, bson.D{
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{
|
||||||
{"$lookup", bson.D{
|
{Key: "from", Value: "transferPrices"},
|
||||||
{"from", "transferPrices"},
|
{Key: "localField", Value: "_id"},
|
||||||
{"localField", "_id"},
|
{Key: "foreignField", Value: "_id"},
|
||||||
{"foreignField", "_id"},
|
{Key: "as", Value: "transferPrices"},
|
||||||
{"as", "transferPrices"},
|
}}})
|
||||||
}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// left outer join on the `vaas` collection
|
|
||||||
pipeline = append(pipeline, bson.D{
|
|
||||||
{"$lookup", bson.D{
|
|
||||||
{"from", "vaas"},
|
|
||||||
{"localField", "_id"},
|
|
||||||
{"foreignField", "_id"},
|
|
||||||
{"as", "vaas"},
|
|
||||||
}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// left outer join on the `vaaIdTxHash` collection
|
// left outer join on the `vaaIdTxHash` collection
|
||||||
pipeline = append(pipeline, bson.D{
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{
|
||||||
{"$lookup", bson.D{
|
{Key: "from", Value: "vaaIdTxHash"},
|
||||||
{"from", "vaaIdTxHash"},
|
{Key: "localField", Value: "_id"},
|
||||||
{"localField", "_id"},
|
{Key: "foreignField", Value: "_id"},
|
||||||
{"foreignField", "_id"},
|
{Key: "as", Value: "vaaIdTxHash"},
|
||||||
{"as", "vaaIdTxHash"},
|
}}})
|
||||||
}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// left outer join on the `parsedVaa` collection
|
|
||||||
pipeline = append(pipeline, bson.D{
|
|
||||||
{"$lookup", bson.D{
|
|
||||||
{"from", "parsedVaa"},
|
|
||||||
{"localField", "_id"},
|
|
||||||
{"foreignField", "_id"},
|
|
||||||
{"as", "parsedVaa"},
|
|
||||||
}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// left outer join on the `globalTransactions` collection
|
// left outer join on the `globalTransactions` collection
|
||||||
pipeline = append(pipeline, bson.D{
|
pipeline = append(pipeline, bson.D{{Key: "$lookup", Value: bson.D{
|
||||||
{"$lookup", bson.D{
|
{Key: "from", Value: "globalTransactions"},
|
||||||
{"from", "globalTransactions"},
|
{Key: "localField", Value: "_id"},
|
||||||
{"localField", "_id"},
|
{Key: "foreignField", Value: "_id"},
|
||||||
{"foreignField", "_id"},
|
{Key: "as", Value: "globalTransactions"},
|
||||||
{"as", "globalTransactions"},
|
}}})
|
||||||
}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// add nested fields
|
// add nested fields
|
||||||
pipeline = append(pipeline, bson.D{
|
pipeline = append(pipeline, bson.D{
|
||||||
{"$addFields", bson.D{
|
{Key: "$addFields", Value: bson.D{
|
||||||
{"txHash", bson.M{"$arrayElemAt": []interface{}{"$vaaIdTxHash.txHash", 0}}},
|
{Key: "txHash", Value: bson.M{"$arrayElemAt": []interface{}{"$vaaIdTxHash.txHash", 0}}},
|
||||||
{"timestamp", bson.M{"$arrayElemAt": []interface{}{"$vaas.timestamp", 0}}},
|
{Key: "payload", Value: bson.M{"$arrayElemAt": []interface{}{"$parsedVaa.parsedPayload", 0}}},
|
||||||
{"payload", bson.M{"$arrayElemAt": []interface{}{"$parsedVaa.parsedPayload", 0}}},
|
{Key: "standardizedProperties", Value: bson.M{"$arrayElemAt": []interface{}{"$parsedVaa.standardizedProperties", 0}}},
|
||||||
{"standardizedProperties", bson.M{"$arrayElemAt": []interface{}{"$parsedVaa.standardizedProperties", 0}}},
|
{Key: "symbol", Value: bson.M{"$arrayElemAt": []interface{}{"$transferPrices.symbol", 0}}},
|
||||||
{"symbol", bson.M{"$arrayElemAt": []interface{}{"$transferPrices.symbol", 0}}},
|
{Key: "usdAmount", Value: bson.M{"$arrayElemAt": []interface{}{"$transferPrices.usdAmount", 0}}},
|
||||||
{"usdAmount", bson.M{"$arrayElemAt": []interface{}{"$transferPrices.usdAmount", 0}}},
|
{Key: "tokenAmount", Value: bson.M{"$arrayElemAt": []interface{}{"$transferPrices.tokenAmount", 0}}},
|
||||||
{"tokenAmount", bson.M{"$arrayElemAt": []interface{}{"$transferPrices.tokenAmount", 0}}},
|
|
||||||
}},
|
}},
|
||||||
})
|
})
|
||||||
|
|
||||||
// Sorting criteria
|
|
||||||
pipeline = append(pipeline, bson.D{
|
|
||||||
{"$sort", bson.D{bson.E{"timestamp", pagination.GetSortInt()}}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// Unset unused fields
|
|
||||||
pipeline = append(pipeline, bson.D{
|
|
||||||
{"$unset", []interface{}{"transferPrices", "vaas", "vaaTxIdHash", "parsedVaa"}},
|
|
||||||
})
|
|
||||||
|
|
||||||
// Skip initial results
|
|
||||||
pipeline = append(pipeline, bson.D{
|
|
||||||
{"$skip", pagination.Skip},
|
|
||||||
})
|
|
||||||
|
|
||||||
// Limit size of results
|
|
||||||
pipeline = append(pipeline, bson.D{
|
|
||||||
{"$limit", pagination.Limit},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Execute the aggregation pipeline
|
// Execute the aggregation pipeline
|
||||||
cur, err := r.collections.parsedVaa.Aggregate(ctx, pipeline)
|
cur, err := r.collections.vaas.Aggregate(ctx, pipeline)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
r.logger.Error("failed execute aggregation pipeline", zap.Error(err))
|
r.logger.Error("failed execute aggregation pipeline", zap.Error(err))
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|
|
@ -22,6 +22,7 @@ type Service struct {
	cache             cache.Cache
	expiration        time.Duration
	supportedChainIDs map[vaa.ChainID]string
	tokenProvider     *domain.TokenProvider
	logger            *zap.Logger
}

@ -34,10 +35,10 @@ const (
)

// NewService create a new Service.
func NewService(repo *Repository, cache cache.Cache, expiration time.Duration, tokenProvider *domain.TokenProvider, logger *zap.Logger) *Service {
	supportedChainIDs := domain.GetSupportedChainIDs()
	return &Service{repo: repo, supportedChainIDs: supportedChainIDs,
		cache: cache, expiration: expiration, tokenProvider: tokenProvider, logger: logger.With(zap.String("module", "TransactionService"))}
}

// GetTransactionCount get the last transactions.

@ -104,7 +105,7 @@ func (s *Service) GetTokenByChainAndAddress(ctx context.Context, chainID vaa.Cha
	}

	//get token by contractID (chainID + tokenAddress)
	tokenMetadata, ok := s.tokenProvider.GetTokenByAddress(chainID, tokenAddress.Hex())
	if !ok {
		return nil, errs.ErrNotFound
	}

@ -159,3 +160,7 @@ func (s *Service) GetTransactionByID(
	// Return matching document
	return &output[0], nil
}

func (s *Service) GetTokenProvider() *domain.TokenProvider {
	return s.tokenProvider
}
@ -72,10 +72,13 @@ func (r *Repository) FindVaasByTxHashWorkaround(
			{"$or", bson.A{
				bson.D{{"originTx.nativeTxHash", bson.M{"$eq": query.txHash}}},
				bson.D{{"originTx.nativeTxHash", bson.M{"$eq": "0x" + query.txHash}}},
				bson.D{{"originTx.attribute.value.originTxHash", bson.M{"$eq": query.txHash}}},
				bson.D{{"originTx.attribute.value.originTxHash", bson.M{"$eq": "0x" + query.txHash}}},
			}},
		},
		nil,
	)

	if err != nil {
		requestID := fmt.Sprintf("%v", ctx.Value("requestid"))
		r.logger.Error("failed to find globalTransactions by TxHash",
api/main.go

@ -29,6 +29,7 @@ import (
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/heartbeats"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/infrastructure"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/observations"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/relays"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/transactions"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/vaa"

@ -42,6 +43,7 @@ import (
	wormscanCache "github.com/wormhole-foundation/wormhole-explorer/common/client/cache"
	vaaPayloadParser "github.com/wormhole-foundation/wormhole-explorer/common/client/parser"
	"github.com/wormhole-foundation/wormhole-explorer/common/dbutil"
	"github.com/wormhole-foundation/wormhole-explorer/common/domain"
	xlogger "github.com/wormhole-foundation/wormhole-explorer/common/logger"
	"github.com/wormhole-foundation/wormhole-explorer/common/utils"
	sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"

@ -151,6 +153,10 @@ func main() {
		rootLogger,
	)
	relaysRepo := relays.NewRepository(db.Database, rootLogger)
	operationsRepo := operations.NewRepository(db.Database, rootLogger)

	// create token provider
	tokenProvider := domain.NewTokenProvider(cfg.P2pNetwork)

	// Set up services
	rootLogger.Info("initializing services")

@ -160,8 +166,9 @@ func main() {
	governorService := governor.NewService(governorRepo, rootLogger)
	infrastructureService := infrastructure.NewService(infrastructureRepo, rootLogger)
	heartbeatsService := heartbeats.NewService(heartbeatsRepo, rootLogger)
	transactionsService := transactions.NewService(transactionsRepo, cache, time.Duration(cfg.Cache.MetricExpiration)*time.Second, tokenProvider, rootLogger)
	relaysService := relays.NewService(relaysRepo, rootLogger)
	operationsService := operations.NewService(operationsRepo, rootLogger)

	// Set up a custom error handler
	response.SetEnableStackTrace(*cfg)

@ -203,7 +210,7 @@ func main() {

	// Set up route handlers
	app.Get("/swagger.json", GetSwagger)
	wormscan.RegisterRoutes(app, rootLogger, addressService, vaaService, obsService, governorService, infrastructureService, transactionsService, relaysService, operationsService)
	guardian.RegisterRoutes(cfg, app, rootLogger, vaaService, governorService, heartbeatsService)

	// Set up gRPC handlers
@ -208,22 +208,13 @@ func ExtractAddressFromQueryParams(c *fiber.Ctx, l *zap.Logger) string {
}

// ExtractAddressFromPath parses the `id` parameter from the route path.
func ExtractAddressFromPath(c *fiber.Ctx, l *zap.Logger) string {
	return c.Params("id")
}

// ExtractQueryParam parses the `q` parameter from query params.
func ExtractQueryParam(c *fiber.Ctx, l *zap.Logger) string {
	return c.Query("q")
}

// GetTxHash parses the `txHash` parameter from query params.
@ -38,10 +38,7 @@ func NewController(srv *address.Service, logger *zap.Logger) *Controller {
// @Router /api/v1/address/{address} [get]
func (c *Controller) FindById(ctx *fiber.Ctx) error {

	address := middleware.ExtractAddressFromPath(ctx, c.logger)

	pagination, err := middleware.ExtractPagination(ctx)
	if err != nil {
@ -0,0 +1,88 @@
package operations

import (
	"strconv"

	"github.com/gofiber/fiber/v2"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
	"github.com/wormhole-foundation/wormhole-explorer/api/middleware"
	"go.uber.org/zap"
)

// Controller is the controller for the operation resource.
type Controller struct {
	srv    *operations.Service
	logger *zap.Logger
}

// NewController create a new controler.
func NewController(operationService *operations.Service, logger *zap.Logger) *Controller {
	return &Controller{
		srv:    operationService,
		logger: logger.With(zap.String("module", "OperationsController")),
	}
}

// FindAll godoc
// @Description Find all operations.
// @Tags wormholescan
// @ID get-operations
// @Param q query string false "search query"
// @Param page query integer false "page number"
// @Param size query integer false "page size"
// @Success 200 {object} []OperationResponse
// @Failure 400
// @Failure 500
// @Router /api/v1/operations [get]
func (c *Controller) FindAll(ctx *fiber.Ctx) error {
	// Extract query parameters
	pagination, err := middleware.ExtractPagination(ctx)
	if err != nil {
		return err
	}

	// Extract q search query parameter
	q := middleware.ExtractQueryParam(ctx, c.logger)

	// Find operations by q search param.
	operations, err := c.srv.FindAll(ctx.Context(), q, pagination)
	if err != nil {
		return err
	}

	// build response
	response := toListOperationResponse(operations, q, c.logger)
	return ctx.JSON(response)
}

// FindById godoc
// @Description Find operations by ID (chainID/emitter/sequence).
// @Tags wormholescan
// @ID get-operation-by-id
// @Param chain_id path integer true "id of the blockchain"
// @Param emitter path string true "address of the emitter"
// @Param seq path integer true "sequence of the VAA"
// @Success 200 {object} OperationResponse
// @Failure 400
// @Failure 500
// @Router /api/v1/operations/{chain_id}/{emitter}/{seq} [get]
func (c *Controller) FindById(ctx *fiber.Ctx) error {
	// Extract query params
	chainID, emitter, seq, err := middleware.ExtractVAAParams(ctx, c.logger)
	if err != nil {
		return err
	}

	// Find operations by chainID, emitter and sequence.
	operation, err := c.srv.FindById(ctx.Context(), chainID, emitter, strconv.FormatUint(seq, 10))
	if err != nil {
		return err
	}

	// build response
	response, err := toOperationResponse(operation, c.logger)
	if err != nil {
		return err
	}
	return ctx.JSON(response)
}
@ -0,0 +1,298 @@
package operations

import (
	"strconv"
	"strings"
	"time"

	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
	"github.com/wormhole-foundation/wormhole-explorer/api/internal/errors"
	"github.com/wormhole-foundation/wormhole-explorer/common/domain"
	"github.com/wormhole-foundation/wormhole-explorer/common/utils"

	sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
	"go.uber.org/zap"
)

// OperationResponse definition.
type OperationResponse struct {
	ID             string         `json:"id"`
	EmitterChain   sdk.ChainID    `json:"emitterChain"`
	EmitterAddress EmitterAddress `json:"emitterAddress"`
	Sequence       string         `json:"sequence"`
	Vaa            []byte         `json:"vaa,omitempty"`
	Content        *Content       `json:"content,omitempty"`
	SourceChain    *SourceChain   `json:"sourceChain,omitempty"`
	TargetChain    *TargetChain   `json:"targetChain,omitempty"`
	Data           map[string]any `json:"data,omitempty"`
}

// EmitterAddress definition.
type EmitterAddress struct {
	Hex    string `json:"hex,omitempty"`
	Native string `json:"native,omitempty"`
}

// Content definition.
type Content struct {
	Payload                map[string]any                     `json:"payload,omitempty"`
	StandardizedProperties *operations.StandardizedProperties `json:"standardizedProperties,omitempty"`
}

// SourceChain definition.
type SourceChain struct {
	ChainId     sdk.ChainID `json:"chainId"`
	Timestamp   *time.Time  `json:"timestamp"`
	Transaction Transaction `json:"transaction"`
	From        string      `json:"from"`
	Status      string      `json:"status"`
	Data        *Data       `json:"attribute,omitempty"`
}

// TxHash definition.
type Transaction struct {
	TxHash       string  `json:"txHash"`
	SecondTxHash *string `json:"secondTxHash,omitempty"`
}

// TargetChain definition.
type TargetChain struct {
	ChainId     sdk.ChainID `json:"chainId"`
	Timestamp   *time.Time  `json:"timestamp"`
	Transaction Transaction `json:"transaction"`
	Status      string      `json:"status"`
	From        string      `json:"from"`
	To          string      `json:"to"`
}

// Data represents a custom attribute for a origin transaction.
type Data struct {
	Type  string         `bson:"type" json:"type"`
	Value map[string]any `bson:"value" json:"value"`
}

type ListOperationResponse struct {
	Operations []*OperationResponse `json:"operations"`
	Match      string               `json:"matched"`
}

// toOperationResponse converts an operations.OperationDto to an OperationResponse.
func toOperationResponse(operation *operations.OperationDto, log *zap.Logger) (*OperationResponse, error) {
	// Get emitter chain, address and sequence from operation.
	chainID, address, sequence, err := getChainEmitterSequence(operation)
	if err != nil {
		log.Error("Error parsing chainId, address, sequence from operation ID",
			zap.Error(err),
			zap.String("operationID", operation.ID))
		return nil, err
	}

	// Get emitter native address from chainID and address.
	emitterNativeAddress, err := domain.TranslateEmitterAddress(chainID, address)
	if err != nil {
		log.Warn("failed to translate emitter address",
			zap.Stringer("chain", chainID),
			zap.String("address", address),
			zap.Error(err),
		)
		return nil, err
	}

	// Get rawVAA from operation.
	var rawVAA []byte
	if operation.Vaa != nil {
		rawVAA = operation.Vaa.Vaa
	}

	// Get content from operation.
	var content Content
	if len(operation.Payload) > 0 || operation.StandardizedProperties != nil {
		content = Content{
			Payload:                operation.Payload,
			StandardizedProperties: operation.StandardizedProperties,
		}
	}

	// Get sourceChain and targetChain events
	sourceChain, targetChain := getChainEvents(chainID, operation)
	r := OperationResponse{
		ID:           operation.ID,
		EmitterChain: chainID,
		EmitterAddress: EmitterAddress{
			Hex:    address,
			Native: emitterNativeAddress,
		},
		Sequence:    sequence,
		Vaa:         rawVAA,
		Content:     &content,
		Data:        getAdditionalData(operation),
		SourceChain: sourceChain,
		TargetChain: targetChain,
	}

	return &r, nil
}

// getChainEmitterSequence returns the chainID, address, sequence for the given operation.
func getChainEmitterSequence(operation *operations.OperationDto) (sdk.ChainID, string, string, error) {
	if operation.Vaa != nil {
		return operation.Vaa.EmitterChain, operation.Vaa.EmitterAddr, operation.Vaa.Sequence, nil
	} else {
		// Get emitter chain, address, sequence by operation ID.
		id := strings.Split(operation.ID, "/")
		if len(id) != 3 {
			return 0, "", "", errors.ErrInternalError
		}
		chainID, err := strconv.ParseUint(id[0], 10, 16)
		if err != nil {
			return 0, "", "", err
		}
		return sdk.ChainID(chainID), id[1], id[2], nil
	}
}

func getAdditionalData(operation *operations.OperationDto) map[string]interface{} {
	ok := operation.Symbol == "" && operation.TokenAmount == "" && operation.UsdAmount == ""
	if ok {
		return nil
	}
	return map[string]interface{}{
		"symbol":      operation.Symbol,
		"tokenAmount": operation.TokenAmount,
		"usdAmount":   operation.UsdAmount,
	}
}

// getChainEvents returns the sourceChain and targetChain events for the given operation.
func getChainEvents(chainID sdk.ChainID, operation *operations.OperationDto) (*SourceChain, *TargetChain) {
	if operation.SourceTx == nil && operation.DestinationTx == nil {
		return nil, nil
	}

	// if len(operation.GlobalTransations) == 0 {
	// 	return nil, nil
	// }

	// build sourceChain
	var sourceChain *SourceChain
	if operation.SourceTx != nil {
		var data *Data
		if operation.SourceTx.Attribute != nil {
			data = &Data{
				Type:  operation.SourceTx.Attribute.Type,
				Value: operation.SourceTx.Attribute.Value,
			}
		}

		// transactions
		var secondTxHash *string
		if data != nil {
			attributeTxHash, ok := data.Value["originTxHash"]
			if ok {
				txHash, ok := attributeTxHash.(string)
				if ok {
					secondTxHash = &txHash
				}
			}
		}
		transaction := Transaction{
			TxHash:       operation.SourceTx.TxHash,
			SecondTxHash: secondTxHash,
		}

		sourceChain = &SourceChain{
			ChainId:     chainID,
			Timestamp:   operation.SourceTx.Timestamp,
			Transaction: transaction,
			From:        operation.SourceTx.From,
			Status:      operation.SourceTx.Status,
			Data:        data,
		}
	}

	// build targetChain
	var targetChain *TargetChain
	if operation.DestinationTx != nil {
		targetChain = &TargetChain{
			ChainId:   operation.DestinationTx.ChainID,
			Timestamp: operation.DestinationTx.Timestamp,
			Transaction: Transaction{
				TxHash: operation.DestinationTx.TxHash,
			},
			Status: operation.DestinationTx.Status,
			From:   operation.DestinationTx.From,
			To:     operation.DestinationTx.To,
		}
	}

	return sourceChain, targetChain
}

func toListOperationResponse(operations []*operations.OperationDto, q string, log *zap.Logger) ListOperationResponse {
	response := ListOperationResponse{
		Operations: make([]*OperationResponse, 0, len(operations)),
	}

	for i := range operations {
		r, err := toOperationResponse(operations[i], log)
		if err == nil {
			response.Operations = append(response.Operations, r)
		}
	}

	response.Match = buildMatchedField(response, q)
	return response
}

func buildMatchedField(operations ListOperationResponse, q string) string {
	if q == "" {
		return ""
	}
	if len(operations.Operations) == 0 {
		return ""
	}
	operation := operations.Operations[0]
	if operation.ID == q {
		return "vaaId"
	}

	// format q to match values
	qHexa := strings.ToLower(q)
	if !utils.StartsWith0x(q) {
		qHexa = "0x" + strings.ToLower(qHexa)
	}

	// matched by sourceChain txHash
	if operation.SourceChain != nil {
		if operation.SourceChain.Transaction.TxHash == q || operation.SourceChain.Transaction.TxHash == qHexa {
			return "txHash"
		}

		if operation.SourceChain.Data != nil {
			if operation.SourceChain.Data.Value["OriginTxHash"] == q || operation.SourceChain.Data.Value["originTxHash"] == qHexa {
				return "txHash"
			}
		}
	}

	// matched by targetChain txHash
	if operation.TargetChain != nil {
		if operation.TargetChain.Transaction.TxHash == q || operation.TargetChain.Transaction.TxHash == qHexa {
			return "txHash"
		}
	}

	// matched by sourceChain from address
	if operation.SourceChain != nil {
		if operation.SourceChain.From == q || operation.SourceChain.From == qHexa {
			return "address"
		}
	}

	// matched by standardizedProperties to address
	if operation.Content.StandardizedProperties.ToAddress == q || operation.Content.StandardizedProperties.ToAddress == qHexa {
		return "address"
	}

	return ""
}
@ -2,6 +2,7 @@ package relays

import (
	"strconv"
	"time"

	"github.com/gofiber/fiber/v2"
	"github.com/wormhole-foundation/wormhole-explorer/api/handlers/relays"

@ -40,5 +41,112 @@ func (c *Controller) FindOne(ctx *fiber.Ctx) error {
	if err != nil {
		return err
	}
	response := c.makeResponse(relay)
	return ctx.JSON(response)
}

func (c *Controller) makeResponse(doc *relays.RelayDoc) *RelayResponse {
	var data *RelayDataResponse
	if doc.Data.Metadata != nil {
		data = &RelayDataResponse{
			FromTxHash: doc.Data.FromTxHash,
			ToTxHash:   doc.Data.ToTxHash,
			Delivery: DeliveryReponse{
				ResultExecution: ResultExecutionResponse{
					TransactionHash: doc.Data.Metadata.DeliveryRecord.ResultLog.TransactionHash,
					RefundStatus:    doc.Data.Metadata.DeliveryRecord.ResultLog.RefundStatus,
					RevertString:    doc.Data.Metadata.DeliveryRecord.ResultLog.RevertString,
					Status:          doc.Data.Metadata.DeliveryRecord.ResultLog.Status,
					GasUsed:         doc.Data.Metadata.DeliveryRecord.ResultLog.GasUsed,
					Detail:          doc.Data.Metadata.DeliveryRecord.ResultString,
				},
				RelayGasUsed: doc.Data.Metadata.DeliveryRecord.GasUsed,
			},
			Instructions: InstructionsResponse{
				EncodedExecutionInfo:   doc.Data.Metadata.Instructions.EncodedExecutionInfo,
				RefundAddress:          doc.Data.Metadata.Instructions.RefundAddress,
				SourceDeliveryProvider: doc.Data.Metadata.Instructions.SourceDeliveryProvider,
				SenderAddress:          doc.Data.Metadata.Instructions.SenderAddress,
				VaaKeys:                doc.Data.Metadata.Instructions.VaaKeys,
				ExtraReceiverValue: struct {
					Hex         string `json:"_hex"`
					IsBigNumber bool   `json:"_isBigNumber"`
				}{
					Hex:         doc.Data.Metadata.Instructions.ExtraReceiverValue.Hex,
					IsBigNumber: doc.Data.Metadata.Instructions.ExtraReceiverValue.IsBigNumber,
				},
				TargetAddress: doc.Data.Metadata.Instructions.TargetAddress,
				RequestedReceiverValue: struct {
					Hex         string `json:"_hex"`
					IsBigNumber bool   `json:"_isBigNumber"`
				}{
					Hex:         doc.Data.Metadata.Instructions.RequestedReceiverValue.Hex,
					IsBigNumber: doc.Data.Metadata.Instructions.RequestedReceiverValue.IsBigNumber,
				},
				RefundChainID:          doc.Data.Metadata.Instructions.RefundChainID,
				RefundDeliveryProvider: doc.Data.Metadata.Instructions.RefundDeliveryProvider,
				TargetChainID:          doc.Data.Metadata.Instructions.TargetChainID,
			},
		}
	}
	return &RelayResponse{
		ID:          doc.ID,
		Relayer:     doc.Origin,
		ReceivedAt:  doc.Data.ReceivedAt,
		Status:      doc.Data.Status,
		CompletedAt: doc.Data.CompletedAt,
		FailedAt:    doc.Data.FailedAt,
		Data:        data,
	}
}

type RelayResponse struct {
	ID          string             `json:"id"`
	Relayer     string             `json:"relayer"`
	Status      string             `json:"status"`
	ReceivedAt  time.Time          `json:"receivedAt"`
	CompletedAt *time.Time         `json:"completedAt"`
	FailedAt    *time.Time         `json:"failedAt"`
	Data        *RelayDataResponse `json:"data"`
}

type RelayDataResponse struct {
	FromTxHash   string               `json:"fromTxHash"`
	ToTxHash     *string              `json:"toTxHash"`
	Instructions InstructionsResponse `json:"instructions"`
	Delivery     DeliveryReponse      `json:"delivery"`
}

type DeliveryReponse struct {
	ResultExecution ResultExecutionResponse `json:"execution"`
	RelayGasUsed    int                     `json:"relayGasUsed"`
}

type ResultExecutionResponse struct {
	TransactionHash string `json:"transactionHash"`
	RefundStatus    string `json:"refundStatus"`
	RevertString    string `json:"revertString"`
	Status          string `json:"status"`
	GasUsed         string `json:"gasUsed"`
	Detail          string `json:"detail"`
}

type InstructionsResponse struct {
	EncodedExecutionInfo   string `json:"encodedExecutionInfo"`
	RefundAddress          string `json:"refundAddress"`
	SourceDeliveryProvider string `json:"sourceDeliveryProvider"`
	SenderAddress          string `json:"senderAddress"`
	VaaKeys                []any  `json:"vaaKeys"`
	ExtraReceiverValue     struct {
		Hex         string `json:"_hex"`
		IsBigNumber bool   `json:"_isBigNumber"`
	} `json:"extraReceiverValue"`
	TargetAddress          string `json:"targetAddress"`
	RequestedReceiverValue struct {
		Hex         string `json:"_hex"`
		IsBigNumber bool   `json:"_isBigNumber"`
	} `json:"requestedReceiverValue"`
	RefundChainID          int    `json:"refundChainId"`
	RefundDeliveryProvider string `json:"refundDeliveryProvider"`
	TargetChainID          int    `json:"targetChainId"`
}
@ -10,6 +10,7 @@ import (
	govsvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/governor"
	infrasvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/infrastructure"
	obssvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/observations"
	opsvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/operations"
	relayssvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/relays"
	trxsvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/transactions"
	vaasvc "github.com/wormhole-foundation/wormhole-explorer/api/handlers/vaa"

@ -17,6 +18,7 @@ import (
	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/governor"
	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/infrastructure"
	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/observations"
	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/operations"
	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/relays"
	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/transactions"
	"github.com/wormhole-foundation/wormhole-explorer/api/routes/wormscan/vaa"

@ -44,6 +46,7 @@ func RegisterRoutes(
	infrastructureService *infrasvc.Service,
	transactionsService *trxsvc.Service,
	relaysService *relayssvc.Service,
	operationsService *opsvc.Service,
) {

	// Set up controllers

@ -54,6 +57,7 @@ func RegisterRoutes(
	infrastructureCtrl := infrastructure.NewController(infrastructureService)
	transactionCtrl := transactions.NewController(transactionsService, rootLogger)
	relaysCtrl := relays.NewController(relaysService, rootLogger)
	opsCtrl := operations.NewController(operationsService, rootLogger)

	// Set up route handlers
	api := app.Group("/api/v1")

@ -78,6 +82,11 @@ func RegisterRoutes(
	api.Get("/transactions", transactionCtrl.ListTransactions)
	api.Get("/transactions/:chain/:emitter/:sequence", transactionCtrl.GetTransactionByID)

	// operations resource
	operations := api.Group("/operations")
	operations.Get("/", opsCtrl.FindAll)
	operations.Get("/:chain/:emitter/:sequence", opsCtrl.FindById)

	// vaas resource
	vaas := api.Group("/vaas")
	vaas.Use(cache.New(cacheConfig))
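The routes registered above expose the new operations resource at `/api/v1/operations` (search via the `q` query parameter) and `/api/v1/operations/{chain}/{emitter}/{sequence}`. A minimal TypeScript sketch of calling them follows; the base URL is a placeholder, not part of this diff.

```typescript
// Hedged usage sketch of the operations endpoints registered above.
// BASE_URL is a placeholder for wherever the wormhole-explorer API is deployed.
const BASE_URL = "http://localhost:8000";

async function findOperations(q?: string): Promise<any> {
  const url = new URL("/api/v1/operations", BASE_URL);
  if (q) url.searchParams.set("q", q); // the `q` search query documented in the controller
  const res = await fetch(url);
  if (!res.ok) throw new Error(`unexpected status ${res.status}`);
  return res.json(); // { operations: [...], matched: "vaaId" | "txHash" | "address" | "" }
}

async function findOperationById(chain: number, emitter: string, seq: number): Promise<any> {
  const res = await fetch(`${BASE_URL}/api/v1/operations/${chain}/${emitter}/${seq}`);
  if (!res.ok) throw new Error(`unexpected status ${res.status}`);
  return res.json(); // a single OperationResponse
}
```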
@ -169,7 +169,7 @@ func (c *Controller) GetTopAssets(ctx *fiber.Ctx) error {
	}

	// Look up the token symbol
	tokenMeta, ok := c.srv.GetTokenProvider().GetTokenByAddress(assetDTOs[i].TokenChain, assetDTOs[i].TokenAddress)
	if ok {
		asset.Symbol = tokenMeta.Symbol.String()
	}
@ -22,7 +22,11 @@
      "network": "devnet",
      "chainId": 1,
      "rpcs": ["https://api.devnet.solana.com"],
      "timeout": 10000,
      "rateLimit": {
        "period": 10000,
        "limit": 40
      }
    },
    "ethereum": {
      "name": "ethereum",
@ -0,0 +1,11 @@
{
  "platforms": {
    "ethereum": {
      "name": "ethereum",
      "network": "mainnet",
      "chainId": 2,
      "rpcs": ["https://rpc.ankr.com/eth"],
      "timeout": 10000
    }
  }
}
@ -0,0 +1 @@
{}
@ -1,6 +1,7 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
  moduleFileExtensions: ["js", "json", "ts"],
  setupFiles: ["<rootDir>/src/infrastructure/log.ts"],
  roots: ["test", "src"],
  testRegex: ".*\\.test\\.ts$",
  transform: {
@ -17,6 +17,7 @@
    "config": "^3.3.9",
    "dotenv": "^16.3.1",
    "ethers": "^5",
    "mollitia": "^0.1.0",
    "prom-client": "^15.0.0",
    "uuid": "^9.0.1",
    "winston": "3.8.2"

@ -9683,6 +9684,11 @@
        "node": ">=10"
      }
    },
    "node_modules/mollitia": {
      "version": "0.1.0",
      "resolved": "https://registry.npmjs.org/mollitia/-/mollitia-0.1.0.tgz",
      "integrity": "sha512-lbbFJdhrNEuReGlbsMqXyTnTiO8Pt+8rKAlLcVyRPNmsRyL+YWR3MlC9Sx8UkOSSLIAUNvIoNlXqD/BWBv9TFQ=="
    },
    "node_modules/ms": {
      "version": "2.1.2",
      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",

@ -19057,6 +19063,11 @@
      "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
      "optional": true
    },
    "mollitia": {
      "version": "0.1.0",
      "resolved": "https://registry.npmjs.org/mollitia/-/mollitia-0.1.0.tgz",
      "integrity": "sha512-lbbFJdhrNEuReGlbsMqXyTnTiO8Pt+8rKAlLcVyRPNmsRyL+YWR3MlC9Sx8UkOSSLIAUNvIoNlXqD/BWBv9TFQ=="
    },
    "ms": {
      "version": "2.1.2",
      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",

@ -23,6 +23,7 @@
    "config": "^3.3.9",
    "dotenv": "^16.3.1",
    "ethers": "^5",
    "mollitia": "^0.1.0",
    "prom-client": "^15.0.0",
    "uuid": "^9.0.1",
    "winston": "3.8.2"
@ -1,9 +1,12 @@
import { setTimeout } from "timers/promises";
import winston from "winston";
import { Handler } from "../entities";
import { StatRepository } from "../repositories";

export abstract class RunPollingJob {
  private interval: number;
  private id: string;
  private statRepo?: StatRepository;
  private running: boolean = false;
  protected abstract logger: winston.Logger;
  protected abstract preHook(): Promise<void>;

@ -11,9 +14,11 @@ export abstract class RunPollingJob {
  protected abstract get(): Promise<any[]>;
  protected abstract persist(): Promise<void>;

  constructor(interval: number, id: string, statRepo?: StatRepository) {
    this.interval = interval;
    this.id = id;
    this.running = true;
    this.statRepo = statRepo;
  }

  public async run(handlers: Handler[]): Promise<void> {

@ -31,18 +36,21 @@ export abstract class RunPollingJob {
      try {
        items = await this.get();
        await Promise.all(handlers.map((handler) => handler(items)));
      } catch (e: Error | any) {
        this.logger.error("Error processing items", e);
        this.statRepo?.count("job_runs_total", { id: this.id, status: "error" });
        await setTimeout(this.interval);
        continue;
      }

      await this.persist();
      this.statRepo?.count("job_runs_total", { id: this.id, status: "success" });
      await setTimeout(this.interval);
    }
  }

  public async stop(): Promise<void> {
    this.running = false;
    this.statRepo?.count("job_runs_stopped", { id: this.id });
  }
}
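With the new constructor signature, concrete pollers pass a job id and an optional StatRepository so that run() and stop() can emit the `job_runs_total` / `job_runs_stopped` counters. A minimal sketch of a subclass follows; the import paths, the `hasNext` declaration, and the string items are assumptions for illustration only.

```typescript
// Hedged sketch of a concrete polling job wired to the new (interval, id, statRepo) constructor.
// Import paths are placeholders; hasNext() is assumed to be part of the base class contract.
import winston from "winston";
// import { RunPollingJob } from "./RunPollingJob";        // assumed location
// import { StatRepository } from "../repositories";       // assumed location

class EchoPollingJob extends RunPollingJob {
  protected logger: winston.Logger = winston.createLogger({
    transports: [new winston.transports.Console()],
  });
  private count = 0;

  constructor(statRepo?: StatRepository) {
    super(1_000, "echo-poll-job", statRepo); // the id ends up as the label on job_runs_total
  }

  protected async preHook(): Promise<void> {}
  async hasNext(): Promise<boolean> { return this.count < 10; }
  protected async get(): Promise<string[]> { return [`item-${this.count++}`]; }
  protected async persist(): Promise<void> {}
}
```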
@ -26,7 +26,7 @@ export class PollEvmLogs extends RunPollingJob {
    statsRepository: StatRepository,
    cfg: PollEvmLogsConfig
  ) {
    super(cfg.interval ?? 1_000, cfg.id, statsRepository);
    this.blockRepo = blockRepo;
    this.metadataRepo = metadataRepo;
    this.statsRepository = statsRepository;
@ -12,7 +12,7 @@ export class HandleSolanaTransactions<T> {

  constructor(
    cfg: HandleSolanaTxConfig,
    mapper: (tx: solana.Transaction) => Promise<T[]>,
    target?: (parsed: T[]) => Promise<void>
  ) {
    this.cfg = cfg;
@ -8,10 +8,9 @@ export class PollSolanaTransactions extends RunPollingJob {
  private metadataRepo: MetadataRepository<PollSolanaTransactionsMetadata>;
  private slotRepository: SolanaSlotRepository;
  private statsRepo: StatRepository;

  private latestSlot?: number;
  private slotCursor?: number;
  private lastRange?: Range;
  protected logger: winston.Logger;

  constructor(

@ -20,7 +19,7 @@ export class PollSolanaTransactions extends RunPollingJob {
    statsRepo: StatRepository,
    cfg: PollSolanaTransactionsConfig
  ) {
    super(1_000, cfg.id, statsRepo);

    this.metadataRepo = metadataRepo;
    this.slotRepository = slotRepo;

@ -36,8 +35,8 @@ export class PollSolanaTransactions extends RunPollingJob {
    }
  }

  async hasNext(): Promise<boolean> {
    if (this.cfg.toSlot && this.slotCursor && this.slotCursor >= this.cfg.toSlot) {
      this.logger.info(
        `Finished processing all slots from ${this.cfg.fromSlot} to ${this.cfg.toSlot}`
      );

@ -68,8 +67,15 @@ export class PollSolanaTransactions extends RunPollingJob {
    }

    // signatures for address goes back from current sig
    const afterSignature = fromBlock.transactions[0]?.transaction.signatures[0];
    let beforeSignature =
      toBlock.transactions[toBlock.transactions.length - 1]?.transaction.signatures[0];
    if (!afterSignature || !beforeSignature) {
      throw new Error(
        `No signature presents in transactions. After: ${afterSignature}. Before: ${beforeSignature} [slots: ${range.fromSlot} - ${range.toSlot}]`
      );
    }

    let currentSignaturesCount = this.cfg.signaturesLimit;

    let results: solana.Transaction[] = [];

@ -100,7 +106,7 @@ export class PollSolanaTransactions extends RunPollingJob {
    }
  }

  private getSlotRange(latestSlot: number): Range {
    let fromSlot = this.slotCursor ? this.slotCursor + 1 : this.cfg.fromSlot ?? latestSlot;
    // cfg.fromSlot is present and is greater than current slot height, then we allow to skip slots.
    if (this.slotCursor && this.cfg.fromSlot && this.cfg.fromSlot > this.slotCursor) {

@ -178,3 +184,8 @@ export class PollSolanaTransactionsConfig {
export type PollSolanaTransactionsMetadata = {
  lastSlot: number;
};

type Range = {
  fromSlot: number;
  toSlot: number;
};
@ -1,6 +1,7 @@
export enum ErrorType {
  SkippedSlot,
  NoBlockOrBlockTime,
  Ratelimit,
}

export class SolanaFailure extends Error {
@ -7,6 +7,7 @@ import {
  PromStatRepository,
  StaticJobRepository,
  Web3SolanaSlotRepository,
  RateLimitedSolanaSlotRepository,
} from "./repositories";
import { JobRepository } from "../domain/repositories";
import { RepositoriesStrategy } from "./repositories/strategies/RepositoriesStrategy";
@ -23,6 +23,10 @@ export type PlatformConfig = {
  chainId: number;
  rpcs: string[];
  timeout?: number;
  rateLimit?: {
    period: number;
    limit: number;
  };
};

/*
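The optional `rateLimit` block added to PlatformConfig (and to the devnet Solana config earlier in this diff) carries through to the rate-limited Solana slot repository introduced later in the diff. A hedged sketch of that wiring; `cfg`, `baseRepo`, and the function itself are illustrative stand-ins, not code from this commit.

```typescript
// Hedged sketch: feeding PlatformConfig.rateLimit into RateLimitedSolanaSlotRepository.
// The real wiring lives in the infrastructure layer; names here are placeholders.
function buildSolanaSlotRepository(cfg: PlatformConfig, baseRepo: SolanaSlotRepository) {
  return new RateLimitedSolanaSlotRepository(
    baseRepo,
    cfg.rateLimit ?? { period: 10_000, limit: 50 } // same defaults as the repository constructor
  );
}
```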
@ -11,6 +11,7 @@ import { DynamicStrategy } from "./strategies/DynamicStrategy";
 * On the reliability side, only knows how to timeout.
 */

const HEXADECIMAL_PREFIX = "0x";
const CHAIN = "ethereum";

export class EvmJsonRPCBlockRepository implements EvmBlockRepository, DynamicStrategy {

@ -66,12 +67,14 @@ export class EvmJsonRPCBlockRepository implements EvmBlockRepository, DynamicStr

    const reqs: any[] = [];
    for (let blockNumber of blockNumbers) {
      const blockNumberStrParam = `${HEXADECIMAL_PREFIX}${blockNumber.toString(16)}`;
      const blockNumberStrId = blockNumber.toString();

      reqs.push({
        jsonrpc: "2.0",
        id: blockNumberStrId,
        method: "eth_getBlockByNumber",
        params: [blockNumberStrParam, false],
      });
    }

@ -146,8 +149,8 @@ export class EvmJsonRPCBlockRepository implements EvmBlockRepository, DynamicStr
    const parsedFilters = {
      topics: filter.topics,
      address: filter.addresses,
      fromBlock: `${HEXADECIMAL_PREFIX}${filter.fromBlock.toString(16)}`,
      toBlock: `${HEXADECIMAL_PREFIX}${filter.toBlock.toString(16)}`,
    };

    let response: { result: Log[]; error?: ErrorBlock };

@ -182,13 +185,13 @@ export class EvmJsonRPCBlockRepository implements EvmBlockRepository, DynamicStr
  /**
   * Loosely based on the wormhole-dashboard implementation (minus some specially crafted blocks when null result is obtained)
   */
  private async getBlock(blockNumberOrTag: EvmTag): Promise<EvmBlock> {
    let response: { result?: EvmBlock; error?: ErrorBlock };
    try {
      response = await this.httpClient.post<typeof response>(this.rpc.href, {
        jsonrpc: "2.0",
        method: "eth_getBlockByNumber",
        params: [blockNumberOrTag, false], // this means we'll get a light block (no txs)
        id: 1,
      });
    } catch (e: HttpClientError | any) {
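The hunk above matters because `eth_getBlockByNumber` expects the block number parameter as a 0x-prefixed hexadecimal quantity, while the JSON-RPC request `id` can stay decimal; the previous code sent the decimal string as the RPC param. A small sketch of the formatting difference:

```typescript
// Sketch of the block-number formatting the change above switches to.
const HEXADECIMAL_PREFIX = "0x";

function toRpcQuantity(blockNumber: bigint): string {
  // eth_getBlockByNumber takes quantities as 0x-prefixed hex, e.g. 18500000n -> "0x11a49a0"
  return `${HEXADECIMAL_PREFIX}${blockNumber.toString(16)}`;
}

const blockNumber = 18_500_000n;
const req = {
  jsonrpc: "2.0",
  id: blockNumber.toString(), // decimal id, only used to correlate the batched response
  method: "eth_getBlockByNumber",
  params: [toRpcQuantity(blockNumber), false], // false => light block without full txs
};
```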
@ -0,0 +1,27 @@
import fs from "fs";
import { MetadataRepository } from "../../domain/repositories";

export class FileMetadataRepo implements MetadataRepository<any> {
  private readonly dirPath: string;

  constructor(dirPath: string) {
    this.dirPath = dirPath;
    if (!fs.existsSync(this.dirPath)) {
      fs.mkdirSync(this.dirPath, { recursive: true });
    }
  }

  async get(id: string): Promise<any> {
    const filePath = `${this.dirPath}/${id}.json`;

    return fs.promises
      .readFile(filePath, "utf8")
      .then(JSON.parse)
      .catch((err) => null);
  }

  async save(id: string, metadata: any): Promise<void> {
    const filePath = `${this.dirPath}/${id}.json`;
    return fs.promises.writeFile(filePath, JSON.stringify(metadata), "utf8");
  }
}
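FileMetadataRepo persists job cursors as one JSON file per id under `dirPath`. A small usage sketch; the directory and the job id are placeholders.

```typescript
// Hedged usage sketch for FileMetadataRepo; "./metadata" and the id are placeholders.
const metadataRepo = new FileMetadataRepo("./metadata");

async function demo() {
  // Writes ./metadata/poll-evm-logs.json
  await metadataRepo.save("poll-evm-logs", { lastBlock: 18500000 });

  // Resolves to the parsed object, or null when the file does not exist yet
  const saved = await metadataRepo.get("poll-evm-logs");
  console.log(saved?.lastBlock);
}
```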
@ -0,0 +1,130 @@
import axios, { AxiosError, AxiosInstance } from "axios";
import { setTimeout } from "timers/promises";

/**
 * A simple HTTP client with exponential backoff retries and 429 handling.
 */
export class HttpClient {
  private initialDelay: number = 1_000;
  private maxDelay: number = 60_000;
  private retries: number = 0;
  private timeout: number = 5_000;
  private axios: AxiosInstance;

  constructor(options?: HttpClientOptions) {
    options?.initialDelay && (this.initialDelay = options.initialDelay);
    options?.maxDelay && (this.maxDelay = options.maxDelay);
    options?.retries && (this.retries = options.retries);
    options?.timeout && (this.timeout = options.timeout);
    this.axios = axios.create();
  }

  public async post<T>(url: string, body: any, opts?: HttpClientOptions): Promise<T> {
    return this.executeWithRetry(url, "POST", body, opts);
  }

  private async execute<T>(
    url: string,
    method: string,
    body?: any,
    opts?: HttpClientOptions
  ): Promise<T> {
    let response;
    try {
      response = await this.axios.request<T>({
        url: url,
        method: method,
        data: body,
        timeout: opts?.timeout ?? this.timeout,
        signal: AbortSignal.timeout(opts?.timeout ?? this.timeout),
      });
    } catch (err: AxiosError | any) {
      // Connection / timeout error:
      if (err instanceof AxiosError) {
        throw new HttpClientError(err.message ?? err.code, { status: err?.status ?? 0 }, err);
      }

      throw new HttpClientError(err.message ?? err.code, undefined, err);
    }

    if (!(response.status > 200) && !(response.status < 300)) {
      throw new HttpClientError(undefined, response, response.data);
    }

    return response.data;
  }

  private async executeWithRetry<T>(
    url: string,
    method: string,
    body?: any,
    opts?: HttpClientOptions
  ): Promise<T> {
    const maxRetries = opts?.retries ?? this.retries;
    let retries = 0;
    const initialDelay = opts?.initialDelay ?? this.initialDelay;
    const maxDelay = opts?.maxDelay ?? this.maxDelay;
    while (maxRetries >= 0) {
      try {
        return await this.execute(url, method, body, opts);
      } catch (err) {
        if (err instanceof HttpClientError) {
          if (retries < maxRetries) {
            const retryAfter = err.getRetryAfter(maxDelay, err);
            if (retryAfter) {
              await setTimeout(retryAfter, { ref: false });
            } else {
              const timeout = Math.min(initialDelay * 2 ** maxRetries, maxDelay);
              await setTimeout(timeout, { ref: false });
            }
            retries++;
            continue;
          }
        }
        throw err;
      }
    }

    throw new Error(`Failed to reach ${url}`);
  }
}

export type HttpClientOptions = {
  initialDelay?: number;
  maxDelay?: number;
  retries?: number;
  timeout?: number;
};

export class HttpClientError extends Error {
  public readonly status?: number;
  public readonly data?: any;
  public readonly headers?: any;

  constructor(message?: string, response?: { status: number; headers?: any }, data?: any) {
    super(message ?? `Unexpected status code: ${response?.status}`);
    this.status = response?.status;
    this.data = data;
    this.headers = response?.headers;
    Error.captureStackTrace(this, this.constructor);
  }

  /**
   * Parses the Retry-After header and returns the value in milliseconds.
   * @param maxDelay
   * @param error
   * @throws {HttpClientError} if retry-after is bigger than maxDelay.
   * @returns the retry-after value in milliseconds.
   */
  public getRetryAfter(maxDelay: number, error: HttpClientError): number | undefined {
    const retryAfter = this.headers?.get("Retry-After");
    if (retryAfter) {
      const value = parseInt(retryAfter) * 1000; // header value is in seconds
      if (value <= maxDelay) {
        return value;
      }

      throw error;
    }
  }
}
@ -107,6 +107,7 @@ export class SnsEventRepository implements SnsRepository, StaticStrategy {
      };
      };
    }
    }

    this.logger.info(`Published ${events.length} events to SNS`);
    return {
    return {
      status: SUCCESS_STATUS,
      status: SUCCESS_STATUS,
    };
    };
@ -120,8 +121,6 @@ export class SnsEventRepository implements SnsRepository, StaticStrategy {
      this.logger.error(`Error publishing events to SNS: ${result.reason ?? result.reasons}`);
      this.logger.error(`Error publishing events to SNS: ${result.reason ?? result.reasons}`);
      throw new Error(`Error publishing events to SNS: ${result.reason}`);
      throw new Error(`Error publishing events to SNS: ${result.reason}`);
    }
    }

    this.logger.info(`Published ${events.length} events to SNS`);

  };
  };
}
}
}
}
@ -12,4 +12,5 @@ export * from "./SnsEventRepository";
export * from "./EvmJsonRPCBlockRepository";
export * from "./EvmJsonRPCBlockRepository";
export * from "./PromStatRepository";
export * from "./PromStatRepository";
export * from "./StaticJobRepository";
export * from "./StaticJobRepository";
export * from "./Web3SolanaSlotRepository";
export * from "./solana/Web3SolanaSlotRepository";

export * from "./solana/RateLimitedSolanaSlotRepository";
@ -0,0 +1,102 @@
import { Circuit, Ratelimit, RatelimitError, Retry, RetryMode } from "mollitia";
import { solana } from "../../../domain/entities";
import { SolanaSlotRepository } from "../../../domain/repositories";
import { Fallible, SolanaFailure, ErrorType } from "../../../domain/errors";
import winston from "../../../infrastructure/log";
import { DynamicStrategy } from "../strategies/DynamicStrategy";

const CHAIN = "solana";
const NAME = "solana-slotRepo";

export class RateLimitedSolanaSlotRepository implements SolanaSlotRepository, DynamicStrategy {
  delegate: SolanaSlotRepository;
  breaker: Circuit;
  logger: winston.Logger = winston.child({ module: "RateLimitedSolanaSlotRepository" });

  constructor(delegate: SolanaSlotRepository, opts: Options = { period: 10_000, limit: 50 }) {
    this.delegate = delegate;
    this.breaker = new Circuit({
      options: {
        modules: [
          new Ratelimit({ limitPeriod: opts.period, limitForPeriod: opts.limit }),
          new Retry({
            attempts: 1,
            interval: 10_000,
            fastFirst: false,
            mode: RetryMode.LINEAR,
            factor: 1,
            onRejection: (err: Error | any) => {
              if (err.message?.startsWith("429 Too Many Requests")) {
                this.logger.warn("Got 429 from solana RPC node. Retrying in 10 secs...");
                return 10_000; // Wait 10 secs if we get a 429
              } else {
                return false; // Dont retry, let the caller handle it
              }
            },
          }),
        ],
      },
    });
  }

  apply(chain: string): boolean {
    return chain === CHAIN;
  }

  getName(): string {
    return NAME;
  }

  createInstance(): RateLimitedSolanaSlotRepository {
    return this;
  }

  getLatestSlot(commitment: string): Promise<number> {
    return this.breaker.fn(() => this.delegate.getLatestSlot(commitment)).execute();
  }

  async getBlock(slot: number, finality?: string): Promise<Fallible<solana.Block, SolanaFailure>> {
    try {
      const result: Fallible<solana.Block, SolanaFailure> = await this.breaker
        .fn(() => this.delegate.getBlock(slot, finality))
        .execute();

      if (!result.isOk()) {
        throw result.getError();
      }

      return result;
    } catch (err: SolanaFailure | any) {
      // this needs more handling due to delegate.getBlock returning a Fallible with a SolanaFailure
      if (err instanceof RatelimitError) {
        return Fallible.error(new SolanaFailure(0, err.message, ErrorType.Ratelimit));
      }

      if (err instanceof SolanaFailure) {
        return Fallible.error(err);
      }

      return Fallible.error(new SolanaFailure(err, err?.message ?? "unknown error"));
    }
  }

  getSignaturesForAddress(
    address: string,
    beforeSig: string,
    afterSig: string,
    limit: number
  ): Promise<solana.ConfirmedSignatureInfo[]> {
    return this.breaker
      .fn(() => this.delegate.getSignaturesForAddress(address, beforeSig, afterSig, limit))
      .execute(address, beforeSig, afterSig, limit);
  }

  getTransactions(sigs: solana.ConfirmedSignatureInfo[]): Promise<solana.Transaction[]> {
    return this.breaker.fn(() => this.delegate.getTransactions(sigs)).execute(sigs);
  }
}

export type Options = {
  period: number;
  limit: number;
};
@ -5,36 +5,21 @@ import {
  VersionedTransactionResponse,
  VersionedTransactionResponse,
  SolanaJSONRPCError,
  SolanaJSONRPCError,
} from "@solana/web3.js";
} from "@solana/web3.js";
import { solana } from "../../domain/entities";
import { solana } from "../../../domain/entities";
import { SolanaSlotRepository } from "../../domain/repositories";
import { SolanaSlotRepository } from "../../../domain/repositories";
import { Fallible, SolanaFailure } from "../../domain/errors";
import { Fallible, SolanaFailure } from "../../../domain/errors";
import { DynamicStrategy } from "./strategies/DynamicStrategy";

const COMMITMENT_FINALIZED = "finalized";
const COMMITMENT_FINALIZED = "finalized";
const COMMITMENT_CONDIRMED = "confirmed";
const COMMITMENT_CONDIRMED = "confirmed";
const LEGACY_VERSION = "legacy";
const LEGACY_VERSION = "legacy";
const CHAIN = "solana";
const NAME = "solana-slotRepo";

export class Web3SolanaSlotRepository implements SolanaSlotRepository, DynamicStrategy {
export class Web3SolanaSlotRepository implements SolanaSlotRepository {
  private connection: Connection;
  private connection: Connection;

  constructor(connection: Connection) {
  constructor(connection: Connection) {
    this.connection = connection;
    this.connection = connection;
  }
  }

  apply(chain: string): boolean {
    return chain === CHAIN;
  }

  getName(): string {
    return NAME;
  }

  createInstance(): Web3SolanaSlotRepository {
    return this;
  }

  getLatestSlot(commitment: string): Promise<number> {
  getLatestSlot(commitment: string): Promise<number> {
    return this.connection.getSlot(commitment as Commitment);
    return this.connection.getSlot(commitment as Commitment);
  }
  }
@ -64,7 +49,7 @@ export class Web3SolanaSlotRepository implements SolanaSlotRepository, DynamicSt
      return Fallible.error(new SolanaFailure(err.code, err.message));
      return Fallible.error(new SolanaFailure(err.code, err.message));
    }
    }

      return Fallible.error(new SolanaFailure(0, err.message));
      throw err;
    });
    });
  }
  }
@ -4,10 +4,10 @@ import { PromStatRepository } from "../PromStatRepository";
|
||||||
import { SNSClient } from "@aws-sdk/client-sns";
|
import { SNSClient } from "@aws-sdk/client-sns";
|
||||||
import { Config } from "../../config";
|
import { Config } from "../../config";
|
||||||
import { EvmJsonRPCBlockRepository } from "../EvmJsonRPCBlockRepository";
|
import { EvmJsonRPCBlockRepository } from "../EvmJsonRPCBlockRepository";
|
||||||
import { Web3SolanaSlotRepository } from "../Web3SolanaSlotRepository";
|
|
||||||
import { Connection } from "@solana/web3.js";
|
import { Connection } from "@solana/web3.js";
|
||||||
import { DynamicStrategy } from "./DynamicStrategy";
|
import { DynamicStrategy } from "./DynamicStrategy";
|
||||||
import { StaticStrategy } from "./StaticStrategy";
|
import { StaticStrategy } from "./StaticStrategy";
|
||||||
|
import { RateLimitedSolanaSlotRepository, Web3SolanaSlotRepository } from "..";
|
||||||
|
|
||||||
export class RepositoriesStrategy {
|
export class RepositoriesStrategy {
|
||||||
private snsClient?: SNSClient;
|
private snsClient?: SNSClient;
|
||||||
|
@ -42,11 +42,14 @@ export class RepositoriesStrategy {
|
||||||
const platform = this.cfg.platforms[chain];
|
const platform = this.cfg.platforms[chain];
|
||||||
if (!platform) throw new Error(`No config for chain ${chain}`);
|
if (!platform) throw new Error(`No config for chain ${chain}`);
|
||||||
|
|
||||||
const repositories: DynamicStrategy[] = [
|
const repositories = [
|
||||||
new EvmJsonRPCBlockRepository(this.cfg),
|
new EvmJsonRPCBlockRepository(this.cfg),
|
||||||
|
new RateLimitedSolanaSlotRepository(
|
||||||
new Web3SolanaSlotRepository(
|
new Web3SolanaSlotRepository(
|
||||||
new Connection(platform.rpcs[0], { disableRetryOnRateLimit: true }),
|
new Connection(platform.rpcs[0], { disableRetryOnRateLimit: true })
|
||||||
),
|
),
|
||||||
|
platform.rateLimit
|
||||||
|
)
|
||||||
];
|
];
|
||||||
|
|
||||||
repositories.forEach((repository) => {
|
repositories.forEach((repository) => {
|
||||||
|
|
|
@ -0,0 +1,59 @@
import { describe, jest, it, expect } from "@jest/globals";
import {
  HandleSolanaTransactions,
  HandleSolanaTxConfig,
} from "../../../../src/domain/actions/solana/HandleSolanaTransactions";
import { solana } from "../../../../src/domain/entities";

let solanaTxs: solana.Transaction[];

describe("HandleSolanaTransactions", () => {
  let handleSolanaTransactions: HandleSolanaTransactions<any>;
  const mockConfig: HandleSolanaTxConfig = {
    programId: "mockProgramId",
  };

  it("should handle Solana transactions", async () => {
    givenSolanaTransactions();
    handleSolanaTransactions = new HandleSolanaTransactions<any>(
      mockConfig,
      async (tx: solana.Transaction) => {
        return [tx];
      }
    );

    const result = await handleSolanaTransactions.handle(solanaTxs);

    expect(result).toEqual(solanaTxs);
  });

  it("should handle Solana transactions with a target", async () => {
    givenSolanaTransactions();
    const mockTarget = jest.fn<(parsed: any[]) => Promise<void>>();
    handleSolanaTransactions = new HandleSolanaTransactions<any>(
      mockConfig,
      async (tx: solana.Transaction) => {
        return [tx];
      },
      mockTarget
    );
    const mockTransactions: solana.Transaction[] = await handleSolanaTransactions.handle(solanaTxs);

    expect(mockTarget).toHaveBeenCalledWith(mockTransactions);
  });
});

const givenSolanaTransactions = () =>
  (solanaTxs = [
    {
      slot: 1,
      transaction: {
        message: {
          accountKeys: [],
          instructions: [],
          compiledInstructions: [],
        },
        signatures: [],
      },
    },
  ]);
@ -96,10 +96,35 @@ describe("PollSolanaTransactions", () => {
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should be able to read transactions from last known slot and stop when toSlot is reached", async () => {
|
||||||
|
const latestSlot = 100;
|
||||||
|
const lastSlot = 10;
|
||||||
|
const toSlot = 50;
|
||||||
|
const expectedSigs = givenSigs();
|
||||||
|
const expectedTxs = givenTxs();
|
||||||
|
|
||||||
|
givenCfg({ toSlot });
|
||||||
|
givenStatsRepository();
|
||||||
|
givenMetadataRepository({ lastSlot });
|
||||||
|
givenSolanaSlotRepository(latestSlot, givenBlock(1), expectedSigs, expectedTxs);
|
||||||
|
givenPollSolanaTransactions();
|
||||||
|
|
||||||
|
pollSolanaTransactions.run([handlers.working]);
|
||||||
|
|
||||||
|
await thenWaitForAssertion(
|
||||||
|
() =>
|
||||||
|
expect(metadataSaveSpy).toHaveBeenCalledWith(cfg.id, {
|
||||||
|
lastSlot: toSlot,
|
||||||
|
}),
|
||||||
|
() => expect(pollSolanaTransactions.hasNext()).resolves.toBe(false)
|
||||||
|
);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
const givenCfg = () => {
|
const givenCfg = (overrides?: Partial<PollSolanaTransactionsConfig>) => {
|
||||||
cfg = new PollSolanaTransactionsConfig("anId", "programID", "confirmed");
|
cfg = new PollSolanaTransactionsConfig("anId", "programID", "confirmed");
|
||||||
|
Object.assign(cfg, overrides);
|
||||||
};
|
};
|
||||||
|
|
||||||
const givenMetadataRepository = (data?: PollSolanaTransactionsMetadata) => {
|
const givenMetadataRepository = (data?: PollSolanaTransactionsMetadata) => {
|
||||||
|
|
|
@ -140,7 +140,7 @@ const givenBlocksArePresent = (blockNumbers: bigint[]) => {
|
||||||
const requests = blockNumbers.map((blockNumber) => ({
|
const requests = blockNumbers.map((blockNumber) => ({
|
||||||
jsonrpc: "2.0",
|
jsonrpc: "2.0",
|
||||||
method: "eth_getBlockByNumber",
|
method: "eth_getBlockByNumber",
|
||||||
params: [blockNumber.toString(), false],
|
params: [`0x${blockNumber.toString(16)}`, false],
|
||||||
id: blockNumber.toString(),
|
id: blockNumber.toString(),
|
||||||
}));
|
}));
|
||||||
const response = blockNumbers.map((blockNumber) => ({
|
const response = blockNumbers.map((blockNumber) => ({
|
||||||
|
|
|
@ -0,0 +1,55 @@
import { expect, describe, it } from "@jest/globals";
import {
  Web3SolanaSlotRepository,
  RateLimitedSolanaSlotRepository,
} from "../../../src/infrastructure/repositories";

const repoMock = {
  getSlot: () => Promise.resolve(100),
  getLatestSlot: () => Promise.resolve(100),
  getBlock: () => Promise.resolve({ blockTime: 100, transactions: [] }),
  getSignaturesForAddress: () => Promise.resolve([]),
  getTransactions: () => Promise.resolve([]),
} as any as Web3SolanaSlotRepository;

describe("RateLimitedSolanaSlotRepository", () => {
  describe("getLatestSlot", () => {
    it("should fail when ratelimit is exceeded", async () => {
      const repository = new RateLimitedSolanaSlotRepository(repoMock, { period: 1000, limit: 1 });

      await repository.getLatestSlot("confirmed");
      await expect(repository.getLatestSlot("confirmed")).rejects.toThrowError();
    });
  });

  describe("getBlock", () => {
    it("should fail when ratelimit is exceeded", async () => {
      const repository = new RateLimitedSolanaSlotRepository(repoMock, { period: 1000, limit: 1 });

      await repository.getBlock(1);
      const failure = await repository.getBlock(1);

      expect(failure.getError()).toHaveProperty("message", "Ratelimited");
    });
  });

  describe("getSignaturesForAddress", () => {
    it("should fail when ratelimit is exceeded", async () => {
      const repository = new RateLimitedSolanaSlotRepository(repoMock, { period: 1000, limit: 1 });

      await repository.getSignaturesForAddress("address", "before", "after", 1);
      await expect(
        repository.getSignaturesForAddress("address", "before", "after", 1)
      ).rejects.toThrowError();
    });
  });

  describe("getTransactions", () => {
    it("should fail when ratelimit is exceeded", async () => {
      const repository = new RateLimitedSolanaSlotRepository(repoMock, { period: 1000, limit: 1 });

      await repository.getTransactions([]);
      await expect(repository.getTransactions([])).rejects.toThrowError();
    });
  });
});
@ -5,8 +5,8 @@ import {
|
||||||
EvmJsonRPCBlockRepository,
|
EvmJsonRPCBlockRepository,
|
||||||
FileMetadataRepository,
|
FileMetadataRepository,
|
||||||
PromStatRepository,
|
PromStatRepository,
|
||||||
|
RateLimitedSolanaSlotRepository,
|
||||||
SnsEventRepository,
|
SnsEventRepository,
|
||||||
Web3SolanaSlotRepository,
|
|
||||||
} from "../../../src/infrastructure/repositories";
|
} from "../../../src/infrastructure/repositories";
|
||||||
|
|
||||||
describe("RepositoriesBuilder", () => {
|
describe("RepositoriesBuilder", () => {
|
||||||
|
@ -41,6 +41,6 @@ describe("RepositoriesBuilder", () => {
|
||||||
expect(repos.getMetadataRepository()).toBeInstanceOf(FileMetadataRepository);
|
expect(repos.getMetadataRepository()).toBeInstanceOf(FileMetadataRepository);
|
||||||
expect(repos.getSnsEventRepository()).toBeInstanceOf(SnsEventRepository);
|
expect(repos.getSnsEventRepository()).toBeInstanceOf(SnsEventRepository);
|
||||||
expect(repos.getStatsRepository()).toBeInstanceOf(PromStatRepository);
|
expect(repos.getStatsRepository()).toBeInstanceOf(PromStatRepository);
|
||||||
expect(repos.getSolanaSlotRepository()).toBeInstanceOf(Web3SolanaSlotRepository);
|
expect(repos.getSolanaSlotRepository()).toBeInstanceOf(RateLimitedSolanaSlotRepository);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
@ -1,47 +1,9 @@
|
||||||
import { expect, describe, it } from "@jest/globals";
|
import { expect, describe, it } from "@jest/globals";
|
||||||
|
import { PublicKey } from "@solana/web3.js";
|
||||||
import { solana } from "../../../src/domain/entities";
|
import { solana } from "../../../src/domain/entities";
|
||||||
import { Web3SolanaSlotRepository } from "../../../src/infrastructure/repositories";
|
import { Web3SolanaSlotRepository } from "../../../src/infrastructure/repositories";
|
||||||
|
|
||||||
describe("Web3SolanaSlotRepository", () => {
|
describe("Web3SolanaSlotRepository", () => {
|
||||||
const chain = "solana";
|
|
||||||
|
|
||||||
describe("strategy", () => {
|
|
||||||
it("should be apply Web3SolanaSlotRepository", async () => {
|
|
||||||
// Given
|
|
||||||
const connectionMock = {};
|
|
||||||
const repo = new Web3SolanaSlotRepository(connectionMock as any);
|
|
||||||
|
|
||||||
// When
|
|
||||||
const result = await repo.apply(chain);
|
|
||||||
|
|
||||||
// Then
|
|
||||||
expect(result).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should be get name metadata", async () => {
|
|
||||||
// Given
|
|
||||||
const connectionMock = {};
|
|
||||||
const repo = new Web3SolanaSlotRepository(connectionMock as any);
|
|
||||||
|
|
||||||
// When
|
|
||||||
const result = await repo.getName();
|
|
||||||
|
|
||||||
// Then
|
|
||||||
expect(result).toBe("solana-slotRepo");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should be create instance", async () => {
|
|
||||||
// Given
|
|
||||||
const connectionMock = {};
|
|
||||||
const repo = new Web3SolanaSlotRepository(connectionMock as any);
|
|
||||||
|
|
||||||
// When
|
|
||||||
const result = await repo.createInstance();
|
|
||||||
|
|
||||||
// Then
|
|
||||||
expect(result).toBeInstanceOf(Web3SolanaSlotRepository);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("getLatestSlot", () => {
|
describe("getLatestSlot", () => {
|
||||||
it("should return the latest slot number", async () => {
|
it("should return the latest slot number", async () => {
|
||||||
|
@ -64,7 +26,32 @@ describe("Web3SolanaSlotRepository", () => {
|
||||||
// Given
|
// Given
|
||||||
const expected = {
|
const expected = {
|
||||||
blockTime: 100,
|
blockTime: 100,
|
||||||
transactions: [],
|
transactions: [
|
||||||
|
{
|
||||||
|
signature: "signature1",
|
||||||
|
slot: 100,
|
||||||
|
transaction: {
|
||||||
|
message: {
|
||||||
|
version: "legacy",
|
||||||
|
accountKeys: [new PublicKey("3u8hJUVTA4jH1wYAyUur7FFZVQ8H635K3tSHHF4ssjQ5")],
|
||||||
|
instructions: [],
|
||||||
|
compiledInstructions: [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
signature: "signature1",
|
||||||
|
slot: 100,
|
||||||
|
transaction: {
|
||||||
|
message: {
|
||||||
|
version: 0,
|
||||||
|
staticAccountKeys: [new PublicKey("3u8hJUVTA4jH1wYAyUur7FFZVQ8H635K3tSHHF4ssjQ5")],
|
||||||
|
instructions: [],
|
||||||
|
compiledInstructions: [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
};
|
};
|
||||||
const connectionMock = {
|
const connectionMock = {
|
||||||
getBlock: (slot: number) => Promise.resolve(expected),
|
getBlock: (slot: number) => Promise.resolve(expected),
|
||||||
|
@ -78,6 +65,17 @@ describe("Web3SolanaSlotRepository", () => {
|
||||||
expect(block.blockTime).toBe(expected.blockTime);
|
expect(block.blockTime).toBe(expected.blockTime);
|
||||||
expect(block.transactions).toHaveLength(expected.transactions.length);
|
expect(block.transactions).toHaveLength(expected.transactions.length);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should return an error when the block is not found", async () => {
|
||||||
|
const connectionMock = {
|
||||||
|
getBlock: (slot: number) => Promise.resolve(null),
|
||||||
|
};
|
||||||
|
const repository = new Web3SolanaSlotRepository(connectionMock as any);
|
||||||
|
|
||||||
|
const block = await repository.getBlock(100);
|
||||||
|
|
||||||
|
expect(block.getError()).toBeDefined();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("getSignaturesForAddress", () => {
|
describe("getSignaturesForAddress", () => {
|
||||||
|
|
|
@ -26,13 +26,13 @@ func TestNotionalCache_renderRegexp(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
key := nc.renderRegExp()
|
key := nc.renderRegExp()
|
||||||
assert.Equal(t, "*staging-mainnet:WORMSCAN:NOTIONAL:SYMBOL:*", key)
|
assert.Equal(t, "*staging-mainnet:WORMSCAN:NOTIONAL:TOKEN:*", key)
|
||||||
|
|
||||||
nc = &NotionalCache{
|
nc = &NotionalCache{
|
||||||
client: nil,
|
client: nil,
|
||||||
prefix: "",
|
prefix: "",
|
||||||
}
|
}
|
||||||
key = nc.renderRegExp()
|
key = nc.renderRegExp()
|
||||||
assert.Equal(t, "*WORMSCAN:NOTIONAL:SYMBOL:*", key)
|
assert.Equal(t, "*WORMSCAN:NOTIONAL:TOKEN:*", key)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,63 +0,0 @@
|
||||||
package domain
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
SignedVaaType = "signed-vaa"
|
|
||||||
PublishedLogMessageType = "published-log-message"
|
|
||||||
)
|
|
||||||
|
|
||||||
type NotificationEvent struct {
|
|
||||||
TrackID string `json:"trackId"`
|
|
||||||
Source string `json:"source"`
|
|
||||||
Type string `json:"type"`
|
|
||||||
Payload json.RawMessage `json:"payload"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewNotificationEvent[T EventPayload](trackID, source, _type string, payload T) (*NotificationEvent, error) {
|
|
||||||
p, err := json.Marshal(payload)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &NotificationEvent{
|
|
||||||
TrackID: trackID,
|
|
||||||
Source: source,
|
|
||||||
Type: _type,
|
|
||||||
Payload: json.RawMessage(p),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type EventPayload interface {
|
|
||||||
SignedVaa | PublishedLogMessage
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetEventPayload[T EventPayload](e *NotificationEvent) (T, error) {
|
|
||||||
var payload T
|
|
||||||
err := json.Unmarshal(e.Payload, &payload)
|
|
||||||
return payload, err
|
|
||||||
}
|
|
||||||
|
|
||||||
type SignedVaa struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
EmitterChain uint16 `json:"emitterChain"`
|
|
||||||
EmitterAddr string `json:"emitterAddr"`
|
|
||||||
Sequence uint64 `json:"sequence"`
|
|
||||||
GuardianSetIndex uint32 `json:"guardianSetIndex"`
|
|
||||||
Timestamp time.Time `json:"timestamp"`
|
|
||||||
Vaa []byte `json:"vaa"`
|
|
||||||
TxHash string `json:"txHash"`
|
|
||||||
Version int `json:"version"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type PublishedLogMessage struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
EmitterChain uint16 `json:"emitterChain"`
|
|
||||||
EmitterAddr string `json:"emitterAddr"`
|
|
||||||
Sequence uint64 `json:"sequence"`
|
|
||||||
Timestamp time.Time `json:"timestamp"`
|
|
||||||
Vaa []byte `json:"vaa"`
|
|
||||||
TxHash string `json:"txHash"`
|
|
||||||
}
File diff suppressed because it is too large
@ -0,0 +1,16 @@
package domain

func manualTestnetTokenList() []TokenMetadata {
	return []TokenMetadata{
		{TokenChain: 1, TokenAddress: "069b8857feab8184fb687f634618c035dac439dc1aeb3b5598a0f00000000001", Symbol: "SOL", CoingeckoID: "wrapped-solana", Decimals: 9},
		{TokenChain: 2, TokenAddress: "000000000000000000000000b4fbf271143f4fbf7b91a5ded31805e42b2208d6", Symbol: "WETH", CoingeckoID: "weth", Decimals: 18},
		{TokenChain: 2, TokenAddress: "00000000000000000000000011fe4b6ae13d2a6055c8d9cf65c55bac32b5d844", Symbol: "DAI", CoingeckoID: "dai", Decimals: 18},
		{TokenChain: 4, TokenAddress: "000000000000000000000000ae13d989dac2f0debff460ac112a837c89baa7cd", Symbol: "WBNB", CoingeckoID: "wbnb", Decimals: 18},
		{TokenChain: 5, TokenAddress: "0000000000000000000000009c3c9283d3e44854697cd22d3faa240cfb032889", Symbol: "WMATIC", CoingeckoID: "wmatic", Decimals: 18},
		{TokenChain: 6, TokenAddress: "0000000000000000000000005425890298aed601595a70ab815c96711a31bc65", Symbol: "USDC", CoingeckoID: "usd-coin", Decimals: 6},
		{TokenChain: 6, TokenAddress: "000000000000000000000000d00ae08403b9bbb9124bb305c09058e32c39a48c", Symbol: "WAVAX", CoingeckoID: "wrapped-avax", Decimals: 18},
		{TokenChain: 10, TokenAddress: "000000000000000000000000f1277d1ed8ad466beddf92ef448a132661956621", Symbol: "WFTM", CoingeckoID: "wrapped-fantom", Decimals: 18},
		{TokenChain: 14, TokenAddress: "000000000000000000000000f194afdf50b03e69bd7d057c1aa9e10c9954e4c9", Symbol: "CELO", CoingeckoID: "celo", Decimals: 18},
		{TokenChain: 16, TokenAddress: "000000000000000000000000d909178cc99d318e4d46e7e66a972955859670e1", Symbol: "GLMR", CoingeckoID: "wrapped-moonbeam", Decimals: 18},
		{TokenChain: 21, TokenAddress: "587c29de216efd4219573e08a1f6964d4fa7cb714518c2c8a0f29abfa264327d", Symbol: "SUI", CoingeckoID: "sui", Decimals: 9}}
}
@ -24,20 +24,37 @@ type TokenMetadata struct {
|
||||||
Decimals int64
|
Decimals int64
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
type TokenProvider struct {
|
||||||
tokenMetadata = generatedMainnetTokenList()
|
p2pNetwork string
|
||||||
tokenMetadataByContractID = make(map[string]*TokenMetadata)
|
tokenMetadata []TokenMetadata
|
||||||
tokenMetadataByCoingeckoID = make(map[string]*TokenMetadata)
|
tokenMetadataByContractID map[string]*TokenMetadata
|
||||||
)
|
tokenMetadataByCoingeckoID map[string]*TokenMetadata
|
||||||
|
}
|
||||||
|
|
||||||
func (t *TokenMetadata) GetTokenID() string {
|
func (t *TokenMetadata) GetTokenID() string {
|
||||||
return fmt.Sprintf("%d/%s", t.TokenChain, t.TokenAddress)
|
return fmt.Sprintf("%d/%s", t.TokenChain, t.TokenAddress)
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func makeContractID(tokenChain sdk.ChainID, tokenAddress string) string {
|
||||||
|
return fmt.Sprintf("%d-%s", tokenChain, tokenAddress)
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewTokenProvider(p2pNetwork string) *TokenProvider {
|
||||||
|
var tokenMetadata []TokenMetadata
|
||||||
|
|
||||||
|
switch p2pNetwork {
|
||||||
|
case P2pMainNet:
|
||||||
|
tokenMetadata = generatedMainnetTokenList()
|
||||||
|
case P2pTestNet:
|
||||||
|
tokenMetadata = manualTestnetTokenList()
|
||||||
|
default:
|
||||||
|
panic(fmt.Sprintf("unknown p2p network: %s", p2pNetwork))
|
||||||
|
}
|
||||||
|
|
||||||
|
tokenMetadataByContractID := make(map[string]*TokenMetadata)
|
||||||
|
tokenMetadataByCoingeckoID := make(map[string]*TokenMetadata)
|
||||||
|
|
||||||
for i := range tokenMetadata {
|
for i := range tokenMetadata {
|
||||||
|
|
||||||
// populate the map `tokenMetadataByCoingeckoID`
|
// populate the map `tokenMetadataByCoingeckoID`
|
||||||
coingeckoID := tokenMetadata[i].CoingeckoID
|
coingeckoID := tokenMetadata[i].CoingeckoID
|
||||||
if coingeckoID != "" {
|
if coingeckoID != "" {
|
||||||
|
@ -50,26 +67,28 @@ func init() {
|
||||||
tokenMetadataByContractID[contractID] = &tokenMetadata[i]
|
tokenMetadataByContractID[contractID] = &tokenMetadata[i]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return &TokenProvider{
|
||||||
|
p2pNetwork: p2pNetwork,
|
||||||
|
tokenMetadata: tokenMetadata,
|
||||||
|
tokenMetadataByContractID: tokenMetadataByContractID,
|
||||||
|
tokenMetadataByCoingeckoID: tokenMetadataByCoingeckoID,
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeContractID(tokenChain sdk.ChainID, tokenAddress string) string {
|
|
||||||
return fmt.Sprintf("%d-%s", tokenChain, tokenAddress)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetAllTokens returns a list of all tokens that exist in the database.
|
// GetAllTokens returns a list of all tokens that exist in the database.
|
||||||
//
|
//
|
||||||
// The caller must not modify the `[]TokenMetadata` returned.
|
// The caller must not modify the `[]TokenMetadata` returned.
|
||||||
func GetAllTokens() []TokenMetadata {
|
func (t *TokenProvider) GetAllTokens() []TokenMetadata {
|
||||||
return tokenMetadata
|
return t.tokenMetadata
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetAllCoingeckoIDs returns a list of all coingecko IDs that exist in the database.
|
// GetAllCoingeckoIDs returns a list of all coingecko IDs that exist in the database.
|
||||||
func GetAllCoingeckoIDs() []string {
|
func (t *TokenProvider) GetAllCoingeckoIDs() []string {
|
||||||
|
|
||||||
// use a map to remove duplicates
|
// use a map to remove duplicates
|
||||||
uniqueIDs := make(map[string]bool, len(tokenMetadata))
|
uniqueIDs := make(map[string]bool, len(t.tokenMetadata))
|
||||||
for i := range tokenMetadata {
|
for i := range t.tokenMetadata {
|
||||||
uniqueIDs[tokenMetadata[i].CoingeckoID] = true
|
uniqueIDs[t.tokenMetadata[i].CoingeckoID] = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// collect keys into a slice
|
// collect keys into a slice
|
||||||
|
@ -84,9 +103,9 @@ func GetAllCoingeckoIDs() []string {
|
||||||
// GetTokenByCoingeckoID returns information about a token identified by its coingecko ID.
|
// GetTokenByCoingeckoID returns information about a token identified by its coingecko ID.
|
||||||
//
|
//
|
||||||
// The caller must not modify the `*TokenMetadata` returned.
|
// The caller must not modify the `*TokenMetadata` returned.
|
||||||
func GetTokenByCoingeckoID(coingeckoID string) (*TokenMetadata, bool) {
|
func (t *TokenProvider) GetTokenByCoingeckoID(coingeckoID string) (*TokenMetadata, bool) {
|
||||||
|
|
||||||
result, ok := tokenMetadataByCoingeckoID[coingeckoID]
|
result, ok := t.tokenMetadataByCoingeckoID[coingeckoID]
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, false
|
return nil, false
|
||||||
}
|
}
|
||||||
|
@ -97,14 +116,18 @@ func GetTokenByCoingeckoID(coingeckoID string) (*TokenMetadata, bool) {
|
||||||
// GetTokenByAddress returns information about a token identified by its original mint address.
|
// GetTokenByAddress returns information about a token identified by its original mint address.
|
||||||
//
|
//
|
||||||
// The caller must not modify the `*TokenMetadata` returned.
|
// The caller must not modify the `*TokenMetadata` returned.
|
||||||
func GetTokenByAddress(tokenChain sdk.ChainID, tokenAddress string) (*TokenMetadata, bool) {
|
func (t *TokenProvider) GetTokenByAddress(tokenChain sdk.ChainID, tokenAddress string) (*TokenMetadata, bool) {
|
||||||
|
|
||||||
key := makeContractID(tokenChain, tokenAddress)
|
key := makeContractID(tokenChain, tokenAddress)
|
||||||
|
|
||||||
result, ok := tokenMetadataByContractID[key]
|
result, ok := t.tokenMetadataByContractID[key]
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, false
|
return nil, false
|
||||||
}
|
}
|
||||||
|
|
||||||
return result, true
|
return result, true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t *TokenProvider) GetP2pNewtork() string {
|
||||||
|
return t.p2pNetwork
|
||||||
|
}
|
||||||
|
|
|
@ -0,0 +1,74 @@
package events

import (
	"encoding/json"
	"time"
)

const (
	SignedVaaType                 = "signed-vaa"
	LogMessagePublishedMesageType = "log-message-published"
)

type NotificationEvent struct {
	TrackID   string          `json:"trackId"`
	Source    string          `json:"source"`
	Event     string          `json:"event"`
	Version   string          `json:"version"`
	Timestamp time.Time       `json:"timestamp"`
	Data      json.RawMessage `json:"data"`
}

func NewNotificationEvent[T EventData](trackID, source, _type string, data T) (*NotificationEvent, error) {
	p, err := json.Marshal(data)
	if err != nil {
		return nil, err
	}
	return &NotificationEvent{
		TrackID:   trackID,
		Source:    source,
		Event:     _type,
		Data:      json.RawMessage(p),
		Version:   "1",
		Timestamp: time.Now(),
	}, nil
}

type EventData interface {
	SignedVaa | LogMessagePublished
}

func GetEventData[T EventData](e *NotificationEvent) (T, error) {
	var data T
	err := json.Unmarshal(e.Data, &data)
	return data, err
}

type SignedVaa struct {
	ID               string    `json:"id"`
	EmitterChain     uint16    `json:"emitterChain"`
	EmitterAddress   string    `json:"emitterAddress"`
	Sequence         uint64    `json:"sequence"`
	GuardianSetIndex uint32    `json:"guardianSetIndex"`
	Timestamp        time.Time `json:"timestamp"`
	Vaa              []byte    `json:"vaa"`
	TxHash           string    `json:"txHash"`
	Version          int       `json:"version"`
}

type LogMessagePublished struct {
	ChainID     uint16                        `json:"chainId"`
	Emitter     string                        `json:"emitter"`
	TxHash      string                        `json:"txHash"`
	BlockHeight string                        `json:"blockHeight"`
	BlockTime   time.Time                     `json:"blockTime"`
	Attributes  PublishedLogMessageAttributes `json:"attributes"`
}

type PublishedLogMessageAttributes struct {
	Sender           string `json:"sender"`
	Sequence         uint64 `json:"sequence"`
	Nonce            uint32 `json:"nonce"`
	Payload          string `json:"payload"`
	ConsistencyLevel uint8  `json:"consistencyLevel"`
}
@ -1,4 +1,4 @@
|
||||||
package domain
|
package events
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
@ -13,8 +13,8 @@ func Test_GetEventPayload(t *testing.T) {
|
||||||
body := `{
|
body := `{
|
||||||
"trackId": "63e16082da939a263512a307",
|
"trackId": "63e16082da939a263512a307",
|
||||||
"source": "fly",
|
"source": "fly",
|
||||||
"type": "signed-vaa",
|
"event": "signed-vaa",
|
||||||
"payload": {
|
"data": {
|
||||||
"id": "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727",
|
"id": "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727",
|
||||||
"emitterChain": 2,
|
"emitterChain": 2,
|
||||||
"emitterAddr": "000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7",
|
"emitterAddr": "000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7",
|
||||||
|
@ -32,8 +32,8 @@ func Test_GetEventPayload(t *testing.T) {
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, "63e16082da939a263512a307", event.TrackID)
|
assert.Equal(t, "63e16082da939a263512a307", event.TrackID)
|
||||||
assert.Equal(t, "fly", event.Source)
|
assert.Equal(t, "fly", event.Source)
|
||||||
assert.Equal(t, SignedVaaType, event.Type)
|
assert.Equal(t, SignedVaaType, event.Event)
|
||||||
signedVaa, err := GetEventPayload[SignedVaa](&event)
|
signedVaa, err := GetEventData[SignedVaa](&event)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727", signedVaa.ID)
|
assert.Equal(t, "2/000000000000000000000000f890982f9310df57d00f659cf4fd87e65aded8d7/162727", signedVaa.ID)
|
||||||
}
|
}
|
||||||
|
@ -43,7 +43,7 @@ func Test_GetEventPayload_Error(t *testing.T) {
|
||||||
body := `{
|
body := `{
|
||||||
"trackId": "63e16082da939a263512a307",
|
"trackId": "63e16082da939a263512a307",
|
||||||
"source": "fly",
|
"source": "fly",
|
||||||
"type": "signed-vaa"
|
"event": "signed-vaa"
|
||||||
}`
|
}`
|
||||||
|
|
||||||
event := NotificationEvent{}
|
event := NotificationEvent{}
|
||||||
|
@ -51,7 +51,7 @@ func Test_GetEventPayload_Error(t *testing.T) {
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, "63e16082da939a263512a307", event.TrackID)
|
assert.Equal(t, "63e16082da939a263512a307", event.TrackID)
|
||||||
assert.Equal(t, "fly", event.Source)
|
assert.Equal(t, "fly", event.Source)
|
||||||
assert.Equal(t, SignedVaaType, event.Type)
|
assert.Equal(t, SignedVaaType, event.Event)
|
||||||
_, err = GetEventPayload[SignedVaa](&event)
|
_, err = GetEventData[SignedVaa](&event)
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
}
|
}
|
|
@ -0,0 +1,35 @@
package events

import (
	"encoding/hex"
	"fmt"
	"strings"

	sdk "github.com/wormhole-foundation/wormhole/sdk/vaa"
)

func CreateUnsignedVAA(plm *LogMessagePublished) (*sdk.VAA, error) {

	address, err := sdk.StringToAddress(plm.Attributes.Sender)
	if err != nil {
		return nil, fmt.Errorf("error converting emitter address: %w", err)
	}
	payload, err := hex.DecodeString(strings.TrimPrefix(plm.Attributes.Payload, "0x"))
	if err != nil {
		return nil, fmt.Errorf("error converting payload: %w", err)
	}

	vaa := sdk.VAA{
		Version:          sdk.SupportedVAAVersion,
		GuardianSetIndex: 1,
		EmitterChain:     sdk.ChainID(plm.ChainID),
		EmitterAddress:   address,
		Sequence:         plm.Attributes.Sequence,
		Timestamp:        plm.BlockTime,
		Payload:          payload,
		Nonce:            plm.Attributes.Nonce,
		ConsistencyLevel: plm.Attributes.ConsistencyLevel,
	}

	return &vaa, nil
}
@ -1,21 +1,20 @@
|
||||||
package infrastructure
|
package health
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
"github.com/gofiber/fiber/v2"
|
||||||
"github.com/wormhole-foundation/wormhole-explorer/common/health"
|
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Controller definition.
|
// Controller definition.
|
||||||
type Controller struct {
|
type Controller struct {
|
||||||
checks []health.Check
|
checks []Check
|
||||||
logger *zap.Logger
|
logger *zap.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewController creates a Controller instance.
|
// NewController creates a Controller instance.
|
||||||
func NewController(checks []health.Check, logger *zap.Logger) *Controller {
|
func NewController(checks []Check, logger *zap.Logger) *Controller {
|
||||||
return &Controller{checks: checks, logger: logger}
|
return &Controller{checks: checks, logger: logger}
|
||||||
}
|
}
|
||||||
|
|
|
@ -45,10 +45,12 @@ require (
|
||||||
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
|
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
|
||||||
github.com/deepmap/oapi-codegen v1.12.4 // indirect
|
github.com/deepmap/oapi-codegen v1.12.4 // indirect
|
||||||
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79 // indirect
|
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79 // indirect
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||||
github.com/ethereum/go-ethereum v1.11.3 // indirect
|
github.com/ethereum/go-ethereum v1.11.3 // indirect
|
||||||
github.com/fatih/color v1.9.0 // indirect
|
github.com/fatih/color v1.9.0 // indirect
|
||||||
github.com/gagliardetto/binary v0.7.7 // indirect
|
github.com/gagliardetto/binary v0.7.7 // indirect
|
||||||
github.com/gagliardetto/treeout v0.1.4 // indirect
|
github.com/gagliardetto/treeout v0.1.4 // indirect
|
||||||
|
github.com/go-redis/redis/v8 v8.11.5 // indirect
|
||||||
github.com/gofiber/adaptor/v2 v2.1.31 // indirect
|
github.com/gofiber/adaptor/v2 v2.1.31 // indirect
|
||||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
|
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
|
||||||
github.com/golang/protobuf v1.5.2 // indirect
|
github.com/golang/protobuf v1.5.2 // indirect
|
||||||
|
|
|
@ -133,6 +133,8 @@ github.com/deepmap/oapi-codegen v1.12.4/go.mod h1:3lgHGMu6myQ2vqbbTXH2H1o4eXFTGn
|
||||||
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79 h1:+HRtcJejUYA/2rnyTMbOaZ4g7f4aVuFduTV/03dbpLY=
|
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79 h1:+HRtcJejUYA/2rnyTMbOaZ4g7f4aVuFduTV/03dbpLY=
|
||||||
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79/go.mod h1:V+ED4kT/t/lKtH99JQmKIb0v9WL3VaYkJ36CfHlVECI=
|
github.com/dfuse-io/logging v0.0.0-20201110202154-26697de88c79/go.mod h1:V+ED4kT/t/lKtH99JQmKIb0v9WL3VaYkJ36CfHlVECI=
|
||||||
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
|
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||||
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
|
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
|
||||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||||
|
@ -144,6 +146,7 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv
|
||||||
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
|
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
|
||||||
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
|
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
|
||||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||||
|
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
|
||||||
github.com/gagliardetto/binary v0.7.7 h1:QZpT38+sgoPg+TIQjH94sLbl/vX+nlIRA37pEyOsjfY=
|
github.com/gagliardetto/binary v0.7.7 h1:QZpT38+sgoPg+TIQjH94sLbl/vX+nlIRA37pEyOsjfY=
|
||||||
github.com/gagliardetto/binary v0.7.7/go.mod h1:mUuay5LL8wFVnIlecHakSZMvcdqfs+CsotR5n77kyjM=
|
github.com/gagliardetto/binary v0.7.7/go.mod h1:mUuay5LL8wFVnIlecHakSZMvcdqfs+CsotR5n77kyjM=
|
||||||
github.com/gagliardetto/gofuzz v1.2.2/go.mod h1:bkH/3hYLZrMLbfYWA0pWzXmi5TTRZnu4pMGZBkqMKvY=
|
github.com/gagliardetto/gofuzz v1.2.2/go.mod h1:bkH/3hYLZrMLbfYWA0pWzXmi5TTRZnu4pMGZBkqMKvY=
|
||||||
|
@ -163,6 +166,8 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9
|
||||||
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
||||||
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
|
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
|
||||||
github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
|
github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
|
||||||
|
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
|
||||||
|
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
|
||||||
github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY=
|
github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY=
|
||||||
github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I=
|
github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I=
|
||||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||||
|
@ -364,12 +369,14 @@ github.com/near/borsh-go v0.3.1 h1:ukNbhJlPKxfua0/nIuMZhggSU8zvtRP/VyC25LLqPUA=
|
||||||
github.com/near/borsh-go v0.3.1/go.mod h1:NeMochZp7jN/pYFuxLkrZtmLqbADmnp/y1+/dL+AsyQ=
|
github.com/near/borsh-go v0.3.1/go.mod h1:NeMochZp7jN/pYFuxLkrZtmLqbADmnp/y1+/dL+AsyQ=
|
||||||
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
||||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||||
|
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||||
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
||||||
|
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
|
||||||
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
||||||
github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE=
|
|
||||||
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
||||||
|
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
|
||||||
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19 h1:JernwK3Bgd5x+UJPV6S2LPYoBF+DFOYBoQ5JeJPVBNc=
|
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19 h1:JernwK3Bgd5x+UJPV6S2LPYoBF+DFOYBoQ5JeJPVBNc=
|
||||||
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19/go.mod h1:4OjcxgwdXzezqytxN534MooNmrxRD50geWZxTD7845s=
|
github.com/opsgenie/opsgenie-go-sdk-v2 v1.2.19/go.mod h1:4OjcxgwdXzezqytxN534MooNmrxRD50geWZxTD7845s=
|
||||||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||||
|
@ -807,7 +814,6 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T
|
||||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df h1:5Pf6pFKu98ODmgnpvkJ3kFUOQGGLIzLIkbzUHp47618=
|
|
||||||
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
||||||
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
||||||
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||||
|
@ -899,6 +905,7 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||||
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||||
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||||
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
|
|
@ -198,7 +198,7 @@ func TestCheckTxShouldBeUpdated(t *testing.T) {
|
||||||
inputGetGlobalTransactionByIDFunc: func(ctx context.Context, id string) (storage.TransactionUpdate, error) {
|
inputGetGlobalTransactionByIDFunc: func(ctx context.Context, id string) (storage.TransactionUpdate, error) {
|
||||||
return storage.TransactionUpdate{}, storage.ErrDocNotFound
|
return storage.TransactionUpdate{}, storage.ErrDocNotFound
|
||||||
},
|
},
|
||||||
expectedUpdate: true,
|
expectedUpdate: false,
|
||||||
expectedError: ErrInvalidTxStatus,
|
expectedError: ErrInvalidTxStatus,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
@ -208,7 +208,7 @@ func TestCheckTxShouldBeUpdated(t *testing.T) {
|
||||||
t.Run(tc.name, func(t *testing.T) {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
update, err := checkTxShouldBeUpdated(context.Background(), tc.inputTx, tc.inputGetGlobalTransactionByIDFunc)
|
update, err := checkTxShouldBeUpdated(context.Background(), tc.inputTx, tc.inputGetGlobalTransactionByIDFunc)
|
||||||
if update != tc.expectedUpdate {
|
if update != tc.expectedUpdate {
|
||||||
t.Errorf("expected update %v, got %v", tc.expectedUpdate, update)
|
t.Errorf("%s - expected update %v, got %v", tc.name, tc.expectedUpdate, update)
|
||||||
}
|
}
|
||||||
if err != tc.expectedError {
|
if err != tc.expectedError {
|
||||||
t.Errorf("expected error %v, got %v", tc.expectedError, err)
|
t.Errorf("expected error %v, got %v", tc.expectedError, err)
|
||||||
|
|
|
@ -46,10 +46,21 @@ spec:
|
||||||
value: "8000"
|
value: "8000"
|
||||||
- name: LOG_LEVEL
|
- name: LOG_LEVEL
|
||||||
value: "INFO"
|
value: "INFO"
|
||||||
- name: SQS_URL
|
- name: PIPELINE_SQS_URL
|
||||||
value: {{ .SQS_URL }}
|
valueFrom:
|
||||||
|
configMapKeyRef:
|
||||||
|
name: analytics
|
||||||
|
key: pipeline-sqs-url
|
||||||
|
- name: NOTIFICATIONS_SQS_URL
|
||||||
|
valueFrom:
|
||||||
|
configMapKeyRef:
|
||||||
|
name: analytics
|
||||||
|
key: notifications-sqs-url
|
||||||
- name: AWS_REGION
|
- name: AWS_REGION
|
||||||
value: {{ .SQS_AWS_REGION }}
|
valueFrom:
|
||||||
|
configMapKeyRef:
|
||||||
|
name: analytics
|
||||||
|
key: aws-region
|
||||||
- name: PPROF_ENABLED
|
- name: PPROF_ENABLED
|
||||||
value: "{{ .PPROF_ENABLED }}"
|
value: "{{ .PPROF_ENABLED }}"
|
||||||
- name: P2P_NETWORK
|
- name: P2P_NETWORK
|
||||||
|
|
|
@ -0,0 +1,10 @@
---
kind: ConfigMap
apiVersion: v1
metadata:
  name: analytics
  namespace: {{ .NAMESPACE }}
data:
  aws-region: {{ .SQS_AWS_REGION }}
  pipeline-sqs-url: {{ .PIPELINE_SQS_URL }}
  notifications-sqs-url: {{ .NOTIFICATIONS_SQS_URL }}
@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=64Mi
|
||||||
RESOURCES_LIMITS_CPU=200m
|
RESOURCES_LIMITS_CPU=200m
|
||||||
RESOURCES_REQUESTS_MEMORY=32Mi
|
RESOURCES_REQUESTS_MEMORY=32Mi
|
||||||
RESOURCES_REQUESTS_CPU=100m
|
RESOURCES_REQUESTS_CPU=100m
|
||||||
SQS_URL=
|
PIPELINE_SQS_URL=
|
||||||
|
NOTIFICATIONS_SQS_URL=
|
||||||
SQS_AWS_REGION=
|
SQS_AWS_REGION=
|
||||||
P2P_NETWORK=mainnet
|
P2P_NETWORK=mainnet
|
||||||
PPROF_ENABLED=false
|
PPROF_ENABLED=false
|
||||||
|
|
|
@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=128Mi
|
||||||
RESOURCES_LIMITS_CPU=200m
|
RESOURCES_LIMITS_CPU=200m
|
||||||
RESOURCES_REQUESTS_MEMORY=64Mi
|
RESOURCES_REQUESTS_MEMORY=64Mi
|
||||||
RESOURCES_REQUESTS_CPU=100m
|
RESOURCES_REQUESTS_CPU=100m
|
||||||
SQS_URL=
|
PIPELINE_SQS_URL=
|
||||||
|
NOTIFICATIONS_SQS_URL=
|
||||||
SQS_AWS_REGION=
|
SQS_AWS_REGION=
|
||||||
P2P_NETWORK=testnet
|
P2P_NETWORK=testnet
|
||||||
PPROF_ENABLED=false
|
PPROF_ENABLED=false
|
||||||
|
|
|
@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=64Mi
|
||||||
RESOURCES_LIMITS_CPU=200m
|
RESOURCES_LIMITS_CPU=200m
|
||||||
RESOURCES_REQUESTS_MEMORY=32Mi
|
RESOURCES_REQUESTS_MEMORY=32Mi
|
||||||
RESOURCES_REQUESTS_CPU=100m
|
RESOURCES_REQUESTS_CPU=100m
|
||||||
SQS_URL=
|
PIPELINE_SQS_URL=
|
||||||
|
NOTIFICATIONS_SQS_URL=
|
||||||
SQS_AWS_REGION=
|
SQS_AWS_REGION=
|
||||||
P2P_NETWORK=mainnet
|
P2P_NETWORK=mainnet
|
||||||
PPROF_ENABLED=true
|
PPROF_ENABLED=true
|
||||||
|
|
|
@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=128Mi
|
||||||
RESOURCES_LIMITS_CPU=200m
|
RESOURCES_LIMITS_CPU=200m
|
||||||
RESOURCES_REQUESTS_MEMORY=64Mi
|
RESOURCES_REQUESTS_MEMORY=64Mi
|
||||||
RESOURCES_REQUESTS_CPU=100m
|
RESOURCES_REQUESTS_CPU=100m
|
||||||
SQS_URL=
|
PIPELINE_SQS_URL=
|
||||||
|
NOTIFICATIONS_SQS_URL=
|
||||||
SQS_AWS_REGION=
|
SQS_AWS_REGION=
|
||||||
P2P_NETWORK=testnet
|
P2P_NETWORK=testnet
|
||||||
PPROF_ENABLED=false
|
PPROF_ENABLED=false
|
||||||
|
|
|
@@ -46,8 +46,9 @@ data:
         "commitment": "confirmed",
         "interval": 5000,
         "signaturesLimit": 100,
-        "programId": "3u8hJUVTA4jH1wYAyUur7FFZVQ8H635K3tSHHF4ssjQ5",
-        "chain": "solana"
+        "programId": "Bridge1p5gheXUvJ6jGWGeCsgPKgnE3YgdGKRVCMY9o",
+        "chain": "solana",
+        "network": "devnet"
       }
     },
     "handlers": [
@@ -56,7 +57,7 @@ data:
         "target": "sns",
         "mapper": "solanaLogMessagePublishedMapper",
         "config": {
-          "programId": "3u8hJUVTA4jH1wYAyUur7FFZVQ8H635K3tSHHF4ssjQ5"
+          "programId": "Bridge1p5gheXUvJ6jGWGeCsgPKgnE3YgdGKRVCMY9o"
         }
       }
     ]

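Both hunks point the Solana source and its handler at a different program id and record the network explicitly. Purely as an illustration in Go (the watcher's real types may differ, and every name below is an assumption), a type that could unmarshal the source config block shown above:

package config

import "encoding/json"

// SolanaSourceConfig mirrors the JSON keys shown in the hunk above; the
// struct name and the use of Go here are illustrative assumptions.
type SolanaSourceConfig struct {
	Commitment      string `json:"commitment"`
	Interval        int    `json:"interval"`
	SignaturesLimit int    `json:"signaturesLimit"`
	ProgramID       string `json:"programId"`
	Chain           string `json:"chain"`
	Network         string `json:"network"`
}

// ParseSolanaSourceConfig decodes one source config object from raw JSON.
func ParseSolanaSourceConfig(raw []byte) (*SolanaSourceConfig, error) {
	var cfg SolanaSourceConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		return nil, err
	}
	return &cfg, nil
}
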
@@ -6,7 +6,7 @@ RESOURCES_LIMITS_MEMORY=30Mi
 RESOURCES_LIMITS_CPU=20m
 RESOURCES_REQUESTS_MEMORY=15Mi
 RESOURCES_REQUESTS_CPU=10m
-P2P_NETWORK=mainnet
+P2P_NETWORK=testnet
 COINGECKO_URL=https://api.coingecko.com/api/v3
 NOTIONAL_CHANNEL=WORMSCAN:NOTIONAL
 LOG_LEVEL=INFO

@@ -6,7 +6,7 @@ RESOURCES_LIMITS_MEMORY=30Mi
 RESOURCES_LIMITS_CPU=20m
 RESOURCES_REQUESTS_MEMORY=15Mi
 RESOURCES_REQUESTS_CPU=10m
-P2P_NETWORK=mainnet
+P2P_NETWORK=testnet
 COINGECKO_URL=https://api.coingecko.com/api/v3
 NOTIONAL_CHANNEL=WORMSCAN:NOTIONAL
 LOG_LEVEL=INFO

@@ -0,0 +1,10 @@
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+  name: parser
+  namespace: {{ .NAMESPACE }}
+data:
+  aws-region: {{ .SQS_AWS_REGION }}
+  pipeline-sqs-url: {{ .PIPELINE_SQS_URL }}
+  notifications-sqs-url: {{ .NOTIFICATIONS_SQS_URL }}

@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=256Mi
 RESOURCES_LIMITS_CPU=500m
 RESOURCES_REQUESTS_MEMORY=128Mi
 RESOURCES_REQUESTS_CPU=250m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan
 VAA_PAYLOAD_PARSER_TIMEOUT=10

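These parser env files also carry the VAA payload parser endpoint and a timeout. Assuming the timeout is interpreted as seconds (an assumption, not verified against the parser's code), a client for that endpoint might be built along these lines; the helper name is hypothetical:

package parser

import (
	"net/http"
	"os"
	"strconv"
	"time"
)

// newPayloadParserClient builds an HTTP client for the VAA payload parser
// from the two environment variables shown above. Treating
// VAA_PAYLOAD_PARSER_TIMEOUT as seconds is an assumption.
func newPayloadParserClient() (baseURL string, client *http.Client) {
	baseURL = os.Getenv("VAA_PAYLOAD_PARSER_URL")
	seconds, err := strconv.Atoi(os.Getenv("VAA_PAYLOAD_PARSER_TIMEOUT"))
	if err != nil || seconds <= 0 {
		seconds = 10 // fall back to the default used in the env files
	}
	client = &http.Client{Timeout: time.Duration(seconds) * time.Second}
	return baseURL, client
}
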
@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=30Mi
 RESOURCES_LIMITS_CPU=20m
 RESOURCES_REQUESTS_MEMORY=15Mi
 RESOURCES_REQUESTS_CPU=10m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan-testnet
 VAA_PAYLOAD_PARSER_TIMEOUT=10

@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=256Mi
 RESOURCES_LIMITS_CPU=500m
 RESOURCES_REQUESTS_MEMORY=128Mi
 RESOURCES_REQUESTS_CPU=250m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan
 VAA_PAYLOAD_PARSER_TIMEOUT=10

@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=30Mi
 RESOURCES_LIMITS_CPU=20m
 RESOURCES_REQUESTS_MEMORY=15Mi
 RESOURCES_REQUESTS_CPU=10m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 VAA_PAYLOAD_PARSER_URL=http://wormscan-vaa-payload-parser.wormscan-testnet
 VAA_PAYLOAD_PARSER_TIMEOUT=10

@@ -56,10 +56,21 @@ spec:
               configMapKeyRef:
                 name: config
                 key: mongo-database
-          - name: SQS_URL
-            value: {{ .SQS_URL }}
+          - name: PIPELINE_SQS_URL
+            valueFrom:
+              configMapKeyRef:
+                name: parser
+                key: pipeline-sqs-url
+          - name: NOTIFICATIONS_SQS_URL
+            valueFrom:
+              configMapKeyRef:
+                name: parser
+                key: notifications-sqs-url
           - name: AWS_REGION
-            value: {{ .SQS_AWS_REGION }}
+            valueFrom:
+              configMapKeyRef:
+                name: parser
+                key: aws-region
           - name: VAA_PAYLOAD_PARSER_URL
             value: {{ .VAA_PAYLOAD_PARSER_URL }}
           - name: VAA_PAYLOAD_PARSER_TIMEOUT

@@ -0,0 +1,10 @@
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+  name: tx-tracker
+  namespace: {{ .NAMESPACE }}
+data:
+  aws-region: {{ .SQS_AWS_REGION }}
+  pipeline-sqs-url: {{ .PIPELINE_SQS_URL }}
+  notifications-sqs-url: {{ .NOTIFICATIONS_SQS_URL }}

@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=256Mi
 RESOURCES_LIMITS_CPU=500m
 RESOURCES_REQUESTS_MEMORY=128Mi
 RESOURCES_REQUESTS_CPU=250m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 P2P_NETWORK=mainnet
 AWS_IAM_ROLE=
@@ -40,6 +41,9 @@ CELO_REQUESTS_PER_MINUTE=12
 ETHEREUM_BASE_URL=https://rpc.ankr.com/eth
 ETHEREUM_REQUESTS_PER_MINUTE=12
 
+EVMOS_BASE_URL=https://tm.evmos.lava.build
+EVMOS_REQUESTS_PER_MINUTE=12
+
 FANTOM_BASE_URL=https://rpc.ankr.com/fantom
 FANTOM_REQUESTS_PER_MINUTE=12
 
@@ -54,6 +58,9 @@ KARURA_REQUESTS_PER_MINUTE=12
 KLAYTN_BASE_URL=https://klaytn-mainnet-rpc.allthatnode.com:8551
 KLAYTN_REQUESTS_PER_MINUTE=12
 
+KUJIRA_BASE_URL=https://kujira-rpc.ibs.team
+KUJIRA_REQUESTS_PER_MINUTE=12
+
 MOONBEAM_BASE_URL=https://rpc.api.moonbeam.network
 MOONBEAM_REQUESTS_PER_MINUTE=12
 
@@ -69,6 +76,9 @@ OSMOSIS_REQUESTS_PER_MINUTE=12
 POLYGON_BASE_URL=https://rpc.ankr.com/polygon
 POLYGON_REQUESTS_PER_MINUTE=12
 
+SEI_BASE_URL=https://rpc.ankr.com/sei
+SEI_REQUESTS_PER_MINUTE=12
+
 SOLANA_BASE_URL=https://api.mainnet-beta.solana.com
 SOLANA_REQUESTS_PER_MINUTE=12
 
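Each chain in these tx-tracker env files gets a base URL and a requests-per-minute budget. One way such a budget can be enforced, shown here only as a sketch and not as tx-tracker's actual implementation, is a token-bucket limiter per RPC endpoint:

package txtracker

import (
	"context"
	"time"

	"golang.org/x/time/rate"
)

// rpcClient pairs a chain's base URL with a limiter sized from its
// *_REQUESTS_PER_MINUTE setting. All names here are illustrative.
type rpcClient struct {
	baseURL string
	limiter *rate.Limiter
}

func newRPCClient(baseURL string, requestsPerMinute int) *rpcClient {
	return &rpcClient{
		baseURL: baseURL,
		// Allow one request every minute/requestsPerMinute, with no bursting.
		limiter: rate.NewLimiter(rate.Every(time.Minute/time.Duration(requestsPerMinute)), 1),
	}
}

// wait blocks until the next request is allowed or the context is cancelled.
func (c *rpcClient) wait(ctx context.Context) error {
	return c.limiter.Wait(ctx)
}
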
@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=30Mi
 RESOURCES_LIMITS_CPU=20m
 RESOURCES_REQUESTS_MEMORY=15Mi
 RESOURCES_REQUESTS_CPU=10m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 P2P_NETWORK=testnet
 AWS_IAM_ROLE=
@@ -40,6 +41,9 @@ CELO_REQUESTS_PER_MINUTE=12
 ETHEREUM_BASE_URL=https://rpc.ankr.com/eth_goerli
 ETHEREUM_REQUESTS_PER_MINUTE=12
 
+EVMOS_BASE_URL=https://evmos-testnet-rpc.polkachu.com
+EVMOS_REQUESTS_PER_MINUTE=12
+
 FANTOM_BASE_URL=https://rpc.testnet.fantom.network
 FANTOM_REQUESTS_PER_MINUTE=12
 
@@ -52,6 +56,9 @@ KARURA_REQUESTS_PER_MINUTE=12
 KLAYTN_BASE_URL=https://api.baobab.klaytn.net:8651
 KLAYTN_REQUESTS_PER_MINUTE=12
 
+KUJIRA_BASE_URL=https://kujira-testnet-rpc.polkachu.com
+KUJIRA_REQUESTS_PER_MINUTE=12
+
 MOONBEAM_BASE_URL=https://rpc.api.moonbase.moonbeam.network
 MOONBEAM_REQUESTS_PER_MINUTE=12
 
@@ -67,6 +74,9 @@ OSMOSIS_REQUESTS_PER_MINUTE=12
 POLYGON_BASE_URL=https://rpc.ankr.com/polygon_mumbai
 POLYGON_REQUESTS_PER_MINUTE=12
 
+SEI_BASE_URL=https://sei-a2-rpc.brocha.in
+SEI_REQUESTS_PER_MINUTE=12
+
 SOLANA_BASE_URL=https://api.devnet.solana.com
 SOLANA_REQUESTS_PER_MINUTE=12
 
@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=30Mi
 RESOURCES_LIMITS_CPU=60m
 RESOURCES_REQUESTS_MEMORY=15Mi
 RESOURCES_REQUESTS_CPU=40m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 P2P_NETWORK=mainnet
 AWS_IAM_ROLE=
@@ -40,6 +41,9 @@ CELO_REQUESTS_PER_MINUTE=12
 ETHEREUM_BASE_URL=https://rpc.ankr.com/eth
 ETHEREUM_REQUESTS_PER_MINUTE=12
 
+EVMOS_BASE_URL=https://tm.evmos.lava.build
+EVMOS_REQUESTS_PER_MINUTE=12
+
 FANTOM_BASE_URL=https://rpc.ankr.com/fantom
 FANTOM_REQUESTS_PER_MINUTE=12
 
@@ -54,6 +58,9 @@ KARURA_REQUESTS_PER_MINUTE=12
 KLAYTN_BASE_URL=https://klaytn-mainnet-rpc.allthatnode.com:8551
 KLAYTN_REQUESTS_PER_MINUTE=12
 
+KUJIRA_BASE_URL=https://kujira-rpc.ibs.team
+KUJIRA_REQUESTS_PER_MINUTE=12
+
 MOONBEAM_BASE_URL=https://rpc.api.moonbeam.network
 MOONBEAM_REQUESTS_PER_MINUTE=12
 
@@ -69,6 +76,9 @@ OSMOSIS_REQUESTS_PER_MINUTE=12
 POLYGON_BASE_URL=https://rpc.ankr.com/polygon
 POLYGON_REQUESTS_PER_MINUTE=12
 
+SEI_BASE_URL=https://rpc.ankr.com/sei
+SEI_REQUESTS_PER_MINUTE=12
+
 SOLANA_BASE_URL=https://api.mainnet-beta.solana.com
 SOLANA_REQUESTS_PER_MINUTE=12
 
@@ -7,7 +7,8 @@ RESOURCES_LIMITS_MEMORY=30Mi
 RESOURCES_LIMITS_CPU=20m
 RESOURCES_REQUESTS_MEMORY=15Mi
 RESOURCES_REQUESTS_CPU=10m
-SQS_URL=
+PIPELINE_SQS_URL=
+NOTIFICATIONS_SQS_URL=
 SQS_AWS_REGION=
 P2P_NETWORK=testnet
 AWS_IAM_ROLE=
@@ -40,6 +41,9 @@ CELO_REQUESTS_PER_MINUTE=12
 ETHEREUM_BASE_URL=https://rpc.ankr.com/eth_goerli
 ETHEREUM_REQUESTS_PER_MINUTE=12
 
+EVMOS_BASE_URL=https://evmos-testnet-rpc.polkachu.com
+EVMOS_REQUESTS_PER_MINUTE=12
+
 FANTOM_BASE_URL=https://rpc.testnet.fantom.network
 FANTOM_REQUESTS_PER_MINUTE=12
 
@@ -52,6 +56,9 @@ KARURA_REQUESTS_PER_MINUTE=12
 KLAYTN_BASE_URL=https://api.baobab.klaytn.net:8651
 KLAYTN_REQUESTS_PER_MINUTE=12
 
+KUJIRA_BASE_URL=https://kujira-testnet-rpc.polkachu.com
+KUJIRA_REQUESTS_PER_MINUTE=12
+
 MOONBEAM_BASE_URL=https://rpc.api.moonbase.moonbeam.network
 MOONBEAM_REQUESTS_PER_MINUTE=12
 
@@ -67,6 +74,9 @@ OSMOSIS_REQUESTS_PER_MINUTE=12
 POLYGON_BASE_URL=https://rpc.ankr.com/polygon_mumbai
 POLYGON_REQUESTS_PER_MINUTE=12
 
+SEI_BASE_URL=https://sei-a2-rpc.brocha.in
+SEI_REQUESTS_PER_MINUTE=12
+
 SOLANA_BASE_URL=https://api.devnet.solana.com
 SOLANA_REQUESTS_PER_MINUTE=12
 
Some files were not shown because too many files have changed in this diff.