Merge branch 'metaplex-foundation:master' into master

This commit is contained in:
stegaBOB 2021-09-20 17:07:11 -04:00 committed by GitHub
commit c131b61a58
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 412 additions and 158 deletions

View File

@ -16,9 +16,17 @@ import {
MAX_SYMBOL_LENGTH,
MAX_URI_LENGTH,
METADATA_PREFIX,
decodeMetadata,
getAuctionExtended,
decodeAuction,
} from '../../actions';
import { AccountInfo, Connection, PublicKey } from '@solana/web3.js';
import { AccountAndPubkey, MetaState, ProcessAccountsFunc } from './types';
import {
AccountAndPubkey,
MetaState,
ProcessAccountsFunc,
UpdateStateValueFunc,
} from './types';
import { isMetadataPartOfStore } from './isMetadataPartOfStore';
import { processAuctions } from './processAuctions';
import { processMetaplexAccounts } from './processMetaplexAccounts';
@ -28,6 +36,23 @@ import { ParsedAccount } from '../accounts/types';
import { getEmptyMetaState } from './getEmptyMetaState';
import { getMultipleAccounts } from '../accounts/getMultipleAccounts';
export const USE_SPEED_RUN = false;
const WHITELISTED_METADATA = ['98vYFjBYS9TguUMWQRPjy2SZuxKuUMcqR4vnQiLjZbte'];
const WHITELISTED_AUCTION = ['D8wMB5iLZnsV7XQjpwqXaDynUtFuDs7cRXvEGNj1NF1e'];
const AUCTION_TO_METADATA: Record<string, string[]> = {
D8wMB5iLZnsV7XQjpwqXaDynUtFuDs7cRXvEGNj1NF1e: [
'98vYFjBYS9TguUMWQRPjy2SZuxKuUMcqR4vnQiLjZbte',
],
};
const AUCTION_TO_VAULT: Record<string, string> = {
D8wMB5iLZnsV7XQjpwqXaDynUtFuDs7cRXvEGNj1NF1e:
'3wHCBd3fYRPWjd5GqzrXanLJUKRyU3nECKbTPKfVwcFX',
};
const WHITELISTED_AUCTION_MANAGER = [
'3HD2C8oCL8dpqbXo8hq3CMw6tRSZDZJGajLxnrZ3ZkYx',
];
const WHITELISTED_VAULT = ['3wHCBd3fYRPWjd5GqzrXanLJUKRyU3nECKbTPKfVwcFX'];
async function getProgramAccounts(
connection: Connection,
programId: StringPublicKey,
@ -82,6 +107,221 @@ async function getProgramAccounts(
return data;
}
/**
 * "Speed run" loader: instead of scanning every program account on chain,
 * pull only the hard-coded whitelisted metadata / auction / auction-manager /
 * vault accounts (plus their directly related accounts) into a fresh
 * MetaState. Used when USE_SPEED_RUN is enabled to make startup fast.
 */
export const limitedLoadAccounts = async (connection: Connection) => {
  const tempCache: MetaState = getEmptyMetaState();
  // Setter that writes parsed accounts into tempCache.
  const updateTemp = makeSetter(tempCache);

  // Adapts a ProcessAccountsFunc so it can consume a whole account list;
  // accounts are processed sequentially, each writing into tempCache.
  const forEach =
    (fn: ProcessAccountsFunc) => async (accounts: AccountAndPubkey[]) => {
      for (const account of accounts) {
        await fn(account, updateTemp, false);
      }
    };

  // Fetch one metadata account and its edition account, and process both.
  const pullMetadata = async (metadata: string) => {
    const mdKey = new PublicKey(metadata);
    const md = await connection.getAccountInfo(mdKey);
    // Decode even when md is null (empty buffer) so we can derive the
    // edition key; the actual processing below is guarded by `if (md)`.
    const mdObject = decodeMetadata(
      Buffer.from(md?.data || new Uint8Array([])),
    );
    const editionKey = await getEdition(mdObject.mint);
    const editionData = await connection.getAccountInfo(
      new PublicKey(editionKey),
    );
    if (md) {
      // Downstream processors expect `owner` as a base58 string, not a
      // PublicKey; the cast is hidden from the compiler on purpose.
      //@ts-ignore
      md.owner = md.owner.toBase58();
      processMetaData(
        {
          pubkey: metadata,
          account: md,
        },
        updateTemp,
        false,
      );
      if (editionData) {
        //@ts-ignore
        editionData.owner = editionData.owner.toBase58();
        processMetaData(
          {
            pubkey: editionKey,
            account: editionData,
          },
          updateTemp,
          false,
        );
      }
    }
  };

  // Fetch an auction account plus its auction-extended PDA (derived from the
  // whitelisted vault resource) and process both.
  const pullAuction = async (auction: string) => {
    const auctionExtendedKey = await getAuctionExtended({
      auctionProgramId: AUCTION_ID,
      resource: AUCTION_TO_VAULT[auction],
    });
    const auctionData = await getMultipleAccounts(
      connection,
      [auction, auctionExtendedKey],
      'single',
    );
    if (auctionData) {
      // NOTE(review): .map is used purely for side effects here.
      auctionData.keys.map((pubkey, i) => {
        processAuctions(
          {
            pubkey,
            account: auctionData.array[i],
          },
          updateTemp,
          false,
        );
      });
    }
  };

  // Fetch and process a single auction-manager account.
  const pullAuctionManager = async (auctionManager: string) => {
    const auctionManagerKey = new PublicKey(auctionManager);
    const auctionManagerData = await connection.getAccountInfo(
      auctionManagerKey,
    );
    if (auctionManagerData) {
      //@ts-ignore
      auctionManagerData.owner = auctionManagerData.owner.toBase58();
      processMetaplexAccounts(
        {
          pubkey: auctionManager,
          account: auctionManagerData,
        },
        updateTemp,
        false,
      );
    }
  };

  // Fetch and process a single vault account.
  const pullVault = async (vault: string) => {
    const vaultKey = new PublicKey(vault);
    const vaultData = await connection.getAccountInfo(vaultKey);
    if (vaultData) {
      //@ts-ignore
      vaultData.owner = vaultData.owner.toBase58();
      processVaultData(
        {
          pubkey: vault,
          account: vaultData,
        },
        updateTemp,
        false,
      );
    }
  };

  // All pulls are independent; run them concurrently. The memcmp offsets
  // below are byte positions of the referenced pubkey within each account
  // layout — presumably matching the on-chain program structs; confirm
  // against the auction/vault/metaplex program definitions if they change.
  const promises = [
    ...WHITELISTED_METADATA.map(md => pullMetadata(md)),
    ...WHITELISTED_AUCTION.map(a => pullAuction(a)),
    ...WHITELISTED_AUCTION_MANAGER.map(a => pullAuctionManager(a)),
    ...WHITELISTED_VAULT.map(a => pullVault(a)),
    // bidder metadata pull
    ...WHITELISTED_AUCTION.map(a =>
      getProgramAccounts(connection, AUCTION_ID, {
        filters: [
          {
            memcmp: {
              offset: 32,
              bytes: a,
            },
          },
        ],
      }).then(forEach(processAuctions)),
    ),
    // bidder pot pull
    ...WHITELISTED_AUCTION.map(a =>
      getProgramAccounts(connection, AUCTION_ID, {
        filters: [
          {
            memcmp: {
              offset: 64,
              bytes: a,
            },
          },
        ],
      }).then(forEach(processAuctions)),
    ),
    // safety deposit pull
    ...WHITELISTED_VAULT.map(v =>
      getProgramAccounts(connection, VAULT_ID, {
        filters: [
          {
            memcmp: {
              offset: 1,
              bytes: v,
            },
          },
        ],
      }).then(forEach(processVaultData)),
    ),
    // bid redemptions
    ...WHITELISTED_AUCTION_MANAGER.map(a =>
      getProgramAccounts(connection, METAPLEX_ID, {
        filters: [
          {
            memcmp: {
              offset: 9,
              bytes: a,
            },
          },
        ],
      }).then(forEach(processMetaplexAccounts)),
    ),
    // safety deposit configs
    ...WHITELISTED_AUCTION_MANAGER.map(a =>
      getProgramAccounts(connection, METAPLEX_ID, {
        filters: [
          {
            memcmp: {
              offset: 1,
              bytes: a,
            },
          },
        ],
      }).then(forEach(processMetaplexAccounts)),
    ),
    // prize tracking tickets (one query per metadata key of each auction)
    ...Object.keys(AUCTION_TO_METADATA)
      .map(key =>
        AUCTION_TO_METADATA[key]
          .map(md =>
            getProgramAccounts(connection, METAPLEX_ID, {
              filters: [
                {
                  memcmp: {
                    offset: 1,
                    bytes: md,
                  },
                },
              ],
            }).then(forEach(processMetaplexAccounts)),
          )
          .flat(),
      )
      .flat(),
    // whitelisted creators
    getProgramAccounts(connection, METAPLEX_ID, {
      filters: [
        {
          dataSize: MAX_WHITELISTED_CREATOR_SIZE,
        },
      ],
    }).then(forEach(processMetaplexAccounts)),
  ];

  await Promise.all(promises);

  // Second pass: resolve cross-references between the accounts pulled above.
  await postProcessMetadata(tempCache, true);

  return tempCache;
};
export const loadAccounts = async (connection: Connection, all: boolean) => {
const tempCache: MetaState = getEmptyMetaState();
const updateTemp = makeSetter(tempCache);
@ -93,161 +333,152 @@ export const loadAccounts = async (connection: Connection, all: boolean) => {
}
};
let isSelectivePullMetadata = false;
const pullMetadata = async (creators: AccountAndPubkey[]) => {
await forEach(processMetaplexAccounts)(creators);
const whitelistedCreators = Object.values(
tempCache.whitelistedCreatorsByCreator,
);
if (whitelistedCreators.length > 3) {
console.log(' too many creators, pulling all nfts in one go');
additionalPromises.push(
getProgramAccounts(connection, METADATA_PROGRAM_ID).then(
forEach(processMetaData),
),
);
} else {
console.log('pulling optimized nfts');
isSelectivePullMetadata = true;
for (let i = 0; i < MAX_CREATOR_LIMIT; i++) {
for (let j = 0; j < whitelistedCreators.length; j++) {
additionalPromises.push(
getProgramAccounts(connection, METADATA_PROGRAM_ID, {
filters: [
{
memcmp: {
offset:
1 + // key
32 + // update auth
32 + // mint
4 + // name string length
MAX_NAME_LENGTH + // name
4 + // uri string length
MAX_URI_LENGTH + // uri
4 + // symbol string length
MAX_SYMBOL_LENGTH + // symbol
2 + // seller fee basis points
1 + // whether or not there is a creators vec
4 + // creators vec length
i * MAX_CREATOR_LEN,
bytes: whitelistedCreators[j].info.address,
},
},
],
}).then(forEach(processMetaData)),
);
}
}
}
};
const pullEditions = async () => {
console.log('Pulling editions for optimized metadata');
let setOf100MetadataEditionKeys: string[] = [];
const editionPromises: Promise<{
keys: string[];
array: AccountInfo<Buffer>[];
}>[] = [];
for (let i = 0; i < tempCache.metadata.length; i++) {
let edition: StringPublicKey;
if (tempCache.metadata[i].info.editionNonce != null) {
edition = (
await PublicKey.createProgramAddress(
[
Buffer.from(METADATA_PREFIX),
toPublicKey(METADATA_PROGRAM_ID).toBuffer(),
toPublicKey(tempCache.metadata[i].info.mint).toBuffer(),
new Uint8Array([tempCache.metadata[i].info.editionNonce || 0]),
],
toPublicKey(METADATA_PROGRAM_ID),
)
).toBase58();
} else {
edition = await getEdition(tempCache.metadata[i].info.mint);
}
setOf100MetadataEditionKeys.push(edition);
if (setOf100MetadataEditionKeys.length >= 100) {
editionPromises.push(
getMultipleAccounts(
connection,
setOf100MetadataEditionKeys,
'recent',
),
);
setOf100MetadataEditionKeys = [];
}
}
if (setOf100MetadataEditionKeys.length >= 0) {
editionPromises.push(
getMultipleAccounts(connection, setOf100MetadataEditionKeys, 'recent'),
);
setOf100MetadataEditionKeys = [];
}
const responses = await Promise.all(editionPromises);
for (let i = 0; i < responses.length; i++) {
const returnedAccounts = responses[i];
for (let j = 0; j < returnedAccounts.array.length; j++) {
processMetaData(
{
pubkey: returnedAccounts.keys[j],
account: returnedAccounts.array[j],
},
updateTemp,
all,
);
}
}
console.log(
'Edition size',
Object.keys(tempCache.editions).length,
Object.keys(tempCache.masterEditions).length,
);
};
const IS_BIG_STORE =
all || process.env.NEXT_PUBLIC_BIG_STORE?.toLowerCase() === 'true';
console.log(`Is big store: ${IS_BIG_STORE}`);
const additionalPromises: Promise<void>[] = [];
const basePromises = [
getProgramAccounts(connection, VAULT_ID).then(forEach(processVaultData)),
getProgramAccounts(connection, AUCTION_ID).then(forEach(processAuctions)),
getProgramAccounts(connection, METAPLEX_ID).then(
forEach(processMetaplexAccounts),
),
IS_BIG_STORE
? getProgramAccounts(connection, METADATA_PROGRAM_ID).then(
forEach(processMetaData),
)
: getProgramAccounts(connection, METAPLEX_ID, {
filters: [
{
dataSize: MAX_WHITELISTED_CREATOR_SIZE,
},
],
}).then(pullMetadata),
getProgramAccounts(connection, METAPLEX_ID, {
filters: [
{
dataSize: MAX_WHITELISTED_CREATOR_SIZE,
},
],
}).then(pullMetadata),
];
await Promise.all(basePromises);
const additionalPromises: Promise<void>[] = getAdditionalPromises(
connection,
tempCache,
forEach,
);
await Promise.all(additionalPromises);
await postProcessMetadata(tempCache, all);
console.log('Metadata size', tempCache.metadata.length);
if (isSelectivePullMetadata) {
await pullEditions();
}
await pullEditions(connection, updateTemp, tempCache, all);
return tempCache;
};
/**
 * Fetches the edition account for every metadata entry in `tempCache` and
 * runs it through processMetaData, populating `tempCache.editions` /
 * `tempCache.masterEditions`.
 *
 * Edition PDAs are derived locally (via the stored editionNonce when
 * available, falling back to getEdition) and fetched in batches of 100
 * through getMultipleAccounts to keep the RPC call count low.
 *
 * @param connection RPC connection used for the batched account fetches.
 * @param updateTemp setter writing parsed accounts into tempCache.
 * @param tempCache  state whose metadata list drives the pull.
 * @param all        forwarded to processMetaData (store filtering flag).
 */
const pullEditions = async (
  connection: Connection,
  updateTemp: UpdateStateValueFunc,
  tempCache: MetaState,
  all: boolean,
) => {
  console.log('Pulling editions for optimized metadata');

  const BATCH_SIZE = 100;
  let batch: string[] = [];
  const editionPromises: Promise<{
    keys: string[];
    array: AccountInfo<Buffer>[];
  }>[] = [];

  for (const md of tempCache.metadata) {
    let edition: StringPublicKey;
    if (md.info.editionNonce != null) {
      // Cheap path: re-derive the PDA directly from the stored nonce
      // instead of searching for a bump seed.
      edition = (
        await PublicKey.createProgramAddress(
          [
            Buffer.from(METADATA_PREFIX),
            toPublicKey(METADATA_PROGRAM_ID).toBuffer(),
            toPublicKey(md.info.mint).toBuffer(),
            new Uint8Array([md.info.editionNonce || 0]),
          ],
          toPublicKey(METADATA_PROGRAM_ID),
        )
      ).toBase58();
    } else {
      edition = await getEdition(md.info.mint);
    }

    batch.push(edition);
    if (batch.length >= BATCH_SIZE) {
      editionPromises.push(getMultipleAccounts(connection, batch, 'recent'));
      batch = [];
    }
  }

  // Flush the final partial batch. Original code tested `length >= 0`,
  // which is always true and issued an RPC call with an empty key list.
  if (batch.length > 0) {
    editionPromises.push(getMultipleAccounts(connection, batch, 'recent'));
  }

  const responses = await Promise.all(editionPromises);
  for (const returnedAccounts of responses) {
    for (let j = 0; j < returnedAccounts.array.length; j++) {
      processMetaData(
        {
          pubkey: returnedAccounts.keys[j],
          account: returnedAccounts.array[j],
        },
        updateTemp,
        all,
      );
    }
  }

  console.log(
    'Edition size',
    Object.keys(tempCache.editions).length,
    Object.keys(tempCache.masterEditions).length,
  );
};
const getAdditionalPromises = (
connection: Connection,
tempCache: MetaState,
forEach: any,
): Promise<void>[] => {
console.log('pulling optimized nfts');
const whitelistedCreators = Object.values(
tempCache.whitelistedCreatorsByCreator,
);
const additionalPromises: Promise<void>[] = [];
for (let i = 0; i < MAX_CREATOR_LIMIT; i++) {
for (let j = 0; j < whitelistedCreators.length; j++) {
additionalPromises.push(
getProgramAccounts(connection, METADATA_PROGRAM_ID, {
filters: [
{
memcmp: {
offset:
1 + // key
32 + // update auth
32 + // mint
4 + // name string length
MAX_NAME_LENGTH + // name
4 + // uri string length
MAX_URI_LENGTH + // uri
4 + // symbol string length
MAX_SYMBOL_LENGTH + // symbol
2 + // seller fee basis points
1 + // whether or not there is a creators vec
4 + // creators vec length
i * MAX_CREATOR_LEN,
bytes: whitelistedCreators[j].info.address,
},
},
],
}).then(forEach(processMetaData)),
);
}
}
return additionalPromises;
};
export const makeSetter =
(state: MetaState) =>
(prop: keyof MetaState, key: string, value: ParsedAccount<any>) => {

View File

@ -2,7 +2,11 @@ import React, { useCallback, useContext, useEffect, useState } from 'react';
import { queryExtendedMetadata } from './queryExtendedMetadata';
import { subscribeAccountsChange } from './subscribeAccountsChange';
import { getEmptyMetaState } from './getEmptyMetaState';
import { loadAccounts } from './loadAccounts';
import {
limitedLoadAccounts,
loadAccounts,
USE_SPEED_RUN,
} from './loadAccounts';
import { MetaContextState, MetaState } from './types';
import { useConnection } from '../connection';
import { useStore } from '../store';
@ -57,7 +61,9 @@ export function MetaProvider({ children = null as any }) {
console.log('-----> Query started');
const nextState = await loadAccounts(connection, all);
const nextState = !USE_SPEED_RUN
? await loadAccounts(connection, all)
: await limitedLoadAccounts(connection);
console.log('------->Query finished');

View File

@ -19,7 +19,6 @@ export const processMetaData: ProcessAccountsFunc = (
setter,
) => {
if (!isMetadataAccount(account)) return;
try {
if (isMetadataV1Account(account)) {
const metadata = decodeMetadata(account.data);

View File

@ -1,3 +1,2 @@
REACT_APP_STORE_OWNER_ADDRESS_ADDRESS=
REACT_APP_STORE_ADDRESS=
REACT_APP_BIG_STORE=FALSE
REACT_APP_STORE_ADDRESS=

View File

@ -94,4 +94,4 @@
"react-dom": "*"
},
"license": "MIT"
}
}

View File

@ -5,13 +5,14 @@ import {
cache,
ParsedAccount,
StringPublicKey,
useMeta,
USE_SPEED_RUN,
} from '@oyster/common';
export const useHighestBidForAuction = (
auctionPubkey: StringPublicKey | string,
) => {
const bids = useBidsForAuction(auctionPubkey);
const winner = useMemo(() => {
return bids?.[0];
}, [bids]);
@ -29,17 +30,18 @@ export const useBidsForAuction = (auctionPubkey: StringPublicKey | string) => {
: auctionPubkey,
[auctionPubkey],
);
const { bidderMetadataByAuctionAndBidder } = useMeta();
const [bids, setBids] = useState<ParsedAccount<BidderMetadata>[]>([]);
useEffect(() => {
const dispose = cache.emitter.onCache(args => {
if (args.parser === BidderMetadataParser) {
setBids(getBids(id));
setBids(getBids(bidderMetadataByAuctionAndBidder, id));
}
});
setBids(getBids(id));
setBids(getBids(bidderMetadataByAuctionAndBidder, id));
return () => {
dispose();
@ -49,21 +51,38 @@ export const useBidsForAuction = (auctionPubkey: StringPublicKey | string) => {
return bids;
};
const getBids = (id?: StringPublicKey) => {
return cache
.byParser(BidderMetadataParser)
.filter(key => {
const bidder = cache.get(key) as ParsedAccount<BidderMetadata>;
if (!bidder) {
return false;
}
const getBids = (
bidderMetadataByAuctionAndBidder: Record<
string,
ParsedAccount<BidderMetadata>
>,
id?: StringPublicKey,
) => {
// I have no idea why, but the cache doesn't work with speed run and I couldn't figure it out for the life of me,
// because that file is so confusing I have no idea how it works.
// So we use the tempCache for pulling bids instead. B, come save me. - J
let bids;
if (USE_SPEED_RUN) {
bids = Object.values(bidderMetadataByAuctionAndBidder).filter(
b => b.info.auctionPubkey === id,
);
} else {
bids = cache
.byParser(BidderMetadataParser)
.filter(key => {
const bidder = cache.get(key) as ParsedAccount<BidderMetadata>;
return id === bidder.info.auctionPubkey;
})
.map(key => {
const bidder = cache.get(key) as ParsedAccount<BidderMetadata>;
return bidder;
})
if (!bidder) {
return false;
}
return id === bidder.info.auctionPubkey;
})
.map(key => {
const bidder = cache.get(key) as ParsedAccount<BidderMetadata>;
return bidder;
});
}
return bids
.sort((a, b) => {
const lastBidDiff = b.info.lastBid.sub(a.info.lastBid).toNumber();
if (lastBidDiff === 0) {