lang: Zero copy deserialization (#202)

Armani Ferrante 2021-04-18 03:07:48 +08:00 committed by GitHub
parent 218c2d25ce
commit b6afb30474
22 changed files with 978 additions and 80 deletions

View File

@ -19,7 +19,7 @@ _examples: &examples
- npm install -g mocha
- npm install -g ts-mocha
- npm install -g typescript
- npm install -g @project-serum/anchor
- cd ts && yarn && yarn build && npm link && cd ../
- npm install -g @project-serum/serum
- npm install -g @project-serum/common
- npm install -g @solana/spl-token
@ -28,7 +28,7 @@ _examples: &examples
- export PATH="/home/travis/.local/share/solana/install/active_release/bin:$PATH"
- export NODE_PATH="/home/travis/.nvm/versions/node/v$NODE_VERSION/lib/node_modules/:$NODE_PATH"
- yes | solana-keygen new
- cargo install --git https://github.com/project-serum/anchor anchor-cli --locked
- cargo install --path $TRAVIS_BUILD_DIR/cli anchor-cli --locked
jobs:
include:
@ -59,6 +59,7 @@ jobs:
- pushd examples/events && anchor test && popd
- pushd examples/cashiers-check && anchor test && popd
- pushd examples/typescript && yarn && anchor test && popd
- pushd examples/zero-copy && yarn && anchor test && popd
- <<: *examples
name: Runs the examples 2
script:

View File

@ -14,6 +14,8 @@ incremented for features.
## Features
* lang: Allows one to specify multiple `with` targets when creating associated accounts ([#197](https://github.com/project-serum/anchor/pull/197)).
* lang, ts: Add array support ([#202](https://github.com/project-serum/anchor/pull/202)).
* lang: Zero copy deserialization for accounts ([#202](https://github.com/project-serum/anchor/pull/202)).
## [0.4.3] - 2021-04-13

7
Cargo.lock generated
View File

@ -181,6 +181,7 @@ dependencies = [
"anchor-derive-accounts",
"base64 0.13.0",
"borsh",
"bytemuck",
"solana-program",
"thiserror",
]
@ -451,6 +452,12 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7"
[[package]]
name = "bytemuck"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bed57e2090563b83ba8f83366628ce535a7584c9afa4c9fc0612a03925c6df58"
[[package]]
name = "byteorder"
version = "1.3.4"

View File

@ -0,0 +1,2 @@
cluster = "localnet"
wallet = "~/.config/solana/id.json"

View File

@ -0,0 +1,4 @@
[workspace]
members = [
"programs/*"
]

View File

@ -0,0 +1,12 @@
// Migrations are an early feature. Currently, they're nothing more than this
// single deploy script that's invoked from the CLI, injecting a provider
// configured from the workspace's Anchor.toml.
const anchor = require("@project-serum/anchor");
module.exports = async function (provider) {
// Configure client to use the provider.
anchor.setProvider(provider);
// Add your deploy script here.
}

View File

@ -0,0 +1,18 @@
[package]
name = "zero-copy"
version = "0.1.0"
description = "Created with Anchor"
edition = "2018"
[lib]
crate-type = ["cdylib", "lib"]
name = "zero_copy"
[features]
no-entrypoint = []
no-idl = []
cpi = ["no-entrypoint"]
default = []
[dependencies]
anchor-lang = { path = "../../../../lang" }

View File

@ -0,0 +1,2 @@
[target.bpfel-unknown-unknown.dependencies.std]
features = []

View File

@ -0,0 +1,144 @@
//! This example demonstrates the use of zero copy deserialization for accounts.
//! The most noticeable benefit of zero copy is the ability to create
//! accounts larger than the size of the stack or heap, as is
//! demonstrated by the event queue in this example.
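//! Concretely, the `EventQ` account defined below holds 25,000 events of 40
//! bytes each (roughly 1 MB of account data), far more than could be copied
//! into a stack- or heap-allocated structure by borsh deserialization.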
use anchor_lang::prelude::*;
#[program]
pub mod zero_copy {
use super::*;
pub fn create_foo(ctx: Context<CreateFoo>) -> ProgramResult {
let foo = &mut ctx.accounts.foo.load_init()?;
foo.authority = *ctx.accounts.authority.key;
foo.set_second_authority(ctx.accounts.authority.key);
Ok(())
}
pub fn update_foo(ctx: Context<UpdateFoo>, data: u64) -> ProgramResult {
let mut foo = ctx.accounts.foo.load_mut()?;
foo.data = data;
Ok(())
}
pub fn update_foo_second(ctx: Context<UpdateFooSecond>, second_data: u64) -> ProgramResult {
let mut foo = ctx.accounts.foo.load_mut()?;
foo.second_data = second_data;
Ok(())
}
pub fn create_bar(ctx: Context<CreateBar>) -> ProgramResult {
let bar = &mut ctx.accounts.bar.load_init()?;
bar.authority = *ctx.accounts.authority.key;
Ok(())
}
pub fn update_bar(ctx: Context<UpdateBar>, data: u64) -> ProgramResult {
let bar = &mut ctx.accounts.bar.load_mut()?;
bar.data = data;
Ok(())
}
pub fn create_large_account(_ctx: Context<CreateLargeAccount>) -> ProgramResult {
Ok(())
}
pub fn update_large_account(
ctx: Context<UpdateLargeAccount>,
idx: u32,
data: u64,
) -> ProgramResult {
let event_q = &mut ctx.accounts.event_q.load_mut()?;
event_q.events[idx as usize] = Event {
data,
from: *ctx.accounts.from.key,
};
Ok(())
}
}
#[derive(Accounts)]
pub struct CreateFoo<'info> {
#[account(init)]
foo: Loader<'info, Foo>,
#[account(signer)]
authority: AccountInfo<'info>,
rent: Sysvar<'info, Rent>,
}
#[derive(Accounts)]
pub struct UpdateFoo<'info> {
#[account(mut, has_one = authority)]
foo: Loader<'info, Foo>,
#[account(signer)]
authority: AccountInfo<'info>,
}
#[derive(Accounts)]
pub struct UpdateFooSecond<'info> {
#[account(mut, "&foo.load()?.get_second_authority() == second_authority.key")]
foo: Loader<'info, Foo>,
#[account(signer)]
second_authority: AccountInfo<'info>,
}
#[derive(Accounts)]
pub struct CreateBar<'info> {
#[account(associated = authority, with = foo)]
bar: Loader<'info, Bar>,
#[account(signer)]
authority: AccountInfo<'info>,
foo: Loader<'info, Foo>,
rent: Sysvar<'info, Rent>,
system_program: AccountInfo<'info>,
}
#[derive(Accounts)]
pub struct UpdateBar<'info> {
#[account(mut, has_one = authority)]
bar: Loader<'info, Bar>,
#[account(signer)]
authority: AccountInfo<'info>,
}
#[derive(Accounts)]
pub struct CreateLargeAccount<'info> {
#[account(init)]
event_q: Loader<'info, EventQ>,
rent: Sysvar<'info, Rent>,
}
#[derive(Accounts)]
pub struct UpdateLargeAccount<'info> {
#[account(mut)]
event_q: Loader<'info, EventQ>,
#[account(signer)]
from: AccountInfo<'info>,
}
#[account(zero_copy)]
pub struct Foo {
pub authority: Pubkey,
pub data: u64,
pub second_data: u64,
#[accessor(Pubkey)] // The `accessor` api will likely be removed.
pub second_authority: [u8; 32],
}
#[associated(zero_copy)]
pub struct Bar {
pub authority: Pubkey,
pub data: u64,
}
#[account(zero_copy)]
pub struct EventQ {
pub events: [Event; 25000],
}
#[zero_copy]
pub struct Event {
pub from: Pubkey,
pub data: u64,
}

View File

@ -0,0 +1,229 @@
const anchor = require("@project-serum/anchor");
const assert = require("assert");
describe("zero-copy", () => {
// Configure the client to use the local cluster.
anchor.setProvider(anchor.Provider.env());
const program = anchor.workspace.ZeroCopy;
const foo = new anchor.web3.Account();
it("Is creates a zero copy account", async () => {
await program.rpc.createFoo({
accounts: {
foo: foo.publicKey,
authority: program.provider.wallet.publicKey,
rent: anchor.web3.SYSVAR_RENT_PUBKEY,
},
instructions: [await program.account.foo.createInstruction(foo)],
signers: [foo],
});
const account = await program.account.foo(foo.publicKey);
assert.ok(
JSON.stringify(account.authority.toBuffer()) ===
JSON.stringify(program.provider.wallet.publicKey.toBuffer())
);
assert.ok(account.data.toNumber() === 0);
assert.ok(account.secondData.toNumber() === 0);
assert.ok(
JSON.stringify(account.secondAuthority) ===
JSON.stringify([...program.provider.wallet.publicKey.toBuffer()])
);
});
it("Updates a zero copy account field", async () => {
await program.rpc.updateFoo(new anchor.BN(1234), {
accounts: {
foo: foo.publicKey,
authority: program.provider.wallet.publicKey,
},
});
const account = await program.account.foo(foo.publicKey);
assert.ok(
JSON.stringify(account.authority.toBuffer()) ===
JSON.stringify(program.provider.wallet.publicKey.toBuffer())
);
assert.ok(account.data.toNumber() === 1234);
assert.ok(account.secondData.toNumber() === 0);
assert.ok(
JSON.stringify(account.secondAuthority) ===
JSON.stringify([...program.provider.wallet.publicKey.toBuffer()])
);
});
it("Updates a a second zero copy account field", async () => {
await program.rpc.updateFooSecond(new anchor.BN(55), {
accounts: {
foo: foo.publicKey,
secondAuthority: program.provider.wallet.publicKey,
},
});
const account = await program.account.foo(foo.publicKey);
assert.ok(
JSON.stringify(account.authority.toBuffer()) ===
JSON.stringify(program.provider.wallet.publicKey.toBuffer())
);
assert.ok(account.data.toNumber() === 1234);
assert.ok(account.secondData.toNumber() === 55);
assert.ok(
JSON.stringify(account.secondAuthority) ===
JSON.stringify([...program.provider.wallet.publicKey.toBuffer()])
);
});
it("Creates an associated zero copy account", async () => {
await program.rpc.createBar({
accounts: {
bar: await program.account.bar.associatedAddress(
program.provider.wallet.publicKey,
foo.publicKey
),
authority: program.provider.wallet.publicKey,
foo: foo.publicKey,
rent: anchor.web3.SYSVAR_RENT_PUBKEY,
systemProgram: anchor.web3.SystemProgram.programId,
},
});
const bar = await program.account.bar.associated(
program.provider.wallet.publicKey,
foo.publicKey
);
assert.ok(bar.authority.equals(program.provider.wallet.publicKey));
assert.ok(bar.data.toNumber() === 0);
});
it("Updates an associated zero copy account", async () => {
await program.rpc.updateBar(new anchor.BN(99), {
accounts: {
bar: await program.account.bar.associatedAddress(
program.provider.wallet.publicKey,
foo.publicKey
),
authority: program.provider.wallet.publicKey,
},
});
const bar = await program.account.bar.associated(
program.provider.wallet.publicKey,
foo.publicKey
);
assert.ok(bar.authority.equals(program.provider.wallet.publicKey));
assert.ok(bar.data.toNumber() === 99);
});
const eventQ = new anchor.web3.Account();
const size = 1000000 + 8; // 25,000 events * 40 bytes each (32-byte pubkey + 8-byte u64), plus the 8-byte discriminator.
it("Creates a large event queue", async () => {
await program.rpc.createLargeAccount({
accounts: {
eventQ: eventQ.publicKey,
rent: anchor.web3.SYSVAR_RENT_PUBKEY,
},
instructions: [
await program.account.eventQ.createInstruction(eventQ, size),
],
signers: [eventQ],
});
const account = await program.account.eventQ(eventQ.publicKey);
assert.ok(account.events.length === 25000);
account.events.forEach((event) => {
assert.ok(event.from.equals(new anchor.web3.PublicKey()));
assert.ok(event.data.toNumber() === 0);
});
});
it("Updates a large event queue", async () => {
// Set index 0.
await program.rpc.updateLargeAccount(0, new anchor.BN(48), {
accounts: {
eventQ: eventQ.publicKey,
from: program.provider.wallet.publicKey,
},
});
// Verify update.
let account = await program.account.eventQ(eventQ.publicKey);
assert.ok(account.events.length === 25000);
account.events.forEach((event, idx) => {
if (idx === 0) {
assert.ok(event.from.equals(program.provider.wallet.publicKey));
assert.ok(event.data.toNumber() === 48);
} else {
assert.ok(event.from.equals(new anchor.web3.PublicKey()));
assert.ok(event.data.toNumber() === 0);
}
});
// Set index 11111.
await program.rpc.updateLargeAccount(11111, new anchor.BN(1234), {
accounts: {
eventQ: eventQ.publicKey,
from: program.provider.wallet.publicKey,
},
});
// Verify update.
account = await program.account.eventQ(eventQ.publicKey);
assert.ok(account.events.length === 25000);
account.events.forEach((event, idx) => {
if (idx === 0) {
assert.ok(event.from.equals(program.provider.wallet.publicKey));
assert.ok(event.data.toNumber() === 48);
} else if (idx === 11111) {
assert.ok(event.from.equals(program.provider.wallet.publicKey));
assert.ok(event.data.toNumber() === 1234);
} else {
assert.ok(event.from.equals(new anchor.web3.PublicKey()));
assert.ok(event.data.toNumber() === 0);
}
});
// Set last index.
await program.rpc.updateLargeAccount(24999, new anchor.BN(99), {
accounts: {
eventQ: eventQ.publicKey,
from: program.provider.wallet.publicKey,
},
});
// Verify update.
account = await program.account.eventQ(eventQ.publicKey);
assert.ok(account.events.length === 25000);
account.events.forEach((event, idx) => {
if (idx === 0) {
assert.ok(event.from.equals(program.provider.wallet.publicKey));
assert.ok(event.data.toNumber() === 48);
} else if (idx === 11111) {
assert.ok(event.from.equals(program.provider.wallet.publicKey));
assert.ok(event.data.toNumber() === 1234);
} else if (idx === 24999) {
assert.ok(event.from.equals(program.provider.wallet.publicKey));
assert.ok(event.data.toNumber() === 99);
} else {
assert.ok(event.from.equals(new anchor.web3.PublicKey()));
assert.ok(event.data.toNumber() === 0);
}
});
});
it("Errors when setting an out of bounds index", async () => {
// Fail to set a non-existent index.
await assert.rejects(
async () => {
await program.rpc.updateLargeAccount(25000, new anchor.BN(1), {
accounts: {
eventQ: eventQ.publicKey,
from: program.provider.wallet.publicKey,
},
});
},
(err) => {
console.log("err", err);
return true;
}
);
});
});

View File

@ -18,9 +18,10 @@ anchor-attribute-error = { path = "./attribute/error", version = "0.4.3" }
anchor-attribute-program = { path = "./attribute/program", version = "0.4.3" }
anchor-attribute-state = { path = "./attribute/state", version = "0.4.3" }
anchor-attribute-interface = { path = "./attribute/interface", version = "0.4.3" }
anchor-derive-accounts = { path = "./derive/accounts", version = "0.4.3" }
anchor-attribute-event = { path = "./attribute/event", version = "0.4.3" }
anchor-derive-accounts = { path = "./derive/accounts", version = "0.4.3" }
base64 = "0.13.0"
borsh = "0.8.2"
bytemuck = "1.4.0"
solana-program = "1.6.3"
thiserror = "1.0.20"
base64 = "0.13.0"

View File

@ -17,12 +17,39 @@ use syn::parse_macro_input;
/// As a result, any calls to `AccountDeserialize`'s `try_deserialize` will
/// check this discriminator. If it doesn't match, an invalid account was given,
/// and the account deserialization will exit with an error.
///
/// # Zero Copy Deserialization
///
/// To enable zero copy deserialization, pass the `zero_copy` argument to the
/// macro as follows:
///
/// ```ignore
/// #[account(zero_copy)]
/// ```
///
/// This can be used to conveniently implement
/// [`ZeroCopy`](./trait.ZeroCopy.html) so that the account can be used
/// with [`Loader`](./struct.Loader.html).
///
/// Other than being more efficient, the most salient benefit this provides is
/// the ability to define account types larger than the max stack or heap size.
/// This is used in special cases, for example, the Serum DEX event queue. When
/// using borsh, one is limited, since the account has to be copied and
/// deserialized into a new data structure. With zero copy deserialization,
/// everything is, effectively, lazy loaded on field access.
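///
/// For illustration only, a hypothetical account and instruction handler
/// using zero copy might look like the following (the `Counter` names below
/// are made up for this sketch, not part of the library):
///
/// ```ignore
/// #[account(zero_copy)]
/// pub struct Counter {
///     pub authority: Pubkey,
///     pub count: u64,
/// }
///
/// #[derive(Accounts)]
/// pub struct UpdateCounter<'info> {
///     #[account(mut, has_one = authority)]
///     counter: Loader<'info, Counter>,
///     #[account(signer)]
///     authority: AccountInfo<'info>,
/// }
///
/// pub fn update_counter(ctx: Context<UpdateCounter>, count: u64) -> ProgramResult {
///     let mut counter = ctx.accounts.counter.load_mut()?;
///     counter.count = count;
///     Ok(())
/// }
/// ```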
#[proc_macro_attribute]
pub fn account(
args: proc_macro::TokenStream,
input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let namespace = args.to_string().replace("\"", "");
let is_zero_copy = match args.into_iter().next() {
None => false,
Some(tt) => match tt {
proc_macro::TokenTree::Literal(_) => false,
_ => namespace == "zero_copy",
},
};
let account_strct = parse_macro_input!(input as syn::ItemStruct);
let account_name = &account_strct.ident;
@ -30,7 +57,8 @@ pub fn account(
let discriminator: proc_macro2::TokenStream = {
// Namespace the discriminator to prevent collisions.
let discriminator_preimage = {
if namespace.is_empty() {
// For now, zero copy accounts can't be namespaced.
if is_zero_copy || namespace.is_empty() {
format!("account:{}", account_name.to_string())
} else {
format!("{}:{}", namespace, account_name.to_string())
@ -44,61 +72,111 @@ pub fn account(
format!("{:?}", discriminator).parse().unwrap()
};
    proc_macro::TokenStream::from({
        if is_zero_copy {
            quote! {
                #[zero_copy]
                #account_strct

                unsafe impl anchor_lang::__private::bytemuck::Pod for #account_name {}
                unsafe impl anchor_lang::__private::bytemuck::Zeroable for #account_name {}

                impl anchor_lang::ZeroCopy for #account_name {}

                impl anchor_lang::Discriminator for #account_name {
                    fn discriminator() -> [u8; 8] {
                        #discriminator
                    }
                }

                // This trait is useful for clients deserializing accounts.
                // It's expected on-chain programs deserialize via zero-copy.
                impl anchor_lang::AccountDeserialize for #account_name {
                    fn try_deserialize(buf: &mut &[u8]) -> std::result::Result<Self, ProgramError> {
                        if buf.len() < #discriminator.len() {
                            return Err(ProgramError::AccountDataTooSmall);
                        }
                        let given_disc = &buf[..8];
                        if &#discriminator != given_disc {
                            return Err(ProgramError::InvalidInstructionData);
                        }
                        Self::try_deserialize_unchecked(buf)
                    }

                    fn try_deserialize_unchecked(buf: &mut &[u8]) -> std::result::Result<Self, ProgramError> {
                        let data: &[u8] = &buf[8..];
                        // Re-interpret raw bytes into the POD data structure.
                        let account = anchor_lang::__private::bytemuck::from_bytes(data);
                        // Copy out the bytes into a new, owned data structure.
                        Ok(*account)
                    }
                }
            }
        } else {
            quote! {
                #[derive(AnchorSerialize, AnchorDeserialize, Clone)]
                #account_strct

                impl anchor_lang::AccountSerialize for #account_name {
                    fn try_serialize<W: std::io::Write>(&self, writer: &mut W) -> std::result::Result<(), ProgramError> {
                        writer.write_all(&#discriminator).map_err(|_| ProgramError::InvalidAccountData)?;
                        AnchorSerialize::serialize(self, writer)
                            .map_err(|_| ProgramError::InvalidAccountData)?;
                        Ok(())
                    }
                }

                impl anchor_lang::AccountDeserialize for #account_name {
                    fn try_deserialize(buf: &mut &[u8]) -> std::result::Result<Self, ProgramError> {
                        if buf.len() < #discriminator.len() {
                            return Err(ProgramError::AccountDataTooSmall);
                        }
                        let given_disc = &buf[..8];
                        if &#discriminator != given_disc {
                            return Err(ProgramError::InvalidInstructionData);
                        }
                        Self::try_deserialize_unchecked(buf)
                    }

                    fn try_deserialize_unchecked(buf: &mut &[u8]) -> std::result::Result<Self, ProgramError> {
                        let mut data: &[u8] = &buf[8..];
                        AnchorDeserialize::deserialize(&mut data)
                            .map_err(|_| ProgramError::InvalidAccountData)
                    }
                }

                impl anchor_lang::Discriminator for #account_name {
                    fn discriminator() -> [u8; 8] {
                        #discriminator
                    }
                }
            }
        }
    })
}
/// Extends the `#[account]` attribute to allow one to create associated token
/// Extends the `#[account]` attribute to allow one to create associated
/// accounts. This includes a `Default` implementation (which means all fields
/// in an `#[associated]` struct must implement `Default`) and an
/// `anchor_lang::Bump` trait implementation, which allows the account to be
/// used as a program derived address.
///
/// # Zero Copy Deserialization
///
/// Similar to the `#[account]` attribute one can enable zero copy
/// deserialization by using the `zero_copy` argument:
///
/// ```ignore
/// #[associated(zero_copy)]
/// ```
///
/// For more, see the [`account`](./attr.account.html) attribute.
#[proc_macro_attribute]
pub fn associated(
_args: proc_macro::TokenStream,
args: proc_macro::TokenStream,
input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let mut account_strct = parse_macro_input!(input as syn::ItemStruct);
@ -132,8 +210,9 @@ pub fn associated(
_ => panic!("Fields must be named"),
}
let args: proc_macro2::TokenStream = args.into();
proc_macro::TokenStream::from(quote! {
#[anchor_lang::account]
#[anchor_lang::account(#args)]
#[derive(Default)]
#account_strct
@ -144,3 +223,87 @@ pub fn associated(
}
})
}
#[proc_macro_derive(ZeroCopyAccessor, attributes(accessor))]
pub fn derive_zero_copy_accessor(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let account_strct = parse_macro_input!(item as syn::ItemStruct);
let account_name = &account_strct.ident;
let fields = match &account_strct.fields {
syn::Fields::Named(n) => n,
_ => panic!("Fields must be named"),
};
let methods: Vec<proc_macro2::TokenStream> = fields
.named
.iter()
.filter_map(|field: &syn::Field| {
field
.attrs
.iter()
.filter(|attr| {
let name = anchor_syn::parser::tts_to_string(&attr.path);
if name != "accessor" {
return false;
}
return true;
})
.next()
.map(|attr| {
let mut tts = attr.tokens.clone().into_iter();
let g_stream = match tts.next().expect("Must have a token group") {
proc_macro2::TokenTree::Group(g) => g.stream(),
_ => panic!("Invalid syntax"),
};
let accessor_ty = match g_stream.into_iter().next() {
Some(token) => token,
_ => panic!("Missing accessor type"),
};
let field_name = field.ident.as_ref().unwrap();
let get_field: proc_macro2::TokenStream =
format!("get_{}", field_name.to_string()).parse().unwrap();
let set_field: proc_macro2::TokenStream =
format!("set_{}", field_name.to_string()).parse().unwrap();
quote! {
pub fn #get_field(&self) -> #accessor_ty {
anchor_lang::__private::ZeroCopyAccessor::get(&self.#field_name)
}
pub fn #set_field(&mut self, input: &#accessor_ty) {
self.#field_name = anchor_lang::__private::ZeroCopyAccessor::set(input);
}
}
})
})
.collect();
proc_macro::TokenStream::from(quote! {
impl #account_name {
#(#methods)*
}
})
}
/// A data structure that can be used as an internal field for a zero copy
/// deserialized account, i.e., a struct marked with `#[account(zero_copy)]`.
///
/// This is just a convenient alias for
///
/// ```ignore
/// #[derive(Copy, Clone)]
/// #[repr(packed)]
/// struct MyStruct {...}
/// ```
#[proc_macro_attribute]
pub fn zero_copy(
_args: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let account_strct = parse_macro_input!(item as syn::ItemStruct);
proc_macro::TokenStream::from(quote! {
#[derive(anchor_lang::__private::ZeroCopyAccessor, Copy, Clone)]
#[repr(packed)]
#account_strct
})
}

View File

@ -23,6 +23,7 @@
extern crate self as anchor_lang;
use bytemuck::{Pod, Zeroable};
use solana_program::account_info::AccountInfo;
use solana_program::instruction::AccountMeta;
use solana_program::program_error::ProgramError;
@ -38,28 +39,21 @@ mod ctor;
mod error;
#[doc(hidden)]
pub mod idl;
mod loader;
mod program_account;
mod state;
mod sysvar;
mod vec;
// Internal module used by macros.
#[doc(hidden)]
pub mod __private {
pub use crate::ctor::Ctor;
pub use crate::error::Error;
pub use anchor_attribute_event::EventIndex;
pub use base64;
}
pub use crate::context::{Context, CpiContext, CpiStateContext};
pub use crate::cpi_account::CpiAccount;
pub use crate::cpi_state::CpiState;
pub use crate::loader::Loader;
pub use crate::program_account::ProgramAccount;
pub use crate::state::ProgramState;
pub use crate::sysvar::Sysvar;
pub use anchor_attribute_access_control::access_control;
pub use anchor_attribute_account::{account, associated};
pub use anchor_attribute_account::{account, associated, zero_copy};
pub use anchor_attribute_error::error;
pub use anchor_attribute_event::{emit, event};
pub use anchor_attribute_interface::interface;
@ -172,6 +166,9 @@ pub trait AccountDeserialize: Sized {
fn try_deserialize_unchecked(buf: &mut &[u8]) -> Result<Self, ProgramError>;
}
/// An account data structure capable of zero copy deserialization.
pub trait ZeroCopy: Discriminator + Copy + Clone + Zeroable + Pod {}
/// Calculates the data for an instruction invocation, where the data is
/// `Sha256(<namespace>::<method_name>)[..8] || BorshSerialize(args)`.
/// `args` is a borsh serialized struct of named fields for each argument given
@ -215,10 +212,10 @@ pub trait Bump {
pub mod prelude {
pub use super::{
access_control, account, associated, emit, error, event, interface, program, state,
AccountDeserialize, AccountSerialize, Accounts, AccountsExit, AccountsInit,
zero_copy, AccountDeserialize, AccountSerialize, Accounts, AccountsExit, AccountsInit,
AnchorDeserialize, AnchorSerialize, Context, CpiAccount, CpiContext, CpiState,
CpiStateContext, ProgramAccount, ProgramState, Sysvar, ToAccountInfo, ToAccountInfos,
ToAccountMetas,
CpiStateContext, Loader, ProgramAccount, ProgramState, Sysvar, ToAccountInfo,
ToAccountInfos, ToAccountMetas,
};
pub use borsh;
@ -241,3 +238,31 @@ pub mod prelude {
pub use solana_program::sysvar::Sysvar as SolanaSysvar;
pub use thiserror;
}
// Internal module used by macros.
#[doc(hidden)]
pub mod __private {
use solana_program::pubkey::Pubkey;
pub use crate::ctor::Ctor;
pub use crate::error::Error;
pub use anchor_attribute_account::ZeroCopyAccessor;
pub use anchor_attribute_event::EventIndex;
pub use base64;
pub use bytemuck;
// Very experimental trait.
pub trait ZeroCopyAccessor<Ty> {
fn get(&self) -> Ty;
fn set(input: &Ty) -> Self;
}
impl ZeroCopyAccessor<Pubkey> for [u8; 32] {
fn get(&self) -> Pubkey {
Pubkey::new(self)
}
fn set(input: &Pubkey) -> [u8; 32] {
input.to_bytes()
}
}
}

198
lang/src/loader.rs Normal file
View File

@ -0,0 +1,198 @@
use crate::{
Accounts, AccountsExit, AccountsInit, ToAccountInfo, ToAccountInfos, ToAccountMetas, ZeroCopy,
};
use solana_program::account_info::AccountInfo;
use solana_program::entrypoint::ProgramResult;
use solana_program::instruction::AccountMeta;
use solana_program::program_error::ProgramError;
use solana_program::pubkey::Pubkey;
use std::cell::{Ref, RefMut};
use std::io::Write;
use std::marker::PhantomData;
use std::ops::DerefMut;
/// Account loader facilitating on demand zero copy deserialization.
/// Note that using accounts in this way is distinctly different from using,
/// for example, the [`ProgramAccount`](./struct.ProgramAccount.html). Namely,
/// one must call `load`, `load_mut`, or `load_init`, before reading or writing
/// to the account. For more details on zero-copy-deserialization, see the
/// [`account`](./attr.account.html) attribute.
///
/// When using the `Loader`, it's important to be mindful of any calls to
/// `load` so as not to induce a `RefCell` panic, especially when sharing
/// accounts across CPI boundaries. When in doubt, make sure all refs
/// resulting from a call to `load` are dropped before CPI.
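///
/// As a rough sketch (mirroring the `Foo` account used in the zero-copy
/// example elsewhere in this change), access looks like:
///
/// ```ignore
/// // When creating the account (the discriminator must still be zero):
/// {
///     let mut foo = ctx.accounts.foo.load_init()?;
///     foo.authority = *ctx.accounts.authority.key;
/// }
///
/// // In a later instruction, to mutate:
/// {
///     let mut foo = ctx.accounts.foo.load_mut()?;
///     foo.data = 42;
/// }
///
/// // Read-only access; drop the `Ref` before any CPI touching the account.
/// let foo = ctx.accounts.foo.load()?;
/// let data = foo.data;
/// drop(foo);
/// ```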
pub struct Loader<'info, T: ZeroCopy> {
acc_info: AccountInfo<'info>,
phantom: PhantomData<&'info T>,
}
impl<'info, T: ZeroCopy> Loader<'info, T> {
fn new(acc_info: AccountInfo<'info>) -> Loader<'info, T> {
Self {
acc_info,
phantom: PhantomData,
}
}
/// Constructs a new `Loader` from a previously initialized account.
#[inline(never)]
pub fn try_from(acc_info: &AccountInfo<'info>) -> Result<Loader<'info, T>, ProgramError> {
let data: &[u8] = &acc_info.try_borrow_data()?;
// Discriminator must match.
let mut disc_bytes = [0u8; 8];
disc_bytes.copy_from_slice(&data[..8]);
if disc_bytes != T::discriminator() {
return Err(ProgramError::InvalidAccountData);
}
Ok(Loader::new(acc_info.clone()))
}
/// Constructs a new `Loader` from an uninitialized account.
#[inline(never)]
pub fn try_from_init(acc_info: &AccountInfo<'info>) -> Result<Loader<'info, T>, ProgramError> {
let data = acc_info.try_borrow_data()?;
// The discriminator should be zero, since we're initializing.
let mut disc_bytes = [0u8; 8];
disc_bytes.copy_from_slice(&data[..8]);
let discriminator = u64::from_le_bytes(disc_bytes);
if discriminator != 0 {
return Err(ProgramError::InvalidAccountData);
}
Ok(Loader::new(acc_info.clone()))
}
/// Returns a Ref to the account data structure for reading.
pub fn load(&self) -> Result<Ref<T>, ProgramError> {
let data = self.acc_info.try_borrow_data()?;
let mut disc_bytes = [0u8; 8];
disc_bytes.copy_from_slice(&data[..8]);
if disc_bytes != T::discriminator() {
return Err(ProgramError::InvalidAccountData);
}
Ok(Ref::map(data, |data| bytemuck::from_bytes(&data[8..])))
}
/// Returns a `RefMut` to the account data structure for reading or writing.
pub fn load_mut(&self) -> Result<RefMut<T>, ProgramError> {
// AccountInfo api allows you to borrow mut even if the account isn't
// writable, so add this check for a better dev experience.
if !self.acc_info.is_writable {
return Err(ProgramError::Custom(87)); // todo: proper error
}
let data = self.acc_info.try_borrow_mut_data()?;
let mut disc_bytes = [0u8; 8];
disc_bytes.copy_from_slice(&data[..8]);
if disc_bytes != T::discriminator() {
return Err(ProgramError::InvalidAccountData);
}
Ok(RefMut::map(data, |data| {
bytemuck::from_bytes_mut(&mut data.deref_mut()[8..])
}))
}
/// Returns a `RefMut` to the account data structure for reading or writing.
/// Should only be called once, when the account is being initialized.
pub fn load_init(&self) -> Result<RefMut<T>, ProgramError> {
// AccountInfo api allows you to borrow mut even if the account isn't
// writable, so add this check for a better dev experience.
if !self.acc_info.is_writable {
return Err(ProgramError::Custom(87)); // todo: proper error
}
let data = self.acc_info.try_borrow_mut_data()?;
// The discriminator should be zero, since we're initializing.
let mut disc_bytes = [0u8; 8];
disc_bytes.copy_from_slice(&data[..8]);
let discriminator = u64::from_le_bytes(disc_bytes);
if discriminator != 0 {
return Err(ProgramError::InvalidAccountData);
}
Ok(RefMut::map(data, |data| {
bytemuck::from_bytes_mut(&mut data.deref_mut()[8..])
}))
}
}
impl<'info, T: ZeroCopy> Accounts<'info> for Loader<'info, T> {
#[inline(never)]
fn try_accounts(
program_id: &Pubkey,
accounts: &mut &[AccountInfo<'info>],
) -> Result<Self, ProgramError> {
if accounts.is_empty() {
return Err(ProgramError::NotEnoughAccountKeys);
}
let account = &accounts[0];
*accounts = &accounts[1..];
let l = Loader::try_from(account)?;
if l.acc_info.owner != program_id {
return Err(ProgramError::Custom(1)); // todo: proper error
}
Ok(l)
}
}
impl<'info, T: ZeroCopy> AccountsInit<'info> for Loader<'info, T> {
#[inline(never)]
fn try_accounts_init(
program_id: &Pubkey,
accounts: &mut &[AccountInfo<'info>],
) -> Result<Self, ProgramError> {
if accounts.is_empty() {
return Err(ProgramError::NotEnoughAccountKeys);
}
let account = &accounts[0];
*accounts = &accounts[1..];
let l = Loader::try_from_init(account)?;
if l.acc_info.owner != program_id {
return Err(ProgramError::Custom(1)); // todo: proper error
}
Ok(l)
}
}
impl<'info, T: ZeroCopy> AccountsExit<'info> for Loader<'info, T> {
// The account *cannot* be loaded when this is called.
fn exit(&self, _program_id: &Pubkey) -> ProgramResult {
let mut data = self.acc_info.try_borrow_mut_data()?;
let dst: &mut [u8] = &mut data;
let mut cursor = std::io::Cursor::new(dst);
cursor.write_all(&T::discriminator()).unwrap();
Ok(())
}
}
impl<'info, T: ZeroCopy> ToAccountMetas for Loader<'info, T> {
fn to_account_metas(&self, is_signer: Option<bool>) -> Vec<AccountMeta> {
let is_signer = is_signer.unwrap_or(self.acc_info.is_signer);
let meta = match self.acc_info.is_writable {
false => AccountMeta::new_readonly(*self.acc_info.key, is_signer),
true => AccountMeta::new(*self.acc_info.key, is_signer),
};
vec![meta]
}
}
impl<'info, T: ZeroCopy> ToAccountInfos<'info> for Loader<'info, T> {
fn to_account_infos(&self) -> Vec<AccountInfo<'info>> {
vec![self.acc_info.clone()]
}
}
impl<'info, T: ZeroCopy> ToAccountInfo<'info> for Loader<'info, T> {
fn to_account_info(&self) -> AccountInfo<'info> {
self.acc_info.clone()
}
}

View File

@ -90,7 +90,7 @@ where
{
#[inline(never)]
fn try_accounts_init(
_program_id: &Pubkey,
program_id: &Pubkey,
accounts: &mut &[AccountInfo<'info>],
) -> Result<Self, ProgramError> {
if accounts.is_empty() {
@ -98,7 +98,11 @@ where
}
let account = &accounts[0];
*accounts = &accounts[1..];
ProgramAccount::try_from_init(account)
let pa = ProgramAccount::try_from_init(account)?;
if pa.inner.info.owner != program_id {
return Err(ProgramError::Custom(1)); // todo: proper error
}
Ok(pa)
}
}

View File

@ -399,8 +399,12 @@ pub fn generate_constraint_belongs_to(
) -> proc_macro2::TokenStream {
let target = c.join_target.clone();
let ident = &f.ident;
let field = match &f.ty {
Ty::Loader(_) => quote! {#ident.load()?},
_ => quote! {#ident},
};
quote! {
if &#ident.#target != #target.to_account_info().key {
if &#field.#target != #target.to_account_info().key {
return Err(anchor_lang::solana_program::program_error::ProgramError::Custom(1)); // todo: error codes
}
}
@ -454,6 +458,7 @@ pub fn generate_constraint_rent_exempt(
let info = match f.ty {
Ty::AccountInfo => quote! { #ident },
Ty::ProgramAccount(_) => quote! { #ident.to_account_info() },
Ty::Loader(_) => quote! { #ident.to_account_info() },
_ => panic!("Invalid syntax: rent exemption cannot be specified."),
};
match c {
@ -497,7 +502,7 @@ pub fn generate_constraint_state(f: &Field, c: &ConstraintState) -> proc_macro2:
let ident = &f.ident;
let account_ty = match &f.ty {
Ty::CpiState(ty) => &ty.account_ident,
_ => panic!("Invalid syntax"),
_ => panic!("Invalid state constraint"),
};
quote! {
// Checks the given state account is the canonical state account for
@ -517,15 +522,28 @@ pub fn generate_constraint_associated(
) -> proc_macro2::TokenStream {
let associated_target = c.associated_target.clone();
let field = &f.ident;
let account_ty = match &f.ty {
Ty::ProgramAccount(ty) => &ty.account_ident,
_ => panic!("Invalid syntax"),
let (account_ty, is_zero_copy) = match &f.ty {
Ty::ProgramAccount(ty) => (&ty.account_ident, false),
Ty::Loader(ty) => (&ty.account_ident, true),
_ => panic!("Invalid associated constraint"),
};
let space = match &f.space {
None => quote! {
let space = 8 + #account_ty::default().try_to_vec().unwrap().len();
// If no explicit space param was given, serialize the type to bytes
// and take the length (adding 8 bytes for the discriminator).
None => match is_zero_copy {
false => {
quote! {
let space = 8 + #account_ty::default().try_to_vec().unwrap().len();
}
}
true => {
quote! {
let space = 8 + anchor_lang::__private::bytemuck::bytes_of(&#account_ty::default()).len();
}
}
},
// Explicit account size given. Use it.
Some(s) => quote! {
let space = #s;
},
@ -579,8 +597,24 @@ pub fn generate_constraint_associated(
}
};
let account_wrapper_ty = match is_zero_copy {
false => quote! {
anchor_lang::ProgramAccount
},
true => quote! {
anchor_lang::Loader
},
};
let nonce_assignment = match is_zero_copy {
false => quote! {},
// Zero copy accounts are not deserialized up front, so the data must be lazily loaded.
true => quote! {
.load_init()?
},
};
quote! {
let #field: anchor_lang::ProgramAccount<#account_ty> = {
let #field: #account_wrapper_ty<#account_ty> = {
#space
#payer
@ -617,10 +651,10 @@ pub fn generate_constraint_associated(
})?;
// For now, we assume all accounts created with the `associated`
// attribute have a `nonce` field in their account.
let mut pa: anchor_lang::ProgramAccount<#account_ty> = anchor_lang::ProgramAccount::try_from_init(
let mut pa: #account_wrapper_ty<#account_ty> = #account_wrapper_ty::try_from_init(
&#field,
)?;
pa.__nonce = nonce;
pa#nonce_assignment.__nonce = nonce;
pa
};
}

View File

@ -128,6 +128,7 @@ pub enum IdlType {
Defined(String),
Option(Box<IdlType>),
Vec(Box<IdlType>),
Array(Box<IdlType>, usize),
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@ -140,7 +141,6 @@ impl std::str::FromStr for IdlType {
// Eliminate whitespace.
let mut s = s.to_string();
s.retain(|c| !c.is_whitespace());
let r = match s.as_str() {
"bool" => IdlType::Bool,
"u8" => IdlType::U8,
@ -158,7 +158,17 @@ impl std::str::FromStr for IdlType {
"Pubkey" => IdlType::PublicKey,
_ => match s.to_string().strip_prefix("Option<") {
None => match s.to_string().strip_prefix("Vec<") {
None => IdlType::Defined(s.to_string()),
None => match s.to_string().strip_prefix("[") {
None => IdlType::Defined(s.to_string()),
Some(inner) => {
let inner = &inner[..inner.len() - 1];
let mut parts = inner.split(";");
let ty = IdlType::from_str(parts.next().unwrap()).unwrap();
let len = parts.next().unwrap().parse::<usize>().unwrap();
assert!(parts.next().is_none());
IdlType::Array(Box::new(ty), len)
}
},
Some(inner) => {
let inner_ty = Self::from_str(
inner

View File

@ -204,6 +204,12 @@ impl Field {
ProgramAccount<#account>
}
}
Ty::Loader(ty) => {
let account = &ty.account_ident;
quote! {
Loader<#account>
}
}
Ty::CpiAccount(ty) => {
let account = &ty.account_ident;
quote! {
@ -242,6 +248,7 @@ pub enum Ty {
ProgramState(ProgramStateTy),
CpiState(CpiStateTy),
ProgramAccount(ProgramAccountTy),
Loader(LoaderTy),
CpiAccount(CpiAccountTy),
Sysvar(SysvarTy),
}
@ -282,6 +289,12 @@ pub struct CpiAccountTy {
pub account_ident: syn::Ident,
}
#[derive(Debug, PartialEq)]
pub struct LoaderTy {
// The struct type of the account.
pub account_ident: syn::Ident,
}
// An access control constraint for an account.
#[derive(Debug)]
pub enum Constraint {

View File

@ -2,7 +2,7 @@ use crate::{
AccountField, AccountsStruct, CompositeField, Constraint, ConstraintAssociated,
ConstraintBelongsTo, ConstraintExecutable, ConstraintLiteral, ConstraintOwner,
ConstraintRentExempt, ConstraintSeeds, ConstraintSigner, ConstraintState, CpiAccountTy,
CpiStateTy, Field, ProgramAccountTy, ProgramStateTy, SysvarTy, Ty,
CpiStateTy, Field, LoaderTy, ProgramAccountTy, ProgramStateTy, SysvarTy, Ty,
};
pub fn parse(strct: &syn::ItemStruct) -> AccountsStruct {
@ -72,7 +72,7 @@ fn parse_field(f: &syn::Field, anchor: Option<&syn::Attribute>) -> AccountField
fn is_field_primitive(f: &syn::Field) -> bool {
match ident_string(f).as_str() {
"ProgramState" | "ProgramAccount" | "CpiAccount" | "Sysvar" | "AccountInfo"
| "CpiState" => true,
| "CpiState" | "Loader" => true,
_ => false,
}
}
@ -89,6 +89,7 @@ fn parse_ty(f: &syn::Field) -> Ty {
"CpiAccount" => Ty::CpiAccount(parse_cpi_account(&path)),
"Sysvar" => Ty::Sysvar(parse_sysvar(&path)),
"AccountInfo" => Ty::AccountInfo,
"Loader" => Ty::Loader(parse_program_account_zero_copy(&path)),
_ => panic!("invalid account type"),
}
}
@ -124,6 +125,11 @@ fn parse_program_account(path: &syn::Path) -> ProgramAccountTy {
ProgramAccountTy { account_ident }
}
fn parse_program_account_zero_copy(path: &syn::Path) -> LoaderTy {
let account_ident = parse_account(path);
LoaderTy { account_ident }
}
fn parse_account(path: &syn::Path) -> syn::Ident {
let segments = &path.segments[0];
match &segments.arguments {

View File

@ -20,7 +20,7 @@
"prepublishOnly": "yarn build"
},
"dependencies": {
"@project-serum/borsh": "^0.1.0",
"@project-serum/borsh": "^0.1.1",
"@solana/web3.js": "^1.2.0",
"@types/bn.js": "^4.11.6",
"@types/bs58": "^4.0.1",

View File

@ -1,6 +1,6 @@
import camelCase from "camelcase";
import { snakeCase } from "snake-case";
import { Layout } from "buffer-layout";
import { Layout, seq } from "buffer-layout";
import * as sha256 from "js-sha256";
import * as borsh from "@project-serum/borsh";
import {
@ -349,6 +349,20 @@ class IdlCoder {
throw new IdlError(`Type not found: ${JSON.stringify(field)}`);
}
return IdlCoder.typeDefLayout(filtered[0], types, fieldName);
// @ts-ignore
} else if (field.type.array) {
// @ts-ignore
let arrayTy = field.type.array[0];
// @ts-ignore
let arrayLen = field.type.array[1];
let innerLayout = IdlCoder.fieldLayout(
{
name: undefined,
type: arrayTy,
},
types
);
return borsh.array(innerLayout, arrayLen, fieldName);
} else {
throw new Error(`Not yet implemented: ${field}`);
}
@ -464,6 +478,15 @@ function typeSize(idl: Idl, ty: IdlType): number {
return accountSize(idl, typeDef);
}
// @ts-ignore
if (ty.array !== undefined) {
// @ts-ignore
let arrayTy = ty.array[0];
// @ts-ignore
let arraySize = ty.array[1];
// @ts-ignore
return typeSize(idl, arrayTy) * arraySize;
}
throw new Error(`Invalid type ${JSON.stringify(ty)}`);
}
}

View File

@ -654,10 +654,10 @@
"@nodelib/fs.scandir" "2.1.4"
fastq "^1.6.0"
"@project-serum/borsh@^0.1.0":
version "0.1.0"
resolved "https://registry.yarnpkg.com/@project-serum/borsh/-/borsh-0.1.0.tgz#cdbff90d06901f8206afb6e1998e5c45aae0aea7"
integrity sha512-AWZ/cjThXmb7o2/fMocc8/VaEsqH29yXEwdHnzTXzglxg1vLPZXpBHqGuPfonSfbd7WszgnGXAIHc+9artwMGg==
"@project-serum/borsh@^0.1.1":
version "0.1.1"
resolved "https://registry.yarnpkg.com/@project-serum/borsh/-/borsh-0.1.1.tgz#a810aad74e4b458fbd1ab63188f175bee765a8af"
integrity sha512-bzCea8KTyc7CNkcMn0V2HW/FdU9pnJUcVRrwLqvJkYeo+mhkXE4AxxWErN3q+UxhEm8ypGIX1OYTKJaTJvj5cQ==
dependencies:
bn.js "^5.1.2"
buffer-layout "^1.2.0"