diff --git a/CHANGELOG.md b/CHANGELOG.md index 28bbc284..509754a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,18 @@ incremented for features. ## [Unreleased] +### Features + +* cli: Adds a [registry] section in the Anchor toml ([#570](https://github.com/project-serum/anchor/pull/570)). +* cli: Adds the anchor login command ([#570](https://github.com/project-serum/anchor/pull/570)). +* cli: Adds the anchor publish command ([#570](https://github.com/project-serum/anchor/pull/570)). +* cli: Adds a root level anchor_version field to the Anchor.toml for specifying the cli version to build with for verifiable build ([#570](https://github.com/project-serum/anchor/pull/570)). + +### Breaking + +* cli: `[clusters.]` Anchor.toml section has been renamed to `[programs.]` ([#570](https://github.com/project-serum/anchor/pull/570)). +* cli: `[workspace]` member and exclude arrays must now be filepaths relative to the workspace root ([#570](https://github.com/project-serum/anchor/pull/570)). 
+ ## [0.12.0] - 2021-08-03 ### Features diff --git a/Cargo.lock b/Cargo.lock index a2fbd119..4f5c4045 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -140,11 +140,14 @@ dependencies = [ "anchor-lang", "anchor-syn", "anyhow", + "cargo_toml", "clap 3.0.0-beta.2", "dirs", "flate2", "heck", + "pathdiff", "rand 0.7.3", + "reqwest", "serde", "serde_json", "serum-common", @@ -153,6 +156,8 @@ dependencies = [ "solana-program", "solana-sdk", "syn 1.0.67", + "tar", + "tokio 1.4.0", "toml", ] @@ -549,6 +554,17 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "cargo_toml" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3596addfb02dcdc06f5252ddda9f3785f9230f5827fb4284645240fa05ad92" +dependencies = [ + "serde", + "serde_derive", + "toml", +] + [[package]] name = "cc" version = "1.0.68" @@ -1542,6 +1558,19 @@ dependencies = [ "webpki", ] +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes 1.0.1", + "hyper", + "native-tls", + "tokio 1.4.0", + "tokio-native-tls", +] + [[package]] name = "idna" version = "0.2.2" @@ -1827,6 +1856,16 @@ version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" +[[package]] +name = "mime_guess" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212" +dependencies = [ + "mime", + "unicase", +] + [[package]] name = "miniz_oxide" version = "0.4.4" @@ -2185,6 +2224,12 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "pathdiff" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877630b3de15c0b64cc52f659345724fbf6bdad9bd9566699fc53688f3c34a34" + [[package]] name = 
"pbkdf2" version = "0.4.0" @@ -2524,9 +2569,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.2" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf12057f289428dbf5c591c74bf10392e4a8003f993405a902f20117019022d4" +checksum = "246e9f61b9bb77df069a947682be06e31ac43ea37862e244a69f177694ea6d22" dependencies = [ "base64 0.13.0", "bytes 1.0.1", @@ -2537,11 +2582,14 @@ dependencies = [ "http-body", "hyper", "hyper-rustls", + "hyper-tls", "ipnet", "js-sys", "lazy_static", "log", "mime", + "mime_guess", + "native-tls", "percent-encoding", "pin-project-lite", "rustls", @@ -2549,6 +2597,7 @@ dependencies = [ "serde_json", "serde_urlencoded", "tokio 1.4.0", + "tokio-native-tls", "tokio-rustls", "url", "wasm-bindgen", @@ -3592,9 +3641,9 @@ dependencies = [ [[package]] name = "tar" -version = "0.4.33" +version = "0.4.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0bcfbd6a598361fda270d82469fff3d65089dc33e175c9a131f7b4cd395f228" +checksum = "7d779dc6aeff029314570f666ec83f19df7280bb36ef338442cfa8c604021b80" dependencies = [ "filetime", "libc", @@ -3832,6 +3881,16 @@ dependencies = [ "syn 1.0.67", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" +dependencies = [ + "native-tls", + "tokio 1.4.0", +] + [[package]] name = "tokio-reactor" version = "0.1.12" @@ -4035,6 +4094,15 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +dependencies = [ + "version_check", +] + [[package]] name = "unicode-bidi" version = "0.3.4" diff --git 
a/cli/Cargo.toml b/cli/Cargo.toml index f64816db..c9e8dfa6 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -6,7 +6,7 @@ edition = "2018" [[bin]] name = "anchor" -path = "src/main.rs" +path = "src/bin/main.rs" [features] dev = [] @@ -31,3 +31,8 @@ dirs = "3.0" heck = "0.3.1" flate2 = "1.0.19" rand = "0.7.3" +tar = "0.4.35" +reqwest = { version = "0.11.4", features = ["multipart", "blocking"] } +tokio = "1.0" +pathdiff = "0.2.0" +cargo_toml = "0.9.2" \ No newline at end of file diff --git a/cli/src/bin/main.rs b/cli/src/bin/main.rs new file mode 100644 index 00000000..b0684984 --- /dev/null +++ b/cli/src/bin/main.rs @@ -0,0 +1,7 @@ +use anchor_cli::Opts; +use anyhow::Result; +use clap::Clap; + +fn main() -> Result<()> { + anchor_cli::entry(Opts::parse()) +} diff --git a/cli/src/config.rs b/cli/src/config.rs index 8a5310a3..b8d730ca 100644 --- a/cli/src/config.rs +++ b/cli/src/config.rs @@ -1,7 +1,7 @@ -use crate::ConfigOverride; use anchor_client::Cluster; use anchor_syn::idl::Idl; use anyhow::{anyhow, Error, Result}; +use clap::Clap; use serde::{Deserialize, Serialize}; use solana_sdk::pubkey::Pubkey; use solana_sdk::signature::Keypair; @@ -13,13 +13,138 @@ use std::path::Path; use std::path::PathBuf; use std::str::FromStr; +#[derive(Default, Debug, Clap)] +pub struct ConfigOverride { + /// Cluster override. + #[clap(global = true, long = "provider.cluster")] + pub cluster: Option, + /// Wallet override. + #[clap(global = true, long = "provider.wallet")] + pub wallet: Option, +} + +pub struct WithPath { + inner: T, + path: PathBuf, +} + +impl WithPath { + pub fn new(inner: T, path: PathBuf) -> Self { + Self { inner, path } + } + + pub fn path(&self) -> &PathBuf { + &self.path + } + + pub fn into_inner(self) -> T { + self.inner + } +} + +impl std::convert::AsRef for WithPath { + fn as_ref(&self) -> &T { + &self.inner + } +} + +impl WithPath { + pub fn get_program_list(&self) -> Result> { + // Canonicalize the workspace filepaths to compare with relative paths. 
+ let (members, exclude) = self.canonicalize_workspace()?; + + // Get all candidate programs. + // + // If [workspace.members] exists, then use that. + // Otherwise, default to `programs/*`. + let program_paths: Vec = { + if members.is_empty() { + let path = self.path().parent().unwrap().join("programs"); + fs::read_dir(path)? + .map(|dir| dir.map(|d| d.path().canonicalize().unwrap())) + .collect::>>() + .into_iter() + .collect::, std::io::Error>>()? + } else { + members + } + }; + + // Filter out everything part of the exclude array. + Ok(program_paths + .into_iter() + .filter(|m| !exclude.contains(m)) + .collect()) + } + + // TODO: this should read idl dir instead of parsing source. + pub fn read_all_programs(&self) -> Result> { + let mut r = vec![]; + for path in self.get_program_list()? { + let idl = anchor_syn::idl::file::parse(path.join("src/lib.rs"))?; + let lib_name = extract_lib_name(&path.join("Cargo.toml"))?; + r.push(Program { + lib_name, + path, + idl, + }); + } + Ok(r) + } + + pub fn canonicalize_workspace(&self) -> Result<(Vec, Vec)> { + let members = self + .workspace + .members + .iter() + .map(|m| PathBuf::from(m).canonicalize().unwrap()) + .collect(); + let exclude = self + .workspace + .exclude + .iter() + .map(|m| PathBuf::from(m).canonicalize().unwrap()) + .collect(); + Ok((members, exclude)) + } +} + +impl std::ops::Deref for WithPath { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +impl std::ops::DerefMut for WithPath { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} + #[derive(Debug, Default)] pub struct Config { + pub anchor_version: Option, + pub solana_version: Option, + pub registry: RegistryConfig, pub provider: ProviderConfig, - pub clusters: ClustersConfig, + pub programs: ProgramsConfig, pub scripts: ScriptsConfig, - pub test: Option, pub workspace: WorkspaceConfig, + pub test: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct RegistryConfig { + pub 
url: String, +} + +impl Default for RegistryConfig { + fn default() -> Self { + Self { + url: "https://anchor.projectserum.com".to_string(), + } + } } #[derive(Debug, Default)] @@ -30,7 +155,7 @@ pub struct ProviderConfig { pub type ScriptsConfig = BTreeMap; -pub type ClustersConfig = BTreeMap>; +pub type ProgramsConfig = BTreeMap>; #[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct WorkspaceConfig { @@ -41,11 +166,19 @@ pub struct WorkspaceConfig { } impl Config { + pub fn docker(&self) -> String { + let ver = self + .anchor_version + .clone() + .unwrap_or_else(|| crate::DOCKER_BUILDER_VERSION.to_string()); + format!("projectserum/build:v{}", ver) + } + pub fn discover( cfg_override: &ConfigOverride, - ) -> Result)>> { + ) -> Result, Option)>> { Config::_discover().map(|opt| { - opt.map(|(mut cfg, cfg_path, cargo_toml)| { + opt.map(|(mut cfg, cargo_toml)| { if let Some(cluster) = cfg_override.cluster.clone() { cfg.provider.cluster = cluster; } @@ -53,13 +186,13 @@ impl Config { if let Some(wallet) = cfg_override.wallet.clone() { cfg.provider.wallet = wallet; } - (cfg, cfg_path, cargo_toml) + (cfg, cargo_toml) }) }) } // Searches all parent directories for an Anchor.toml file. - fn _discover() -> Result)>> { + fn _discover() -> Result, Option)>> { // Set to true if we ever see a Cargo.toml file when traversing the // parent directories. let mut cargo_toml = None; @@ -87,12 +220,17 @@ impl Config { } } - if let Some((cfg, parent)) = anchor_toml { - return Ok(Some((cfg, parent, cargo_toml))); + // Set the Cargo.toml if it's for a single package, i.e., not the + // root workspace Cargo.toml. 
+ if cargo_toml.is_none() && cargo_toml_level.is_some() { + let toml = cargo_toml::Manifest::from_path(cargo_toml_level.as_ref().unwrap())?; + if toml.workspace.is_none() { + cargo_toml = cargo_toml_level; + } } - if cargo_toml.is_none() { - cargo_toml = cargo_toml_level; + if let Some((cfg, parent)) = anchor_toml { + return Ok(Some((WithPath::new(cfg, parent), cargo_toml))); } cwd_opt = cwd.parent(); @@ -105,61 +243,17 @@ impl Config { solana_sdk::signature::read_keypair_file(&self.provider.wallet.to_string()) .map_err(|_| anyhow!("Unable to read keypair file")) } - - pub fn get_program_list(&self, path: PathBuf) -> Result> { - let mut programs = vec![]; - for f in fs::read_dir(path)? { - let path = f?.path(); - let program = path - .components() - .last() - .map(|c| c.as_os_str().to_string_lossy().into_owned()) - .expect("failed to get program from path"); - - match ( - self.workspace.members.is_empty(), - self.workspace.exclude.is_empty(), - ) { - (true, true) => programs.push(path), - (true, false) => { - if !self.workspace.exclude.contains(&program) { - programs.push(path); - } - } - (false, _) => { - if self.workspace.members.contains(&program) { - programs.push(path); - } - } - } - } - Ok(programs) - } - - // TODO: this should read idl dir instead of parsing source. - pub fn read_all_programs(&self) -> Result> { - let mut r = vec![]; - for path in self.get_program_list("programs".into())? { - let idl = anchor_syn::idl::file::parse(path.join("src/lib.rs"))?; - let lib_name = extract_lib_name(&path.join("Cargo.toml"))?; - r.push(Program { - lib_name, - path, - idl, - }); - } - Ok(r) - } } -// Pubkey serializes as a byte array so use this type a hack to serialize -// into base 58 strings. 
#[derive(Debug, Serialize, Deserialize)] struct _Config { + anchor_version: Option, + solana_version: Option, + registry: Option, provider: Provider, test: Option, scripts: Option, - clusters: Option>>, + programs: Option>>, workspace: Option, } @@ -171,8 +265,8 @@ struct Provider { impl ToString for Config { fn to_string(&self) -> String { - let clusters = { - let c = ser_clusters(&self.clusters); + let programs = { + let c = ser_programs(&self.programs); if c.is_empty() { None } else { @@ -180,6 +274,9 @@ impl ToString for Config { } }; let cfg = _Config { + anchor_version: self.anchor_version.clone(), + solana_version: self.solana_version.clone(), + registry: Some(self.registry.clone()), provider: Provider { cluster: format!("{}", self.provider.cluster), wallet: self.provider.wallet.to_string(), @@ -189,7 +286,7 @@ impl ToString for Config { true => None, false => Some(self.scripts.clone()), }, - clusters, + programs, workspace: (!self.workspace.members.is_empty() || !self.workspace.exclude.is_empty()) .then(|| self.workspace.clone()), }; @@ -205,34 +302,25 @@ impl FromStr for Config { let cfg: _Config = toml::from_str(s) .map_err(|e| anyhow::format_err!("Unable to deserialize config: {}", e.to_string()))?; Ok(Config { + anchor_version: cfg.anchor_version, + solana_version: cfg.solana_version, + registry: cfg.registry.unwrap_or_default(), provider: ProviderConfig { cluster: cfg.provider.cluster.parse()?, wallet: shellexpand::tilde(&cfg.provider.wallet).parse()?, }, scripts: cfg.scripts.unwrap_or_else(BTreeMap::new), test: cfg.test, - clusters: cfg.clusters.map_or(Ok(BTreeMap::new()), deser_clusters)?, - workspace: cfg.workspace.map(|workspace| { - let (members, exclude) = match (workspace.members.is_empty(), workspace.exclude.is_empty()) { - (true, true) => (vec![], vec![]), - (true, false) => (vec![], workspace.exclude), - (false, is_empty) => { - if !is_empty { - println!("Fields `members` and `exclude` in `[workspace]` section are not compatible, only 
`members` will be used."); - } - (workspace.members, vec![]) - } - }; - WorkspaceConfig { members, exclude } - }).unwrap_or_default() + programs: cfg.programs.map_or(Ok(BTreeMap::new()), deser_programs)?, + workspace: cfg.workspace.unwrap_or_default(), }) } } -fn ser_clusters( - clusters: &BTreeMap>, +fn ser_programs( + programs: &BTreeMap>, ) -> BTreeMap> { - clusters + programs .iter() .map(|(cluster, programs)| { let cluster = cluster.to_string(); @@ -249,11 +337,10 @@ fn ser_clusters( }) .collect::>>() } - -fn deser_clusters( - clusters: BTreeMap>, +fn deser_programs( + programs: BTreeMap>, ) -> Result>> { - clusters + programs .iter() .map(|(cluster, programs)| { let cluster: Cluster = cluster.parse()?; @@ -265,6 +352,7 @@ fn deser_clusters( ProgramDeployment::try_from(match &program_id { serde_json::Value::String(address) => _ProgramDeployment { address: address.parse()?, + path: None, idl: None, }, serde_json::Value::Object(_) => { @@ -294,25 +382,15 @@ pub struct GenesisEntry { pub program: String, } -pub fn extract_lib_name(path: impl AsRef) -> Result { - let mut toml = File::open(path)?; - let mut contents = String::new(); - toml.read_to_string(&mut contents)?; - - let cargo_toml: toml::Value = contents.parse()?; - - match cargo_toml { - toml::Value::Table(t) => match t.get("lib") { - None => Err(anyhow!("lib not found in Cargo.toml")), - Some(lib) => match lib - .get("name") - .ok_or_else(|| anyhow!("lib name not found in Cargo.toml"))? - { - toml::Value::String(n) => Ok(n.to_string()), - _ => Err(anyhow!("lib name must be a string")), - }, - }, - _ => Err(anyhow!("Invalid Cargo.toml")), +pub fn extract_lib_name(cargo_toml: impl AsRef) -> Result { + let cargo_toml = cargo_toml::Manifest::from_path(cargo_toml)?; + if cargo_toml.lib.is_some() && cargo_toml.lib.as_ref().unwrap().name.is_some() { + Ok(cargo_toml.lib.unwrap().name.unwrap()) + } else { + Ok(cargo_toml + .package + .ok_or_else(|| anyhow!("Package section not provided"))? 
+ .name) } } @@ -320,7 +398,7 @@ pub fn extract_lib_name(path: impl AsRef) -> Result { pub struct Program { pub lib_name: String, pub path: PathBuf, - pub idl: Idl, + pub idl: Option, } impl Program { @@ -343,6 +421,7 @@ impl Program { #[derive(Debug, Default)] pub struct ProgramDeployment { pub address: Pubkey, + pub path: Option, pub idl: Option, } @@ -351,6 +430,7 @@ impl TryFrom<_ProgramDeployment> for ProgramDeployment { fn try_from(pd: _ProgramDeployment) -> Result { Ok(ProgramDeployment { address: pd.address.parse()?, + path: pd.path, idl: pd.idl, }) } @@ -359,6 +439,7 @@ impl TryFrom<_ProgramDeployment> for ProgramDeployment { #[derive(Debug, Default, Serialize, Deserialize)] pub struct _ProgramDeployment { pub address: String, + pub path: Option, pub idl: Option, } @@ -366,6 +447,7 @@ impl From<&ProgramDeployment> for _ProgramDeployment { fn from(pd: &ProgramDeployment) -> Self { Self { address: pd.address.to_string(), + path: pd.path.clone(), idl: pd.idl.clone(), } } @@ -377,4 +459,40 @@ pub struct ProgramWorkspace { pub idl: Idl, } +#[derive(Debug, Serialize, Deserialize)] +pub struct AnchorPackage { + pub name: String, + pub address: String, + pub path: String, + pub idl: Option, +} + +impl AnchorPackage { + pub fn from(name: String, cfg: &WithPath) -> Result { + let cluster = &cfg.provider.cluster; + if cluster != &Cluster::Mainnet { + return Err(anyhow!("Publishing requires the mainnet cluster")); + } + let program_details = cfg + .programs + .get(cluster) + .ok_or_else(|| anyhow!("Program not provided in Anchor.toml"))? + .get(&name) + .ok_or_else(|| anyhow!("Program not provided in Anchor.toml"))?; + let path = program_details + .path + .clone() + // TODO: use a default path if one isn't provided? 
+ .ok_or_else(|| anyhow!("Path to program binary not provided"))?; + let idl = program_details.idl.clone(); + let address = program_details.address.to_string(); + Ok(Self { + name, + path, + address, + idl, + }) + } +} + serum_common::home_path!(WalletPath, ".config/solana/id.json"); diff --git a/cli/src/main.rs b/cli/src/lib.rs similarity index 69% rename from cli/src/main.rs rename to cli/src/lib.rs index 96b81c4c..c0e17a6a 100644 --- a/cli/src/main.rs +++ b/cli/src/lib.rs @@ -1,6 +1,4 @@ -//! CLI for workspace management of anchor programs. - -use crate::config::{Config, Program, ProgramWorkspace, WalletPath}; +use crate::config::{AnchorPackage, Config, ConfigOverride, Program, ProgramWorkspace, WithPath}; use anchor_client::Cluster; use anchor_lang::idl::{IdlAccount, IdlInstruction}; use anchor_lang::{AccountDeserialize, AnchorDeserialize, AnchorSerialize}; @@ -8,9 +6,11 @@ use anchor_syn::idl::Idl; use anyhow::{anyhow, Context, Result}; use clap::Clap; use flate2::read::ZlibDecoder; -use flate2::write::ZlibEncoder; +use flate2::write::{GzEncoder, ZlibEncoder}; use flate2::Compression; use rand::rngs::OsRng; +use reqwest::blocking::multipart::{Form, Part}; +use reqwest::blocking::Client; use serde::{Deserialize, Serialize}; use solana_client::rpc_client::RpcClient; use solana_client::rpc_config::RpcSendTransactionConfig; @@ -30,12 +30,12 @@ use std::path::{Path, PathBuf}; use std::process::{Child, Stdio}; use std::string::ToString; -mod config; -mod template; +pub mod config; +pub mod template; // Version of the docker image. -const VERSION: &str = env!("CARGO_PKG_VERSION"); -const DOCKER_BUILDER_VERSION: &str = VERSION; +pub const VERSION: &str = env!("CARGO_PKG_VERSION"); +pub const DOCKER_BUILDER_VERSION: &str = VERSION; #[derive(Debug, Clap)] #[clap(version = VERSION)] @@ -46,16 +46,6 @@ pub struct Opts { pub command: Command, } -#[derive(Debug, Clap)] -pub struct ConfigOverride { - /// Cluster override. 
- #[clap(global = true, long = "provider.cluster")] - cluster: Option, - /// Wallet override. - #[clap(global = true, long = "provider.wallet")] - wallet: Option, -} - #[derive(Debug, Clap)] pub enum Command { /// Initializes a workspace. @@ -74,6 +64,10 @@ pub enum Command { verifiable: bool, #[clap(short, long)] program_name: Option, + /// Version of the Solana toolchain to use. For --verifiable builds + /// only. + #[clap(short, long)] + solana_version: Option, }, /// Verifies the on-chain bytecode matches the locally compiled artifact. /// Run this command inside a program subdirectory, i.e., in the dir @@ -81,6 +75,12 @@ pub enum Command { Verify { /// The deployed program to compare against. program_id: Pubkey, + #[clap(short, long)] + program_name: Option, + /// Version of the Solana toolchain to use. For --verifiable builds + /// only. + #[clap(short, long)] + solana_version: Option, }, /// Runs integration tests against a localnetwork. Test { @@ -150,6 +150,16 @@ pub enum Command { /// The name of the script to run. script: String, }, + /// Saves an api token from the registry locally. + Login { + /// API access token. + token: String, + }, + /// Publishes a verified build to the Anchor registry. + Publish { + /// The name of the program to publish. 
+ program: String, + }, } #[derive(Debug, Clap)] @@ -230,8 +240,7 @@ pub enum ClusterCommand { List, } -fn main() -> Result<()> { - let opts = Opts::parse(); +pub fn entry(opts: Opts) -> Result<()> { match opts.command { Command::Init { name, typescript } => init(&opts.cfg_override, name, typescript), Command::New { name } => new(&opts.cfg_override, name), @@ -239,8 +248,21 @@ fn main() -> Result<()> { idl, verifiable, program_name, - } => build(&opts.cfg_override, idl, verifiable, program_name), - Command::Verify { program_id } => verify(&opts.cfg_override, program_id), + solana_version, + } => build( + &opts.cfg_override, + idl, + verifiable, + program_name, + solana_version, + None, + None, + ), + Command::Verify { + program_id, + program_name, + solana_version, + } => verify(&opts.cfg_override, program_id, program_name, solana_version), Command::Deploy { program_name } => deploy(&opts.cfg_override, program_name), Command::Upgrade { program_id, @@ -269,6 +291,8 @@ fn main() -> Result<()> { Command::Cluster { subcmd } => cluster(subcmd), Command::Shell => shell(&opts.cfg_override), Command::Run { script } => run(&opts.cfg_override, script), + Command::Login { token } => login(&opts.cfg_override, token), + Command::Publish { program } => publish(&opts.cfg_override, program), } } @@ -340,8 +364,8 @@ fn init(cfg_override: &ConfigOverride, name: String, typescript: bool) -> Result // Creates a new program crate in the `programs/` directory. 
fn new(cfg_override: &ConfigOverride, name: String) -> Result<()> { - with_workspace(cfg_override, |_cfg, path, _cargo| { - match path.parent() { + with_workspace(cfg_override, |cfg, _cargo| { + match cfg.path().parent() { None => { println!("Unable to make new program"); } @@ -368,26 +392,54 @@ fn new_program(name: &str) -> Result<()> { Ok(()) } -fn build( +pub fn build( cfg_override: &ConfigOverride, idl: Option, verifiable: bool, program_name: Option, + solana_version: Option, + stdout: Option, // Used for the package registry server. + stderr: Option, // Used for the package registry server. ) -> Result<()> { - let (cfg, path, cargo) = Config::discover(cfg_override)?.expect("Not in workspace."); + // Change directories to the given `program_name`, if given. + let (cfg, _cargo) = Config::discover(cfg_override)?.expect("Not in workspace."); - if let Some(program_name) = program_name { + let mut did_find_program = false; + if let Some(program_name) = program_name.as_ref() { for program in cfg.read_all_programs()? { - let p = program.path.file_name().unwrap().to_str().unwrap(); - if program_name.as_str() == p { - std::env::set_current_dir(&program.path)?; + let cargo_toml = program.path.join("Cargo.toml"); + if !cargo_toml.exists() { + return Err(anyhow!( + "Did not find Cargo.toml at the path: {}", + program.path.display() + )); + } + let p_lib_name = config::extract_lib_name(&cargo_toml)?; + if program_name.as_str() == p_lib_name { + let program_path = cfg + .path() + .parent() + .unwrap() + .canonicalize()? 
+ .join(program.path); + std::env::set_current_dir(&program_path)?; + did_find_program = true; + break; } } } + if !did_find_program && program_name.is_some() { + return Err(anyhow!( + "{} is not part of the workspace", + program_name.as_ref().unwrap() + )); + } + + let (cfg, cargo) = Config::discover(cfg_override)?.expect("Not in workspace."); let idl_out = match idl { Some(idl) => Some(PathBuf::from(idl)), None => { - let cfg_parent = match path.parent() { + let cfg_parent = match cfg.path().parent() { None => return Err(anyhow!("Invalid Anchor.toml")), Some(parent) => parent, }; @@ -395,9 +447,23 @@ fn build( Some(cfg_parent.join("target/idl")) } }; + + let solana_version = match solana_version.is_some() { + true => solana_version, + false => cfg.solana_version.clone(), + }; + match cargo { - None => build_all(&cfg, path, idl_out, verifiable)?, - Some(ct) => build_cwd(path.as_path(), ct, idl_out, verifiable)?, + None => build_all(&cfg, cfg.path(), idl_out, verifiable, solana_version)?, + Some(ct) => build_cwd( + &cfg, + ct, + idl_out, + verifiable, + solana_version, + stdout, + stderr, + )?, }; set_workspace_dir_or_exit(); @@ -406,21 +472,25 @@ fn build( } fn build_all( - cfg: &Config, - cfg_path: PathBuf, + cfg: &WithPath, + cfg_path: &Path, idl_out: Option, verifiable: bool, + solana_version: Option, ) -> Result<()> { let cur_dir = std::env::current_dir()?; let r = match cfg_path.parent() { None => Err(anyhow!("Invalid Anchor.toml at {}", cfg_path.display())), - Some(parent) => { - for p in cfg.get_program_list(parent.join("programs"))? { + Some(_parent) => { + for p in cfg.get_program_list()? { build_cwd( - cfg_path.as_path(), + cfg, p.join("Cargo.toml"), idl_out.clone(), verifiable, + solana_version.clone(), + None, + None, )?; } Ok(()) @@ -432,10 +502,13 @@ fn build_all( // Runs the build command outside of a workspace. 
fn build_cwd( - cfg_path: &Path, + cfg: &WithPath, cargo_toml: PathBuf, idl_out: Option, verifiable: bool, + solana_version: Option, + stdout: Option, + stderr: Option, ) -> Result<()> { match cargo_toml.parent() { None => return Err(anyhow!("Unable to find parent")), @@ -443,51 +516,207 @@ fn build_cwd( }; match verifiable { false => _build_cwd(idl_out), - true => build_cwd_verifiable(cfg_path.parent().unwrap()), + true => build_cwd_verifiable(cfg, cargo_toml, solana_version, stdout, stderr), } } // Builds an anchor program in a docker image and copies the build artifacts // into the `target/` directory. -fn build_cwd_verifiable(workspace_dir: &Path) -> Result<()> { - // Docker vars. - let container_name = "anchor-program"; - let image_name = format!("projectserum/build:v{}", DOCKER_BUILDER_VERSION); - let volume_mount = format!( - "{}:/workdir", - workspace_dir.canonicalize()?.display().to_string() - ); - +fn build_cwd_verifiable( + cfg: &WithPath, + cargo_toml: PathBuf, + solana_version: Option, + stdout: Option, + stderr: Option, +) -> Result<()> { // Create output dirs. + let workspace_dir = cfg.path().parent().unwrap().canonicalize()?; fs::create_dir_all(workspace_dir.join("target/deploy"))?; fs::create_dir_all(workspace_dir.join("target/idl"))?; - // Build the program in docker. + let container_name = "anchor-program"; + + // Build the binary in docker. + let result = docker_build( + cfg, + container_name, + cargo_toml, + solana_version, + stdout, + stderr, + ); + + // Wipe the generated docker-target dir. 
+ println!("Cleaning up the docker target directory"); let exit = std::process::Command::new("docker") .args(&[ - "run", - "--name", + "exec", container_name, - "-v", - &volume_mount, - &image_name, - "anchor", - "build", + "rm", + "-rf", + "/workdir/docker-target", ]) .stdout(Stdio::inherit()) .stderr(Stdio::inherit()) .output() + .map_err(|e| anyhow::format_err!("Docker rm docker-target failed: {}", e.to_string()))?; + if !exit.status.success() { + return Err(anyhow!("Failed to build program")); + } + + // Remove the docker image. + println!("Removing the docker image"); + let exit = std::process::Command::new("docker") + .args(&["rm", "-f", container_name]) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()) + .output() .map_err(|e| anyhow::format_err!("{}", e.to_string()))?; if !exit.status.success() { - println!("Error building program"); - return Ok(()); + println!("Unable to remove docker container"); + std::process::exit(exit.status.code().unwrap_or(1)); } - let idl = extract_idl("src/lib.rs")?; + // Build the idl. + if let Ok(Some(idl)) = extract_idl("src/lib.rs") { + println!("Extracting the IDL"); + let out_file = workspace_dir.join(format!("target/idl/{}.json", idl.name)); + write_idl(&idl, OutFile::File(out_file))?; + } + + result +} + +fn docker_build( + cfg: &WithPath, + container_name: &str, + cargo_toml: PathBuf, + solana_version: Option, + stdout: Option, + stderr: Option, +) -> Result<()> { + let binary_name = config::extract_lib_name(&cargo_toml)?; + + // Docker vars. + let image_name = cfg.docker(); + let volume_mount = format!( + "{}:/workdir", + cfg.path().parent().unwrap().canonicalize()?.display() + ); + println!("Using image {:?}", image_name); + + // Start the docker image running detached in the background. 
+ println!("Run docker image"); + let exit = std::process::Command::new("docker") + .args(&[ + "run", + "-it", + "-d", + "--name", + container_name, + "--env", + "CARGO_TARGET_DIR=/workdir/docker-target", + "-v", + &volume_mount, + &image_name, + "bash", + ]) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()) + .output() + .map_err(|e| anyhow::format_err!("Docker build failed: {}", e.to_string()))?; + if !exit.status.success() { + return Err(anyhow!("Failed to build program")); + } + + // Set the solana version in the container, if given. Otherwise use the + // default. + if let Some(solana_version) = solana_version { + println!("Using solana version: {}", solana_version); + + // Fetch the installer. + let exit = std::process::Command::new("docker") + .args(&[ + "exec", + container_name, + "curl", + "-sSfL", + &format!("https://release.solana.com/v{0}/install", solana_version,), + "-o", + "solana_installer.sh", + ]) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()) + .output() + .map_err(|e| anyhow!("Failed to set solana version: {:?}", e))?; + if !exit.status.success() { + return Err(anyhow!("Failed to set solana version")); + } + + // Run the installer. + let exit = std::process::Command::new("docker") + .args(&["exec", container_name, "sh", "solana_installer.sh"]) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()) + .output() + .map_err(|e| anyhow!("Failed to set solana version: {:?}", e))?; + if !exit.status.success() { + return Err(anyhow!("Failed to set solana version")); + } + } + + let manifest_path = pathdiff::diff_paths( + cargo_toml.canonicalize()?, + cfg.path().parent().unwrap().canonicalize()?, + ) + .ok_or_else(|| anyhow!("Unable to diff paths"))?; + println!( + "Building {} manifest: {:?}", + binary_name, + manifest_path.display().to_string() + ); + + // Execute the build. 
+ let exit = std::process::Command::new("docker") + .args(&[ + "exec", + container_name, + "cargo", + "build-bpf", + "--manifest-path", + &manifest_path.display().to_string(), + ]) + .stdout(match stdout { + None => Stdio::inherit(), + Some(f) => f.into(), + }) + .stderr(match stderr { + None => Stdio::inherit(), + Some(f) => f.into(), + }) + .output() + .map_err(|e| anyhow::format_err!("Docker build failed: {}", e.to_string()))?; + if !exit.status.success() { + return Err(anyhow!("Failed to build program")); + } // Copy the binary out of the docker image. - let out_file = format!("../../target/deploy/{}.so", idl.name); - let bin_artifact = format!("{}:/workdir/target/deploy/{}.so", container_name, idl.name); + println!("Copying out the build artifacts"); + let out_file = cfg + .path() + .parent() + .unwrap() + .canonicalize()? + .join(format!("target/deploy/{}.so", binary_name)) + .display() + .to_string(); + + // This requires the target directory of any built program to be located at + // the root of the workspace. + let bin_artifact = format!( + "{}:/workdir/docker-target/deploy/{}.so", + container_name, binary_name + ); let exit = std::process::Command::new("docker") .args(&["cp", &bin_artifact, &out_file]) .stdout(Stdio::inherit()) @@ -495,33 +724,12 @@ fn build_cwd_verifiable(workspace_dir: &Path) -> Result<()> { .output() .map_err(|e| anyhow::format_err!("{}", e.to_string()))?; if !exit.status.success() { - return Ok(()); - } - - // Copy the idl out of the docker image. - let out_file = format!("../../target/idl/{}.json", idl.name); - let idl_artifact = format!("{}:/workdir/target/idl/{}.json", container_name, idl.name); - let exit = std::process::Command::new("docker") - .args(&["cp", &idl_artifact, &out_file]) - .stdout(Stdio::inherit()) - .stderr(Stdio::inherit()) - .output() - .map_err(|e| anyhow::format_err!("{}", e.to_string()))?; - if !exit.status.success() { - return Ok(()); - } - - // Remove the docker image. 
- let exit = std::process::Command::new("docker") - .args(&["rm", container_name]) - .stdout(Stdio::inherit()) - .stderr(Stdio::inherit()) - .output() - .map_err(|e| anyhow::format_err!("{}", e.to_string()))?; - if !exit.status.success() { - std::process::exit(exit.status.code().unwrap_or(1)); + return Err(anyhow!( + "Failed to copy binary out of docker. Is the target directory set correctly?" + )); } + // Done. Ok(()) } @@ -537,41 +745,115 @@ fn _build_cwd(idl_out: Option) -> Result<()> { } // Always assume idl is located ar src/lib.rs. - let idl = extract_idl("src/lib.rs")?; + if let Some(idl) = extract_idl("src/lib.rs")? { + let out = match idl_out { + None => PathBuf::from(".").join(&idl.name).with_extension("json"), + Some(o) => PathBuf::from(&o.join(&idl.name).with_extension("json")), + }; - let out = match idl_out { - None => PathBuf::from(".").join(&idl.name).with_extension("json"), - Some(o) => PathBuf::from(&o.join(&idl.name).with_extension("json")), - }; + write_idl(&idl, OutFile::File(out))?; + } - write_idl(&idl, OutFile::File(out)) + Ok(()) } -fn verify(cfg_override: &ConfigOverride, program_id: Pubkey) -> Result<()> { - let (cfg, _path, cargo) = Config::discover(cfg_override)?.expect("Not in workspace."); - let cargo = cargo.ok_or_else(|| anyhow!("Must be inside program subdirectory."))?; +fn verify( + cfg_override: &ConfigOverride, + program_id: Pubkey, + program_name: Option, + solana_version: Option, +) -> Result<()> { + // Change directories to the given `program_name`, if given. + let (cfg, _cargo) = Config::discover(cfg_override)?.expect("Not in workspace."); + let mut did_find_program = false; + if let Some(program_name) = program_name.as_ref() { + for program in cfg.read_all_programs()? 
{ + let cargo_toml = program.path.join("Cargo.toml"); + if !cargo_toml.exists() { + return Err(anyhow!( + "Did not find Cargo.toml at the path: {}", + program.path.display() + )); + } + let p_lib_name = config::extract_lib_name(&cargo_toml)?; + if program_name.as_str() == p_lib_name { + let program_path = cfg + .path() + .parent() + .unwrap() + .canonicalize()? + .join(program.path); + std::env::set_current_dir(&program_path)?; + did_find_program = true; + break; + } + } + } + if !did_find_program && program_name.is_some() { + return Err(anyhow!( + "{} is not part of the workspace", + program_name.as_ref().unwrap() + )); + } + + // Proceed with the command. + let (cfg, cargo) = Config::discover(cfg_override)?.expect("Not in workspace."); + let cargo = cargo.ok_or_else(|| { + anyhow!("Must be inside program subdirectory if no program name is given.") + })?; let program_dir = cargo.parent().unwrap(); // Build the program we want to verify. let cur_dir = std::env::current_dir()?; - build(cfg_override, None, true, None)?; + build( + cfg_override, + None, + true, + None, + match solana_version.is_some() { + true => solana_version, + false => cfg.solana_version.clone(), + }, + None, + None, + )?; std::env::set_current_dir(&cur_dir)?; - let local_idl = extract_idl("src/lib.rs")?; - // Verify binary. - let bin_path = program_dir - .join("../../target/deploy/") - .join(format!("{}.so", local_idl.name)); - let is_buffer = verify_bin(program_id, &bin_path, cfg.provider.cluster.url())?; + let binary_name = { + let cargo_toml = cargo_toml::Manifest::from_path(&cargo)?; + match cargo_toml.lib { + None => { + cargo_toml + .package + .ok_or_else(|| anyhow!("Package section not provided"))? + .name + } + Some(lib) => lib.name.ok_or_else(|| anyhow!("Name not provided"))?, + } + }; + let bin_path = cfg + .path() + .parent() + .ok_or_else(|| anyhow!("Unable to find workspace root"))? 
+ .join("target/deploy/") + .join(format!("{}.so", binary_name)); + + let bin_ver = verify_bin(program_id, &bin_path, cfg.provider.cluster.url())?; + if !bin_ver.is_verified { + println!("Error: Binaries don't match"); + std::process::exit(1); + } // Verify IDL (only if it's not a buffer account). - if !is_buffer { - std::env::set_current_dir(program_dir)?; - let deployed_idl = fetch_idl(cfg_override, program_id)?; - if local_idl != deployed_idl { - println!("Error: IDLs don't match"); - std::process::exit(1); + if let Some(local_idl) = extract_idl("src/lib.rs")? { + if bin_ver.state != BinVerificationState::Buffer { + std::env::set_current_dir(program_dir)?; + let deployed_idl = fetch_idl(cfg_override, program_id)?; + if local_idl != deployed_idl { + println!("Error: IDLs don't match"); + std::process::exit(1); + } } } @@ -580,11 +862,11 @@ fn verify(cfg_override: &ConfigOverride, program_id: Pubkey) -> Result<()> { Ok(()) } -fn verify_bin(program_id: Pubkey, bin_path: &Path, cluster: &str) -> Result { +pub fn verify_bin(program_id: Pubkey, bin_path: &Path, cluster: &str) -> Result { let client = RpcClient::new(cluster.to_string()); // Get the deployed build artifacts. - let (deployed_bin, is_buffer) = { + let (deployed_bin, state) = { let account = client .get_account_with_commitment(&program_id, CommitmentConfig::default())? .value @@ -592,18 +874,35 @@ fn verify_bin(program_id: Pubkey, bin_path: &Path, cluster: &str) -> Result ( - client + } => { + let account = client .get_account_with_commitment(&programdata_address, CommitmentConfig::default())? .value - .map_or(Err(anyhow!("Account not found")), Ok)? - .data[UpgradeableLoaderState::programdata_data_offset().unwrap_or(0)..] - .to_vec(), - false, - ), + .map_or(Err(anyhow!("Account not found")), Ok)?; + let bin = account.data + [UpgradeableLoaderState::programdata_data_offset().unwrap_or(0)..] 
+ .to_vec(); + + if let UpgradeableLoaderState::ProgramData { + slot, + upgrade_authority_address, + } = account.state()? + { + let state = BinVerificationState::ProgramData { + slot, + upgrade_authority_address, + }; + (bin, state) + } else { + return Err(anyhow!("Expected program data")); + } + } UpgradeableLoaderState::Buffer { .. } => { let offset = UpgradeableLoaderState::buffer_data_offset().unwrap_or(0); - (account.data[offset..].to_vec(), true) + ( + account.data[offset..].to_vec(), + BinVerificationState::Buffer, + ) } _ => return Err(anyhow!("Invalid program id")), } @@ -622,12 +921,24 @@ fn verify_bin(program_id: Pubkey, bin_path: &Path, cluster: &str) -> Result, + }, } // Fetches an IDL for the given program_id. @@ -660,7 +971,7 @@ fn fetch_idl(cfg_override: &ConfigOverride, idl_addr: Pubkey) -> Result { serde_json::from_slice(&s[..]).map_err(Into::into) } -fn extract_idl(file: &str) -> Result { +fn extract_idl(file: &str) -> Result> { let file = shellexpand::tilde(file); anchor_syn::idl::file::parse(&*file) } @@ -695,7 +1006,7 @@ fn idl(cfg_override: &ConfigOverride, subcmd: IdlCommand) -> Result<()> { } fn idl_init(cfg_override: &ConfigOverride, program_id: Pubkey, idl_filepath: String) -> Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let keypair = cfg.provider.wallet.to_string(); let bytes = std::fs::read(idl_filepath)?; @@ -713,7 +1024,7 @@ fn idl_write_buffer( program_id: Pubkey, idl_filepath: String, ) -> Result { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let keypair = cfg.provider.wallet.to_string(); let bytes = std::fs::read(idl_filepath)?; @@ -729,7 +1040,7 @@ fn idl_write_buffer( } fn idl_set_buffer(cfg_override: &ConfigOverride, program_id: Pubkey, buffer: Pubkey) -> Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let keypair = 
solana_sdk::signature::read_keypair_file(&cfg.provider.wallet.to_string()) .map_err(|_| anyhow!("Unable to read keypair file"))?; let client = RpcClient::new(cfg.provider.cluster.url().to_string()); @@ -783,7 +1094,7 @@ fn idl_upgrade( } fn idl_authority(cfg_override: &ConfigOverride, program_id: Pubkey) -> Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let client = RpcClient::new(cfg.provider.cluster.url().to_string()); let idl_address = { let account = client @@ -813,7 +1124,7 @@ fn idl_set_authority( address: Option, new_authority: Pubkey, ) -> Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { // Misc. let idl_address = match address { None => IdlAccount::address(&program_id), @@ -945,7 +1256,7 @@ fn idl_write(cfg: &Config, program_id: &Pubkey, idl: &Idl, idl_address: Pubkey) } fn idl_parse(file: String, out: Option) -> Result<()> { - let idl = extract_idl(&file)?; + let idl = extract_idl(&file)?.ok_or_else(|| anyhow!("IDL not parsed"))?; let out = match out { None => OutFile::Stdout, Some(out) => OutFile::File(PathBuf::from(out)), @@ -984,10 +1295,10 @@ fn test( skip_build: bool, extra_args: Vec, ) -> Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { // Build if needed. if !skip_build { - build(cfg_override, None, false, None)?; + build(cfg_override, None, false, None, None, None, None)?; } // Run the deploy against the cluster in two cases: @@ -1064,15 +1375,15 @@ fn test( // Returns the solana-test-validator flags to embed the workspace programs // in the genesis block. This allows us to run tests without every deploying. 
-fn genesis_flags(cfg: &Config) -> Result> { - let clusters = cfg.clusters.get(&Cluster::Localnet); +fn genesis_flags(cfg: &WithPath) -> Result> { + let programs = cfg.programs.get(&Cluster::Localnet); let mut flags = Vec::new(); for mut program in cfg.read_all_programs()? { let binary_path = program.binary_path().display().to_string(); - let address = clusters - .and_then(|m| m.get(&program.idl.name)) + let address = programs + .and_then(|m| m.get(&program.lib_name)) .map(|deployment| deployment.address.to_string()) .unwrap_or_else(|| { let kp = Keypair::generate(&mut OsRng); @@ -1083,14 +1394,16 @@ fn genesis_flags(cfg: &Config) -> Result> { flags.push(address.clone()); flags.push(binary_path); - // Add program address to the IDL. - program.idl.metadata = Some(serde_json::to_value(IdlTestMetadata { address })?); + if let Some(mut idl) = program.idl.as_mut() { + // Add program address to the IDL. + idl.metadata = Some(serde_json::to_value(IdlTestMetadata { address })?); - // Persist it. - let idl_out = PathBuf::from("target/idl") - .join(&program.idl.name) - .with_extension("json"); - write_idl(&program.idl, OutFile::File(idl_out))?; + // Persist it. 
+ let idl_out = PathBuf::from("target/idl") + .join(&idl.name) + .with_extension("json"); + write_idl(idl, OutFile::File(idl_out))?; + } } if let Some(test) = cfg.test.as_ref() { for entry in &test.genesis { @@ -1102,7 +1415,7 @@ fn genesis_flags(cfg: &Config) -> Result> { Ok(flags) } -fn stream_logs(config: &Config) -> Result> { +fn stream_logs(config: &WithPath) -> Result> { let program_logs_dir = ".anchor/program-logs"; if Path::new(program_logs_dir).exists() { std::fs::remove_dir_all(program_logs_dir)?; @@ -1121,7 +1434,7 @@ fn stream_logs(config: &Config) -> Result> { let log_file = File::create(format!( "{}/{}.{}.log", - program_logs_dir, metadata.address, program.idl.name + program_logs_dir, metadata.address, program.lib_name, ))?; let stdio = std::process::Stdio::from(log_file); let child = std::process::Command::new("solana") @@ -1195,7 +1508,7 @@ fn _deploy( cfg_override: &ConfigOverride, program_str: Option, ) -> Result> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let url = cfg.provider.cluster.url().to_string(); let keypair = cfg.provider.wallet.to_string(); @@ -1246,16 +1559,18 @@ fn _deploy( std::process::exit(exit.status.code().unwrap_or(1)); } - // Add program address to the IDL. - program.idl.metadata = Some(serde_json::to_value(IdlTestMetadata { - address: program_kp.pubkey().to_string(), - })?); + if let Some(mut idl) = program.idl.as_mut() { + // Add program address to the IDL. + idl.metadata = Some(serde_json::to_value(IdlTestMetadata { + address: program_kp.pubkey().to_string(), + })?); - // Persist it. - let idl_out = PathBuf::from("target/idl") - .join(&program.idl.name) - .with_extension("json"); - write_idl(&program.idl, OutFile::File(idl_out))?; + // Persist it. 
+ let idl_out = PathBuf::from("target/idl") + .join(&idl.name) + .with_extension("json"); + write_idl(idl, OutFile::File(idl_out))?; + } programs.push((program_kp.pubkey(), program)) } @@ -1274,7 +1589,7 @@ fn upgrade( let path: PathBuf = program_filepath.parse().unwrap(); let program_filepath = path.canonicalize()?.display().to_string(); - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let exit = std::process::Command::new("solana") .arg("program") .arg("deploy") @@ -1303,17 +1618,27 @@ fn launch( program_name: Option, ) -> Result<()> { // Build and deploy. - build(cfg_override, None, verifiable, program_name.clone())?; + build( + cfg_override, + None, + verifiable, + program_name.clone(), + None, + None, + None, + )?; let programs = _deploy(cfg_override, program_name)?; - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let keypair = cfg.provider.wallet.to_string(); // Add metadata to all IDLs. for (address, program) in programs { - // Store the IDL on chain. - let idl_address = create_idl_account(cfg, &keypair, &address, &program.idl)?; - println!("IDL account created: {}", idl_address.to_string()); + if let Some(idl) = program.idl.as_ref() { + // Store the IDL on chain. + let idl_address = create_idl_account(cfg, &keypair, &address, idl)?; + println!("IDL account created: {}", idl_address.to_string()); + } } // Run migration script. @@ -1331,7 +1656,7 @@ fn launch( fn clear_program_keys(cfg_override: &ConfigOverride) -> Result<()> { let config = Config::discover(cfg_override) .unwrap_or_default() - .unwrap_or_default() + .unwrap() .0; for program in config.read_all_programs()? 
{ @@ -1476,7 +1801,7 @@ fn serialize_idl_ix(ix_inner: anchor_lang::idl::IdlInstruction) -> Result Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { println!("Running migration deploy script"); let url = cfg.provider.cluster.url().to_string(); @@ -1538,8 +1863,8 @@ fn set_workspace_dir_or_exit() { println!("Not in anchor workspace."); std::process::exit(1); } - Some((_cfg, cfg_path, _inside_cargo)) => { - match cfg_path.parent() { + Some((cfg, _inside_cargo)) => { + match cfg.path().parent() { None => { println!("Unable to make new program"); } @@ -1587,15 +1912,22 @@ fn cluster(_cmd: ClusterCommand) -> Result<()> { } fn shell(cfg_override: &ConfigOverride) -> Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let programs = { + // Create idl map from all workspace programs. let mut idls: HashMap = cfg .read_all_programs()? .iter() - .map(|program| (program.idl.name.clone(), program.idl.clone())) + .filter(|program| program.idl.is_some()) + .map(|program| { + ( + program.idl.as_ref().unwrap().name.clone(), + program.idl.clone().unwrap(), + ) + }) .collect(); // Insert all manually specified idls into the idl map. - if let Some(programs) = cfg.clusters.get(&cfg.provider.cluster) { + if let Some(programs) = cfg.programs.get(&cfg.provider.cluster) { let _ = programs .iter() .map(|(name, pd)| { @@ -1608,20 +1940,20 @@ fn shell(cfg_override: &ConfigOverride) -> Result<()> { }) .collect::>(); } - match cfg.clusters.get(&cfg.provider.cluster) { + // Finalize program list with all programs with IDLs. 
+ match cfg.programs.get(&cfg.provider.cluster) { None => Vec::new(), Some(programs) => programs .iter() - .map(|(name, program_deployment)| ProgramWorkspace { - name: name.to_string(), - program_id: program_deployment.address, - idl: match idls.get(name) { - None => { - println!("Unable to find IDL for {}", name); - std::process::exit(1); - } - Some(idl) => idl.clone(), - }, + .filter_map(|(name, program_deployment)| { + Some(ProgramWorkspace { + name: name.to_string(), + program_id: program_deployment.address, + idl: match idls.get(name) { + None => return None, + Some(idl) => idl.clone(), + }, + }) }) .collect::>(), } @@ -1647,7 +1979,7 @@ fn shell(cfg_override: &ConfigOverride) -> Result<()> { } fn run(cfg_override: &ConfigOverride, script: String) -> Result<()> { - with_workspace(cfg_override, |cfg, _path, _cargo| { + with_workspace(cfg_override, |cfg, _cargo| { let script = cfg .scripts .get(&script) @@ -1666,6 +1998,134 @@ fn run(cfg_override: &ConfigOverride, script: String) -> Result<()> { }) } +fn login(_cfg_override: &ConfigOverride, token: String) -> Result<()> { + let dir = shellexpand::tilde("~/.config/anchor"); + if !Path::new(&dir.to_string()).exists() { + fs::create_dir(dir.to_string())?; + } + + std::env::set_current_dir(dir.to_string())?; + + // Freely overwrite the entire file since it's not used for anything else. + let mut file = File::create("credentials")?; + file.write_all(template::credentials(&token).as_bytes())?; + Ok(()) +} + +fn publish(cfg_override: &ConfigOverride, program_name: String) -> Result<()> { + // Discover the various workspace configs. + let (cfg, _cargo_path) = Config::discover(cfg_override)?.expect("Not in workspace."); + + if !Path::new("Cargo.lock").exists() { + return Err(anyhow!("Cargo.lock must exist for a verifiable build")); + } + + println!("Publishing will make your code public. Are you sure? 
 Enter (yes)/no:"); + + let answer = std::io::stdin().lock().lines().next().unwrap().unwrap(); + if answer != "yes" { + println!("Aborting"); + return Ok(()); + } + + let anchor_package = AnchorPackage::from(program_name.clone(), &cfg)?; + let anchor_package_bytes = serde_json::to_vec(&anchor_package)?; + + // Build the program before sending it to the server. + build( + cfg_override, + None, + true, + Some(program_name.clone()), + cfg.solana_version.clone(), + None, + None, + )?; + + // Set directory to top of the workspace. + let workspace_dir = cfg.path().parent().unwrap(); + std::env::set_current_dir(workspace_dir)?; + + // Create the workspace tarball. + let dot_anchor = workspace_dir.join(".anchor"); + fs::create_dir_all(&dot_anchor)?; + let tarball_filename = dot_anchor.join(format!("{}.tar.gz", program_name)); + let tar_gz = File::create(&tarball_filename)?; + let enc = GzEncoder::new(tar_gz, Compression::default()); + let mut tar = tar::Builder::new(enc); + + // Files that will always be included if they exist. + tar.append_path("Anchor.toml")?; + tar.append_path("Cargo.lock")?; + if Path::new("Cargo.toml").exists() { + tar.append_path("Cargo.toml")?; + } + if Path::new("LICENSE").exists() { + tar.append_path("LICENSE")?; + } + if Path::new("README.md").exists() { + tar.append_path("README.md")?; + } + + // All workspace programs. + for path in cfg.get_program_list()? { + let mut relative_path = pathdiff::diff_paths(path, cfg.path().parent().unwrap()) + .ok_or_else(|| anyhow!("Unable to diff paths"))?; + + // HACK for workspaces with single programs. Change this. + if relative_path.display().to_string() == *"" { + relative_path = "src".into(); + } + tar.append_dir_all(relative_path.clone(), relative_path)?; + } + tar.into_inner()?; + + // Upload the tarball to the server. 
+ let token = registry_api_token(cfg_override)?; + let form = Form::new() + .part("manifest", Part::bytes(anchor_package_bytes)) + .part("workspace", { + let file = File::open(&tarball_filename)?; + Part::reader(file) + }); + let client = Client::new(); + let resp = client + .post(&format!("{}/api/v0/build", cfg.registry.url)) + .bearer_auth(token) + .multipart(form) + .send()?; + + if resp.status() == 200 { + println!("Build triggered"); + } else { + println!( + "{:?}", + resp.text().unwrap_or_else(|_| "Server error".to_string()) + ); + } + + Ok(()) +} + +fn registry_api_token(_cfg_override: &ConfigOverride) -> Result { + #[derive(Debug, Deserialize)] + struct Registry { + token: String, + } + #[derive(Debug, Deserialize)] + struct Credentials { + registry: Registry, + } + let filename = shellexpand::tilde("~/.config/anchor/credentials"); + let mut file = File::open(filename.to_string())?; + let mut contents = String::new(); + file.read_to_string(&mut contents)?; + + let credentials_toml: Credentials = toml::from_str(&contents)?; + + Ok(credentials_toml.registry.token) +} + // with_workspace ensures the current working directory is always the top level // workspace directory, i.e., where the `Anchor.toml` file is located, before // and after the closure invocation. @@ -1674,17 +2134,17 @@ fn run(cfg_override: &ConfigOverride, script: String) -> Result<()> { // to be outside the workspace. Doing so will have undefined behavior. 
fn with_workspace( cfg_override: &ConfigOverride, - f: impl FnOnce(&Config, PathBuf, Option) -> R, + f: impl FnOnce(&WithPath, Option) -> R, ) -> R { set_workspace_dir_or_exit(); clear_program_keys(cfg_override).unwrap(); - let (cfg, cfg_path, cargo_toml) = Config::discover(cfg_override) + let (cfg, cargo_toml) = Config::discover(cfg_override) .expect("Previously set the workspace dir") .expect("Anchor.toml must always exist"); - let r = f(&cfg, cfg_path, cargo_toml); + let r = f(&cfg, cargo_toml); set_workspace_dir_or_exit(); clear_program_keys(cfg_override).unwrap(); diff --git a/cli/src/template.rs b/cli/src/template.rs index ad391c04..21118e89 100644 --- a/cli/src/template.rs +++ b/cli/src/template.rs @@ -11,6 +11,15 @@ members = [ "# } +pub fn credentials(token: &str) -> String { + format!( + r#"[registry] +token = "{}" +"#, + token + ) +} + pub fn cargo_toml(name: &str) -> String { format!( r#"[package] diff --git a/examples/cfo/Anchor.toml b/examples/cfo/Anchor.toml index c84f0eaa..7e1ee1eb 100644 --- a/examples/cfo/Anchor.toml +++ b/examples/cfo/Anchor.toml @@ -2,7 +2,7 @@ cluster = "localnet" wallet = "~/.config/solana/id.json" -[clusters.localnet] +[programs.localnet] registry = { address = "GrAkKfEpTKQuVHG2Y97Y2FF4i7y7Q5AHLK94JBy7Y5yv", idl = "./deps/stake/target/idl/registry.json" } lockup = { address = "6ebQNeTPZ1j7k3TtkCCtEPRvG7GQsucQrZ7sSEDQi9Ks", idl = "./deps/stake/target/idl/lockup.json" } diff --git a/examples/misc/Anchor.toml b/examples/misc/Anchor.toml index 9afebcc1..bff6e4ee 100644 --- a/examples/misc/Anchor.toml +++ b/examples/misc/Anchor.toml @@ -7,7 +7,7 @@ address = "FtMNMKp9DZHKWUyVAsj3Q5QV8ow4P3fUPP7ZrWEQJzKr" program = "./target/deploy/misc.so" [workspace] -exclude = ["shared"] +exclude = ["programs/shared"] [scripts] test = "mocha -t 1000000 tests/" diff --git a/examples/typescript/Anchor.toml b/examples/typescript/Anchor.toml index 98c23964..8f3a162a 100644 --- a/examples/typescript/Anchor.toml +++ b/examples/typescript/Anchor.toml 
@@ -3,8 +3,7 @@ cluster = "localnet" wallet = "~/.config/solana/id.json" [workspace] -members = ["typescript"] -exclude = ["typescript"] +members = ["programs/typescript"] [scripts] test = "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" diff --git a/examples/zero-copy/Anchor.toml b/examples/zero-copy/Anchor.toml index 721554fe..cfd93f02 100644 --- a/examples/zero-copy/Anchor.toml +++ b/examples/zero-copy/Anchor.toml @@ -3,7 +3,7 @@ cluster = "localnet" wallet = "~/.config/solana/id.json" [workspace] -members = ["zero-copy"] +members = ["programs/zero-copy"] [scripts] test = "mocha -t 1000000 tests/" diff --git a/lang/syn/src/idl/file.rs b/lang/syn/src/idl/file.rs index c36673e0..f0eb0bad 100644 --- a/lang/syn/src/idl/file.rs +++ b/lang/syn/src/idl/file.rs @@ -13,10 +13,14 @@ const DERIVE_NAME: &str = "Accounts"; const ERROR_CODE_OFFSET: u32 = 300; // Parse an entire interface file. -pub fn parse(filename: impl AsRef) -> Result { +pub fn parse(filename: impl AsRef) -> Result> { let ctx = CrateContext::parse(filename)?; - let p = program::parse(parse_program_mod(&ctx))?; + let program_mod = match parse_program_mod(&ctx) { + None => return Ok(None), + Some(m) => m, + }; + let p = program::parse(program_mod)?; let accs = parse_account_derives(&ctx); @@ -218,7 +222,7 @@ pub fn parse(filename: impl AsRef) -> Result { } } - Ok(Idl { + Ok(Some(Idl { version: "0.0.0".to_string(), name: p.name.to_string(), state, @@ -232,11 +236,11 @@ pub fn parse(filename: impl AsRef) -> Result { }, errors: error_codes, metadata: None, - }) + })) } // Parse the main program mod. 
-fn parse_program_mod(ctx: &CrateContext) -> syn::ItemMod { +fn parse_program_mod(ctx: &CrateContext) -> Option { let root = ctx.root_module(); let mods = root .items() @@ -256,9 +260,9 @@ fn parse_program_mod(ctx: &CrateContext) -> syn::ItemMod { }) .collect::>(); if mods.len() != 1 { - panic!("Did not find program attribute"); + return None; } - mods[0].clone() + Some(mods[0].clone()) } fn parse_error_enum(ctx: &CrateContext) -> Option { diff --git a/ts/src/workspace.ts b/ts/src/workspace.ts index 892241c0..867ac1df 100644 --- a/ts/src/workspace.ts +++ b/ts/src/workspace.ts @@ -67,10 +67,10 @@ const workspace = new Proxy({} as any, { fs.readFileSync(path.join(projectRoot, "Anchor.toml"), "utf-8") ); const clusterId = anchorToml.provider.cluster; - if (anchorToml.clusters && anchorToml.clusters[clusterId]) { + if (anchorToml.programs && anchorToml.programs[clusterId]) { attachWorkspaceOverride( workspaceCache, - anchorToml.clusters[clusterId], + anchorToml.programs[clusterId], idlMap ); }